Dataset columns: max_stars_count (int64, ranging 301 to 224k), text (string, lengths 6 to 1.05M), token_count (int64, ranging 3 to 727k). Each row below is listed as max_stars_count, then text, then token_count.
max_stars_count: 740
#ifndef VG_ALGORITHMS_FIND_MIN_CUT_IN_GRAPH_HPP_INCLUDED
#define VG_ALGORITHMS_FIND_MIN_CUT_IN_GRAPH_HPP_INCLUDED

/**
 * \file min_cut_graph.hpp
 *
 * A randomized, probabilistic algorithm that finds the min cut
 * on an undirected, weighted graph that holds the indices of snarls.
 */

#include <vector>
#include <utility>
#include <unordered_map>
#include <map>
#include <unordered_set>

namespace vg {
namespace algorithms {

using namespace std;

struct Edge {
    int other;  // node at other end
    int weight;
};

struct Node {
    int weight;
    vector<Edge> edges;
};

struct Graph {
private:
    unordered_map<size_t, Node> nodes;

public:
    inline vector<size_t> get_node_ids() {
        vector<size_t> node_ids;
        for (auto& id_and_node : nodes) {
            size_t node_id = id_and_node.first;
            node_ids.push_back(node_id);
        }
        return node_ids;
    }

    inline size_t get_size() {
        return nodes.size();
    }

    inline Node& get_node_by_id(size_t node_id) {
        Node& node = nodes.at(node_id);
        return node;
    }

    inline void add_node(size_t id, Node node) {
        nodes.emplace(id, node);
    }

    // Only use this method for unit testing a linear graph with nodes that each
    // contain bidirectional edges between nodes. Since the prev node points to the
    // current node, and the current node points back to it, we can get the randomly
    // generated edge weight for prev <- current from prev -> other (current).
    inline size_t get_weight_using_other(Node prev_node, size_t other) {
        size_t to_return = 0;  // initialized so a missing edge cannot return garbage
        for (size_t i = 0; i < prev_node.edges.size(); i++) {
            if (prev_node.edges[i].other == other) {
                to_return = prev_node.edges[i].weight;
            }
        }
        return to_return;
    }
};

pair<vector<unordered_set<size_t>>, size_t> kargers_min_cut(Graph graph, const int seed);

pair<vector<unordered_set<size_t>>, size_t> compute_min_cut(Graph graph, const int seed);

// Assumption: handles one connected component at a time
// Assumption: all edge weights are > 0
vector<unordered_set<size_t>> min_cut_decomposition(Graph graph, const int seed);

}
}

#endif
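The header above only declares kargers_min_cut and compute_min_cut; for orientation, here is a minimal Python sketch of Karger's randomized contraction on a weighted graph. It is an illustrative toy under the header's own assumptions (one connected component, positive weights), not vg's implementation, and all names in it are invented for the example.

import random
from collections import defaultdict

def kargers_min_cut_sketch(edges, seed=0):
    """edges: list of (u, v, weight) tuples. Returns (groups, cut_weight).

    Repeatedly contracts a randomly chosen edge (picked with probability
    proportional to its weight) until two super-nodes remain; the edges
    left between them form a cut. A single run only finds the true min
    cut with some probability, so callers repeat with different seeds.
    Assumes a single connected component, as the header notes.
    """
    rng = random.Random(seed)
    parent = {}  # union-find over original node ids

    def find(x):
        parent.setdefault(x, x)
        while parent[x] != x:
            parent[x] = parent[parent[x]]  # path halving
            x = parent[x]
        return x

    live = list(edges)  # edges whose endpoints are in different super-nodes
    nodes = {n for u, v, _ in edges for n in (u, v)}
    n_super = len(nodes)
    while n_super > 2:
        # weight-proportional edge selection
        total = sum(w for _, _, w in live)
        r = rng.uniform(0, total)
        for u, v, w in live:
            r -= w
            if r <= 0:
                break
        parent[find(u)] = find(v)  # contract u into v
        n_super -= 1
        live = [(a, b, w) for a, b, w in live if find(a) != find(b)]

    cut_weight = sum(w for _, _, w in live)
    groups = defaultdict(set)
    for n in nodes:
        groups[find(n)].add(n)
    return list(groups.values()), cut_weight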
token_count: 1,293
max_stars_count: 4,145
<filename>examples/nlp/token_classification/data/create_punctuation_capitalization_tarred_dataset.py # Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import multiprocessing as mp from pathlib import Path from nemo.collections.nlp.data.token_classification.punctuation_capitalization_tarred_dataset import ( DEFAULT_CAPIT_LABEL_VOCAB_FILE_NAME, DEFAULT_PUNCT_LABEL_VOCAB_FILE_NAME, METADATA_CAPIT_LABEL_VOCAB_KEY, METADATA_PUNCT_LABEL_VOCAB_KEY, build_label_ids_from_list_of_labels, check_labels_for_being_unique_before_building_label_ids, create_tarred_dataset, ) """ A tarred dataset allows to train on large amounts without storing it all into memory simultaneously. In case of punctuation and capitalization model, tarred dataset is a directory which contains metadata file, tar files with batches, punct_label_vocab.csv and capit_label_vocab.csv files. A metadata file is a JSON file with 4 fields: 'num_batches', 'tar_files', 'punct_label_vocab_file', 'capit_label_vocab_file'. 'num_batches' (int) is a total number of batches in tarred dataset. 'tar_files' is a list of paths to tar files relative to directory containing the metadata file. 'punct_label_vocab_file' and 'capit_label_vocab_file' are paths to .csv files containing all unique punctuation and capitalization labels. Each label in these files is written in a separate line. The first labels in both files are equal and serve for padding and as neutral labels. Every tar file contains objects written using `webdataset.TarWriter`. Each object is a dictionary with two items: '__key__' and 'batch.pyd'. '__key__' is a name of a batch and 'batch.pyd' is a pickled dictionary which contains 'input_ids', 'subtokens_mask', 'punct_labels', 'capit_labels'. 'input_ids' is an array containing ids of source tokens, 'subtokens_mask' is a boolean array showing first tokens in words, 'punct_labels' and 'capit_labels' are arrays with ids of labels. Metadata file should be passed to constructor of `nemo.collections.nlp.data.token_classification.PunctuationCapitalizationTarredDataset` and the instance of the class will handle iteration and constructing masks and token types for BERT model. Example of usage: python create_punctuation_capitalization_tarred_dataset.py \ --text <PATH_TO_TEXT_FILE> \ --labels <PATH_TO_LABELS_FILE> \ --output_dir <PATH_TO_OUTPUT_DIR> \ --lines_per_dataset_fragment 10000 \ --tokens_in_batch 8000 \ --num_batches_per_tarfile 5 \ --tokenizer_name char \ --vocab_file <PATH_TO_CHAR_TOKENIZER_VOCABULARY> """ def get_args() -> argparse.Namespace: parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, description=f"A tarred dataset allows to train on large amounts without storing it all into memory " f"simultaneously. 
In case of punctuation and capitalization model, tarred dataset is a directory which " f"contains metadata file, tar files with batches, {DEFAULT_PUNCT_LABEL_VOCAB_FILE_NAME} and " f"{DEFAULT_CAPIT_LABEL_VOCAB_FILE_NAME} files. A metadata file is a JSON file with 4 fields: 'num_batches', " f"'tar_files', '{METADATA_PUNCT_LABEL_VOCAB_KEY}', '{METADATA_CAPIT_LABEL_VOCAB_KEY}'. 'num_batches' (int) is " f"a total number of batches in tarred dataset. 'tar_files' is a list of paths to tar files relative " f"to directory containing the metadata file. '{METADATA_PUNCT_LABEL_VOCAB_KEY}' and " f"'{METADATA_CAPIT_LABEL_VOCAB_KEY}' are paths to .csv files containing all unique punctuation and " f"capitalization labels. Each label in these files is written in a separate line. The first labels in both " f"files are equal and serve for padding and as neutral labels. Every tar file contains objects written " f"using `webdataset.TarWriter`. Each object is a dictionary with two items: '__key__' and 'batch.pyd'. " f"'__key__' is a name of a batch and 'batch.pyd' is a pickled dictionary which contains 'input_ids', " f"'subtokens_mask', 'punct_labels', 'capit_labels'. 'input_ids' is an array containing ids of source tokens, " f"'subtokens_mask' is a boolean array showing first tokens in words, 'punct_labels' and 'capit_labels' are " f"arrays with ids of labels. Metadata file should be passed to constructor of " "`nemo.collections.nlp.data.token_classification.PunctuationCapitalizationTarredDataset` and the instance of " "the class will handle iteration and constructing masks and token types for BERT model.", ) parser.add_argument( "--text", "-t", help="Path to source lowercased text without punctuation. Number of lines in `--text` file has to be equal " "to number of lines in `--labels` file.", type=Path, required=True, ) parser.add_argument( "--labels", "-L", type=Path, required=True, help="Path to file with labels in the format described here " "https://docs.nvidia.com/deeplearning/nemo/user-guide/docs/en/main/nlp/punctuation_and_capitalization.html#" "nemo-data-format . Number of lines in `--labels` file has to be equal to the number of lines in `--text` " "file.", ) parser.add_argument( "--output_dir", "-o", type=Path, required=True, help="Path to directory where .tar files, metadata file, label id files are stored.", ) parser.add_argument( "--max_seq_length", "-s", type=int, default=512, help="Maximum number of subtokens in an input sequence. A source sequence which contain too many subtokens are " "clipped to `--max_seq_length - 2` subtokens and then [CLS] token is prepended to the clipped sequence and " "[SEP] token is appended to the clipped sequence. The clipping is performed via removal of subtokens in the " "end of a source sequence.", ) parser.add_argument( "--tokens_in_batch", "-b", type=int, default=15000, help="Maximum number of tokens in a batch including [CLS], [SEP], [UNK], and [PAD] tokens. Before packing into " "batches source sequences are sorted by number of tokens in order to reduce number of pad tokens. So the " "number of sequences in a batch may be different.", ) parser.add_argument( "--lines_per_dataset_fragment", type=int, default=10 ** 6, help="A number of lines processed by one worker during creation of tarred dataset. A worker tokenizes " "`--lines_per_dataset_fragment` lines and keeps in RAM tokenized text labels before packing them into " "batches. 
Reducing `--lines_per_dataset_fragment` leads to reducing of the amount of memory required by this " "script.", ) parser.add_argument( "--num_batches_per_tarfile", type=int, default=1000, help="A number of batches saved in a tar file. If you increase `--num_batches_per_tarfile`, then there will " "be less tar files in the dataset. There cannot be less then `--num_batches_per_tarfile` batches in a tar " "file, and all excess batches are removed. Maximum number of discarded batches is " "`--num_batches_per_tarfile - 1`.", ) parser.add_argument( "--tokenizer_name", "-T", default="bert-base-uncased", help="Name of the tokenizer used for tokenization of source sequences. Possible options are 'sentencepiece', " "'word', 'char', HuggingFace tokenizers. For more options see function " "`nemo.collections.nlp.modules.common.get_tokenizer`. The tokenizer has to have properties `cls_id`, " "`pad_id`, `sep_id`, `unk_id`.", ) parser.add_argument( "--tokenizer_model", "-m", type=Path, help="Path to tokenizer model required for 'sentencepiece' tokenizer." ) parser.add_argument( "--vocab_file", "-v", type=Path, help="Path to vocabulary file which can be used in 'word', 'char', and HuggingFace tokenizers.", ) parser.add_argument( "--merges_file", "-M", type=Path, help="Path to merges file which can be used in HuggingFace tokenizers." ) parser.add_argument( "--special_token_names", "-n", nargs="+", help="Names of special tokens which may be passed to constructors of 'char', 'word', 'sentencepiece', and " "HuggingFace tokenizers.", ) parser.add_argument( "--special_token_values", "-V", nargs="+", help="Values of special tokens which may be passed to constructors of 'char', 'word', 'sentencepiece', and " "HuggingFace tokenizers.", ) parser.add_argument( "--use_fast_tokenizer", "-f", action="store_true", help="Whether to use fast HuggingFace tokenizer." ) parser.add_argument( "--pad_label", "-P", default='O', help="Pad label both for punctuation and capitalization. This label is also is used for marking words which " "do not need punctuation and capitalization. It is also a neutral label used for marking words which do " "not require punctuation and capitalization.", ) punct = parser.add_mutually_exclusive_group(required=False) punct.add_argument( "--punct_labels", "-p", nargs="+", help="All punctuation labels EXCEPT PAD LABEL. Punctuation labels are strings separated by spaces. " "Alternatively you can use parameter `--punct_label_vocab_file`. If none of parameters `--punct_labels` " "and `--punct_label_vocab_file` are provided, then punctuation label ids will be inferred from `--labels` " "file.", ) punct.add_argument( "--punct_label_vocab_file", type=Path, help="A path to file with punctuation labels. These labels include pad label. Pad label has to be the first " "label in the file. Each label is written on separate line. Alternatively you can use `--punct_labels` " "parameter. If none of parameters `--punct_labels` and `--punct_label_vocab_file` are provided, then " "punctuation label ids will be inferred from `--labels` file.", ) capit = parser.add_mutually_exclusive_group(required=False) capit.add_argument( "--capit_labels", "-c", nargs="+", help="All capitalization labels EXCEPT PAD LABEL. Capitalization labels are strings separated by spaces. " "Alternatively you can use parameter `--capit_label_vocab_file`. 
If none of parameters `--capit_labels` " "and `--capit_label_vocab_file` are provided, then capitalization label ids will be inferred from `--labels` " "file.", ) capit.add_argument( "--capit_label_vocab_file", type=Path, help="A path to file with capitalization labels. These labels include pad label. Pad label has to be the " "first label in the file. Each label is written on separate line. Alternatively you can use `--capit_labels` " "parameter. If none of parameters `--capit_labels` and `--capit_label_vocab_file` are provided, then " "capitalization label ids will be inferred from `--labels` file.", ) parser.add_argument( "--tar_file_prefix", "-x", default="punctuation_capitalization", help="A string from which tar file names start.", ) parser.add_argument( "--n_jobs", "-j", type=int, default=mp.cpu_count(), help="Number of workers for creating tarred dataset. By default it is equal to the number of CPU cores.", ) args = parser.parse_args() for name in [ "text", "labels", "output_dir", "tokenizer_model", "vocab_file", "merges_file", "punct_label_vocab_file", "capit_label_vocab_file", ]: if getattr(args, name) is not None: setattr(args, name, getattr(args, name).expanduser()) if args.special_token_names is not None or args.special_token_values is not None: if args.special_token_names is None: parser.error( "If you provide parameter `--special_token_values` you have to provide parameter " "`--special_token_names`." ) if args.special_token_values is None: parser.error( "If you provide parameter `--special_token_names` you have to provide parameter " "`--special_token_values`." ) if len(args.special_token_names) != len(args.special_token_values): parser.error( f"Parameters `--special_token_names` and `--special_token_values` have to have equal number of values " f"whereas parameter `--special_token_names` has {len(args.special_token_names)} values and parameter " f"`--special_token_values` has {len(args.special_token_values)} values." ) if len(set(args.special_token_names)) != len(args.special_token_names): for i in range(len(args.special_token_names) - 1): if args.special_token_names[i] in args.special_token_names[i + 1 :]: parser.error( f"Values of parameter `--special_token_names` has to be unique. Found duplicate value " f"'{args.special_token_names[i]}'." 
) if args.punct_labels is not None: check_labels_for_being_unique_before_building_label_ids( args.pad_label, args.punct_labels, '--pad_label', '--punct_labels', parser.error ) check_labels_for_being_unique_before_building_label_ids( args.pad_label, args.capit_labels, '--pad_label', '--capit_labels', parser.error ) return args def main() -> None: args = get_args() if args.special_token_names is None: special_tokens = None else: special_tokens = dict(zip(args.special_token_names, args.special_token_values)) if args.punct_labels is not None: punct_label_ids = build_label_ids_from_list_of_labels(args.pad_label, args.punct_labels) else: punct_label_ids = None if args.capit_labels is not None: capit_label_ids = build_label_ids_from_list_of_labels(args.pad_label, args.capit_labels) else: capit_label_ids = None create_tarred_dataset( args.text, args.labels, args.output_dir, args.max_seq_length, args.tokens_in_batch, args.lines_per_dataset_fragment, args.num_batches_per_tarfile, args.tokenizer_name, tokenizer_model=args.tokenizer_model, vocab_file=args.vocab_file, merges_file=args.merges_file, special_tokens=special_tokens, use_fast_tokenizer=args.use_fast_tokenizer, pad_label=args.pad_label, punct_label_ids=punct_label_ids, capit_label_ids=capit_label_ids, punct_label_vocab_file=args.punct_label_vocab_file, capit_label_vocab_file=args.capit_label_vocab_file, tar_file_prefix=args.tar_file_prefix, n_jobs=args.n_jobs, ) if __name__ == "__main__": main()
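As a quick orientation to the metadata layout the module docstring describes, here is a hypothetical reader sketch. summarize_tarred_dataset is an invented helper; the JSON keys ('num_batches', 'tar_files', 'punct_label_vocab_file', 'capit_label_vocab_file') and the "first label is the pad label" convention come straight from the description above.

import json
from pathlib import Path

def summarize_tarred_dataset(metadata_path):
    """Print a short summary of a punctuation/capitalization tarred dataset."""
    metadata_path = Path(metadata_path)
    with metadata_path.open() as f:
        metadata = json.load(f)
    root = metadata_path.parent  # tar file paths are relative to this directory
    print("batches:", metadata["num_batches"])
    print("tar files:", [str(root / t) for t in metadata["tar_files"]])
    for key in ("punct_label_vocab_file", "capit_label_vocab_file"):
        labels = (root / metadata[key]).read_text().splitlines()
        # the first label in each vocab serves as the pad/neutral label
        print(key, "->", len(labels), "labels, pad label:", labels[0])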
token_count: 6,239
max_stars_count: 2,151
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/offline_pages/core/downloads/download_notifying_observer.h"

#include "base/memory/ptr_util.h"
#include "components/offline_pages/core/background/request_coordinator.h"
#include "components/offline_pages/core/background/save_page_request.h"
#include "components/offline_pages/core/client_policy_controller.h"
#include "components/offline_pages/core/downloads/download_ui_adapter.h"
#include "components/offline_pages/core/downloads/offline_item_conversions.h"
#include "components/offline_pages/core/downloads/offline_page_download_notifier.h"

namespace offline_pages {
namespace {
int kUserDataKey;  // Only address is used.
}  // namespace

DownloadNotifyingObserver::DownloadNotifyingObserver(
    std::unique_ptr<OfflinePageDownloadNotifier> notifier,
    ClientPolicyController* policy_controller)
    : notifier_(std::move(notifier)), policy_controller_(policy_controller) {}

DownloadNotifyingObserver::~DownloadNotifyingObserver() {}

// static
DownloadNotifyingObserver*
DownloadNotifyingObserver::GetFromRequestCoordinator(
    RequestCoordinator* request_coordinator) {
  DCHECK(request_coordinator);
  return static_cast<DownloadNotifyingObserver*>(
      request_coordinator->GetUserData(&kUserDataKey));
}

// static
void DownloadNotifyingObserver::CreateAndStartObserving(
    RequestCoordinator* request_coordinator,
    std::unique_ptr<OfflinePageDownloadNotifier> notifier) {
  DCHECK(request_coordinator);
  DCHECK(notifier);
  std::unique_ptr<DownloadNotifyingObserver> observer =
      base::WrapUnique(new DownloadNotifyingObserver(
          std::move(notifier), request_coordinator->GetPolicyController()));
  request_coordinator->AddObserver(observer.get());
  request_coordinator->SetUserData(&kUserDataKey, std::move(observer));
}

void DownloadNotifyingObserver::OnAdded(const SavePageRequest& request) {
  DCHECK(notifier_);
  if (!IsVisibleInUI(request.client_id()))
    return;
  // Calling Progress ensures notification is created in lieu of specific
  // Add/Create call.
  notifier_->NotifyDownloadProgress(
      OfflineItemConversions::CreateOfflineItem(request));
  // Now we need to update the notification if it is not active/offlining.
  if (request.request_state() != SavePageRequest::RequestState::OFFLINING)
    NotifyRequestStateChange(request);
}

void DownloadNotifyingObserver::OnChanged(const SavePageRequest& request) {
  DCHECK(notifier_);
  if (!IsVisibleInUI(request.client_id()))
    return;
  NotifyRequestStateChange(request);
}

void DownloadNotifyingObserver::OnNetworkProgress(
    const SavePageRequest& request,
    int64_t received_bytes) {
  // TODO(dimich): Enable this back in M59. See bug 704049 for more info and
  // what was temporarily (for M58) reverted.
}

void DownloadNotifyingObserver::OnCompleted(
    const SavePageRequest& request,
    RequestCoordinator::BackgroundSavePageResult status) {
  DCHECK(notifier_);
  if (!IsVisibleInUI(request.client_id()))
    return;
  if (status == RequestCoordinator::BackgroundSavePageResult::SUCCESS) {
    // Suppress notifications for certain downloads resulting from CCT.
    OfflineItem item = OfflineItemConversions::CreateOfflineItem(request);
    if (!notifier_->MaybeSuppressNotification(request.request_origin(), item)) {
      notifier_->NotifyDownloadSuccessful(item);
    }
  } else if (status ==
                 RequestCoordinator::BackgroundSavePageResult::USER_CANCELED ||
             status == RequestCoordinator::BackgroundSavePageResult::
                           DOWNLOAD_THROTTLED) {
    notifier_->NotifyDownloadCanceled(
        OfflineItemConversions::CreateOfflineItem(request));
  } else {
    notifier_->NotifyDownloadFailed(
        OfflineItemConversions::CreateOfflineItem(request));
  }
}

bool DownloadNotifyingObserver::IsVisibleInUI(const ClientId& page) {
  return policy_controller_->IsSupportedByDownload(page.name_space) &&
         base::IsValidGUID(page.id);
}

// Calls the appropriate notifier method depending upon the state of the
// request. For example, an AVAILABLE request is not active (aka, pending)
// which the notifier understands as an Interrupted operation vs. one that
// has Progress or is Paused.
void DownloadNotifyingObserver::NotifyRequestStateChange(
    const SavePageRequest& request) {
  if (request.request_state() == SavePageRequest::RequestState::PAUSED)
    notifier_->NotifyDownloadPaused(
        OfflineItemConversions::CreateOfflineItem(request));
  else if (request.request_state() == SavePageRequest::RequestState::AVAILABLE)
    notifier_->NotifyDownloadInterrupted(
        OfflineItemConversions::CreateOfflineItem(request));
  else
    notifier_->NotifyDownloadProgress(
        OfflineItemConversions::CreateOfflineItem(request));
}

}  // namespace offline_pages
token_count: 1,573
max_stars_count: 988
/** * Copyright (C) 2011-2015 The XDocReport Team <<EMAIL>> * * All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package fr.opensagres.xdocreport.document.textstyling.properties; /** * Container properties. */ public abstract class ContainerProperties { public enum ContainerType { SPAN, PARAGRAPH, LIST, LIST_ITEM, HEADER } private boolean pageBreakBefore; private boolean pageBreakAfter; private boolean bold; private boolean italic; private boolean underline; private boolean strike; private boolean subscript; private boolean superscript; private TextAlignment textAlignment; private Color color; private String styleName; private final ContainerType type; public ContainerProperties( ContainerType type ) { this.type = type; } public ContainerType getType() { return type; } public boolean isPageBreakBefore() { return pageBreakBefore; } public void setPageBreakBefore( boolean pageBreakBefore ) { this.pageBreakBefore = pageBreakBefore; } public boolean isPageBreakAfter() { return pageBreakAfter; } public void setPageBreakAfter( boolean pageBreakAfter ) { this.pageBreakAfter = pageBreakAfter; } public boolean isBold() { return bold; } public void setBold( boolean bold ) { this.bold = bold; } public boolean isItalic() { return italic; } public void setItalic( boolean italic ) { this.italic = italic; } public boolean isUnderline() { return underline; } public void setUnderline( boolean underline ) { this.underline = underline; } public boolean isStrike() { return strike; } public void setStrike( boolean strike ) { this.strike = strike; } public boolean isSubscript() { return subscript; } public void setSubscript( boolean subscript ) { this.subscript = subscript; } public boolean isSuperscript() { return superscript; } public void setSuperscript( boolean superscript ) { this.superscript = superscript; } public TextAlignment getTextAlignment() { return textAlignment; } public void setTextAlignment( TextAlignment textAlignment ) { this.textAlignment = textAlignment; } public String getStyleName() { return styleName; } public void setStyleName( String styleName ) { this.styleName = styleName; } public Color getColor() { return color; } public void setColor( Color color ) { this.color = color; } @Override public boolean equals( Object o ) { if ( this == o ) return true; if ( o == null || getClass() != o.getClass() ) return false; ContainerProperties that = (ContainerProperties) o; if ( isPageBreakBefore() != that.isPageBreakBefore() ) return false; if ( isPageBreakAfter() != 
that.isPageBreakAfter() ) return false; if ( isBold() != that.isBold() ) return false; if ( isItalic() != that.isItalic() ) return false; if ( isUnderline() != that.isUnderline() ) return false; if ( isStrike() != that.isStrike() ) return false; if ( isSubscript() != that.isSubscript() ) return false; if ( isSuperscript() != that.isSuperscript() ) return false; if ( getTextAlignment() != that.getTextAlignment() ) return false; if ( getColor() != null ? !getColor().equals( that.getColor() ) : that.getColor() != null ) return false; if ( getStyleName() != null ? !getStyleName().equals( that.getStyleName() ) : that.getStyleName() != null ) return false; return getType() == that.getType(); } @Override public int hashCode() { int result = ( isPageBreakBefore() ? 1 : 0 ); result = 31 * result + ( isPageBreakAfter() ? 1 : 0 ); result = 31 * result + ( isBold() ? 1 : 0 ); result = 31 * result + ( isItalic() ? 1 : 0 ); result = 31 * result + ( isUnderline() ? 1 : 0 ); result = 31 * result + ( isStrike() ? 1 : 0 ); result = 31 * result + ( isSubscript() ? 1 : 0 ); result = 31 * result + ( isSuperscript() ? 1 : 0 ); result = 31 * result + ( getTextAlignment() != null ? getTextAlignment().hashCode() : 0 ); result = 31 * result + ( getColor() != null ? getColor().hashCode() : 0 ); result = 31 * result + ( getStyleName() != null ? getStyleName().hashCode() : 0 ); result = 31 * result + ( getType() != null ? getType().hashCode() : 0 ); return result; } }
token_count: 2,635
max_stars_count: 669
/*
 * MinIO Java SDK for Amazon S3 Compatible Cloud Storage,
 * (C) 2020 MinIO, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.minio.messages;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;

/** Helper class to denote object information for {@link EventMetadata}. */
public class ObjectMetadata {
  @JsonProperty private String key;
  @JsonProperty private long size;
  @JsonProperty private String eTag;
  @JsonProperty private String versionId;
  @JsonProperty private String sequencer;
  @JsonProperty private Map<String, String> userMetadata; // MinIO specific extension.

  public String key() {
    return key;
  }

  public long size() {
    return size;
  }

  public String etag() {
    return eTag;
  }

  public String versionId() {
    return versionId;
  }

  public String sequencer() {
    return sequencer;
  }

  public Map<String, String> userMetadata() {
    return userMetadata;
  }
}
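To make the mapping concrete, here is a hypothetical event payload fragment with the object fields this class binds via @JsonProperty. The field names match the declarations above; every value is invented for illustration.

# Hypothetical object-metadata fragment from a MinIO bucket notification event.
object_metadata = {
    "key": "photos/cat.png",
    "size": 102400,
    "eTag": "d41d8cd98f00b204e9800998ecf8427e",
    "versionId": "1",
    "sequencer": "13C1B0B5F8A34F6A",
    "userMetadata": {"X-Amz-Meta-App": "demo"},  # MinIO-specific extension
}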
token_count: 431
max_stars_count: 504
package org.dayatang.ioc.guice;

import com.google.inject.*;
import com.google.inject.Module;
import com.google.inject.name.Names;
import org.dayatang.domain.InstanceProvider;

import java.lang.annotation.Annotation;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Google Guice implementation of the instance provider interface.
 *
 * @author yyang (<a href="mailto:<EMAIL>"><EMAIL></a>)
 */
public class GuiceInstanceProvider implements InstanceProvider {

    private final Injector injector;

    /**
     * Initializes the Guice instance provider from a batch of Guice modules.
     *
     * @param modules one or more Guice modules
     */
    public GuiceInstanceProvider(Module... modules) {
        injector = Guice.createInjector(Stage.PRODUCTION, modules);
    }

    /**
     * Creates a GuiceProvider from an Injector.
     *
     * @param injector
     */
    public GuiceInstanceProvider(Injector injector) {
        this.injector = injector;
    }

    /**
     * Gets an object instance by type. The returned instance belongs to T or one of its
     * implementation classes or subclasses. Returns null if no instance of the type is found.
     *
     * @param <T> the type parameter
     * @param beanType the type of the instance
     * @return an instance of the given type.
     */
    @Override
    public <T> T getInstance(Class<T> beanType) {
        try {
            return injector.getInstance(beanType);
        } catch (com.google.inject.ConfigurationException e) {
            return null;
        }
    }

    /**
     * Gets an object instance by type and name. Returns null if no instance of the type is
     * found. Suppose two classes MyService1 and MyService2 both implement the interface
     * Service, and the Guice module registers them like this:
     * <pre>
     * binder.bind(Service.class).to(MyService1.class)
     * binder.bind(Service.class).annotatedWith(Names.named("service2")).to(MyService2.class)
     * </pre>
     * Then getInstance(Service.class, "service2") returns an instance of MyService2.
     *
     * @param <T> the type parameter
     * @param beanName the name the implementation class is configured with in the container
     * @param beanType the type of the instance
     * @return an instance of the given type.
     */
    @Override
    public <T> T getInstance(Class<T> beanType, String beanName) {
        Key<T> key = Key.get(beanType, Names.named(beanName));
        try {
            return injector.getInstance(key);
        } catch (com.google.inject.ConfigurationException e) {
            return null;
        }
    }

    /**
     * Gets an object instance by type and annotation. Returns null if no instance of the type
     * is found. Suppose two classes MyService1 and MyService2 both implement the interface
     * Service, MyService2 is annotated with @MyAnnotation, MyAnnotation itself is annotated
     * with @BindingAnnotation @Retention(RetentionPolicy.RUNTIME), and the Guice module
     * registers them like this:
     * binder.bind(Service.class).to(MyService1.class)
     * binder.bind(Service.class).annotatedWith(MyAnnotation.class).to(MyService2.class)
     * Then getInstance(Service.class, MyAnnotation.class) returns an instance of MyService2.
     *
     * @param <T> the type parameter
     * @param beanType the type of the instance
     * @param annotationType the annotation type on the implementation class
     * @return an instance of the given type.
     */
    @Override
    public <T> T getInstance(Class<T> beanType, Class<? extends Annotation> annotationType) {
        Key<T> key = Key.get(beanType, annotationType);
        try {
            return injector.getInstance(key);
        } catch (com.google.inject.ConfigurationException e) {
            return null;
        }
    }

    @Override
    public <T> Set<T> getInstances(Class<T> beanType) {
        Map<Key<?>, Binding<?>> allBindings = injector.getAllBindings();
        Set<T> results = new HashSet<T>();
        for (Map.Entry<Key<?>, Binding<?>> entry : allBindings.entrySet()) {
            Key<?> key = entry.getKey();
            System.out.println("Key: " + key.getTypeLiteral().getRawType()); // debug output
            if (beanType.isAssignableFrom(key.getTypeLiteral().getRawType())) {
                System.out.println("True"); // debug output
                results.add((T) entry.getValue().getProvider().get());
            }
        }
        return results;
    }
}
token_count: 2,082
max_stars_count: 6,594
<filename>code/2.7/13_enumerate.py<gh_stars>1000+
def enum_test():
    my_list = ['apple', 'banana', 'grapes', 'pear']
    for c, v in enumerate(my_list, 1):
        print c, v

def main():
    enum_test()

if __name__ == '__main__':
    main()
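Since the sample above targets Python 2.7 (note the print statement), a Python 3 rendering of the same enumerate(..., 1) idiom would look like this; only the print syntax changes.

def enum_test():
    my_list = ['apple', 'banana', 'grapes', 'pear']
    # enumerate(..., 1) starts the counter at 1 instead of the default 0
    for c, v in enumerate(my_list, 1):
        print(c, v)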
token_count: 102
max_stars_count: 1,376
from enum import Enum

import pytest

import gino
from gino.dialects.aiomysql import AsyncEnum

pytestmark = pytest.mark.asyncio
db = gino.Gino()


class MyEnum(Enum):
    ONE = "one"
    TWO = "two"


class Blog(db.Model):
    __tablename__ = "s_blog"
    id = db.Column(db.BigInteger(), primary_key=True)
    title = db.Column(db.Unicode(255), index=True, comment="Title Comment")
    visits = db.Column(db.BigInteger(), default=0)
    comment_id = db.Column(db.ForeignKey("s_comment.id"))
    number = db.Column(db.Enum(MyEnum), nullable=False, default=MyEnum.TWO)
    number2 = db.Column(AsyncEnum(MyEnum), nullable=False, default=MyEnum.TWO)


class Comment(db.Model):
    __tablename__ = "s_comment"
    id = db.Column(db.BigInteger(), primary_key=True)
    blog_id = db.Column(db.ForeignKey("s_blog.id", name="blog_id_fk"))


blog_seq = db.Sequence("blog_seq", metadata=db, schema="schema_test")


async def test(engine, define=True):
    async with engine.acquire() as conn:
        assert not await engine.dialect.has_table(conn, "non_exist")
    Blog.__table__.comment = "Blog Comment"
    db.bind = engine
    await db.gino.create_all()
    await Blog.number.type.create_async(engine, checkfirst=True)
    await Blog.number2.type.create_async(engine, checkfirst=True)
    await db.gino.create_all(tables=[Blog.__table__], checkfirst=True)
    await blog_seq.gino.create(checkfirst=True)
    await Blog.__table__.gino.create(checkfirst=True)
    await db.gino.drop_all()
    await db.gino.drop_all(tables=[Blog.__table__], checkfirst=True)
    await Blog.__table__.gino.drop(checkfirst=True)
    await blog_seq.gino.drop(checkfirst=True)

    if define:

        class Comment2(db.Model):
            __tablename__ = "s_comment_2"
            id = db.Column(db.BigInteger(), primary_key=True)
            blog_id = db.Column(db.ForeignKey("s_blog.id"))

    await db.gino.create_all()
    await db.gino.drop_all()
token_count: 794
max_stars_count: 2,023
def hashed_float(s):
    """Returns a float in the range [0, 1) based on a hash of the string.

    A given string will always return the same value, but different strings
    will return very different values."""
    import md5, struct  # Python 2 modules; on Python 3 use hashlib.md5 instead
    [number] = struct.unpack("<H", md5.new(s).digest()[:2])
    # Divide by 0x10000 (not 0xFFFF) so the result stays in [0, 1) as documented;
    # dividing by 0xFFFF would let the maximum hash map to exactly 1.0.
    return number / float(0x10000)
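A quick Python 2 usage sketch of the two properties the docstring promises, determinism and range (illustrative session, no specific hash values assumed):

>>> hashed_float("alpha") == hashed_float("alpha")   # same string, same value
True
>>> 0.0 <= hashed_float("alpha") < 1.0               # always inside [0, 1)
True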
token_count: 114
max_stars_count: 742
package org.support.project.web.dao;

import java.util.List;

import org.support.project.aop.Aspect;
import org.support.project.di.Container;
import org.support.project.di.DI;
import org.support.project.di.Instance;
import org.support.project.ormapping.common.SQLManager;
import org.support.project.web.dao.gen.GenRolesDao;
import org.support.project.web.entity.RolesEntity;

/**
 * Roles (authorities)
 */
@DI(instance = Instance.Singleton)
public class RolesDao extends GenRolesDao {

    /** SerialVersion */
    private static final long serialVersionUID = 1L;

    /**
     * Gets the instance (AOP-aware).
     *
     * @return instance
     */
    public static RolesDao get() {
        return Container.getComp(RolesDao.class);
    }

    /**
     * Current ID
     */
    private int currentId = 0;

    /**
     * Allocates the next ID. Note: the next ID is handed out even without a commit,
     * so if the record is never saved the number becomes a gap.
     *
     * @return next id
     */
    public Integer getNextId() {
        String sql = "SELECT MAX(ROLE_ID) FROM ROLES;";
        Integer integer = executeQuerySingle(sql, Integer.class);
        if (integer != null) {
            if (currentId < integer) {
                currentId = integer;
            }
        }
        currentId++;
        return currentId;
    }

    /**
     * Gets the list of roles for a user key.
     *
     * @param userKey userKey
     * @return role list
     */
    public List<RolesEntity> selectOnUserKey(String userKey) {
        String sql = SQLManager.getInstance().getSql("/org/support/project/web/dao/sql/RolesDao/RolesDao_select_on_userkey.sql");
        return executeQueryList(sql, RolesEntity.class, userKey);
    }

    /**
     * Truncates the data.
     */
    @Aspect(advice = org.support.project.ormapping.transaction.Transaction.class)
    public void truncate() {
        String sql = SQLManager.getInstance().getSql("/org/support/project/web/dao/sql/RolesDao/RolesDao_truncate.sql");
        executeUpdate(sql);
    }

    /**
     * Gets a role by its key string.
     *
     * @param roleKey the role key string
     * @return role entity
     */
    public RolesEntity selectOnRoleKey(String roleKey) {
        String sql = "SELECT * FROM ROLES WHERE ROLE_KEY = ? AND DELETE_FLAG = 0";
        return executeQuerySingle(sql, RolesEntity.class, roleKey);
    }
}
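getNextId's allocation scheme (sync up to MAX(ROLE_ID) from the table, then hand out increments from memory) is easy to mirror in a few lines. This toy Python sketch, with invented names, shows why unsaved allocations leave gaps, exactly as the method's comment warns:

class IdAllocator:
    """Mirrors RolesDao.getNextId: sync to the table max, then increment in memory."""
    def __init__(self):
        self.current_id = 0

    def next_id(self, table_max):
        # table_max stands in for SELECT MAX(ROLE_ID) FROM ROLES
        if table_max is not None and self.current_id < table_max:
            self.current_id = table_max
        self.current_id += 1
        return self.current_id

alloc = IdAllocator()
alloc.next_id(5)   # -> 6
alloc.next_id(5)   # -> 7; if ID 6 was never saved, it becomes a permanent gap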
token_count: 1,019
max_stars_count: 1,721
<gh_stars>1000+
#define TEST_PARSE_FLAG kParseFullPrecisionFlag
#define TEST_NAME "RapidJSON_FullPrec (C++)"
#define TEST_CLASS RapidjsonFullPrecTest
#include "rapidjsontest.cpp"
token_count: 67
max_stars_count: 776
package act.app; /*- * #%L * ACT Framework * %% * Copyright (C) 2014 - 2017 ActFramework * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import act.util.ClassNames; import org.eclipse.jdt.internal.compiler.env.ICompilationUnit; import org.osgl.$; import org.osgl.util.*; import java.io.File; import java.util.*; /** * Encapsulate java srccode unit data including srccode code, byte code etc. * A java srccode unit specifies a java class */ public class Source { public enum State { /** * The Source instance has been created */ CREATED, /** * Source code loaded */ LOADED, /** * Byte code compiled out of the srccode code */ COMPILED, /** * Tried to compile but there is compile error */ ERROR_COMPILE, /** * File deleted */ DELETED, /** * Byte code enhanced by framework */ ENHANCED } // the srccode file private File file; // the class className. Can't be 1-1 map to file as // embedded classes do not have separate srccode file private String simpleName; private String packageName; private String className; // The srccode code private String code; // The byte code private byte[] bytes; private Map<String, byte[]> innerBytes = new HashMap<>(); private State state = State.CREATED; private ICompilationUnit compilationUnit; private boolean isController; private long ts; private Source(File file, String className) { E.NPE(file, className); this.file = file; this.simpleName = S.afterLast(className, "."); this.packageName = S.beforeLast(className, "."); this.className = className; compilationUnit = _compilationUnit(); } public String simpleName() { return simpleName; } public String packageName() { return packageName; } public String className() { return className; } public String code() { if (null == code) { load(); } return code; } public List<String> lines() { return C.listOf(code.split("\n")); } public byte[] bytes() { return bytes; } public byte[] bytes(String innerClass) { return innerBytes.get(innerClass); } public Set<String> innerClassNames() { return innerBytes.keySet(); } public File file() { return file; } public void load() { code = IO.readContentAsString(file); updateState(State.LOADED); } public void markAsController() { isController = true; } public boolean isController() { return isController; } void compiled(byte[] bytecode) { this.bytes = $.requireNotNull(bytecode); updateState(State.COMPILED); } void compiled(String innerClassName, byte[] bytecode) { innerBytes.put(innerClassName, bytecode); } void enhanced(byte[] bytecode) { this.bytes = $.requireNotNull(bytecode); updateState(State.ENHANCED); } public void refresh() { bytes = null; ts = 0L; tryLoadSourceFile(); } private void updateState(State state) { this.state = state; this.ts = $.ms(); } private void tryLoadSourceFile() { if (file.exists()) { code = IO.readContentAsString(file); updateState(State.LOADED); } else { updateState(State.DELETED); } } public static Source ofFile(File sourceRoot, File file) { String className = Util.className(sourceRoot, file); return null == className ? 
null : new Source(file, className); } public static Source ofClass(List<File> sourceRoots, String className) { File file = Util.sourceFile(sourceRoots, className); if (null != file) { return new Source(file, className); } return null; } public static Source ofInnerClass(File sourceFile, String innerClassName) { return new Source(sourceFile, innerClassName); } private ICompilationUnit _compilationUnit() { return new ICompilationUnit() { char[] mainTypeName = _mainTypeName(); char[][] packageName = _packageName(); char[] fileName = _fileName(); @Override public char[] getContents() { return code().toCharArray(); } @Override public char[] getMainTypeName() { return mainTypeName; } private char[] _mainTypeName() { String s = simpleName(); int pos = s.indexOf('$'); if (pos > -1) { s = s.substring(0, pos); } return s.toCharArray(); } @Override public char[][] getPackageName() { return packageName; } char[][] _packageName() { StringTokenizer tokens = new StringTokenizer(packageName(), "."); char[][] ca = new char[tokens.countTokens()][]; for (int i = 0; i < ca.length; i++) { ca[i] = tokens.nextToken().toCharArray(); } return ca; } @Override public boolean ignoreOptionalProblems() { return false; } @Override public char[] getFileName() { return fileName; } char[] _fileName() { String s = simpleName(); int pos = s.indexOf('$'); if (pos > -1) { s = s.substring(0, pos); } s = s.replace('.', '/'); s = s + ".java"; return s.toCharArray(); } }; } ICompilationUnit compilationUnit() { return compilationUnit; } public enum Util { ; public static String className(File sourceRoot, File file) { return ClassNames.sourceFileNameToClassName(sourceRoot, file.getAbsolutePath()); } public static File sourceFile(List<File> sourceRoots, String className) { FastStr s = FastStr.of(className).beforeFirst('$'); s = s.replace('.', File.separatorChar).append(".java"); for (File sourceRoot : sourceRoots) { File file = new File(sourceRoot, s.toString()); if (file.canRead()) { return file; } } return null; } public static void main(String[] args) throws Exception { } } }
token_count: 3,350
max_stars_count: 1,127
// Copyright (C) 2018-2022 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // #include "ngraph/runtime/reference/softsign.hpp" #include <openvino/core/validation_util.hpp> #include "itt.hpp" #include "openvino/core/attribute_visitor.hpp" #include "openvino/op/softsign.hpp" #include "openvino/runtime/tensor.hpp" namespace { template <ov::element::Type_t ET> inline bool evaluate(const ov::Tensor& arg, const ov::Tensor& out, const size_t count) { using T = typename ov::element_type_traits<ET>::value_type; ngraph::runtime::reference::softsign<T>(arg.data<T>(), out.data<T>(), count); return true; } bool evaluate_softsign(const ov::Tensor& arg, const ov::Tensor& out) { bool rc = true; size_t count = arg.get_size(); switch (arg.get_element_type()) { NGRAPH_TYPE_CASE(evaluate_softsign, bf16, arg, out, count); NGRAPH_TYPE_CASE(evaluate_softsign, f16, arg, out, count); NGRAPH_TYPE_CASE(evaluate_softsign, f32, arg, out, count); NGRAPH_TYPE_CASE(evaluate_softsign, f64, arg, out, count); default: rc = false; break; } return rc; } } // namespace BWDCMP_RTTI_DEFINITION(ov::op::v9::SoftSign); ov::op::v9::SoftSign::SoftSign(const Output<Node>& arg) : UnaryElementwiseArithmetic(arg) { constructor_validate_and_infer_types(); } void ov::op::v9::SoftSign::validate_and_infer_types() { NGRAPH_OP_SCOPE(v9_SoftSign_validate_and_infer_types); const element::Type& input_et = get_input_element_type(0); NODE_VALIDATION_CHECK(this, input_et.is_dynamic() || input_et.is_real(), "Input element type must be float, instead got: ", input_et); UnaryElementwiseArithmetic::validate_and_infer_types(); } bool ov::op::v9::SoftSign::visit_attributes(AttributeVisitor& visitor) { NGRAPH_OP_SCOPE(v9_SoftSign_visit_attributes); return true; } std::shared_ptr<ov::Node> ov::op::v9::SoftSign::clone_with_new_inputs(const OutputVector& new_args) const { NGRAPH_OP_SCOPE(v9_SoftSign_clone_with_new_inputs); check_new_args_count(this, new_args); return std::make_shared<ov::op::v9::SoftSign>(new_args.at(0)); } bool ov::op::v9::SoftSign::has_evaluate() const { NGRAPH_OP_SCOPE(v9_SoftSign_has_evaluate); switch (get_input_element_type(0)) { case ov::element::bf16: case ov::element::f16: case ov::element::f32: case ov::element::f64: return true; default: break; } return false; } bool ov::op::v9::SoftSign::evaluate(ov::TensorVector& outputs, const ov::TensorVector& inputs, const ov::EvaluationContext& evaluation_context) const { NGRAPH_OP_SCOPE(v9_SoftSign_evaluate); OPENVINO_ASSERT(outputs.size() == 1 && inputs.size() == 1, "SoftSign evaluate needs exactly 1 input and 1 output, instead got:", inputs.size(), " input(s) and ", outputs.size(), " output(s)."); const auto& in = inputs[0]; auto& out = outputs[0]; out.set_shape(in.get_shape()); return evaluate_softsign(in, out); }
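The reference kernel this op dispatches to computes softsign(x) = x / (1 + |x|) elementwise. A minimal NumPy sketch of that math, illustrative only and not OpenVINO's implementation:

import numpy as np

def softsign(x):
    """Elementwise x / (1 + |x|); maps any real input into (-1, 1)."""
    x = np.asarray(x, dtype=np.float32)
    return x / (1.0 + np.abs(x))

# e.g. softsign([-10.0, 0.0, 10.0]) -> approximately [-0.909, 0.0, 0.909]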
token_count: 1,458
max_stars_count: 956
/* SPDX-License-Identifier: BSD-3-Clause * Copyright(c) 2020 Intel Corporation */ #ifndef _RTE_ACC100_PMD_H_ #define _RTE_ACC100_PMD_H_ #include "acc100_pf_enum.h" #include "acc100_vf_enum.h" #include "rte_acc100_cfg.h" /* Helper macro for logging */ #define rte_bbdev_log(level, fmt, ...) \ rte_log(RTE_LOG_ ## level, acc100_logtype, fmt "\n", \ ##__VA_ARGS__) #ifdef RTE_LIBRTE_BBDEV_DEBUG #define rte_bbdev_log_debug(fmt, ...) \ rte_bbdev_log(DEBUG, "acc100_pmd: " fmt, \ ##__VA_ARGS__) #else #define rte_bbdev_log_debug(fmt, ...) #endif /* ACC100 PF and VF driver names */ #define ACC100PF_DRIVER_NAME intel_acc100_pf #define ACC100VF_DRIVER_NAME intel_acc100_vf /* ACC100 PCI vendor & device IDs */ #define RTE_ACC100_VENDOR_ID (0x8086) #define RTE_ACC100_PF_DEVICE_ID (0x0d5c) #define RTE_ACC100_VF_DEVICE_ID (0x0d5d) /* Define as 1 to use only a single FEC engine */ #ifndef RTE_ACC100_SINGLE_FEC #define RTE_ACC100_SINGLE_FEC 0 #endif /* Values used in filling in descriptors */ #define ACC100_DMA_DESC_TYPE 2 #define ACC100_DMA_CODE_BLK_MODE 0 #define ACC100_DMA_BLKID_FCW 1 #define ACC100_DMA_BLKID_IN 2 #define ACC100_DMA_BLKID_OUT_ENC 1 #define ACC100_DMA_BLKID_OUT_HARD 1 #define ACC100_DMA_BLKID_OUT_SOFT 2 #define ACC100_DMA_BLKID_OUT_HARQ 3 #define ACC100_DMA_BLKID_IN_HARQ 3 /* Values used in filling in decode FCWs */ #define ACC100_FCW_TD_VER 1 #define ACC100_FCW_TD_EXT_COLD_REG_EN 1 #define ACC100_FCW_TD_AUTOMAP 0x0f #define ACC100_FCW_TD_RVIDX_0 2 #define ACC100_FCW_TD_RVIDX_1 26 #define ACC100_FCW_TD_RVIDX_2 50 #define ACC100_FCW_TD_RVIDX_3 74 /* Values used in writing to the registers */ #define ACC100_REG_IRQ_EN_ALL 0x1FF83FF /* Enable all interrupts */ /* ACC100 Specific Dimensioning */ #define ACC100_SIZE_64MBYTE (64*1024*1024) /* Number of elements in an Info Ring */ #define ACC100_INFO_RING_NUM_ENTRIES 1024 /* Number of elements in HARQ layout memory */ #define ACC100_HARQ_LAYOUT (64*1024*1024) /* Assume offset for HARQ in memory */ #define ACC100_HARQ_OFFSET (32*1024) /* Mask used to calculate an index in an Info Ring array (not a byte offset) */ #define ACC100_INFO_RING_MASK (ACC100_INFO_RING_NUM_ENTRIES-1) /* Number of Virtual Functions ACC100 supports */ #define ACC100_NUM_VFS 16 #define ACC100_NUM_QGRPS 8 #define ACC100_NUM_QGRPS_PER_WORD 8 #define ACC100_NUM_AQS 16 #define MAX_ENQ_BATCH_SIZE 255 /* All ACC100 Registers alignment are 32bits = 4B */ #define ACC100_BYTES_IN_WORD 4 #define ACC100_MAX_E_MBUF 64000 #define ACC100_GRP_ID_SHIFT 10 /* Queue Index Hierarchy */ #define ACC100_VF_ID_SHIFT 4 /* Queue Index Hierarchy */ #define ACC100_VF_OFFSET_QOS 16 /* offset in Memory specific to QoS Mon */ #define ACC100_TMPL_PRI_0 0x03020100 #define ACC100_TMPL_PRI_1 0x07060504 #define ACC100_TMPL_PRI_2 0x0b0a0908 #define ACC100_TMPL_PRI_3 0x0f0e0d0c #define ACC100_QUEUE_ENABLE 0x80000000 /* Bit to mark Queue as Enabled */ #define ACC100_WORDS_IN_ARAM_SIZE (128 * 1024 / 4) #define ACC100_FDONE 0x80000000 #define ACC100_SDONE 0x40000000 #define ACC100_NUM_TMPL 32 /* Mapping of signals for the available engines */ #define ACC100_SIG_UL_5G 0 #define ACC100_SIG_UL_5G_LAST 7 #define ACC100_SIG_DL_5G 13 #define ACC100_SIG_DL_5G_LAST 15 #define ACC100_SIG_UL_4G 16 #define ACC100_SIG_UL_4G_LAST 21 #define ACC100_SIG_DL_4G 27 #define ACC100_SIG_DL_4G_LAST 31 #define ACC100_NUM_ACCS 5 #define ACC100_ACCMAP_0 0 #define ACC100_ACCMAP_1 2 #define ACC100_ACCMAP_2 1 #define ACC100_ACCMAP_3 3 #define ACC100_ACCMAP_4 4 #define ACC100_PF_VAL 2 /* max number of iterations to allocate memory block for all 
rings */ #define ACC100_SW_RING_MEM_ALLOC_ATTEMPTS 5 #define ACC100_MAX_QUEUE_DEPTH 1024 #define ACC100_DMA_MAX_NUM_POINTERS 14 #define ACC100_DMA_DESC_PADDING 8 #define ACC100_FCW_PADDING 12 #define ACC100_DESC_FCW_OFFSET 192 #define ACC100_DESC_SIZE 256 #define ACC100_DESC_OFFSET (ACC100_DESC_SIZE / 64) #define ACC100_FCW_TE_BLEN 32 #define ACC100_FCW_TD_BLEN 24 #define ACC100_FCW_LE_BLEN 32 #define ACC100_FCW_LD_BLEN 36 #define ACC100_5GUL_SIZE_0 16 #define ACC100_5GUL_SIZE_1 40 #define ACC100_5GUL_OFFSET_0 36 #define ACC100_FCW_VER 2 #define ACC100_MUX_5GDL_DESC 6 #define ACC100_CMP_ENC_SIZE 20 #define ACC100_CMP_DEC_SIZE 24 #define ACC100_ENC_OFFSET (32) #define ACC100_DEC_OFFSET (80) #define ACC100_EXT_MEM /* Default option with memory external to CPU */ #define ACC100_HARQ_OFFSET_THRESHOLD 1024 /* Constants from K0 computation from 3GPP 38.212 Table 5.4.2.1-2 */ #define ACC100_N_ZC_1 66 /* N = 66 Zc for BG 1 */ #define ACC100_N_ZC_2 50 /* N = 50 Zc for BG 2 */ #define ACC100_K0_1_1 17 /* K0 fraction numerator for rv 1 and BG 1 */ #define ACC100_K0_1_2 13 /* K0 fraction numerator for rv 1 and BG 2 */ #define ACC100_K0_2_1 33 /* K0 fraction numerator for rv 2 and BG 1 */ #define ACC100_K0_2_2 25 /* K0 fraction numerator for rv 2 and BG 2 */ #define ACC100_K0_3_1 56 /* K0 fraction numerator for rv 3 and BG 1 */ #define ACC100_K0_3_2 43 /* K0 fraction numerator for rv 3 and BG 2 */ /* ACC100 Configuration */ #define ACC100_DDR_ECC_ENABLE #define ACC100_CFG_DMA_ERROR 0x3D7 #define ACC100_CFG_AXI_CACHE 0x11 #define ACC100_CFG_QMGR_HI_P 0x0F0F #define ACC100_CFG_PCI_AXI 0xC003 #define ACC100_CFG_PCI_BRIDGE 0x40006033 #define ACC100_ENGINE_OFFSET 0x1000 #define ACC100_RESET_HI 0x20100 #define ACC100_RESET_LO 0x20000 #define ACC100_RESET_HARD 0x1FF #define ACC100_ENGINES_MAX 9 #define ACC100_LONG_WAIT 1000 #define ACC100_GPEX_AXIMAP_NUM 17 /* ACC100 DMA Descriptor triplet */ struct acc100_dma_triplet { uint64_t address; uint32_t blen:20, res0:4, last:1, dma_ext:1, res1:2, blkid:4; } __rte_packed; /* ACC100 DMA Response Descriptor */ union acc100_dma_rsp_desc { uint32_t val; struct { uint32_t crc_status:1, synd_ok:1, dma_err:1, neg_stop:1, fcw_err:1, output_err:1, input_err:1, timestampEn:1, iterCountFrac:8, iter_cnt:8, rsrvd3:6, sdone:1, fdone:1; uint32_t add_info_0; uint32_t add_info_1; }; }; /* ACC100 Queue Manager Enqueue PCI Register */ union acc100_enqueue_reg_fmt { uint32_t val; struct { uint32_t num_elem:8, addr_offset:3, rsrvd:1, req_elem_addr:20; }; }; /* FEC 4G Uplink Frame Control Word */ struct __rte_packed acc100_fcw_td { uint8_t fcw_ver:4, num_maps:4; /* Unused */ uint8_t filler:6, /* Unused */ rsrvd0:1, bypass_sb_deint:1; uint16_t k_pos; uint16_t k_neg; /* Unused */ uint8_t c_neg; /* Unused */ uint8_t c; /* Unused */ uint32_t ea; /* Unused */ uint32_t eb; /* Unused */ uint8_t cab; /* Unused */ uint8_t k0_start_col; /* Unused */ uint8_t rsrvd1; uint8_t code_block_mode:1, /* Unused */ turbo_crc_type:1, rsrvd2:3, bypass_teq:1, /* Unused */ soft_output_en:1, /* Unused */ ext_td_cold_reg_en:1; union { /* External Cold register */ uint32_t ext_td_cold_reg; struct { uint32_t min_iter:4, /* Unused */ max_iter:4, ext_scale:5, /* Unused */ rsrvd3:3, early_stop_en:1, /* Unused */ sw_soft_out_dis:1, /* Unused */ sw_et_cont:1, /* Unused */ sw_soft_out_saturation:1, /* Unused */ half_iter_on:1, /* Unused */ raw_decoder_input_on:1, /* Unused */ rsrvd4:10; }; }; }; /* FEC 5GNR Uplink Frame Control Word */ struct __rte_packed acc100_fcw_ld { uint32_t FCWversion:4, qm:4, nfiller:11, BG:1, Zc:9, 
res0:1, synd_precoder:1, synd_post:1; uint32_t ncb:16, k0:16; uint32_t rm_e:24, hcin_en:1, hcout_en:1, crc_select:1, bypass_dec:1, bypass_intlv:1, so_en:1, so_bypass_rm:1, so_bypass_intlv:1; uint32_t hcin_offset:16, hcin_size0:16; uint32_t hcin_size1:16, hcin_decomp_mode:3, llr_pack_mode:1, hcout_comp_mode:3, res2:1, dec_convllr:4, hcout_convllr:4; uint32_t itmax:7, itstop:1, so_it:7, res3:1, hcout_offset:16; uint32_t hcout_size0:16, hcout_size1:16; uint32_t gain_i:8, gain_h:8, negstop_th:16; uint32_t negstop_it:7, negstop_en:1, res4:24; }; /* FEC 4G Downlink Frame Control Word */ struct __rte_packed acc100_fcw_te { uint16_t k_neg; uint16_t k_pos; uint8_t c_neg; uint8_t c; uint8_t filler; uint8_t cab; uint32_t ea:17, rsrvd0:15; uint32_t eb:17, rsrvd1:15; uint16_t ncb_neg; uint16_t ncb_pos; uint8_t rv_idx0:2, rsrvd2:2, rv_idx1:2, rsrvd3:2; uint8_t bypass_rv_idx0:1, bypass_rv_idx1:1, bypass_rm:1, rsrvd4:5; uint8_t rsrvd5:1, rsrvd6:3, code_block_crc:1, rsrvd7:3; uint8_t code_block_mode:1, rsrvd8:7; uint64_t rsrvd9; }; /* FEC 5GNR Downlink Frame Control Word */ struct __rte_packed acc100_fcw_le { uint32_t FCWversion:4, qm:4, nfiller:11, BG:1, Zc:9, res0:3; uint32_t ncb:16, k0:16; uint32_t rm_e:24, res1:2, crc_select:1, res2:1, bypass_intlv:1, res3:3; uint32_t res4_a:12, mcb_count:3, res4_b:17; uint32_t res5; uint32_t res6; uint32_t res7; uint32_t res8; }; /* ACC100 DMA Request Descriptor */ struct __rte_packed acc100_dma_req_desc { union { struct{ uint32_t type:4, rsrvd0:26, sdone:1, fdone:1; uint32_t rsrvd1; uint32_t rsrvd2; uint32_t pass_param:8, sdone_enable:1, irq_enable:1, timeStampEn:1, res0:5, numCBs:4, res1:4, m2dlen:4, d2mlen:4; }; struct{ uint32_t word0; uint32_t word1; uint32_t word2; uint32_t word3; }; }; struct acc100_dma_triplet data_ptrs[ACC100_DMA_MAX_NUM_POINTERS]; /* Virtual addresses used to retrieve SW context info */ union { void *op_addr; uint64_t pad1; /* pad to 64 bits */ }; /* * Stores additional information needed for driver processing: * - last_desc_in_batch - flag used to mark last descriptor (CB) * in batch * - cbs_in_tb - stores information about total number of Code Blocks * in currently processed Transport Block */ union { struct { union { struct acc100_fcw_ld fcw_ld; struct acc100_fcw_td fcw_td; struct acc100_fcw_le fcw_le; struct acc100_fcw_te fcw_te; uint32_t pad2[ACC100_FCW_PADDING]; }; uint32_t last_desc_in_batch :8, cbs_in_tb:8, pad4 : 16; }; uint64_t pad3[ACC100_DMA_DESC_PADDING]; /* pad to 64 bits */ }; }; /* ACC100 DMA Descriptor */ union acc100_dma_desc { struct acc100_dma_req_desc req; union acc100_dma_rsp_desc rsp; uint64_t atom_hdr; }; /* Union describing Info Ring entry */ union acc100_harq_layout_data { uint32_t val; struct { uint16_t offset; uint16_t size0; }; } __rte_packed; /* Union describing Info Ring entry */ union acc100_info_ring_data { uint32_t val; struct { union { uint16_t detailed_info; struct { uint16_t aq_id: 4; uint16_t qg_id: 4; uint16_t vf_id: 6; uint16_t reserved: 2; }; }; uint16_t int_nb: 7; uint16_t msi_0: 1; uint16_t vf2pf: 6; uint16_t loop: 1; uint16_t valid: 1; }; } __rte_packed; struct acc100_registry_addr { unsigned int dma_ring_dl5g_hi; unsigned int dma_ring_dl5g_lo; unsigned int dma_ring_ul5g_hi; unsigned int dma_ring_ul5g_lo; unsigned int dma_ring_dl4g_hi; unsigned int dma_ring_dl4g_lo; unsigned int dma_ring_ul4g_hi; unsigned int dma_ring_ul4g_lo; unsigned int ring_size; unsigned int info_ring_hi; unsigned int info_ring_lo; unsigned int info_ring_en; unsigned int info_ring_ptr; unsigned int tail_ptrs_dl5g_hi; unsigned 
int tail_ptrs_dl5g_lo; unsigned int tail_ptrs_ul5g_hi; unsigned int tail_ptrs_ul5g_lo; unsigned int tail_ptrs_dl4g_hi; unsigned int tail_ptrs_dl4g_lo; unsigned int tail_ptrs_ul4g_hi; unsigned int tail_ptrs_ul4g_lo; unsigned int depth_log0_offset; unsigned int depth_log1_offset; unsigned int qman_group_func; unsigned int ddr_range; }; /* Structure holding registry addresses for PF */ static const struct acc100_registry_addr pf_reg_addr = { .dma_ring_dl5g_hi = HWPfDmaFec5GdlDescBaseHiRegVf, .dma_ring_dl5g_lo = HWPfDmaFec5GdlDescBaseLoRegVf, .dma_ring_ul5g_hi = HWPfDmaFec5GulDescBaseHiRegVf, .dma_ring_ul5g_lo = HWPfDmaFec5GulDescBaseLoRegVf, .dma_ring_dl4g_hi = HWPfDmaFec4GdlDescBaseHiRegVf, .dma_ring_dl4g_lo = HWPfDmaFec4GdlDescBaseLoRegVf, .dma_ring_ul4g_hi = HWPfDmaFec4GulDescBaseHiRegVf, .dma_ring_ul4g_lo = HWPfDmaFec4GulDescBaseLoRegVf, .ring_size = HWPfQmgrRingSizeVf, .info_ring_hi = HWPfHiInfoRingBaseHiRegPf, .info_ring_lo = HWPfHiInfoRingBaseLoRegPf, .info_ring_en = HWPfHiInfoRingIntWrEnRegPf, .info_ring_ptr = HWPfHiInfoRingPointerRegPf, .tail_ptrs_dl5g_hi = HWPfDmaFec5GdlRespPtrHiRegVf, .tail_ptrs_dl5g_lo = HWPfDmaFec5GdlRespPtrLoRegVf, .tail_ptrs_ul5g_hi = HWPfDmaFec5GulRespPtrHiRegVf, .tail_ptrs_ul5g_lo = HWPfDmaFec5GulRespPtrLoRegVf, .tail_ptrs_dl4g_hi = HWPfDmaFec4GdlRespPtrHiRegVf, .tail_ptrs_dl4g_lo = HWPfDmaFec4GdlRespPtrLoRegVf, .tail_ptrs_ul4g_hi = HWPfDmaFec4GulRespPtrHiRegVf, .tail_ptrs_ul4g_lo = HWPfDmaFec4GulRespPtrLoRegVf, .depth_log0_offset = HWPfQmgrGrpDepthLog20Vf, .depth_log1_offset = HWPfQmgrGrpDepthLog21Vf, .qman_group_func = HWPfQmgrGrpFunction0, .ddr_range = HWPfDmaVfDdrBaseRw, }; /* Structure holding registry addresses for VF */ static const struct acc100_registry_addr vf_reg_addr = { .dma_ring_dl5g_hi = HWVfDmaFec5GdlDescBaseHiRegVf, .dma_ring_dl5g_lo = HWVfDmaFec5GdlDescBaseLoRegVf, .dma_ring_ul5g_hi = HWVfDmaFec5GulDescBaseHiRegVf, .dma_ring_ul5g_lo = HWVfDmaFec5GulDescBaseLoRegVf, .dma_ring_dl4g_hi = HWVfDmaFec4GdlDescBaseHiRegVf, .dma_ring_dl4g_lo = HWVfDmaFec4GdlDescBaseLoRegVf, .dma_ring_ul4g_hi = HWVfDmaFec4GulDescBaseHiRegVf, .dma_ring_ul4g_lo = HWVfDmaFec4GulDescBaseLoRegVf, .ring_size = HWVfQmgrRingSizeVf, .info_ring_hi = HWVfHiInfoRingBaseHiVf, .info_ring_lo = HWVfHiInfoRingBaseLoVf, .info_ring_en = HWVfHiInfoRingIntWrEnVf, .info_ring_ptr = HWVfHiInfoRingPointerVf, .tail_ptrs_dl5g_hi = HWVfDmaFec5GdlRespPtrHiRegVf, .tail_ptrs_dl5g_lo = HWVfDmaFec5GdlRespPtrLoRegVf, .tail_ptrs_ul5g_hi = HWVfDmaFec5GulRespPtrHiRegVf, .tail_ptrs_ul5g_lo = HWVfDmaFec5GulRespPtrLoRegVf, .tail_ptrs_dl4g_hi = HWVfDmaFec4GdlRespPtrHiRegVf, .tail_ptrs_dl4g_lo = HWVfDmaFec4GdlRespPtrLoRegVf, .tail_ptrs_ul4g_hi = HWVfDmaFec4GulRespPtrHiRegVf, .tail_ptrs_ul4g_lo = HWVfDmaFec4GulRespPtrLoRegVf, .depth_log0_offset = HWVfQmgrGrpDepthLog20Vf, .depth_log1_offset = HWVfQmgrGrpDepthLog21Vf, .qman_group_func = HWVfQmgrGrpFunction0Vf, .ddr_range = HWVfDmaDdrBaseRangeRoVf, }; /* Structure associated with each queue. 
*/ struct __rte_cache_aligned acc100_queue { union acc100_dma_desc *ring_addr; /* Virtual address of sw ring */ rte_iova_t ring_addr_iova; /* IOVA address of software ring */ uint32_t sw_ring_head; /* software ring head */ uint32_t sw_ring_tail; /* software ring tail */ /* software ring size (descriptors, not bytes) */ uint32_t sw_ring_depth; /* mask used to wrap enqueued descriptors on the sw ring */ uint32_t sw_ring_wrap_mask; /* MMIO register used to enqueue descriptors */ void *mmio_reg_enqueue; uint8_t vf_id; /* VF ID (max = 63) */ uint8_t qgrp_id; /* Queue Group ID */ uint16_t aq_id; /* Atomic Queue ID */ uint16_t aq_depth; /* Depth of atomic queue */ uint32_t aq_enqueued; /* Count how many "batches" have been enqueued */ uint32_t aq_dequeued; /* Count how many "batches" have been dequeued */ uint32_t irq_enable; /* Enable ops dequeue interrupts if set to 1 */ struct rte_mempool *fcw_mempool; /* FCW mempool */ enum rte_bbdev_op_type op_type; /* Type of this Queue: TE or TD */ /* Internal Buffers for loopback input */ uint8_t *lb_in; uint8_t *lb_out; rte_iova_t lb_in_addr_iova; rte_iova_t lb_out_addr_iova; struct acc100_device *d; }; /* Private data structure for each ACC100 device */ struct acc100_device { void *mmio_base; /**< Base address of MMIO registers (BAR0) */ void *sw_rings_base; /* Base addr of un-aligned memory for sw rings */ void *sw_rings; /* 64MBs of 64MB aligned memory for sw rings */ rte_iova_t sw_rings_iova; /* IOVA address of sw_rings */ /* Virtual address of the info memory routed to the this function under * operation, whether it is PF or VF. * HW may DMA information data at this location asynchronously */ union acc100_info_ring_data *info_ring; union acc100_harq_layout_data *harq_layout; /* Virtual Info Ring head */ uint16_t info_ring_head; /* Number of bytes available for each queue in device, depending on * how many queues are enabled with configure() */ uint32_t sw_ring_size; uint32_t ddr_size; /* Size in kB */ uint32_t *tail_ptrs; /* Base address of response tail pointer buffer */ rte_iova_t tail_ptr_iova; /* IOVA address of tail pointers */ /* Max number of entries available for each queue in device, depending * on how many queues are enabled with configure() */ uint32_t sw_ring_max_depth; struct rte_acc100_conf acc100_conf; /* ACC100 Initial configuration */ /* Bitmap capturing which Queues have already been assigned */ uint16_t q_assigned_bit_map[ACC100_NUM_QGRPS]; bool pf_device; /**< True if this is a PF ACC100 device */ bool configured; /**< True if this ACC100 device is configured */ }; /** * Structure with details about RTE_BBDEV_EVENT_DEQUEUE event. It's passed to * the callback function. */ struct acc100_deq_intr_details { uint16_t queue_id; }; #endif /* _RTE_ACC100_PMD_H_ */
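For context on the ACC100_K0_* and ACC100_N_ZC_* constants defined earlier in this header: in 3GPP 38.212 Table 5.4.2.1-2, the circular-buffer start position k0 for redundancy version rv is, as I read the standard, k0 = floor(num * Ncb / (N_zc * Zc)) * Zc, where num is the tabulated numerator (17/33/56 for basegraph 1, 13/25/43 for basegraph 2) and rv 0 starts at 0. A hedged Python sketch of that mapping, mirroring the standard's formula rather than the PMD's internal code:

# Numerators from 38.212 Table 5.4.2.1-2, matching the ACC100_K0_* defines above.
K0_NUM = {1: {1: 17, 2: 33, 3: 56},   # basegraph 1 (N = 66 * Zc)
          2: {1: 13, 2: 25, 3: 43}}   # basegraph 2 (N = 50 * Zc)
N_ZC = {1: 66, 2: 50}

def k0(rv, basegraph, zc, ncb):
    """Circular-buffer start position for redundancy version rv in 0..3."""
    if rv == 0:
        return 0
    return (K0_NUM[basegraph][rv] * ncb) // (N_ZC[basegraph] * zc) * zc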
8,704
592
// Copyright (c) 2014-present, Facebook, Inc. All rights reserved. // // You are hereby granted a non-exclusive, worldwide, royalty-free license to use, // copy, modify, and distribute this software in source code or binary form for use // in connection with the web services and APIs provided by Facebook. // // As with any software that integrates with the Facebook platform, your use of // this software is subject to the Facebook Developer Principles and Policies // [http://developers.facebook.com/policy/]. This copyright notice shall be // included in all copies or substantial portions of the software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. #import <UIKit/UIKit.h> #import "TileView.h" @protocol BoardViewDelegate; @interface BoardView : UIView @property (nonatomic, strong) IBOutlet UIImageView *backgroundView; @property (nonatomic, weak) IBOutlet id<BoardViewDelegate> delegate; - (BOOL)addTileView:(TileView *)tileView; - (BOOL)addTileViewWithValue:(NSUInteger)value atPosition:(NSUInteger)position; - (void)clear; - (void)lockPosition:(NSUInteger)position; - (void)setTileViewValid:(BOOL)valid atPosition:(NSUInteger)position; @end @protocol BoardViewDelegate <NSObject> - (BOOL)boardView:(BoardView *)boardView canRemoveTileViewAtPosition:(NSUInteger)position; - (void)boardView:(BoardView *)boardView didAddTileView:(TileView *)tileView atPosition:(NSUInteger)position; - (void)boardView:(BoardView *)boardView didRemoveTileView:(TileView *)tileView atPosition:(NSUInteger)position; @end
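
// Editor's sketch (not part of the original sources): a hypothetical controller
// adopting BoardViewDelegate. Only the protocol methods are taken from the
// header above; the class name and log messages are illustrative only.
#import "BoardView.h"

@interface SketchBoardController : NSObject <BoardViewDelegate>
@end

@implementation SketchBoardController

- (BOOL)boardView:(BoardView *)boardView canRemoveTileViewAtPosition:(NSUInteger)position
{
  // Permit removal everywhere in this sketch; a real controller would consult its game model.
  return YES;
}

- (void)boardView:(BoardView *)boardView didAddTileView:(TileView *)tileView atPosition:(NSUInteger)position
{
  NSLog(@"Tile added at position %lu", (unsigned long)position);
}

- (void)boardView:(BoardView *)boardView didRemoveTileView:(TileView *)tileView atPosition:(NSUInteger)position
{
  NSLog(@"Tile removed from position %lu", (unsigned long)position);
}

@end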
563
488
/******************************************************************************* * Copyright 2012 University of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * This code was developed by the Information Integration Group as part * of the Karma project at the Information Sciences Institute of the * University of Southern California. For more information, publications, * and related projects, please see: http://www.isi.edu/integration ******************************************************************************/ package edu.isi.karma.rep.sources; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import edu.isi.karma.common.HttpMethods; import edu.isi.karma.util.RandomGUID; import edu.isi.karma.webserver.KarmaException; public class InvocationManager { static Logger logger = LoggerFactory.getLogger(InvocationManager.class); private List<URL> requestURLs; private List<String> idList; private List<Invocation> invocations; private Table serviceData; private String urlColumnName; private JsonArray json; private JsonArray jsonUrl; private JsonArray jsonInputs; private JsonArray jsonOutputs; private JsonArray jsonUrlAndInputs; private JsonArray jsonUrlAndOutputs; private JsonArray jsonInputsAndOutputs; private String encoding; public InvocationManager(String urlColumnName, List<String> idList, List<String> requestURLStrings, String encoding) throws MalformedURLException, KarmaException { this.urlColumnName = (urlColumnName == null || urlColumnName.trim().length() == 0) ? "url" : urlColumnName; this.idList = idList; this.encoding = encoding; requestURLs = URLManager.getURLsFromStrings(requestURLStrings); if (requestURLs == null || requestURLs.isEmpty()) throw new KarmaException("Cannot model a service without any request example."); this.serviceData = null; this.invocations = new ArrayList<>(); json = new JsonArray(); jsonUrl = new JsonArray(); jsonInputs = new JsonArray(); jsonOutputs = new JsonArray(); jsonUrlAndInputs = new JsonArray(); jsonUrlAndOutputs = new JsonArray(); jsonInputsAndOutputs = new JsonArray(); invokeAndGetResponse(); } public InvocationManager(String urlColumnName, String requestURLString) throws MalformedURLException, KarmaException { this.urlColumnName = (urlColumnName == null || urlColumnName.trim().length() == 0) ? 
"url" : urlColumnName; this.idList = new ArrayList<>(); this.idList.add("1"); List<String> requestURLList = new ArrayList<>(); requestURLList.add(requestURLString); requestURLs = URLManager.getURLsFromStrings(requestURLList); if (requestURLs == null || requestURLs.isEmpty()) throw new KarmaException("Cannot model a service without any request example."); this.serviceData = null; this.invocations = new ArrayList<>(); json = new JsonArray(); jsonUrl = new JsonArray(); jsonInputs = new JsonArray(); jsonOutputs = new JsonArray(); jsonUrlAndInputs = new JsonArray(); jsonUrlAndOutputs = new JsonArray(); jsonInputsAndOutputs = new JsonArray(); invokeAndGetResponse(); } private void invokeAndGetResponse() { for (int i = 0; i < requestURLs.size(); i++) { URL url = requestURLs.get(i); String requestId = null; if (idList != null) requestId = idList.get(i); Request request = new Request(url); Invocation invocation = new Invocation(requestId, request, encoding); logger.info("Invoking the service " + request.getUrl().toString() + " ..."); invocation.invokeAPI(); invocations.add(invocation); } List<Table> invocationData = new ArrayList<>(); for (Invocation inv : this.invocations) { populateJsonArraysFromInvocation(inv); invocationData.add(inv.getJointInputAndOutput()); } logger.info("Integrating the results of all invocations ..."); Table result = Table.union(invocationData); logger.info("Integrating finished."); this.serviceData = result; } private void populateJsonArraysFromInvocation(Invocation inv) { try { JsonElement out = new JsonParser().parse(inv.getJsonResponse()); // JsonArray outArray = new JsonArray(); // outArray.add(out); this.jsonOutputs.add(out); JsonObject url = new JsonObject(); url.addProperty(this.urlColumnName, inv.getRequest().getUrl().toString()); // JsonArray urlArray = new JsonArray(); // urlArray.add(url); this.jsonUrl.add(url); JsonObject in = new JsonObject(); for (Attribute att : inv.getRequest().getAttributes()) in.addProperty(att.getName(), att.getValue()); // JsonArray inArray = new JsonArray(); // inArray.add(in); this.jsonInputs.add(in); JsonObject urlAndIn = new JsonObject(); urlAndIn.addProperty(this.urlColumnName, inv.getRequest().getUrl().toString()); for (Attribute att : inv.getRequest().getAttributes()) urlAndIn.addProperty(att.getName(), att.getValue()); this.jsonUrlAndInputs.add(urlAndIn); JsonArray urlAndOut = new JsonArray(); urlAndOut.add(url); urlAndOut.add(out); this.jsonUrlAndOutputs.add(urlAndOut); JsonArray inAndOut = new JsonArray(); inAndOut.add(in); inAndOut.add(out); this.jsonInputsAndOutputs.add(inAndOut); JsonArray all = new JsonArray(); all.add(urlAndIn); all.add(out); this.json.add(all); } catch (Exception e) { logger.debug("Error in parsing json returned by the invocation " + inv.getRequest().getUrl().toString()); } } public String getServiceJson(boolean includeURL, boolean includeInputAttributes, boolean includeOutputAttributes) { if (includeURL && includeInputAttributes && includeOutputAttributes) return this.json.toString(); else if (includeURL && includeInputAttributes) return this.jsonUrlAndInputs.toString(); else if (includeURL && includeOutputAttributes) return this.jsonUrlAndOutputs.toString(); else if (includeInputAttributes && includeOutputAttributes) return this.jsonInputsAndOutputs.toString(); else if (includeURL) return this.jsonUrl.toString(); else if (includeInputAttributes) return this.jsonInputs.toString(); else if (includeOutputAttributes) return this.jsonOutputs.toString(); else return ""; } public Table getServiceData(boolean 
includeURL, boolean includeInputAttributes, boolean includeOutputAttributes) {
		if (includeURL && includeInputAttributes && includeOutputAttributes)
			return this.serviceData;
		List<Attribute> headers = this.serviceData.getHeaders();
		List<List<String>> values = this.serviceData.getValues();
		Table newTable = new Table();
		List<Attribute> newHeader = new ArrayList<>();
		List<List<String>> newValues = new ArrayList<>();
		List<String> newRowIds = new ArrayList<>(this.serviceData.getRowIds());
		List<Integer> includingColumns = new ArrayList<>();
		if (headers != null) {
			if (includeURL && !headers.isEmpty())
				includingColumns.add(0);
			for (int i = 1; i < this.serviceData.getHeaders().size(); i++) {
				// Compare IO types with equalsIgnoreCase, as the other methods in this
				// class do, instead of relying on reference equality of the constants.
				if (includeInputAttributes && IOType.INPUT.equalsIgnoreCase(headers.get(i).getIOType()))
					includingColumns.add(i);
				if (includeOutputAttributes && IOType.OUTPUT.equalsIgnoreCase(headers.get(i).getIOType()))
					includingColumns.add(i);
			}
		}
		for (Integer colIndex : includingColumns) {
			newHeader.add(headers.get(colIndex));
		}
		for (List<String> vals : values) {
			List<String> rowVals = new ArrayList<>();
			for (Integer colIndex : includingColumns)
				rowVals.add(vals.get(colIndex));
			newValues.add(rowVals);
		}
		newTable.setHeaders(newHeader);
		newTable.setValues(newValues);
		newTable.setRowIds(newRowIds);
		return newTable;
	}

	public Table getServiceData() {
		return getServiceData(true, true, true);
	}

	public String getServiceJson(boolean includeInputAttributes) {
		if (includeInputAttributes)
			return getServiceJson(true, true, true);
		return getServiceJson(false, false, true);
	}

	private List<Attribute> getInputAttributes() {
		List<Attribute> inAttributes = new ArrayList<>();
		Table serviceTable = getServiceData();
		for (Attribute p : serviceTable.getHeaders()) {
			if (p.getIOType().equalsIgnoreCase(IOType.INPUT)) {
				inAttributes.add(p);
			}
		}
		return inAttributes;
	}

	private List<Attribute> getOutputAttributes() {
		List<Attribute> outAttributes = new ArrayList<>();
		Table serviceTable = getServiceData();
		for (Attribute p : serviceTable.getHeaders()) {
			if (p.getIOType().equalsIgnoreCase(IOType.OUTPUT))
				outAttributes.add(p);
		}
		return outAttributes;
	}

	/**
	 * This method creates a new service model which includes only the
	 * service endpoint, http method, input and output attributes
	 * @return
	 */
	public WebService getInitialServiceModel(String serviceName) {
		String guid = new RandomGUID().toString();
		// guid = "E9C3F8D3-F778-5C4B-E089-C1749D50AE1F";
		URL sampleUrl = requestURLs.get(0);
		if (sampleUrl == null)
			return null;
		WebService service = null;
		if (serviceName == null || serviceName.trim().length() == 0)
			service = new WebService(guid, sampleUrl);
		else
			service = new WebService(guid, serviceName, sampleUrl);
		service.setMethod(HttpMethods.GET.name());
		service.setInputAttributes(getInputAttributes());
		service.setOutputAttributes(getOutputAttributes());
		return service;
	}

	/*public static void main(String[] args) {
//		String s1 = "http://colo-vm10.isi.edu:8080/DovetailService/GetSampleData?sourceName=KDD-02-B-TOSIG";
		String s1 = "http://api.geonames.org/neighbourhood?lat=40.78343&lng=-73.96625&username=karma";
//		String s1 = "http://api.geonames.org/postalCodeCountryInfo?username=karma";
//		String s2 = "http://api.geonames.org/neighbourhood?lat=40.7&lng=-73.9&username=karma";
//		String s3 = "http://api.geonames.org/neighbourhood?lat=40.9&lng=-73.9&username=karma";
		List<String> urls = new ArrayList<String>();
		urls.add(s1);
//		urls.add(s2);
//		urls.add(s3);
		List<String> ids = new ArrayList<String>();
		ids.add("1");
//		ids.add("2");
//		ids.add("3");
		try {
			InvocationManager sb = new
InvocationManager(null, ids, urls, "UTF-8"); Table tb = sb.getServiceData(false, false, true); // String str = tb.asCSV(); // File f = new File("csv"); // PrintWriter pw = new PrintWriter(f); // pw.write(str); // pw.close(); logger.debug(tb.getPrintInfo()); WebService service = sb.getInitialServiceModel(null); // just for test service.getInputAttributes().get(0).sethNodeId("HN1"); service.getInputAttributes().get(1).sethNodeId("HN2"); service.getOutputAttributes().get(4).sethNodeId("HN3"); service.getOutputAttributes().get(6).sethNodeId("HN4"); service.getOutputAttributes().get(5).sethNodeId("HN5"); service.getOutputAttributes().get(3).sethNodeId("HN6"); service.print(); service.updateModel(Test.getGeoNamesNeighbourhoodTree()); String dir = Repository.Instance().SOURCE_REPOSITORY_DIR; service.getInputModel().writeJenaModelToFile(dir + "model", "N3"); System.out.println(service.getInputModel().getSparql(null)); } catch (Exception e) { e.printStackTrace(); } }*/ }
4,225
462
// Copyright 2020 The Khronos® Group Inc.
#include "GLTFAssetTest.h"

#include "GLTFAsset.h"

TEST(GLTFAssetTest, RemoveUnusedSemantics) {
	GLTF::Asset* asset = new GLTF::Asset();
	GLTF::Scene* scene = new GLTF::Scene();
	asset->scenes.push_back(scene);
	asset->scene = 0;
	GLTF::Node* node = new GLTF::Node();
	scene->nodes.push_back(node);
	GLTF::Mesh* mesh = new GLTF::Mesh();
	node->mesh = mesh;
	GLTF::Primitive* primitive = new GLTF::Primitive();
	mesh->primitives.push_back(primitive);
	GLTF::Material* material = new GLTF::Material();
	primitive->material = material;

	// Add an unused texture coordinate attribute
	primitive->attributes["TEXCOORD_0"] = NULL;
	EXPECT_EQ(primitive->attributes.size(), 1);
	asset->removeUnusedSemantics();
	EXPECT_EQ(primitive->attributes.size(), 0);

	// Add an unused and a used texture coordinate
	primitive->attributes["TEXCOORD_0"] = NULL;
	primitive->attributes["TEXCOORD_1"] = (GLTF::Accessor*)1;
	material->values->ambientTexture = new GLTF::Texture();
	material->values->ambientTexCoord = 1;
	EXPECT_EQ(primitive->attributes.size(), 2);
	asset->removeUnusedSemantics();
	// Dropping the unused TEXCOORD_0 renumbers TEXCOORD_1 to TEXCOORD_0 and
	// remaps the material's texture coordinate index accordingly.
	EXPECT_EQ(primitive->attributes.size(), 1);
	EXPECT_EQ(primitive->attributes["TEXCOORD_0"], (GLTF::Accessor*)1);
	EXPECT_EQ(material->values->ambientTexCoord, 0);
}
467
921
<reponame>vsch/idea-markdown<gh_stars>100-1000 // Copyright (c) 2015-2020 <NAME> <<EMAIL>> Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.vladsch.md.nav.annotator; import com.vladsch.md.nav.MdBundle; import com.vladsch.md.nav.psi.element.MdRenameElement; import java.util.List; public interface FileDataCreator { void fillData(final MdRenameElement element, List<FileChoiceData> fileData); default boolean isAvailable(final MdRenameElement element) { return element.isValid(); } String getDefaultAnchor(); default String getFilePrompt() { return MdBundle.message("file-choice-with-preview.prompt-anchor.0.label", "%s"); } }
263
2,338
// RUN: %clang_cc1 -fsyntax-only -verify %s class test1 { template <typename> friend int bar(bool = true) {} // expected-note {{previous declaration is here}} template <typename> friend int bar(bool); // expected-error {{friend declaration specifying a default argument must be the only declaration}} }; class test2 { friend int bar(bool = true) {} // expected-note {{previous declaration is here}} friend int bar(bool); // expected-error{{friend declaration specifying a default argument must be the only declaration}} };
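
// Editor's note: a sketch (not part of the original test) of the form the
// diagnostic permits -- the friend declaration that supplies the default
// argument must be the only declaration of the function.
class test3 {
  friend int baz(bool = true) { return 0; } // ok: sole declaration
};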
161
410
{ "authors": [ "<NAME>", "<NAME>" ], "date_download": "2017-11-30T19:51:20", "date_modify": null, "date_publish": "2016-11-10T15:50:40", "description": "At least two people were killed and 84 wounded when the insurgents struck with a suicide car bomb near a hotel being used by German diplomats, Afghan officials said.", "filename": "http%3A%2F%2Fwww.nytimes.com%2F2016%2F11%2F11%2Fworld%2Fasia%2Ftaliban-strike-german-consulate-in-afghan-city-of-mazar-i-sharif.html%3F_r%3D0.json", "image_url": "https://static01.nyt.com/images/2016/11/12/world/11AFGHANISTAN-1/11AFGHANISTAN-1-facebookJumbo.jpg", "language": "en", "localpath": null, "source_domain": "www.nytimes.com", "text": "Photo\nKUNDUZ, Afghanistan \u2014 Taliban insurgents struck at the German Consulate in the northern commercial hub of Mazar-i-Sharif late Thursday night, killing at least two people and wounding at least 90, Afghan officials said.\nSayid <NAME>, spokesman for the regional police zone in northern Afghanistan, said that there were two large explosions at the Mazar Hotel, which German diplomats have been using as their consulate in the city. A suicide car bomb apparently initiated the attack at the compound\u2019s security wall, followed by gunfire and an explosion inside the hotel.\nThe provincial police force issued a statement saying that no German Consulate personnel had been hurt, and that the police had cleared the consulate of attackers. <NAME>, the head of the public health department in Balkh Province, which includes Mazar-i-Sharif, said that the casualties had been transferred to the regional hospital there.\nNewsletter Sign Up Continue reading the main story Today\u2019s Headlines: Asia Edition Get news and analysis from Asia and around the world delivered to your inbox every day in the Asian morning. Please verify you're not a robot by clicking the box. Invalid email address. Please re-enter. You must select a newsletter to subscribe to. Sign Up You agree to receive occasional updates and special offers for The New York Times's products and services. Thank you for subscribing. An error has occurred. Please try again later. View all New York Times newsletters.\nA Taliban spokesman, <NAME>, called the attack a reprisal for airstrikes in Kunduz this week.\nAdvertisement Continue reading the main story\nGerman troops have been based in northern Afghanistan for years. After the end of the formal NATO combat mission in 2015, roughly 1,000 German service members have remained at a multinational military base near Mazar-i-Sharif, mostly responsible for training and supporting Afghan security forces.\nA spokesman for the German special forces command at its headquarters in Potsdam, Germany, said that Afghan police forces responded and fought with Taliban attackers soon after the initial bombing outside the consulate. He said that soldiers from the military base nearby also came to the scene.", "title": "Taliban Strike German Consulate in Afghan City of Mazar-i-Sharif", "title_page": null, "title_rss": null, "url": "http://www.nytimes.com/2016/11/11/world/asia/taliban-strike-german-consulate-in-afghan-city-of-mazar-i-sharif.html?_r=0", "newsCluster": { "CategoryId": 1, "Category": "world", "TopicId": 2, "Topic": "legancy", "EventId": 73, "Event": "Benghazi_US_consulate_attack" }, "dId": "921a73cf9c9293238dfb7460299647f9db62e582cee84dad8a4762e5" }
966
1,076
<reponame>kkmaity/DemoVideo
package com.allattentionhere.autoplayvideos;

import android.content.Context;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.preference.PreferenceManager;

public class AAH_Utils {

    private static SharedPreferences sharedPrefs;

    protected static void initialize(Context context) {
        sharedPrefs = PreferenceManager.getDefaultSharedPreferences(context);
    }

    protected static String getString(Context context, String key) {
        if (sharedPrefs == null) {
            initialize(context);
        }
        return sharedPrefs.getString(key, null);
    }

    protected static void saveString(Context context, String key, String value) {
        if (sharedPrefs == null) {
            initialize(context);
        }
        sharedPrefs.edit().putString(key, value).apply();
    }

    protected static void remove(Context context, String key) {
        if (sharedPrefs == null) {
            initialize(context);
        }
        sharedPrefs.edit().remove(key).apply();
    }

    public static boolean isVideoDownloaded(Context c, String url) {
        return getString(c, url) != null;
    }

    public static boolean isConnected(Context c) {
        NetworkInfo info = ((ConnectivityManager) c.getSystemService(Context.CONNECTIVITY_SERVICE)).getActiveNetworkInfo();
        return info != null && info.isConnected();
    }
}
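
// Editor's sketch (not part of the library): one way a caller might combine the
// helpers above. It must live in com.allattentionhere.autoplayvideos because
// saveString() is protected; the class and method names are illustrative only.
class AAH_UtilsUsageSketch {
    static void rememberDownload(android.content.Context context, String url, String localPath) {
        if (AAH_Utils.isConnected(context) && !AAH_Utils.isVideoDownloaded(context, url)) {
            // Record where the video was stored so later calls can skip the network.
            AAH_Utils.saveString(context, url, localPath);
        }
    }
}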
565
1,484
{"data": [{"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2011-1133", "Metadata": {}, "Name": "CVE-2011-1133", "NamespaceName": "debian:10", "Severity": "Negligible"}}, {"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2011-1134", "Metadata": {}, "Name": "CVE-2011-1134", "NamespaceName": "debian:10", "Severity": "Negligible"}}, {"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2011-1135", "Metadata": {}, "Name": "CVE-2011-1135", "NamespaceName": "debian:10", "Severity": "Negligible"}}, {"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2009-3932", "Metadata": {"NVD": {"CVSSv2": {"Score": 9.3, "Vectors": "AV:N/AC:M/Au:N/C:C/I:C"}}}, "Name": "CVE-2009-3932", "NamespaceName": "debian:10", "Severity": "Negligible"}}, {"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2011-4719", "Metadata": {"NVD": {"CVSSv2": {"Score": 10.0, "Vectors": "AV:N/AC:L/Au:N/C:C/I:C"}}}, "Name": "CVE-2011-4719", "NamespaceName": "debian:10", "Severity": "Negligible"}}, {"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2014-6466", "Metadata": {"NVD": {"CVSSv2": {"Score": 6.9, "Vectors": "AV:L/AC:M/Au:N/C:C/I:C"}}}, "Name": "CVE-2014-6466", "NamespaceName": "debian:10", "Severity": "Negligible"}}, {"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2012-5118", "Metadata": {"NVD": {"CVSSv2": {"Score": 7.5, "Vectors": "AV:N/AC:L/Au:N/C:P/I:P"}}}, "Name": "CVE-2012-5118", "NamespaceName": "debian:10", "Severity": "Negligible"}}, {"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2012-5115", "Metadata": {"NVD": {"CVSSv2": {"Score": 7.5, "Vectors": "AV:N/AC:L/Au:N/C:P/I:P"}}}, "Name": "CVE-2012-5115", "NamespaceName": "debian:10", "Severity": "Negligible"}}, {"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2016-2850", "Metadata": {"NVD": {"CVSSv2": {"Score": 5.0, "Vectors": "AV:N/AC:L/Au:N/C:N/I:P"}}}, "Name": "CVE-2016-2850", "NamespaceName": "debian:10", "Severity": "Negligible"}}, {"Vulnerability": {"Description": "", "FixedIn": [], "Link": "https://security-tracker.debian.org/tracker/CVE-2017-11143", "Metadata": {"NVD": {"CVSSv2": {"Score": 5.0, "Vectors": "AV:N/AC:L/Au:N/C:N/I:N"}}}, "Name": "CVE-2017-11143", "NamespaceName": "debian:10", "Severity": "Negligible"}}], "next_token": ""}
1,100
507
<gh_stars>100-1000 # terrascript/jwt/r.py # Automatically generated by tools/makecode.py () import warnings warnings.warn( "using the 'legacy layout' is deprecated", DeprecationWarning, stacklevel=2 ) import terrascript class jwt_hashed_token(terrascript.Resource): pass class jwt_signed_token(terrascript.Resource): pass
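
# Editor's sketch (not part of the generated module): how a Resource subclass
# like jwt_hashed_token is typically used with the legacy python-terrascript
# API. The resource arguments below are illustrative only -- consult the jwt
# provider's schema for the real attribute names.
if __name__ == "__main__":
    import terrascript

    ts = terrascript.Terrascript()
    # Legacy-layout resources take a Terraform resource name plus attributes.
    ts += jwt_hashed_token("example", secret="change-me", claims='{"sub": "demo"}')
    print(ts.dump())  # emits the Terraform JSON for the resource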
117
4,551
<gh_stars>1000+ @LooperMode(Mode.PAUSED) package org.robolectric.plugins.config; import org.robolectric.annotation.LooperMode; import org.robolectric.annotation.LooperMode.Mode;
68
344
<filename>rsmanage/fill_practice_log_missings.py<gh_stars>100-1000
from rs_grading import do_fill_user_topic_practice_log_missings

# `db` and `settings` are not defined in this script; they are expected to be
# provided as globals by the enclosing execution environment (the script is run
# inside the Runestone/web2py shell).
do_fill_user_topic_practice_log_missings(db=db, settings=settings)
71
679
<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_package.hxx" #include <ZipPackageFolderEnumeration.hxx> #include <ContentInfo.hxx> using namespace com::sun::star; using rtl::OUString; ZipPackageFolderEnumeration::ZipPackageFolderEnumeration ( ContentHash &rInput) : rContents (rInput) , aIterator (rContents.begin()) { } ZipPackageFolderEnumeration::~ZipPackageFolderEnumeration( void ) { } sal_Bool SAL_CALL ZipPackageFolderEnumeration::hasMoreElements( ) throw(uno::RuntimeException) { return (aIterator != rContents.end() ); } uno::Any SAL_CALL ZipPackageFolderEnumeration::nextElement( ) throw(container::NoSuchElementException, lang::WrappedTargetException, uno::RuntimeException) { uno::Any aAny; if (aIterator == rContents.end() ) throw container::NoSuchElementException( ::rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( OSL_LOG_PREFIX ) ), uno::Reference< uno::XInterface >() ); aAny <<= (*aIterator).second->xTunnel; aIterator++; return aAny; } OUString ZipPackageFolderEnumeration::getImplementationName() throw (uno::RuntimeException) { return OUString ( RTL_CONSTASCII_USTRINGPARAM ( "ZipPackageFolderEnumeration" ) ); } uno::Sequence< OUString > ZipPackageFolderEnumeration::getSupportedServiceNames() throw (uno::RuntimeException) { uno::Sequence< OUString > aNames(1); aNames[0] = OUString( RTL_CONSTASCII_USTRINGPARAM ( "com.sun.star.packages.PackageFolderEnumeration" ) ); return aNames; } sal_Bool SAL_CALL ZipPackageFolderEnumeration::supportsService( OUString const & rServiceName ) throw (uno::RuntimeException) { return rServiceName == getSupportedServiceNames()[0]; }
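
// Editor's sketch (not part of the original module): how client code typically
// drains an XEnumeration such as the one implemented above. 'xEnum' is assumed
// to reference a ZipPackageFolderEnumeration obtained elsewhere; the function
// name is illustrative only.
static void drainFolderEnumerationSketch( const uno::Reference< container::XEnumeration > &xEnum )
{
    while ( xEnum->hasMoreElements() )
    {
        // Each element is an Any carrying the entry's XInterface (see nextElement above).
        uno::Any aEntry = xEnum->nextElement();
        (void) aEntry;
    }
}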
783
647
/** * @if Er7UtilsUseGroups * @addtogroup Er7Utils * @{ * @addtogroup Interface * @{ * @endif */ /** * @file * Defines Er7UtilsDeletable methods. */ /* Purpose: () */ // System includes #include <cstddef> // Interface includes #include "er7_utils/interface/include/alloc.hh" // Local includes #include "../include/deletable.hh" namespace er7_utils { /** * Delete an instance * @param[in,out] instance Object to be deleted. */ void Er7UtilsDeletable::delete_instance_internal ( Er7UtilsDeletable* instance) { alloc::delete_object (instance); } } /** * @if Er7UtilsUseGroups * @} * @} * @endif */
242
1,021
# # Copyright Contributors to the OpenTimelineIO project # # Licensed under the Apache License, Version 2.0 (the "Apache License") # with the following modification; you may not use this file except in # compliance with the Apache License and the following modification to it: # Section 6. Trademarks. is deleted and replaced with: # # 6. Trademarks. This License does not grant permission to use the trade # names, trademarks, service marks, or product names of the Licensor # and its affiliates, except as required to comply with Section 4(c) of # the License and to reproduce the content of the NOTICE file. # # You may obtain a copy of the Apache License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the Apache License with the above modification is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the Apache License for the specific # language governing permissions and limitations under the Apache License. # """OTIO to SVG Adapter Points in calculations are y-up. Points in SVG are y-down.""" # otio import opentimelineio as otio from xml.etree.ElementTree import Element, SubElement, tostring from xml.dom import minidom # python import math from random import seed from random import random random_colors_used = [] class Color: def __init__(self, r=0.0, g=0.0, b=0.0, a=255.0): self.value = (r, g, b, a) def __getitem__(self, item): return self.value[item] @staticmethod def random_color(): color = Color.__generate_new_color() random_colors_used.append(color) return color @staticmethod def __generate_new_color(): max_distance = None best_color = None for _ in range(100): color = Color.__get_random_color() if len(random_colors_used) == 0: return color best_distance = min([Color.__color_distance(color, c) for c in random_colors_used]) if not max_distance or best_distance > max_distance: max_distance = best_distance best_color = color return best_color @staticmethod def __get_random_color(): return Color(random(), random(), random(), 1.0) @staticmethod def __color_distance(c1, c2): return sum([abs(x[0] - x[1]) for x in zip(c1.value, c2.value)]) @property def r(self): return self.value[0] @property def g(self): return self.value[1] @property def b(self): return self.value[2] @property def a(self): return self.value[3] def svg_color(self): return 'rgb({:.8f},{:.8f},{:.8f})'.format(self.r * 255.0, self.g * 255.0, self.b * 255.0) COLORS = { 'transparent': Color(0, 0, 0, 0), 'black': Color(0.0, 0.0, 0.0, 1.0), 'white': Color(1.0, 1.0, 1.0, 1.0), 'transluscent_white': Color(1.0, 1.0, 1.0, 0.7), 'purple': Color(0.5, 0.0, 0.5, 1.0), 'light_blue': Color(0.529, 0.808, 0.922, 1.0), 'blue': Color(0.0, 0.0, 1.0, 1.0), 'dark_blue': Color(0.0, 0.0, 0.54, 1.0), 'green': Color(0.0, 0.5, 0.0, 1.0), 'dark_green': Color(0.0, 0.39, 0.0, 1.0), 'yellow': Color(1.0, 1.0, 0.0, 1.0), 'gold': Color(1.0, 0.84, 0.0, 1.0), 'orange': Color(1.0, 0.647, 0.0, 1.0), 'red': Color(1.0, 0.0, 0.0, 1.0), 'dark_red': Color(0.54, 0.0, 0.0, 1.0), 'brown': Color(0.54, 0.27, 0.1, 1.0), 'pink': Color(1.0, 0.75, 0.79, 1.0), 'gray': Color(0.5, 0.5, 0.5, 1.0), 'dark_gray': Color(0.66, 0.66, 0.66, 1.0), 'dark_gray_transluscent': Color(0.66, 0.66, 0.66, 0.7843) } class Point: def __init__(self, x, y): self.x = x self.y = y def svg_point_string(self): return "{:.8f},{:.8f}".format(self.x, self.y) class Rect(object): origin = Point(0, 0) width = 0.0 height = 0.0 def __init__(self, origin=Point(0, 
0), width=0.0, height=0.0): self.origin = origin self.width = width self.height = height def normalized(self): normalized_origin = Point( self.origin.x + (self.width if self.width < 0 else 0), self.origin.y + (self.height if self.height < 0 else 0), ) normalized_width = abs(self.width) normalized_height = abs(self.height) return Rect(normalized_origin, normalized_width, normalized_height) def min_x(self): return self.normalized().origin.x def min_y(self): return self.normalized().origin.y def mid_x(self): return self.origin.x + (self.width * 0.5) def mid_y(self): return self.origin.y + (self.height * 0.5) def max_x(self): norm = self.normalized() return norm.origin.x + norm.width def max_y(self): norm = self.normalized() return norm.origin.y + norm.height def contract(self, distance): self.origin.x += distance self.origin.y += distance self.width -= 2.0 * distance self.height -= 2.0 * distance def convert_point_to_svg_coordinates(point, image_height): y = image_height - point.y return Point(point.x, y) def convert_rect_to_svg_coordinates(rect, image_height): """Convert to SVG coordinate system (0,0 at top-left)""" normalized_rect = rect.normalized() normalized_rect.origin = convert_point_to_svg_coordinates( normalized_rect.origin, image_height) normalized_rect.height *= -1 return normalized_rect.normalized() class SVGWriter: def __init__(self, image_width=2406.0, image_height=1054.0, image_margin=20.0, arrow_margin=10.0, arrow_label_margin=5.0, font_size=15.0, font_family='sans-serif'): self.image_width = image_width self.image_height = image_height self.image_margin = image_margin self.arrow_margin = arrow_margin self.arrow_label_margin = arrow_label_margin self.font_size = font_size self.text_margin = 0.5 * font_size self.font_family = font_family self.all_clips_data = [] self.trackwise_clip_count = [] self.tracks_duration = [] self.track_transition_available = [] self.max_total_duration = 0 self.global_min_time = 0 self.global_max_time = 0 self.scale_x = 1.0 self.scale_y = 1.0 self.x_origin = 0 self.clip_rect_height = 0 self.vertical_drawing_index = -1 self.svg_elem = Element("svg", { "height": "{:.8f}".format(self.image_height), "width": "{:.8f}".format(self.image_width), "version": "4.0", "xmlns": "http://www.w3.org/2000/svg", "xmlns:xlink": "http://www.w3.org/1999/xlink", }) # white background SubElement(self.svg_elem, "rect", { "width": "100%", "height": "100%", "fill": "white", }) def draw_rect(self, rect, stroke_width=2.0, stroke_color=COLORS['black']): svg_rect = convert_rect_to_svg_coordinates(rect, self.image_height) SubElement(self.svg_elem, "rect", { "x": "{:.8f}".format(svg_rect.origin.x), "y": "{:.8f}".format(svg_rect.origin.y), "width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height), "style": "fill:rgb(255,255,255);stroke-width:{:.8f};" "stroke:{};opacity:1;fill-opacity:0;".format( stroke_width, stroke_color.svg_color())}) def draw_labeled_rect(self, rect, stroke_width=2.0, stroke_color=COLORS['black'], fill_color=COLORS['white'], label='', label_size=10.0): svg_rect = convert_rect_to_svg_coordinates(rect, self.image_height) g_elem = SubElement(self.svg_elem, "g", { "transform": "translate({:.8f},{:.8f})".format( svg_rect.origin.x, svg_rect.origin.y) }) SubElement(g_elem, "rect", { "width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height), "style": "fill:{};stroke-width:{:.8f};" "stroke:{};opacity:1;".format( fill_color.svg_color(), stroke_width, stroke_color.svg_color()) }) sub_svg_elem = SubElement(g_elem, "svg", { 
"width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height) }) text_elem = SubElement(sub_svg_elem, "text", { "x": "50%", "y": "50%", "font-size": "{:.8f}".format(label_size), "font-family": self.font_family, "style": "stroke:{};stroke-width:{:.8f};" "fill:{};opacity:{:.8f};".format( COLORS['black'].svg_color(), stroke_width / 4.0, COLORS['black'].svg_color(), COLORS['black'].a), "alignment-baseline": "middle", "text-anchor": "middle"}) text_elem.text = label def draw_dashed_rect(self, rect, stroke_width=2.0, stroke_color=COLORS['black'], fill_color=COLORS['white']): svg_rect = convert_rect_to_svg_coordinates(rect, self.image_height) SubElement(self.svg_elem, "rect", { "x": "{:.8f}".format(svg_rect.origin.x), "y": "{:.8f}".format(svg_rect.origin.y), "width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height), "stroke-dasharray": "5", "style": "fill:{};stroke-width:{:.8f};stroke:{};" "opacity:1;fill-opacity:{:.8f}".format( fill_color.svg_color(), stroke_width, stroke_color.svg_color(), fill_color.a) }) def draw_labeled_dashed_rect_with_border(self, rect, stroke_width=2.0, fill_color=COLORS['white'], border_color=COLORS['black'], label='', label_size=10.0): svg_rect = convert_rect_to_svg_coordinates(rect, self.image_height) g_elem = SubElement(self.svg_elem, "g", { "transform": "translate({:.8f},{:.8f})".format( svg_rect.origin.x, svg_rect.origin.y) }) SubElement(g_elem, "rect", { "width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height), "stroke-dasharray": "5", "style": "fill:{};stroke-width:{:.8f};" "stroke:{};opacity:{:.8f};".format( fill_color.svg_color(), stroke_width, border_color.svg_color(), fill_color.a) }) sub_svg_elem = SubElement(g_elem, "svg", { "width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height) }) text_elem = SubElement(sub_svg_elem, "text", { "x": "50%", "y": "50%", "font-size": "{:.8f}".format(label_size), "font-family": self.font_family, "style": "stroke:{};stroke-width:{:.8f};" "fill:{};opacity:{:.8f};".format( COLORS['black'].svg_color(), stroke_width / 4.0, COLORS['black'].svg_color(), COLORS['black'].a), "alignment-baseline": "middle", "text-anchor": "middle" }) text_elem.text = label def draw_solid_rect(self, rect, fill_color=COLORS['white']): svg_rect = convert_rect_to_svg_coordinates(rect, self.image_height) SubElement(self.svg_elem, "rect", { "x": "{:.8f}".format(svg_rect.origin.x), "y": "{:.8f}".format(svg_rect.origin.y), "width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height), "style": "fill:{};stroke-width:0;" "stroke:rgb(0,0,0);opacity:{:.8f};".format( fill_color.svg_color(), fill_color.a) }) def draw_solid_rect_with_border(self, rect, stroke_width=2.0, fill_color=COLORS['white'], border_color=COLORS['black']): svg_rect = convert_rect_to_svg_coordinates(rect, self.image_height) SubElement(self.svg_elem, "rect", { "x": "{:.8f}".format(svg_rect.origin.x), "y": "{:.8f}".format(svg_rect.origin.y), "width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height), "style": "fill:{};stroke-width:{:.8f};" "stroke:{};opacity:{:.8f};".format( fill_color.svg_color(), stroke_width, border_color.svg_color(), fill_color.a) }) def draw_labeled_solid_rect_with_border(self, rect, stroke_width=2.0, fill_color=COLORS['white'], border_color=COLORS['black'], label='', label_size=10.0): svg_rect = convert_rect_to_svg_coordinates(rect, self.image_height) g_elem = SubElement(self.svg_elem, "g", { "transform": 
"translate({:.8f},{:.8f})".format( svg_rect.origin.x, svg_rect.origin.y) }) SubElement(g_elem, "rect", { "width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height), "style": "fill:{};stroke-width:{:.8f};" "stroke:{};opacity:{:.8f};".format( fill_color.svg_color(), stroke_width, border_color.svg_color(), fill_color.a) }) sub_svg_elem = SubElement(g_elem, "svg", { "width": "{:.8f}".format(svg_rect.width), "height": "{:.8f}".format(svg_rect.height) }) text_elem = SubElement(sub_svg_elem, "text", { "x": "50%", "y": "50%", "font-size": "{:.8f}".format(label_size), "font-family": self.font_family, "style": "stroke:{};stroke-width:{:.8f};" "fill:{};opacity:{:.8f};".format( COLORS['black'].svg_color(), stroke_width / 4.0, COLORS['black'].svg_color(), COLORS['black'].a), "alignment-baseline": "middle", "text-anchor": "middle"}) text_elem.text = label def draw_line(self, start_point, end_point, stroke_width, stroke_color=COLORS['black'], is_dashed=False): point1 = convert_point_to_svg_coordinates(start_point, self.image_height) point2 = convert_point_to_svg_coordinates(end_point, self.image_height) style_str = "stroke-width:{:.8f};stroke:{}" \ ";opacity:{:.8f};" \ "stroke-linecap:butt;".format(stroke_width, stroke_color.svg_color(), stroke_color.a) if is_dashed: style_str = style_str + "stroke-dasharray:4 1" SubElement(self.svg_elem, "line", { "x1": "{:.8f}".format(point1.x), "y1": "{:.8f}".format(point1.y), "x2": "{:.8f}".format(point2.x), "y2": "{:.8f}".format(point2.y), "style": style_str }) def draw_arrow(self, start_point, end_point, stroke_width, stroke_color=COLORS['black']): point1 = convert_point_to_svg_coordinates(start_point, self.image_height) point2 = convert_point_to_svg_coordinates(end_point, self.image_height) direction = Point(point2.x - point1.x, point2.y - point1.y) direction_magnitude = math.sqrt(direction.x * direction.x + direction.y * direction.y) inv_magnitude = 1.0 / direction_magnitude arrowhead_length = 9.0 arrowhead_half_width = arrowhead_length * 0.5 direction = Point(direction.x * inv_magnitude, direction.y * inv_magnitude) point2 = Point(point2.x - arrowhead_length * direction.x, point2.y - arrowhead_length * direction.y) triangle_tip = Point(point2.x + arrowhead_length * direction.x, point2.y + arrowhead_length * direction.y) perpendicular_dir = Point(-direction.y, direction.x) triangle_pt_1 = Point(point2.x + arrowhead_half_width * perpendicular_dir.x, point2.y + arrowhead_half_width * perpendicular_dir.y) triangle_pt_2 = Point(point2.x - arrowhead_half_width * perpendicular_dir.x, point2.y - arrowhead_half_width * perpendicular_dir.y) SubElement(self.svg_elem, "line", { "x1": "{:.8f}".format(point1.x), "y1": "{:.8f}".format(point1.y), "x2": "{:.8f}".format(point2.x), "y2": "{:.8f}".format(point2.y), "style": "stroke-width:{:.8f};stroke:{};opacity:{:.8f};" "stroke-linecap:butt;".format( stroke_width, stroke_color.svg_color(), stroke_color.a) }) SubElement(self.svg_elem, "polygon", { "points": " ".join(p.svg_point_string() for p in [triangle_tip, triangle_pt_1, triangle_pt_2]), "style": "fill:{};".format(stroke_color.svg_color()) }) def draw_text(self, text, location, text_size, color=COLORS['black'], stroke_width=1.0): location_svg = convert_point_to_svg_coordinates(location, self.image_height) text_elem = SubElement(self.svg_elem, "text", { "x": "{:.8f}".format(location_svg.x), "y": "{:.8f}".format(location_svg.y), "font-size": "{:.8f}".format(text_size), "font-family": self.font_family, "style": "stroke:{};stroke-width:{:.8f};" 
"fill:{};opacity:{:.8f};".format( color.svg_color(), stroke_width / 4.0, color.svg_color(), color.a) }) text_elem.text = text def get_image(self): # Python 3 produces a bytestring with the tostring() method, whereas Python 2 # gives an str object. The try-except block below checks for this case. xmlstr = tostring(self.svg_elem, encoding='utf-8', method='xml') try: xmlstr = xmlstr.decode("utf8") except UnicodeDecodeError: pass return minidom.parseString(xmlstr).toprettyxml(indent=' ') class ClipData(object): def __init__(self, src_start=0.0, src_end=0.0, avlbl_start=0.0, avlbl_end=0.0, avlbl_duration=0.0, trim_start=0.0, trim_duration=0.0, target_url='', clip_id=0, transition_begin=None, transition_end=None): self.src_start = src_start self.src_end = src_end self.avlbl_start = avlbl_start self.avlbl_end = avlbl_end self.avlbl_duration = avlbl_duration self.trim_start = trim_start self.trim_duration = trim_duration self.target_url = target_url self.clip_id = clip_id self.transition_begin = transition_begin self.transition_end = transition_end def draw_item(otio_obj, svg_writer, extra_data=()): WRITE_TYPE_MAP = { otio.schema.Timeline: _draw_timeline, otio.schema.Stack: _draw_stack, otio.schema.Track: _draw_track, otio.schema.Clip: _draw_clip, otio.schema.Gap: _draw_gap, otio.schema.Transition: _draw_transition, otio.schema.SerializableCollection: _draw_collection, } if type(otio_obj) in WRITE_TYPE_MAP: return WRITE_TYPE_MAP[type(otio_obj)](otio_obj, svg_writer, extra_data) # Draw Timeline and calculate Clip and Gap data def _draw_timeline(timeline, svg_writer, extra_data=()): clip_count = 0 transition_track_count = 0 for track in timeline.tracks: if len(track) == 0: continue current_track_clips_data = [] current_track_has_transition = False current_transition = None track_duration = 0 min_time = 0 max_time = 0 for item in track: if isinstance(item, otio.schema.Transition): current_track_has_transition = True current_transition = item current_track_clips_data[-1].transition_end = item continue avlbl_start = track_duration - item.trimmed_range().start_time.value if isinstance(item, otio.schema.Clip): avlbl_start += item.available_range().start_time.value min_time = min(min_time, avlbl_start) src_start = track_duration track_duration += item.trimmed_range().duration.value src_end = track_duration - 1 avlbl_end = 0.0 trim_start = item.trimmed_range().start_time.value trim_duration = item.trimmed_range().duration.value if isinstance(item, otio.schema.Clip): avlbl_end = (item.available_range().start_time.value + item.available_range().duration.value - item.trimmed_range().start_time.value - item.trimmed_range().duration.value + track_duration - 1) clip_count += 1 avlbl_duration = item.available_range().duration.value clip_data = ClipData(src_start, src_end, avlbl_start, avlbl_end, avlbl_duration, trim_start, trim_duration, item.media_reference.target_url, clip_count - 1) if current_transition is not None: clip_data.transition_begin = current_transition current_transition = None current_track_clips_data.append(clip_data) elif isinstance(item, otio.schema.Gap): avlbl_end = src_end avlbl_duration = trim_duration current_transition = None clip_data = ClipData(src_start, src_end, avlbl_start, avlbl_end, avlbl_duration, trim_start, trim_duration, "Gap", -1) current_track_clips_data.append(clip_data) max_time = max(max_time, avlbl_end) svg_writer.global_max_time = max(svg_writer.global_max_time, max_time) svg_writer.global_min_time = min(svg_writer.global_min_time, min_time) 
        svg_writer.all_clips_data.append(current_track_clips_data)
        svg_writer.tracks_duration.append(track_duration)
        svg_writer.track_transition_available.append(current_track_has_transition)
        if current_track_has_transition:
            transition_track_count += 1
        # store track-wise clip count to draw arrows from stack to tracks
        if len(svg_writer.trackwise_clip_count) == 0:
            svg_writer.trackwise_clip_count.append(clip_count)
        else:
            svg_writer.trackwise_clip_count.append(
                clip_count - svg_writer.trackwise_clip_count[
                    len(svg_writer.trackwise_clip_count) - 1])
    # The scale in x direction is calculated considering margins on the
    # left and right side of the image
    svg_writer.scale_x = (svg_writer.image_width - (2.0 * svg_writer.image_margin)) / \
                         (svg_writer.global_max_time - svg_writer.global_min_time + 1.0)
    svg_writer.x_origin = ((-svg_writer.global_min_time) * svg_writer.scale_x +
                           svg_writer.image_margin)
    track_count = len(svg_writer.tracks_duration)
    # The rect height is calculated considering the following:
    # Total space available:
    # image height - top & bottom margin -
    # space for two labels for the bottom-most rect
    # Number of total rects to fit the height of the drawing space:
    # track_count * 2.0 = one for track rect and one for the sequence of
    # components on that track
    # + 2.0 = timeline and stack rects
    # clip_count = we need to draw a rect for a media reference per clip
    # transition_track_count = we need one more row per the number of tracks with
    # transitions
    # NumberOfRects * 2.0 - 1.0 = to account for "one rect space" between all the rects
    total_image_margin_space = 2.0 * svg_writer.image_margin
    bottom_label_space = 2.0 * svg_writer.font_size
    svg_total_draw_space = (svg_writer.image_height - total_image_margin_space -
                            bottom_label_space)
    track_sequence_rect_count = track_count * 2.0
    timeline_stack_rect_count = 2.0
    rect_count = (track_sequence_rect_count + timeline_stack_rect_count +
                  clip_count + transition_track_count)
    total_slots = rect_count * 2.0 - 1.0
    svg_writer.clip_rect_height = svg_total_draw_space / total_slots
    # Draw Timeline
    svg_writer.vertical_drawing_index += 2
    timeline_origin = Point(svg_writer.x_origin,
                            svg_writer.image_height - svg_writer.image_margin -
                            svg_writer.vertical_drawing_index *
                            svg_writer.clip_rect_height)
    svg_writer.max_total_duration = max(svg_writer.tracks_duration)
    label_text_size = 0.4 * svg_writer.clip_rect_height
    svg_writer.draw_labeled_solid_rect_with_border(
        Rect(timeline_origin, svg_writer.max_total_duration * svg_writer.scale_x,
             svg_writer.clip_rect_height),
        label="Timeline", label_size=label_text_size)
    time_marker_height = 0.15 * svg_writer.clip_rect_height
    for i in range(1, int(svg_writer.max_total_duration)):
        start_pt = Point(svg_writer.x_origin + (i * svg_writer.scale_x),
                         timeline_origin.y)
        end_pt = Point(start_pt.x, start_pt.y + time_marker_height)
        svg_writer.draw_line(start_point=start_pt, end_point=end_pt,
                             stroke_width=1.0, stroke_color=COLORS['black'])
    # Draw arrow from timeline to stack
    timeline_width = svg_writer.max_total_duration * svg_writer.scale_x
    arrow_start = Point(svg_writer.x_origin + timeline_width * 0.5,
                        timeline_origin.y - svg_writer.arrow_margin)
    arrow_end = Point(svg_writer.x_origin + timeline_width * 0.5,
                      timeline_origin.y - svg_writer.clip_rect_height +
                      svg_writer.arrow_margin)
    svg_writer.draw_arrow(start_point=arrow_start, end_point=arrow_end,
                          stroke_width=2.0, stroke_color=COLORS['black'])
    arrow_label_location = Point(arrow_start.x + svg_writer.arrow_label_margin,
                                 (arrow_start.y + arrow_end.y) * 0.5)
    svg_writer.draw_text('tracks',
arrow_label_location, svg_writer.font_size) # Draw global_start_time info if timeline.global_start_time is None: start_time_text = r'global_start_time: {}'.format('None') else: start_time_text = r'global_start_time: {}'.format( repr(float(round(timeline.global_start_time.value, 1)))) start_time_location = Point(timeline_origin.x + svg_writer.font_size, timeline_origin.y - svg_writer.font_size) svg_writer.draw_text(start_time_text, start_time_location, svg_writer.font_size) # Draw stack draw_item(timeline.tracks, svg_writer, (svg_writer.x_origin, svg_writer.max_total_duration)) # Draw stack def _draw_stack(stack, svg_writer, extra_data=()): stack_x_origin = extra_data[0] stack_duration = extra_data[1] svg_writer.vertical_drawing_index += 2 stack_origin = Point(stack_x_origin, svg_writer.image_height - svg_writer.image_margin - svg_writer.vertical_drawing_index * svg_writer.clip_rect_height) stack_text_size = 0.4 * svg_writer.clip_rect_height svg_writer.draw_labeled_solid_rect_with_border( Rect(stack_origin, stack_duration * svg_writer.scale_x, svg_writer.clip_rect_height), label="Stack", fill_color=COLORS['dark_gray_transluscent'], label_size=stack_text_size) time_marker_height = 0.15 * svg_writer.clip_rect_height for i in range(1, int(svg_writer.max_total_duration)): start_pt = Point(svg_writer.x_origin + (i * svg_writer.scale_x), stack_origin.y) end_pt = Point(start_pt.x, start_pt.y + time_marker_height) svg_writer.draw_line(start_point=start_pt, end_point=end_pt, stroke_width=1.0, stroke_color=COLORS['black']) for i in range(0, len(svg_writer.tracks_duration)): draw_item(stack[i], svg_writer, (stack_x_origin, svg_writer.tracks_duration[i], svg_writer.all_clips_data[i], svg_writer.track_transition_available[i])) # Draw arrows from stack to tracks # arrow from stack to first track stack_width = stack_duration * svg_writer.scale_x arrow_start = Point(svg_writer.x_origin + stack_width * 0.5, stack_origin.y - svg_writer.arrow_margin) arrow_end = Point(svg_writer.x_origin + stack_width * 0.5, stack_origin.y - svg_writer.clip_rect_height + svg_writer.arrow_margin) svg_writer.draw_arrow(start_point=arrow_start, end_point=arrow_end, stroke_width=2.0, stroke_color=COLORS['black']) end_arrow_offset = 1 # arrows from stack to rest of the tracks for i in range(1, len(svg_writer.trackwise_clip_count)): arrow_x_increment_per_track = 10.0 end_arrow_offset += (svg_writer.trackwise_clip_count[i - 1] * 2.0 + 4.0) arrow_start = Point( (i * arrow_x_increment_per_track) + svg_writer.x_origin + stack_width * 0.5, stack_origin.y - svg_writer.arrow_margin) arrow_end = Point( (i * arrow_x_increment_per_track) + svg_writer.x_origin + stack_width * 0.5, stack_origin.y - (end_arrow_offset * svg_writer.clip_rect_height) + svg_writer.arrow_margin) svg_writer.draw_arrow(start_point=arrow_start, end_point=arrow_end, stroke_width=2.0, stroke_color=COLORS['black']) arrow_label_text = r'children[{}]'.format(len(svg_writer.trackwise_clip_count)) arrow_label_location = Point(arrow_start.x + svg_writer.arrow_label_margin, stack_origin.y - svg_writer.clip_rect_height * 0.5) svg_writer.draw_text(arrow_label_text, arrow_label_location, svg_writer.font_size) # Draw range info if stack.trimmed_range() is None: trimmed_range_text = r'trimmed_range() -> {}'.format('None') else: trimmed_range_text = r'trimmed_range() -> {}, {}'.format( repr(float(round(stack.trimmed_range().start_time.value, 1))), repr(float(round(stack.trimmed_range().duration.value, 1)))) if stack.source_range is None: source_range_text = r'source_range: 
{}'.format('None')
    else:
        source_range_text = r'source_range: {}, {}'.format(
            repr(float(round(stack.source_range.start_time.value, 1))),
            repr(float(round(stack.source_range.duration.value, 1))))
    trimmed_range_location = Point(stack_origin.x + svg_writer.font_size,
                                   stack_origin.y + svg_writer.clip_rect_height +
                                   svg_writer.text_margin)
    source_range_location = Point(stack_origin.x + svg_writer.font_size,
                                  stack_origin.y - svg_writer.font_size)
    svg_writer.draw_text(trimmed_range_text, trimmed_range_location,
                         svg_writer.font_size)
    svg_writer.draw_text(source_range_text, source_range_location,
                         svg_writer.font_size)


def _draw_track(track, svg_writer, extra_data=()):
    svg_writer.vertical_drawing_index += 2
    track_x_origin = extra_data[0]
    track_duration = extra_data[1]
    clips_data = extra_data[2]
    track_has_transition = extra_data[3]
    track_origin = Point(track_x_origin,
                         svg_writer.image_height - svg_writer.image_margin -
                         svg_writer.vertical_drawing_index *
                         svg_writer.clip_rect_height)
    track_text_size = 0.4 * svg_writer.clip_rect_height
    track_text = track.name if track.name else 'Track'
    svg_writer.draw_labeled_solid_rect_with_border(
        Rect(track_origin, track_duration * svg_writer.scale_x,
             svg_writer.clip_rect_height),
        label=track_text, fill_color=COLORS['dark_gray_transluscent'],
        label_size=track_text_size)
    time_marker_height = 0.15 * svg_writer.clip_rect_height
    for i in range(1, int(track_duration)):
        start_pt = Point(svg_writer.x_origin + (i * svg_writer.scale_x),
                         track_origin.y)
        end_pt = Point(start_pt.x, start_pt.y + time_marker_height)
        svg_writer.draw_line(start_point=start_pt, end_point=end_pt,
                             stroke_width=1.0, stroke_color=COLORS['black'])
    item_count = 0
    clip_count = 0
    transition_count = 0
    svg_writer.vertical_drawing_index += 2
    if track_has_transition:
        svg_writer.vertical_drawing_index += 2
    for item in track:
        if isinstance(item, otio.schema.Clip):
            clip_count += 1
            draw_item(item, svg_writer, (clips_data[item_count], clip_count))
            item_count += 1
        elif isinstance(item, otio.schema.Gap):
            draw_item(item, svg_writer, (clips_data[item_count],))
            item_count += 1
        elif isinstance(item, otio.schema.Transition):
            cut_x = svg_writer.x_origin + (clips_data[item_count].src_start *
                                           svg_writer.scale_x)
            draw_item(item, svg_writer, (cut_x,))
            transition_count += 1
    svg_writer.vertical_drawing_index += (2 * clip_count)
    # Draw arrow from track to clips
    track_width = track_duration * svg_writer.scale_x
    arrow_start = Point(svg_writer.x_origin + track_width * 0.5,
                        track_origin.y - svg_writer.arrow_margin)
    arrow_end = Point(svg_writer.x_origin + track_width * 0.5,
                      track_origin.y - svg_writer.clip_rect_height +
                      svg_writer.arrow_margin)
    svg_writer.draw_arrow(start_point=arrow_start, end_point=arrow_end,
                          stroke_width=2.0, stroke_color=COLORS['black'])
    arrow_label_text = r'children[{}]'.format(item_count + transition_count)
    arrow_label_location = Point(arrow_start.x + svg_writer.arrow_label_margin,
                                 track_origin.y -
                                 svg_writer.clip_rect_height * 0.5)
    svg_writer.draw_text(arrow_label_text, arrow_label_location,
                         svg_writer.font_size)
    # Draw range info
    if track.trimmed_range() is None:
        trimmed_range_text = r'trimmed_range() -> {}'.format('None')
    else:
        trimmed_range_text = r'trimmed_range() -> {}, {}'.format(
            repr(float(round(track.trimmed_range().start_time.value, 1))),
            repr(float(round(track.trimmed_range().duration.value, 1))))
    if track.source_range is None:
        source_range_text = r'source_range: {}'.format('None')
    else:
        source_range_text = r'source_range: {}, {}'.format(
            repr(float(round(track.source_range.start_time.value, 1))),
            repr(float(round(track.source_range.duration.value, 1))))
    trimmed_range_location = Point(track_origin.x + svg_writer.font_size,
                                   track_origin.y + svg_writer.clip_rect_height +
                                   svg_writer.text_margin)
    source_range_location = Point(track_origin.x + svg_writer.font_size,
                                  track_origin.y - svg_writer.font_size)
    svg_writer.draw_text(trimmed_range_text, trimmed_range_location,
                         svg_writer.font_size)
    svg_writer.draw_text(source_range_text, source_range_location,
                         svg_writer.font_size)


# Draw clip
def _draw_clip(clip, svg_writer, extra_data=()):
    clip_data = extra_data[0]
    clip_count = extra_data[1]
    clip_color = Color.random_color()
    clip_origin = Point(
        svg_writer.x_origin + (clip_data.src_start * svg_writer.scale_x),
        svg_writer.image_height - svg_writer.image_margin -
        svg_writer.vertical_drawing_index * svg_writer.clip_rect_height)
    clip_rect = Rect(clip_origin, clip_data.trim_duration * svg_writer.scale_x,
                     svg_writer.clip_rect_height)
    clip_text_size = 0.4 * svg_writer.clip_rect_height
    clip_text = r'Clip-{}'.format(clip_data.clip_id) if len(
        clip.name) == 0 else clip.name
    svg_writer.draw_labeled_solid_rect_with_border(
        clip_rect, label=clip_text, fill_color=clip_color,
        label_size=clip_text_size)
    time_marker_height = 0.15 * svg_writer.clip_rect_height
    for i in range(int(clip_data.src_start), int(clip_data.src_end) + 1):
        start_pt = Point(svg_writer.x_origin + (i * svg_writer.scale_x),
                         clip_origin.y)
        end_pt = Point(start_pt.x, start_pt.y + time_marker_height)
        svg_writer.draw_line(start_point=start_pt, end_point=end_pt,
                             stroke_width=1.0, stroke_color=COLORS['black'])
    # Draw range info
    if clip.trimmed_range() is None:
        trimmed_range_text = r'trimmed_range() -> {}'.format('None')
    else:
        trimmed_range_text = r'trimmed_range() -> {}, {}'.format(
            repr(float(round(clip.trimmed_range().start_time.value, 1))),
            repr(float(round(clip.trimmed_range().duration.value, 1))))
    if clip.source_range is None:
        source_range_text = r'source_range: {}'.format('None')
    else:
        source_range_text = r'source_range: {}, {}'.format(
            repr(float(round(clip.source_range.start_time.value, 1))),
            repr(float(round(clip.source_range.duration.value, 1))))
    trimmed_range_location = Point(clip_origin.x + svg_writer.font_size,
                                   clip_origin.y + svg_writer.clip_rect_height +
                                   svg_writer.text_margin)
    source_range_location = Point(clip_origin.x + svg_writer.font_size,
                                  clip_origin.y - svg_writer.font_size)
    svg_writer.draw_text(trimmed_range_text, trimmed_range_location,
                         svg_writer.font_size)
    svg_writer.draw_text(source_range_text, source_range_location,
                         svg_writer.font_size)
    # Draw media reference
    trim_media_origin = Point(
        svg_writer.x_origin + (clip_data.src_start * svg_writer.scale_x),
        svg_writer.image_height - svg_writer.image_margin -
        (svg_writer.vertical_drawing_index + clip_count * 2) *
        svg_writer.clip_rect_height)
    media_origin = Point(
        svg_writer.x_origin + (clip_data.avlbl_start * svg_writer.scale_x),
        svg_writer.image_height - svg_writer.image_margin -
        (svg_writer.vertical_drawing_index + clip_count * 2) *
        svg_writer.clip_rect_height)
    svg_writer.draw_rect(
        Rect(media_origin, clip_data.avlbl_duration * svg_writer.scale_x,
             svg_writer.clip_rect_height))
    media_text_size = 0.4 * svg_writer.clip_rect_height
    media_text = r'Media-{}'.format(clip_data.clip_id) if len(
        clip.media_reference.name) == 0 else clip.media_reference.name
    svg_writer.draw_labeled_solid_rect_with_border(
        Rect(trim_media_origin, clip_data.trim_duration * svg_writer.scale_x,
             svg_writer.clip_rect_height),
        label=media_text, fill_color=clip_color, label_size=media_text_size)
    for i in range(int(clip_data.avlbl_start), int(clip_data.avlbl_end) + 1):
        start_pt = Point(svg_writer.x_origin + (i * svg_writer.scale_x),
                         media_origin.y)
        if start_pt.x < media_origin.x:
            continue
        end_pt = Point(start_pt.x, start_pt.y + time_marker_height)
        svg_writer.draw_line(start_point=start_pt, end_point=end_pt,
                             stroke_width=1.0, stroke_color=COLORS['black'])
    # Draw media_reference info
    if clip.available_range() is None:
        available_range_text = r'available_range: {}'.format('None')
    else:
        available_range_text = r'available_range: {}, {}'.format(
            repr(float(round(clip.available_range().start_time.value, 1))),
            repr(float(round(clip.available_range().duration.value, 1))))
    if clip.media_reference.target_url is None:
        target_url_text = r'target_url: {}'.format('Media Unavailable')
    else:
        target_url_text = r'target_url: {}'.format(
            clip.media_reference.target_url)
    available_range_location = Point(media_origin.x + svg_writer.font_size,
                                     media_origin.y - svg_writer.font_size)
    target_url_location = Point(media_origin.x + svg_writer.font_size,
                                media_origin.y - 2.0 * svg_writer.font_size)
    svg_writer.draw_text(available_range_text, available_range_location,
                         svg_writer.font_size)
    svg_writer.draw_text(target_url_text, target_url_location,
                         svg_writer.font_size)
    # Draw arrow from clip to media reference
    clip_media_height_difference = (((clip_count - 1) * 2.0 + 1) *
                                    svg_writer.clip_rect_height)
    media_arrow_start = Point(
        clip_origin.x + (clip_data.trim_duration * svg_writer.scale_x) * 0.5,
        clip_origin.y - svg_writer.arrow_margin)
    media_arrow_end = Point(
        clip_origin.x + (clip_data.trim_duration * svg_writer.scale_x) * 0.5,
        clip_origin.y - clip_media_height_difference + svg_writer.arrow_margin)
    svg_writer.draw_arrow(start_point=media_arrow_start,
                          end_point=media_arrow_end, stroke_width=2.0,
                          stroke_color=COLORS['black'])
    arrow_label_text = r'media_reference'
    arrow_label_location = Point(
        media_arrow_start.x + svg_writer.arrow_label_margin,
        media_arrow_start.y - svg_writer.clip_rect_height * 0.5)
    svg_writer.draw_text(arrow_label_text, arrow_label_location,
                         svg_writer.font_size)
    # Draw media transition sections
    if clip_data.transition_end is not None:
        cut_x = clip_origin.x + clip_rect.width
        section_start_pt = Point(cut_x, media_origin.y)
        # Handle the case of transition ending at cut
        if clip_data.transition_end.out_offset.value == 0.0:
            media_transition_rect = Rect(
                section_start_pt,
                -clip_data.transition_end.in_offset.value * svg_writer.scale_x,
                svg_writer.clip_rect_height)
            marker_x = [clip_data.src_end,
                        clip_data.src_end -
                        clip_data.transition_end.in_offset.value]
        else:
            media_transition_rect = Rect(
                section_start_pt,
                clip_data.transition_end.out_offset.value * svg_writer.scale_x,
                svg_writer.clip_rect_height)
            marker_x = [clip_data.src_end,
                        clip_data.src_end +
                        clip_data.transition_end.out_offset.value]
        section_color = Color(clip_color[0], clip_color[1], clip_color[2], 0.5)
        svg_writer.draw_dashed_rect(media_transition_rect,
                                    fill_color=section_color)
        marker_x.sort()
        # Draw markers for transition sections
        for i in range(int(marker_x[0]), int(marker_x[1]) + 1):
            start_pt = Point(svg_writer.x_origin + (i * svg_writer.scale_x),
                             media_origin.y)
            if start_pt.x < media_transition_rect.min_x():
                continue
            end_pt = Point(start_pt.x, start_pt.y + time_marker_height)
            svg_writer.draw_line(start_point=start_pt, end_point=end_pt,
                                 stroke_width=1.0,
                                 stroke_color=COLORS['black'])
    if clip_data.transition_begin is not None:
        cut_x = clip_origin.x
        section_start_pt = Point(cut_x, media_origin.y)
        # Handle the case of transition starting at cut
        if clip_data.transition_begin.in_offset.value == 0.0:
            media_transition_rect = Rect(
                section_start_pt,
                clip_data.transition_begin.out_offset.value *
                svg_writer.scale_x,
                svg_writer.clip_rect_height)
            marker_x = [clip_data.src_start,
                        clip_data.src_start +
                        clip_data.transition_begin.out_offset.value]
        else:
            media_transition_rect = Rect(
                section_start_pt,
                -clip_data.transition_begin.in_offset.value *
                svg_writer.scale_x,
                svg_writer.clip_rect_height)
            marker_x = [clip_data.src_start,
                        clip_data.src_start -
                        clip_data.transition_begin.out_offset.value]
        section_color = Color(clip_color[0], clip_color[1], clip_color[2], 0.5)
        svg_writer.draw_dashed_rect(media_transition_rect,
                                    fill_color=section_color)
        marker_x.sort()
        # Draw markers for transition sections
        for i in range(int(marker_x[0]), int(marker_x[1]) + 1):
            start_pt = Point(svg_writer.x_origin + (i * svg_writer.scale_x),
                             media_origin.y)
            if start_pt.x < media_transition_rect.min_x():
                continue
            end_pt = Point(start_pt.x,
                           start_pt.y + 0.15 * svg_writer.clip_rect_height)
            svg_writer.draw_line(start_point=start_pt, end_point=end_pt,
                                 stroke_width=1.0,
                                 stroke_color=COLORS['black'])


def _draw_gap(gap, svg_writer, extra_data=()):
    gap_data = extra_data[0]
    gap_origin = Point(svg_writer.x_origin +
                       (gap_data.src_start * svg_writer.scale_x),
                       svg_writer.image_height - svg_writer.image_margin -
                       svg_writer.vertical_drawing_index *
                       svg_writer.clip_rect_height)
    gap_text_size = 0.4 * svg_writer.clip_rect_height
    gap_text = 'Gap'
    svg_writer.draw_labeled_dashed_rect_with_border(
        Rect(gap_origin, gap_data.trim_duration * svg_writer.scale_x,
             svg_writer.clip_rect_height),
        label=gap_text, label_size=gap_text_size)
    time_marker_height = 0.15 * svg_writer.clip_rect_height
    for i in range(int(gap_data.src_start), int(gap_data.src_end) + 1):
        start_pt = Point(svg_writer.x_origin + (i * svg_writer.scale_x),
                         gap_origin.y)
        end_pt = Point(start_pt.x, start_pt.y + time_marker_height)
        svg_writer.draw_line(start_point=start_pt, end_point=end_pt,
                             stroke_width=1.0, stroke_color=COLORS['black'])
    # Draw range info
    if gap.trimmed_range() is None:
        trimmed_range_text = r'trimmed_range() -> {}'.format('None')
    else:
        trimmed_range_text = r'trimmed_range() -> {}, {}'.format(
            repr(float(round(gap.trimmed_range().start_time.value, 1))),
            repr(float(round(gap.trimmed_range().duration.value, 1))))
    if gap.source_range is None:
        source_range_text = r'source_range: {}'.format('None')
    else:
        source_range_text = r'source_range: {}, {}'.format(
            repr(float(round(gap.source_range.start_time.value, 1))),
            repr(float(round(gap.source_range.duration.value, 1))))
    trimmed_range_location = Point(gap_origin.x + svg_writer.font_size,
                                   gap_origin.y + svg_writer.clip_rect_height +
                                   svg_writer.text_margin)
    source_range_location = Point(gap_origin.x + svg_writer.font_size,
                                  gap_origin.y - svg_writer.font_size)
    svg_writer.draw_text(trimmed_range_text, trimmed_range_location,
                         svg_writer.font_size)
    svg_writer.draw_text(source_range_text, source_range_location,
                         svg_writer.font_size)


def _draw_transition(transition, svg_writer, extra_data=()):
    cut_x = extra_data[0]
    transition_origin = Point(
        cut_x - (transition.in_offset.value * svg_writer.scale_x),
        svg_writer.image_height - svg_writer.image_margin -
        (svg_writer.vertical_drawing_index - 2) * svg_writer.clip_rect_height)
    transition_rect = Rect(
        transition_origin,
        (transition.in_offset.value + transition.out_offset.value) *
        svg_writer.scale_x,
        svg_writer.clip_rect_height)
    transition_name = 'Transition' if len(
        transition.name) == 0 else transition.name
    transition_name_size = 0.4 * svg_writer.clip_rect_height
    svg_writer.draw_labeled_rect(transition_rect, label=transition_name,
                                 label_size=transition_name_size)
    line_end = Point(transition_origin.x + transition_rect.width,
                     transition_origin.y + transition_rect.height)
    svg_writer.draw_line(transition_origin, line_end, stroke_width=1.0,
                         stroke_color=COLORS['black'])
    in_offset_location = Point(transition_origin.x + svg_writer.font_size,
                               transition_origin.y - svg_writer.font_size)
    out_offset_location = Point(transition_origin.x + svg_writer.font_size,
                                transition_origin.y -
                                2.0 * svg_writer.font_size)
    in_offset_text = r'in_offset: ' \
                     r'{}'.format(repr(float(round(transition.in_offset.value, 1))))
    out_offset_text = r'out_offset: ' \
                      r'{}'.format(repr(float(round(transition.out_offset.value, 1))))
    svg_writer.draw_text(in_offset_text, in_offset_location,
                         svg_writer.font_size)
    svg_writer.draw_text(out_offset_text, out_offset_location,
                         svg_writer.font_size)
    cut_location = Point(cut_x, transition_origin.y)
    cut_line_end = Point(cut_x,
                         svg_writer.image_height - svg_writer.image_margin -
                         svg_writer.vertical_drawing_index *
                         svg_writer.clip_rect_height)
    svg_writer.draw_line(cut_location, cut_line_end, stroke_width=1.0,
                         stroke_color=COLORS['black'])


def _draw_collection(collection, svg_writer, extra_data=()):
    pass


def convert_otio_to_svg(timeline, width, height):
    global random_colors_used

    svg_writer = SVGWriter(image_width=width, image_height=height,
                           font_family='sans-serif', image_margin=20.0,
                           font_size=15.0, arrow_label_margin=5.0)
    random_colors_used = []
    seed(100)
    draw_item(timeline, svg_writer, ())

    return svg_writer.get_image()


# --------------------
# adapter requirements
# --------------------

def write_to_string(input_otio, width=2406.0, height=1054.0):
    return convert_otio_to_svg(input_otio, width=width, height=height)
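For quick orientation, a minimal usage sketch of the adapter entry point above. Only write_to_string and its default 2406.0 x 1054.0 canvas come from the code itself; the timeline contents and the output path are hypothetical.

import opentimelineio as otio

timeline = otio.schema.Timeline(name="demo")            # hypothetical timeline
timeline.tracks.append(otio.schema.Track(name="V1"))    # one empty video track

svg_text = write_to_string(timeline)   # renders on the default 2406.0 x 1054.0 canvas
with open("demo.svg", "w") as out:     # hypothetical output path
    out.write(svg_text)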
#ifndef MAIN_HPP
#define MAIN_HPP

#include "SerialConfiguration.hpp"

extern SerialConfiguration *serialconfig;

#endif
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef SERVICES_NETWORK_PUBLIC_CPP_OPTIONAL_TRUST_TOKEN_PARAMS_H_
#define SERVICES_NETWORK_PUBLIC_CPP_OPTIONAL_TRUST_TOKEN_PARAMS_H_

#include "base/component_export.h"
#include "base/optional.h"
#include "services/network/public/mojom/trust_tokens.mojom.h"

namespace network {

// This class exists to wrap mojom::TrustTokenParamsPtr for use as a field in
// network::ResourceRequest.
//
// Motivation:
// 1. network::ResourceRequest has a requirement that all of
//    network::ResourceRequest's members be trivially copyable;
// 2. Mojo struct pointers aren't, by default, trivially copyable;
// 3. Mojo only knows, from its generated code, how to serialize and
//    deserialize struct pointers, not raw Mojo structs.
//
// One solution to this dilemma would be to manually define separate Mojo
// StructTraits for the raw struct type (network::mojom::TrustTokenParams), but
// this would add maintenance burden since it would require updating the traits
// every time the structure's definition changes.
//
// Using this trivially-copyable wrapper class (where the copy constructor and
// copy assignment operators use mojo::Clone) allows changing the format of the
// Mojo struct without having to manually update the corresponding
// serialization/deserialization code.
class COMPONENT_EXPORT(NETWORK_CPP_BASE) OptionalTrustTokenParams {
 public:
  // The constructors match base::Optional to the extent possible.
  OptionalTrustTokenParams();
  OptionalTrustTokenParams(base::nullopt_t);  // NOLINT
  explicit OptionalTrustTokenParams(mojom::TrustTokenParamsPtr);
  // Copy assignment uses mojo::Clone.
  OptionalTrustTokenParams(const mojom::TrustTokenParams&);  // NOLINT
  OptionalTrustTokenParams(const OptionalTrustTokenParams&);
  OptionalTrustTokenParams& operator=(const OptionalTrustTokenParams&);
  OptionalTrustTokenParams(OptionalTrustTokenParams&&);
  OptionalTrustTokenParams& operator=(OptionalTrustTokenParams&&);
  ~OptionalTrustTokenParams();

  // This comparison operator wraps mojo::Equals.
  bool operator==(const OptionalTrustTokenParams&) const;
  bool operator!=(const OptionalTrustTokenParams& rhs) const {
    return !(*this == rhs);
  }

  explicit operator bool() const { return has_value(); }
  bool has_value() const { return !!ptr_; }

  mojom::TrustTokenParams& value() {
    CHECK(has_value());
    return *ptr_;
  }
  const mojom::TrustTokenParams& value() const {
    CHECK(has_value());
    return *ptr_;
  }
  const mojom::TrustTokenParams* operator->() const {
    CHECK(has_value());
    return ptr_.get();
  }
  mojom::TrustTokenParams* operator->() {
    CHECK(has_value());
    return ptr_.get();
  }

  // |as_ptr| returns null if this object is empty.
  const mojom::TrustTokenParamsPtr& as_ptr() const { return ptr_; }
  mojom::TrustTokenParamsPtr& as_ptr() { return ptr_; }

 private:
  mojom::TrustTokenParamsPtr ptr_;
};

}  // namespace network

#endif  // SERVICES_NETWORK_PUBLIC_CPP_OPTIONAL_TRUST_TOKEN_PARAMS_H_
<reponame>Voilibaar/Tidal-Media-Downloader
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
@File    :  searchModel.py
@Date    :  2021/8/17
@Author  :  Yaronzz
@Version :  1.0
@Contact :  <EMAIL>
@Desc    :
"""
import _thread
import threading

import aigpy.stringHelper
import tidal_dl
from PyQt5.QtCore import pyqtSignal
from aigpy.modelHelper import ModelBase
from tidal_dl import Type
from tidal_dl.model import Album, SearchResult

from tidal_gui.tidalImp import tidalImp
from tidal_gui.view.searchView import SearchView
from tidal_gui.viewModel.viewModel import ViewModel


class SearchModel(ViewModel):
    SIGNAL_ADD_TASKITEM = pyqtSignal(ModelBase)

    def __init__(self):
        super(SearchModel, self).__init__()
        self._lock = threading.Lock()
        self._resultData = SearchResult()

        self.view = SearchView()
        self.view.setPageIndex(1)
        self.view.connectButton('search', self.__search__)
        self.view.connectButton('prePage', self.__searchPrePage__)
        self.view.connectButton('nextPage', self.__searchNextPage__)
        self.view.connectButton('download', self.__download__)
        self.view.connectTab(self.__search__)

    def __startThread__(self):
        def __thread_search__(model: SearchModel):
            typeIndex = model.view.getSelectedTabIndex()
            pageIndex = model.view.getPageIndex()
            searchText = model.view.getSearchText()

            limit = 20
            offset = (pageIndex - 1) * limit
            stype = tidal_dl.Type(typeIndex)

            msg, model._resultData = tidalImp.search(searchText, stype, offset, limit)
            if not aigpy.stringHelper.isNull(msg):
                # search failed; release the lock and bail out
                model._lock.release()
                return

            model.view.setTableItems(stype, offset, model._resultData)
            model._lock.release()

        _thread.start_new_thread(__thread_search__, (self,))

    def __search__(self):
        if not self._lock.acquire(False):
            return
        self.view.setPageIndex(1)
        self.__startThread__()

    def __searchNextPage__(self):
        if not self._lock.acquire(False):
            return
        self.view.setPageIndex(self.view.getPageIndex() + 1)
        self.__startThread__()

    def __searchPrePage__(self):
        if not self._lock.acquire(False):
            return
        index = self.view.getPageIndex() - 1
        if index < 1:
            index = 1
        self.view.setPageIndex(index)
        self.__startThread__()

    def __download__(self):
        typeIndex = self.view.getSelectedTabIndex()
        stype = tidal_dl.Type(typeIndex)
        index = self.view.getSelectedTableIndex(stype)
        if index < 0:
            # nothing selected; the original no-op `pass` let a -1 index
            # fall through to the list lookups below
            return

        data = None
        if stype == Type.Album:
            data = self._resultData.albums.items[index]
        elif stype == Type.Track:
            data = self._resultData.tracks.items[index]
        elif stype == Type.Video:
            data = self._resultData.videos.items[index]
        elif stype == Type.Playlist:
            data = self._resultData.playlists.items[index]

        if data is not None:
            self.SIGNAL_ADD_TASKITEM.emit(data)
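The acquire(False) guard above is what keeps a second click from starting a parallel search. A stripped-down sketch of that pattern follows; the names are illustrative (not from the class above), and try/finally is used here so the lock cannot leak if the search raises.

import threading

_search_lock = threading.Lock()  # illustrative stand-in for self._lock

def on_search_clicked():
    # Non-blocking acquire: returns False if a search is already running,
    # so repeated clicks are silently ignored instead of queueing up.
    if not _search_lock.acquire(False):
        return

    def worker():
        try:
            ...  # perform the search and update the view
        finally:
            _search_lock.release()  # release even on error

    threading.Thread(target=worker, daemon=True).start()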
#!/usr/bin/env python
# coding=utf-8

from __future__ import division, print_function, unicode_literals
from six.moves.urllib.request import urlretrieve
import numpy as np
import tarfile
import h5py
import os

bs_data_dir = os.environ.get('BRAINSTORM_DATA_DIR', '.')
url = 'http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz'
cifar10_file = os.path.join(bs_data_dir, 'cifar-10-binary.tar.gz')
hdf_file = os.path.join(bs_data_dir, 'CIFAR-10.hdf5')

print("Using data directory:", bs_data_dir)
if not os.path.exists(cifar10_file):
    print("Downloading CIFAR-10 data ...")
    urlretrieve(url, cifar10_file)
    print("Done.")

archive_paths = [
    'cifar-10-batches-bin/data_batch_1.bin',
    'cifar-10-batches-bin/data_batch_2.bin',
    'cifar-10-batches-bin/data_batch_3.bin',
    'cifar-10-batches-bin/data_batch_4.bin',
    'cifar-10-batches-bin/data_batch_5.bin',
    'cifar-10-batches-bin/test_batch.bin'
]
print("Extracting CIFAR-10 data ...")
with tarfile.open(cifar10_file) as f:
    res = []
    for fn in archive_paths:
        buf = f.extractfile(fn).read()
        # np.fromstring is deprecated; np.frombuffer reads the same bytes.
        tmp = np.frombuffer(buf, dtype=np.uint8).reshape(-1, 1024 * 3 + 1)
        res.append(tmp)
print("Done.")

ds = np.concatenate(res)

num_tr = 40000
x_tr = ds[:num_tr, 1:].reshape((1, num_tr, 3, 32, 32))
x_tr = x_tr.transpose([0, 1, 3, 4, 2])
y_tr = ds[:num_tr, 0].reshape((1, num_tr, 1))

x_va = ds[num_tr: 50000, 1:].reshape((1, 10000, 3, 32, 32))
x_va = x_va.transpose([0, 1, 3, 4, 2])
y_va = ds[num_tr: 50000, 0].reshape((1, 10000, 1))

x_te = ds[50000:, 1:].reshape((1, 10000, 3, 32, 32))
x_te = x_te.transpose([0, 1, 3, 4, 2])
y_te = ds[50000:, 0].reshape((1, 10000, 1))

tr_mean = x_tr.squeeze().mean(axis=0)
tr_std = x_tr.squeeze().std(axis=0)
x_tr = (x_tr - tr_mean) / tr_std
x_va = (x_va - tr_mean) / tr_std
x_te = (x_te - tr_mean) / tr_std

print("Creating CIFAR-10 HDF5 dataset ...")
f = h5py.File(hdf_file, 'w')
description = """
The CIFAR-10 dataset is a labeled subset of the 80 million tiny images
dataset. It consists of 60000 32x32 colour images in 10 classes, with 6000
images per class. There are 50000 training images and 10000 test images.

The dataset was obtained from the link:
http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz

Attributes
==========

description: This description.
mean: The pixel-wise mean of the first 40000 training set images.
std: The pixel-wise standard deviation of the first 40000 training set
    images.

Variants
========

normalized_full: Contains 'training' and 'test' sets. Image data has been
    normalized to have zero mean and unit standard deviation.
normalized_split: Contains 'training' (first 40K out of the full training
    set), 'validation' (remaining 10K out of the full training set) and
    'test' sets. Image data has been normalized to have zero mean and unit
    standard deviation.
"""
f.attrs['description'] = description
f.attrs['mean'] = tr_mean
f.attrs['std'] = tr_std

variant = f.create_group('normalized_split')
group = variant.create_group('training')
group.create_dataset(name='default', data=x_tr, compression='gzip')
group.create_dataset(name='targets', data=y_tr, compression='gzip')
group = variant.create_group('validation')
group.create_dataset(name='default', data=x_va, compression='gzip')
group.create_dataset(name='targets', data=y_va, compression='gzip')
group = variant.create_group('test')
group.create_dataset(name='default', data=x_te, compression='gzip')
group.create_dataset(name='targets', data=y_te, compression='gzip')

nr_tr = 50000
x_tr = ds[:nr_tr, 1:].reshape((1, nr_tr, 3, 32, 32)).transpose([0, 1, 3, 4, 2])
x_tr = (x_tr - tr_mean) / tr_std
y_tr = ds[:nr_tr, 0].reshape((1, nr_tr, 1))

variant = f.create_group('normalized_full')
group = variant.create_group('training')
group.create_dataset(name='default', data=x_tr, compression='gzip')
group.create_dataset(name='targets', data=y_tr, compression='gzip')
group = variant.create_group('test')
group.create_dataset(name='default', data=x_te, compression='gzip')
group.create_dataset(name='targets', data=y_te, compression='gzip')
f.close()
print("Done.")
#!/usr/bin/env python

# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Glance Scrub Service
"""

import os
import sys

import eventlet

# NOTE(jokke): As per the eventlet commit
# b756447bab51046dfc6f1e0e299cc997ab343701 there's circular import happening
# which can be solved making sure the hubs are properly and fully imported
# before calling monkey_patch(). This is solved in eventlet 0.22.0 but we
# need to address it before that is widely used around.
eventlet.hubs.get_hub()

if os.name == 'nt':
    # eventlet monkey patching the os module causes subprocess.Popen to fail
    # on Windows when using pipes due to missing non-blocking IO support.
    eventlet.patcher.monkey_patch(os=False)
else:
    eventlet.patcher.monkey_patch()

# Monkey patch the original current_thread to use the up-to-date _active
# global variable. See https://bugs.launchpad.net/bugs/1863021 and
# https://github.com/eventlet/eventlet/issues/592
import __original_module_threading as orig_threading
import threading
orig_threading.current_thread.__globals__['_active'] = threading._active

import subprocess

# If ../glance/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir,
                                                os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
    sys.path.insert(0, possible_topdir)

import glance_store
from os_win import utilsfactory as os_win_utilsfactory
from oslo_config import cfg
from oslo_log import log as logging

from glance.common import config
from glance.common import exception
from glance import scrubber

CONF = cfg.CONF
logging.register_options(CONF)
CONF.set_default(name='use_stderr', default=True)


def main():
    # Used on Windows, ensuring that a single scrubber can run at a time.
    mutex = None
    mutex_acquired = False

    try:
        if os.name == 'nt':
            # We can't rely on process names on Windows as there may be
            # wrappers with the same name.
            mutex = os_win_utilsfactory.get_mutex(
                name='Global\\glance-scrubber')
            mutex_acquired = mutex.acquire(timeout_ms=0)

        CONF.register_cli_opts(scrubber.scrubber_cmd_cli_opts)
        CONF.register_opts(scrubber.scrubber_cmd_opts)
        config.parse_args()
        logging.setup(CONF, 'glance')

        CONF.import_opt('enabled_backends', 'glance.common.wsgi')

        if CONF.enabled_backends:
            glance_store.register_store_opts(CONF)
            glance_store.create_multi_stores(CONF)
            glance_store.verify_store()
        else:
            glance_store.register_opts(CONF)
            glance_store.create_stores(CONF)
            glance_store.verify_default_store()

        if CONF.restore and CONF.daemon:
            sys.exit("ERROR: The restore and daemon options should not be set "
                     "together. Please use either of them in one request.")

        app = scrubber.Scrubber(glance_store)

        if CONF.restore:
            if os.name == 'nt':
                scrubber_already_running = not mutex_acquired
            else:
                scrubber_already_running = scrubber_already_running_posix()

            if scrubber_already_running:
                already_running_msg = (
                    "ERROR: glance-scrubber is already running. "
                    "Please ensure that the daemon is stopped.")
                sys.exit(already_running_msg)

            app.revert_image_status(CONF.restore)
        elif CONF.daemon:
            server = scrubber.Daemon(CONF.wakeup_time)
            server.start(app)
            server.wait()
        else:
            app.run()
    except (exception.ImageNotFound, exception.Conflict) as e:
        sys.exit("ERROR: %s" % e)
    except RuntimeError as e:
        sys.exit("ERROR: %s" % e)
    finally:
        if mutex and mutex_acquired:
            mutex.release()


def scrubber_already_running_posix():
    # Check whether another glance-scrubber process is running:
    # 1. Try to find the pid file if scrubber is controlled by
    #    glance-control
    # 2. Try to check the process name.
    pid_file = '/var/run/glance/glance-scrubber.pid'
    if os.path.exists(os.path.abspath(pid_file)):
        return True

    for glance_scrubber_name in ['glance-scrubber',
                                 'glance.cmd.scrubber']:
        cmd = subprocess.Popen(
            ['/usr/bin/pgrep', '-f', glance_scrubber_name],
            stdout=subprocess.PIPE, shell=False)
        pids, _ = cmd.communicate()

        # The response format of subprocess.Popen.communicate() is
        # different between py2 and py3. It's "string" in py2, but
        # "bytes" in py3.
        if isinstance(pids, bytes):
            pids = pids.decode()
        self_pid = os.getpid()

        if pids.count('\n') > 1 and str(self_pid) in pids:
            # One process is self, so if the process number is > 1, it
            # means that another glance-scrubber process is running.
            return True
        elif pids.count('\n') > 0 and str(self_pid) not in pids:
            # If self is not in the result and the pid count is still
            # > 0, it means that another glance-scrubber process is
            # running.
            return True
    return False


if __name__ == '__main__':
    main()
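Condensed for reference, the POSIX half of the single-instance check above; the function name is illustrative, while the pgrep call and the bytes handling mirror the code just shown.

import os
import subprocess

def already_running(pattern):
    # pgrep -f matches the full command line of every process.
    out, _ = subprocess.Popen(['/usr/bin/pgrep', '-f', pattern],
                              stdout=subprocess.PIPE,
                              shell=False).communicate()
    if isinstance(out, bytes):  # py3 returns bytes, py2 returned str
        out = out.decode()
    # Ignore our own pid; any remaining pid means another instance runs.
    others = [p for p in out.split() if p and int(p) != os.getpid()]
    return bool(others)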
<gh_stars>1000+
/*
 * Copyright 2017-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.atomix.core.barrier;

import io.atomix.core.AbstractPrimitiveTest;
import org.junit.Ignore;
import org.junit.Test;

import java.time.Duration;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Distributed cyclic barrier test.
 */
public class DistributedCyclicBarrierTest extends AbstractPrimitiveTest {
  @Test
  @Ignore
  public void testBarrier() throws Exception {
    CountDownLatch latch = new CountDownLatch(1);
    AsyncDistributedCyclicBarrier barrier1 = atomix().cyclicBarrierBuilder("test-barrier")
        .withProtocol(protocol())
        .build()
        .async();
    AsyncDistributedCyclicBarrier barrier2 = atomix().cyclicBarrierBuilder("test-barrier")
        .withProtocol(protocol())
        .build()
        .async();
    AsyncDistributedCyclicBarrier barrier3 = atomix().cyclicBarrierBuilder("test-barrier")
        .withProtocol(protocol())
        .withBarrierAction(() -> latch.countDown())
        .build()
        .async();

    assertTrue(barrier1.getParties().get(10, TimeUnit.SECONDS) == 3);
    assertTrue(barrier2.getParties().get(10, TimeUnit.SECONDS) == 3);
    assertTrue(barrier3.getParties().get(10, TimeUnit.SECONDS) == 3);

    assertTrue(barrier1.getNumberWaiting().get(10, TimeUnit.SECONDS) == 0);
    assertTrue(barrier2.getNumberWaiting().get(10, TimeUnit.SECONDS) == 0);
    assertTrue(barrier3.getNumberWaiting().get(10, TimeUnit.SECONDS) == 0);

    CompletableFuture<Integer> future1 = barrier1.await();
    assertTrue(barrier1.getNumberWaiting().get(10, TimeUnit.SECONDS) == 1);
    assertTrue(barrier2.getNumberWaiting().get(10, TimeUnit.SECONDS) == 1);
    assertTrue(barrier3.getNumberWaiting().get(10, TimeUnit.SECONDS) == 1);

    CompletableFuture<Integer> future2 = barrier2.await();
    CompletableFuture<Integer> future3 = barrier3.await();
    future1.get(10, TimeUnit.SECONDS);
    future2.get(10, TimeUnit.SECONDS);
    future3.get(10, TimeUnit.SECONDS);
    latch.await(10, TimeUnit.SECONDS);

    assertTrue(barrier1.getParties().get(10, TimeUnit.SECONDS) == 3);
    assertTrue(barrier2.getParties().get(10, TimeUnit.SECONDS) == 3);
    assertTrue(barrier3.getParties().get(10, TimeUnit.SECONDS) == 3);

    assertTrue(barrier1.getNumberWaiting().get(10, TimeUnit.SECONDS) == 0);
    assertTrue(barrier2.getNumberWaiting().get(10, TimeUnit.SECONDS) == 0);
    assertTrue(barrier3.getNumberWaiting().get(10, TimeUnit.SECONDS) == 0);

    future1 = barrier1.await();
    assertTrue(barrier1.getNumberWaiting().get(10, TimeUnit.SECONDS) == 1);
    assertTrue(barrier2.getNumberWaiting().get(10, TimeUnit.SECONDS) == 1);
    assertTrue(barrier3.getNumberWaiting().get(10, TimeUnit.SECONDS) == 1);

    future2 = barrier2.await();
    future3 = barrier3.await();
    future1.get(10, TimeUnit.SECONDS);
    future2.get(10, TimeUnit.SECONDS);
    future3.get(10, TimeUnit.SECONDS);
  }

  @Test
  public void testBrokenBarrierReset() throws Exception {
    AsyncDistributedCyclicBarrier barrier1 = atomix().cyclicBarrierBuilder("test-barrier-reset")
        .withProtocol(protocol())
        .build()
        .async();
    AsyncDistributedCyclicBarrier barrier2 = atomix().cyclicBarrierBuilder("test-barrier-reset")
        .withProtocol(protocol())
        .build()
        .async();
    AsyncDistributedCyclicBarrier barrier3 = atomix().cyclicBarrierBuilder("test-barrier-reset")
        .withProtocol(protocol())
        .build()
        .async();

    CompletableFuture<Integer> future1 = barrier1.await();
    CompletableFuture<Integer> future2 = barrier2.await(Duration.ofMillis(500));
    try {
      future1.get(10, TimeUnit.SECONDS);
      fail();
    } catch (ExecutionException e) {
      assertTrue(e.getCause() instanceof BrokenBarrierException);
    }
    try {
      future2.get(10, TimeUnit.SECONDS);
      fail();
    } catch (ExecutionException e) {
      assertTrue(e.getCause() instanceof BrokenBarrierException);
    }

    CompletableFuture<Integer> future3 = barrier1.await();
    try {
      future3.get(10, TimeUnit.SECONDS);
      fail();
    } catch (ExecutionException e) {
      assertTrue(e.getCause() instanceof BrokenBarrierException);
    }

    assertTrue(barrier1.isBroken().get(10, TimeUnit.SECONDS));
    barrier1.reset().get(10, TimeUnit.SECONDS);
    assertFalse(barrier1.isBroken().get(10, TimeUnit.SECONDS));

    assertTrue(barrier1.getParties().get(10, TimeUnit.SECONDS) == 3);
    assertTrue(barrier2.getParties().get(10, TimeUnit.SECONDS) == 3);
    assertTrue(barrier3.getParties().get(10, TimeUnit.SECONDS) == 3);

    assertTrue(barrier1.getNumberWaiting().get(10, TimeUnit.SECONDS) == 0);
    assertTrue(barrier2.getNumberWaiting().get(10, TimeUnit.SECONDS) == 0);
    assertTrue(barrier3.getNumberWaiting().get(10, TimeUnit.SECONDS) == 0);

    future1 = barrier1.await();
    assertTrue(barrier1.getNumberWaiting().get(10, TimeUnit.SECONDS) == 1);
    assertTrue(barrier2.getNumberWaiting().get(10, TimeUnit.SECONDS) == 1);
    assertTrue(barrier3.getNumberWaiting().get(10, TimeUnit.SECONDS) == 1);

    future2 = barrier2.await();
    future3 = barrier3.await();
    future1.get(10, TimeUnit.SECONDS);
    future2.get(10, TimeUnit.SECONDS);
    future3.get(10, TimeUnit.SECONDS);
  }
}
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

#ifndef SC_PAGEDATA_HXX
#define SC_PAGEDATA_HXX

#include "global.hxx"
#include "address.hxx"

class ScDocShell;

//============================================================================

class ScPrintRangeData
{
private:
    ScRange aPrintRange;
    size_t  nPagesX;
    SCCOL*  pPageEndX;
    size_t  nPagesY;
    SCROW*  pPageEndY;
    long    nFirstPage;
    sal_Bool    bTopDown;
    sal_Bool    bAutomatic;

public:
    ScPrintRangeData();
    ~ScPrintRangeData();

    void            SetPrintRange( const ScRange& rNew )    { aPrintRange = rNew; }
    const ScRange&  GetPrintRange() const                   { return aPrintRange; }

    void            SetPagesX( size_t nCount, const SCCOL* pEnd );
    void            SetPagesY( size_t nCount, const SCROW* pEnd );

    size_t          GetPagesX() const       { return nPagesX; }
    const SCCOL*    GetPageEndX() const     { return pPageEndX; }
    size_t          GetPagesY() const       { return nPagesY; }
    const SCROW*    GetPageEndY() const     { return pPageEndY; }

    void            SetFirstPage( long nNew )   { nFirstPage = nNew; }
    long            GetFirstPage() const        { return nFirstPage; }

    void            SetTopDown( sal_Bool bSet ) { bTopDown = bSet; }
    sal_Bool        IsTopDown() const           { return bTopDown; }

    void            SetAutomatic( sal_Bool bSet )   { bAutomatic = bSet; }
    sal_Bool        IsAutomatic() const             { return bAutomatic; }
};

class ScPageBreakData
{
private:
    size_t              nAlloc;
    size_t              nUsed;
    ScPrintRangeData*   pData;      // Array

public:
    ScPageBreakData(size_t nMax);
    ~ScPageBreakData();

    size_t              GetCount() const    { return nUsed; }
    ScPrintRangeData&   GetData(size_t i);

    sal_Bool            IsEqual( const ScPageBreakData& rOther ) const;

    void                AddPages();
};

#endif
<filename>java/java.editor/test/unit/data/org/netbeans/modules/java/editor/completion/data/GenericsMethodNoTypeParams.java
package test;

public class Test {

    public X get(Y obj) {
        return null;
    }
}
<filename>library/cpp/coroutine/engine/stack/benchmark/alloc_bm.cpp
#include <benchmark/benchmark.h>

#include <util/generic/vector.h>
#include <util/system/yassert.h>

#include <library/cpp/coroutine/engine/stack/stack_allocator.h>
#include <library/cpp/coroutine/engine/stack/stack_guards.h>
#include <library/cpp/coroutine/engine/stack/stack_pool.h>
#include <library/cpp/coroutine/engine/stack/stack_utils.h>

namespace NCoro::NStack::NBenchmark {

    const char* TestCoroName = "any_name";
    constexpr uint64_t BigCoroSize = PageSize * 25;
    constexpr uint64_t SmallCoroSize = PageSize * 4;
    constexpr uint64_t ManyStacks = 4096;

    void BasicOperations(TStackHolder& stack) {
        Y_VERIFY(!stack.Get().empty());
        stack.LowerCanaryOk();
        stack.UpperCanaryOk();
    }

    void WriteStack(TStackHolder& stack) {
        auto memory = stack.Get();
        Y_VERIFY(!memory.empty());
        stack.LowerCanaryOk();
        stack.UpperCanaryOk();
        for (uint64_t i = PageSize / 2; i < memory.size(); i += PageSize * 2) {
            memory[i] = 42;
        }
    }

    static void BM_GetAlignedMemory(benchmark::State& state) {
        char* raw = nullptr;
        char* aligned = nullptr;
        for (auto _ : state) {
            if (NCoro::NStack::GetAlignedMemory(state.range(0), raw, aligned)) {
                free(raw);
            }
        }
    }
    BENCHMARK(BM_GetAlignedMemory)->RangeMultiplier(16)->Range(1, 1024 * 1024);

    static void BM_GetAlignedMemoryReleaseRss(benchmark::State& state) {
        char* raw = nullptr;
        char* aligned = nullptr;
        for (auto _ : state) {
            if (NCoro::NStack::GetAlignedMemory(state.range(0), raw, aligned)) {
                const auto toFree = state.range(0) > 2 ? state.range(0) - 2 : 1;
                ReleaseRss(aligned, toFree);
                free(raw);
            }
        }
    }
    BENCHMARK(BM_GetAlignedMemoryReleaseRss)->RangeMultiplier(16)->Range(1, 1024 * 1024);

    static void BM_PoolAllocator(benchmark::State& state) {
        auto allocator = GetAllocator(TPoolAllocatorSettings{}, (EGuard)state.range(0));
        for (auto _ : state) {
            TStackHolder stack(*allocator, state.range(1), TestCoroName);
            BasicOperations(stack);
        }
    }
    BENCHMARK(BM_PoolAllocator)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize})
        ->Args({(int64_t)EGuard::Page, BigCoroSize})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize});

    static void BM_DefaultAllocator(benchmark::State& state) {
        auto allocator = GetAllocator(Nothing(), (EGuard)state.range(0));
        for (auto _ : state) {
            TStackHolder stack(*allocator, state.range(1), TestCoroName);
            BasicOperations(stack);
        }
    }
    BENCHMARK(BM_DefaultAllocator)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize})
        ->Args({(int64_t)EGuard::Page, BigCoroSize})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize});

    static void BM_PoolAllocatorManyStacksOneAtTime(benchmark::State& state) {
        TPoolAllocatorSettings settings;
        settings.StacksPerChunk = state.range(2);
        auto allocator = GetAllocator(settings, (EGuard)state.range(0));
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                TStackHolder stack(*allocator, state.range(1), TestCoroName);
                BasicOperations(stack);
            }
        }
    }
    BENCHMARK(BM_PoolAllocatorManyStacksOneAtTime)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1024});

    static void BM_DefaultAllocatorManyStacksOneAtTime(benchmark::State& state) {
        auto allocator = GetAllocator(Nothing(), (EGuard)state.range(0));
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                TStackHolder stack(*allocator, state.range(1), TestCoroName);
                BasicOperations(stack);
            }
        }
    }
    BENCHMARK(BM_DefaultAllocatorManyStacksOneAtTime)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize})
        ->Args({(int64_t)EGuard::Page, BigCoroSize})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize});

    static void BM_PoolAllocatorManyStacks(benchmark::State& state) {
        TPoolAllocatorSettings settings;
        settings.StacksPerChunk = state.range(2);
        auto allocator = GetAllocator(settings, (EGuard)state.range(0));
        TVector<TStackHolder> stacks; // store stacks during benchmark
        stacks.reserve(ManyStacks);
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.emplace_back(*allocator, state.range(1), TestCoroName);
                BasicOperations(stacks.back());
            }
        }
    }
    BENCHMARK(BM_PoolAllocatorManyStacks)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1024});

    static void BM_DefaultAllocatorManyStacks(benchmark::State& state) {
        auto allocator = GetAllocator(Nothing(), (EGuard)state.range(0));
        TVector<TStackHolder> stacks; // store stacks during benchmark
        stacks.reserve(ManyStacks);
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.push_back(TStackHolder(*allocator, state.range(1), TestCoroName));
                BasicOperations(stacks.back());
            }
        }
    }
    BENCHMARK(BM_DefaultAllocatorManyStacks)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize})
        ->Args({(int64_t)EGuard::Page, BigCoroSize})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize});

    // ------------------------------------------------------------------------

    static void BM_PoolAllocatorManyStacksReleased(benchmark::State& state) {
        TPoolAllocatorSettings settings;
        settings.StacksPerChunk = state.range(2);
        auto allocator = GetAllocator(settings, (EGuard)state.range(0));
        TVector<TStackHolder> stacks; // store stacks during benchmark
        stacks.reserve(ManyStacks);
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.emplace_back(*allocator, state.range(1), TestCoroName);
                BasicOperations(stacks.back());
            }
            stacks.clear();
        }
    }
    BENCHMARK(BM_PoolAllocatorManyStacksReleased)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1024});

    static void BM_DefaultAllocatorManyStacksReleased(benchmark::State& state) {
        auto allocator = GetAllocator(Nothing(), (EGuard)state.range(0));
        TVector<TStackHolder> stacks; // store stacks during benchmark
        stacks.reserve(ManyStacks);
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.push_back(TStackHolder(*allocator, state.range(1), TestCoroName));
                BasicOperations(stacks.back());
            }
            stacks.clear();
        }
    }
    BENCHMARK(BM_DefaultAllocatorManyStacksReleased)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize})
        ->Args({(int64_t)EGuard::Page, BigCoroSize})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize});

    // ------------------------------------------------------------------------

    static void BM_PoolAllocatorManyStacksReleasedAndRealloc(benchmark::State& state) {
        TPoolAllocatorSettings settings;
        settings.StacksPerChunk = state.range(2);
        auto allocator = GetAllocator(settings, (EGuard)state.range(0));
        TVector<TStackHolder> stacks; // store stacks during benchmark
        stacks.reserve(ManyStacks);
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.emplace_back(*allocator, state.range(1), TestCoroName);
                BasicOperations(stacks.back());
            }
            stacks.clear();
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.emplace_back(*allocator, state.range(1), TestCoroName);
                BasicOperations(stacks.back());
            }
        }
    }
    BENCHMARK(BM_PoolAllocatorManyStacksReleasedAndRealloc)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1024})
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 8192})
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 8192})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 8192})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 8192});

    static void BM_DefaultAllocatorManyStacksReleasedAndRealloc(benchmark::State& state) {
        auto allocator = GetAllocator(Nothing(), (EGuard)state.range(0));
        TVector<TStackHolder> stacks; // store stacks during benchmark
        stacks.reserve(ManyStacks);
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.push_back(TStackHolder(*allocator, state.range(1), TestCoroName));
                BasicOperations(stacks.back());
            }
            stacks.clear();
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.push_back(TStackHolder(*allocator, state.range(1), TestCoroName));
                BasicOperations(stacks.back());
            }
        }
    }
    BENCHMARK(BM_DefaultAllocatorManyStacksReleasedAndRealloc)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize})
        ->Args({(int64_t)EGuard::Page, BigCoroSize})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize});

    // ------------------------------------------------------------------------

    static void BM_PoolAllocatorManyStacksMemoryWriteReleasedAndRealloc(benchmark::State& state) {
        TPoolAllocatorSettings settings;
        settings.StacksPerChunk = state.range(2);
        auto allocator = GetAllocator(settings, (EGuard)state.range(0));
        TVector<TStackHolder> stacks; // store stacks during benchmark
        stacks.reserve(ManyStacks);
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.emplace_back(*allocator, state.range(1), TestCoroName);
                WriteStack(stacks.back());
            }
            stacks.clear();
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.emplace_back(*allocator, state.range(1), TestCoroName);
                WriteStack(stacks.back());
            }
        }
    }
    BENCHMARK(BM_PoolAllocatorManyStacksMemoryWriteReleasedAndRealloc)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1})
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 1024})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 1024})
        ->Args({(int64_t)EGuard::Canary, BigCoroSize, 8192})
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize, 8192})
        ->Args({(int64_t)EGuard::Page, BigCoroSize, 8192})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize, 8192});

    static void BM_DefaultAllocatorManyStacksMemoryWriteReleasedAndRealloc(benchmark::State& state) {
        auto allocator = GetAllocator(Nothing(), (EGuard)state.range(0));
        TVector<TStackHolder> stacks; // store stacks during benchmark
        stacks.reserve(ManyStacks);
        for (auto _ : state) {
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.push_back(TStackHolder(*allocator, state.range(1), TestCoroName));
                WriteStack(stacks.back());
            }
            stacks.clear();
            for (uint64_t i = 0; i < ManyStacks; ++i) {
                stacks.push_back(TStackHolder(*allocator, state.range(1), TestCoroName));
                WriteStack(stacks.back());
            }
        }
    }
    BENCHMARK(BM_DefaultAllocatorManyStacksMemoryWriteReleasedAndRealloc)
        ->Args({(int64_t)EGuard::Canary, BigCoroSize}) // old version - ArgsProduct() is not supported
        ->Args({(int64_t)EGuard::Canary, SmallCoroSize})
        ->Args({(int64_t)EGuard::Page, BigCoroSize})
        ->Args({(int64_t)EGuard::Page, SmallCoroSize});

}

BENCHMARK_MAIN();
<gh_stars>1000+
/*
 * Tencent is pleased to support the open source community by making Tencent Shadow available.
 * Copyright (C) 2019 THL A29 Limited, a Tencent company.  All rights reserved.
 *
 * Licensed under the BSD 3-Clause License (the "License"); you may not use
 * this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     https://opensource.org/licenses/BSD-3-Clause
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.tencent.shadow.core.manager.installplugin;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

public class PluginConfig {
    /**
     * Format version of the config JSON file.
     */
    public int version;
    /**
     * Compatible format versions of the config JSON file.
     */
    public int[] compact_version;
    /**
     * ID identifying one plugin release.
     */
    public String UUID;
    /**
     * ID identifying one plugin release; version info may be described in a custom format.
     */
    public String UUID_NickName;
    /**
     * File info of the pluginLoader APK.
     */
    public FileInfo pluginLoader;
    /**
     * File info of the runtime.
     */
    public FileInfo runTime;
    /**
     * Business plugins. key: partKey, value: file info.
     */
    public Map<String, PluginFileInfo> plugins = new HashMap<>();
    /**
     * Storage directory of the plugins.
     */
    public File storageDir;

    public static class FileInfo {
        public final File file;
        public final String hash;

        FileInfo(File file, String hash) {
            this.file = file;
            this.hash = hash;
        }
    }

    public static class PluginFileInfo extends FileInfo {
        final String[] dependsOn;
        final String[] hostWhiteList;
        final String businessName;

        PluginFileInfo(String businessName, FileInfo fileInfo,
                       String[] dependsOn, String[] hostWhiteList) {
            this(businessName, fileInfo.file, fileInfo.hash, dependsOn, hostWhiteList);
        }

        PluginFileInfo(String businessName, File file, String hash,
                       String[] dependsOn, String[] hostWhiteList) {
            super(file, hash);
            this.businessName = businessName;
            this.dependsOn = dependsOn;
            this.hostWhiteList = hostWhiteList;
        }
    }

    public static PluginConfig parseFromJson(String json, File storageDir) throws JSONException {
        JSONObject jsonObject = new JSONObject(json);
        PluginConfig pluginConfig = new PluginConfig();
        pluginConfig.version = jsonObject.getInt("version");

        JSONArray compact_version_json = jsonObject.optJSONArray("compact_version");
        if (compact_version_json != null && compact_version_json.length() > 0) {
            pluginConfig.compact_version = new int[compact_version_json.length()];
            for (int i = 0; i < compact_version_json.length(); i++) {
                pluginConfig.compact_version[i] = compact_version_json.getInt(i);
            }
        }

        // TODO #27: version check and incompatibility check for the JSON format
        pluginConfig.UUID = jsonObject.getString("UUID");
        pluginConfig.UUID_NickName = jsonObject.getString("UUID_NickName");

        JSONObject loaderJson = jsonObject.optJSONObject("pluginLoader");
        if (loaderJson != null) {
            pluginConfig.pluginLoader = getFileInfo(loaderJson, storageDir);
        }

        JSONObject runtimeJson = jsonObject.optJSONObject("runtime");
        if (runtimeJson != null) {
            pluginConfig.runTime = getFileInfo(runtimeJson, storageDir);
        }

        JSONArray pluginArray = jsonObject.optJSONArray("plugins");
        if (pluginArray != null && pluginArray.length() > 0) {
            for (int i = 0; i < pluginArray.length(); i++) {
                JSONObject plugin = pluginArray.getJSONObject(i);
                String partKey = plugin.getString("partKey");
                pluginConfig.plugins.put(partKey, getPluginFileInfo(plugin, storageDir));
            }
        }

        pluginConfig.storageDir = storageDir;
        return pluginConfig;
    }

    private static FileInfo getFileInfo(JSONObject jsonObject, File storageDir) throws JSONException {
        String name = jsonObject.getString("apkName");
        String hash = jsonObject.getString("hash");
        return new FileInfo(new File(storageDir, name), hash);
    }

    private static PluginFileInfo getPluginFileInfo(JSONObject jsonObject, File storageDir) throws JSONException {
        String businessName = jsonObject.optString("businessName", "");
        FileInfo fileInfo = getFileInfo(jsonObject, storageDir);
        String[] dependsOn = getArrayStringByName(jsonObject, "dependsOn");
        String[] hostWhiteList = getArrayStringByName(jsonObject, "hostWhiteList");
        return new PluginFileInfo(businessName, fileInfo, dependsOn, hostWhiteList);
    }

    private static String[] getArrayStringByName(JSONObject jsonObject, String name) throws JSONException {
        JSONArray jsonArray = jsonObject.optJSONArray(name);
        String[] dependsOn;
        if (jsonArray != null) {
            dependsOn = new String[jsonArray.length()];
            for (int i = 0; i < jsonArray.length(); i++) {
                dependsOn[i] = jsonArray.getString(i);
            }
        } else {
            dependsOn = new String[]{};
        }
        return dependsOn;
    }
}
/*
 * Copyright 2012-2014 eBay Software Foundation and selendroid committers.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package io.selendroid.server;

import android.app.Instrumentation;
import android.content.Context;
import android.os.Handler;

import io.selendroid.server.common.exceptions.SelendroidException;
import io.selendroid.server.util.SelendroidLogger;

public class JUnitRunnerServerInstrumentation extends DefaultServerInstrumentation {
    private AndroidServer server;

    public JUnitRunnerServerInstrumentation(Instrumentation instrumentation,
                                            InstrumentationArguments args) {
        super(instrumentation, args);
    }

    @Override
    public void onCreate() {
        super.onCreate();
    }

    @Override
    public void startServer() {
        super.startServer();
    }

    @Override
    protected void startServerImpl() {
        SelendroidLogger.info("*** ServerInstrumentation#startServerImpl() ***");
        if (server != null) {
            server.stop();
        }
        try {
            if (server == null) {
                server = new AndroidServer(this, serverPort);
            }
            DefaultServerInstrumentation.startAndroidServer(server, wakeLock);
        } catch (Exception e) {
            SelendroidLogger.error("Error starting httpd.", e);
            throw new SelendroidException("Httpd failed to start!");
        }
    }

    @Override
    public void stopServer() {
        if (server == null) {
            return;
        }
        server.stop();
    }
}
<reponame>sreekanth370/presto
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.accumulo.udf;

import com.facebook.presto.common.type.StandardTypes;
import com.facebook.presto.spi.function.Description;
import com.facebook.presto.spi.function.ScalarFunction;
import com.facebook.presto.spi.function.SqlType;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;

import java.util.UUID;

/**
 * Class containing String-based SQL functions for Accumulo connector
 */
public class AccumuloStringFunctions
{
    private AccumuloStringFunctions() {}

    @Description("Returns a randomly generated UUID")
    @ScalarFunction(value = "uuid", deterministic = false)
    @SqlType(StandardTypes.VARCHAR)
    public static Slice UUID()
    {
        return Slices.utf8Slice(UUID.randomUUID().toString());
    }
}
package com.querydsl.core.domain2;

import com.querydsl.core.annotations.QueryEntity;

@QueryEntity
public class TenantImpl implements Tenant {

}
package de.metas.handlingunits.attribute.impl; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import de.metas.handlingunits.HuId; import de.metas.handlingunits.IHandlingUnitsBL; import de.metas.handlingunits.attribute.HUAndPIAttributes; import de.metas.handlingunits.attribute.IHUAttributesDAO; import de.metas.handlingunits.attribute.IHUPIAttributesDAO; import de.metas.handlingunits.attribute.PIAttributes; import de.metas.handlingunits.exceptions.HUException; import de.metas.handlingunits.model.I_M_HU; import de.metas.handlingunits.model.I_M_HU_Attribute; import de.metas.logging.LogManager; import de.metas.util.Check; import de.metas.util.Services; import org.adempiere.ad.service.IDeveloperModeBL; import org.adempiere.ad.trx.api.ITrx; import org.adempiere.ad.trx.api.ITrxManager; import org.adempiere.ad.trx.api.OnTrxMissingPolicy; import org.adempiere.exceptions.AdempiereException; import org.adempiere.mm.attributes.AttributeId; import org.adempiere.mm.attributes.api.IAttributeDAO; import org.adempiere.model.InterfaceWrapperHelper; import org.adempiere.service.ISysConfigBL; import org.adempiere.util.lang.ObjectUtils; import org.adempiere.util.text.annotation.ToStringBuilder; import org.compiere.model.I_M_Attribute; import org.compiere.util.Util; import org.slf4j.Logger; import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.function.Function; /** * {@link IHUAttributesDAO} implementation which acts like a save buffer: * <ul> * <li>automatically loads attributes from underlying {@link IHUAttributesDAO}, if they does not already exist in our local cache * <li>on save, instead of directly saving them we are just adding them to the cache/buffer. Later, on {@link #flush()} everything will be saved. * </ul> * <p> * NOTE to developer: Please make sure all public methods are synchronized. * * @author tsa */ public class SaveDecoupledHUAttributesDAO implements IHUAttributesDAO { /** * Set this to <code>true</code> in {@link ISysConfigBL} to avoid M_HU_Attributes from not beeing saved, depending on your trxactions and stuff. 
*/ public static final String SYSCONFIG_AutoFlushEnabledInitial = SaveDecoupledHUAttributesDAO.class.getName() + ".AutoflushEnabledInitial"; // services private static final transient Logger logger = LogManager.getLogger(SaveDecoupledHUAttributesDAO.class); private final transient ITrxManager trxManager = Services.get(ITrxManager.class); // Parameters @ToStringBuilder(skip = true) private final IHUAttributesDAO db; private final boolean _autoflushEnabled = Services.get(ISysConfigBL.class).getBooleanValue(SYSCONFIG_AutoFlushEnabledInitial, false); // false to be backward compatible; // Status /** * Cache: huKey to huAtributeKey to {@link I_M_HU_Attribute} */ @ToStringBuilder(skip = true) private final HashMap<HuId, HUAttributesMap> _hu2huAttributes = new HashMap<>(); @ToStringBuilder(skip = true) private final List<I_M_HU_Attribute> _huAttributesToRemove = new ArrayList<>(); private final Set<Integer> idsToSaveFromLastFlush = new HashSet<>(); public SaveDecoupledHUAttributesDAO(final IHUAttributesDAO db) { super(); Check.assumeNotNull(db, "db not null"); this.db = db; } @Override public synchronized String toString() { return ObjectUtils.toString(this); } @Override protected void finalize() throws Throwable { if (!idsToSaveFromLastFlush.isEmpty()) { new AdempiereException("WARNING: It could be that following M_HU_Attribute_IDs have changes which will never be saved to database: " + idsToSaveFromLastFlush) .throwIfDeveloperModeOrLogWarningElse(logger); } if (!_huAttributesToRemove.isEmpty()) { new AdempiereException("WARNING: It could be that following M_HU_Attributes to be removed are skipped: " + _huAttributesToRemove) .throwIfDeveloperModeOrLogWarningElse(logger); } } /** * @return <code>false</code> by default, for performance reasons. Note that we do call {@link #flush()} on commit, see the <code>SaveOnCommitHUAttributesDAOTrxListener</code> in * {@link SaveOnCommitHUAttributesDAO}. */ public synchronized final boolean isAutoflushEnabled() { return _autoflushEnabled; } private boolean isIncrementalFlush() { return false; } @Override public synchronized I_M_HU_Attribute newHUAttribute(final Object contextProvider) { final I_M_HU_Attribute huAttribute = db.newHUAttribute(contextProvider); setReadonly(huAttribute, true); return huAttribute; } private static void setReadonly(final Object model, final boolean readonly) { if (model == null) { return; } InterfaceWrapperHelper.setSaveDeleteDisabled(model, readonly); } @Override public synchronized void save(final I_M_HU_Attribute huAttribute) { final int huAttributeId = huAttribute.getM_HU_Attribute_ID(); final boolean autoflushEnabled = isAutoflushEnabled(); if (autoflushEnabled) { if (huAttributeId > 0) { idsToSaveFromLastFlush.add(huAttributeId); } saveToDatabase(huAttribute, ITrx.TRXNAME_ThreadInherited); } final I_M_HU hu = huAttribute.getM_HU(); final HUAttributesMap huAttributes = getHUAttributesMap(hu); final I_M_HU_Attribute huAttributeOld = huAttributes.put(huAttribute); // // Check if cache is valid // NOTE: shall not happen, but even if it happens, i think it will not corrupt the data, so it's ok to just log it when running in production (08776). if (huAttributeOld != null && !Util.same(huAttribute, huAttributeOld)) { final HUException ex = new HUException("HU attribute shall not exist in internal cache or it shall be the same instance." 
+ "\n HU Attribute: " + huAttribute + " (" + huAttribute.getCreated() + ")" + "\n HU Attribute - trx: " + trxManager.get(InterfaceWrapperHelper.getTrxName(huAttribute), OnTrxMissingPolicy.ReturnTrxNone) + "\n HU Attribute(in cache): " + huAttributeOld + " (" + huAttributeOld.getCreated() + ")" + "\n HU: " + hu + "\n HU Trx: " + trxManager.get(InterfaceWrapperHelper.getTrxName(hu), OnTrxMissingPolicy.ReturnTrxNone) + "\n Autoflush enabled: " + autoflushEnabled); if (Services.get(IDeveloperModeBL.class).isEnabled()) { throw ex; } logger.warn(ex.getLocalizedMessage(), ex); } // // Track M_HU_Attribute_ID of changed HU Attributes // This set will be used for doing incremental flushes. if (!autoflushEnabled // if autoflush was enabled, record was already saved && huAttributeId > 0) { idsToSaveFromLastFlush.add(huAttributeId); } setReadonly(huAttribute, true); } @Override public synchronized void delete(final I_M_HU_Attribute huAttribute) { if (isAutoflushEnabled()) { deleteFromDatabase(huAttribute, ITrx.TRXNAME_ThreadInherited); } final I_M_HU hu = huAttribute.getM_HU(); final HUAttributesMap huAttributes = getHUAttributesMap(hu); huAttributes.remove(huAttribute); // // Case: HU Attribute was already saved in database so we will need to enqueue it to be deleted // NOTE: in case of autoflush, we already deleted it if (!isAutoflushEnabled() && huAttribute.getM_HU_Attribute_ID() > 0) { _huAttributesToRemove.add(huAttribute); } } @Override public List<I_M_HU_Attribute> retrieveAttributesNoCache(final Collection<HuId> huIds) { return db.retrieveAttributesNoCache(huIds); } @Override public synchronized HUAndPIAttributes retrieveAttributesOrdered(final I_M_HU hu) { final HUAttributesMap huAttributes = getHUAttributesMap(hu); final List<I_M_HU_Attribute> huAttributesList = huAttributes.toList(); final PIAttributes piAttributes = createPIAttributes(huAttributesList); final ImmutableList<I_M_HU_Attribute> huAttributesSorted = HUAttributesBySeqNoComparator.of(piAttributes).sortAndCopy(huAttributesList); return HUAndPIAttributes.of(huAttributesSorted, piAttributes); } private PIAttributes createPIAttributes(final Collection<I_M_HU_Attribute> huAttributesList) { final IHUPIAttributesDAO piAttributesRepo = Services.get(IHUPIAttributesDAO.class); final ImmutableSet<Integer> piAttributeIds = huAttributesList.stream().map(I_M_HU_Attribute::getM_HU_PI_Attribute_ID).collect(ImmutableSet.toImmutableSet()); return piAttributesRepo.retrievePIAttributesByIds(piAttributeIds); } private HUAttributesMap getHUAttributesMap(final I_M_HU hu) { return getHUAttributesMap(hu, this::retrieveHUAttributesMap); } private HUAttributesMap retrieveHUAttributesMap(final I_M_HU hu) { final HUAttributesMap huAttributes; final HUAndPIAttributes huAndPIAttributes = db.retrieveAttributesOrdered(hu); huAttributes = HUAttributesMap.of(huAndPIAttributes); return huAttributes; } private HUAttributesMap getHUAttributesMap(final I_M_HU hu, final Function<I_M_HU, HUAttributesMap> loader) { final HuId huId = HuId.ofRepoId(hu.getM_HU_ID()); return _hu2huAttributes.computeIfAbsent(huId, k -> loader.apply(hu)); } @Override public synchronized I_M_HU_Attribute retrieveAttribute(final I_M_HU hu, final AttributeId attributeId) { return getHUAttributesMap(hu) .getByAttributeIdOrNull(attributeId); } /** * Save all attributes to database */ @Override public synchronized final void flush() { logger.trace("Start flushing"); final String trxName = trxManager.getThreadInheritedTrxName(); logger.trace("TrxName={}", trxName); // Remove queued attributes for 
(final Iterator<I_M_HU_Attribute> it = _huAttributesToRemove.iterator(); it.hasNext(); )
		{
			final I_M_HU_Attribute huAttributeToRemove = it.next();
			deleteFromDatabase(huAttributeToRemove, trxName);

			// Also delete it from our list to prevent trying to delete it again
			it.remove();
		}

		//
		// Save all attributes
		for (final HUAttributesMap huAttributes : _hu2huAttributes.values())
		{
			if (huAttributes == null || huAttributes.isEmpty())
			{
				continue;
			}

			for (final I_M_HU_Attribute huAttribute : huAttributes)
			{
				saveToDatabase(huAttribute, trxName);
			}

			// NOTE: we are not deleting it from our map because we are using that map as a cache too
			// it.remove();
		}

		logger.trace("Flushing done");
	}

	private void saveToDatabase(final I_M_HU_Attribute model, final String trxName)
	{
		//
		// If incremental flush is enabled and our record is not in the list of records to be saved from the last flush
		// => do nothing
		final int huAttributeId = model.getM_HU_Attribute_ID();
		if (isIncrementalFlush()
				&& huAttributeId > 0
				&& !idsToSaveFromLastFlush.contains(huAttributeId))
		{
			trace("skip from saving", model);
			return;
		}

		setReadonly(model, false);
		try
		{
			trace("saving to db", model);
			// NOTE: it might be better to use the delegated DAO here
			InterfaceWrapperHelper.save(model, trxName);

			// Incremental flush: remove the HU Attribute from the "to save" list because it was already saved
			idsToSaveFromLastFlush.remove(huAttributeId);
		}
		finally
		{
			setReadonly(model, true);
		}
	}

	private void deleteFromDatabase(final Object model, final String trxName)
	{
		setReadonly(model, false);

		final String modelTrxName = InterfaceWrapperHelper.getTrxName(model);
		try
		{
			// NOTE: it might be better to use the delegated DAO here
			InterfaceWrapperHelper.setTrxName(model, trxName);
			InterfaceWrapperHelper.delete(model);
		}
		finally
		{
			InterfaceWrapperHelper.setTrxName(model, modelTrxName); // restore model's trxName
			setReadonly(model, true);
		}
	}

	/**
	 * Debugging method which logs the given huAttribute to the console.
	 */
	private void trace(final String message, final I_M_HU_Attribute huAttribute)
	{
		if (!logger.isTraceEnabled())
		{
			return;
		}

		final I_M_HU_Attribute modelOld = InterfaceWrapperHelper.createOld(huAttribute, I_M_HU_Attribute.class);
		final IAttributeDAO attributesRepo = Services.get(IAttributeDAO.class);
		final I_M_Attribute attribute = attributesRepo.getAttributeById(huAttribute.getM_Attribute_ID());
		final String modelChangeInfo = ""
				+ Services.get(IHandlingUnitsBL.class).getDisplayName(huAttribute.getM_HU())
				+ " - "
				+ attribute.getValue() + "/" + attribute.getName()
				+ ": " + modelOld.getValue() + "->" + huAttribute.getValue()
				+ ", " + modelOld.getValueNumber() + "->" + huAttribute.getValueNumber()
				//
				+ (InterfaceWrapperHelper.hasChanges(huAttribute) ?
", HasChanges" : ""); final String daoStatus = "IncrementalFlush=" + isIncrementalFlush() + ", IdsToSaveFromLastFlush=" + idsToSaveFromLastFlush; logger.trace("" + message + ": " + modelChangeInfo + " -- " + huAttribute + " -- " + daoStatus); } private static class HUAttributesMap implements Iterable<I_M_HU_Attribute> { public static HUAttributesMap of(final HUAndPIAttributes huAndPIAttributes) { final ImmutableList<I_M_HU_Attribute> huAttributesList = huAndPIAttributes.getHuAttributes(); final HashMap<AttributeId, I_M_HU_Attribute> huAttributes = new HashMap<>(huAttributesList.size()); for (final I_M_HU_Attribute huAttribute : huAttributesList) { // Make sure HU attributes are cached using ThreadInherited trx and NOT hu's transaction (08776) InterfaceWrapperHelper.setTrxName(huAttribute, ITrx.TRXNAME_ThreadInherited); final AttributeId attributeId = AttributeId.ofRepoId(huAttribute.getM_Attribute_ID()); huAttributes.put(attributeId, huAttribute); setReadonly(huAttribute, true); } return new HUAttributesMap(huAttributes); } private final HashMap<AttributeId, I_M_HU_Attribute> huAttributes; private HUAttributesMap(final HashMap<AttributeId, I_M_HU_Attribute> huAttributes) { this.huAttributes = huAttributes; } public I_M_HU_Attribute getByAttributeIdOrNull(final AttributeId attributeId) { return huAttributes.get(attributeId); } public boolean isEmpty() { return huAttributes.isEmpty(); } @Override public Iterator<I_M_HU_Attribute> iterator() { return huAttributes.values().iterator(); } public List<I_M_HU_Attribute> toList() { return ImmutableList.copyOf(huAttributes.values()); } @Nullable public I_M_HU_Attribute put(final I_M_HU_Attribute huAttribute) { final AttributeId attributeId = AttributeId.ofRepoId(huAttribute.getM_Attribute_ID()); return huAttributes.put(attributeId, huAttribute); } public void remove(final I_M_HU_Attribute huAttribute) { final AttributeId attributeId = AttributeId.ofRepoId(huAttribute.getM_Attribute_ID()); final I_M_HU_Attribute huAttributeRemoved = huAttributes.remove(attributeId); if (!Util.same(huAttribute, huAttributeRemoved)) { throw new AdempiereException("Given " + huAttribute + " was not found in internal cache or it's different (" + huAttributeRemoved + ")"); } } } }
5,321
65,488
[{ "code": "textarea-duplicate-value", "message": "A <textarea> can have either a value attribute or (equivalently) child content, but not both", "start": { "line": 1, "column": 10, "character": 10 }, "end": { "line": 1, "column": 23, "character": 23 }, "pos": 10 }]
120
1,157
/**
 * @file load_balancing.cpp Checks how Maxscale balances load
 *
 * - also used for 'load_balancing_pers1' and 'load_balancing_pers10' tests (with 'persistpoolmax=1' and
 *'persistpoolmax=10' for all servers)
 *
 * - start two groups of threads: each group consists of 25 threads, each thread creates connections to
 * RWSplit,
 * threads from the first group try to execute as many SELECTs as possible, threads from the second group - one query per
 * second
 * - after 100 seconds all threads are stopped
 * - check the number of connections to every slave: the test PASSES if the COM_SELECT difference between slaves is not
 * greater than 3 times and no
 * more than 30% of the queries went to the Master (matching the bounds enforced by the code below)
 */

#include <maxtest/big_load.hh>
#include <maxtest/testconnections.hh>

int main(int argc, char* argv[])
{
    TestConnections* Test = new TestConnections(argc, argv);
    Test->reset_timeout();
    long int q;
    int threads_num = 25;

    long int selects[256];
    long int inserts[256];
    long int new_selects[256];
    long int new_inserts[256];
    long int i1, i2;

    if (Test->smoke)
    {
        threads_num = 15;
    }

    Test->tprintf("Increasing connection and error limits on backend nodes.\n");
    Test->repl->connect();
    for (int i = 0; i < Test->repl->N; i++)
    {
        execute_query(Test->repl->nodes[i], (char*) "set global max_connections = 300;");
        execute_query(Test->repl->nodes[i], (char*) "set global max_connect_errors = 100000;");
    }
    Test->repl->close_connections();

    Test->tprintf("Creating query load with %d threads...\n", threads_num);
    Test->reset_timeout();
    load(&new_inserts[0],
         &new_selects[0],
         &selects[0],
         &inserts[0],
         threads_num,
         Test,
         &i1,
         &i2,
         1,
         false,
         true);

    long int avr = (i1 + i2 ) / (Test->repl->N);
    Test->tprintf("average number of queries per node %ld\n", avr);

    long int min_q = avr / 3;
    long int max_q = avr * 3;
    Test->tprintf("Acceptable values for every node: from %ld to %ld\n", min_q, max_q);

    for (int i = 1; i < Test->repl->N; i++)
    {
        q = new_selects[i] - selects[i];
        if ((q > max_q) || (q < min_q))
        {
            Test->add_result(1, "number of queries for node %d is %ld\n", i + 1, q);
        }
    }

    if ((new_selects[0] - selects[0]) > avr / 3)
    {
        Test->add_result(1,
                         "number of queries for the master is greater than 30%% of the average number of queries per node\n");
    }

    Test->tprintf("Restoring nodes\n");
    Test->repl->connect();
    for (int i = 0; i < Test->repl->N; i++)
    {
        execute_query(Test->repl->nodes[i], (char*) "flush hosts;");
        execute_query(Test->repl->nodes[i], (char*) "set global max_connections = 151;");
    }
    Test->repl->close_connections();

    Test->check_maxscale_alive();
    int rval = Test->global_result;
    delete Test;
    return rval;
}
1,219
364
<gh_stars>100-1000 /* * Copyright 2014-2019 <NAME>. Distributed under the Boost * Software License, Version 1.0. (See accompanying file * LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */ //[oglplus_dsa_framebuffer_1 template <> class __ObjectOps<__tag_DirectState, __tag_Framebuffer> : public __ObjZeroOps<__tag_DirectState, __tag_Framebuffer> /*< Indirectly inherits from __ObjCommonOps_Framebuffer<__tag_Framebuffer>. >*/ { public: struct Property { using Buffer = __OneOf< __FramebufferBuffer, __FramebufferAttachment, __FramebufferColorAttachment>; /*< Enumerations specifying framebuffer output buffer. >*/ using Attachment = __OneOf<__FramebufferAttachment, __FramebufferColorAttachment>; /*< Enumerations specifying framebuffer attachments. >*/ using Status = __FramebufferStatus; }; void Bind(__FramebufferTarget target); __FramebufferStatus Status(__FramebufferTarget target) const; /*< Returns the status of [^this] framebuffer for the specified [^target]. See [glfunc CheckNamedFramebufferStatus]. >*/ bool IsComplete(__FramebufferTarget target) const; /*< Returns true if [^this] framebuffer is complete for the specified [^target]. See [glfunc CheckNamedFramebufferStatus]. >*/ void Complete(__FramebufferTarget target) const; /*< Throws an __IncompleteFramebuffer exception if [^this] framebuffer is not complete. >*/ void AttachRenderbuffer( Property::Attachment attachment, __RenderbufferName renderbuffer); /*< Attaches a [^renderbuffer] object as an [^attachment] to [^this] framebuffer. See [glfunc NamedFramebufferRenderbuffer]. >*/ void AttachColorRenderbuffer( __FramebufferColorAttachmentNumber attachment_no, __RenderbufferName renderbuffer); /*< Attaches a [^renderbuffer] object as a color attachment with index specified by [^attachment_no] to [^this] framebuffer. >*/ void AttachTexture( Property::Attachment attachment, __TextureName texture, GLint level); /*< Attaches the specified [^texture] [^level] as an attachment to [^this] framebuffer. See [glfunc NamedFramebufferTexture]. >*/ void AttachColorTexture( __FramebufferColorAttachmentNumber attachment_no, __TextureName texture, GLint level); /*< Attaches the specified [^texture] [^level] as a color attachment with index specified by [^attachment_no] to [^this] framebuffer. >*/ void AttachTextureLayer( Property::Attachment attachment, __TextureName texture, GLint level, GLint layer); /*< Attaches the [^level] (or [^level]'s [^layer]) of a 1D, 2D or 3D [^texture] as an [^attachment] of [^this] framebuffer. See [glfunc NamedFramebufferTextureLayer]. >*/ }; //] //[oglplus_dsa_framebuffer_def using DSAFramebufferOps = ObjectOps<__tag_DirectState, __tag_Framebuffer>; using DSAFramebuffer = __Object<DSAFramebufferOps>; //] //[oglplus_dsa_framebuffer_sugar struct DSAFramebufferOpsAndAttch {}; /*< Helper class used with syntax-sugar operators. Stores a framebuffer name and attachment. >*/ DSAFramebufferOpsAndAttch operator<<( DSAFramebufferOps& fbo, DSAFramebufferOps::Property::Attachment attch); DSAFramebufferOps& operator<<( DSAFramebufferOps& fbo, __FramebufferTarget target); /*< Equivalent to [^fbo.Bind(target)]. >*/ DSAFramebufferOps& operator<<( DSAFramebufferOpsAndAttch&& faa, __TextureName tex); /*< Attaches [^tex] to the attachment of the framebuffer specified by [^faa]. >*/ DSAFramebufferOps& operator<<( DSAFramebufferOpsAndAttch&& faa, __RenderbufferName rbo); /*< Attaches [^rbo] to the attachment of the framebuffer specified by [^faa]. >*/ //]
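//[oglplus_dsa_framebuffer_usage_sketch
// Editor's sketch (not from the original sources): how the syntax-sugar
// operators declared above compose. Assumes a live GL context and that the
// texture/renderbuffer storage was allocated elsewhere; the exact enumerator
// spellings (Color attachment 0, Depth, Draw) are illustrative and may differ
// in the real oglplus headers.
//
//   DSAFramebuffer fbo;
//   Texture color_tex;
//   Renderbuffer depth_rbo;
//
//   fbo << FramebufferColorAttachment(0) << color_tex;  // AttachColorTexture(...)
//   fbo << FramebufferAttachment::Depth << depth_rbo;   // AttachRenderbuffer(...)
//   fbo << FramebufferTarget::Draw;                     // Bind(target)
//   fbo.Complete(FramebufferTarget::Draw);              // throws IncompleteFramebuffer on failure
//]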
1,801
1,380
<reponame>Passw/pmwkaa-sophia /* * sophia database * sphia.org * * Copyright (c) <NAME> * BSD License */ #include <libss.h> #include <libsf.h> #include <libsr.h> #include <libsv.h> #include <libsd.h> int sd_schemebegin(sdscheme *c, sr *r) { int rc = ss_bufensure(&c->buf, r->a, sizeof(sdschemeheader)); if (ssunlikely(rc == -1)) return sr_oom(r->e); sdschemeheader *h = (sdschemeheader*)c->buf.s; memset(h, 0, sizeof(sdschemeheader)); ss_bufadvance(&c->buf, sizeof(sdschemeheader)); return 0; } int sd_schemeadd(sdscheme *c, sr *r, uint8_t id, sstype type, void *value, uint32_t size) { sdschemeopt opt = { .type = type, .id = id, .size = size }; int rc = ss_bufadd(&c->buf, r->a, &opt, sizeof(opt)); if (ssunlikely(rc == -1)) goto error; rc = ss_bufadd(&c->buf, r->a, value, size); if (ssunlikely(rc == -1)) goto error; sdschemeheader *h = (sdschemeheader*)c->buf.s; h->count++; return 0; error: return sr_oom(r->e); } int sd_schemecommit(sdscheme *c, sr *r) { if (ssunlikely(ss_bufused(&c->buf) == 0)) return 0; sdschemeheader *h = (sdschemeheader*)c->buf.s; h->size = ss_bufused(&c->buf) - sizeof(sdschemeheader); h->crc = ss_crcs(r->crc, (char*)h, ss_bufused(&c->buf), 0); return 0; } int sd_schemewrite(sdscheme *c, sr *r, char *path, int sync) { ssfile meta; ss_fileinit(&meta, r->vfs); int rc = ss_filenew(&meta, path, 0); if (ssunlikely(rc == -1)) goto error; rc = ss_filewrite(&meta, c->buf.s, ss_bufused(&c->buf)); if (ssunlikely(rc == -1)) goto error; if (sync) { rc = ss_filesync(&meta); if (ssunlikely(rc == -1)) goto error; } ss_fileadvise(&meta, 0, 0, meta.size); rc = ss_fileclose(&meta); if (ssunlikely(rc == -1)) goto error; return 0; error: sr_error(r->e, "scheme file '%s' error: %s", path, strerror(errno)); ss_fileclose(&meta); return -1; } int sd_schemerecover(sdscheme *c, sr *r, char *path) { ssize_t size = ss_vfssize(r->vfs, path); if (ssunlikely(size == -1)) goto error; if (ssunlikely((unsigned int)size < sizeof(sdschemeheader))) { sr_error(r->e, "scheme file '%s' is corrupted", path); return -1; } int rc = ss_bufensure(&c->buf, r->a, size); if (ssunlikely(rc == -1)) return sr_oom(r->e); ssfile meta; ss_fileinit(&meta, r->vfs); rc = ss_fileopen(&meta, path, 0); if (ssunlikely(rc == -1)) goto error; rc = ss_filepread(&meta, 0, c->buf.s, size); if (ssunlikely(rc == -1)) goto error; rc = ss_fileclose(&meta); if (ssunlikely(rc == -1)) goto error; ss_bufadvance(&c->buf, size); return 0; error: sr_error(r->e, "scheme file '%s' error: %s", path, strerror(errno)); return -1; }
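/* Editor's sketch (not part of the original file): the intended call sequence
 * for writing a scheme file with the functions above. Assumes `c` was
 * initialized by the matching sd_scheme init routine (not shown here) and `r`
 * is a configured runtime; the option id and the SS_U32 type tag are
 * illustrative. */
static inline int
sd_scheme_save_sketch(sdscheme *c, sr *r, char *path)
{
	uint32_t version = 2;
	if (sd_schemebegin(c, r) == -1)
		return -1;
	if (sd_schemeadd(c, r, 0, SS_U32, &version, sizeof(version)) == -1)
		return -1;
	if (sd_schemecommit(c, r) == -1)
		return -1;
	/* 1 = fsync the file before closing */
	return sd_schemewrite(c, r, path, 1);
}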
1,256
19,127
/* * Copyright 1999-2021 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ #include <stdio.h> #include "internal/cryptlib.h" #include <openssl/core.h> #include <openssl/core_names.h> #include <openssl/pkcs12.h> #include "p12_local.h" #include "crypto/pkcs7/pk7_local.h" /* Pack an object into an OCTET STRING and turn into a safebag */ PKCS12_SAFEBAG *PKCS12_item_pack_safebag(void *obj, const ASN1_ITEM *it, int nid1, int nid2) { PKCS12_BAGS *bag; PKCS12_SAFEBAG *safebag; if ((bag = PKCS12_BAGS_new()) == NULL) { ERR_raise(ERR_LIB_PKCS12, ERR_R_MALLOC_FAILURE); return NULL; } bag->type = OBJ_nid2obj(nid1); if (!ASN1_item_pack(obj, it, &bag->value.octet)) { ERR_raise(ERR_LIB_PKCS12, ERR_R_MALLOC_FAILURE); goto err; } if ((safebag = PKCS12_SAFEBAG_new()) == NULL) { ERR_raise(ERR_LIB_PKCS12, ERR_R_MALLOC_FAILURE); goto err; } safebag->value.bag = bag; safebag->type = OBJ_nid2obj(nid2); return safebag; err: PKCS12_BAGS_free(bag); return NULL; } /* Turn a stack of SAFEBAGS into a PKCS#7 data Contentinfo */ PKCS7 *PKCS12_pack_p7data(STACK_OF(PKCS12_SAFEBAG) *sk) { PKCS7 *p7; if ((p7 = PKCS7_new()) == NULL) { ERR_raise(ERR_LIB_PKCS12, ERR_R_MALLOC_FAILURE); return NULL; } p7->type = OBJ_nid2obj(NID_pkcs7_data); if ((p7->d.data = ASN1_OCTET_STRING_new()) == NULL) { ERR_raise(ERR_LIB_PKCS12, ERR_R_MALLOC_FAILURE); goto err; } if (!ASN1_item_pack(sk, ASN1_ITEM_rptr(PKCS12_SAFEBAGS), &p7->d.data)) { ERR_raise(ERR_LIB_PKCS12, PKCS12_R_CANT_PACK_STRUCTURE); goto err; } return p7; err: PKCS7_free(p7); return NULL; } /* Unpack SAFEBAGS from PKCS#7 data ContentInfo */ STACK_OF(PKCS12_SAFEBAG) *PKCS12_unpack_p7data(PKCS7 *p7) { if (!PKCS7_type_is_data(p7)) { ERR_raise(ERR_LIB_PKCS12, PKCS12_R_CONTENT_TYPE_NOT_DATA); return NULL; } return ASN1_item_unpack(p7->d.data, ASN1_ITEM_rptr(PKCS12_SAFEBAGS)); } /* Turn a stack of SAFEBAGS into a PKCS#7 encrypted data ContentInfo */ PKCS7 *PKCS12_pack_p7encdata_ex(int pbe_nid, const char *pass, int passlen, unsigned char *salt, int saltlen, int iter, STACK_OF(PKCS12_SAFEBAG) *bags, OSSL_LIB_CTX *ctx, const char *propq) { PKCS7 *p7; X509_ALGOR *pbe; const EVP_CIPHER *pbe_ciph = NULL; EVP_CIPHER *pbe_ciph_fetch = NULL; if ((p7 = PKCS7_new_ex(ctx, propq)) == NULL) { ERR_raise(ERR_LIB_PKCS12, ERR_R_MALLOC_FAILURE); return NULL; } if (!PKCS7_set_type(p7, NID_pkcs7_encrypted)) { ERR_raise(ERR_LIB_PKCS12, PKCS12_R_ERROR_SETTING_ENCRYPTED_DATA_TYPE); goto err; } ERR_set_mark(); pbe_ciph = pbe_ciph_fetch = EVP_CIPHER_fetch(ctx, OBJ_nid2sn(pbe_nid), propq); if (pbe_ciph == NULL) pbe_ciph = EVP_get_cipherbynid(pbe_nid); ERR_pop_to_mark(); if (pbe_ciph != NULL) { pbe = PKCS5_pbe2_set_iv_ex(pbe_ciph, iter, salt, saltlen, NULL, -1, ctx); } else { pbe = PKCS5_pbe_set_ex(pbe_nid, iter, salt, saltlen, ctx); } if (pbe == NULL) { ERR_raise(ERR_LIB_PKCS12, ERR_R_MALLOC_FAILURE); goto err; } X509_ALGOR_free(p7->d.encrypted->enc_data->algorithm); p7->d.encrypted->enc_data->algorithm = pbe; ASN1_OCTET_STRING_free(p7->d.encrypted->enc_data->enc_data); if (!(p7->d.encrypted->enc_data->enc_data = PKCS12_item_i2d_encrypt_ex(pbe, ASN1_ITEM_rptr(PKCS12_SAFEBAGS), pass, passlen, bags, 1, ctx, propq))) { ERR_raise(ERR_LIB_PKCS12, PKCS12_R_ENCRYPT_ERROR); goto err; } EVP_CIPHER_free(pbe_ciph_fetch); return p7; err: PKCS7_free(p7); 
EVP_CIPHER_free(pbe_ciph_fetch); return NULL; } PKCS7 *PKCS12_pack_p7encdata(int pbe_nid, const char *pass, int passlen, unsigned char *salt, int saltlen, int iter, STACK_OF(PKCS12_SAFEBAG) *bags) { return PKCS12_pack_p7encdata_ex(pbe_nid, pass, passlen, salt, saltlen, iter, bags, NULL, NULL); } STACK_OF(PKCS12_SAFEBAG) *PKCS12_unpack_p7encdata(PKCS7 *p7, const char *pass, int passlen) { if (!PKCS7_type_is_encrypted(p7)) return NULL; return PKCS12_item_decrypt_d2i_ex(p7->d.encrypted->enc_data->algorithm, ASN1_ITEM_rptr(PKCS12_SAFEBAGS), pass, passlen, p7->d.encrypted->enc_data->enc_data, 1, p7->ctx.libctx, p7->ctx.propq); } PKCS8_PRIV_KEY_INFO *PKCS12_decrypt_skey_ex(const PKCS12_SAFEBAG *bag, const char *pass, int passlen, OSSL_LIB_CTX *ctx, const char *propq) { return PKCS8_decrypt_ex(bag->value.shkeybag, pass, passlen, ctx, propq); } PKCS8_PRIV_KEY_INFO *PKCS12_decrypt_skey(const PKCS12_SAFEBAG *bag, const char *pass, int passlen) { return PKCS12_decrypt_skey_ex(bag, pass, passlen, NULL, NULL); } int PKCS12_pack_authsafes(PKCS12 *p12, STACK_OF(PKCS7) *safes) { if (ASN1_item_pack(safes, ASN1_ITEM_rptr(PKCS12_AUTHSAFES), &p12->authsafes->d.data)) return 1; return 0; } STACK_OF(PKCS7) *PKCS12_unpack_authsafes(const PKCS12 *p12) { STACK_OF(PKCS7) *p7s; PKCS7 *p7; int i; if (!PKCS7_type_is_data(p12->authsafes)) { ERR_raise(ERR_LIB_PKCS12, PKCS12_R_CONTENT_TYPE_NOT_DATA); return NULL; } p7s = ASN1_item_unpack(p12->authsafes->d.data, ASN1_ITEM_rptr(PKCS12_AUTHSAFES)); if (p7s != NULL) { for (i = 0; i < sk_PKCS7_num(p7s); i++) { p7 = sk_PKCS7_value(p7s, i); if (!ossl_pkcs7_ctx_propagate(p12->authsafes, p7)) goto err; } } return p7s; err: sk_PKCS7_free(p7s); return NULL; }
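/* Editor's sketch (not from the original file): round-tripping a stack of
 * safebags through the pack/unpack helpers above. Error handling is kept
 * minimal; `bags` is assumed to have been built elsewhere, e.g. with
 * PKCS12_item_pack_safebag(). */
static int roundtrip_safebags(STACK_OF(PKCS12_SAFEBAG) *bags)
{
    PKCS7 *p7 = PKCS12_pack_p7data(bags);
    STACK_OF(PKCS12_SAFEBAG) *recovered;

    if (p7 == NULL)
        return 0;
    recovered = PKCS12_unpack_p7data(p7);
    PKCS7_free(p7);
    if (recovered == NULL)
        return 0;
    sk_PKCS12_SAFEBAG_pop_free(recovered, PKCS12_SAFEBAG_free);
    return 1;
}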
3,627
1,039
<gh_stars>1000+
//
//  CMHControllerHelper.h
//  MHDevelopExample
//
//  Created by lx on 2018/6/7.
//  Copyright © 2018年 CoderMikeHe. All rights reserved.
//

#import <Foundation/Foundation.h>

@interface CMHControllerHelper : NSObject

/// Returns the view controller that is currently being displayed.
+ (UIViewController *)currentViewController;

@end
127
2,381
package com.github.dockerjava.api.command;

import java.io.InputStream;

import com.github.dockerjava.api.exception.NotFoundException;

public interface CopyArchiveToContainerCmd extends SyncDockerCmd<Void> {

    String getContainerId();

    String getHostResource();

    InputStream getTarInputStream();

    boolean isNoOverwriteDirNonDir();

    boolean isDirChildrenOnly();

    /**
     * Set the container's id
     *
     * @param containerId
     *            id of the container to copy the file to
     */
    CopyArchiveToContainerCmd withContainerId(String containerId);

    /**
     * Set the path to the resource on the host machine
     *
     * @param resource
     *            path to the resource on the host machine
     */
    CopyArchiveToContainerCmd withHostResource(String resource);

    /**
     * Set the tar input stream that will be uploaded to the container. withHostResource or withTarInputStream can be defined, but not both.
     *
     * @param tarInputStream
     *            the stream to upload to the container
     */
    CopyArchiveToContainerCmd withTarInputStream(InputStream tarInputStream);

    /**
     * If set to true, it is an error if unpacking the given content would cause an existing directory to be replaced with a
     * non-directory, or vice versa
     *
     * @param noOverwriteDirNonDir
     *            whether replacing a directory with a non-directory (or vice versa) is treated as an error
     */
    CopyArchiveToContainerCmd withNoOverwriteDirNonDir(boolean noOverwriteDirNonDir);

    /**
     * If this flag is set to true, all children of the local directory will be copied to the remote host without the root directory itself.
     * For example: given the local directories root/titi and root/tata and the remote path /var/data, dirChildrenOnly = true will create
     * /var/data/titi and /var/data/tata, while dirChildrenOnly = false will create /var/data/root/titi and /var/data/root/tata
     *
     * @param dirChildrenOnly
     *            whether the root directory is omitted
     */
    CopyArchiveToContainerCmd withDirChildrenOnly(boolean dirChildrenOnly);

    String getRemotePath();

    CopyArchiveToContainerCmd withRemotePath(String remotePath);

    @Override
    Void exec() throws NotFoundException;

    interface Exec extends DockerCmdSyncExec<CopyArchiveToContainerCmd, Void> {
    }

}
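/**
 * Editor's sketch (not part of the original interface): typical use via the
 * standard docker-java entry point. Paths and the container id are
 * placeholders; {@code dockerClient} is assumed to be an initialized
 * {@code com.github.dockerjava.api.DockerClient}.
 *
 * <pre>
 * dockerClient.copyArchiveToContainerCmd("my-container")
 *         .withHostResource("/tmp/root")   // local directory to upload
 *         .withRemotePath("/var/data")     // destination inside the container
 *         .withDirChildrenOnly(true)       // yields /var/data/titi, /var/data/tata
 *         .exec();
 * </pre>
 */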
775
582
<gh_stars>100-1000
package com.easy.securityOauth2AuthCodeServer.config;

import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configurers.ResourceServerSecurityConfigurer;

public class ResourceServerConfig {
    private static final String RESOURCE_ID = "account";

    @Configuration
    @EnableResourceServer()
    protected static class ResourceServerConfiguration extends ResourceServerConfigurerAdapter {
        @Override
        public void configure(ResourceServerSecurityConfigurer resources) {
            resources.resourceId(RESOURCE_ID).stateless(true);
        }

        @Override
        public void configure(HttpSecurity httpSecurity) throws Exception {
            httpSecurity
                    .requestMatchers()
                    // Just to be safe: keep these requests from being intercepted by the main filter chain
                    .antMatchers("/account/**")
                    .and()
                    .authorizeRequests()
                    .antMatchers("/account/info/**").access("#oauth2.hasScope('get_user_info')")
                    .antMatchers("/account/child/**").access("#oauth2.hasScope('get_childlist')")
                    .and()
                    .authorizeRequests().anyRequest().authenticated();
        }
    }
}
646
819
<reponame>virdesai/stock-analysis-engine<gh_stars>100-1000
#!/usr/bin/env python

"""
Tool for inspecting cached pricing data to find common errors.

This tool uses the `Extraction API <https://stock-analysis-engine.
readthedocs.io/en/latest/extract.html>`__ to look for dates that
are not in sync with the redis cached date.

.. note:: This tool requires redis to be running with fetched datasets
    already stored in supported keys

**Examples**

**Inspect Minute Datasets for a Ticker**

::

    inspect_datasets.py -t SPY

**Inspect Daily Datasets for a Ticker**

::

    inspect_datasets.py -t AAPL -g daily
    # or
    # inspect_datasets.py -t AAPL -g day

**Usage**

::

    inspect_datasets.py -h
    usage: inspect_datasets.py [-h] [-t TICKER] [-g DATASETS] [-s START_DATE]

    Inspect datasets looking for dates in redis that look incorrect

    optional arguments:
    -h, --help     show this help message and exit
    -t TICKER      ticker
    -g DATASETS    optional - datasets: minute or min = examine IEX Cloud
                    intraday minute data, daily or day = examine IEX Cloud
                    daily data, quote = examine IEX Cloud quotes data,
                    stats = examine IEX Cloud key stats data, peers = examine
                    IEX Cloud peers data, news = examine IEX Cloud news data,
                    fin = examine IEX Cloud financials data, earn = examine
                    IEX Cloud earnings data, div = examine IEX Cloud dividends
                    data, comp = examine IEX Cloud company data, calls =
                    examine Tradier calls data, puts = examine Tradier puts
                    data, and comma delimited is supported as well
    -s START_DATE  start date format YYYY-MM-DD (default is 2019-01-01)
"""

import datetime
import argparse
import analysis_engine.consts as ae_consts
import analysis_engine.utils as ae_utils
import analysis_engine.extract as ae_extract
import spylunking.log.setup_logging as log_utils

log = log_utils.build_colorized_logger(
    name='inspect-redis-data',
    handler_name='no_date_colors')


def inspect_datasets(
        ticker=None,
        start_date=None,
        datasets=None):
    """inspect_datasets

    Loop over all cached data in redis by going sequentially per date and
    examining the latest ``date`` value in the cache to check if it matches
    the redis key's date.
    For IEX Cloud minute data errors, running this function will print out
    commands to fix any issues (if possible):

    ::

        fetch -t TICKER -g iex_min -F DATE_TO_FIX

    :param ticker: optional - string ticker
    :param start_date: optional - datetime start date for the loop
        (default is ``2019-01-01``)
    :param datasets: optional - list of strings to extract
        specific, supported datasets (default is ``['minute']``)
    """

    if not start_date:
        start_date = datetime.datetime(
            year=2019,
            month=1,
            day=1)
    if not datasets:
        datasets = [
            'minute'
        ]
    if not ticker:
        ticker = 'SPY'

    tickers = [
        ticker
    ]

    fix_suggestions = []
    last_close = ae_utils.last_close()
    for ticker in tickers:
        not_done = True
        cur_date = start_date
        while not_done:
            cur_date_str = cur_date.strftime(ae_consts.COMMON_DATE_FORMAT)

            log.info(
                f'extracting {ticker} date={cur_date_str}')
            res = None
            # get from a date or the latest if not set
            if cur_date_str:
                res = ae_extract.extract(
                    ticker=ticker,
                    date=cur_date_str,
                    datasets=datasets)
            else:
                res = ae_extract.extract(
                    ticker=ticker,
                    datasets=datasets)
            weekday_name = cur_date.strftime('%A')
            for ds_name in datasets:
                df = res[ticker][ds_name]
                if ae_consts.is_df(df=df):
                    if 'date' in df:
                        latest_date = df['date'].iloc[-1]
                        latest_date_str = latest_date.strftime(
                            ae_consts.COMMON_DATE_FORMAT)
                        if latest_date_str == cur_date_str:
                            log.info(
                                f'valid - {ds_name} latest dates match '
                                f'{weekday_name}: '
                                f'{latest_date_str} == {cur_date_str}')
                        else:
                            if ds_name != 'daily':
                                log.critical(
                                    f'{ds_name} latest dates do '
                                    f'NOT match on '
                                    f'{weekday_name} {cur_date_str} found: '
                                    f'{latest_date_str}')
                            else:
                                one_day_back = (
                                    latest_date +
                                    datetime.timedelta(days=1))
                                if weekday_name == 'Monday':
                                    one_day_back = (
                                        latest_date +
                                        datetime.timedelta(
                                            days=3))
                                latest_date_str = one_day_back.strftime(
                                    ae_consts.COMMON_DATE_FORMAT)
                                if latest_date_str == cur_date_str:
                                    log.info(
                                        f'valid - {ds_name} latest dates '
                                        f'match '
                                        f'{weekday_name}: '
                                        f'{latest_date_str} == '
                                        f'{cur_date_str}')
                                else:
                                    log.critical(
                                        f'{ds_name} latest dates do '
                                        f'NOT match on '
                                        f'{weekday_name} {cur_date_str} '
                                        f'found: '
                                        f'{latest_date_str}')
                            if ds_name == 'minute':
                                fix_suggestions.append(
                                    f'fetch -t {ticker} -g iex_min '
                                    f'-F {cur_date_str}')
                    else:
                        log.error(
                            f'{ds_name} df does not have a date column '
                            f'on {cur_date_str}')
                else:
                    log.error(
                        f'Missing {ds_name} df on {cur_date_str}')
            # end of inspecting datasets
            if cur_date > last_close:
                not_done = False
            else:
                cur_date += datetime.timedelta(days=1)
                not_a_weekday = True
                while not_a_weekday:
                    weekday = cur_date.date().weekday()
                    if weekday > 4:
                        log.debug(
                            'SKIP weekend day: '
                            f'{cur_date.strftime("%A on %Y-%m-%d")}')
                        cur_date += datetime.timedelta(days=1)
                    else:
                        not_a_weekday = False
        # end for all dates
    # end of for all tickers

    if len(fix_suggestions) > 0:
        print('-------------------------------')
        print(
            'Detected invalid dates - below are the suggested fixes '
            'to run using the fetch command.')
        print(
            ' - Please be aware that fetching data may incur usage charges '
            'and costs on your account')
        for s in fix_suggestions:
            print(s)
    else:
        log.info(
            'done')
# end inspect_datasets


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=(
            'Inspect datasets looking for dates in redis '
            'that look incorrect'))
    parser.add_argument(
        '-t',
        help=(
            'ticker'),
        required=False,
        dest='ticker')
    parser.add_argument(
        '-g',
        help=(
            'optional - datasets: '
            'minute or min = examine IEX Cloud intraday minute data, '
            'daily or day = examine IEX Cloud daily data, '
            'quote = examine IEX Cloud quotes data, '
            'stats = examine IEX Cloud key stats data, 
'
            'peers = examine IEX Cloud peers data, '
            'news = examine IEX Cloud news data, '
            'fin = examine IEX Cloud financials data, '
            'earn = examine IEX Cloud earnings data, '
            'div = examine IEX Cloud dividends data, '
            'comp = examine IEX Cloud company data, '
            'calls = examine Tradier calls data, '
            'puts = examine Tradier puts data, '
            'and comma delimited is supported as well'),
        required=False,
        dest='datasets')
    parser.add_argument(
        '-s',
        help=(
            'start date format YYYY-MM-DD (default is 2019-01-01)'),
        required=False,
        dest='start_date')
    args = parser.parse_args()

    start_date = datetime.datetime(
        year=2019,
        month=1,
        day=1)
    datasets = [
        'minute'
    ]
    ticker = 'SPY'
    valid = True
    if args.ticker:
        ticker = args.ticker.upper()
    if args.datasets:
        datasets = []
        for key in args.datasets.lower().split(','):
            if key == 'news':
                datasets.append('news1')
            elif key == 'min':
                datasets.append('minute')
            elif key == 'day':
                datasets.append('daily')
            elif key == 'fin':
                datasets.append('financials')
            elif key == 'earn':
                datasets.append('earnings')
            elif key == 'div':
                datasets.append('dividends')
            elif key == 'comp':
                datasets.append('company')
            elif key == 'calls':
                datasets.append('tdcalls')
            elif key == 'puts':
                datasets.append('tdputs')
            else:
                if key not in ae_consts.BACKUP_DATASETS:
                    log.error(
                        f'unsupported dataset key: {key} '
                        'please use a supported key: '
                        f'{ae_consts.BACKUP_DATASETS}')
                    valid = False
                else:
                    datasets.append(key)
    if args.start_date:
        start_date = datetime.datetime.strptime(
            args.start_date,
            '%Y-%m-%d')

    if valid:
        inspect_datasets(
            ticker=ticker,
            start_date=start_date,
            datasets=datasets)
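# Editor's sketch (not part of the original script): calling the inspector
# programmatically instead of via the CLI. The module name is assumed to
# match the file name, and redis is assumed to hold fetched SPY datasets.
#
#     import datetime
#     from inspect_datasets import inspect_datasets
#
#     inspect_datasets(
#         ticker='SPY',
#         start_date=datetime.datetime(year=2019, month=1, day=2),
#         datasets=['minute', 'daily'])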
6,218
480
<reponame>weicao/galaxysql
/*
 * Copyright [2013-2021], Alibaba Group Holding Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.alibaba.polardbx;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.alibaba.polardbx.common.model.lifecycle.AbstractLifecycle;
import com.alibaba.polardbx.common.utils.GeneralUtil;
import com.alibaba.polardbx.common.utils.Pair;
import com.alibaba.polardbx.common.utils.TStringUtil;
import com.alibaba.polardbx.common.utils.extension.Activate;
import com.alibaba.polardbx.common.utils.thread.ExecutorTemplate;
import com.alibaba.polardbx.executor.cursor.ResultCursor;
import com.alibaba.polardbx.executor.sync.ISyncManager;
import com.alibaba.polardbx.executor.utils.ExecUtils;
import com.alibaba.polardbx.gms.node.GmsNodeManager;
import com.alibaba.polardbx.gms.node.NodeInfo;
import com.alibaba.polardbx.gms.sync.IGmsSyncAction;
import com.alibaba.polardbx.gms.sync.ISyncResultHandler;
import com.alibaba.polardbx.gms.sync.SyncScope;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Multi-node sync notification across the server cluster.
 *
 * @author agapple 2015-03-26 18:53:29
 * @since 5.1.19
 */
@Activate(order = 2)
public class ClusterSyncManager extends AbstractLifecycle implements ISyncManager {

    private static final Logger logger = LoggerFactory.getLogger(ClusterSyncManager.class);

    @Override
    public List<List<Map<String, Object>>> sync(IGmsSyncAction action, String schemaName) {
        return doSync(action, schemaName, SyncScope.DEFAULT_SYNC_SCOPE, null, false);
    }

    @Override
    public List<List<Map<String, Object>>> sync(IGmsSyncAction action, String schemaName, boolean throwExceptions) {
        return doSync(action, schemaName, SyncScope.DEFAULT_SYNC_SCOPE, null, throwExceptions);
    }

    @Override
    public List<List<Map<String, Object>>> sync(IGmsSyncAction action, String schemaName, SyncScope scope) {
        return doSync(action, schemaName, scope, null, false);
    }

    @Override
    public List<List<Map<String, Object>>> sync(IGmsSyncAction action, String schemaName, SyncScope scope,
                                                boolean throwExceptions) {
        return doSync(action, schemaName, scope, null, throwExceptions);
    }

    @Override
    public void sync(IGmsSyncAction action, String schemaName, ISyncResultHandler handler) {
        doSync(action, schemaName, SyncScope.DEFAULT_SYNC_SCOPE, handler, false);
    }

    @Override
    public void sync(IGmsSyncAction action, String schemaName, ISyncResultHandler handler, boolean throwExceptions) {
        doSync(action, schemaName, SyncScope.DEFAULT_SYNC_SCOPE, handler, throwExceptions);
    }

    @Override
    public void sync(IGmsSyncAction action, String schemaName, SyncScope scope, ISyncResultHandler handler) {
        doSync(action, schemaName, scope, handler, false);
    }

    @Override
    public void sync(IGmsSyncAction action, String
 schemaName, SyncScope scope, ISyncResultHandler handler,
                     boolean throwExceptions) {
        doSync(action, schemaName, scope, handler, throwExceptions);
    }

    private List<List<Map<String, Object>>> doSync(IGmsSyncAction action, String schemaName, SyncScope scope,
                                                   ISyncResultHandler handler, boolean throwExceptions) {
        final List<List<Map<String, Object>>> results = Collections.synchronizedList(new ArrayList(1));
        final List<Pair<NodeInfo, List<Map<String, Object>>>> resultsForHandler =
            Collections.synchronizedList(new ArrayList(1));

        // Perform the sync action locally first.
        final NodeInfo localNode = GmsNodeManager.getInstance().getLocalNode();

        List<Map<String, Object>> localResult = null;

        if (scope == null) {
            scope = SyncScope.DEFAULT_SYNC_SCOPE;
        }

        switch (scope) {
        case MASTER_ONLY:
            if (GmsNodeManager.getInstance().isCurrentNodeMaster()) {
                localResult = ExecUtils.resultSetToList((ResultCursor) action.sync());
            }
            break;
        case SLAVE_ONLY:
            if (GmsNodeManager.getInstance().isCurrentNodeReadOnly()) {
                localResult = ExecUtils.resultSetToList((ResultCursor) action.sync());
            }
            break;
        case ALL:
        case CURRENT_ONLY:
        default:
            localResult = ExecUtils.resultSetToList((ResultCursor) action.sync());
            break;
        }

        List<NodeInfo> originSyncNodes = GmsNodeManager.getInstance().getNodesBySyncScope(scope);
        List<NodeInfo> syncNodes = new ArrayList<>();
        synchronized (originSyncNodes) {
            syncNodes.addAll(originSyncNodes);
        }

        if (GeneralUtil.isNotEmpty(syncNodes)) {
            sync(resultsForHandler, localNode, syncNodes, action, schemaName, throwExceptions);
            for (Pair<NodeInfo, List<Map<String, Object>>> result : resultsForHandler) {
                results.add(result.getValue());
            }
        }

        if (localResult != null) {
            results.add(localResult);
            resultsForHandler.add(new Pair<>(localNode, localResult));
        }

        if (handler != null) {
            handler.handle(resultsForHandler);
        }

        return results;
    }

    private void sync(List<Pair<NodeInfo, List<Map<String, Object>>>> resultsForHandler, NodeInfo localNode,
                      List<NodeInfo> remoteNodes, IGmsSyncAction action, String schemaName, boolean throwExceptions) {
        // Use thread pool for manager port to avoid conflict with server port.
        ExecutorTemplate template = new ExecutorTemplate(CobarServer.getInstance().getManagerExecutor());

        Map<String, String> nodeExceptions = new HashMap<>();

        for (final NodeInfo remoteNode : remoteNodes) {
            if (remoteNode == null || (remoteNode.equals(localNode))) {
                // The node info is null (defensive check) or the sync action was already performed on the local node.
                continue;
            }

            final String sql = buildRequestSql(action, schemaName);

            template.submit(() -> {
                Connection conn = null;
                boolean checked = false;
                try {
                    conn = remoteNode.getManagerDataSource().getConnection();

                    // Verify that the connection is usable first
                    conn.createStatement().execute("show @@config");
                    checked = true;

                    Statement stmt = conn.createStatement();
                    stmt.execute(sql);

                    resultsForHandler.add(new Pair<>(remoteNode, ExecUtils.resultSetToList(stmt.getResultSet())));
                } catch (Throwable e) {
                    // If the manager port is not reachable, the node may not be started yet, so ignore it
                    if (checked) {
                        String error = String.format("Failed to SYNC to '" + remoteNode.getManagerKey() + "'. 
Caused by: %s", e.getMessage()); logger.error(error, e); nodeExceptions.put(remoteNode.getManagerKey(), e.getMessage()); throw GeneralUtil.nestedException(error, e); } else { logger.error(e); } } finally { if (conn != null) { try { conn.close(); } catch (SQLException e) { logger.error(e); } } } }); } // 同步等待所有结果 template.waitForResult(); if (throwExceptions && GeneralUtil.isNotEmpty(nodeExceptions)) { StringBuilder buf = new StringBuilder(); buf.append("Failed to SYNC the following nodes:").append("\n"); nodeExceptions.forEach((key, value) -> buf.append(key).append(" - ").append(value).append(";\n")); throw GeneralUtil.nestedException(buf.toString()); } } @Override public List<Map<String, Object>> sync(IGmsSyncAction action, String schemaName, String serverKey) { NodeInfo localNode = GmsNodeManager.getInstance().getLocalNode(); List<NodeInfo> remoteNodes = GmsNodeManager.getInstance().getRemoteNodes(); if (GeneralUtil.isEmpty(remoteNodes) || localNode == null || TStringUtil.equals(localNode.getServerKey(), serverKey)) { // If there are no other nodes at all or the sync target is local // server, then do the sync action locally only. return ExecUtils.resultSetToList((ResultCursor) action.sync()); } final String sql = buildRequestSql(action, schemaName); DataSource dataSource = getDataSource(serverKey, remoteNodes); try (Connection conn = dataSource.getConnection(); Statement stmt = conn.createStatement()) { stmt.execute(sql); return ExecUtils.resultSetToList(stmt.getResultSet()); } catch (SQLException e) { String errMsg = "Failed to SYNC to '" + serverKey + "'. Caused by: " + e.getMessage(); logger.error(errMsg, e); throw GeneralUtil.nestedException(errMsg, e); } } private String buildRequestSql(IGmsSyncAction action, String schema) { String data = JSON.toJSONString(action, SerializerFeature.WriteClassName); return "SYNC " + schema + " " + data; } private DataSource getDataSource(String serverKey, List<NodeInfo> remoteNodes) { for (NodeInfo remoteNode : remoteNodes) { if (TStringUtil.equals(remoteNode.getServerKey(), serverKey)) { return remoteNode.getManagerDataSource(); } } throw GeneralUtil.nestedException("Not found the sync target server '" + serverKey + "' from node list"); } }
4,636
2,151
<gh_stars>1000+ // Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef MEDIA_BASE_VECTOR_MATH_H_ #define MEDIA_BASE_VECTOR_MATH_H_ #include <utility> #include "media/base/media_shmem_export.h" namespace media { namespace vector_math { // Required alignment for inputs and outputs to all vector math functions enum { kRequiredAlignment = 16 }; // Multiply each element of |src| (up to |len|) by |scale| and add to |dest|. // |src| and |dest| must be aligned by kRequiredAlignment. MEDIA_SHMEM_EXPORT void FMAC(const float src[], float scale, int len, float dest[]); // Multiply each element of |src| by |scale| and store in |dest|. |src| and // |dest| must be aligned by kRequiredAlignment. MEDIA_SHMEM_EXPORT void FMUL(const float src[], float scale, int len, float dest[]); // Computes the exponentially-weighted moving average power of a signal by // iterating the recurrence: // // y[-1] = initial_value // y[n] = smoothing_factor * src[n]^2 + (1-smoothing_factor) * y[n-1] // // Returns the final average power and the maximum squared element value. MEDIA_SHMEM_EXPORT std::pair<float, float> EWMAAndMaxPower( float initial_value, const float src[], int len, float smoothing_factor); MEDIA_SHMEM_EXPORT void Crossfade(const float src[], int len, float dest[]); } // namespace vector_math } // namespace media #endif // MEDIA_BASE_VECTOR_MATH_H_
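// Editor's sketch (not part of the original header): mixing a scaled source
// into a destination and metering it with the functions above. Real callers
// must guarantee 16-byte (kRequiredAlignment) alignment of both buffers;
// plain pointers are shown here only for brevity.
//
//   // dest[i] += 0.5f * src[i]
//   media::vector_math::FMAC(src, 0.5f, len, dest);
//
//   // EWMA power of the mixed signal, plus the peak squared sample
//   std::pair<float, float> power =
//       media::vector_math::EWMAAndMaxPower(0.0f, dest, len, 0.1f);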
692
1,738
<reponame>jeikabu/lumberyard<filename>dev/Gems/CloudGemWebCommunicator/Code/Include/CloudGemWebCommunicator/CloudGemWebCommunicatorBus.h /* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ #pragma once #include <AzCore/EBus/EBus.h> #include <ResponseCode.hpp> namespace CloudGemWebCommunicator { class CloudGemWebCommunicatorRequests : public AZ::EBusTraits { public: static const AZ::EBusHandlerPolicy HandlerPolicy = AZ::EBusHandlerPolicy::Single; static const AZ::EBusAddressPolicy AddressPolicy = AZ::EBusAddressPolicy::Single; // Public functions virtual bool RequestRegistration(const AZStd::string& connectionType) = 0; virtual bool RequestConnection(const AZStd::string& connectionType) = 0; virtual bool RequestChannelList() = 0; virtual bool RequestSubscribeChannel(const AZStd::string& channelName) = 0; virtual bool RequestSubscribeChannelList(const AZStd::vector<AZStd::string>& channelName) = 0; virtual bool RequestUnsubscribeChannel(const AZStd::string& channelName) = 0; virtual bool RequestDisconnect() = 0; virtual bool RequestRefreshDeviceInfo() = 0; virtual bool PostClientMessage(const AZStd::string& channelName, const AZStd::string& message) = 0; virtual bool RequestSendMessageDirect(const AZStd::string& channelName, const AZStd::string& message) = 0; virtual AZStd::string GetRegistrationStatus() = 0; virtual AZStd::string GetConnectionStatus() = 0; virtual AZStd::string GetSubscriptionStatus(const AZStd::string& channelName) = 0; virtual AZStd::vector<AZStd::string> GetSubscriptionList() = 0; virtual AZStd::string GetEndpointPortString() = 0; }; using CloudGemWebCommunicatorRequestBus = AZ::EBus<CloudGemWebCommunicatorRequests>; class CloudGemWebCommunicatorUpdates : public AZ::EBusTraits { public: static const AZ::EBusHandlerPolicy HandlerPolicy = AZ::EBusHandlerPolicy::Multiple; static const AZ::EBusAddressPolicy AddressPolicy = AZ::EBusAddressPolicy::Single; // Public functions virtual void ConnectionStatusChanged(const AZStd::string& connection) = 0; virtual void MessageReceived(const AZStd::string& channelName, const AZStd::string& channelMessage) = 0; virtual void RegistrationStatusChanged(const AZStd::string& registrationStatus) = 0; virtual void SubscriptionStatusChanged(const AZStd::string& channelName, const AZStd::string& subscriptionStatus) = 0; }; using CloudGemWebCommunicatorUpdateBus = AZ::EBus<CloudGemWebCommunicatorUpdates>; // Handle callbacks for job requests to the socket library class CloudGemWebCommunicatorLibraryResponse : public AZ::EBusTraits { public: static const AZ::EBusHandlerPolicy HandlerPolicy = AZ::EBusHandlerPolicy::Multiple; static const AZ::EBusAddressPolicy AddressPolicy = AZ::EBusAddressPolicy::ById; static const bool EnableEventQueue = true; using BusIdType = AZ::EntityId; using ResponseCodeType = awsiotsdk::ResponseCode; // Public functions virtual void GotConnectionResponse(ResponseCodeType responseCode, std::shared_ptr<awsiotsdk::MqttClient> connectionClient) = 0; virtual void GotSubscribeResponse(ResponseCodeType responseCode, AZStd::vector<AZStd::string> channelName, 
std::shared_ptr<awsiotsdk::MqttClient> connectionClient) = 0; virtual void GotMessage(const AZStd::string channelName, const AZStd::string channelMessage) = 0; virtual void GotUnsubscribeResponse(ResponseCodeType responseCode, AZStd::string channelName, std::shared_ptr<awsiotsdk::MqttClient> connectionClient) = 0; }; using CloudGemWebCommunicatorLibraryResponseBus = AZ::EBus<CloudGemWebCommunicatorLibraryResponse>; } // namespace CloudGemWebCommunicator
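// Editor's sketch (not part of the original header): the usual AZ::EBus
// handler pattern for receiving channel messages; the BusConnect()/
// BusDisconnect() wiring and the logging body are illustrative.
//
//   class ChannelLogger
//       : public CloudGemWebCommunicator::CloudGemWebCommunicatorUpdateBus::Handler
//   {
//   public:
//       ChannelLogger() { BusConnect(); }
//       ~ChannelLogger() { BusDisconnect(); }
//
//       void ConnectionStatusChanged(const AZStd::string&) override {}
//       void RegistrationStatusChanged(const AZStd::string&) override {}
//       void SubscriptionStatusChanged(const AZStd::string&, const AZStd::string&) override {}
//       void MessageReceived(const AZStd::string& channelName, const AZStd::string& message) override
//       {
//           AZ_Printf("ChannelLogger", "[%s] %s\n", channelName.c_str(), message.c_str());
//       }
//   };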
1,442
2,338
<reponame>mkinsner/llvm typedef int pch_int;
22
1,199
--- te_subs.c.orig Sat Jan 17 03:31:40 2004 +++ te_subs.c Sat Jan 17 03:31:53 2004 @@ -231,7 +231,7 @@ char c; { if (isdigit(c)) return(c - '0' + 1); - else if isalpha(c) return(mapch_l[c] - 'a' + 11); + else if (isalpha(c)) return(mapch_l[c] - 'a' + 11); else if (fors) { if (c == '_') return (SERBUF);
174
984
/* * Copyright DataStax, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datastax.oss.driver.internal.mapper.processor.dao; import com.datastax.oss.driver.api.mapper.annotations.QueryProvider; import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.MethodSpec; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import javax.lang.model.element.Modifier; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(DataProviderRunner.class) public class DaoQueryProviderMethodGeneratorTest extends DaoMethodGeneratorTest { @Test @Override @UseDataProvider("invalidSignatures") public void should_fail_with_expected_error(String expectedError, MethodSpec method) { super.should_fail_with_expected_error(expectedError, method); } @DataProvider public static Object[][] invalidSignatures() { return new Object[][] { { "Invalid annotation configuration: the elements in QueryProvider.entityHelpers " + "must be Entity-annotated classes (offending element: java.lang.String)", MethodSpec.methodBuilder("select") .addAnnotation( AnnotationSpec.builder(QueryProvider.class) // We don't go until instantiation, any class will do .addMember("providerClass", "$T.class", String.class) .addMember( "entityHelpers", "{ $T.class, $T.class }", ENTITY_CLASS_NAME, String.class) .build()) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) .build(), }, }; } }
809
1,756
#ifndef MAILCORE_JAVA_IMAP_OPERATION_CALLBACK_H #define MAILCORE_JAVA_IMAP_OPERATION_CALLBACK_H #include <jni.h> #include "MCBaseTypes.h" #include "MCIMAPOperationCallback.h" #ifdef __cplusplus namespace mailcore { class JavaIMAPOperationCallback : public Object, public IMAPOperationCallback { public: JavaIMAPOperationCallback(JNIEnv * env, jobject listener); virtual void bodyProgress(IMAPOperation * session, unsigned int current, unsigned int maximum); virtual void itemProgress(IMAPOperation * session, unsigned int current, unsigned int maximum); private: JNIEnv * mEnv; jobject mListener; }; } #endif #endif
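// Editor's sketch (not part of the original header): a plain C++ callback
// implementing the same IMAPOperationCallback interface that the JNI bridge
// above adapts. Wiring it to a concrete IMAPOperation is omitted, since the
// registration call varies by mailcore version.
//
//   class LoggingIMAPOperationCallback : public mailcore::Object,
//                                        public mailcore::IMAPOperationCallback {
//   public:
//       virtual void bodyProgress(mailcore::IMAPOperation * op,
//                                 unsigned int current, unsigned int maximum) {
//           fprintf(stderr, "body: %u/%u\n", current, maximum);
//       }
//       virtual void itemProgress(mailcore::IMAPOperation * op,
//                                 unsigned int current, unsigned int maximum) {
//           fprintf(stderr, "items: %u/%u\n", current, maximum);
//       }
//   };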
281
8,805
// // KBSearchResults.h // Keybase // // Created by Gabriel on 4/8/15. // Copyright (c) 2015 <NAME>. All rights reserved. // #import <Foundation/Foundation.h> @interface KBSearchResults : NSObject @property NSString *header; @property NSArray *results; @property NSInteger section; @end
98
32,544
<reponame>DBatOWL/tutorials<gh_stars>1000+ package com.baeldung.thymeleaf.enums; public class Widget { private String name; private Color color; public String getName() { return name; } public void setName(String name) { this.name = name; } public Color getColor() { return color; } public void setColor(Color color) { this.color = color; } @Override public String toString() { return "Widget [name=" + name + ", color=" + color + "]"; } }
225
532
<gh_stars>100-1000 #ifndef TROPTER_TESTING_H #define TROPTER_TESTING_H // ---------------------------------------------------------------------------- // tropter: testing.h // ---------------------------------------------------------------------------- // Copyright (c) 2017 tropter authors // // Licensed under the Apache License, Version 2.0 (the "License"); you may // not use this file except in compliance with the License. You may obtain a // copy of the License at http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ---------------------------------------------------------------------------- #include <catch.hpp> #include <Eigen/Dense> /// Compare any two Eigen matrices (their dimensions and each element) /// using Catch, with a given relative error tolerance. // Extra parentheses are to avoid a warning from GCC 5.4. namespace tropter { #define TROPTER_REQUIRE_EIGEN(actual, expected, rel_error_tolerance) \ do { \ REQUIRE((actual.rows() == expected.rows())); \ REQUIRE((actual.cols() == expected.cols())); \ for (int ir = 0; ir < actual.rows(); ++ir) { \ for (int ic = 0; ic < actual.cols(); ++ic) { \ INFO("(" << ir << "," << ic << "): " << \ actual(ir, ic) << " vs " << expected(ir, ic)); \ REQUIRE((Approx(actual(ir, ic)) \ .epsilon(rel_error_tolerance).scale(1.0) \ == expected(ir, ic))); \ } \ } \ } while (0) /// Similar to TROPTER_REQUIRE_EIGEN, but using an absolute error tolerance. #define TROPTER_REQUIRE_EIGEN_ABS(actual, expected, abs_error_tolerance) \ do { \ REQUIRE((actual.rows() == expected.rows())); \ REQUIRE((actual.cols() == expected.cols())); \ for (int ir = 0; ir < actual.rows(); ++ir) { \ for (int ic = 0; ic < actual.cols(); ++ic) { \ INFO("(" << ir << "," << ic << "): " << \ actual(ir, ic) << " vs " << expected(ir, ic)); \ REQUIRE((Approx(actual(ir, ic)).margin(abs_error_tolerance) \ == expected(ir, ic))); \ } \ } \ } while (0) template <typename TActual, typename TExpected> void testAlmostEqual(const Eigen::EigenBase<TActual>& a, const Eigen::EigenBase<TExpected>& e, double rel_error_tolerance) { TROPTER_REQUIRE_EIGEN(a.derived(), e.derived(), rel_error_tolerance); } } // namespace tropter #endif // TROPTER_TESTING_H
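// Editor's sketch (not part of the original header): using the helpers above
// inside a Catch test case; the include path for this header is assumed.
//
//   TEST_CASE("matrix comparison helpers") {
//       Eigen::MatrixXd expected(2, 2);
//       expected << 1.0, 2.0,
//                   3.0, 4.0;
//       Eigen::MatrixXd actual = expected;
//       actual(0, 0) += 1e-12;
//
//       TROPTER_REQUIRE_EIGEN(actual, expected, 1e-9);      // relative tolerance
//       TROPTER_REQUIRE_EIGEN_ABS(actual, expected, 1e-9);  // absolute tolerance
//       tropter::testAlmostEqual(actual, expected, 1e-9);   // template form
//   }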
1,835
3,765
/** * BSD-style license; for more info see http://pmd.sourceforge.net/license.html */ package net.sourceforge.pmd.lang.plsql.ast; import java.util.List; import org.junit.Assert; import org.junit.Test; import net.sourceforge.pmd.lang.plsql.AbstractPLSQLParserTst; public class SelectIntoWithGroupByTest extends AbstractPLSQLParserTst { @Test public void testExample1() { ASTInput input = plsql.parseResource("SelectIntoWithGroupBy1.pls"); ASTGroupByClause groupByClause = input.getFirstDescendantOfType(ASTGroupByClause.class); Assert.assertNotNull(groupByClause); } @Test public void testExample2() { ASTInput input = plsql.parseResource("SelectIntoWithGroupBy2.pls"); ASTGroupByClause groupByClause = input.getFirstDescendantOfType(ASTGroupByClause.class); Assert.assertNotNull(groupByClause); } @Test public void testExample3WithCube() { ASTInput input = plsql.parseResource("SelectIntoWithGroupBy3.pls"); ASTRollupCubeClause cubeClause = input.getFirstDescendantOfType(ASTRollupCubeClause.class); Assert.assertNotNull(cubeClause); Assert.assertEquals("CUBE", cubeClause.getImage()); } @Test public void testExample4WithGroupingSets() { ASTInput input = plsql.parseResource("SelectIntoWithGroupBy4.pls"); ASTGroupingSetsClause groupingSetsClause = input.getFirstDescendantOfType(ASTGroupingSetsClause.class); Assert.assertNotNull(groupingSetsClause); List<ASTFromClause> fromClauses = input.findDescendantsOfType(ASTFromClause.class); Assert.assertEquals(1, fromClauses.size()); Assert.assertEquals(5, fromClauses.get(0).getNumChildren()); } }
672
4,283
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.jet.impl.deployment;

import com.hazelcast.core.HazelcastException;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;

import javax.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

/**
 * ChildFirstClassLoader is a classloader which prefers its own classes over the parent's classes.
 * <p>
 * It first tries to load a class (or a resource) from the specified set of URLs and, if it cannot find it there,
 * delegates to its parent.
 */
public class ChildFirstClassLoader extends URLClassLoader {

    private static final ILogger LOG = Logger.getLogger(ChildFirstClassLoader.class);

    private volatile boolean closed;

    public ChildFirstClassLoader(@Nonnull URL[] urls, @Nonnull ClassLoader parent) {
        super(urls, parent);
        for (URL url : urls) {
            try {
                if (!new File(url.toURI()).exists()) {
                    LOG.warning("URL '" + url + "' does not point to an existing local file.");
                }
            } catch (URISyntaxException e) {
                throw new HazelcastException("URL has incorrect syntax", e);
            }
        }
        if (urls.length == 0) {
            throw new IllegalArgumentException("urls must not be null nor empty");
        }
        if (parent == null) {
            throw new IllegalArgumentException("parent must not be null");
        }
    }

    @Override
    protected Class<?> findClass(String name) throws ClassNotFoundException {
        return super.findClass(name);
    }

    @Override
    protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
        // has the class already been loaded?
        Class<?> loadedClass = findLoadedClass(name);
        if (loadedClass == null) {
            try {
                // try to find the class in the jar URLs given to the constructor
loadedClass = findClass(name); } catch (ClassNotFoundException ignored) { // ignore class not found } if (loadedClass == null) { loadedClass = getParent().loadClass(name); } if (loadedClass == null) { throw new ClassNotFoundException("Could not find class " + name + " in classloader nor in parent classloader"); } } if (resolve) { resolveClass(loadedClass); } return loadedClass; } @Override public Enumeration<URL> getResources(String name) throws IOException { List<URL> allRes = new LinkedList<>(); // load resource from this classloader Enumeration<URL> thisRes = findResources(name); if (thisRes != null) { while (thisRes.hasMoreElements()) { allRes.add(thisRes.nextElement()); } } // then try finding resources from parent classloaders Enumeration<URL> parentRes = super.findResources(name); if (parentRes != null) { while (parentRes.hasMoreElements()) { allRes.add(parentRes.nextElement()); } } return new Enumeration<URL>() { final Iterator<URL> it = allRes.iterator(); @Override public boolean hasMoreElements() { return it.hasNext(); } @Override public URL nextElement() { return it.next(); } }; } @Override public URL getResource(String name) { URL res = findResource(name); if (res == null) { res = super.getResource(name); } return res; } @Override public void close() throws IOException { super.close(); closed = true; } /** * Returns if this classloader has been already closed. * <p> * Visible for testing because there is no easy way to find out if * {@link URLClassLoader} has been closed. */ public boolean isClosed() { return closed; } }
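// A minimal usage sketch for the classloader above (not part of the Hazelcast sources).
// The jar path and the class name are hypothetical; the point is only to show the
// child-first lookup order and the Closeable lifecycle.
import java.net.URL;

class ChildFirstClassLoaderExample {
    public static void main(String[] args) throws Exception {
        URL[] urls = {new URL("file:///tmp/plugin.jar")};  // hypothetical local jar
        try (ChildFirstClassLoader loader =
                new ChildFirstClassLoader(urls, ChildFirstClassLoaderExample.class.getClassLoader())) {
            // Classes packed in plugin.jar are served from the jar itself even when the
            // parent could also resolve them; everything else falls through to the parent.
            Class<?> clazz = loader.loadClass("com.example.PluginEntryPoint");  // hypothetical class
            System.out.println(clazz.getName());
        }
    }
}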
1,969
1,338
/*
 * Copyright 2019-2020, <NAME> <<EMAIL>>.
 *
 * All rights reserved. Distributed under the terms of the MIT License.
 */

#include "UserUsageConditions.h"

#include "Logger.h"

// These are keys that are used to store this object's data into a BMessage
// instance.

#define KEY_COPY_MARKDOWN "copyMarkdown"
#define KEY_CODE "code"
#define KEY_MINIMUM_AGE "minimumAge"


UserUsageConditions::UserUsageConditions(BMessage* from)
    :
    fCode(""),
    fCopyMarkdown(""),
    fMinimumAge(0)
{
    // Initialize the local so that fMinimumAge keeps a defined value even
    // when the key is missing from the message.
    int16 minimumAge = 0;
    if (from->FindInt16(KEY_MINIMUM_AGE, &minimumAge) != B_OK)
        HDERROR("expected key [%s] in the message data", KEY_MINIMUM_AGE);
    fMinimumAge = (uint8) minimumAge;

    if (from->FindString(KEY_CODE, &fCode) != B_OK)
        HDERROR("expected key [%s] in the message data", KEY_CODE);
    if (from->FindString(KEY_COPY_MARKDOWN, &fCopyMarkdown) != B_OK)
        HDERROR("expected key [%s] in the message data", KEY_COPY_MARKDOWN);
}


UserUsageConditions::UserUsageConditions()
    :
    fCode(""),
    fCopyMarkdown(""),
    fMinimumAge(0)
{
}


UserUsageConditions::~UserUsageConditions()
{
}


const BString&
UserUsageConditions::Code() const
{
    return fCode;
}


const uint8
UserUsageConditions::MinimumAge() const
{
    return fMinimumAge;
}


const BString&
UserUsageConditions::CopyMarkdown() const
{
    return fCopyMarkdown;
}


void
UserUsageConditions::SetCode(const BString& code)
{
    fCode = code;
}


void
UserUsageConditions::SetMinimumAge(uint8 age)
{
    fMinimumAge = age;
}


void
UserUsageConditions::SetCopyMarkdown(const BString& copyMarkdown)
{
    fCopyMarkdown = copyMarkdown;
}


status_t
UserUsageConditions::Archive(BMessage* into, bool deep) const
{
    into->AddInt16(KEY_MINIMUM_AGE, (int16) fMinimumAge);
    into->AddString(KEY_CODE, fCode);
    into->AddString(KEY_COPY_MARKDOWN, fCopyMarkdown);
    return B_OK;
}
676
310
// Copyright (C) 2020 Intel Corporation // SPDX-License-Identifier: MIT #include <CL/sycl.hpp> #include <algorithm> #include <iostream> using namespace sycl; int main() { // Set up queue on any available device queue Q; // Initialize input and output memory on the host constexpr size_t N = 256; constexpr size_t M = 256; std::vector<int> a(N * M), b(N * M), c(N * M); std::fill(a.begin(), a.end(), 1); std::fill(b.begin(), b.end(), 2); std::fill(c.begin(), c.end(), 0); { // Create buffers associated with inputs and output buffer<int, 2> a_buf(a.data(), range<2>(N, M)), b_buf(b.data(), range<2>(N, M)), c_buf(c.data(), range<2>(N, M)); // Submit the kernel to the queue Q.submit([&](handler& h) { accessor a{a_buf, h}; accessor b{b_buf, h}; accessor c{c_buf, h}; // START CODE SNIP h.parallel_for(range{N, M}, [=](id<2> idx) { c[idx] = a[idx] + b[idx]; }); // END CODE SNIP }); } // Check that all outputs match expected value bool passed = std::all_of(c.begin(), c.end(), [](int i) { return (i == 3); }); std::cout << ((passed) ? "SUCCESS" : "FAILURE") << std::endl; return (passed) ? 0 : 1; }
513
777
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CONTENT_RENDERER_SHARED_WORKER_WEBSHAREDWORKER_PROXY_H_ #define CONTENT_RENDERER_SHARED_WORKER_WEBSHAREDWORKER_PROXY_H_ #include "base/macros.h" #include "ipc/ipc_listener.h" #include "third_party/WebKit/public/web/WebSharedWorkerConnectListener.h" #include "third_party/WebKit/public/web/WebSharedWorkerCreationErrors.h" struct ViewHostMsg_CreateWorker_Params; namespace blink { class WebMessagePortChannel; } namespace IPC { class MessageRouter; } namespace content { // Implementation of the WebSharedWorker APIs. This object is intended to only // live long enough to allow the caller to send a "connect" event to the worker // thread. Once the connect event has been sent, all future communication will // happen via the WebMessagePortChannel. This instance will self-destruct when a // connection is established. class WebSharedWorkerProxy : private IPC::Listener { public: // |channel| should be passed with its ownership. WebSharedWorkerProxy( std::unique_ptr<blink::WebSharedWorkerConnectListener> listener, ViewHostMsg_CreateWorker_Params params, blink::WebMessagePortChannel* channel); ~WebSharedWorkerProxy() override; private: void connect(ViewHostMsg_CreateWorker_Params params, blink::WebMessagePortChannel* channel); // IPC::Listener implementation. bool OnMessageReceived(const IPC::Message& message) override; void OnWorkerCreated(); void OnWorkerScriptLoadFailed(); void OnWorkerConnected(); // Routing id associated with this worker - used to receive messages from the // worker, and also to route messages to the worker (WorkerService contains // a map that maps between these renderer-side route IDs and worker-side // routing ids). int route_id_; IPC::MessageRouter* const router_; int message_port_id_; std::unique_ptr<blink::WebSharedWorkerConnectListener> listener_; DISALLOW_COPY_AND_ASSIGN(WebSharedWorkerProxy); }; } // namespace content #endif // CONTENT_RENDERER_SHARED_WORKER_WEBSHAREDWORKER_PROXY_H_
704
351
#pragma once #include "reverse_comparator.hpp" template <typename T, typename C = int, typename Comp = std::less<T>> struct cnt_min { T v; C cnt; cnt_min() : v(), cnt(0) {} explicit cnt_min(T v_) : v(v_), cnt(1) {} cnt_min(T v_, C cnt_) : v(v_), cnt(cnt_) {} friend cnt_min operator + (const cnt_min& a, const cnt_min& b) { if (!b.cnt) return a; else if (!a.cnt) return b; else if (Comp().operator()(a.v, b.v)) return a; else if (Comp().operator()(b.v, a.v)) return b; else return cnt_min(a.v, a.cnt + b.cnt); } cnt_min& operator += (const cnt_min& o) { return *this = (*this + o); } }; template <typename T, typename C = int, typename Comp = std::less<T>> using cnt_max = cnt_min<T, C, reverse_comparator_t<Comp>>;
345
2,338
<reponame>mkinsner/llvm //===- MathToSPIRV.h - Math to SPIR-V Patterns ------------------*- C++ -*-===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // // Provides patterns to convert Math dialect to SPIR-V dialect. // //===----------------------------------------------------------------------===// #ifndef MLIR_CONVERSION_MATHTOSPIRV_MATHTOSPIRV_H #define MLIR_CONVERSION_MATHTOSPIRV_MATHTOSPIRV_H #include "mlir/Transforms/DialectConversion.h" namespace mlir { class SPIRVTypeConverter; /// Appends to a pattern list additional patterns for translating Math ops /// to SPIR-V ops. void populateMathToSPIRVPatterns(SPIRVTypeConverter &typeConverter, RewritePatternSet &patterns); } // namespace mlir #endif // MLIR_CONVERSION_MATHTOSPIRV_MATHTOSPIRV_H
345
1,426
package com.octo.android.robospice.springandroid.test; import java.io.File; import org.springframework.web.client.RestTemplate; import android.app.Application; import com.octo.android.robospice.SpringAndroidSpiceService; import com.octo.android.robospice.persistence.CacheManager; import com.octo.android.robospice.persistence.exception.CacheCreationException; import com.octo.android.robospice.persistence.springandroid.json.jackson.JacksonObjectPersisterFactory; public class SpringAndroidTestService extends SpringAndroidSpiceService { @Override public CacheManager createCacheManager(Application application) throws CacheCreationException { CacheManager cacheManager = new CacheManager(); cacheManager.addPersister(new JacksonObjectPersisterFactory(application, new File("/"))); return cacheManager; } @Override public RestTemplate createRestTemplate() { return new RestTemplate(); } }
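// A hedged usage sketch (not part of the RoboSpice test sources): a client binds this
// service through a SpiceManager; the class literal is the only wiring needed. The
// surrounding Activity lifecycle calls are described in comments only.
import com.octo.android.robospice.SpiceManager;

class SpringAndroidTestServiceUsage {
    // Typically a field of an Activity, started in onStart() with
    // spiceManager.start(this) and stopped in onStop() with spiceManager.shouldStop().
    private final SpiceManager spiceManager = new SpiceManager(SpringAndroidTestService.class);
}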
288
16,989
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.python; import static com.google.common.collect.ImmutableSet.toImmutableSet; import com.google.common.base.Ascii; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.BuildOptions; import com.google.devtools.build.lib.analysis.config.BuildOptionsView; import com.google.devtools.build.lib.analysis.config.SymlinkDefinition; import com.google.devtools.build.lib.analysis.config.transitions.TransitionFactory; import com.google.devtools.build.lib.buildtool.BuildRequestOptions; import com.google.devtools.build.lib.cmdline.RepositoryName; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.packages.Attribute.AllowedValueSet; import com.google.devtools.build.lib.packages.AttributeMap; import com.google.devtools.build.lib.packages.RawAttributeMapper; import com.google.devtools.build.lib.packages.RuleTransitionData; import com.google.devtools.build.lib.packages.Type; import com.google.devtools.build.lib.util.FileType; import com.google.devtools.build.lib.vfs.Path; import java.util.Set; import java.util.function.Function; /** Rule definitions for Python rules. */ public class PyRuleClasses { public static final FileType PYTHON_SOURCE = FileType.of(".py", ".py3"); /** * A value set of the target and sentinel values that doesn't mention the sentinel in error * messages. */ public static final AllowedValueSet TARGET_PYTHON_ATTR_VALUE_SET = new AllowedValueSet(PythonVersion.TARGET_AND_SENTINEL_STRINGS) { @Override public String getErrorReason(Object value) { return String.format("has to be one of 'PY2' or 'PY3' instead of '%s'", value); } }; /** * Returns a rule transition factory for Python binary rules and other rules that may change the * Python version. * * <p>The factory reads the version specified by the target's {@code python_version} attribute (if * given), which must exist on the rule class. If a value was read successfully, the factory * returns a transition that sets the version to that value. Otherwise, the factory returns {@code * defaultTransition} instead. * * <p>If the attribute has an unparseable value, then the factory returns {@code * defaultTransition} and it is up to the rule's analysis phase ({@link * PyCommon#validatePythonVersionAttr}) to report an attribute error to the user. This case should * be prevented by attribute validation if the rule class is defined correctly. */ public static TransitionFactory<RuleTransitionData> makeVersionTransition( PythonVersionTransition defaultTransition) { return (ruleData) -> { AttributeMap attrs = RawAttributeMapper.of(ruleData.rule()); // Fail fast if we're used on an ill-defined rule class. 
      Preconditions.checkArgument(
          attrs.has(PyCommon.PYTHON_VERSION_ATTRIBUTE, Type.STRING),
          "python version transitions require that the RuleClass define a "
              + "'python_version' attribute");
      // Attribute validation should enforce that the attribute string value is either a target
      // value ("PY2" or "PY3") or the sentinel value ("_INTERNAL_SENTINEL"). But just in case,
      // we'll treat an invalid value as the default value rather than propagate an unchecked
      // exception in this context. That way the user can at least get a clean error message
      // instead of a crash.
      PythonVersionTransition transition;
      try {
        PythonVersion versionFromAttribute = PyCommon.readPythonVersionFromAttribute(attrs);
        if (versionFromAttribute == null) {
          transition = defaultTransition;
        } else {
          transition = PythonVersionTransition.toConstant(versionFromAttribute);
        }
      } catch (IllegalArgumentException ex) {
        transition = defaultTransition;
      }
      return transition;
    };
  }

  /**
   * A Python version transition that sets the version as specified by the target's attributes, with
   * a default determined by {@link PythonOptions#getDefaultPythonVersion}.
   */
  public static final TransitionFactory<RuleTransitionData> VERSION_TRANSITION =
      makeVersionTransition(PythonVersionTransition.toDefault());

  /** The py2 and py3 symlinks. */
  public enum PySymlink implements SymlinkDefinition {
    PY2(PythonVersion.PY2),
    PY3(PythonVersion.PY3);

    private final String versionString;
    private final PythonVersionTransition transition;

    PySymlink(PythonVersion version) {
      this.versionString = Ascii.toLowerCase(version.toString());
      this.transition = PythonVersionTransition.toConstant(version);
    }

    @Override
    public String getLinkName(String symlinkPrefix, String productName, String workspaceBaseName) {
      return symlinkPrefix + versionString;
    }

    @Override
    public ImmutableSet<Path> getLinkPaths(
        BuildRequestOptions buildRequestOptions,
        Set<BuildConfiguration> targetConfigs,
        Function<BuildOptions, BuildConfiguration> configGetter,
        RepositoryName repositoryName,
        Path outputPath,
        Path execRoot) {
      if (!buildRequestOptions.experimentalCreatePySymlinks) {
        return ImmutableSet.of();
      }
      EventHandler e =
          event -> {
            throw new UnsupportedOperationException(
                "This transition shouldn't do anything that could fail.\n"
                    + "TODO(bazel-team): refactor this to not call patch(). Blaze code should"
                    + " not apply transitions unless it absolutely has to, since that requires"
                    + " sequencing (like supporting Starlark flags and handling exceptions)"
                    + " that's easy to get wrong.");
          };
      return targetConfigs.stream()
          .map(
              config ->
                  configGetter.apply(
                      transition.patch(
                          new BuildOptionsView(
                              config.getOptions(), transition.requiresOptionFragments()),
                          e)))
          .map(config -> config.getOutputDirectory(repositoryName).getRoot().asPath())
          .distinct()
          .collect(toImmutableSet());
    }
  }
}
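// Hedged illustration, not part of the Bazel sources: the two public entry points above
// yield equivalent factories. It assumes the sketch lives in the same package as
// PyRuleClasses so that PythonVersionTransition is visible without a further import.
import com.google.devtools.build.lib.analysis.config.transitions.TransitionFactory;
import com.google.devtools.build.lib.packages.RuleTransitionData;

class PyVersionTransitionSketch {
    static TransitionFactory<RuleTransitionData> factory() {
        // The same construction that PyRuleClasses.VERSION_TRANSITION performs internally.
        return PyRuleClasses.makeVersionTransition(PythonVersionTransition.toDefault());
    }
}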
2,393
2,338
<reponame>mkinsner/llvm struct ClassWithImplicitCtor { int foo() { return 1; } }; struct ClassWithDefaultedCtor { ClassWithDefaultedCtor() = default; int foo() { return 2; } }; struct ClassWithOneCtor { int value; ClassWithOneCtor(int i) { value = i; } }; struct ClassWithMultipleCtor { int value; ClassWithMultipleCtor(int i) { value = i; } ClassWithMultipleCtor(int i, int v) { value = v + i; } }; struct ClassWithDeletedCtor { int value; ClassWithDeletedCtor() { value = 6; } ClassWithDeletedCtor(int i) = delete; }; struct ClassWithDeletedDefaultCtor { int value; ClassWithDeletedDefaultCtor() = delete; ClassWithDeletedDefaultCtor(int i) { value = i; } }; int main() { ClassWithImplicitCtor C1; C1.foo(); ClassWithDefaultedCtor C2; C2.foo(); ClassWithOneCtor C3(22); ClassWithMultipleCtor C4(23); ClassWithMultipleCtor C5(24, 25); ClassWithDeletedCtor C6; ClassWithDeletedDefaultCtor C7(26); return 0; // break here }
367
1,025
//================================================================================== // Copyright (c) 2016 , Advanced Micro Devices, Inc. All rights reserved. // /// \author AMD Developer Tools Team /// \file suInterceptionFunctions.cpp /// //================================================================================== //------------------------------ suInterceptionFunctions.cpp ------------------------------ // Infra: #include <AMDTBaseTools/Include/gtAssert.h> #include <AMDTBaseTools/Include/AMDTDefinitions.h> #include <AMDTBaseTools/Include/gtString.h> #include <AMDTOSWrappers/Include/osApplication.h> #include <AMDTOSWrappers/Include/osFilePath.h> #include <AMDTOSWrappers/Include/osProcess.h> #include <AMDTOSWrappers/Include/osStringConstants.h> // Local: #include <AMDTServerUtilities/Include/suInterceptionFunctions.h> #include <AMDTServerUtilities/Include/suStringConstants.h> void suTestFunction() { return; } // --------------------------------------------------------------------------- // Name: gsExtensionsManager::getSpiesDirectory // Description: // Returns the path of the "spies" directory. // Spies are DLLs that are used for DLL replacement. // Arguments: spiesDirectory - Will get the spies directory. // Return Val: bool - Success / failure. // Author: <NAME> // Date: 13/5/2007 // --------------------------------------------------------------------------- bool suGetSpiesDirectory(gtString& spiesDirectory, bool isRunningInStandaloneMode) { bool retVal = false; // Get the environment spies directory: bool rc1 = osGetCurrentProcessEnvVariableValue(OS_STR_envVar_spiesDirectory, spiesDirectory); if (rc1 == true) { retVal = true; } else { // If we are running in a "standalone" mode: if (isRunningInStandaloneMode) { // Allow debugging the OpenGL sever in a standalone mode: #if AMDT_BUILD_CONFIGURATION == AMDT_DEBUG_BUILD { if (rc1 == false) { #if AMDT_BUILD_TARGET == AMDT_WINDOWS_OS { spiesDirectory = L"o:\\debug\\bin\\examples\\teapot"; retVal = true; } #elif ((AMDT_BUILD_TARGET == AMDT_LINUX_OS) && (AMDT_LINUX_VARIANT == AMDT_GENERIC_LINUX_VARIANT)) { // Get the current user home directory: gtString currUserName; bool rc2 = osGetCurrentProcessEnvVariableValue(L"USER", currUserName); GT_ASSERT(rc2) { // Set the spies directory according to gremedy's work convention: spiesDirectory = L"/home/"; spiesDirectory += currUserName; spiesDirectory += L"/work/driveo/debug/bin/spies"; retVal = true; } } #elif ((AMDT_BUILD_TARGET == AMDT_LINUX_OS) && (AMDT_LINUX_VARIANT == AMDT_MAC_OS_X_LINUX_VARIANT)) { #ifdef _GR_IPHONE_DEVICE_BUILD { // Uri, 15/10/09 - we currently assume the CodeXL spy is inside the current bundle: osFilePath currentAppDir; retVal = osGetCurrentApplicationPath(currentAppDir); if (retVal) { currentAppDir.clearFileExtension(); currentAppDir.clearFileName(); spiesDirectory = currentAppDir.asString(); } } #else // ndef _GR_IPHONE_DEVICE_BUILD { // Get the current user home directory: gtString currUserName; bool rc2 = osGetCurrentProcessEnvVariableValue("USER", currUserName); GT_ASSERT(rc2) { // Set the spies directory according to gremedy's work convention: spiesDirectory = "/Users/"; spiesDirectory += currUserName; spiesDirectory += "/work/driveo/debug/bin/CodeXL_d.app/Contents/MacOS/spies"; retVal = true; } } #endif // _GR_IPHONE_DEVICE_BUILD } #endif // AMDT_BUILD_TARGET } } #elif AMDT_BUILD_CONFIGURATION == AMDT_RELEASE_BUILD { if (rc1 == false) { #if AMDT_BUILD_TARGET == AMDT_WINDOWS_OS { spiesDirectory = L"o:\\release\\bin\\examples\\teapot"; retVal = true; } #elif 
((AMDT_BUILD_TARGET == AMDT_LINUX_OS) && (AMDT_LINUX_VARIANT == AMDT_GENERIC_LINUX_VARIANT)) { // Get the current user home directory: gtString currUserName; bool rc2 = osGetCurrentProcessEnvVariableValue(L"USER", currUserName); GT_ASSERT(rc2) { // Set the spies directory according to gremedy's work convention: spiesDirectory = L"/home/"; spiesDirectory += currUserName; spiesDirectory += L"/work/driveo/release/bin/spies"; retVal = true; } } #elif ((AMDT_BUILD_TARGET == AMDT_LINUX_OS) && (AMDT_LINUX_VARIANT == AMDT_MAC_OS_X_LINUX_VARIANT)) { #ifdef _GR_IPHONE_DEVICE_BUILD { // Uri, 15/10/09 - we currently assume the CodeXL spy is inside the current bundle: osFilePath currentAppDir; retVal = osGetCurrentApplicationPath(currentAppDir); if (retVal) { currentAppDir.clearFileExtension(); currentAppDir.clearFileName(); spiesDirectory = currentAppDir.asString(); } } #else // ndef _GR_IPHONE_DEVICE_BUILD { // Get the current user home directory: gtString currUserName; bool rc2 = osGetCurrentProcessEnvVariableValue("USER", currUserName); GT_ASSERT(rc2) { // Set the spies directory according to gremedy's work convention: spiesDirectory = "/Users/"; spiesDirectory += currUserName; spiesDirectory += "/work/driveo/release/bin/CodeXL.app/Contents/MacOS/spies"; retVal = true; } } #endif // _GR_IPHONE_DEVICE_BUILD } #else #error Unknown Linux variant! #endif // AMDT_BUILD_TARGET } } #endif // AMDT_BUILD_CONFIGURATION } else { #ifdef _GR_IPHONE_DEVICE_BUILD { // Uri, 15/10/09 - we currently assume the CodeXL spy is inside the current bundle: osFilePath currentAppDir; retVal = osGetCurrentApplicationPath(currentAppDir); if (retVal) { currentAppDir.clearFileExtension(); currentAppDir.clearFileName(); spiesDirectory = currentAppDir.asString(); } } #endif } } return retVal; } // --------------------------------------------------------------------------- // Name: suHandleCRInSources // Description: Works around a bug in the AMD compilers, by replacing each CR // character which is not followed by a LF with a CRLF. // This is only done if needed, on both OpenGL and OpenCL sources. // Arguments: sourceCount - number of source strings. // sources - the source strings. // sourceLengths - numeric values or 0 for null-terminated // o_modifiedSource - the output string if needed. // Return Val: bool - was a change required. // Author: <NAME> // Date: 16/07/2015 // --------------------------------------------------------------------------- bool SU_API suHandleCRInSources(unsigned int sourceCount, const char* const* sources, unsigned int* sourceLengths, gtASCIIString& o_modifiedSource) { bool retVal = false; // Go over all the input strings: for (unsigned int i = 0; i < sourceCount; ++i) { const char* currentSource = sources[i]; unsigned int l = (nullptr != sourceLengths) ? sourceLengths[i] : 0; // Variables used in the analysis: unsigned int j = 0; unsigned int j_ = 0; bool lastCharWasCR = false; // Go over the source string: bool goOn = (nullptr != currentSource); while (goOn) { const char& currentChar = currentSource[j]; bool wasUnmatchedCR = lastCharWasCR && ('\x0A' != currentChar); if (wasUnmatchedCR) { // If this is the first time we've encountered an unmatched CR: if (!retVal) { // Start by copying all the strings up to this point: o_modifiedSource.makeEmpty(); for (unsigned int i_ = 0; i_ < i; ++i_) { // Handle null strings and lengths specified: const char* currentSource_ = sources[i_]; if (nullptr != currentSource_) { // Consider the length parameter: unsigned int l_ = (nullptr != sourceLengths) ? 
sourceLengths[i_] : 0; if (l_ > 0) { o_modifiedSource.append(currentSource_, l_); } else { o_modifiedSource.append(currentSource_); } } } // Now copy the start of the current string: o_modifiedSource.append(currentSource, j); // Note that j > 0 since we saw at least the CR. j_ = j; // Finally, note that we need to replace the string: retVal = true; } else // retVal { // Copy everything up to this point: o_modifiedSource.append(&(currentSource[j_]), j - j_); j_ = j; } // Follow it with a LF: o_modifiedSource.append('\x0A'); } // Update the last character. Stop at a null character: lastCharWasCR = ('\x0D' == currentChar); goOn = goOn && ('\0' != currentChar); ++j; if (0 < l) { goOn = goOn && (j < l); } } // If we're creating a new string, copy the current string's remainder: if (retVal) { // Consider the length parameter: if (l > 0) { o_modifiedSource.append(&(currentSource[j_]), l - j_); } else { o_modifiedSource.append(&(currentSource[j_])); } } } return retVal; }
6,497
1,743
<filename>tests/unit/modules/test_LocalProvisioning.py import time import datetime from bzt import ToolError from bzt.engine import EXEC from bzt.modules.provisioning import Local from tests.unit import BZTestCase, EngineEmul class ScenarioExecutorEmul(object): def __init__(self): pass def startup(self): pass class LocalProvisioningEmul(Local): def __init__(self): super(LocalProvisioningEmul, self).__init__() self.engine = EngineEmul() class LocalProvisioningTest(BZTestCase): def check_started_list(self, start_time, delay, started): executor = ScenarioExecutorEmul() executor.delay = delay prov = LocalProvisioningEmul() prov.executors = [executor] prov.start_time = start_time prov.available_slots = 1 if started: prov.started_modules = [executor] elif executor in prov.started_modules: return False prov._start_modules() return executor in prov.started_modules def check_slots(self, start_time, delay, slots): executor = ScenarioExecutorEmul() executor.delay = delay prov = LocalProvisioningEmul() prov.executors = [executor] prov.start_time = start_time prov.available_slots = slots prov._start_modules() if slots == 0: return prov.available_slots == slots else: return prov.available_slots == slots - 1 def test_delay_cycle(self): cur_time = time.time() self.assertTrue(self.check_started_list(cur_time, 0, False)) # all ok self.assertTrue(self.check_started_list(cur_time, 0, True)) # module is already started self.assertTrue(self.check_started_list(cur_time - 10, 5, False)) # time to run self.assertFalse(self.check_started_list(cur_time - 10, 20, False)) # too early self.assertTrue(self.check_slots(cur_time, 0, 0)) # no slots available self.assertTrue(self.check_slots(cur_time, 0, 1)) # 1 slot available self.assertTrue(self.check_slots(cur_time, 0, 3)) # some slots available def test_start_shift(self): local = Local() _today = datetime.date.today() _yesterday = _today - datetime.timedelta(days=1) _tomorrow = _today + datetime.timedelta(days=1) _start_time = datetime.time(12, 30, 5) _scheduled_time = datetime.time(13, 31, 7) local.start_time = time.mktime(datetime.datetime.combine(_today, _start_time).timetuple()) date = datetime.datetime.combine(_tomorrow, _scheduled_time).strftime('%Y-%m-%d %H:%M:%S') shift = local._get_start_shift(date) self.assertEqual(shift, 90062.0) date = datetime.datetime.combine(_yesterday, _scheduled_time).strftime('%Y-%m-%d %H:%M') shift = local._get_start_shift(date) self.assertEqual(shift, 3655.0) date = datetime.datetime.combine(_today, _scheduled_time).strftime('%H:%M:%S') shift = local._get_start_shift(date) self.assertEqual(shift, 3662.0) date = datetime.datetime.combine(_today, _scheduled_time).strftime('%H:%M') shift = local._get_start_shift(date) self.assertEqual(shift, 3655.0) shift = local._get_start_shift('') self.assertEqual(shift, 0) shift = local._get_start_shift('lorem ipsum') self.assertEqual(shift, 0) def test_start_sequential_global(self): local = Local() local.settings["sequential"] = True local.engine = EngineEmul() local.engine.config.merge({EXEC: [{}, {}]}) local.engine.config.get("settings")["default-executor"] = "mock" local.engine.unify_config() local.prepare() local.startup() cnt = 0 while not local.check(): cnt += 1 self.assertEqual(3, cnt) local.shutdown() for executor in local.executors: executor.is_has_results = True local.post_process() def test_check_sequential_slots(self): local = Local() local.settings["capacity"] = 2 local.engine = EngineEmul() local.engine.config.merge({EXEC: [{}, {}, {}, {}, {}]}) 
local.engine.config.get("settings")["default-executor"] = "mock" local.engine.unify_config() local.prepare() local.startup() cnt = 0 while not local.check(): cnt += 1 self.assertEqual(5, cnt) local.shutdown() for executor in local.executors: executor.is_has_results = True local.post_process() def test_exception(self): local = Local() local.engine = EngineEmul() local.engine.config.merge({EXEC: [{}]}) local.engine.config.get("settings")["default-executor"] = "mock" local.engine.unify_config() local.prepare() local.startup() local.check() local.shutdown() try: local.post_process() except ToolError as exc: self.assertNotIn('DIAGNOSTICS', str(exc)) self.assertIsNotNone(exc.diagnostics) self.assertEqual(exc.diagnostics, ['DIAGNOSTICS']) except BaseException as exc: self.fail("Was supposed to fail with ToolError, but crashed with %s" % exc)
2,398
13,006
<gh_stars>1000+ /******************************************************************************* * Copyright (c) 2015-2018 Skymind, Inc. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package org.deeplearning4j.arbiter.optimize.api; import lombok.AllArgsConstructor; import lombok.Data; import org.deeplearning4j.arbiter.optimize.generator.util.SerializedSupplier; import org.nd4j.common.function.Supplier; import java.io.Serializable; import java.util.Map; /** * Candidate: a proposed hyperparameter configuration. * Also includes a map for data parameters, to configure things like data preprocessing, etc. */ @Data @AllArgsConstructor public class Candidate<C> implements Serializable { private Supplier<C> supplier; private int index; private double[] flatParameters; private Map<String, Object> dataParameters; private Exception exception; public Candidate(C value, int index, double[] flatParameters, Map<String,Object> dataParameters, Exception e) { this(new SerializedSupplier<C>(value), index, flatParameters, dataParameters, e); } public Candidate(C value, int index, double[] flatParameters) { this(new SerializedSupplier<C>(value), index, flatParameters); } public Candidate(Supplier<C> value, int index, double[] flatParameters) { this(value, index, flatParameters, null, null); } public C getValue(){ return supplier.get(); } }
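// Minimal usage sketch (not from the Arbiter sources): stores an arbitrary Serializable
// configuration in a Candidate and reads it back through the lombok-generated accessors
// and the explicit getValue() above.
import java.util.Collections;

class CandidateExample {
    public static void main(String[] args) {
        double[] flat = {0.25, 0.75};
        Candidate<String> candidate =
                new Candidate<>("learningRate=0.01", 0, flat, Collections.<String, Object>emptyMap(), null);
        // getValue() deserializes the stored configuration on demand via SerializedSupplier.
        System.out.println(candidate.getIndex() + " -> " + candidate.getValue());
    }
}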
574
32,544
package com.baeldung.patterns.intercepting.filter.commands; import com.baeldung.patterns.intercepting.filter.filters.FilterManager; import com.baeldung.patterns.intercepting.filter.filters.OnIntercept; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; public abstract class FrontCommand implements OnIntercept { protected HttpServletRequest request; protected HttpServletResponse response; private boolean intercept; public FrontCommand() { } public void init(HttpServletRequest request, HttpServletResponse response) { this.request = request; this.response = response; } public void process() throws ServletException, IOException { FilterManager.process(request, response, this); } public void forward(String target) throws ServletException, IOException { if (intercept) { return; } String path = String.format("/WEB-INF/jsp/%s.jsp", target); RequestDispatcher dispatcher = request.getServletContext() .getRequestDispatcher(path); dispatcher.forward(request, response); } @Override public void intercept() { intercept = true; } }
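// A hedged sketch of a concrete command (the class and view names are hypothetical, and
// it assumes the base process() only drives the filter chain): the front controller
// servlet would instantiate it, call init(request, response), then process().
import javax.servlet.ServletException;
import java.io.IOException;

public class HomeCommand extends FrontCommand {

    @Override
    public void process() throws ServletException, IOException {
        super.process();   // run the interceptor chain; a filter may call intercept()
        forward("home");   // resolves to /WEB-INF/jsp/home.jsp, skipped if intercepted
    }
}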
477
8,767
<reponame>monperrus/jsoup<gh_stars>1000+ /** Contains the jsoup HTML cleaner, and safelist definitions. */ package org.jsoup.safety;
45
314
package com.ys.yoosir.zzshow.mvp.ui.activities;

import android.animation.ValueAnimator;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.Build;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.transition.Transition;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.Toast;

import com.bumptech.glide.Glide;
import com.bumptech.glide.load.DecodeFormat;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.animation.GlideAnimation;
import com.bumptech.glide.request.target.SimpleTarget;
import com.squareup.picasso.Picasso;
import com.ys.yoosir.zzshow.R;
import com.ys.yoosir.zzshow.di.component.AppComponent;
import com.ys.yoosir.zzshow.di.component.DaggerPhotoGirlDetailComponent;
import com.ys.yoosir.zzshow.di.module.ActivityModule;
import com.ys.yoosir.zzshow.di.module.PhotoGirlDetailModule;
import com.ys.yoosir.zzshow.mvp.presenter.PhotoDetailPresenterImpl;
import com.ys.yoosir.zzshow.mvp.presenter.interfaces.PhotoDetailPresenter;
import com.ys.yoosir.zzshow.mvp.ui.activities.base.BaseActivity;
import com.ys.yoosir.zzshow.mvp.view.PhotoDetailView;
import com.ys.yoosir.zzshow.utils.SystemUiVisibilityUtil;

import butterknife.BindView;
import uk.co.senab.photoview.PhotoView;
import uk.co.senab.photoview.PhotoViewAttacher;

/**
 * 1. Shared-element animation: when a PhotoView is used as the shared element, the animation is not smooth.
 * 2. When Glide is used to load the image, the ImageView and the PhotoView render at different sizes.
 */
public class PhotoDetailActivity extends BaseActivity<PhotoDetailPresenterImpl> implements PhotoDetailView {

    private static final String PHOTO_URL = "PHOTO_URL";

    @BindView(R.id.img_iv)
    ImageView mImgIv;
    @BindView(R.id.photo_iv)
    PhotoView mPhotoIv;
    @BindView(R.id.toolbar)
    Toolbar mToolbar;

    private String mPhotoUrl;
    private boolean isHidden = false;
    private boolean mIsStatusBarHidden = false;

    public static Intent getPhotoDetailIntent(Context context, String photoUrl){
        Intent intent = new Intent(context,PhotoDetailActivity.class);
        intent.putExtra(PHOTO_URL,photoUrl);
        return intent;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_photo_detail, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()){
            case R.id.action_save:
                mPresenter.savePhoto(mPhotoUrl);
                return true;
            case R.id.action_share:
                mPresenter.sharePhoto(mPhotoUrl);
                return true;
            case R.id.action_set_wallpaper:
                mPresenter.setWallpaper(mPhotoUrl);
                return true;
        }
        return super.onOptionsItemSelected(item);
    }

    private void hideOrShowStatusBar() {
        if (mIsStatusBarHidden) {
            SystemUiVisibilityUtil.enter(PhotoDetailActivity.this);
        } else {
            SystemUiVisibilityUtil.exit(PhotoDetailActivity.this);
        }
        mIsStatusBarHidden = !mIsStatusBarHidden;
    }

    public void hideToolBarAndTextView(){
        isHidden = !isHidden;
        if(isHidden){
            startAnimation(true,1.0f,0.0f);
        }else{
            startAnimation(false,0.1f,1.0f);
        }
    }

    private void startAnimation(final boolean endState, float startValue, float endValue) {
        ValueAnimator animator = ValueAnimator.ofFloat(startValue,endValue).setDuration(500);
        animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                float y1;
                if(endState){
                    y1 = (0 - animation.getAnimatedFraction())*mToolbar.getHeight();
                }else{
                    y1 = (animation.getAnimatedFraction() -
1)*mToolbar.getHeight(); } mToolbar.setTranslationY(y1); } }); animator.start(); } @Override public int getLayoutId() { return R.layout.activity_photo_detail; } @Override public void initVariables() { mPhotoUrl = getIntent().getStringExtra(PHOTO_URL); } @Override public void initViews() { Picasso.with(this) .load(mPhotoUrl) .placeholder(R.mipmap.ic_loading) .error(R.mipmap.ic_load_fail) .into(mImgIv); mPhotoIv.setOnPhotoTapListener(new PhotoViewAttacher.OnPhotoTapListener() { @Override public void onPhotoTap(View view, float v, float v1) { hideToolBarAndTextView(); hideOrShowStatusBar(); } @Override public void onOutsidePhotoTap() { } }); // Glide.with(this).load(mPhotoUrl) // .asBitmap() // .format(DecodeFormat.PREFER_ARGB_8888) // .placeholder(R.mipmap.ic_loading) // .error(R.mipmap.ic_load_fail) // .diskCacheStrategy(DiskCacheStrategy.ALL) // .into(new SimpleTarget<Bitmap>() { // @Override // public void onResourceReady(Bitmap resource, GlideAnimation<? super Bitmap> glideAnimation) { // Log.d("ThreadName","----------------"+Thread.currentThread().getName()); // mImgIv.setImageBitmap(resource); // } // }); initLazyLoadView(); } @Override protected void setupActivityComponent(AppComponent appComponent) { DaggerPhotoGirlDetailComponent.builder() .appComponent(appComponent) .photoGirlDetailModule(new PhotoGirlDetailModule(this)) .activityModule(new ActivityModule(this)) .build() .inject(this); } private void initLazyLoadView(){ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { getWindow().getEnterTransition().addListener(new Transition.TransitionListener() { @Override public void onTransitionStart(Transition transition) { } @Override public void onTransitionEnd(Transition transition) { loadPhotoView(); } @Override public void onTransitionCancel(Transition transition) { } @Override public void onTransitionPause(Transition transition) { } @Override public void onTransitionResume(Transition transition) { } }); }else{ loadPhotoView(); } } private void loadPhotoView(){ // Glide.with(this).load(mPhotoUrl) // .asBitmap() // .format(DecodeFormat.PREFER_ARGB_8888) // .placeholder(R.mipmap.ic_loading) // .error(R.mipmap.ic_load_fail) // .diskCacheStrategy(DiskCacheStrategy.ALL) // .into(mPhotoIv); Picasso.with(this) .load(mPhotoUrl) .into(mPhotoIv); } @Override public void showProgress() { } @Override public void hideProgress() { } @Override public void showMsg(String message) { Toast.makeText(PhotoDetailActivity.this,message,Toast.LENGTH_SHORT).show(); } }
3,670
12,278
<reponame>cpp-pm/boost<gh_stars>1000+ // // Copyright (c) 2018-2019, <NAME>, <EMAIL> // // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // The authors gratefully acknowledge the support of // Fraunhofer IOSB, Ettlingen, Germany // #include <boost/numeric/ublas/tensor.hpp> #include <boost/numeric/ublas/matrix.hpp> #include <boost/numeric/ublas/vector.hpp> #include <iostream> int main() { using namespace boost::numeric::ublas; using format_t = column_major; using value_t = float; using tensor_t = tensor<value_t,format_t>; using matrix_t = matrix<value_t,format_t>; using namespace boost::numeric::ublas::index; // Tensor-Vector-Multiplications - Including Transposition { auto n = shape{3,4,2}; auto A = tensor_t(n,1); auto B1 = matrix_t(n[1],n[2],2); auto v1 = tensor_t(shape{n[0],1},2); auto v2 = tensor_t(shape{n[1],1},2); // auto v3 = tensor_t(shape{n[2],1},2); // C1(j,k) = B1(j,k) + A(i,j,k)*v1(i); // tensor_t C1 = B1 + prod(A,vector_t(n[0],1),1); // tensor_t C1 = B1 + A(_i,_,_) * v1(_i,_); // C2(i,k) = A(i,j,k)*v2(j) + 4; //tensor_t C2 = prod(A,vector_t(n[1],1),2) + 4; // tensor_t C2 = A(_,_i,_) * v2(_i,_) + 4; // not yet implemented! // C3() = A(i,j,k)*T1(i)*T2(j)*T2(k); // tensor_t C3 = prod(prod(prod(A,v1,1),v2,1),v3,1); // tensor_t C3 = A(_i,_j,_k) * v1(_i,_) * v2(_j,_) * v3(_k,_); // formatted output std::cout << "% --------------------------- " << std::endl; std::cout << "% --------------------------- " << std::endl << std::endl; std::cout << "% C1(j,k) = B1(j,k) + A(i,j,k)*v1(i);" << std::endl << std::endl; // std::cout << "C1=" << C1 << ";" << std::endl << std::endl; // formatted output std::cout << "% --------------------------- " << std::endl; std::cout << "% --------------------------- " << std::endl << std::endl; std::cout << "% C2(i,k) = A(i,j,k)*v2(j) + 4;" << std::endl << std::endl; // std::cout << "C2=" << C2 << ";" << std::endl << std::endl; } // Tensor-Matrix-Multiplications - Including Transposition { auto n = shape{3,4,2}; auto m = 5u; auto A = tensor_t(n,2); auto B = tensor_t(shape{n[1],n[2],m},2); auto B1 = tensor_t(shape{m,n[0]},1); auto B2 = tensor_t(shape{m,n[1]},1); // C1(l,j,k) = B(j,k,l) + A(i,j,k)*B1(l,i); // tensor_t C1 = B + prod(A,B1,1); // tensor_t C1 = B + A(_i,_,_) * B1(_,_i); // C2(i,l,k) = A(i,j,k)*B2(l,j) + 4; // tensor_t C2 = prod(A,B2) + 4; // tensor_t C2 = A(_,_j,_) * B2(_,_j) + 4; // C3(i,l1,l2) = A(i,j,k)*T1(l1,j)*T2(l2,k); // not yet implemented. 
// formatted output std::cout << "% --------------------------- " << std::endl; std::cout << "% --------------------------- " << std::endl << std::endl; std::cout << "% C1(l,j,k) = B(j,k,l) + A(i,j,k)*B1(l,i);" << std::endl << std::endl; // std::cout << "C1=" << C1 << ";" << std::endl << std::endl; // formatted output std::cout << "% --------------------------- " << std::endl; std::cout << "% --------------------------- " << std::endl << std::endl; std::cout << "% C2(i,l,k) = A(i,j,k)*B2(l,j) + 4;" << std::endl << std::endl; // std::cout << "C2=" << C2 << ";" << std::endl << std::endl; // // formatted output // std::cout << "% --------------------------- " << std::endl; // std::cout << "% --------------------------- " << std::endl << std::endl; // std::cout << "% C3(i,l1,l2) = A(i,j,k)*T1(l1,j)*T2(l2,k);" << std::endl << std::endl; // std::cout << "C3=" << C3 << ";" << std::endl << std::endl; } // Tensor-Tensor-Multiplications Including Transposition { auto na = shape{3,4,5}; auto nb = shape{4,6,3,2}; auto A = tensor_t(na,2); auto B = tensor_t(nb,3); auto T1 = tensor_t(shape{na[2],na[2]},2); auto T2 = tensor_t(shape{na[2],nb[1],nb[3]},2); // C1(j,l) = T1(j,l) + A(i,j,k)*A(i,j,l) + 5; // tensor_t C1 = T1 + prod(A,A,perm_t{1,2}) + 5; // tensor_t C1 = T1 + A(_i,_j,_m)*A(_i,_j,_l) + 5; // formatted output std::cout << "% --------------------------- " << std::endl; std::cout << "% --------------------------- " << std::endl << std::endl; std::cout << "% C1(k,l) = T1(k,l) + A(i,j,k)*A(i,j,l) + 5;" << std::endl << std::endl; // std::cout << "C1=" << C1 << ";" << std::endl << std::endl; // C2(k,l,m) = T2(k,l,m) + A(i,j,k)*B(j,l,i,m) + 5; //tensor_t C2 = T2 + prod(A,B,perm_t{1,2},perm_t{3,1}) + 5; // tensor_t C2 = T2 + A(_i,_j,_k)*B(_j,_l,_i,_m) + 5; // formatted output std::cout << "% --------------------------- " << std::endl; std::cout << "% --------------------------- " << std::endl << std::endl; std::cout << "% C2(k,l,m) = T2(k,l,m) + A(i,j,k)*B(j,l,i,m) + 5;" << std::endl << std::endl; // std::cout << "C2=" << C2 << ";" << std::endl << std::endl; } }
2,379
56,632
// This file is part of OpenCV project. // It is subject to the license terms in the LICENSE file found in the top-level directory // of this distribution and at http://opencv.org/license.html. #include "precomp.hpp" #include "opencv2/videoio/registry.hpp" #include "videoio_registry.hpp" using namespace cv; // Legacy C-like API CV_IMPL CvCapture* cvCreateCameraCapture(int) { CV_LOG_WARNING(NULL, "cvCreateCameraCapture doesn't support legacy API anymore.") return NULL; } CV_IMPL CvCapture* cvCreateFileCaptureWithPreference(const char*, int) { CV_LOG_WARNING(NULL, "cvCreateFileCaptureWithPreference doesn't support legacy API anymore.") return NULL; } CV_IMPL CvCapture* cvCreateFileCapture(const char * filename) { return cvCreateFileCaptureWithPreference(filename, CAP_ANY); } CV_IMPL CvVideoWriter* cvCreateVideoWriter(const char*, int, double, CvSize, int) { CV_LOG_WARNING(NULL, "cvCreateVideoWriter doesn't support legacy API anymore.") return NULL; } CV_IMPL int cvWriteFrame(CvVideoWriter* writer, const IplImage* image) { return writer ? writer->writeFrame(image) : 0; } CV_IMPL void cvReleaseVideoWriter(CvVideoWriter** pwriter) { if( pwriter && *pwriter ) { delete *pwriter; *pwriter = 0; } } CV_IMPL void cvReleaseCapture(CvCapture** pcapture) { if (pcapture && *pcapture) { delete *pcapture; *pcapture = 0; } } CV_IMPL IplImage* cvQueryFrame(CvCapture* capture) { if (!capture) return 0; if (!capture->grabFrame()) return 0; return capture->retrieveFrame(0); } CV_IMPL int cvGrabFrame(CvCapture* capture) { return capture ? capture->grabFrame() : 0; } CV_IMPL IplImage* cvRetrieveFrame(CvCapture* capture, int idx) { return capture ? capture->retrieveFrame(idx) : 0; } CV_IMPL double cvGetCaptureProperty(CvCapture* capture, int id) { return capture ? capture->getProperty(id) : 0; } CV_IMPL int cvSetCaptureProperty(CvCapture* capture, int id, double value) { return capture ? capture->setProperty(id, value) : 0; } CV_IMPL int cvGetCaptureDomain(CvCapture* capture) { return capture ? capture->getCaptureDomain() : 0; }
828
601
<reponame>Harshagracy/sp-dev-fx-webparts { "$schema": "https://developer.microsoft.com/json-schemas/spfx-build/config.2.0.schema.json", "version": "2.0", "bundles": { "remote-event-receiver-manager-web-part": { "components": [ { "entrypoint": "./lib/webparts/remoteEventReceiverManager/RemoteEventReceiverManagerWebPart.js", "manifest": "./src/webparts/remoteEventReceiverManager/RemoteEventReceiverManagerWebPart.manifest.json" } ] } }, "externals": {}, "localizedResources": { "RemoteEventReceiverManagerWebPartStrings": "lib/webparts/remoteEventReceiverManager/loc/{locale}.js" } }
275
776
package act.xio.undertow; /*- * #%L * ACT Framework * %% * Copyright (C) 2014 - 2017 ActFramework * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import act.ActResponse; import act.app.ActionContext; import io.undertow.io.*; import io.undertow.server.BlockingHttpExchange; import io.undertow.server.HttpServerExchange; import io.undertow.util.AttachmentKey; import org.osgl.http.H; import org.osgl.util.WriterOutputStream; import java.io.InputStream; import java.io.OutputStream; public class ActBlockingExchange implements BlockingHttpExchange { public static final AttachmentKey<ActionContext> KEY_APP_CTX = AttachmentKey.create(ActionContext.class); private InputStream inputStream; private OutputStream outputStream; private final HttpServerExchange exchange; public ActBlockingExchange(HttpServerExchange exchange, ActionContext context) { this.exchange = exchange; exchange.putAttachment(KEY_APP_CTX, context); } @Override public InputStream getInputStream() { if (inputStream == null) { inputStream = new UndertowInputStream(exchange); } return inputStream; } @Override public OutputStream getOutputStream() { if (outputStream == null) { outputStream = new UndertowOutputStream(exchange); } return outputStream; } @Override public Sender getSender() { H.Response response = ctx().resp(); if (response.writerCreated()) { return new BlockingSenderImpl(exchange, new WriterOutputStream(response.writer())); } else { return new BlockingSenderImpl(exchange, response.outputStream()); } } @Override public Receiver getReceiver() { return new BlockingReceiverImpl(this.exchange, this.getInputStream()); } @Override public void close() { ActionContext ctx = ctx(); if (!exchange.isComplete()) { try { UndertowRequest req = (UndertowRequest) ctx.req(); req.closeAndDrainRequest(); } finally { ActResponse resp = ctx.resp(); resp.close(); } } else { try { UndertowRequest req = (UndertowRequest) ctx.req(); req.freeResources(); } finally { UndertowResponse resp = (UndertowResponse) ctx.resp(); resp.freeResources(); } } } private ActionContext ctx() { return exchange.getAttachment(KEY_APP_CTX); } }
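// A minimal sketch (an assumption, not taken from the ActFramework sources) of installing
// this exchange so that blocking streams become available;
// HttpServerExchange.startBlocking(BlockingHttpExchange) is Undertow's standard hook for
// swapping in a custom blocking exchange.
import io.undertow.server.HttpServerExchange;
import act.app.ActionContext;

class BlockingInstallSketch {
    static void makeBlocking(HttpServerExchange exchange, ActionContext context) {
        exchange.startBlocking(new ActBlockingExchange(exchange, context));
        // From here on, exchange.getInputStream() / exchange.getOutputStream() are served
        // by the UndertowInputStream / UndertowOutputStream wrappers used above.
    }
}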
1,218
619
package com.yyquan.jzh.xmpp;

import android.annotation.TargetApi;
import android.app.ActivityManager;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.AsyncQueryHandler;
import android.content.BroadcastReceiver;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.os.Build;
import android.util.Log;

import com.google.gson.Gson;
import com.yyquan.jzh.R;
import com.yyquan.jzh.activity.ChatActivity;
import com.yyquan.jzh.entity.User;
import com.yyquan.jzh.entity.XmppChat;
import com.yyquan.jzh.entity.XmppFriend;
import com.yyquan.jzh.entity.XmppMessage;
import com.yyquan.jzh.entity.XmppUser;
import com.yyquan.jzh.util.SharedPreferencesUtil;
import com.yyquan.jzh.util.TimeUtil;

import java.util.HashMap;
import java.util.List;

/**
 * Created by jzh on 2016/1/8.
 */
public class XmppReceiver extends BroadcastReceiver {

    updateActivity ua = null;
    public NotificationManager manager = null;

    public XmppReceiver(updateActivity ua) {
        this.ua = ua;
    }

    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @Override
    public void onReceive(Context context, Intent intent) {
        String type = intent.getStringExtra("type");
        if (type.equals("chat")) {
            XmppChat xc = (XmppChat) intent.getSerializableExtra("chat");
            if (ChatActivity.ca != null) {
                // update the message on the chat screen
                Log.i("xmpppppp", ChatActivity.xf.getUser().getUser() + "\t" + xc.getNickname());
                if (ChatActivity.xf.getUser().getUser().equals(xc.getUser())) {
                    ua.update(xc);
                }
                chatDatas(xc.getMain(), xc.getUser(), xc.getToo(), xc.getContent());
            } else {
                int num = chatData(xc.getMain(), xc.getUser(), xc.getToo(), xc.getContent());
                if (XmppService.vibrator != null && SharedPreferencesUtil.getBoolean(context, "tishi", "zhendong", true)) {
                    XmppService.vibrator.vibrate(500);
                }
                if (!isAppOnForeground(context)) {
                    // update the message on the message-list screen
                    if (manager == null) {
                        manager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
                    }
                    Intent intent1 = new Intent(context, ChatActivity.class);
                    User users = new User();
                    users.setUser(xc.getUser());
                    users.setNickname(xc.getNickname());
                    intent1.putExtra("xmpp_friend", new XmppFriend(users));
                    PendingIntent pi = PendingIntent.getActivity(context, 0, intent1, PendingIntent.FLAG_UPDATE_CURRENT);
                    Notification notify = new Notification.Builder(context)
                            .setAutoCancel(true)
                            .setTicker("有新消息")
                            .setSmallIcon(R.mipmap.ic_icon2)
                            .setContentTitle("来自" + xc.getNickname() + "的消息")
                            .setContentText(xc.getContent())
                            .setDefaults(Notification.DEFAULT_SOUND)
                            .setWhen(System.currentTimeMillis())
                            .setNumber(num)
                            .setContentIntent(pi).build();
                    manager.notify(0, notify);
                } else {
                    if (XmppService.pool != null && SharedPreferencesUtil.getBoolean(context, "tishi", "music", true)) {
                        XmppService.pool.play(1, 1, 1, 0, 0, 1);
                    }
                }
            }
        }
        ua.update(type);
    }

    public interface updateActivity {
        public void update(String type);

        public void update(XmppChat xc);
    }

    public int chatData(final String main, final String users, final String to, final String content) {
        Cursor cursor = XmppService.resolver.query(XmppContentProvider.CONTENT_MESSAGES_URI, null, "main=? and type=?", new String[]{main, "chat"}, null);
        if (!cursor.moveToFirst()) {
            // insert
            List<XmppUser> list1 = XmppTool.getInstance().searchUsers(users);
            Log.i("XmppService_add", list1.get(0).getUserName() + "\n" + list1.get(0).getName());
            XmppMessage xm = new XmppMessage(to,
                    "chat",
                    new XmppUser(list1.get(0).getUserName(), list1.get(0).getName()),
                    TimeUtil.getDate(),
                    content,
                    1,
                    main
            );
            XmppContentProvider.add_message(xm);
            return 1;
        } else {
            // update
            int id = cursor.getInt(cursor.getColumnIndex("id"));
            int result = cursor.getInt(cursor.getColumnIndex("result"));
            ContentValues values = new ContentValues();
            values.put("content", content);
            values.put("time", TimeUtil.getDate());
            values.put("result", (result + 1));
            XmppService.resolver.update(XmppContentProvider.CONTENT_MESSAGES_URI, values, "id=?", new String[]{id + ""});
            return (result + 1);
        }
    }

    public void chatDatas(final String main, final String users, final String to, final String content) {
        Cursor cursor = XmppService.resolver.query(XmppContentProvider.CONTENT_MESSAGES_URI, null, "main=? and type=?", new String[]{main, "chat"}, null);
        if (!cursor.moveToFirst()) {
            // insert
            List<XmppUser> list1 = XmppTool.getInstance().searchUsers(users);
            Log.i("XmppService_add", list1.get(0).getUserName() + "\n" + list1.get(0).getName());
            XmppMessage xm = new XmppMessage(to,
                    "chat",
                    new XmppUser(list1.get(0).getUserName(), list1.get(0).getName()),
                    TimeUtil.getDate(),
                    content,
                    0,
                    main
            );
            XmppContentProvider.add_message(xm);
        } else {
            // update
            int id = cursor.getInt(cursor.getColumnIndex("id"));
            ContentValues values = new ContentValues();
            values.put("content", content);
            values.put("time", TimeUtil.getDate());
            values.put("result", 0);
            XmppService.resolver.update(XmppContentProvider.CONTENT_MESSAGES_URI, values, "id=?", new String[]{id + ""});
        }
    }

    public boolean isAppOnForeground(Context context) {
        // Returns a list of application processes that are running on the
        // device
        ActivityManager activityManager = (ActivityManager) context.getApplicationContext().getSystemService(Context.ACTIVITY_SERVICE);
        String packageName = context.getApplicationContext().getPackageName();
        List<ActivityManager.RunningAppProcessInfo> appProcesses = activityManager
                .getRunningAppProcesses();
        if (appProcesses == null)
            return false;
        for (ActivityManager.RunningAppProcessInfo appProcess : appProcesses) {
            // The name of the process that this object is associated with.
            if (appProcess.processName.equals(packageName)
                    && appProcess.importance == ActivityManager.RunningAppProcessInfo.IMPORTANCE_FOREGROUND) {
                return true;
            }
        }
        return false;
    }
}
3,733
2,084
<reponame>petrdousa/archaius package com.netflix.config.sources; import com.netflix.config.DynamicConfiguration; import com.netflix.config.DynamicPropertyFactory; import com.netflix.config.FixedDelayPollingScheduler; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import org.junit.BeforeClass; import org.junit.Test; import static org.hamcrest.core.IsEqual.equalTo; import static org.junit.Assert.assertThat; public class TypesafeConfigurationSourceTest { private static DynamicPropertyFactory props; @BeforeClass public static void setup() { props = props(); } @Test public void topLevel() { assertThat(props.getIntProperty("top", -1).get(), equalTo(3)); } @Test public void topLevelDotted() { assertThat(props.getStringProperty("nest.dotted", "n/a").get(), equalTo("wxy")); } @Test public void variableSubstitution() { assertThat(props.getStringProperty("top-var", "n/a").get(), equalTo("3.14")); } @Test public void simpleNested() { assertThat(props.getIntProperty("nest.nested", -1).get(), equalTo(7)); } @Test public void nestedMap() { assertThat(props.getStringProperty("nest.nested-map.inner", "n/a").get(), equalTo("abc")); } @Test public void willNotClobberWhenExpandingArrays() { assertThat(props.getIntProperty("an-unexpanded-array.length", -1).get(), equalTo(13)); assertThat(props.getIntProperty("an-expanded-array.length", -1).get(), equalTo(7)); assertThat(props.getStringProperty("an-unexpanded-array[0]", "n/a").get(), equalTo("n/a")); assertThat(props.getStringProperty("an-expanded-array[4]", "n/a").get(), equalTo("e")); } @Test public void nestedIntegerArray() { assertThat(props.getIntProperty("nest.nested-list.length", -1).get(), equalTo(4)); assertThat(props.getIntProperty("nest.nested-list[0]", -1).get(), equalTo(3)); assertThat(props.getIntProperty("nest.nested-list[1]", -1).get(), equalTo(5)); assertThat(props.getIntProperty("nest.nested-list[2]", -1).get(), equalTo(7)); assertThat(props.getIntProperty("nest.nested-list[3]", -1).get(), equalTo(11)); } @Test public void nestedStringArray() { assertThat(props.getIntProperty("arrays.nesting.nested.length", -1).get(), equalTo(3)); assertThat(props.getStringProperty("arrays.nesting.nested[0]", "n/a").get(), equalTo("abc")); assertThat(props.getStringProperty("arrays.nesting.nested[1]", "n/a").get(), equalTo("def")); assertThat(props.getStringProperty("arrays.nesting.nested[2]", "n/a").get(), equalTo("ghi")); } private static DynamicPropertyFactory props() { FixedDelayPollingScheduler scheduler = new FixedDelayPollingScheduler(0, 10, false); DynamicConfiguration configuration = new DynamicConfiguration(source(), scheduler); DynamicPropertyFactory.initWithConfigurationSource(configuration); return DynamicPropertyFactory.getInstance(); } private static TypesafeConfigurationSource source() { return new TypesafeConfigurationSource() { @Override protected Config config() { return ConfigFactory.load("reference-test"); } }; } }
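// For orientation only: a hedged reconstruction of the kind of HOCON the "reference-test"
// resource would need for the assertions above to pass, parsed inline with the real
// ConfigFactory.parseString/resolve APIs. The exact resource content is an assumption.
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

class ReferenceTestConfigSketch {
    static Config config() {
        return ConfigFactory.parseString(
                "top = 3\n"
              + "top-var = ${top}\".14\"\n"          // value concatenation -> "3.14"
              + "nest {\n"
              + "  dotted = wxy\n"
              + "  nested = 7\n"
              + "  nested-map { inner = abc }\n"
              + "  nested-list = [3, 5, 7, 11]\n"
              + "}\n"
              + "arrays.nesting.nested = [abc, def, ghi]\n"
        ).resolve();  // resolve() expands the ${top} substitution
    }
}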
1,306
2,338
// RUN: %check_clang_tidy %s hicpp-no-assembler %t __asm__(".symver foo, bar@v"); // CHECK-MESSAGES: :[[@LINE-1]]:1: warning: do not use inline assembler in safety-critical code [hicpp-no-assembler] static int s asm("spam"); // CHECK-MESSAGES: :[[@LINE-1]]:12: warning: do not use inline assembler in safety-critical code [hicpp-no-assembler] void f() { __asm("mov al, 2"); // CHECK-MESSAGES: :[[@LINE-1]]:3: warning: do not use inline assembler in safety-critical code [hicpp-no-assembler] }
199
2,453
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 30 2020 21:18:12). // // Copyright (C) 1997-2019 <NAME>. // #import <IDEKit/IDEGeneratedInterfaceBasedGeniusResultsFinder.h> @class NSMutableArray; @interface IDESwiftGeneratedInterfaceGeniusResultsFinder : IDEGeneratedInterfaceBasedGeniusResultsFinder { NSMutableArray *_orderedNavItemResults; } - (void).cxx_destruct; - (void)_updateGeniusResults; - (id)_geniusResultsForOrderedItems; @end
169
387
class DeserializationError(Exception): pass
14
2,053
void sip_header_param_test(void); void sip_uri_parse_test(void); void sip_uri_equal_test(void); void sip_header_contact_test(void); void sip_header_via_test(void); void sip_header_cseq_test(void); void sip_header_substate_test(void); #if defined(_DEBUG) || defined(DEBUG) void sip_header_test(void) { sip_header_param_test(); sip_uri_parse_test(); sip_uri_equal_test(); sip_header_contact_test(); sip_header_via_test(); sip_header_cseq_test(); sip_header_substate_test(); } #endif
195
1,711
<gh_stars>1000+ #!/usr/bin/env python # Copyright 2015-2016 Yelp Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ PaaSTA service list (instances) etc. """ from pyramid.response import Response from pyramid.view import view_config from paasta_tools.api import settings from paasta_tools.api.views.exception import ApiFailure from paasta_tools.cli.utils import get_instance_config from paasta_tools.kubernetes_tools import KubernetesDeploymentConfig from paasta_tools.marathon_tools import MarathonServiceConfig @view_config(route_name="service.autoscaler.get", request_method="GET", renderer="json") def get_autoscaler_count(request): service = request.swagger_data.get("service") instance = request.swagger_data.get("instance") cluster = settings.cluster soa_dir = settings.soa_dir instance_config = get_instance_config(service, instance, cluster, soa_dir) if not isinstance( instance_config, (KubernetesDeploymentConfig, MarathonServiceConfig) ): error_message = ( f"Autoscaling is not supported for {service}.{instance} because instance type is not " f"marathon or kubernetes." ) raise ApiFailure(error_message, 501) response_body = { "desired_instances": instance_config.get_instances(), "calculated_instances": instance_config.get_instances(with_limit=False), } return Response(json_body=response_body, status_code=200) @view_config( route_name="service.autoscaler.post", request_method="POST", renderer="json" ) def update_autoscaler_count(request): service = request.swagger_data.get("service") instance = request.swagger_data.get("instance") cluster = settings.cluster soa_dir = settings.soa_dir desired_instances = request.swagger_data.get("json_body")["desired_instances"] if not isinstance(desired_instances, int): error_message = 'The provided body does not have an integer value for "desired_instances": {}'.format( request.swagger_data.get("json_body") ) raise ApiFailure(error_message, 500) instance_config = get_instance_config(service, instance, cluster, soa_dir, True) if not isinstance( instance_config, (KubernetesDeploymentConfig, MarathonServiceConfig) ): error_message = ( f"Autoscaling is not supported for {service}.{instance} because instance type is not " f"marathon or kubernetes." 
) raise ApiFailure(error_message, 501) max_instances = instance_config.get_max_instances() if max_instances is None: error_message = f"Autoscaling is not enabled for {service}.{instance}" raise ApiFailure(error_message, 404) min_instances = instance_config.get_min_instances() status = "SUCCESS" if desired_instances > max_instances: desired_instances = max_instances status = ( "WARNING desired_instances is greater than max_instances %d" % max_instances ) elif desired_instances < min_instances: desired_instances = min_instances status = ( "WARNING desired_instances is less than min_instances %d" % min_instances ) try: if isinstance(instance_config, KubernetesDeploymentConfig): instance_config.set_autoscaled_instances( instance_count=desired_instances, kube_client=settings.kubernetes_client ) else: instance_config.set_autoscaled_instances(instance_count=desired_instances) except Exception as err: raise ApiFailure(err, 500) response_body = {"desired_instances": desired_instances, "status": status} return Response(json_body=response_body, status_code=202)
1,541
457
package denominator.route53; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; import dagger.ObjectGraph; import denominator.Credentials.MapCredentials; import denominator.DNSApiManager; import denominator.Provider; import static denominator.CredentialsConfiguration.anonymous; import static denominator.CredentialsConfiguration.credentials; import static denominator.Denominator.create; import static denominator.Providers.list; import static denominator.Providers.provide; import static org.assertj.core.api.Assertions.assertThat; public class Route53ProviderTest { @Rule public final ExpectedException thrown = ExpectedException.none(); private static final Provider PROVIDER = new Route53Provider(); @Test public void testRoute53Metadata() { assertThat(PROVIDER.name()).isEqualTo("route53"); assertThat(PROVIDER.supportsDuplicateZoneNames()).isTrue(); assertThat(PROVIDER.credentialTypeToParameterNames()) .containsEntry("accessKey", Arrays.asList("accessKey", "secretKey")) .containsEntry("session", Arrays.asList("accessKey", "secretKey", "sessionToken")); } @Test public void testRoute53Registered() { assertThat(list()).contains(PROVIDER); } @Test public void testProviderWiresRoute53ZoneApiWithAccessKeyCredentials() { DNSApiManager manager = create(PROVIDER, credentials("accesskey", "secretkey")); assertThat(manager.api().zones()).isInstanceOf(Route53ZoneApi.class); manager = create("route53", credentials("accesskey", "secretkey")); assertThat(manager.api().zones()).isInstanceOf(Route53ZoneApi.class); Map<String, String> map = new LinkedHashMap<String, String>(); map.put("accesskey", "A"); map.put("secretkey", "S"); manager = create("route53", credentials(MapCredentials.from(map))); assertThat(manager.api().zones()).isInstanceOf(Route53ZoneApi.class); } @Test public void testProviderWiresRoute53ZoneApiWithSessionCredentials() { DNSApiManager manager = create(PROVIDER, credentials("accesskey", "secretkey", "sessionToken")); assertThat(manager.api().zones()).isInstanceOf(Route53ZoneApi.class); manager = create("route53", credentials("accesskey", "secretkey", "sessionToken")); assertThat(manager.api().zones()).isInstanceOf(Route53ZoneApi.class); } @Test public void testCredentialsRequired() { thrown.expect(IllegalArgumentException.class); thrown.expectMessage( "no credentials supplied. route53 requires one of the following forms: when type is accessKey: accessKey,secretKey; session: accessKey,secretKey,sessionToken"); // manually passing anonymous in case this test is executed from EC2 // where IAM profiles are present. create(PROVIDER, anonymous()).api().zones().iterator(); } @Test public void testViaDagger() { DNSApiManager manager = ObjectGraph .create(provide(new Route53Provider()), new Route53Provider.Module(), credentials("accesskey", "secretkey")) .get(DNSApiManager.class); assertThat(manager.api().zones()).isInstanceOf(Route53ZoneApi.class); } }
1,038
4,772
package example.service; import example.repo.Customer1643Repository; import org.springframework.stereotype.Service; @Service public class Customer1643Service { public Customer1643Service(Customer1643Repository repo) {} }
64
377
<filename>dmlab2d/lib/system/generators/pushbox/room.cc // Copyright (C) 2020 The DMLab2D Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "dmlab2d/lib/system/generators/pushbox/room.h" #include <algorithm> #include <iterator> #include <memory> #include <string> #include "absl/strings/str_cat.h" #include "dmlab2d/lib/support/logging.h" #include "dmlab2d/lib/system/generators/pushbox/constants.h" namespace deepmind::lab2d::pushbox { namespace { // Returns a char to represent a room's tile based on its type and content. char GetTileChar(const TileType& tile, bool contains_box, bool contains_player) { if (contains_player) { return room::kPlayerChar; } else if (contains_box) { return tile == TileType::kTarget ? room::kBoxTagetChar : room::kBoxChar; } else { // The tile is empty, so it can be a wall '*', floor ' ' or target 'X'. switch (tile) { case TileType::kWall: return room::kWallChar; break; case TileType::kFloor: return room::kFloorChar; break; case TileType::kTarget: return room::kTagetChar; break; } } LOG(FATAL) << "Unexpected tile type."; } } // namespace Room::Room(int width, int height, absl::Span<const TileType> topology, absl::Span<const std::uint64_t> zobrist_bitstrings) : width_(width), height_(height), cell_count_(width * height), topology_(topology), zobrist_bitstrings_(zobrist_bitstrings), zobrist_hash_(0), player_({}), num_actions_(0), last_box_index_(-1), moved_box_changes_(0), room_score_(0) { DCHECK_EQ(topology_.size(), width_ * height_); DCHECK_EQ(zobrist_bitstrings.size(), width_ * height_ * 2); zobrist_hash_ ^= zobrist_bitstrings_[0]; } std::string Room::ToString() const { std::string room_string; room_string.reserve(topology_.size() + height_); for (int tile_idx = 0; tile_idx < topology_.size(); tile_idx++) { if (tile_idx > 0 && tile_idx % width_ == 0) absl::StrAppend(&room_string, "\n"); math::Vector2d position{tile_idx % width_, tile_idx / width_}; bool contains_player = ContainsPlayer(position); bool contains_box = ContainsBox(position); room_string.push_back( GetTileChar(topology_[tile_idx], contains_box, contains_player)); } return room_string; } void Room::ZobristAddOrRemovePiece(const math::Vector2d& position, EntityLayer layer) { int location = position.x + position.y * width_; zobrist_hash_ ^= zobrist_bitstrings_[location + static_cast<int>(layer) * cell_count_]; } TileType Room::GetTileType(const math::Vector2d& position) const { return topology_[position.x + position.y * width_]; } bool Room::IsWall(const math::Vector2d& position) const { return GetTileType(position) == TileType::kWall; } bool Room::IsFloor(const math::Vector2d& position) const { return GetTileType(position) == TileType::kFloor; } bool Room::IsTarget(const math::Vector2d& position) const { return GetTileType(position) == TileType::kTarget; } bool Room::IsEmpty(const math::Vector2d& position) const { return !(ContainsPlayer(position) || ContainsBox(position)); } bool Room::ContainsPlayer(const math::Vector2d& position) const { return player_.position() == position; } bool 
Room::ContainsBox(const math::Vector2d& position) const { return std::any_of(boxes_.begin(), boxes_.end(), [position](const Box& box) { return box.position() == position; }); } void Room::SetPlayerPosition(const math::Vector2d& position) { DCHECK(player_.position() == position || IsEmpty(position)); ZobristAddOrRemovePiece(player_.position(), EntityLayer::kPlayer); ZobristAddOrRemovePiece(position, EntityLayer::kPlayer); player_.set_position(position); } void Room::AddBox(const math::Vector2d& position) { DCHECK(IsEmpty(position)); ZobristAddOrRemovePiece(position, EntityLayer::kBox); boxes_.push_back(Box(position)); } void Room::ApplyAction(const Action& action) { // Move the player to its new position. const auto initial_player_position = player_.position(); ApplyPlayerAction(initial_player_position, action); // If pulling, move the box to the original player position. if (action.pull) { const auto& box_position = initial_player_position - action.direction; MoveBox(box_position, action.direction); } ++num_actions_; } float Room::ComputeScore() { room_score_ = 0; // Avoid generating a room with the player or a box lying on a target. if (PlayerOnTarget() || BoxOnTarget()) { return room_score_; } float total_displacement = 0; for (const auto& box : boxes_) { total_displacement += box.Displacement(); } room_score_ = moved_box_changes_ * total_displacement; return room_score_; } // Returns whether there is any box on top of any target. bool Room::BoxOnTarget() { for (const auto& box : boxes_) if (IsTarget(box.position())) return true; return false; } void Room::ApplyPlayerAction(const math::Vector2d& origin, const Action& action) { const auto& target = origin + action.direction; DCHECK(IsEmpty(target) && ContainsPlayer(origin)); // Change the position of the player. SetPlayerPosition(target); } void Room::MoveBox(const math::Vector2d& origin, const math::Vector2d& direction) { const auto& target = origin + direction; DCHECK(IsEmpty(target) && ContainsBox(origin)); auto it = std::find_if( boxes_.begin(), boxes_.end(), [origin](const Box& box) { return box.position() == origin; }); CHECK(it != boxes_.end()); it->set_position(target); it->AddMove(); int box_idx = std::distance(boxes_.begin(), it); if (last_box_index_ != box_idx) { last_box_index_ = box_idx; ++moved_box_changes_; } ZobristAddOrRemovePiece(origin, EntityLayer::kBox); ZobristAddOrRemovePiece(target, EntityLayer::kBox); } } // namespace deepmind::lab2d::pushbox
2,359
343
from .base import * # NOQA DEBUG = False SIGNATURES_GPG_PATH = '/usr/bin/gpg1'
35
497
<filename>app/src/main/java/us/pinguo/shareelementdemo/contacts/Contacts.java package us.pinguo.shareelementdemo.contacts; import android.os.Parcel; import android.os.Parcelable; /** * Created by huangwei on 2018/10/12. */ public class Contacts implements Parcelable{ public String name; public int avatarRes; public String desc; public Contacts(String name, int avatarRes, String desc) { this.name = name; this.avatarRes = avatarRes; this.desc = desc; } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { dest.writeString(this.name); dest.writeInt(this.avatarRes); dest.writeString(this.desc); } protected Contacts(Parcel in) { this.name = in.readString(); this.avatarRes = in.readInt(); this.desc = in.readString(); } public static final Creator<Contacts> CREATOR = new Creator<Contacts>() { @Override public Contacts createFromParcel(Parcel source) { return new Contacts(source); } @Override public Contacts[] newArray(int size) { return new Contacts[size]; } }; }
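// Hypothetical round-trip sketch (not part of the original file) showing how the
// Parcelable plumbing above would typically be exercised; the sample values are
// illustrative assumptions, and android.R.drawable.sym_def_app_icon just stands in
// for a real avatar resource.
class ContactsParcelExample {
    static Contacts roundTrip() {
        Contacts original = new Contacts("Alice", android.R.drawable.sym_def_app_icon, "a friend");
        android.os.Parcel parcel = android.os.Parcel.obtain();
        original.writeToParcel(parcel, 0);
        parcel.setDataPosition(0); // rewind before reading back
        Contacts copy = Contacts.CREATOR.createFromParcel(parcel);
        parcel.recycle();
        return copy;
    }
}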
523
19,127
<reponame>pmesnier/openssl<filename>include/openssl/store.h /* * Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ #ifndef OPENSSL_STORE_H # define OPENSSL_STORE_H # pragma once # include <openssl/macros.h> # ifndef OPENSSL_NO_DEPRECATED_3_0 # define HEADER_OSSL_STORE_H # endif # include <stdarg.h> # include <openssl/types.h> # include <openssl/pem.h> # include <openssl/storeerr.h> # ifdef __cplusplus extern "C" { # endif /*- * The main OSSL_STORE functions. * ------------------------------ * * These allow applications to open a channel to a resource with supported * data (keys, certs, crls, ...), read the data a piece at a time and decide * what to do with it, and finally close. */ typedef struct ossl_store_ctx_st OSSL_STORE_CTX; /* * Typedef for the OSSL_STORE_INFO post processing callback. This can be used * to massage the given OSSL_STORE_INFO, or to drop it entirely (by returning * NULL). */ typedef OSSL_STORE_INFO *(*OSSL_STORE_post_process_info_fn)(OSSL_STORE_INFO *, void *); /* * Open a channel given a URI. The given UI method will be used any time the * loader needs extra input, for example when a password or pin is needed, and * will be passed the same user data every time it's needed in this context. * * Returns a context reference which represents the channel to communicate * through. */ OSSL_STORE_CTX * OSSL_STORE_open(const char *uri, const UI_METHOD *ui_method, void *ui_data, OSSL_STORE_post_process_info_fn post_process, void *post_process_data); OSSL_STORE_CTX * OSSL_STORE_open_ex(const char *uri, OSSL_LIB_CTX *libctx, const char *propq, const UI_METHOD *ui_method, void *ui_data, const OSSL_PARAM params[], OSSL_STORE_post_process_info_fn post_process, void *post_process_data); /* * Control / fine tune the OSSL_STORE channel. |cmd| determines what is to be * done, and depends on the underlying loader (use OSSL_STORE_get0_scheme to * determine which loader is used), except for common commands (see below). * Each command takes different arguments. */ # ifndef OPENSSL_NO_DEPRECATED_3_0 OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_ctrl(OSSL_STORE_CTX *ctx, int cmd, ... /* args */); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_vctrl(OSSL_STORE_CTX *ctx, int cmd, va_list args); # endif # ifndef OPENSSL_NO_DEPRECATED_3_0 /* * Common ctrl commands that different loaders may choose to support. */ /* int on = 0 or 1; STORE_ctrl(ctx, STORE_C_USE_SECMEM, &on); */ # define OSSL_STORE_C_USE_SECMEM 1 /* Where custom commands start */ # define OSSL_STORE_C_CUSTOM_START 100 # endif /* * Read one data item (a key, a cert, a CRL) that is supported by the OSSL_STORE * functionality, given a context. * Returns a OSSL_STORE_INFO pointer, from which OpenSSL typed data can be * extracted with OSSL_STORE_INFO_get0_PKEY(), OSSL_STORE_INFO_get0_CERT(), ... * NULL is returned on error, which may include that the data found at the URI * can't be figured out for certain or is ambiguous. */ OSSL_STORE_INFO *OSSL_STORE_load(OSSL_STORE_CTX *ctx); /* * Check if end of data (end of file) is reached * Returns 1 on end, 0 otherwise. */ int OSSL_STORE_eof(OSSL_STORE_CTX *ctx); /* * Check if an error occurred * Returns 1 if it did, 0 otherwise. */ int OSSL_STORE_error(OSSL_STORE_CTX *ctx); /* * Close the channel * Returns 1 on success, 0 on error. 
*/ int OSSL_STORE_close(OSSL_STORE_CTX *ctx); /* * Attach to a BIO. This works like OSSL_STORE_open() except it takes a * BIO instead of a uri, along with a scheme to use when reading. * The given UI method will be used any time the loader needs extra input, * for example when a password or pin is needed, and will be passed the * same user data every time it's needed in this context. * * Returns a context reference which represents the channel to communicate * through. * * Note that this function is considered unsafe, all depending on what the * BIO actually reads. */ OSSL_STORE_CTX *OSSL_STORE_attach(BIO *bio, const char *scheme, OSSL_LIB_CTX *libctx, const char *propq, const UI_METHOD *ui_method, void *ui_data, const OSSL_PARAM params[], OSSL_STORE_post_process_info_fn post_process, void *post_process_data); /*- * Extracting OpenSSL types from and creating new OSSL_STORE_INFOs * --------------------------------------------------------------- */ /* * Types of data that can be ossl_stored in a OSSL_STORE_INFO. * OSSL_STORE_INFO_NAME is typically found when getting a listing of * available "files" / "tokens" / what have you. */ # define OSSL_STORE_INFO_NAME 1 /* char * */ # define OSSL_STORE_INFO_PARAMS 2 /* EVP_PKEY * */ # define OSSL_STORE_INFO_PUBKEY 3 /* EVP_PKEY * */ # define OSSL_STORE_INFO_PKEY 4 /* EVP_PKEY * */ # define OSSL_STORE_INFO_CERT 5 /* X509 * */ # define OSSL_STORE_INFO_CRL 6 /* X509_CRL * */ /* * Functions to generate OSSL_STORE_INFOs, one function for each type we * support having in them, as well as a generic constructor. * * In all cases, ownership of the object is transferred to the OSSL_STORE_INFO * and will therefore be freed when the OSSL_STORE_INFO is freed. */ OSSL_STORE_INFO *OSSL_STORE_INFO_new(int type, void *data); OSSL_STORE_INFO *OSSL_STORE_INFO_new_NAME(char *name); int OSSL_STORE_INFO_set0_NAME_description(OSSL_STORE_INFO *info, char *desc); OSSL_STORE_INFO *OSSL_STORE_INFO_new_PARAMS(EVP_PKEY *params); OSSL_STORE_INFO *OSSL_STORE_INFO_new_PUBKEY(EVP_PKEY *pubkey); OSSL_STORE_INFO *OSSL_STORE_INFO_new_PKEY(EVP_PKEY *pkey); OSSL_STORE_INFO *OSSL_STORE_INFO_new_CERT(X509 *x509); OSSL_STORE_INFO *OSSL_STORE_INFO_new_CRL(X509_CRL *crl); /* * Functions to try to extract data from a OSSL_STORE_INFO. 
*/ int OSSL_STORE_INFO_get_type(const OSSL_STORE_INFO *info); void *OSSL_STORE_INFO_get0_data(int type, const OSSL_STORE_INFO *info); const char *OSSL_STORE_INFO_get0_NAME(const OSSL_STORE_INFO *info); char *OSSL_STORE_INFO_get1_NAME(const OSSL_STORE_INFO *info); const char *OSSL_STORE_INFO_get0_NAME_description(const OSSL_STORE_INFO *info); char *OSSL_STORE_INFO_get1_NAME_description(const OSSL_STORE_INFO *info); EVP_PKEY *OSSL_STORE_INFO_get0_PARAMS(const OSSL_STORE_INFO *info); EVP_PKEY *OSSL_STORE_INFO_get1_PARAMS(const OSSL_STORE_INFO *info); EVP_PKEY *OSSL_STORE_INFO_get0_PUBKEY(const OSSL_STORE_INFO *info); EVP_PKEY *OSSL_STORE_INFO_get1_PUBKEY(const OSSL_STORE_INFO *info); EVP_PKEY *OSSL_STORE_INFO_get0_PKEY(const OSSL_STORE_INFO *info); EVP_PKEY *OSSL_STORE_INFO_get1_PKEY(const OSSL_STORE_INFO *info); X509 *OSSL_STORE_INFO_get0_CERT(const OSSL_STORE_INFO *info); X509 *OSSL_STORE_INFO_get1_CERT(const OSSL_STORE_INFO *info); X509_CRL *OSSL_STORE_INFO_get0_CRL(const OSSL_STORE_INFO *info); X509_CRL *OSSL_STORE_INFO_get1_CRL(const OSSL_STORE_INFO *info); const char *OSSL_STORE_INFO_type_string(int type); /* * Free the OSSL_STORE_INFO */ void OSSL_STORE_INFO_free(OSSL_STORE_INFO *info); /*- * Functions to construct a search URI from a base URI and search criteria * ----------------------------------------------------------------------- */ /* OSSL_STORE search types */ # define OSSL_STORE_SEARCH_BY_NAME 1 /* subject in certs, issuer in CRLs */ # define OSSL_STORE_SEARCH_BY_ISSUER_SERIAL 2 # define OSSL_STORE_SEARCH_BY_KEY_FINGERPRINT 3 # define OSSL_STORE_SEARCH_BY_ALIAS 4 /* To check what search types the scheme handler supports */ int OSSL_STORE_supports_search(OSSL_STORE_CTX *ctx, int search_type); /* Search term constructors */ /* * The input is considered to be owned by the caller, and must therefore * remain present throughout the lifetime of the returned OSSL_STORE_SEARCH */ OSSL_STORE_SEARCH *OSSL_STORE_SEARCH_by_name(X509_NAME *name); OSSL_STORE_SEARCH *OSSL_STORE_SEARCH_by_issuer_serial(X509_NAME *name, const ASN1_INTEGER *serial); OSSL_STORE_SEARCH *OSSL_STORE_SEARCH_by_key_fingerprint(const EVP_MD *digest, const unsigned char *bytes, size_t len); OSSL_STORE_SEARCH *OSSL_STORE_SEARCH_by_alias(const char *alias); /* Search term destructor */ void OSSL_STORE_SEARCH_free(OSSL_STORE_SEARCH *search); /* Search term accessors */ int OSSL_STORE_SEARCH_get_type(const OSSL_STORE_SEARCH *criterion); X509_NAME *OSSL_STORE_SEARCH_get0_name(const OSSL_STORE_SEARCH *criterion); const ASN1_INTEGER *OSSL_STORE_SEARCH_get0_serial(const OSSL_STORE_SEARCH *criterion); const unsigned char *OSSL_STORE_SEARCH_get0_bytes(const OSSL_STORE_SEARCH *criterion, size_t *length); const char *OSSL_STORE_SEARCH_get0_string(const OSSL_STORE_SEARCH *criterion); const EVP_MD *OSSL_STORE_SEARCH_get0_digest(const OSSL_STORE_SEARCH *criterion); /* * Add search criterion and expected return type (which can be unspecified) * to the loading channel. This MUST happen before the first OSSL_STORE_load(). 
*/ int OSSL_STORE_expect(OSSL_STORE_CTX *ctx, int expected_type); int OSSL_STORE_find(OSSL_STORE_CTX *ctx, const OSSL_STORE_SEARCH *search); /*- * Function to fetch a loader and extract data from it * --------------------------------------------------- */ typedef struct ossl_store_loader_st OSSL_STORE_LOADER; OSSL_STORE_LOADER *OSSL_STORE_LOADER_fetch(OSSL_LIB_CTX *libctx, const char *scheme, const char *properties); int OSSL_STORE_LOADER_up_ref(OSSL_STORE_LOADER *loader); void OSSL_STORE_LOADER_free(OSSL_STORE_LOADER *loader); const OSSL_PROVIDER *OSSL_STORE_LOADER_get0_provider(const OSSL_STORE_LOADER * loader); const char *OSSL_STORE_LOADER_get0_properties(const OSSL_STORE_LOADER *loader); const char *OSSL_STORE_LOADER_get0_description(const OSSL_STORE_LOADER *loader); int OSSL_STORE_LOADER_is_a(const OSSL_STORE_LOADER *loader, const char *scheme); void OSSL_STORE_LOADER_do_all_provided(OSSL_LIB_CTX *libctx, void (*fn)(OSSL_STORE_LOADER *loader, void *arg), void *arg); int OSSL_STORE_LOADER_names_do_all(const OSSL_STORE_LOADER *loader, void (*fn)(const char *name, void *data), void *data); /*- * Function to register a loader for the given URI scheme. * ------------------------------------------------------- * * The loader receives all the main components of an URI except for the * scheme. */ # ifndef OPENSSL_NO_DEPRECATED_3_0 /* struct ossl_store_loader_ctx_st is defined differently by each loader */ typedef struct ossl_store_loader_ctx_st OSSL_STORE_LOADER_CTX; typedef OSSL_STORE_LOADER_CTX *(*OSSL_STORE_open_fn) (const OSSL_STORE_LOADER *loader, const char *uri, const UI_METHOD *ui_method, void *ui_data); typedef OSSL_STORE_LOADER_CTX *(*OSSL_STORE_open_ex_fn) (const OSSL_STORE_LOADER *loader, const char *uri, OSSL_LIB_CTX *libctx, const char *propq, const UI_METHOD *ui_method, void *ui_data); typedef OSSL_STORE_LOADER_CTX *(*OSSL_STORE_attach_fn) (const OSSL_STORE_LOADER *loader, BIO *bio, OSSL_LIB_CTX *libctx, const char *propq, const UI_METHOD *ui_method, void *ui_data); typedef int (*OSSL_STORE_ctrl_fn) (OSSL_STORE_LOADER_CTX *ctx, int cmd, va_list args); typedef int (*OSSL_STORE_expect_fn) (OSSL_STORE_LOADER_CTX *ctx, int expected); typedef int (*OSSL_STORE_find_fn) (OSSL_STORE_LOADER_CTX *ctx, const OSSL_STORE_SEARCH *criteria); typedef OSSL_STORE_INFO *(*OSSL_STORE_load_fn) (OSSL_STORE_LOADER_CTX *ctx, const UI_METHOD *ui_method, void *ui_data); typedef int (*OSSL_STORE_eof_fn)(OSSL_STORE_LOADER_CTX *ctx); typedef int (*OSSL_STORE_error_fn)(OSSL_STORE_LOADER_CTX *ctx); typedef int (*OSSL_STORE_close_fn)(OSSL_STORE_LOADER_CTX *ctx); # endif # ifndef OPENSSL_NO_DEPRECATED_3_0 OSSL_DEPRECATEDIN_3_0 OSSL_STORE_LOADER *OSSL_STORE_LOADER_new(ENGINE *e, const char *scheme); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_open(OSSL_STORE_LOADER *loader, OSSL_STORE_open_fn open_function); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_open_ex(OSSL_STORE_LOADER *loader, OSSL_STORE_open_ex_fn open_ex_function); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_attach(OSSL_STORE_LOADER *loader, OSSL_STORE_attach_fn attach_function); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_ctrl(OSSL_STORE_LOADER *loader, OSSL_STORE_ctrl_fn ctrl_function); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_expect(OSSL_STORE_LOADER *loader, OSSL_STORE_expect_fn expect_function); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_find(OSSL_STORE_LOADER *loader, OSSL_STORE_find_fn find_function); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_load(OSSL_STORE_LOADER *loader, OSSL_STORE_load_fn load_function); 
OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_eof(OSSL_STORE_LOADER *loader, OSSL_STORE_eof_fn eof_function); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_error(OSSL_STORE_LOADER *loader, OSSL_STORE_error_fn error_function); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_LOADER_set_close(OSSL_STORE_LOADER *loader, OSSL_STORE_close_fn close_function); OSSL_DEPRECATEDIN_3_0 const ENGINE *OSSL_STORE_LOADER_get0_engine(const OSSL_STORE_LOADER *loader); OSSL_DEPRECATEDIN_3_0 const char * OSSL_STORE_LOADER_get0_scheme(const OSSL_STORE_LOADER *loader); OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_register_loader(OSSL_STORE_LOADER *loader); OSSL_DEPRECATEDIN_3_0 OSSL_STORE_LOADER *OSSL_STORE_unregister_loader(const char *scheme); # endif /*- * Functions to list STORE loaders * ------------------------------- */ # ifndef OPENSSL_NO_DEPRECATED_3_0 OSSL_DEPRECATEDIN_3_0 int OSSL_STORE_do_all_loaders(void (*do_function)(const OSSL_STORE_LOADER *loader, void *do_arg), void *do_arg); # endif # ifdef __cplusplus } # endif #endif
6,795
3,411
<reponame>fratik/JDA /* * Copyright 2015 <NAME>, <NAME>, <NAME>, and the JDA contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.dv8tion.jda.api.entities; import net.dv8tion.jda.api.JDA; import net.dv8tion.jda.api.managers.WebhookManager; import net.dv8tion.jda.api.requests.RestAction; import net.dv8tion.jda.api.requests.restaction.AuditableRestAction; import net.dv8tion.jda.internal.requests.RestActionImpl; import net.dv8tion.jda.internal.requests.Route; import javax.annotation.CheckReturnValue; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.regex.Pattern; /** * An object representing Webhooks in Discord * * @since 3.0 * * @see TextChannel#retrieveWebhooks() * @see Guild#retrieveWebhooks() * @see JDA#retrieveWebhookById(String) */ public interface Webhook extends ISnowflake { /** * Pattern for a Webhook URL. * * <h2>Groups</h2> * <table> * <caption style="display: none">Javadoc is stupid, this is not a required tag</caption> * <tr> * <th>Index</th> * <th>Name</th> * <th>Description</th> * </tr> * <tr> * <td>0</td> * <td>N/A</td> * <td>The entire link</td> * </tr> * <tr> * <td>1</td> * <td>id</td> * <td>The ID of the webhook</td> * </tr> * <tr> * <td>2</td> * <td>token</td> * <td>The token of the webhook</td> * </tr> * </table> * * You can use the names with {@link java.util.regex.Matcher#group(String) Matcher.group(String)} * and the index with {@link java.util.regex.Matcher#group(int) Matcher.group(int)}. */ Pattern WEBHOOK_URL = Pattern.compile("https?://(?:[^\\s.]+\\.)?discord(?:app)?\\.com/api(?:/v\\d+)?/webhooks/(?<id>\\d+)/(?<token>[^\\s/]+)", Pattern.CASE_INSENSITIVE); /** * The JDA instance of this Webhook. * * @return The current JDA instance of this Webhook */ @Nonnull JDA getJDA(); /** * The {@link WebhookType} of this webhook. * <br>Webhooks of type {@link WebhookType#FOLLOWER} don't have a token. * * @return The {@link WebhookType} */ @Nonnull WebhookType getType(); /** * Whether this webhook cannot provide {@link #getChannel()} and {@link #getGuild()}. * <br>This means that the webhook is not local to this shard's cache and cannot provide full channel/guild references. * * @return True, if {@link #getChannel()} and {@link #getGuild()} would throw */ boolean isPartial(); /** * The {@link net.dv8tion.jda.api.entities.Guild Guild} instance * for this Webhook. * <br>This is a shortcut for <code>{@link #getChannel()}.getGuild()</code>. * * @throws IllegalStateException * If this webhooks {@link #isPartial() is partial} * * @return The current Guild of this Webhook */ @Nonnull Guild getGuild(); /** * The {@link net.dv8tion.jda.api.entities.TextChannel TextChannel} instance * this Webhook is attached to. * * @throws IllegalStateException * If this webhooks {@link #isPartial() is partial} * * @return The current TextChannel of this Webhook */ @Nonnull TextChannel getChannel(); /** * The owner of this Webhook. This will be null for some Webhooks, such as those retrieved from Audit Logs. * <br>This requires the member to be cached. 
You can use {@link #getOwnerAsUser()} to get a reference to the user instead. * * @return Possibly-null {@link net.dv8tion.jda.api.entities.Member Member} instance * representing the owner of this Webhook. */ @Nullable Member getOwner(); /** * The owner of this Webhook. This will be null for some Webhooks, such as those retrieved from Audit Logs. * <br>This can be non-null even when {@link #getOwner()} is null. {@link #getOwner()} requires the webhook to be local to this shard and in cache. * * @return Possibly-null {@link net.dv8tion.jda.api.entities.User User} instance * representing the owner of this Webhook. */ @Nullable User getOwnerAsUser(); /** * The default User for this Webhook. * * <p>The {@link net.dv8tion.jda.api.entities.User User} returned is always fake and cannot be interacted with. * <br>This User is used for all messages posted to the Webhook route (found in {@link #getUrl()}), * it holds the default references for the message authors of messages by this Webhook. * * <p>When {@code POST}ing to a Webhook route the name/avatar of this default user * can be overridden. * * @return A fake {@link net.dv8tion.jda.api.entities.User User} instance * representing the default webhook user. * * @see <a href="https://discord.com/developers/docs/resources/webhook#execute-webhook">Execute Webhook Docs</a> */ @Nonnull User getDefaultUser(); /** * The name of this Webhook. * <br>This will be displayed by default as the author name * of every message by this Webhook. * * <p>This is a shortcut for <code>{@link #getDefaultUser()}.getName()</code>. * * @return The name of this Webhook */ @Nonnull String getName(); /** * The execute token for this Webhook. * <br>This can be used to modify/delete/execute * this Webhook. * * <p><b>Note: Some Webhooks, such as those retrieved from Audit Logs, do not contain a token</b> * * @return The execute token for this Webhook */ @Nullable String getToken(); /** * The {@code POST} route for this Webhook. * <br>This contains the {@link #getToken() token} and {@link #getId() id} * of this Webhook. Some Webhooks without tokens (such as those retrieved from Audit Logs) * will return a URL without a token. * * <p>The route returned by this method does not need permission checks * to be executed. * <br>It is implied that Webhook messages always have all permissions * including {@link net.dv8tion.jda.api.Permission#MESSAGE_MENTION_EVERYONE mentioning everyone}. * * <p>Webhook executions are limited with 5 requests per second. * The response contains rate limit headers that should be handled * by execution frameworks. (<a href="https://discord.com/developers/docs/topics/rate-limits">Learn More</a>) * * @return The execution route for this Webhook. */ @Nonnull String getUrl(); /** * The source channel for a Webhook of type {@link WebhookType#FOLLOWER FOLLOWER}. * * @return {@link ChannelReference} */ @Nullable ChannelReference getSourceChannel(); /** * The source guild for a Webhook of type {@link WebhookType#FOLLOWER FOLLOWER}. * * @return {@link GuildReference} */ @Nullable GuildReference getSourceGuild(); /** * Deletes this Webhook. 
* * <p>The following {@link net.dv8tion.jda.api.requests.ErrorResponse ErrorResponses} are possible: * <ul> * <li>{@link net.dv8tion.jda.api.requests.ErrorResponse#MISSING_ACCESS MISSING_ACCESS} * <br>The delete was attempted after the account lost permission to view the channel.</li> * * <li>{@link net.dv8tion.jda.api.requests.ErrorResponse#MISSING_PERMISSIONS MISSING_PERMISSIONS} * <br>The delete was attempted after the account lost {@link net.dv8tion.jda.api.Permission#MANAGE_WEBHOOKS Permission.MANAGE_WEBHOOKS} in * the channel.</li> * * <li>{@link net.dv8tion.jda.api.requests.ErrorResponse#UNKNOWN_WEBHOOK UNKNOWN_WEBHOOK} * <br>The delete was attempted after the Webhook had already been deleted.</li> * </ul> * * @throws net.dv8tion.jda.api.exceptions.InsufficientPermissionException * If the Webhook does not have a token, such as the Webhooks retrieved from Audit Logs and the currently * logged in account does not have {@link net.dv8tion.jda.api.Permission#MANAGE_WEBHOOKS} in this channel. * * @return {@link net.dv8tion.jda.api.requests.restaction.AuditableRestAction AuditableRestAction} * <br>The rest action to delete this Webhook. */ @Nonnull @CheckReturnValue AuditableRestAction<Void> delete(); /** * Deletes this Webhook. * * <p>The following {@link net.dv8tion.jda.api.requests.ErrorResponse ErrorResponses} are possible: * <ul> * <li>{@link net.dv8tion.jda.api.requests.ErrorResponse#MISSING_ACCESS MISSING_ACCESS} * <br>The delete was attempted after the account lost permission to view the channel.</li> * * <li>{@link net.dv8tion.jda.api.requests.ErrorResponse#MISSING_PERMISSIONS MISSING_PERMISSIONS} * <br>The delete was attempted after the account lost {@link net.dv8tion.jda.api.Permission#MANAGE_WEBHOOKS Permission.MANAGE_WEBHOOKS} in * the channel.</li> * * <li>{@link net.dv8tion.jda.api.requests.ErrorResponse#UNKNOWN_WEBHOOK UNKNOWN_WEBHOOK} * <br>The delete was attempted after the Webhook had already been deleted.</li> * * <li>{@link net.dv8tion.jda.api.requests.ErrorResponse#INVALID_WEBHOOK_TOKEN INVALID_WEBHOOK_TOKEN} * <br>If the provided webhook token is not valid.</li> * </ul> * * @param token * The webhook token (this is not the bot authorization token!) * * @throws IllegalArgumentException * If the provided token is null * * @return {@link net.dv8tion.jda.api.requests.restaction.AuditableRestAction AuditableRestAction} * <br>The rest action to delete this Webhook. * * @since 4.0.0 */ @Nonnull @CheckReturnValue AuditableRestAction<Void> delete(@Nonnull String token); /** * The {@link WebhookManager WebhookManager} for this Webhook. * <br>You can modify multiple fields in one request by chaining setters before calling {@link net.dv8tion.jda.api.requests.RestAction#queue() RestAction.queue()}. * * <p>This is a lazy idempotent getter. The manager is retained after the first call. * This getter is not thread-safe and would require guards by the user. * * @throws net.dv8tion.jda.api.exceptions.InsufficientPermissionException * If the currently logged in account does not have {@link net.dv8tion.jda.api.Permission#MANAGE_WEBHOOKS Permission.MANAGE_WEBHOOKS} * * @return The {@link WebhookManager WebhookManager} for this Webhook */ @Nonnull WebhookManager getManager(); /** * Partial Webhook which can be {@link #resolve() resolved} to a {@link Webhook}. 
* * @see #resolve() */ class WebhookReference implements ISnowflake { private final JDA api; private final long webhookId, channelId; public WebhookReference(JDA api, long webhookId, long channelId) { this.api = api; this.webhookId = webhookId; this.channelId = channelId; } @Override public long getIdLong() { return webhookId; } /** * The ID for the channel this webhook belongs to * * @return The ID for the channel this webhook belongs to */ @Nonnull public String getChannelId() { return Long.toUnsignedString(channelId); } /** * The ID for the channel this webhook belongs to * * @return The ID for the channel this webhook belongs to */ public long getChannelIdLong() { return channelId; } /** * Resolves this reference to a {@link Webhook} instance. * <br>The resulting instance may not provide a {@link #getChannel()} and {@link #getGuild()} due to API limitation. * * <p>The resulting webhook can also not be executed because the API does not provide a token. * * @return {@link RestAction} - Type: {@link Webhook} */ @Nonnull @CheckReturnValue public RestAction<Webhook> resolve() { Route.CompiledRoute route = Route.Webhooks.GET_WEBHOOK.compile(getId()); return new RestActionImpl<>(api, route, (response, request) -> request.getJDA().getEntityBuilder().createWebhook(response.getObject(), true)); } } /** * Partial Channel which references the source channel for a follower webhook. */ class ChannelReference implements ISnowflake { private final long id; private final String name; public ChannelReference(long id, String name) { this.id = id; this.name = name; } @Override public long getIdLong() { return id; } /** * The source channel's name * * @return The channel name */ @Nonnull public String getName() { return name; } } /** * Partial Guild which references the source guild for a follower webhook. */ class GuildReference implements ISnowflake { private final long id; private final String name; public GuildReference(long id, String name) { this.id = id; this.name = name; } @Override public long getIdLong() { return id; } /** * The source guild's name * * @return The guild name */ @Nonnull public String getName() { return name; } } }
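// Illustrative sketch (not part of the original file): pulling the id and token out
// of a webhook URL with the WEBHOOK_URL pattern documented above; the URL itself is
// a made-up example, not a real webhook.
class WebhookUrlExample {
    static void parse() {
        String url = "https://discord.com/api/webhooks/123456789012345678/abc-example-token";
        java.util.regex.Matcher matcher = Webhook.WEBHOOK_URL.matcher(url);
        if (matcher.matches()) {
            String id = matcher.group("id");       // named group, also reachable as group(1)
            String token = matcher.group("token"); // named group, also reachable as group(2)
        }
    }
}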
6,499
13,074
<gh_stars>1000+ package com.lmax.disruptor; /** * Pulls together the low-level data access and sequencing operations of {@link RingBuffer} * @param <T> The event type */ public interface EventSequencer<T> extends DataProvider<T>, Sequenced { }
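// Hypothetical usage sketch (not part of the original file): RingBuffer is the
// canonical EventSequencer implementation, combining DataProvider#get with the
// Sequenced claim/publish protocol. MyEvent and the buffer size are assumptions.
class EventSequencerExample {
    static final class MyEvent { long value; }

    static void publishOne() {
        RingBuffer<MyEvent> ringBuffer = RingBuffer.createSingleProducer(MyEvent::new, 1024);
        long seq = ringBuffer.next();        // Sequenced: claim the next slot
        try {
            ringBuffer.get(seq).value = 42L; // DataProvider: fill the preallocated event
        } finally {
            ringBuffer.publish(seq);         // Sequenced: publish so consumers can see it
        }
    }
}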
77
2,059
<filename>lib/graph/src/types/downstream.cpp #include "graph/types/downstream.h" #include "graph/types/root.h" Downstream::~Downstream() { for (auto r : roots) r->removeDownstream(this); } void Downstream::trigger() { for (auto r : roots) r->removeDownstream(this); roots.clear(); update(); }
134
848
<reponame>hito0512/Vitis-AI /* * Copyright 2019 Xilinx Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <iostream> #include <fstream> #include <sys/stat.h> #include <opencv2/imgcodecs.hpp> #include <vitis/ai/env_config.hpp> #include <vitis/ai/profiling.hpp> #include <vitis/ai/pointpainting.hpp> using namespace vitis::ai; using namespace vitis::ai::pointpillars_nus; DEF_ENV_PARAM(SAMPLES_BATCH_NUM, "0"); void read_cam_intr_from_line(const std::string &line, CamInfo &cam_info) { auto s = line; auto cnt = 0u; while(cnt < cam_info.cam_intr.size()) { auto f = std::atof(s.c_str()); cam_info.cam_intr[cnt] = f; cnt++; auto n = s.find_first_of(' '); if (n == std::string::npos) { break; } s = s.substr(n + 1); } } void read_s2lr_from_line(const std::string &line, CamInfo &cam_info) { auto s = line; auto cnt = 0u; while(cnt < cam_info.s2l_r.size()) { auto f = std::atof(s.c_str()); cam_info.s2l_r[cnt] = f; cnt++; auto n = s.find_first_of(' '); if (n == std::string::npos) { break; } s = s.substr(n + 1); } } void read_s2lt_from_line(const std::string &line, CamInfo &cam_info) { auto s = line; auto cnt = 0u; while(cnt < cam_info.s2l_t.size()) { auto f = std::atof(s.c_str()); cam_info.s2l_t[cnt] = f; cnt++; auto n = s.find_first_of(' '); if (n == std::string::npos) { break; } s = s.substr(n + 1); } } //void read_points_file(const std::string &points_file_name, PointsInfo &points_info) { void read_points_file(const std::string &points_file_name, std::vector<float> &points) { //int DIM = 5; //points_info.dim = DIM; struct stat file_stat; if (stat(points_file_name.c_str(), &file_stat) != 0) { std::cerr << "file:" << points_file_name << " state error!" 
<< std::endl; exit(-1); } auto file_size = file_stat.st_size; //LOG(INFO) << "input file:" << points_file_name << " size:" << file_size; //points_info.points.resize(file_size / 4); points.resize(file_size / 4); //CHECK(std::ifstream(points_file_name).read(reinterpret_cast<char *>(points_info.points.data()), file_size).good()); CHECK(std::ifstream(points_file_name).read(reinterpret_cast<char *>(points.data()), file_size).good()); } void read_cams(std::ifstream &anno_file, const std::string &path_prefix, std::vector<CamInfo> &cam_infos, std::vector<cv::Mat> &images) { char line[1024]; // read sweeps if (anno_file.getline(line, 1024, '\n') && std::strncmp(line, "cams:", 5) == 0) { auto s = std::string(line + 5); int num = std::atoi(s.c_str()); //std::cout << "sweep num:" << num << std::endl; if (num == 0) { return; } int cnt = 0; images.clear(); cam_infos.clear(); images.resize(num); cam_infos.resize(num); while(cnt < num) { // read data path if (anno_file.getline(line, 1024, '\n') && std::strncmp(line + 1, "data_path:", 10) == 0) { auto data_path = std::string(line + 1).substr(10); data_path = path_prefix + data_path; //std::cout << "data_path:" << data_path << std::endl; images[cnt] = cv::imread(data_path); } else { break; } if (anno_file.getline(line, 1024, '\n') && std::strncmp(line + 1, "sensor2lidar_rotation:", 22) == 0) { auto l = std::string(line + 1).substr(22); //std::cout << "s2lr:" << l << std::endl; read_s2lr_from_line(l, cam_infos[cnt]); } else { break; } if (anno_file.getline(line, 1024, '\n') && std::strncmp(line + 1, "sensor2lidar_translation:", 25) == 0) { auto l = std::string(line + 1).substr(25); //std::cout << "s2lt:" << l << std::endl; read_s2lt_from_line(l, cam_infos[cnt]); } else { break; } if (anno_file.getline(line, 1024, '\n') && std::strncmp(line + 1, "cam_intrinsic:", 14) == 0) { auto l = std::string(line + 1).substr(14); //std::cout << "s2lt:" << l << std::endl; read_cam_intr_from_line(l, cam_infos[cnt]); } else { break; } cnt++; } } } void read_sweeps(std::ifstream &anno_file, const std::string &path_prefix, std::vector<SweepInfo> &sweeps, int points_dim) { char line[1024]; // read sweeps if (anno_file.getline(line, 1024, '\n') && std::strncmp(line, "sweeps:", 7) == 0) { auto s = std::string(line + 7); int num = std::atoi(s.c_str()); //std::cout << "sweep num:" << num << std::endl; if (num == 0) { return; } int cnt = 0; sweeps.clear(); sweeps.resize(num); while(cnt < num) { // read data path if (anno_file.getline(line, 1024, '\n') && std::strncmp(line + 1, "data_path:", 10) == 0) { auto data_path = std::string(line + 1).substr(10); data_path = path_prefix + data_path; //std::cout << "data_path:" << data_path << std::endl; if (!sweeps[cnt].points.points) { sweeps[cnt].points.points.reset(new std::vector<float>); } read_points_file(data_path, *(sweeps[cnt].points.points)); sweeps[cnt].points.dim = points_dim; } else { break; } if (anno_file.getline(line, 1024, '\n') && std::strncmp(line + 1, "timestamp:", 10) == 0) { auto timestamp = std::atoll(std::string(line + 1).substr(10).c_str()); //std::cout << "timestamp:" << timestamp<< std::endl; sweeps[cnt].cam_info.timestamp = timestamp; } else { break; } if (anno_file.getline(line, 1024, '\n') && std::strncmp(line + 1, "sensor2lidar_rotation:", 22) == 0) { auto l = std::string(line + 1).substr(22); //std::cout << "s2lr:" << l << std::endl; read_s2lr_from_line(l, sweeps[cnt].cam_info); } else { break; } if (anno_file.getline(line, 1024, '\n') && std::strncmp(line + 1, "sensor2lidar_translation:", 25) == 0) { auto l = 
std::string(line + 1).substr(25); //std::cout << "s2lt:" << l << std::endl; read_s2lt_from_line(l, sweeps[cnt].cam_info); } else { break; } cnt++; } } } void read_inno_file(const std::string &file_name, PointsInfo &points_info, int points_dim, std::vector<SweepInfo> &sweeps, int sweeps_points_dim, std::vector<cv::Mat> &images) { std::string path_prefix; if (file_name.find_last_of('/') != std::string::npos) { path_prefix = file_name.substr(0, file_name.find_last_of('/') + 1); //std::cout << "path_prefix:" << path_prefix << std::endl; } auto anno_file = std::ifstream(file_name); if (!anno_file) { std::cerr << "open:" << file_name << " fail!" << std::endl; exit(-1); } char line[1024]; while(anno_file.getline(line, 1024, '\n')) { auto len = std::strlen(line); if (len == 0) { continue; } if (std::strncmp(line, "lidar_path:", 11) == 0) { auto lidar_path = std::string(line).substr(11); //std::cout << "lidar_path:" << lidar_path << std::endl; //read_points_file(lidar_path, points_info); if (!points_info.points.points) { points_info.points.points.reset(new std::vector<float>); } read_points_file(path_prefix + lidar_path, *(points_info.points.points)); points_info.points.dim = points_dim; } else { break; } // read timestamp if (anno_file.getline(line, 1024, '\n') && std::strncmp(line, "timestamp:", 10) == 0) { auto timestamp = std::atoll(std::string(line).substr(10).c_str()); //std::cout << "timestamp:" << timestamp<< std::endl; //points_info.cam_info.timestamp = timestamp; points_info.timestamp = timestamp; } else { break; } // read sweeps read_sweeps(anno_file, path_prefix, sweeps, sweeps_points_dim); // read cams read_cams(anno_file, path_prefix, points_info.cam_info, images); break; } anno_file.close(); } void print_points_info(const PointsInfo &points_info) { std::cout << "points info: " << std::endl; std::cout << " cam_info:" << std::endl; for (auto n = 0u; n < points_info.cam_info.size(); ++n) { std::cout << " timestamp:" << points_info.cam_info[n].timestamp << std::endl; std::cout << " s2l_t:"; for (auto i = 0u; i < points_info.cam_info[n].s2l_t.size(); ++i) { std::cout << points_info.cam_info[n].s2l_t[i] << " "; } std::cout << std::endl; std::cout << " s2l_r:"; for (auto i = 0u; i < points_info.cam_info[n].s2l_r.size(); ++i) { std::cout << points_info.cam_info[n].s2l_r[i] << " "; } std::cout << std::endl; } std::cout << " dim:" << points_info.points.dim << std::endl; if (points_info.points.points) { std::cout << " points size:" << points_info.points.points->size() << std::endl; } } int main( int argc, char *argv[]) { if (argc < 5) { //std::cout << "usage: " << argv[0] << " <input_file>" << std::endl; std::cout << "usage:" << argv[0] << " [segmentation_model] [pointpillars_model0] [pointpillars_model1] [anno_file_name]" << std::endl; exit(0); } int input_num = argc - 4; if (ENV_PARAM(SAMPLES_BATCH_NUM)) { input_num = std::min(ENV_PARAM(SAMPLES_BATCH_NUM), input_num); //std::cout << "set batch num :" << input_num << std::endl; } std::vector<std::string> anno_file_names(input_num); std::vector<PointsInfo> points_infos(input_num); std::vector<std::vector<cv::Mat>> images(input_num); for (auto i = 0; i < input_num; ++i) { anno_file_names[i] = argv[4 + i]; struct stat file_stat; if (stat(anno_file_names[i].c_str(), &file_stat) != 0) { std::cerr << "file:" << anno_file_names[i] << " state error!" 
<< std::endl; exit(-1); } read_inno_file(anno_file_names[i], points_infos[i], 5, points_infos[i].sweep_infos, 16, images[i]); } //std::string seg_model = "pointpainting_segmentation"; std::string seg_model = argv[1]; //std::string model_0 = "pointpainting_pointpillars_0"; std::string model_0 = argv[2]; //std::string model_1 = "pointpainting_pointpillars_1"; std::string model_1 = argv[3]; auto pointpainting = vitis::ai::PointPainting::create( seg_model, model_0, model_1); //auto batch = pointpainting->get_pointpillars_batch(); auto batch_ret = pointpainting->run(images, points_infos); //LOG(INFO) << "input width:" << ret.width // << " input height: " << ret.height; for (auto b = 0u; b < batch_ret.size(); ++b) { std::cout << "batch : " << b << std::endl; auto &ret = batch_ret[b]; for (auto c = 0u; c < 10; ++c) { for (auto i = 0u; i < ret.bboxes.size(); ++i) { if (ret.bboxes[i].label != c) { continue; } std::cout << "label: " << ret.bboxes[i].label; std::cout << " bbox: "; for (auto j = 0u; j < ret.bboxes[i].bbox.size(); ++j) { std::cout << ret.bboxes[i].bbox[j] << " "; } std::cout << "score: " << ret.bboxes[i].score; std::cout << std::endl; } } std::cout << std::endl; } return 0; }
5,523
1,006
<reponame>eenurkka/incubator-nuttx /**************************************************************************** * arch/arm/include/max326xx/chip.h * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The * ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * ****************************************************************************/ #ifndef __ARCH_ARM_INCLUDE_MAX326XX_CHIP_H #define __ARCH_ARM_INCLUDE_MAX326XX_CHIP_H /**************************************************************************** * Included Files ****************************************************************************/ #include <nuttx/config.h> /**************************************************************************** * Pre-processor Prototypes ****************************************************************************/ /* Get customizations for each supported MAX326xx family. Only sizes and * numbers of things are provided here. See arch/arm/src/max326xx/Kconfig * for other, boolean configuration settings. * * MAX326xx Families are determined by sharing a common User Guide for the * chip specification: * * MAX32620/32621 Family: MAX32620 Rev C, User Guide, AN6242, Rev 2, 2/17 * MAX32630/32632 Family: MAX32630 Rev B, User Guide, AN6349, Rev 0, 10/16 * MAX32660 Family: MAX32660 User Guide, AN6659, Rev0, 7/18 */ /* MAX32620/32621 Family: * * Part Flash SRAM Trust Pin/Package * (Mb) (Kb) Protection * MAX32620ICQ+ 2 256 No 100 TQFP * MAX32620IWG+ 2 256 No 81 WLP * MAX32620IWG+T 2 256 No 81 WLP * MAX32620IWGL+ 1 256 No 81 WLP * MAX32620IWGL+T 1 256 No 81 WLP * MAX32621ICQ+ 2 256 Yes 100 TQFP * MAX32621IWG+ 2 256 Yes 81 WLP * MAX32621IWG+T 2 256 Yes 81 WLP */ #if defined(CONFIG_ARCH_CHIP_MAX32620) || defined(CONFIG_ARCH_CHIP_MAX32621) /* Peripherals */ # define MAX326_NWDOG 0 /* No Watchdog Timers */ # define MAX326_NWWDOG 2 /* Two Windowed Watchdog Timers */ # define MAX326_NRWDOG 1 /* One Recovery Watchdog Timer */ # define MAX326_NWAKEUP 1 /* One Wakeup Timer */ # define MAX326_NRTC 1 /* One RTC */ # define MAX326_NCRC 1 /* One CRC16/32 */ # define MAX326_NAES 1 /* One AES 128,192, 256 */ # define MAX326_NUSB20 1 /* One USB 2.0 device */ # define MAX326_NTMR32 6 /* Six 32-bit Timers */ # define MAX326_NTMR8 0 /* No 8-bit Timers */ # define MAX326_NPTENGINE 16 /* Sixteen pulse train engines */ # define MAX326_NSPIM 3 /* Three SPI master */ # define MAX326_NSPIS 1 /* One SPI slave */ # define MAX326_NSPIXIP 1 /* One SPI XIP */ # define MAX326_NI2SS 0 /* No I2S slave */ # define MAX326_NI2CM 3 /* Three I2C master */ # define MAX326_NI2CS 1 /* One I2C slave */ # define MAX326_NUART 4 /* Four UARTs */ # define MAX326_N1WIREM 1 /* One 1-Wire master */ # define MAX326_NADC10 1 /* One 10-bit ADC */ /* MAX32630/32632 Family: * * Part Flash SRAM Trust Secure Pin/Package * (Mb) (Kb) Protection Bootloader * MAX32630IWQ+ 2 512 No No 100 WLP * MAX32630IWQ+T 2 512 No No 100 WLP 
 * MAX32630ICQ+ 2 512 No No 100 TQFP-EP
 * MAX32631IWQ+ 2 512 Yes No 100 WLP
 * MAX32631IWQ+T 2 512 Yes No 100 WLP
 * MAX32631ICQ+ 2 512 Yes No 100 TQFP-EP
 * MAX32632IWQ+ 2 512 Yes Yes 100 WLP
 * MAX32632IWQ+T 2 512 Yes Yes 100 WLP
 */

#elif defined(CONFIG_ARCH_CHIP_MAX32630) || defined(CONFIG_ARCH_CHIP_MAX32632)

/* Peripherals */

# define MAX326_NWDOG 0 /* No Watchdog Timers */
# define MAX326_NWWDOG 2 /* Two Windowed Watchdog Timers */
# define MAX326_NRWDOG 0 /* No Recovery Watchdog Timer */
# define MAX326_NWAKEUP 1 /* One Wakeup Timer */
# define MAX326_NRTC 1 /* One RTC */
# define MAX326_NCRC 1 /* One CRC16/32 */
# define MAX326_NAES 1 /* One AES 128,192, 256 */
# define MAX326_NUSB20 1 /* One USB 2.0 device */
# define MAX326_NTMR32 6 /* Six 32-bit Timers */
# define MAX326_NTMR8 0 /* No 8-bit Timers */
# define MAX326_NPTENGINE 16 /* Sixteen pulse train engines */
# define MAX326_NSPIM 3 /* Three SPI master */
# define MAX326_NSPIS 1 /* One SPI slave */
# define MAX326_NSPIXIP 1 /* One SPI XIP */
# define MAX326_NI2SS 0 /* No I2S slave */
# define MAX326_NI2CM 3 /* Three I2C master */
# define MAX326_NI2CS 1 /* One I2C slave */
# define MAX326_NUART 4 /* Four UARTs */
# define MAX326_N1WIREM 1 /* One 1-Wire master */
# define MAX326_NADC10 1 /* One 10-bit ADC */

/* MAX32660 Family:
 *
 * Part Flash SRAM Secure Pin/Package
 * (Mb) (Kb) Bootloader
 * MAX32660GWE+ 256 96 No 16 WLP
 * MAX32660GWE+T 256 96 No 16 WLP
 * MAX32660GTP+ 256 96 No 20 TQFN-EP
 * MAX32660GTP+T 256 96 No 20 TQFN-EP
 * MAX32660GTG+ 256 96 No 24 TQFN-EP
 * MAX32660GTG+T 256 96 No 24 TQFN-EP
 * MAX32660GWEBL+* 256 96 Yes 16 WLP
 * MAX32660GWEBL+T* 256 96 Yes 16 WLP
 * MAX32660GTGBL+* 256 96 Yes 24 TQFN-EP
 * MAX32660GTGBL+T* 256 96 Yes 24 TQFN-EP
 * MAX32660GWELA+* 128 64 No 16 WLP
 * MAX32660GWELA+T* 128 64 No 16 WLP
 * MAX32660GTGLA+* 128 64 No 24 TQFN-EP
 * MAX32660GTGLA+T* 128 64 No 24 TQFN-EP
 * MAX32660GWELB+* 64 32 No 16 WLP
 * MAX32660GWELB+T* 64 32 No 16 WLP
 * MAX32660GTGLB+* 64 32 No 24 TQFN-EP
 * MAX32660GTGLB+T* 64 32 No 24 TQFN-EP
 */

#elif defined(CONFIG_ARCH_CHIP_MAX32660)

/* Peripherals */

# define MAX326_NWDOG 1 /* One Watchdog Timer */
# define MAX326_NWWDOG 0 /* No Windowed Watchdog Timers */
# define MAX326_NRWDOG 0 /* No Recovery Watchdog Timer */
# define MAX326_NWAKEUP 0 /* No Wakeup Timer */
# define MAX326_NRTC 1 /* One RTC */
# define MAX326_NCRC 0 /* No CRC16/32 */
# define MAX326_NAES 0 /* No AES 128,192, 256 */
# define MAX326_NUSB20 0 /* No USB 2.0 device */
# define MAX326_NTMR32 2 /* Two 32-bit Timers */
# define MAX326_NTMR8 1 /* One 8-bit Timer */
# define MAX326_NPTENGINE 0 /* No pulse train engines */
# define MAX326_NSPIM 2 /* Two SPI master */
# define MAX326_NSPIS 2 /* Two SPI slave */
# define MAX326_NSPIXIP 0 /* No SPI XIP */
# define MAX326_NI2SS 1 /* One I2S slave */
# define MAX326_NI2CM 2 /* Two I2C master */
# define MAX326_NI2CS 2 /* Two I2C slave */
# define MAX326_NUART 2 /* Two UARTs */
# define MAX326_N1WIREM 0 /* No 1-Wire master */
# define MAX326_NADC10 0 /* No 10-bit ADC */

#else
# error Unrecognized MAX326XX chip
#endif

/* NVIC priority levels *****************************************************/

/* Each priority field holds a priority value, 0x00-0xe0. The lower the
 * value, the greater the priority of the corresponding interrupt. The
 * processor implements only bits[7:5] of each field, bits[4:0] read as zero
 * and ignore writes.
 */

#define NVIC_SYSH_PRIORITY_MIN     0xe0 /* All bits[7:5] set is minimum priority */
#define NVIC_SYSH_PRIORITY_DEFAULT 0x80 /* Midpoint is the default */
#define NVIC_SYSH_PRIORITY_MAX     0x00 /* Zero is maximum priority */
#define NVIC_SYSH_PRIORITY_STEP    0x20 /* Eight priority levels in steps of 0x20 */

/****************************************************************************
 * Public Types
 ****************************************************************************/

/****************************************************************************
 * Public Data
 ****************************************************************************/

/****************************************************************************
 * Public Function Prototypes
 ****************************************************************************/

#endif /* __ARCH_ARM_INCLUDE_MAX326XX_CHIP_H */
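/* Illustrative sketch (not part of the original header): with three
 * implemented priority bits there are eight usable levels stepping by
 * NVIC_SYSH_PRIORITY_STEP. The helper below only demonstrates the
 * arithmetic implied by the definitions above; the function name is a
 * hypothetical assumption, not a NuttX API.
 *
 *   static inline uint8_t max326_nvic_priority(unsigned int level)
 *   {
 *     // level 0 is the highest priority (0x00); level 7 the lowest (0xe0)
 *     return (uint8_t)(NVIC_SYSH_PRIORITY_MAX +
 *                      level * NVIC_SYSH_PRIORITY_STEP);
 *   }
 */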
// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/test/fake_content_layer_client.h"

#include <algorithm>
#include <cstddef>

#include "cc/paint/paint_op_buffer.h"
#include "third_party/skia/include/core/SkTextBlob.h"
#include "ui/gfx/geometry/rect_conversions.h"
#include "ui/gfx/geometry/skia_conversions.h"

namespace cc {

FakeContentLayerClient::ImageData::ImageData(PaintImage img,
                                             const gfx::Point& point,
                                             const SkSamplingOptions& sampling,
                                             const PaintFlags& flags)
    : image(std::move(img)), point(point), sampling(sampling), flags(flags) {}

FakeContentLayerClient::ImageData::ImageData(PaintImage img,
                                             const gfx::Transform& transform,
                                             const SkSamplingOptions& sampling,
                                             const PaintFlags& flags)
    : image(std::move(img)),
      transform(transform),
      sampling(sampling),
      flags(flags) {}

FakeContentLayerClient::ImageData::ImageData(const ImageData& other) = default;

FakeContentLayerClient::ImageData::~ImageData() = default;

FakeContentLayerClient::SkottieData::SkottieData(
    scoped_refptr<SkottieWrapper> skottie,
    const gfx::Rect& dst,
    float t,
    SkottieFrameDataMap images)
    : skottie(std::move(skottie)), dst(dst), t(t), images(std::move(images)) {}

FakeContentLayerClient::SkottieData::SkottieData(const SkottieData& other) =
    default;

FakeContentLayerClient::SkottieData&
FakeContentLayerClient::SkottieData::operator=(const SkottieData& other) =
    default;

FakeContentLayerClient::SkottieData::~SkottieData() = default;

FakeContentLayerClient::FakeContentLayerClient() = default;

FakeContentLayerClient::~FakeContentLayerClient() = default;

gfx::Rect FakeContentLayerClient::PaintableRegion() const {
  CHECK(bounds_set_);
  return gfx::Rect(bounds_);
}

scoped_refptr<DisplayItemList>
FakeContentLayerClient::PaintContentsToDisplayList() {
  auto display_list = base::MakeRefCounted<DisplayItemList>();

  // Emit each requested rect as a simple unpaired draw item.
  for (RectPaintVector::const_iterator it = draw_rects_.begin();
       it != draw_rects_.end(); ++it) {
    const gfx::RectF& draw_rect = it->first;
    const PaintFlags& flags = it->second;

    display_list->StartPaint();
    display_list->push<DrawRectOp>(gfx::RectFToSkRect(draw_rect), flags);
    display_list->EndPaintOfUnpaired(ToEnclosingRect(draw_rect));
  }

  // Images with a non-identity transform are bracketed by a paired
  // save/concat ... restore sequence.
  for (ImageVector::const_iterator it = draw_images_.begin();
       it != draw_images_.end(); ++it) {
    if (!it->transform.IsIdentity()) {
      display_list->StartPaint();
      display_list->push<SaveOp>();
      display_list->push<ConcatOp>(it->transform.GetMatrixAsSkM44());
      display_list->EndPaintOfPairedBegin();
    }

    display_list->StartPaint();
    display_list->push<SaveOp>();
    display_list->push<ClipRectOp>(gfx::RectToSkRect(PaintableRegion()),
                                   SkClipOp::kIntersect, false);
    display_list->push<DrawImageOp>(
        it->image, static_cast<float>(it->point.x()),
        static_cast<float>(it->point.y()),
        PaintFlags::FilterQualityToSkSamplingOptions(
            it->flags.getFilterQuality()),
        &it->flags);
    display_list->push<RestoreOp>();
    display_list->EndPaintOfUnpaired(PaintableRegion());

    if (!it->transform.IsIdentity()) {
      display_list->StartPaint();
      display_list->push<RestoreOp>();
      display_list->EndPaintOfPairedEnd();
    }
  }

  for (const SkottieData& skottie_data : skottie_data_) {
    display_list->StartPaint();
    display_list->push<DrawSkottieOp>(skottie_data.skottie,
                                      gfx::RectToSkRect(skottie_data.dst),
                                      skottie_data.t, skottie_data.images);
    display_list->EndPaintOfUnpaired(PaintableRegion());
  }

  if (contains_slow_paths_) {
    // Add 6 slow paths, passing the reporting threshold.
SkPath path; path.addCircle(2, 2, 5); path.addCircle(3, 4, 2); display_list->StartPaint(); for (int i = 0; i < 6; ++i) { display_list->push<ClipPathOp>(path, SkClipOp::kIntersect, true); } display_list->EndPaintOfUnpaired(PaintableRegion()); } if (fill_with_nonsolid_color_) { gfx::Rect draw_rect = PaintableRegion(); PaintFlags flags; flags.setColor(SK_ColorRED); display_list->StartPaint(); while (!draw_rect.IsEmpty()) { display_list->push<DrawIRectOp>(gfx::RectToSkIRect(draw_rect), flags); draw_rect.Inset(1, 1); } display_list->EndPaintOfUnpaired(PaintableRegion()); } if (has_non_aa_paint_) { PaintFlags flags; flags.setAntiAlias(false); display_list->StartPaint(); display_list->push<DrawRectOp>(SkRect::MakeWH(10, 10), flags); display_list->EndPaintOfUnpaired(PaintableRegion()); } if (has_draw_text_op_) { display_list->StartPaint(); display_list->push<DrawTextBlobOp>( SkTextBlob::MakeFromString("any", SkFont()), 0.0f, 0.0f, PaintFlags()); display_list->EndPaintOfUnpaired(PaintableRegion()); } display_list->Finalize(); return display_list; } bool FakeContentLayerClient::FillsBoundsCompletely() const { return false; } } // namespace cc
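// Illustrative usage sketch (not part of the original file): how a unit
// test might drive FakeContentLayerClient. The set_bounds() and
// add_draw_rect() setters follow the member names used above but are
// declared in fake_content_layer_client.h, which is not shown here, so
// treat them as assumptions.
//
//   FakeContentLayerClient client;
//   client.set_bounds(gfx::Size(100, 100));
//   PaintFlags flags;
//   flags.setColor(SK_ColorBLUE);
//   client.add_draw_rect(gfx::RectF(10.f, 10.f, 50.f, 50.f), flags);
//   scoped_refptr<DisplayItemList> list =
//       client.PaintContentsToDisplayList();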