max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
337
/* * Copyright 2019 The Polycube Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * This function is called each time a packet arrives to the cube. * ctx contains the packet and md some additional metadata for the packet. * If the service is of type XDP_SKB/DRV CTX TYPE is equivalent to the struct * xdp_md otherwise, if the service is of type TC, CTXTYPE is equivalent to * the __sk_buff struct * Please look at the libpolycube documentation for more details. */ #include <bcc/helpers.h> #include <uapi/linux/if_ether.h> #include <uapi/linux/in.h> #include <uapi/linux/ip.h> #include <uapi/linux/tcp.h> #include <uapi/linux/udp.h> static __always_inline int handle_rx(struct CTXTYPE *ctx, struct pkt_metadata *md) { unsigned int key = 0; void *data = (void *)(long)ctx->data; void *data_end = (void *)(long)ctx->data_end; u16 reason = 1; uint32_t a, x, m[16]; u32 mdata[3]; uint64_t pkt_timestamp; //CUSTOM_FILTER_CODE return RX_OK; }
490
1,540
<gh_stars>1000+ package test.github765; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; public class DuplicateCallsSample extends TestTemplate<Integer> { private int i = 0; @Test(dataProvider = "testParameters") public void callExecuteTest(Integer testParameters) { Assert.assertTrue(testParameters > 0); } @DataProvider(name = "testParameters") public Object[][] getOnboardingTestParameters() { return new Object[][] {{4}}; } }
164
429
#ifndef __BOOLEAN_CIRCUIT__ #define __BOOLEAN_CIRCUIT__ #include "Party.h" #define MSG_KEYS_HEADER_SZ (16) #endif
61
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.parsing.api.indexing; import org.junit.Test; import org.netbeans.junit.NbTestCase; import org.netbeans.modules.parsing.impl.TaskProcessor; import org.netbeans.modules.parsing.impl.Utilities; import org.netbeans.modules.parsing.impl.indexing.RepositoryUpdater; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileUtil; /** * * @author <NAME> */ public class IndexingManagerTest extends NbTestCase { private FileObject root; public IndexingManagerTest(final String name) { super(name); } @Override protected void setUp() throws Exception { super.setUp(); clearWorkDir(); final FileObject workDir = FileUtil.toFileObject(getWorkDir()); root = FileUtil.createFolder(workDir, "root"); //NOI18N } public void testRefreshIndexAndWaitCalledWithParserLock() { boolean success = false; Utilities.acquireParserLock(); try { IndexingManager.getDefault().refreshIndexAndWait(root.toURL(), null); success = true; } catch (IllegalStateException ise) { //pass } finally { Utilities.releaseParserLock(); } assertFalse(success); } public void testRefreshIndexAndWaitCalledFromIndexer() { final boolean[] success = {false}; RepositoryUpdater.getDefault().runIndexer(new 
Runnable() { @Override public void run() { try { IndexingManager.getDefault().refreshIndexAndWait(root.toURL(), null); success[0] = true; } catch (IllegalStateException ise) { //pass } } }); assertFalse(success[0]); } public void testRefreshIndexAndWait() { boolean success = false; try { IndexingManager.getDefault().refreshIndexAndWait(root.toURL(), null); success = true; } catch (IllegalStateException ise) { //pass } assertTrue(success); } }
1,137
678
<gh_stars>100-1000 /** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/IMCore.framework/Frameworks/IMFoundation.framework/IMFoundation */ #import <IMFoundation/XXUnknownSuperclass.h> @class NSString, NSDictionary; @interface IMTimer : XXUnknownSuperclass { id _timer; // 4 = 0x4 id _target; // 8 = 0x8 NSDictionary *_userInfo; // 12 = 0xc NSString *_name; // 16 = 0x10 double _timeInterval; // 20 = 0x14 SEL _selector; // 28 = 0x1c BOOL _wakeDevice; // 32 = 0x20 } @property(readonly, assign, nonatomic) id userInfo; // G=0x28aed; @synthesize=_userInfo // declared property getter: - (id)userInfo; // 0x28aed - (void)dealloc; // 0x28a65 - (void)invalidate; // 0x28a45 - (void)setFireTimeInterval:(double)interval; // 0x2880d - (id)initWithTimeInterval:(double)timeInterval name:(id)name shouldWake:(BOOL)wake target:(id)target selector:(SEL)selector userInfo:(id)info; // 0x285b5 @end
370
10,225
package org.jboss.resteasy.reactive.common.headers; import java.util.Date; import javax.ws.rs.ext.RuntimeDelegate; import org.jboss.resteasy.reactive.common.util.DateUtil; /** * @author <a href="mailto:<EMAIL>"><NAME></a> */ public class DateDelegate implements RuntimeDelegate.HeaderDelegate<Date> { public static final DateDelegate INSTANCE = new DateDelegate(); @Override public Date fromString(String value) { if (value == null) throw new IllegalArgumentException("Param was null"); return DateUtil.parseDate(value); } @Override public String toString(Date value) { if (value == null) throw new IllegalArgumentException("Param was null"); return DateUtil.formatDate(value); } }
284
892
{ "schema_version": "1.2.0", "id": "GHSA-94j2-2w6f-7qp9", "modified": "2022-05-02T06:16:20Z", "published": "2022-05-02T06:16:20Z", "aliases": [ "CVE-2010-0864" ], "details": "Unspecified vulnerability in the Retail - Oracle Retail Place In-Season component in Oracle Industry Product Suite 12.2 allows remote attackers to affect integrity via unknown vectors related to Online Help.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-0864" }, { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/57743" }, { "type": "WEB", "url": "http://www.oracle.com/technetwork/topics/security/cpuapr2010-099504.html" }, { "type": "WEB", "url": "http://www.securitytracker.com/id?1023872" }, { "type": "WEB", "url": "http://www.us-cert.gov/cas/techalerts/TA10-103B.html" } ], "database_specific": { "cwe_ids": [ ], "severity": "MODERATE", "github_reviewed": false } }
503
514
<filename>duke-core/src/main/java/no/priv/garshol/duke/Database.java package no.priv.garshol.duke; import java.util.Collection; /** * Used to store and index records for later matching. */ public interface Database { /** * Returns true iff the database is held entirely in memory, and * thus is not persistent. */ public boolean isInMemory(); /** * Add the record to the index. */ public void index(Record record); /** * Flushes all changes to disk. For in-memory databases this is a * no-op. */ public void commit(); /** * Look up record by identity. */ public Record findRecordById(String id); /** * Look up potentially matching records. This method must be * thread-safe. */ public Collection<Record> findCandidateMatches(Record record); /** * Stores state to disk and closes all open resources. */ public void close(); /** * Gives the database its configuration (called by Duke framework). * @since 1.2 */ public void setConfiguration(Configuration config); /** * Sets whether or not to overwrite any existing index (called by * Duke framework). * @since 1.2 */ public void setOverwrite(boolean overwrite); }
381
6,098
from __future__ import print_function import sys sys.path.insert(1,"../../../") import h2o from tests import pyunit_utils from h2o.estimators.gbm import H2OGradientBoostingEstimator from tests.pyunit_utils import roc_auc_score def multinomial_auc_prostate_gbm(): data = h2o.import_file(pyunit_utils.locate("smalldata/logreg/prostate.csv")) response_col = "GLEASON" data[response_col] = data[response_col].asfactor() predictors = ["RACE", "AGE", "PSA", "DPROS", "CAPSULE", "VOL", "DCAPS"] distribution = "multinomial" # train model gbm = H2OGradientBoostingEstimator(ntrees=1, max_depth=2, nfolds=3, distribution=distribution, auc_type="WEIGHTED_OVR") gbm.train(x=predictors, y=response_col, training_frame=data) gbm.show() # get result on training data from h2o cm = gbm.confusion_matrix(data) h2o_auc_table = gbm.multinomial_auc_table(train=True) h2o_aucpr_table = gbm.multinomial_aucpr_table(train=True) print(cm) print(h2o_auc_table.as_data_frame()) print(h2o_aucpr_table.as_data_frame()) h2o_ovr_macro_auc = h2o_auc_table[3][7] h2o_ovr_weighted_auc = h2o_auc_table[3][8] h2o_ovo_macro_auc = h2o_auc_table[3][30] h2o_ovo_weighted_auc = h2o_auc_table[3][31] h2o_ovr_weighted_aucpr = h2o_aucpr_table[3][8] h2o_default_auc = gbm.auc() h2o_default_aucpr = gbm.aucpr() print("default vs. table AUC "+str(h2o_ovr_weighted_auc)+" "+str(h2o_default_auc)) print("default vs. table PR AUC "+str(h2o_ovr_weighted_aucpr)+" "+str(h2o_default_aucpr)) # default should be ovr weighted assert h2o_ovr_weighted_auc == h2o_default_auc, "default vs. table AUC "+str(h2o_ovr_weighted_auc)+" != "+str(h2o_default_auc) assert h2o_ovr_weighted_aucpr == h2o_default_aucpr, "default vs. 
table PR AUC "+str(h2o_ovr_weighted_aucpr)+" != "+str(h2o_default_aucpr) # transform data for sklearn prediction = gbm.predict(data).as_data_frame().iloc[:,1:] actual = data[response_col].as_data_frame().iloc[:, 0].tolist() # get result on training data from sklearn sklearn_ovr_macro_auc = roc_auc_score(actual, prediction, multi_class="ovr", average='macro') sklearn_ovr_weighted_auc = roc_auc_score(actual, prediction, multi_class="ovr", average='weighted') sklearn_ovo_macro_auc = roc_auc_score(actual, prediction, multi_class="ovo", average='macro') sklearn_ovo_weighted_auc = roc_auc_score(actual, prediction, multi_class="ovo", average='weighted') print("sklearn vs. h2o ovr macro: "+str(sklearn_ovr_macro_auc)+" "+str(h2o_ovr_macro_auc)) print("sklearn vs. h2o ovr weighted: "+str(sklearn_ovr_weighted_auc)+" "+str(h2o_ovr_weighted_auc)) print("sklearn vs. h2o ovo macro: "+str(sklearn_ovo_macro_auc)+" "+str(h2o_ovo_macro_auc)) print("sklearn vs. h2o ovo weighted: "+str(sklearn_ovo_weighted_auc)+" "+str(h2o_ovo_weighted_auc)) # compare results h2o vs sklearn precision = 1e-7 assert abs(h2o_ovr_macro_auc - sklearn_ovr_macro_auc) < precision, "sklearn vs. h2o ovr macro: "+str(sklearn_ovr_macro_auc)+" != "+str(h2o_ovr_macro_auc) assert abs(h2o_ovr_weighted_auc - sklearn_ovr_weighted_auc) < precision, "sklearn vs. h2o ovr weighted: "+str(sklearn_ovr_weighted_auc)+" != "+str(h2o_ovr_weighted_auc) assert abs(h2o_ovo_macro_auc - sklearn_ovo_macro_auc) < precision, "sklearn vs. h2o ovo macro: "+str(sklearn_ovo_macro_auc)+" != "+str(h2o_ovo_macro_auc) assert abs(h2o_ovo_weighted_auc - sklearn_ovo_weighted_auc) < precision, "sklearn vs. 
h2o ovo weighted: "+str(sklearn_ovo_weighted_auc)+" != "+str(h2o_ovo_weighted_auc) # set auc_type gbm = H2OGradientBoostingEstimator(ntrees=1, max_depth=2, nfolds=3, distribution=distribution, auc_type="MACRO_OVR") gbm.train(x=predictors, y=response_col, training_frame=data, validation_frame=data) h2o_auc_table = gbm.multinomial_auc_table(train=True) h2o_aucpr_table = gbm.multinomial_aucpr_table(train=True) h2o_ovr_macro_auc = h2o_auc_table[3][7] h2o_ovr_macro_aucpr = h2o_aucpr_table[3][7] h2o_default_auc = gbm.auc() h2o_default_aucpr = gbm.aucpr() assert abs(h2o_ovr_macro_auc - h2o_default_auc) < precision, "default auc vs. h2o ovr macro auc: "+str(sklearn_ovr_macro_auc)+" != "+str(h2o_default_auc) assert abs(h2o_ovr_macro_aucpr - h2o_default_aucpr) < precision, "default aucpr vs. h2o ovr macro aucpr: "+str(h2o_ovr_macro_aucpr)+" != "+str(h2o_default_aucpr) # test early stopping ntrees = 100 gbm2 = H2OGradientBoostingEstimator(ntrees=ntrees, max_depth=2, nfolds=3, distribution=distribution, score_each_iteration=True, auc_type="MACRO_OVR", stopping_metric="AUC", stopping_rounds=3) gbm2.train(x=predictors, y=response_col, training_frame=data, validation_frame=data) assert ntrees > gbm2.score_history().shape[0], "Test early stopping: Training should start early." # test performance with different auc type perf2 = gbm.model_performance(data, auc_type="WEIGHTED_OVO") perf2_auc = perf2.auc() assert abs(h2o_ovo_weighted_auc - perf2_auc) < precision, "h2o ovo weighted vs. 
h2o performance ovo weighted: "+str(h2o_ovo_weighted_auc)+" != "+str(perf2_auc) # test peformance with no data and auc_type is set ntrees = 2 gbm3 = H2OGradientBoostingEstimator(ntrees=ntrees, max_depth=2, nfolds=3, distribution=distribution) gbm3.train(x=predictors, y=response_col, training_frame=data, validation_frame=data) perf3 = gbm3.model_performance(train=True, auc_type="WEIGHTED_OVO") perf3_auc = perf3.auc() assert perf3_auc == "NaN", "AUC should be \"NaN\" because it is not set in model parameters and test_data is None" # test aucpr is not in cv summary print(gbm._model_json["output"]["cv_scoring_history"][0]._col_header) assert not "aucpr" in gbm.cross_validation_metrics_summary()[0], "The aucpr should not be in cross-validation metrics summary." assert "pr_auc" in gbm.cross_validation_metrics_summary()[0], "The pr_auc should be in cross-validation metrics summary." if __name__ == "__main__": pyunit_utils.standalone_test(multinomial_auc_prostate_gbm) else: multinomial_auc_prostate_gbm()
2,635
344
/* Copyright 2018 Canaan Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef _BSP_SYSCALLS_H #define _BSP_SYSCALLS_H #include <machine/syscall.h> #include <stddef.h> #include <stdint.h> #include <stdio.h> #include <string.h> #ifdef __cplusplus extern "C" { #endif /** * @brief Definitions for syscall putchar function * * @param[in] c The char to put * * @return result * - Byte On success, returns the written character. * - EOF On failure, returns EOF and sets the error indicator (see ferror()) on stdout. 
*/ typedef int (*sys_putchar_t)(char c); /** * @brief Definitions for syscall getchar function * * @return byte as int type to get * - Byte The character read as an unsigned char cast to an int * - EOF EOF on end of file or error, no enough byte to read */ typedef int (*sys_getchar_t)(void); extern sys_putchar_t sys_putchar; extern sys_getchar_t sys_getchar; /** * @brief Register putchar function when perform write syscall * * @param[in] putchar The user-defined putchar function * * @return None */ void sys_register_putchar(sys_putchar_t putchar); /** * @brief Register getchar function when perform read syscall * * @param[in] getchar The user-defined getchar function * * @return None */ void sys_register_getchar(sys_getchar_t getchar); /** * @brief Flush stdin buffer * * @return None */ void sys_stdin_flush(void); void __attribute__((noreturn)) sys_exit(int code); /** * @brief Get free memory * * @return The size of free memory */ size_t get_free_heap_size(void); #ifdef __cplusplus } #endif #endif /* _BSP_SYSCALLS_H */
808
921
package cz.habarta.typescript.generator.p2; import cz.habarta.typescript.generator.p1.A; public class B extends A { public String sb; }
58
8,586
<filename>src/graph/context/Symbols.cpp /* Copyright (c) 2021 vesoft inc. All rights reserved. * * This source code is licensed under Apache 2.0 License. */ #include "graph/context/Symbols.h" #include <sstream> #include "graph/planner/plan/PlanNode.h" #include "graph/util/Utils.h" namespace nebula { namespace graph { std::string Variable::toString() const { std::stringstream ss; ss << "name: " << name << ", type: " << type << ", colNames: <" << folly::join(",", colNames) << ">, readBy: <" << util::join(readBy, [](auto pn) { return pn->toString(); }) << ">, writtenBy: <" << util::join(writtenBy, [](auto pn) { return pn->toString(); }) << ">"; return ss.str(); } std::string SymbolTable::toString() const { std::stringstream ss; ss << "SymTable: ["; for (const auto& p : vars_) { ss << "\n" << p.first << ": "; if (p.second) { ss << p.second->toString(); } } ss << "\n]"; return ss.str(); } } // namespace graph } // namespace nebula
381
799
{ "campaign_intelligence": [ { "apt": "Publicly Available Exploit", "description": "SMBLost Remote Kernel Memory Read POC (Author: Unknown)", "targeted_countries": [ "" ], "targeted_industries": [ "" ] } ], "cve_description": "A remote code execution vulnerability exists in the way that the Microsoft Server Message Block 1.0 (SMBv1) server handles certain requests, aka Windows SMB Remote Code Execution Vulnerability.", "cve_dynamic_data": { "base_metric_v2": { "ac_insuf_info": "False", "access_vector": "NETWORK", "attack_complexity": "LOW", "authentication": "SINGLE", "availability_impact": "PARTIAL", "base_score": "6.5", "confidentiality_impact": "PARTIAL", "exploitability_score": "8.0", "impact_score": "6.4", "integrity_impact": "PARTIAL", "obtain_all_privilege": "False", "obtain_other_privilege": "False", "obtain_user_privilege": "False", "severity": "MEDIUM", "user_interaction_required": "False", "vector_string": "AV:N/AC:L/Au:S/C:P/I:P/A:P" }, "base_metric_v3": { "attack_complexity": "LOW", "attack_vector": "NETWORK", "availability_impact": "HIGH", "base_score": "8.8", "base_severity": "HIGH", "confidentiality_impact": "HIGH", "exploitability_score": "2.8", "impact_score": "5.9", "integrity_impact": "HIGH", "privileges_required": "LOW", "scope": "UNCHANGED", "user_interaction": "NONE", "vector_string": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H" }, "infinipoint_base_metric": { "attack_complexity": "6.03", "campaigns": 1, "device_count": 1, "exploitability_risk": "8.0", "exploits": 1, "risk_label": "High", "risk_level": 6.03, "risk_type": 3, "trends_level": "3.06" } }, "cve_id": "CVE-2020-1301", "cwe_description": "None", "cwe_id": "NVD-CWE-noinfo", "devices": [ { "$device": "22ddf738-7e1c-4f20-a9c7-07620d1f2110", "device_name_string": "DESKTOP-U0QSLQ8", "device_os": "Microsoft Windows 10 Enterprise Evaluation", "device_risk": 6.03, "is_managed": true, "map_id": "22ddf738-7e1c-4f20-a9c7-07620d1f2110CVE-2020-1301", "vulnerableProduct": "Microsoft Windows 10 
Enterprise Evaluation", "vulnerableVersion": "10.0.17763.1282" } ], "scan_date": null, "software_list": [ { "cpe_name_string": "Microsoft Windows 10 Enterprise Evaluation 17763", "cpe_strings": [], "cpe_type": "OS_ONLY" } ], "top_devices": [ { "$device": "22ddf738-7e1c-4f20-a9c7-07620d1f2110", "device_name_string": "DESKTOP-U0QSLQ8", "device_os": "Microsoft Windows 10 Enterprise Evaluation", "device_risk": 6.03, "is_managed": true, "map_id": "22ddf738-7e1c-4f20-a9c7-07620d1f2110CVE-2020-1301", "vulnerableProduct": "Microsoft Windows 10 Enterprise Evaluation", "vulnerableVersion": "10.0.17763.1282" } ] }
1,465
377
#pragma once //------------------------------------------------------------------------------ /** @class IO::SafeFileStream Wrapper around FileStream that will save to a temporary file and swap when closed @copyright (C) 2016-2020 Individual contributors, see AUTHORS file */ #include "io/filestream.h" #include "util/string.h" #include "io/filetime.h" #include "io/fswrapper.h" //------------------------------------------------------------------------------ namespace IO { class SafeFileStream : public FileStream { __DeclareClass(SafeFileStream); public: /// constructor SafeFileStream(); /// destructor virtual ~SafeFileStream(); /// open the stream virtual bool Open(); /// close the stream virtual void Close(); protected: IO::URI tmpUri; }; } // namespace IO //------------------------------------------------------------------------------
262
1,408
/* * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved. * * SPDX-License-Identifier: BSD-3-Clause */ #ifndef MMIO_H #define MMIO_H #include <stdint.h> static inline void mmio_write_8(uintptr_t addr, uint8_t value) { *(volatile uint8_t*)addr = value; } static inline uint8_t mmio_read_8(uintptr_t addr) { return *(volatile uint8_t*)addr; } static inline void mmio_write_16(uintptr_t addr, uint16_t value) { *(volatile uint16_t*)addr = value; } static inline uint16_t mmio_read_16(uintptr_t addr) { return *(volatile uint16_t*)addr; } static inline void mmio_clrsetbits_16(uintptr_t addr, uint16_t clear, uint16_t set) { mmio_write_16(addr, (mmio_read_16(addr) & ~clear) | set); } static inline void mmio_write_32(uintptr_t addr, uint32_t value) { *(volatile uint32_t*)addr = value; } static inline uint32_t mmio_read_32(uintptr_t addr) { return *(volatile uint32_t*)addr; } static inline void mmio_write_64(uintptr_t addr, uint64_t value) { *(volatile uint64_t*)addr = value; } static inline uint64_t mmio_read_64(uintptr_t addr) { return *(volatile uint64_t*)addr; } static inline void mmio_clrbits_32(uintptr_t addr, uint32_t clear) { mmio_write_32(addr, mmio_read_32(addr) & ~clear); } static inline void mmio_setbits_32(uintptr_t addr, uint32_t set) { mmio_write_32(addr, mmio_read_32(addr) | set); } static inline void mmio_clrsetbits_32(uintptr_t addr, uint32_t clear, uint32_t set) { mmio_write_32(addr, (mmio_read_32(addr) & ~clear) | set); } #endif /* MMIO_H */
667
417
//----------------------------------------------------------------------------- // Copyright (c) 2012 GarageGames, LLC // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. //----------------------------------------------------------------------------- //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// // Arcane-FX for MIT Licensed Open Source version of Torque 3D from GarageGames // Copyright (C) 2015 Faust Logic, Inc. 
//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// #ifndef _STATICSHAPE_H_ #define _STATICSHAPE_H_ #ifndef _SHAPEBASE_H_ #include "T3D/shapeBase.h" #endif //---------------------------------------------------------------------------- struct StaticShapeData: public ShapeBaseData { typedef ShapeBaseData Parent; public: StaticShapeData(); bool noIndividualDamage; S32 dynamicTypeField; bool isShielded; F32 energyPerDamagePoint; // Re-added for AFX // DECLARE_CONOBJECT(StaticShapeData); static void initPersistFields(); virtual void packData(BitStream* stream); virtual void unpackData(BitStream* stream); public: StaticShapeData(const StaticShapeData&, bool = false); virtual bool allowSubstitutions() const { return true; } }; //---------------------------------------------------------------------------- class StaticShape: public ShapeBase { typedef ShapeBase Parent; StaticShapeData* mDataBlock; bool mPowered; void onUnmount(SceneObject* obj,S32 node); protected: enum MaskBits { PositionMask = Parent::NextFreeMask, NextFreeMask = Parent::NextFreeMask << 1 }; public: DECLARE_CONOBJECT(StaticShape); StaticShape(); ~StaticShape(); bool onAdd(); void onRemove(); bool onNewDataBlock(GameBaseData *dptr, bool reload); void processTick(const Move *move); void interpolateTick(F32 delta); void setTransform(const MatrixF &mat); U32 packUpdate (NetConnection *conn, U32 mask, BitStream *stream); void unpackUpdate(NetConnection *conn, BitStream *stream); // power void setPowered(bool power) {mPowered = power;} bool isPowered() {return(mPowered);} static void initPersistFields(); }; #endif
954
310
<gh_stars>100-1000 { "name": "Classic 8-Inch Chef's Knife", "description": "A knife.", "url": "https://www.amazon.com/Shun-DM0706-Classic-8-Inch-Chefs/dp/B0000Y7KNQ" }
76
5,788
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.encrypt.spring.boot; import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.encrypt.algorithm.config.AlgorithmProvidedEncryptRuleConfiguration; import org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm; import org.apache.shardingsphere.encrypt.spring.boot.algorithm.EncryptAlgorithmProvidedBeanRegistry; import org.apache.shardingsphere.encrypt.spring.boot.condition.EncryptSpringBootCondition; import org.apache.shardingsphere.encrypt.spring.boot.rule.YamlEncryptRuleSpringBootConfiguration; import org.apache.shardingsphere.encrypt.yaml.config.YamlEncryptRuleConfiguration; import org.apache.shardingsphere.encrypt.yaml.swapper.EncryptRuleAlgorithmProviderConfigurationYamlSwapper; import org.apache.shardingsphere.infra.config.RuleConfiguration; import org.springframework.beans.factory.ObjectProvider; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; import 
org.springframework.core.env.Environment; import java.util.Collections; import java.util.Map; import java.util.Optional; /** * Encrypt rule configuration for spring boot. */ @Configuration @EnableConfigurationProperties(YamlEncryptRuleSpringBootConfiguration.class) @ConditionalOnClass(YamlEncryptRuleConfiguration.class) @Conditional(EncryptSpringBootCondition.class) @RequiredArgsConstructor public class EncryptRuleSpringBootConfiguration { private final EncryptRuleAlgorithmProviderConfigurationYamlSwapper swapper = new EncryptRuleAlgorithmProviderConfigurationYamlSwapper(); private final YamlEncryptRuleSpringBootConfiguration yamlConfig; /** * Encrypt rule configuration for spring boot. * * @param encryptors encryptors algorithm to map * @return encrypt rule configuration */ @Bean public RuleConfiguration encryptRuleConfiguration(final ObjectProvider<Map<String, EncryptAlgorithm>> encryptors) { AlgorithmProvidedEncryptRuleConfiguration result = swapper.swapToObject(yamlConfig.getEncrypt()); result.setEncryptors(Optional.ofNullable(encryptors.getIfAvailable()).orElse(Collections.emptyMap())); result.setQueryWithCipherColumn(yamlConfig.getEncrypt().isQueryWithCipherColumn()); return result; } /** * Encrypt algorithm provided bean registry. * * @param environment environment * @return encrypt algorithm provided bean registry */ @Bean public static EncryptAlgorithmProvidedBeanRegistry encryptAlgorithmProvidedBeanRegistry(final Environment environment) { return new EncryptAlgorithmProvidedBeanRegistry(environment); } }
1,077
1,139
package com.journaldev.androidretrofitcalleveryxsecond;

import com.google.gson.annotations.SerializedName;

import java.util.List;

/**
 * Gson-mapped model for a single joke entry returned by the remote API.
 *
 * <p>Fields are public and written directly by Gson during deserialization;
 * no getters/setters are provided.
 * NOTE(review): {@code java.util.List} is imported but unused — presumably
 * left over from an earlier list-shaped response; confirm before removing.
 */
public class Jokes {

    // Canonical URL of the joke entry.
    @SerializedName("url")
    public String url;

    // URL of the icon image associated with the joke.
    @SerializedName("icon_url")
    public String icon_url;

    // The joke text itself.
    @SerializedName("value")
    public String value;
}
116
407
package com.alibaba.tesla.appmanager.server.action;

import com.alibaba.tesla.appmanager.server.repository.domain.DeployComponentDO;

import java.util.Map;

/**
 * Action interface for handling a state of a component deployment order.
 *
 * <p>Each implementation encapsulates the processing logic for one state in the
 * component deployment workflow.
 *
 * @author <EMAIL>
 */
public interface DeployComponentStateAction {

    /**
     * Runs the state-specific processing logic.
     *
     * @param subOrder component deployment order (sub-order of an application deployment)
     * @param attrMap  attribute map carrying auxiliary key/value data for this order
     */
    void run(DeployComponentDO subOrder, Map<String, String> attrMap);
}
248
2,205
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.smartloli.kafka.eagle.core.factory;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.smartloli.kafka.eagle.common.constant.JmxConstants.BrokerServer;
import org.smartloli.kafka.eagle.common.protocol.MBeanInfo;
import org.smartloli.kafka.eagle.common.util.JMXFactoryUtils;
import org.smartloli.kafka.eagle.common.util.KConstants.MBean;
import org.smartloli.kafka.eagle.common.util.SystemConfigUtils;

import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXServiceURL;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
 * Implements Mx4jService all method.
 *
 * <p>Every metric accessor builds an MBean name (optionally suffixed with a
 * topic) and delegates to {@link #common(String, String, String)}, which opens
 * a JMX connection to the broker, reads the rate attributes and closes the
 * connection again.
 *
 * @author smartloli.
 *
 *         Created by Jul 14, 2017
 *
 *         Update by smartloli Sep 12, 2021
 *         Settings prefixed with 'kafka.eagle.' will be deprecated, use 'efak.' instead.
 */
public class Mx4jServiceImpl implements Mx4jService {

    private Logger LOG = LoggerFactory.getLogger(Mx4jServiceImpl.class);

    // Suffix appended to an MBean name to scope a metric to a single topic.
    private static final String TOPIC_CONCAT_CHARACTER = ",topic=";

    /** Get brokers all topics bytes in per sec. */
    @Override
    public MBeanInfo bytesInPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.BYTES_IN_PER_SEC.getValue());
    }

    /** Get brokers bytes in per sec by topic. */
    @Override
    public MBeanInfo bytesInPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.BYTES_IN_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers all topics bytes out per sec. */
    @Override
    public MBeanInfo bytesOutPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.BYTES_OUT_PER_SEC.getValue());
    }

    /** Get brokers bytes out per sec by topic. */
    @Override
    public MBeanInfo bytesOutPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.BYTES_OUT_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers all topics byte rejected per sec. */
    @Override
    public MBeanInfo bytesRejectedPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.BYTES_REJECTED_PER_SEC.getValue());
    }

    /** Get brokers byte rejected per sec by topic. */
    @Override
    public MBeanInfo bytesRejectedPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.BYTES_REJECTED_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers all topic failed fetch request per sec. */
    @Override
    public MBeanInfo failedFetchRequestsPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.FAILED_FETCH_REQUESTS_PER_SEC.getValue());
    }

    /** Get brokers failed fetch request per sec by topic. */
    @Override
    public MBeanInfo failedFetchRequestsPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.FAILED_FETCH_REQUESTS_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers all topics failed fetch produce request per sec. */
    @Override
    public MBeanInfo failedProduceRequestsPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.FAILED_PRODUCE_REQUESTS_PER_SEC.getValue());
    }

    /** Get brokers failed fetch produce request per sec by topic. */
    @Override
    public MBeanInfo failedProduceRequestsPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.FAILED_PRODUCE_REQUESTS_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers topic all partitions log end offset. */
    @Override
    public Map<Integer, Long> logEndOffset(String clusterAlias, String uri, String topic) {
        String mbean = "kafka.log:type=Log,name=LogEndOffset,topic=" + topic + ",partition=*";
        JMXConnector connector = null;
        Map<Integer, Long> endOffsets = new HashMap<>();
        try {
            JMXServiceURL jmxServiceUrl = new JMXServiceURL(String.format(SystemConfigUtils.getProperty(clusterAlias + ".efak.jmx.uri"), uri));
            connector = JMXFactoryUtils.connectWithTimeout(clusterAlias, jmxServiceUrl, 30, TimeUnit.SECONDS);
            MBeanServerConnection mbeanConnection = connector.getMBeanServerConnection();
            Set<ObjectName> objectNames = mbeanConnection.queryNames(new ObjectName(mbean), null);
            for (ObjectName objectName : objectNames) {
                int partition = Integer.valueOf(objectName.getKeyProperty("partition"));
                // BUGFIX: read the attribute from the concrete ObjectName returned by
                // queryNames(). The original passed the wildcard pattern ("partition=*")
                // to getAttribute(), which cannot resolve a pattern name, so every
                // lookup failed and the returned map was always empty.
                Object value = mbeanConnection.getAttribute(objectName, MBean.VALUE);
                if (value != null) {
                    endOffsets.put(partition, Long.valueOf(value.toString()));
                }
            }
        } catch (Exception e) {
            LOG.error("JMX service url[" + uri + "] create has error,msg is ", e);
        } finally {
            if (connector != null) {
                try {
                    connector.close();
                } catch (Exception e) {
                    LOG.error("Close JMXConnector[" + uri + "] has error,msg is ", e);
                }
            }
        }
        return endOffsets;
    }

    /** Get brokers all topics message in per sec. */
    @Override
    public MBeanInfo messagesInPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.MESSAGES_IN_PER_SEC.getValue());
    }

    /** Get brokers message in per sec by topic. */
    @Override
    public MBeanInfo messagesInPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.MESSAGES_IN_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers all topics produce message conversions per sec. */
    @Override
    public MBeanInfo produceMessageConversionsPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.PRODUCE_MESSAGE_CONVERSIONS_PER_SEC.getValue());
    }

    /** Get brokers produce message conversions per sec by topic. */
    @Override
    public MBeanInfo produceMessageConversionsPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.PRODUCE_MESSAGE_CONVERSIONS_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers all topics total fetch requests per sec. */
    @Override
    public MBeanInfo totalFetchRequestsPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.TOTAL_FETCH_REQUESTS_PER_SEC.getValue());
    }

    /** Get brokers total fetch requests per sec by topic. */
    @Override
    public MBeanInfo totalFetchRequestsPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.TOTAL_FETCH_REQUESTS_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers all topics total produce requests per sec. */
    @Override
    public MBeanInfo totalProduceRequestsPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.TOTAL_PRODUCE_REQUESTS_PER_SEC.getValue());
    }

    /** Get brokers total produce requests per sec by topic. */
    @Override
    public MBeanInfo totalProduceRequestsPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.TOTAL_PRODUCE_REQUESTS_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers replication bytes in per sec. */
    @Override
    public MBeanInfo replicationBytesInPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.REPLICATION_BYTES_IN_PER_SEC.getValue());
    }

    /** Get brokers replication bytes in per sec by topic. */
    @Override
    public MBeanInfo replicationBytesInPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.REPLICATION_BYTES_IN_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /** Get brokers replication bytes out per sec. */
    @Override
    public MBeanInfo replicationBytesOutPerSec(String clusterAlias, String uri) {
        return common(clusterAlias, uri, BrokerServer.REPLICATION_BYTES_OUT_PER_SEC.getValue());
    }

    /** Get brokers replication bytes out per sec by topic. */
    @Override
    public MBeanInfo replicationBytesOutPerSec(String clusterAlias, String uri, String topic) {
        String mbean = BrokerServer.REPLICATION_BYTES_OUT_PER_SEC.getValue() + TOPIC_CONCAT_CHARACTER + topic;
        return common(clusterAlias, uri, mbean);
    }

    /**
     * Before Kafka 0.11.x, some exceptions are thrown, such as
     * <p>ReplicationBytesOutPerSec</p> Exception.
     *
     * <p>Connects to the broker's JMX endpoint, reads the four rate attributes of
     * the given MBean and returns them in an {@link MBeanInfo}. All rates default
     * to "0.0" when the MBean is not registered or any error occurs; the JMX
     * connection is always closed.
     *
     * @param uri ip:jmx_port
     */
    private MBeanInfo common(String clusterAlias, String uri, String mbean) {
        JMXConnector connector = null;
        MBeanInfo mbeanInfo = new MBeanInfo();
        try {
            JMXServiceURL jmxServiceUrl = new JMXServiceURL(String.format(SystemConfigUtils.getProperty(clusterAlias + ".efak.jmx.uri"), uri));
            connector = JMXFactoryUtils.connectWithTimeout(clusterAlias, jmxServiceUrl, 30, TimeUnit.SECONDS);
            MBeanServerConnection mbeanConnection = connector.getMBeanServerConnection();
            if (mbeanConnection.isRegistered(new ObjectName(mbean))) {
                Object fifteenMinuteRate = mbeanConnection.getAttribute(new ObjectName(mbean), MBean.FIFTEEN_MINUTE_RATE);
                Object fiveMinuteRate = mbeanConnection.getAttribute(new ObjectName(mbean), MBean.FIVE_MINUTE_RATE);
                Object meanRate = mbeanConnection.getAttribute(new ObjectName(mbean), MBean.MEAN_RATE);
                Object oneMinuteRate = mbeanConnection.getAttribute(new ObjectName(mbean), MBean.ONE_MINUTE_RATE);
                mbeanInfo.setFifteenMinute(fifteenMinuteRate.toString());
                mbeanInfo.setFiveMinute(fiveMinuteRate.toString());
                mbeanInfo.setMeanRate(meanRate.toString());
                mbeanInfo.setOneMinute(oneMinuteRate.toString());
            } else {
                mbeanInfo.setFifteenMinute("0.0");
                mbeanInfo.setFiveMinute("0.0");
                mbeanInfo.setMeanRate("0.0");
                mbeanInfo.setOneMinute("0.0");
            }
        } catch (Exception e) {
            LOG.error("JMX service url[" + uri + "] create has error,msg is ", e);
            mbeanInfo.setFifteenMinute("0.0");
            mbeanInfo.setFiveMinute("0.0");
            mbeanInfo.setMeanRate("0.0");
            mbeanInfo.setOneMinute("0.0");
        } finally {
            if (connector != null) {
                try {
                    connector.close();
                } catch (Exception e) {
                    LOG.error("Close JMXConnector[" + uri + "] has error,msg is ", e);
                }
            }
        }
        return mbeanInfo;
    }
}
4,705
678
/**
 * This header is generated by class-dump-z 0.2b.
 *
 * Source: /System/Library/PrivateFrameworks/CalDAV.framework/CalDAV
 */

#import <CalDAV/CalDAVCalendar.h>

@class NSURL;

// Protocol extending CalDAVCalendar with subscription-specific properties:
// the remote subscription URL, content filters (alarms/attachments/tasks),
// and a refresh interval. NOTE(review): generated from a binary, so the
// semantics below are inferred from names only — verify against callers.
@protocol CalDAVSubscribedCalendar <CalDAVCalendar>
// Remote URL this calendar is subscribed to.
@property(retain) NSURL *subscriptionURL;
// Whether alarms are filtered out of the subscribed content.
@property(assign) BOOL hasAlarmFilter;
// Whether attachments are filtered out of the subscribed content.
@property(assign) BOOL hasAttachmentFilter;
// Whether tasks (VTODOs) are filtered out of the subscribed content.
@property(assign) BOOL hasTaskFilter;
// Refresh interval, presumably in seconds — confirm against framework usage.
@property(assign) double refreshInterval;
// declared property getter:
- (double)refreshInterval;
// declared property setter:
- (void)setRefreshInterval:(double)interval;
// declared property getter:
- (BOOL)hasTaskFilter;
// declared property setter:
- (void)setHasTaskFilter:(BOOL)filter;
// declared property getter:
- (BOOL)hasAttachmentFilter;
// declared property setter:
- (void)setHasAttachmentFilter:(BOOL)filter;
// declared property getter:
- (BOOL)hasAlarmFilter;
// declared property setter:
- (void)setHasAlarmFilter:(BOOL)filter;
// declared property getter:
- (id)subscriptionURL;
// declared property setter:
- (void)setSubscriptionURL:(id)url;
@end
345
10,225
package io.quarkus.vault;

/**
 * Allows obtaining PKI engines for specific mount paths.
 *
 * <p>Use this factory when the PKI secret engine is mounted somewhere other
 * than the default path; each call returns an engine bound to one mount.
 *
 * @see VaultPKISecretEngine
 */
public interface VaultPKISecretEngineFactory {

    /**
     * Get a PKI engine for a specific mount.
     *
     * @param mount Engine mount path, relative to the Vault root (e.g. {@code "pki"}).
     *
     * @return PKI engine interface bound to the given mount.
     */
    VaultPKISecretEngine engine(String mount);
}
134
721
package crazypants.enderio.base.integration.te; import javax.annotation.Nonnull; import crazypants.enderio.base.EnderIO; import crazypants.enderio.base.Log; import crazypants.enderio.base.events.EnderIOLifecycleEvent; import crazypants.enderio.base.farming.FarmersRegistry; import net.minecraftforge.fml.common.Loader; import net.minecraftforge.fml.common.Mod.EventBusSubscriber; import net.minecraftforge.fml.common.event.FMLPostInitializationEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; @EventBusSubscriber(modid = EnderIO.MODID) public class TEUtil { public static void init(@Nonnull FMLPostInitializationEvent event) { if (Loader.isModLoaded("cofhcore")) { // Add support for TE wrench try { Class.forName("crazypants.enderio.base.integration.te.TEToolProvider").newInstance(); } catch (Exception e) { Log.warn("Could not find Thermal Expansion Wrench definition. Wrench integration with it may fail"); } } } @SubscribeEvent public static void registerHoes(@Nonnull EnderIOLifecycleEvent.Init.Pre event) { FarmersRegistry.registerHoes("thermalfoundation", "tool.hoe_invar", "tool.hoe_copper", "tool.hoe_bronze", "tool.hoe_silver", "tool.hoe_electrum", "tool.hoe_tin", "tool.hoe_lead", "tool.hoe_nickel", "tool.hoe_platinum", "tool.hoe_aluminum", "tool.hoe_steel", "tool.hoe_constantan"); } }
497
32,544
<reponame>DBatOWL/tutorials package com.baeldung.reactorbus.service.impl; import org.springframework.stereotype.Service; import com.baeldung.reactorbus.domain.NotificationData; import com.baeldung.reactorbus.service.NotificationService; @Service public class NotificationServiceimpl implements NotificationService { @Override public void initiateNotification(NotificationData notificationData) throws InterruptedException { System.out.println("Notification service started for Notification ID: " + notificationData.getId()); Thread.sleep(5000); System.out.println("Notification service ended for Notification ID: " + notificationData.getId()); } }
207
3,102
// RUN: rm -rf %t // RUN: not %clang_cc1 -x c++ -Rmodule-build -DMISSING_HEADER -fmodules -fimplicit-module-maps -fmodules-cache-path=%t -I %S/Inputs/auto-import-unavailable %s 2>&1 | FileCheck %s --check-prefix=MISSING-HEADER // RUN: %clang_cc1 -x c++ -Rmodule-build -DNONREQUIRED_MISSING_HEADER -fmodules -fimplicit-module-maps -fmodules-cache-path=%t -I %S/Inputs/auto-import-unavailable %s 2>&1 | FileCheck %s --check-prefix=NONREQUIRED-MISSING-HEADER // RUN: not %clang_cc1 -x c++ -Rmodule-build -DMISSING_REQUIREMENT -fmodules -fimplicit-module-maps -fmodules-cache-path=%t -I %S/Inputs/auto-import-unavailable %s 2>&1 | FileCheck %s --check-prefix=MISSING-REQUIREMENT #ifdef MISSING_HEADER // Even if the header we ask for is not missing, if the top-level module // containing it has a missing header, then the whole top-level is // unavailable and we issue an error. // MISSING-HEADER: module.modulemap:2:27: error: header 'missing_header/missing.h' not found // MISSING-HEADER-DAG: auto-import-unavailable.cpp:[[@LINE+1]]:10: note: submodule of top-level module 'missing_header' implicitly imported here #include "missing_header/not_missing.h" // We should not attempt to build the module. // MISSING-HEADER-NOT: remark: building module #endif // #ifdef MISSING_HEADER #ifdef NONREQUIRED_MISSING_HEADER // However, if the missing header is dominated by an unsatisfied // `requires`, then that is acceptable. // This also tests that an unsatisfied `requires` elsewhere in the // top-level module doesn't affect an available module. 
// NONREQUIRED-MISSING-HEADER: auto-import-unavailable.cpp:[[@LINE+2]]:10: remark: building module 'nonrequired_missing_header' // NONREQUIRED-MISSING-HEADER: auto-import-unavailable.cpp:[[@LINE+1]]:10: remark: finished building module 'nonrequired_missing_header' #include "nonrequired_missing_header/not_missing.h" #endif // #ifdef NONREQUIRED_MISSING_HEADER #ifdef MISSING_REQUIREMENT // If the header is unavailable due to a missing requirement, an error // should be emitted if a user tries to include it. // MISSING-REQUIREMENT:module.modulemap:16:8: error: module 'missing_requirement' requires feature 'nonexistent_feature' // MISSING-REQUIREMENT: auto-import-unavailable.cpp:[[@LINE+1]]:10: note: submodule of top-level module 'missing_requirement' implicitly imported here #include "missing_requirement.h" // MISSING-REQUIREMENT-NOT: remark: building module #endif // #ifdef MISSING_REQUIREMENT
818
1,091
/*
 * Copyright 2018-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.incubator.net.virtual.rest;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.onlab.util.ItemNotFoundException;
import org.onosproject.net.TenantId;
import org.onosproject.incubator.net.virtual.VirtualNetworkAdminService;
import org.onosproject.rest.AbstractWebResource;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.io.InputStream;

import static org.onlab.util.Tools.readTreeFromStream;

/**
 * Query and manage tenants of virtual networks.
 */
@Path("tenants")
public class TenantWebResource extends AbstractWebResource {

    private static final String MISSING_TENANTID = "Missing tenant identifier";
    private static final String TENANTID_NOT_FOUND = "Tenant identifier not found";
    private static final String INVALID_TENANTID = "Invalid tenant identifier ";

    @Context
    private UriInfo uriInfo;

    private final VirtualNetworkAdminService vnetAdminService = get(VirtualNetworkAdminService.class);

    /**
     * Returns all tenant identifiers.
     *
     * @return 200 OK with set of tenant identifiers
     * @onos.rsModel TenantIds
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response getVirtualNetworkTenantIds() {
        Iterable<TenantId> tenantIds = vnetAdminService.getTenantIds();
        return ok(encodeArray(TenantId.class, "tenants", tenantIds)).build();
    }

    /**
     * Creates a tenant with the given tenant identifier.
     *
     * @param stream TenantId JSON stream
     * @return status of the request - CREATED if the JSON is correct,
     * BAD_REQUEST if the JSON is invalid
     * @onos.rsModel TenantId
     */
    @POST
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response addTenantId(InputStream stream) {
        try {
            final TenantId tid = getTenantIdFromJsonStream(stream);
            vnetAdminService.registerTenantId(tid);
            // Re-read the id from the service to confirm registration succeeded.
            final TenantId resultTid = getExistingTenantId(vnetAdminService, tid);
            UriBuilder locationBuilder = uriInfo.getBaseUriBuilder()
                    .path("tenants")
                    .path(resultTid.id());
            return Response
                    .created(locationBuilder.build())
                    .build();
        } catch (IOException e) {
            // Malformed JSON surfaces as 400 via the IllegalArgumentException mapper.
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Removes the specified tenant with the specified tenant identifier.
     *
     * @param tenantId tenant identifier
     * @return 204 NO CONTENT
     */
    @DELETE
    @Path("{tenantId}")
    public Response removeTenantId(@PathParam("tenantId") String tenantId) {
        final TenantId tid = TenantId.tenantId(tenantId);
        final TenantId existingTid = getExistingTenantId(vnetAdminService, tid);
        vnetAdminService.unregisterTenantId(existingTid);
        return Response.noContent().build();
    }

    /**
     * Gets the tenant identifier from the JSON stream.
     *
     * @param stream TenantId JSON stream
     * @return TenantId
     * @throws IOException if unable to parse the request
     */
    private TenantId getTenantIdFromJsonStream(InputStream stream) throws IOException {
        ObjectNode jsonTree = readTreeFromStream(mapper(), stream);
        JsonNode specifiedTenantId = jsonTree.get("id");

        if (specifiedTenantId == null) {
            throw new IllegalArgumentException(MISSING_TENANTID);
        }
        return TenantId.tenantId(specifiedTenantId.asText());
    }

    /**
     * Get the matching tenant identifier from existing tenant identifiers in system.
     *
     * @param vnetAdminSvc virtual network administration service
     * @param tidIn        tenant identifier
     * @return TenantId
     */
    protected static TenantId getExistingTenantId(VirtualNetworkAdminService vnetAdminSvc,
                                                  TenantId tidIn) {
        return vnetAdminSvc
                .getTenantIds()
                .stream()
                .filter(tenantId -> tenantId.equals(tidIn))
                .findFirst()
                .orElseThrow(() -> new ItemNotFoundException(TENANTID_NOT_FOUND));
    }
}
2,025
1,253
// C++ program to rotate a linked list counter clock wise
#include <bits/stdc++.h>
using namespace std;

/* Link list node */
class Node {
public:
    int data;
    Node* next;
};

// This function rotates a linked list counter-clockwise and updates the head.
// The function assumes that k is smaller than size of linked list.
// It doesn't modify the list if k is greater than or equal to size.
void rotate(Node** head_ref, int k)
{
    if (k == 0)
        return;

    Node* current = *head_ref;

    // current will either point to kth or NULL after this loop.
    int count = 1;
    while (count < k && current != NULL) {
        current = current->next;
        count++;
    }

    // If current is NULL, k is greater than or equal to count of nodes in
    // linked list. Don't change the list in this case.
    if (current == NULL)
        return;

    // current points to kth node. Store it in a variable.
    Node* kthNode = current;

    // current will point to last node after this loop.
    while (current->next != NULL)
        current = current->next;

    // Change next of last node to previous head.
    current->next = *head_ref;

    // Change head to (k+1)th node.
    *head_ref = kthNode->next;

    // Change next of kth node to NULL.
    kthNode->next = NULL;
}

/* UTILITY FUNCTIONS */
/* Function to push a node */
void push(Node** head_ref, int new_data)
{
    /* allocate node */
    Node* new_node = new Node();

    /* put in the data */
    new_node->data = new_data;

    /* link the old list off the new node */
    new_node->next = (*head_ref);

    /* move the head to point to the new node */
    (*head_ref) = new_node;
}

/* Function to print linked list */
void printList(Node* node)
{
    while (node != NULL) {
        cout << node->data << " ";
        node = node->next;
    }
}

/* Function to free all nodes of a linked list.
   FIX: the original program leaked every node allocated by push(). */
void freeList(Node* node)
{
    while (node != NULL) {
        Node* next = node->next;
        delete node;
        node = next;
    }
}

/* Driver code*/
int main()
{
    /* Start with the empty list */
    Node* start = NULL;

    int cnt;
    cout << "Enter number of nodes: ";
    cin >> cnt;
    cout << endl;

    for (int i = 0; i < cnt; i++) {
        cout << "Enter node" << (i + 1) << ": ";
        int data;
        cin >> data;
        push(&start, data);
    }
    cout << endl;

    cout << "Linked list before rotation:";
    printList(start);
    cout << endl;

    cout << "Enter position from where to rotate: ";
    int pos;
    cin >> pos;

    rotate(&start, pos);
    cout << endl;

    cout << "\nLinked list after rotation: ";
    printList(start);
    cout << endl;

    // Release all nodes before exit (memory-leak fix).
    freeList(start);

    return 0;
}

// This is code is contributed by bhanupsingh10
975
321
import multiprocessing
import multiprocessing.dummy as threading
import sys
import time
from functools import wraps

from retry import retry
from six import iteritems


def run_multiprocessing(func, params: list, max_worker: int=None, join=True):
    """Run ``func`` over ``params`` with a process pool.

    Tuple elements are unpacked as positional arguments (``starmap``);
    anything else is passed as a single argument (``map``).
    """
    # Guard BEFORE creating the pool: the original created the pool first and
    # returned without closing it, leaking worker processes on empty input.
    if not params:
        return
    pool = multiprocessing.Pool(max_worker)
    if isinstance(params[0], tuple):
        pool.starmap(func, params)
    else:
        pool.map(func, params)
    pool.close()
    if join:
        pool.join()


def run_multithreading(func, params: list, max_worker: int=None, join=True):
    """Run ``func`` over ``params`` with a thread pool (same contract as
    :func:`run_multiprocessing`)."""
    # FIX: the original indexed params[0] unconditionally and raised
    # IndexError on an empty list, unlike its multiprocessing twin.
    if not params:
        return
    pool = threading.Pool(max_worker)
    if isinstance(params[0], tuple):
        pool.starmap(func, params)
    else:
        pool.map(func, params)
    pool.close()
    if join:
        pool.join()


def show_process(finished_len: int, total_len: int):
    """Render a one-line textual progress bar on stdout (carriage-return
    overwrite, no newline)."""
    i = int(finished_len/total_len*100)
    k = i + 1
    output = '>'*(i//2)+' '*((100-k)//2)
    sys.stdout.write('\r'+output+'[%s%%]' % (i+1))
    sys.stdout.flush()


def dict_to_table(dict_data):
    """Pretty-print a dict as a two-column ASCII table.

    NOTE: mutates ``dict_data`` in place (non-string values are converted to
    str) and raises ValueError on an empty dict.
    """
    for key, value in dict_data.items():
        if not isinstance(value, str):
            dict_data[key] = str(value)
    max_key_len = max([len(i) for i in dict_data])+1
    max_value_len = max([len(i) for i in dict_data.values()])+1
    total_len = max_key_len+max_value_len+3
    print(f'+{total_len*"-"}+')
    for key, value in dict_data.items():
        print(f'|{key.ljust(max_key_len)} | {value.rjust(max_value_len)}|')
    print(f'+{total_len*"-"}+')


def run_fuction(*funcs):
    """Run each zero-argument callable in turn, printing a banner and the
    elapsed wall-clock time for each."""
    for func in funcs:
        print(20*'=', f'Function: {func.__name__}', 20*'=')
        begin = time.time()
        func()
        end = time.time()
        print(f'Finished in {end-begin:.2f}s')


# Backward-compatible, correctly-spelled alias for run_fuction (typo kept for
# existing callers).
run_function = run_fuction


def handle_error(tries=20, delay=0.01):
    """Decorator factory: retry the wrapped function up to ``tries`` times
    with ``delay`` seconds between attempts, logging each failure."""
    def decorate(func):
        @wraps(func)
        @retry(tries=tries, delay=delay)
        def wrapper(*args, **kwargs):
            try:
                func(*args, **kwargs)
            except Exception as error:
                print(f'Raise an error: {error}')
                print(f'Start to retry!!!!!')
                # FIX: re-raise the ORIGINAL exception instead of a bare
                # `raise Exception`, which discarded the cause and traceback.
                raise
        return wrapper
    return decorate


if __name__ == "__main__":
    # Functions below are just for test.
    def just_sleep(index, test_list):
        i = len(test_list)
        time.sleep(0.01)
        show_process(i, 200)
        test_list.append(index)

    def test_multithreading():
        print('Start test MultiThreading')
        finished_list = multiprocessing.Manager().list()
        run_multithreading(
            just_sleep, [(i, finished_list) for i in range(200)], 2)

    def test_multiprocessing():
        print('Start test MultiProcessing')
        finished_list = multiprocessing.Manager().list()
        run_multiprocessing(
            just_sleep, [(i, finished_list) for i in range(200)], 2)

    # run_fuction(test_multithreading, test_multiprocessing)
    raw = [i for i in range(100)]
    finish = []
    for i in raw:
        time.sleep(0.1)
        finish.append(i)
        show_process(len(finish), len(raw))
1,412
432
# -*- coding: utf-8 -*-
"""Top-level package for Python API and CLI for Nessie."""
import os

import confuse

from .conf import build_config
from .nessie_client import NessieClient

__author__ = """Project Nessie"""
__email__ = "<EMAIL>"
__version__ = "0.10.2"


def get_config(config_dir: str = None, args: dict = None) -> confuse.Configuration:
    """Retrieve a confuse Configuration object."""
    # Point confuse at an explicit config directory when one is supplied.
    if config_dir:
        os.environ["NESSIE_CLIENTDIR"] = config_dir
    configuration = build_config(args)
    return configuration


def init(config_dir: str = None, config_dict: dict = None) -> NessieClient:
    """Create a new Nessie client object.

    :param config_dir: optional directory to look for config in
    :param config_dict: dictionary of extra config arguments
    :return: either a simple or rich client
    :example:
    >>> client = init('/my/config/dir')
    """
    extra_args = {} if config_dict is None else config_dict
    return _connect(get_config(config_dir, args=extra_args))


def _connect(config: confuse.Configuration) -> NessieClient:
    """Build a NessieClient from an already-resolved configuration."""
    return NessieClient(config)
367
1,039
import time

from walrus.rate_limit import RateLimitException
from walrus.tests.base import WalrusTestCase
from walrus.tests.base import db


class TestRateLimit(WalrusTestCase):
    """Tests for the sliding-window rate limiter: basic limiting, window
    rollover via hand-edited timestamps, and the decorator API."""

    def setUp(self):
        super(TestRateLimit, self).setUp()
        # Limit to 5 events per second.
        self.rl = db.rate_limit('test-rl', 5, 1)

    def test_rate_limit(self):
        # First 5 events are allowed (limit() returns False = not limited).
        for i in range(5):
            self.assertFalse(self.rl.limit('k1'))

        # Events 6-8 for the same key are rejected.
        for i in range(3):
            self.assertTrue(self.rl.limit('k1'))

        # A different key has its own independent budget.
        self.assertFalse(self.rl.limit('k2'))

    def test_rate_limit_rollover(self):
        # 3 events per 100-second window. The backing store is a Redis list
        # of event timestamps, which we manipulate directly below.
        rl = db.rate_limit('test-rl2', 3, 100)
        container = db.List('test-rl2:k1')
        now = time.time()
        past = now - 101

        # Simulate two events.
        container.extend([now, now])

        # Third event goes through OK.
        self.assertFalse(rl.limit('k1'))

        # Fourth event is rate-limited.
        self.assertTrue(rl.limit('k1'))

        # There are three timestamps in the container.
        self.assertEqual(len(container), 3)

        # Hand modify the oldest timestamp to appear as if it happened over
        # 100 seconds ago.
        container[-1] = past

        # We can again perform an action.
        self.assertFalse(rl.limit('k1'))

        # We once again have 3 items all within the last 100 seconds, so we
        # are rate-limited.
        self.assertTrue(rl.limit('k1'))

        # There are only 3 items in the container.
        self.assertEqual(len(container), 3)

        # The oldest item is the 2nd we added at the beginning of the test.
        self.assertEqual(float(container[-1]), now)

        # Remove an item and make the 2nd timestamp (oldest) in the past. This
        # gives us 2 actions.
        container.popright()
        container[-1] = past

        self.assertFalse(rl.limit('k1'))
        self.assertFalse(rl.limit('k1'))
        self.assertTrue(rl.limit('k1'))

    def test_decorator(self):
        # Same window parameters as above, but exercised through the
        # rate_limited() decorator, which raises instead of returning a flag.
        rl = db.rate_limit('test-rl2', 3, 100)
        container = db.List('test-rl2:fake-key')

        # All calls share one bucket regardless of arguments.
        def key_fn(*args, **kwargs):
            return 'fake-key'

        @rl.rate_limited(key_function=key_fn)
        def do_test():
            return 'OK'

        now = time.time()
        # Pre-load two events so the third call exhausts the budget.
        container.extend([now, now])
        self.assertEqual(do_test(), 'OK')
        self.assertRaises(RateLimitException, do_test)

        # Age out the oldest event; two more calls fit before limiting again.
        container.popright()
        container[-1] = now - 101
        self.assertEqual(do_test(), 'OK')
        self.assertEqual(do_test(), 'OK')
        self.assertRaises(RateLimitException, do_test)
1,158
938
{
  "parent": "minecraft:block/orientable",
  "textures": {
    "top": "tconstruct:block/foundry/scorched/bricks",
    "front": "tconstruct:block/foundry/controller/foundry_unformed",
    "side": "tconstruct:block/foundry/scorched/bricks"
  }
}
114
3,172
#   Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import gym
import numpy as np
from parl.utils import logger

from Environment.base_env import Environment
from utilize.settings import settings
from utilize.form_action import *


class MaxTimestepWrapper(gym.Wrapper):
    """Terminates an episode after ``max_timestep`` steps, flagging the
    truncation via ``info["timeout"]``."""

    def __init__(self, env, max_timestep=288):
        logger.info("[env type]:{}".format(type(env)))
        self.max_timestep = max_timestep
        # The wrapped env does not define these gym attributes; null them out
        # so gym.Wrapper's attribute forwarding does not fail.
        env.observation_space = None
        env.reward_range = None
        env.metadata = None
        gym.Wrapper.__init__(self, env)
        self.timestep = 0

    def step(self, action, **kwargs):
        """Step the env; force done=True once the step budget is exhausted."""
        self.timestep += 1
        obs, reward, done, info = self.env.step(action, **kwargs)
        if self.timestep >= self.max_timestep:
            done = True
            info["timeout"] = True
        else:
            info["timeout"] = False
        return obs, reward, done, info

    def reset(self, **kwargs):
        self.timestep = 0
        return self.env.reset(**kwargs)


class ObsTransformerWrapper(gym.Wrapper):
    """Flattens the structured grid observation into a single 1-D feature
    vector (loads, generation, line load rates, next-step load forecast, and
    the clipped generator action bounds)."""

    def __init__(self, env):
        logger.info("[env type]:{}".format(type(env)))
        gym.Wrapper.__init__(self, env)

    def _get_obs(self, obs):
        # loads: active power, reactive power, voltage.
        loads = []
        loads.append(obs.load_p)
        loads.append(obs.load_q)
        loads.append(obs.load_v)
        loads = np.concatenate(loads)

        # prods: generator active power, reactive power, voltage.
        prods = []
        prods.append(obs.gen_p)
        prods.append(obs.gen_q)
        prods.append(obs.gen_v)
        prods = np.concatenate(prods)

        # rho: line load rate, shifted so 0.0 means exactly at capacity.
        rho = np.array(obs.rho) - 1.0

        next_load = obs.nextstep_load_p

        # action_space bounds for adjust_gen_p; the balanced (slack) generator
        # is pinned to zero because the agent must not adjust it.
        action_space_low = obs.action_space['adjust_gen_p'].low.tolist()
        action_space_high = obs.action_space['adjust_gen_p'].high.tolist()
        action_space_low[settings.balanced_id] = 0.0
        action_space_high[settings.balanced_id] = 0.0

        features = np.concatenate([
            loads, prods,
            rho.tolist(), next_load, action_space_low, action_space_high
        ])
        return features

    def step(self, action, **kwargs):
        # Keep the raw observation around: ActionWrapper reads
        # self.raw_obs.action_space to rescale actions.
        self.raw_obs, reward, done, info = self.env.step(action, **kwargs)
        obs = self._get_obs(self.raw_obs)
        return obs, reward, done, info

    def reset(self, **kwargs):
        self.raw_obs = self.env.reset(**kwargs)
        obs = self._get_obs(self.raw_obs)
        return obs


class RewardShapingWrapper(gym.Wrapper):
    """Replaces the environment reward with a constant survival bonus (1.0
    per step); the original reward is preserved in info["origin_reward"]."""

    def __init__(self, env):
        logger.info("[env type]:{}".format(type(env)))
        gym.Wrapper.__init__(self, env)

    def step(self, action, **kwargs):
        obs, reward, done, info = self.env.step(action, **kwargs)
        shaping_reward = 1.0
        info["origin_reward"] = reward
        return obs, shaping_reward, done, info

    def reset(self, **kwargs):
        return self.env.reset(**kwargs)


class ActionWrapper(gym.Wrapper):
    """Maps a normalized agent action in [-1, 1] to the environment's
    per-step adjust_gen_p bounds; voltage adjustments are fixed at zero."""

    def __init__(self, env, raw_env):
        logger.info("[env type]:{}".format(type(env)))
        gym.Wrapper.__init__(self, env)
        self.raw_env = raw_env
        # Voltage action is always all-zeros in this setup.
        self.v_action = np.zeros(self.raw_env.settings.num_gen)

    def step(self, action, **kwargs):
        # Linearly rescale each component from [-1, 1] into the current
        # [low, high] bounds. (Removed the original's unused `N = len(action)`.)
        gen_p_action_space = self.env.raw_obs.action_space['adjust_gen_p']

        low_bound = gen_p_action_space.low
        high_bound = gen_p_action_space.high

        mapped_action = low_bound + (action - (-1.0)) * (
            (high_bound - low_bound) / 2.0)
        # The balanced (slack) generator must not be adjusted by the agent.
        mapped_action[self.raw_env.settings.balanced_id] = 0.0
        mapped_action = np.clip(mapped_action, low_bound, high_bound)
        ret_action = form_action(mapped_action, self.v_action)
        return self.env.step(ret_action, **kwargs)

    def reset(self, **kwargs):
        return self.env.reset(**kwargs)


def get_env():
    """Build the fully wrapped training environment.

    Wrapper order matters: ActionWrapper is outermost (it needs the raw
    observation stored by ObsTransformerWrapper underneath it), and
    MaxTimestepWrapper sits closest to the raw env to count real steps.
    """
    env = Environment(settings, "EPRIReward")
    env.action_space = None
    raw_env = env

    env = MaxTimestepWrapper(env)
    env = RewardShapingWrapper(env)
    env = ObsTransformerWrapper(env)
    env = ActionWrapper(env, raw_env)

    return env
2,009
707
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

#pragma once

#include <functional>
#include <memory>
#include <string>
#include <string_view>
#include <vector>

#include "wpi/SmallVector.h"
#include "wpi/span.h"

namespace wpi {

// Pure-virtual interface that Sendable objects use to publish named
// properties to a dashboard backend. Getters are polled on Update();
// setters are invoked when the backend pushes a new value.
class SendableBuilder {
 public:
  /**
   * The backend kinds used for the sendable builder.
   */
  enum BackendKind { kUnknown, kNetworkTables };

  virtual ~SendableBuilder() = default;

  /**
   * Set the string representation of the named data type that will be used
   * by the smart dashboard for this sendable.
   *
   * @param type data type
   */
  virtual void SetSmartDashboardType(std::string_view type) = 0;

  /**
   * Set a flag indicating if this sendable should be treated as an actuator.
   * By default this flag is false.
   *
   * @param value true if actuator, false if not
   */
  virtual void SetActuator(bool value) = 0;

  /**
   * Set the function that should be called to set the Sendable into a safe
   * state. This is called when entering and exiting Live Window mode.
   *
   * @param func function
   */
  virtual void SetSafeState(std::function<void()> func) = 0;

  /**
   * Add a boolean property.
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddBooleanProperty(std::string_view key,
                                  std::function<bool()> getter,
                                  std::function<void(bool)> setter) = 0;

  /**
   * Add a double property.
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddDoubleProperty(std::string_view key,
                                 std::function<double()> getter,
                                 std::function<void(double)> setter) = 0;

  /**
   * Add a string property.
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddStringProperty(
      std::string_view key, std::function<std::string()> getter,
      std::function<void(std::string_view)> setter) = 0;

  /**
   * Add a boolean array property.
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddBooleanArrayProperty(
      std::string_view key, std::function<std::vector<int>()> getter,
      std::function<void(wpi::span<const int>)> setter) = 0;

  /**
   * Add a double array property.
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddDoubleArrayProperty(
      std::string_view key, std::function<std::vector<double>()> getter,
      std::function<void(wpi::span<const double>)> setter) = 0;

  /**
   * Add a string array property.
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddStringArrayProperty(
      std::string_view key, std::function<std::vector<std::string>()> getter,
      std::function<void(wpi::span<const std::string>)> setter) = 0;

  /**
   * Add a raw property.
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddRawProperty(std::string_view key,
                              std::function<std::string()> getter,
                              std::function<void(std::string_view)> setter) = 0;

  // The "Small" variants below let the getter fill a caller-provided
  // SmallVector buffer instead of allocating a fresh container on every
  // poll.

  /**
   * Add a string property (SmallString form).
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddSmallStringProperty(
      std::string_view key,
      std::function<std::string_view(wpi::SmallVectorImpl<char>& buf)> getter,
      std::function<void(std::string_view)> setter) = 0;

  /**
   * Add a boolean array property (SmallVector form).
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddSmallBooleanArrayProperty(
      std::string_view key,
      std::function<wpi::span<const int>(wpi::SmallVectorImpl<int>& buf)>
          getter,
      std::function<void(wpi::span<const int>)> setter) = 0;

  /**
   * Add a double array property (SmallVector form).
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddSmallDoubleArrayProperty(
      std::string_view key,
      std::function<wpi::span<const double>(wpi::SmallVectorImpl<double>& buf)>
          getter,
      std::function<void(wpi::span<const double>)> setter) = 0;

  /**
   * Add a string array property (SmallVector form).
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddSmallStringArrayProperty(
      std::string_view key,
      std::function<
          wpi::span<const std::string>(wpi::SmallVectorImpl<std::string>& buf)>
          getter,
      std::function<void(wpi::span<const std::string>)> setter) = 0;

  /**
   * Add a raw property (SmallVector form).
   *
   * @param key property name
   * @param getter getter function (returns current value)
   * @param setter setter function (sets new value)
   */
  virtual void AddSmallRawProperty(
      std::string_view key,
      std::function<std::string_view(wpi::SmallVectorImpl<char>& buf)> getter,
      std::function<void(std::string_view)> setter) = 0;

  /**
   * Gets the kind of backend being used.
   *
   * @return Backend kind
   */
  virtual BackendKind GetBackendKind() const = 0;

  /**
   * Return whether this sendable has been published.
   *
   * @return True if it has been published, false if not.
   */
  virtual bool IsPublished() const = 0;

  /**
   * Update the published values by calling the getters for all properties.
   */
  virtual void Update() = 0;

  /**
   * Clear properties.
   */
  virtual void ClearProperties() = 0;
};

}  // namespace wpi
2,446
5,079
# Package version string, exposed as a module-level attribute.
__version__ = '3.3'
10
1,337
/*
 * Copyright 2017 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.haulmont.cuba.web.widgets.client.addons.aceeditor.gwt;

import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;

/**
 * An annotation shown at the Ace editor.
 *
 * GWT overlay type: instances are plain JavaScript objects created via the
 * JSNI factory below, so the class has no Java fields and a protected
 * no-op constructor (required for all JavaScriptObject subclasses).
 */
public class GwtAceAnnotation extends JavaScriptObject {
	protected GwtAceAnnotation() {
	}

	/**
	 * Eg. create("error", "An error on line 5", 4);
	 *
	 * @param type annotation kind understood by Ace (e.g. "error", "warning")
	 * @param text tooltip/message text shown for the annotation
	 * @param row zero-based editor row the annotation is attached to
	 * @return the annotation
	 */
	public static final native GwtAceAnnotation create(String type, String text, int row) /*-{
		// isVaadinAceEditorAnnotation tags objects created by this add-on so
		// they can be told apart from annotations set by Ace itself.
		return {
			text: text,
			row: row,
			type: type,
			isVaadinAceEditorAnnotation: true
		};
	}-*/;

	public static final native JsArray<GwtAceAnnotation> createEmptyArray() /*-{
		return [];
	}-*/;

	public final native String getText() /*-{
		return this.text;
	}-*/;

	public final native int getRow() /*-{
		return this.row;
	}-*/;

	public final native String getType() /*-{
		return this.type;
	}-*/;

	public final native boolean isVaadinAceEditorAnnotation() /*-{
		// Coerce undefined to false for annotations not created by this add-on.
		return !!this.isVaadinAceEditorAnnotation;
	}-*/;
}
546
2,084
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

#ifndef TVM_RUNTIME_HEXAGON_HEXAGON_HEXAGON_BUFFER_H_
#define TVM_RUNTIME_HEXAGON_HEXAGON_HEXAGON_BUFFER_H_

#include <tvm/runtime/c_runtime_api.h>
#include <tvm/runtime/device_api.h>
#include <tvm/runtime/logging.h>
#include <tvm/runtime/ndarray.h>
#include <tvm/runtime/packed_func.h>

#include <memory>
#include <vector>

namespace tvm {
namespace runtime {
namespace hexagon {

// Opaque per-allocation record; defined in the implementation file.
struct Allocation;

// Owns one (1d) or several (2d) raw allocations in a Hexagon-accessible
// memory scope (DDR or VTCM). Non-copyable and non-movable: identity is
// tied to the underlying allocations.
class HexagonBuffer {
 public:
  /* \brief Allocate 1d (contiguous) memory within Hexagon accessible
   * memory scopes.
   *
   * \param nbytes The number of bytes of physical storage
   * to allocate.
   *
   * \param alignment The byte alignment to be used when allocating.
   *
   * \param scope Optional storage scope indicating the memory
   * space in which to allocate. Defaults to global system
   * memory (DDR).
   */
  HexagonBuffer(size_t nbytes, size_t alignment, Optional<String> scope);

  /* \brief Allocate 2d (discontiguous) memory within Hexagon accessible
   * memory scopes.
   *
   * \param nallocs The number of allocations.
   *
   * \param nbytes The number of bytes of physical storage
   * to allocate per allocation.
   *
   * \param alignment The byte alignment to be used when allocating.
   *
   * \param scope Optional storage scope indicating the memory
   * space in which to allocate. Defaults to global system
   * memory (DDR).
   */
  HexagonBuffer(size_t nallocs, size_t nbytes, size_t alignment, Optional<String> scope);

  /* \brief Construct a Hexagon Buffer from an external buffer.
   *
   * \param data The pointer to the external buffer.
   *
   * \param nbytes The size of the external buffer in bytes.
   *
   * \param scope Optional storage scope indicating the memory
   * space in which to allocate. Defaults to global system
   * memory (DDR).
   */
  explicit HexagonBuffer(void* data, size_t nbytes, Optional<String> scope);

  //! \brief Destruction deallocates the underlying allocations.
  ~HexagonBuffer();

  //! \brief Prevent copy construction of HexagonBuffers.
  HexagonBuffer(const HexagonBuffer&) = delete;

  //! \brief Prevent copy assignment with HexagonBuffers.
  HexagonBuffer& operator=(const HexagonBuffer&) = delete;

  //! \brief Prevent move construction.
  HexagonBuffer(HexagonBuffer&&) = delete;

  //! \brief Prevent move assignment.
  HexagonBuffer& operator=(HexagonBuffer&&) = delete;

  //! \brief Return pointer to allocations.
  void** GetPointer();

  //! \brief Memory scopes managed by a Hexagon Buffer.
  enum class StorageScope {
    //! \brief System DDR corresponding to global storage.
    kDDR,
    /*! \brief Vector tightly coupled memory corresponding to
     *  global.vtcm storage.
     */
    kVTCM,
  };

  //! \brief Return storage scope of underlying allocation.
  StorageScope GetStorageScope() const;

  /* \brief Copy data from a Hexagon Buffer an external buffer.
   *
   * \param data The pointer to the external buffer.
   *
   * \param nbytes The number of bytes to copy.
   */
  void CopyTo(void* data, size_t nbytes) const;

  /* \brief Copy data from an external buffer to a Hexagon Buffer.
   *
   * \param data The pointer to the external buffer.
   *
   * \param nbytes The number of bytes to copy.
   */
  void CopyFrom(void* data, size_t nbytes);

  /* \brief Copy data from one Hexagon Buffer to another.
   *
   * \param other The other Hexagon Buffer.
   *
   * \param nbytes The number of bytes to copy.
   */
  void CopyFrom(const HexagonBuffer& other, size_t nbytes);

 private:
  //! \brief Assign a storage scope to the buffer.
  void SetStorageScope(Optional<String> scope);
  /*! \brief Array of raw pointer allocations required by the buffer.
   *
   *  For 1d (contiguous) storage a single allocation will result.
   *  For 2d (discontiguous) storage `nallocs` allocations will result.
   */
  std::vector<void*> allocations_;
  /*! \brief Managed allocations which follow RAII and are released
   *  during destruction.
   */
  std::vector<std::unique_ptr<Allocation>> managed_allocations_;
  //! \brief Number of discontiguous allocations backing the buffer.
  size_t nallocs_;
  //! \brief Size in bytes of each underlying allocation.
  size_t nbytes_;
  /*! \brief The underlying storage scope in which the allocation
   *  resides.
   */
  StorageScope storage_scope_;
};

}  // namespace hexagon
}  // namespace runtime
}  // namespace tvm

#endif  // TVM_RUNTIME_HEXAGON_HEXAGON_HEXAGON_BUFFER_H_
1,601
521
#ifndef _IPXE_MENU_H
#define _IPXE_MENU_H

/** @file
 *
 * Menu selection
 *
 */

FILE_LICENCE ( GPL2_OR_LATER );

#include <ipxe/list.h>

/** A menu */
struct menu {
	/** List of menus */
	struct list_head list;
	/** Name */
	const char *name;
	/** Title */
	const char *title;
	/** Menu items (list of struct menu_item) */
	struct list_head items;
};

/** A menu item */
struct menu_item {
	/** List of menu items */
	struct list_head list;
	/** Label */
	const char *label;
	/** Text */
	const char *text;
	/** Shortcut key */
	int shortcut;
	/** Is default item */
	int is_default;
};

extern struct menu * create_menu ( const char *name, const char *title );
extern struct menu_item * add_menu_item ( struct menu *menu,
					  const char *label, const char *text,
					  int shortcut, int is_default );
extern void destroy_menu ( struct menu *menu );
extern struct menu * find_menu ( const char *name );
extern int show_menu ( struct menu *menu, unsigned int timeout_ms,
		       const char *select, struct menu_item **selected );

#endif /* _IPXE_MENU_H */
370
1,738
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
// Original file Copyright Crytek GMBH or its affiliates, used under license.

#ifndef CRYINCLUDE_EDITOR_USERMESSAGEDEFINES_H
#define CRYINCLUDE_EDITOR_USERMESSAGEDEFINES_H
#pragma once

// Win32 user-defined window message IDs used across the Sandbox editor.
// Only the first enumerator is anchored at WM_USER + 1; all following
// values are implicitly consecutive, so the relative order of entries
// must not change without auditing every sender/receiver.
enum ESandboxUserMessages
{
    // InPlaceComboBox
    WM_USER_ON_SELECTION_CANCEL = WM_USER + 1,
    WM_USER_ON_SELECTION_OK,
    WM_USER_ON_NEW_SELECTION,
    WM_USER_ON_EDITCHANGE,
    WM_USER_ON_OPENDROPDOWN,
    WM_USER_ON_EDITKEYDOWN,
    WM_USER_ON_EDITCLICK,
    // ACListWnd
    ENAC_UPDATE,
    // EditWithButton
    WM_USER_EDITWITHBUTTON_CLICKED,
    // FillSliderCtrl
    WMU_FS_CHANGED,
    WMU_FS_LBUTTONDOWN,
    WMU_FS_LBUTTONUP,
    FLM_EDITTEXTCHANGED,
    FLM_FILTERTEXTCHANGED,
    // NumberCtrlEdit
    WMU_LBUTTONDOWN,
    WMU_LBUTTONUP,
    // Mannequin/CharacterEditor
    WM_ONWINDOWFOCUSCHANGES,
    // SelectObjectDialog
    IDT_TIMER_0,
    IDT_TIMER_1,
    // LensFlareEditor
    WM_FLAREEDITOR_UPDATETREECONTROL,
    // EquipPackDialog
    UM_EQUIPLIST_CHECKSTATECHANGE,
    // MaterialSender/MatEditMainDlg
    WM_MATEDITPICK,
    // GridMapWindow
    WM_USER_ON_DBL_CLICK,
    // LMCompDialog
    WM_UPDATE_LIGHTMAP_GENERATION_PROGRESS,
    WM_UPDATE_LIGHTMAP_GENERATION_MEMUSAGE,
    WM_UPDATE_LIGHTMAP_GENERATION_MEMUSAGE_STATIC,
    WM_UPDATE_GLM_NAME_EDIT,
    // Viewport
    WM_VIEWPORT_ON_TITLE_CHANGE,
    // VisualLogControls
    UWM_BUTTON_CLICKED,
};

#endif // CRYINCLUDE_EDITOR_USERMESSAGEDEFINES_H
837
1,350
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

import java.util.EnumSet;
import java.util.Objects;

/**
 * Represents the attributes of a given class that should be made available reflectively as part of a GraalVM native
 * image compilation.
 *
 * <p>Instances are identified solely by class name: {@link #equals(Object)} and {@link #hashCode()} ignore the
 * attribute set, so two instances for the same class name compare equal even with different attributes.</p>
 */
public final class ClassReflectionAttributes {

    /** The reflective element kinds that may be registered for a class. */
    public enum ReflectionAttributes {
        DECLARED_FIELDS, PUBLIC_FIELDS,
        DECLARED_CONSTRUCTORS, PUBLIC_CONSTRUCTORS,
        DECLARED_METHODS, PUBLIC_METHODS,
        DECLARED_CLASSES, PUBLIC_CLASSES
    }

    private final String name;
    private final EnumSet<ReflectionAttributes> attributes;

    private ClassReflectionAttributes(String name, EnumSet<ReflectionAttributes> attributes) {
        this.name = Objects.requireNonNull(name);
        this.attributes = attributes;
    }

    /** Creates an instance exposing every reflective element kind for {@code name}. */
    public static ClassReflectionAttributes createWithAll(String name) {
        return create(name, EnumSet.allOf(ReflectionAttributes.class));
    }

    /** Creates an instance exposing exactly the given {@code attributes} for {@code name}. */
    public static ClassReflectionAttributes create(String name, EnumSet<ReflectionAttributes> attributes) {
        return new ClassReflectionAttributes(name, attributes);
    }

    /** Creates an instance exposing only the DECLARED_* element kinds for {@code name}. */
    public static ClassReflectionAttributes createWithAllDeclared(String name) {
        return new ClassReflectionAttributes(name, EnumSet.of(
            ReflectionAttributes.DECLARED_CLASSES,
            ReflectionAttributes.DECLARED_FIELDS,
            ReflectionAttributes.DECLARED_CONSTRUCTORS,
            ReflectionAttributes.DECLARED_METHODS));
    }

    /** Creates an instance exposing only the PUBLIC_* element kinds for {@code name}. */
    public static ClassReflectionAttributes createWithAllPublic(String name) {
        return new ClassReflectionAttributes(name, EnumSet.of(
            ReflectionAttributes.PUBLIC_CLASSES,
            ReflectionAttributes.PUBLIC_FIELDS,
            ReflectionAttributes.PUBLIC_CONSTRUCTORS,
            ReflectionAttributes.PUBLIC_METHODS));
    }

    /** Returns the fully qualified class name these attributes apply to. */
    public String getName() {
        return name;
    }

    // Single membership check shared by all include* accessors.
    private boolean has(ReflectionAttributes attribute) {
        return attributes.contains(attribute);
    }

    public boolean includeDeclaredFields() {
        return has(ReflectionAttributes.DECLARED_FIELDS);
    }

    public boolean includePublicFields() {
        return has(ReflectionAttributes.PUBLIC_FIELDS);
    }

    public boolean includeDeclaredConstructors() {
        return has(ReflectionAttributes.DECLARED_CONSTRUCTORS);
    }

    public boolean includePublicConstructors() {
        return has(ReflectionAttributes.PUBLIC_CONSTRUCTORS);
    }

    public boolean includeDeclaredMethods() {
        return has(ReflectionAttributes.DECLARED_METHODS);
    }

    public boolean includePublicMethods() {
        return has(ReflectionAttributes.PUBLIC_METHODS);
    }

    public boolean includeDeclaredClasses() {
        return has(ReflectionAttributes.DECLARED_CLASSES);
    }

    public boolean includePublicClasses() {
        return has(ReflectionAttributes.PUBLIC_CLASSES);
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        // Identity is the class name only; the attribute set is intentionally ignored.
        return name.equals(((ClassReflectionAttributes) o).name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name);
    }
}
1,318
432
<gh_stars>100-1000 #include <sys/types.h> #include <sys/ipc.h> #include <sys/shm.h> #include "sysvipc_shm.h" extern char sysvipc_userland; extern int __sys_shmdt(const void *); int shmdt(const void *addr) { if (sysvipc_userland) return (sysvipc_shmdt(addr)); return (__sys_shmdt(addr)); }
141
4,054
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.tensor;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.DoubleBinaryOperator;
import java.util.stream.Collectors;

/**
 * A mixed tensor type. This is class is currently suitable for serialization
 * and deserialization, not yet for computation.
 *
 * A mixed tensor has a combination of mapped and indexed dimensions. By
 * reordering the mapped dimensions before the indexed dimensions, one can
 * think of mixed tensors as the mapped dimensions mapping to a
 * dense tensor. This dense tensor is called a dense subspace.
 *
 * @author lesters
 */
public class MixedTensor implements Tensor {

    /** The dimension specification for this tensor */
    private final TensorType type;

    /** The list of cells in the tensor */
    private final ImmutableList<Cell> cells;

    /** An index structure over the cell list */
    private final Index index;

    private MixedTensor(TensorType type, ImmutableList<Cell> cells, Index index) {
        this.type = type;
        this.cells = ImmutableList.copyOf(cells);
        this.index = index;
    }

    /** Returns the tensor type */
    @Override
    public TensorType type() { return type; }

    /** Returns the size of the tensor measured in number of cells */
    @Override
    public long size() { return cells.size(); }

    /** Returns the value at the given address */
    @Override
    public double get(TensorAddress address) {
        long cellIndex = index.indexOf(address);
        if (cellIndex < 0 || cellIndex >= cells.size())
            return 0.0;
        Cell cell = cells.get((int)cellIndex);
        // The index maps the sparse part to a base offset; verify the full
        // address matches before trusting the looked-up cell.
        if ( ! address.equals(cell.getKey()))
            return 0.0;
        return cell.getValue();
    }

    @Override
    public boolean has(TensorAddress address) {
        long cellIndex = index.indexOf(address);
        if (cellIndex < 0 || cellIndex >= cells.size())
            return false;
        Cell cell = cells.get((int)cellIndex);
        if ( ! address.equals(cell.getKey()))
            return false;
        return true;
    }

    /**
     * Returns an iterator over the cells of this tensor.
     * Cells are returned in order of increasing indexes in the
     * indexed dimensions, increasing indexes of later dimensions
     * in the dimension type before earlier. No guarantee is
     * given for the order of sparse dimensions.
     */
    @Override
    public Iterator<Cell> cellIterator() {
        return cells.iterator();
    }

    /**
     * Returns an iterator over the values of this tensor.
     * The iteration order is the same as for cellIterator.
     */
    @Override
    public Iterator<Double> valueIterator() {
        return new Iterator<>() {
            Iterator<Cell> cellIterator = cellIterator();
            @Override
            public boolean hasNext() {
                return cellIterator.hasNext();
            }
            @Override
            public Double next() {
                return cellIterator.next().getValue();
            }
        };
    }

    @Override
    public Map<TensorAddress, Double> cells() {
        ImmutableMap.Builder<TensorAddress, Double> builder = new ImmutableMap.Builder<>();
        for (Cell cell : cells) {
            builder.put(cell.getKey(), cell.getValue());
        }
        return builder.build();
    }

    @Override
    public Tensor withType(TensorType other) {
        // Bug fix: this previously called this.type.isRenamableTo(type),
        // comparing the current type against itself (always compatible),
        // and printed the current type twice in the error message.
        // The requested type must be validated instead.
        if (!this.type.isRenamableTo(other)) {
            throw new IllegalArgumentException("MixedTensor.withType: types are not compatible. Current type: '" +
                    this.type.toString() + "', requested type: '" + other.toString() + "'");
        }
        return new MixedTensor(other, cells, index);
    }

    @Override
    public Tensor remove(Set<TensorAddress> addresses) {
        Tensor.Builder builder = Tensor.Builder.of(type());

        // iterate through all sparse addresses referencing a dense subspace
        for (Map.Entry<TensorAddress, Long> entry : index.sparseMap.entrySet()) {
            TensorAddress sparsePartialAddress = entry.getKey();
            if ( ! addresses.contains(sparsePartialAddress)) {  // assumption: addresses only contain the sparse part
                long offset = entry.getValue();
                for (int i = 0; i < index.denseSubspaceSize; ++i) {
                    Cell cell = cells.get((int)offset + i);
                    builder.cell(cell.getKey(), cell.getValue());
                }
            }
        }
        return builder.build();
    }

    @Override
    public int hashCode() { return cells.hashCode(); }

    @Override
    public String toString() {
        if (type.rank() == 0) return Tensor.toStandardString(this);
        if (type.rank() > 1 && type.dimensions().stream().filter(d -> d.isIndexed()).anyMatch(d -> d.size().isEmpty()))
            return Tensor.toStandardString(this);
        if (type.dimensions().stream().filter(d -> d.isMapped()).count() > 1) return Tensor.toStandardString(this);

        return type.toString() + ":" + index.contentToString(this);
    }

    @Override
    public boolean equals(Object other) {
        if ( ! ( other instanceof Tensor)) return false;
        return Tensor.equals(this, ((Tensor)other));
    }

    /** Returns the size of dense subspaces */
    public long denseSubspaceSize() {
        return index.denseSubspaceSize();
    }

    /**
     * Base class for building mixed tensors.
     */
    public abstract static class Builder implements Tensor.Builder {

        final TensorType type;

        /**
         * Create a builder depending upon the type of indexed dimensions.
         * If at least one indexed dimension is unbound, we create
         * a temporary structure while finding dimension bounds.
         */
        public static Builder of(TensorType type) {
            if (type.dimensions().stream().anyMatch(d -> d instanceof TensorType.IndexedUnboundDimension)) {
                return new UnboundBuilder(type);
            } else {
                return new BoundBuilder(type);
            }
        }

        private Builder(TensorType type) {
            this.type = type;
        }

        @Override
        public TensorType type() { return type; }

        @Override
        public Tensor.Builder cell(float value, long... labels) {
            return cell((double)value, labels);
        }

        @Override
        public Tensor.Builder cell(double value, long... labels) {
            throw new UnsupportedOperationException("Not implemented.");
        }

        @Override
        public CellBuilder cell() {
            return new CellBuilder(type(), this);
        }

        @Override
        public abstract MixedTensor build();
    }

    /**
     * Builder for mixed tensors with bound indexed dimensions.
     */
    public static class BoundBuilder extends Builder {

        /** For each sparse partial address, hold a dense subspace */
        private final Map<TensorAddress, double[]> denseSubspaceMap = new HashMap<>();
        private final Index.Builder indexBuilder;
        private final Index index;
        private final TensorType denseSubtype;

        private BoundBuilder(TensorType type) {
            super(type);
            indexBuilder = new Index.Builder(type);
            index = indexBuilder.index();
            denseSubtype = new TensorType(type.valueType(),
                                          type.dimensions().stream().filter(d -> d.isIndexed()).collect(Collectors.toList()));
        }

        public long denseSubspaceSize() {
            return index.denseSubspaceSize();
        }

        private double[] denseSubspace(TensorAddress sparseAddress) {
            if (!denseSubspaceMap.containsKey(sparseAddress)) {
                denseSubspaceMap.put(sparseAddress, new double[(int)denseSubspaceSize()]);
            }
            return denseSubspaceMap.get(sparseAddress);
        }

        public IndexedTensor.DirectIndexBuilder denseSubspaceBuilder(TensorAddress sparseAddress) {
            double[] values = new double[(int)denseSubspaceSize()];
            denseSubspaceMap.put(sparseAddress, values);
            return new DenseSubspaceBuilder(denseSubtype, values);
        }

        @Override
        public Tensor.Builder cell(TensorAddress address, float value) {
            return cell(address, (double)value);
        }

        @Override
        public Tensor.Builder cell(TensorAddress address, double value) {
            TensorAddress sparsePart = index.sparsePartialAddress(address);
            long denseOffset = index.denseOffset(address);
            double[] denseSubspace = denseSubspace(sparsePart);
            denseSubspace[(int)denseOffset] = value;
            return this;
        }

        public Tensor.Builder block(TensorAddress sparsePart, double[] values) {
            int denseSubspaceSize = (int)denseSubspaceSize();
            if (values.length < denseSubspaceSize)
                throw new IllegalArgumentException("Block should have " + denseSubspaceSize +
                                                   " values, but has only " + values.length);
            double[] denseSubspace = denseSubspace(sparsePart);
            System.arraycopy(values, 0, denseSubspace, 0, denseSubspaceSize);
            return this;
        }

        @Override
        public MixedTensor build() {
            long count = 0;
            ImmutableList.Builder<Cell> builder = new ImmutableList.Builder<>();

            for (Map.Entry<TensorAddress, double[]> entry : denseSubspaceMap.entrySet()) {
                TensorAddress sparsePart = entry.getKey();
                indexBuilder.put(sparsePart, count);

                double[] denseSubspace = entry.getValue();
                for (long offset = 0; offset < denseSubspace.length; ++offset) {
                    TensorAddress cellAddress = index.addressOf(sparsePart, offset);
                    double value = denseSubspace[(int)offset];
                    builder.add(new Cell(cellAddress, value));
                    count++;
                }
            }
            return new MixedTensor(type, builder.build(), indexBuilder.build());
        }
    }

    /**
     * Temporarily stores all cells to find bounds of indexed dimensions,
     * then creates a tensor using BoundBuilder. This is due to the
     * fact that for serialization the size of the dense subspace must be
     * known, and equal for all dense subspaces. A side effect is that the
     * tensor type is effectively changed, such that unbound indexed
     * dimensions become bound.
     */
    public static class UnboundBuilder extends Builder {

        private Map<TensorAddress, Double> cells;
        private final long[] dimensionBounds;

        private UnboundBuilder(TensorType type) {
            super(type);
            cells = new HashMap<>();
            dimensionBounds = new long[type.dimensions().size()];
        }

        @Override
        public Tensor.Builder cell(TensorAddress address, float value) {
            return cell(address, (double)value);
        }

        @Override
        public Tensor.Builder cell(TensorAddress address, double value) {
            cells.put(address, value);
            trackBounds(address);
            return this;
        }

        @Override
        public MixedTensor build() {
            TensorType boundType = createBoundType();
            BoundBuilder builder = new BoundBuilder(boundType);
            for (Map.Entry<TensorAddress, Double> cell : cells.entrySet()) {
                builder.cell(cell.getKey(), cell.getValue());
            }
            return builder.build();
        }

        public void trackBounds(TensorAddress address) {
            for (int i = 0; i < type.dimensions().size(); ++i) {
                TensorType.Dimension dimension = type.dimensions().get(i);
                if (dimension.isIndexed()) {
                    dimensionBounds[i] = Math.max(address.numericLabel(i), dimensionBounds[i]);
                }
            }
        }

        public TensorType createBoundType() {
            TensorType.Builder typeBuilder = new TensorType.Builder(type().valueType());
            for (int i = 0; i < type.dimensions().size(); ++i) {
                TensorType.Dimension dimension = type.dimensions().get(i);
                if (!dimension.isIndexed()) {
                    typeBuilder.mapped(dimension.name());
                } else {
                    // Use declared size when present, else the largest
                    // observed label + 1.
                    long size = dimension.size().orElse(dimensionBounds[i] + 1);
                    typeBuilder.indexed(dimension.name(), size);
                }
            }
            return typeBuilder.build();
        }
    }

    /**
     * An immutable index into a list of cells.
     * Contains additional information required
     * for handling mixed tensor addresses.
     * Assumes indexed dimensions are bound.
     */
    private static class Index {

        private final TensorType type;
        private final TensorType sparseType;
        private final TensorType denseType;
        private final List<TensorType.Dimension> mappedDimensions;
        private final List<TensorType.Dimension> indexedDimensions;

        private ImmutableMap<TensorAddress, Long> sparseMap;
        private long denseSubspaceSize = -1;

        private Index(TensorType type) {
            this.type = type;
            this.mappedDimensions = type.dimensions().stream().filter(d -> !d.isIndexed()).collect(Collectors.toList());
            this.indexedDimensions = type.dimensions().stream().filter(d -> d.isIndexed()).collect(Collectors.toList());
            this.sparseType = createPartialType(type.valueType(), mappedDimensions);
            this.denseType = createPartialType(type.valueType(), indexedDimensions);
        }

        /** Returns the index of the given address, or -1 if it is not present */
        public long indexOf(TensorAddress address) {
            TensorAddress sparsePart = sparsePartialAddress(address);
            if ( ! sparseMap.containsKey(sparsePart)) return -1;
            long base = sparseMap.get(sparsePart);
            long offset = denseOffset(address);
            return base + offset;
        }

        public static class Builder {

            private final Index index;
            private final ImmutableMap.Builder<TensorAddress, Long> builder;

            public Builder(TensorType type) {
                index = new Index(type);
                builder = new ImmutableMap.Builder<>();
            }

            public void put(TensorAddress address, long index) {
                builder.put(address, index);
            }

            public Index build() {
                index.sparseMap = builder.build();
                return index;
            }

            public Index index() {
                return index;
            }
        }

        public long denseSubspaceSize() {
            if (denseSubspaceSize == -1) {
                // Lazily computed as the product of all indexed dimension sizes.
                denseSubspaceSize = 1;
                for (int i = 0; i < type.dimensions().size(); ++i) {
                    TensorType.Dimension dimension = type.dimensions().get(i);
                    if (dimension.isIndexed()) {
                        denseSubspaceSize *= dimension.size().orElseThrow(() ->
                                new IllegalArgumentException("Unknown size of indexed dimension"));
                    }
                }
            }
            return denseSubspaceSize;
        }

        private TensorAddress sparsePartialAddress(TensorAddress address) {
            if (type.dimensions().size() != address.size())
                throw new IllegalArgumentException("Tensor type of " + this + " is not the same size as " + address);
            TensorAddress.Builder builder = new TensorAddress.Builder(sparseType);
            for (int i = 0; i < type.dimensions().size(); ++i) {
                TensorType.Dimension dimension = type.dimensions().get(i);
                if ( ! dimension.isIndexed())
                    builder.add(dimension.name(), address.label(i));
            }
            return builder.build();
        }

        private long denseOffset(TensorAddress address) {
            // Row-major offset over the indexed dimensions, last dimension
            // varying fastest.
            long innerSize = 1;
            long offset = 0;
            for (int i = type.dimensions().size(); --i >= 0; ) {
                TensorType.Dimension dimension = type.dimensions().get(i);
                if (dimension.isIndexed()) {
                    long label = address.numericLabel(i);
                    offset += label * innerSize;
                    innerSize *= dimension.size().orElseThrow(() ->
                            new IllegalArgumentException("Unknown size of indexed dimension."));
                }
            }
            return offset;
        }

        private TensorAddress denseOffsetToAddress(long denseOffset) {
            if (denseOffset < 0 || denseOffset > denseSubspaceSize) {
                throw new IllegalArgumentException("Offset out of bounds");
            }

            long restSize = denseOffset;
            long innerSize = denseSubspaceSize;
            long[] labels = new long[indexedDimensions.size()];

            for (int i = 0; i < labels.length; ++i) {
                TensorType.Dimension dimension = indexedDimensions.get(i);
                long dimensionSize = dimension.size().orElseThrow(() ->
                        new IllegalArgumentException("Unknown size of indexed dimension."));

                innerSize /= dimensionSize;
                labels[i] = restSize / innerSize;
                restSize %= innerSize;
            }
            return TensorAddress.of(labels);
        }

        private TensorAddress addressOf(TensorAddress sparsePart, long denseOffset) {
            TensorAddress densePart = denseOffsetToAddress(denseOffset);
            String[] labels = new String[type.dimensions().size()];
            int mappedIndex = 0;
            int indexedIndex = 0;
            // Interleave sparse and dense labels back into declared dimension order.
            for (TensorType.Dimension d : type.dimensions()) {
                if (d.isIndexed()) {
                    labels[mappedIndex + indexedIndex] = densePart.label(indexedIndex);
                    indexedIndex++;
                } else {
                    labels[mappedIndex + indexedIndex] = sparsePart.label(mappedIndex);
                    mappedIndex++;
                }
            }
            return TensorAddress.of(labels);
        }

        @Override
        public String toString() {
            return "index into " + type;
        }

        private String contentToString(MixedTensor tensor) {
            if (mappedDimensions.size() > 1) throw new IllegalStateException("Should be ensured by caller");
            if (mappedDimensions.size() == 0) {
                StringBuilder b = new StringBuilder();
                denseSubspaceToString(tensor, 0, b);
                return b.toString();
            }

            // Exactly 1 mapped dimension
            StringBuilder b = new StringBuilder("{");
            sparseMap.entrySet().stream().sorted(Map.Entry.comparingByKey()).forEach(entry -> {
                b.append(TensorAddress.labelToString(entry.getKey().label(0)));
                b.append(":");
                denseSubspaceToString(tensor, entry.getValue(), b);
                b.append(",");
            });
            if (b.length() > 1)
                b.setLength(b.length() - 1);
            b.append("}");
            return b.toString();
        }

        private void denseSubspaceToString(MixedTensor tensor, long subspaceIndex, StringBuilder b) {
            if (denseSubspaceSize == 1) {
                b.append(getDouble(subspaceIndex, 0, tensor));
                return;
            }

            IndexedTensor.Indexes indexes = IndexedTensor.Indexes.of(denseType);
            for (int index = 0; index < denseSubspaceSize; index++) {
                indexes.next();

                // start brackets
                for (int i = 0; i < indexes.nextDimensionsAtStart(); i++)
                    b.append("[");

                // value
                switch (type.valueType()) {
                    case DOUBLE:   b.append(getDouble(subspaceIndex, index, tensor)); break;
                    case FLOAT:    b.append(getDouble(subspaceIndex, index, tensor)); break; // TODO: Really use floats
                    case BFLOAT16: b.append(getDouble(subspaceIndex, index, tensor)); break;
                    case INT8:     b.append(getDouble(subspaceIndex, index, tensor)); break;
                    default:
                        throw new IllegalStateException("Unexpected value type " + type.valueType());
                }

                // end bracket and comma
                for (int i = 0; i < indexes.nextDimensionsAtEnd(); i++)
                    b.append("]");
                if (index < denseSubspaceSize - 1)
                    b.append(", ");
            }
        }

        private double getDouble(long indexedSubspaceIndex, long indexInIndexedSubspace, MixedTensor tensor) {
            return tensor.cells.get((int)(indexedSubspaceIndex + indexInIndexedSubspace)).getDoubleValue();
        }
    }

    private static class DenseSubspaceBuilder implements IndexedTensor.DirectIndexBuilder {

        private final TensorType type;
        private final double[] values;

        public DenseSubspaceBuilder(TensorType type, double[] values) {
            this.type = type;
            this.values = values;
        }

        @Override
        public TensorType type() { return type; }

        @Override
        public void cellByDirectIndex(long index, double value) {
            values[(int)index] = value;
        }

        @Override
        public void cellByDirectIndex(long index, float value) {
            values[(int)index] = value;
        }
    }

    public static TensorType createPartialType(TensorType.Value valueType, List<TensorType.Dimension> dimensions) {
        TensorType.Builder builder = new TensorType.Builder(valueType);
        for (TensorType.Dimension dimension : dimensions) {
            builder.set(dimension);
        }
        return builder.build();
    }

}
9,749
7,746
/*++ Copyright (c) 2012 Microsoft Corporation Module Name: permutation.cpp Abstract: Simple abstraction for managing permutations. Author: <NAME> (leonardo) 2012-01-04 Revision History: --*/ #include "util/permutation.h" #include "util/util.h" #include "util/vector.h" void apply_permutation_copy(unsigned sz, unsigned const * src, unsigned const * p, unsigned * target) { for (unsigned i = 0; i < sz; i++) { target[i] = src[p[i]]; } } static void tst1(unsigned sz, unsigned num_tries, unsigned max = UINT_MAX) { #if 0 unsigned_vector data; unsigned_vector p; unsigned_vector new_data; data.resize(sz); p.resize(sz); new_data.resize(sz); random_gen g; for (unsigned i = 0; i < sz; i++) p[i] = i; // fill data with random numbers for (unsigned i = 0; i < sz; i++) data[i] = g() % max; for (unsigned k = 0; k < num_tries; k ++) { shuffle(p.size(), p.c_ptr(), g); // std::cout << "p: "; display(std::cout, p.begin(), p.end()); std::cout << "\n"; // std::cout << "data: "; display(std::cout, data.begin(), data.end()); std::cout << "\n"; apply_permutation_copy(sz, data.c_ptr(), p.c_ptr(), new_data.c_ptr()); apply_permutation(sz, data.c_ptr(), p.c_ptr()); // std::cout << "data: "; display(std::cout, data.begin(), data.end()); std::cout << "\n"; for (unsigned i = 0; i < 0; i++) ENSURE(data[i] == new_data[i]); } #endif } void tst_permutation() { tst1(10, 1000, 5); tst1(10, 1000, 1000); tst1(10, 1000, UINT_MAX); tst1(100, 1000, 33); tst1(100, 1000, 1000); tst1(100, 1000, UINT_MAX); tst1(1000, 1000, 121); tst1(1000, 1000, 1000); tst1(1000, 1000, UINT_MAX); tst1(33, 1000, 121); tst1(33, 1000, 1000); tst1(33, 1000, UINT_MAX); tst1(121, 1000, 121); tst1(121, 1000, 1000); tst1(121, 1000, UINT_MAX); for (unsigned i = 0; i < 1000; i++) { tst1(1000, 2, 333); tst1(1000, 2, 10000); tst1(1000, 2, UINT_MAX); } random_gen g; for (unsigned i = 0; i < 100000; i++) { unsigned sz = (g() % 131) + 1; tst1(sz, 1, sz*2); tst1(sz, 1, UINT_MAX); tst1(sz, 1, sz/2 + 1); } }
1,097
743
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.guacamole.resource; import java.io.InputStream; /** * A resource which is located within the classpath of an arbitrary * ClassLoader. */ public class ClassPathResource extends AbstractResource { /** * The classloader to use when reading this resource. */ private final ClassLoader classLoader; /** * The path of this resource relative to the classloader. */ private final String path; /** * Creates a new ClassPathResource which uses the given ClassLoader to * read the resource having the given path. * * @param classLoader * The ClassLoader to use when reading the resource. * * @param mimetype * The mimetype of the resource. * * @param path * The path of the resource relative to the given ClassLoader. */ public ClassPathResource(ClassLoader classLoader, String mimetype, String path) { super(mimetype); this.classLoader = classLoader; this.path = path; } /** * Creates a new ClassPathResource which uses the ClassLoader associated * with the ClassPathResource class to read the resource having the given * path. * * @param mimetype * The mimetype of the resource. * * @param path * The path of the resource relative to the ClassLoader associated * with the ClassPathResource class. 
*/ public ClassPathResource(String mimetype, String path) { this(ClassPathResource.class.getClassLoader(), mimetype, path); } @Override public InputStream asStream() { return classLoader.getResourceAsStream(path); } }
793
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #ifndef _TXATBASE_HXX #define _TXATBASE_HXX #include <tools/solar.h> #include <svl/poolitem.hxx> #include <hintids.hxx> #include <errhdl.hxx> #include <boost/utility.hpp> class SfxItemPool; class SvXMLAttrContainerItem; class SwFmtRuby; class SwFmtCharFmt; class SwFmtAutoFmt; class SwFmtINetFmt; class SwFmtFld; class SwFmtFtn; class SwFmtFlyCnt; class SwTOXMark; class SwFmtRefMark; class SwFmtMeta; class SwTxtAttr : private boost::noncopyable { private: SfxPoolItem * const m_pAttr; xub_StrLen m_nStart; bool m_bDontExpand : 1; bool m_bLockExpandFlag : 1; bool m_bDontMoveAttr : 1; // refmarks, toxmarks bool m_bCharFmtAttr : 1; // charfmt, inet bool m_bOverlapAllowedAttr : 1; // refmarks, toxmarks bool m_bPriorityAttr : 1; // attribute has priority (redlining) bool m_bDontExpandStart : 1; // don't expand start at paragraph start (ruby) bool m_bNesting : 1; // SwTxtAttrNesting bool m_bHasDummyChar : 1; // without end + meta bool m_bHasContent : 1; // text attribute with content protected: SwTxtAttr( SfxPoolItem& rAttr, xub_StrLen nStart ); virtual ~SwTxtAttr(); void 
SetLockExpandFlag( bool bFlag ) { m_bLockExpandFlag = bFlag; } void SetDontMoveAttr( bool bFlag ) { m_bDontMoveAttr = bFlag; } void SetCharFmtAttr( bool bFlag ) { m_bCharFmtAttr = bFlag; } void SetOverlapAllowedAttr( bool bFlag ){ m_bOverlapAllowedAttr = bFlag; } void SetDontExpandStartAttr(bool bFlag) { m_bDontExpandStart = bFlag; } void SetNesting(const bool bFlag) { m_bNesting = bFlag; } void SetHasDummyChar(const bool bFlag) { m_bHasDummyChar = bFlag; } void SetHasContent( const bool bFlag ) { m_bHasContent = bFlag; } public: /// destroy instance static void Destroy( SwTxtAttr * pToDestroy, SfxItemPool& rPool ); /// start position xub_StrLen* GetStart() { return & m_nStart; } const xub_StrLen* GetStart() const { return & m_nStart; } /// end position virtual xub_StrLen* GetEnd(); // also used to change the end position inline const xub_StrLen* End() const; /// end (if available), else start inline const xub_StrLen* GetAnyEnd() const; inline void SetDontExpand( bool bDontExpand ); bool DontExpand() const { return m_bDontExpand; } bool IsLockExpandFlag() const { return m_bLockExpandFlag; } bool IsDontMoveAttr() const { return m_bDontMoveAttr; } bool IsCharFmtAttr() const { return m_bCharFmtAttr; } bool IsOverlapAllowedAttr() const { return m_bOverlapAllowedAttr; } bool IsPriorityAttr() const { return m_bPriorityAttr; } void SetPriorityAttr( bool bFlag ) { m_bPriorityAttr = bFlag; } bool IsDontExpandStartAttr() const { return m_bDontExpandStart; } bool IsNesting() const { return m_bNesting; } bool HasDummyChar() const { return m_bHasDummyChar; } bool HasContent() const { return m_bHasContent; } inline const SfxPoolItem& GetAttr() const; inline SfxPoolItem& GetAttr(); inline sal_uInt16 Which() const { return GetAttr().Which(); } virtual int operator==( const SwTxtAttr& ) const; inline const SwFmtCharFmt &GetCharFmt() const; inline const SwFmtAutoFmt &GetAutoFmt() const; inline const SwFmtFld &GetFmtFld() const; inline const SwFmtFtn &GetFtn() const; inline const 
SwFmtFlyCnt &GetFlyCnt() const; inline const SwTOXMark &GetTOXMark() const; inline const SwFmtRefMark &GetRefMark() const; inline const SwFmtINetFmt &GetINetFmt() const; inline const SwFmtRuby &GetRuby() const; inline const SwFmtMeta &GetMeta() const; }; class SwTxtAttrEnd : public SwTxtAttr { protected: xub_StrLen m_nEnd; public: SwTxtAttrEnd( SfxPoolItem& rAttr, sal_uInt16 nStart, sal_uInt16 nEnd ); virtual xub_StrLen* GetEnd(); }; // --------------- Inline Implementierungen ------------------------ inline const xub_StrLen* SwTxtAttr::End() const { return const_cast<SwTxtAttr * >(this)->GetEnd(); } inline const xub_StrLen* SwTxtAttr::GetAnyEnd() const { const xub_StrLen* pEnd = End(); return pEnd ? pEnd : GetStart(); } inline const SfxPoolItem& SwTxtAttr::GetAttr() const { ASSERT( m_pAttr, "SwTxtAttr: where is my attribute?" ); return *m_pAttr; } inline SfxPoolItem& SwTxtAttr::GetAttr() { return const_cast<SfxPoolItem&>( const_cast<const SwTxtAttr*>(this)->GetAttr()); } inline void SwTxtAttr::SetDontExpand( bool bDontExpand ) { if ( !m_bLockExpandFlag ) { m_bDontExpand = bDontExpand; } } //------------------------------------------------------------------------ inline const SwFmtCharFmt& SwTxtAttr::GetCharFmt() const { ASSERT( m_pAttr && m_pAttr->Which() == RES_TXTATR_CHARFMT, "Wrong attribute" ); return (const SwFmtCharFmt&)(*m_pAttr); } inline const SwFmtAutoFmt& SwTxtAttr::GetAutoFmt() const { ASSERT( m_pAttr && m_pAttr->Which() == RES_TXTATR_AUTOFMT, "Wrong attribute" ); return (const SwFmtAutoFmt&)(*m_pAttr); } inline const SwFmtFld& SwTxtAttr::GetFmtFld() const { ASSERT( m_pAttr && ( m_pAttr->Which() == RES_TXTATR_FIELD || m_pAttr->Which() == RES_TXTATR_ANNOTATION || m_pAttr->Which() == RES_TXTATR_INPUTFIELD ), "Wrong attribute" ); return (const SwFmtFld&)(*m_pAttr); } inline const SwFmtFtn& SwTxtAttr::GetFtn() const { ASSERT( m_pAttr && m_pAttr->Which() == RES_TXTATR_FTN, "Wrong attribute" ); return (const SwFmtFtn&)(*m_pAttr); } inline const SwFmtFlyCnt& 
SwTxtAttr::GetFlyCnt() const { ASSERT( m_pAttr && m_pAttr->Which() == RES_TXTATR_FLYCNT, "Wrong attribute" ); return (const SwFmtFlyCnt&)(*m_pAttr); } inline const SwTOXMark& SwTxtAttr::GetTOXMark() const { ASSERT( m_pAttr && m_pAttr->Which() == RES_TXTATR_TOXMARK, "Wrong attribute" ); return (const SwTOXMark&)(*m_pAttr); } inline const SwFmtRefMark& SwTxtAttr::GetRefMark() const { ASSERT( m_pAttr && m_pAttr->Which() == RES_TXTATR_REFMARK, "Wrong attribute" ); return (const SwFmtRefMark&)(*m_pAttr); } inline const SwFmtINetFmt& SwTxtAttr::GetINetFmt() const { ASSERT( m_pAttr && m_pAttr->Which() == RES_TXTATR_INETFMT, "Wrong attribute" ); return (const SwFmtINetFmt&)(*m_pAttr); } inline const SwFmtRuby& SwTxtAttr::GetRuby() const { ASSERT( m_pAttr && m_pAttr->Which() == RES_TXTATR_CJK_RUBY, "Wrong attribute" ); return (const SwFmtRuby&)(*m_pAttr); } inline const SwFmtMeta& SwTxtAttr::GetMeta() const { ASSERT( m_pAttr && (m_pAttr->Which() == RES_TXTATR_META || m_pAttr->Which() == RES_TXTATR_METAFIELD), "Wrong attribute" ); return (const SwFmtMeta&)(*m_pAttr); } #endif
3,464
6,457
// License: Apache 2.0. See LICENSE file in root directory. // Copyright(c) 2020 Intel Corporation. All Rights Reserved. #include "RsSource.hh" #include "BasicUsageEnvironment.hh" #include "RsStatistics.h" #include <GroupsockHelper.hh> #include <cassert> #include <compression/CompressionFactory.h> #include <ipDeviceCommon/RsCommon.h> #include <ipDeviceCommon/Statistic.h> #include <librealsense2/h/rs_sensor.h> RsDeviceSource* RsDeviceSource::createNew(UsageEnvironment& t_env, rs2::video_stream_profile& t_videoStreamProfile, rs2::frame_queue& t_queue) { return new RsDeviceSource(t_env, t_videoStreamProfile, t_queue); } RsDeviceSource::RsDeviceSource(UsageEnvironment& t_env, rs2::video_stream_profile& t_videoStreamProfile, rs2::frame_queue& t_queue) : FramedSource(t_env) { m_framesQueue = &t_queue; m_streamProfile = &t_videoStreamProfile; } RsDeviceSource::~RsDeviceSource() {} void RsDeviceSource::doGetNextFrame() { // This function is called (by our 'downstream' object) when it asks for new data. 
rs2::frame frame; try { if(!m_framesQueue->poll_for_frame(&frame)) { nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)waitForFrame, this); } else { frame.keep(); deliverRSFrame(&frame); } } catch(const std::exception& e) { envir() << "RsDeviceSource: " << e.what() << '\n'; } } void RsDeviceSource::handleWaitForFrame() { // If a new frame of data is immediately available to be delivered, then do this now: rs2::frame frame; try { if(!(getFramesQueue()->poll_for_frame(&frame))) { nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)RsDeviceSource::waitForFrame, this); } else { frame.keep(); deliverRSFrame(&frame); } } catch(const std::exception& e) { envir() << "RsDeviceSource: " << e.what() << '\n'; } } // The following is called after each delay between packet sends: void RsDeviceSource::waitForFrame(RsDeviceSource* t_deviceSource) { t_deviceSource->handleWaitForFrame(); } void RsDeviceSource::deliverRSFrame(rs2::frame* t_frame) { if(!isCurrentlyAwaitingData()) { envir() << "isCurrentlyAwaitingData returned false\n"; return; // we're not ready for the data yet } unsigned newFrameSize = t_frame->get_data_size(); gettimeofday(&fPresentationTime, NULL); // If you have a more accurate time - e.g., from an encoder - then use that instead. 
RsFrameHeader header; unsigned char* data; if(CompressionFactory::isCompressionSupported(t_frame->get_profile().format(), t_frame->get_profile().stream_type())) { fFrameSize = ((int*)t_frame->get_data())[0]; data = (unsigned char*)t_frame->get_data() + sizeof(int); } else { fFrameSize = t_frame->get_data_size(); data = (unsigned char*)t_frame->get_data(); } memmove(fTo + sizeof(RsFrameHeader), data, fFrameSize); fFrameSize += sizeof(RsMetadataHeader); header.networkHeader.data.frameSize = fFrameSize; fFrameSize += sizeof(RsNetworkHeader); if(t_frame->supports_frame_metadata(RS2_FRAME_METADATA_FRAME_TIMESTAMP)) { header.metadataHeader.data.timestamp = t_frame->get_frame_metadata(RS2_FRAME_METADATA_FRAME_TIMESTAMP) / 1000; } else { header.metadataHeader.data.timestamp = t_frame->get_timestamp(); } if(t_frame->supports_frame_metadata(RS2_FRAME_METADATA_FRAME_COUNTER)) { header.metadataHeader.data.frameCounter = t_frame->get_frame_metadata(RS2_FRAME_METADATA_FRAME_COUNTER); } else { header.metadataHeader.data.frameCounter = t_frame->get_frame_number(); } if(t_frame->supports_frame_metadata(RS2_FRAME_METADATA_ACTUAL_FPS)) { header.metadataHeader.data.actualFps = t_frame->get_frame_metadata(RS2_FRAME_METADATA_ACTUAL_FPS); } header.metadataHeader.data.timestampDomain = t_frame->get_frame_timestamp_domain(); memmove(fTo, &header, sizeof(header)); // After delivering the data, inform the reader that it is now available: FramedSource::afterGetting(this); }
1,724
5,169
<filename>Specs/2/c/d/TableViewContent/0.1.0/TableViewContent.podspec.json { "name": "TableViewContent", "version": "0.1.0", "summary": "A short description of TableViewContent.", "description": "TODO: Add long description of the pod here.", "homepage": "https://github.com/Akira Matsuda/TableViewContent", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "<NAME>": "<EMAIL>" }, "source": { "git": "https://github.com/0x0c/TableViewContent.git", "tag": "0.1.0" }, "platforms": { "ios": "8.0" }, "source_files": "TableViewContent/Classes/**/*" }
250
14,668
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.components.browser_ui.widget; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.util.AttributeSet; import android.widget.ScrollView; import androidx.annotation.IntDef; import org.chromium.base.ApiCompatibilityUtils; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; /** * An extension of the ScrollView that supports edge boundaries coming in. */ public class FadingEdgeScrollView extends ScrollView { @IntDef({EdgeType.NONE, EdgeType.FADING, EdgeType.HARD}) @Retention(RetentionPolicy.SOURCE) public @interface EdgeType { /** Draw no lines at all. */ int NONE = 0; /** Draw an edge that fades in, depending on how much is left to scroll. */ int FADING = 1; /** Draw either no line (if there is nothing to scroll) or a fully opaque line. 
*/ int HARD = 2; } private static final int POSITION_TOP = 0; private static final int POSITION_BOTTOM = 1; private final Paint mSeparatorPaint = new Paint(); private final int mSeparatorColor; private final int mSeparatorHeight; @EdgeType private int mDrawTopEdge = EdgeType.FADING; @EdgeType private int mDrawBottomEdge = EdgeType.FADING; public FadingEdgeScrollView(Context context, AttributeSet attrs) { super(context, attrs); mSeparatorColor = ApiCompatibilityUtils.getColor(getResources(), R.color.toolbar_shadow_color); mSeparatorHeight = getResources().getDimensionPixelSize(R.dimen.divider_height); } @Override protected void dispatchDraw(Canvas canvas) { super.dispatchDraw(canvas); setVerticalFadingEdgeEnabled(true); float topEdgeStrength = getTopFadingEdgeStrength(); float bottomEdgeStrength = getBottomFadingEdgeStrength(); setVerticalFadingEdgeEnabled(false); drawBoundaryLine(canvas, POSITION_TOP, topEdgeStrength, mDrawTopEdge); drawBoundaryLine(canvas, POSITION_BOTTOM, bottomEdgeStrength, mDrawBottomEdge); } /** * Sets which edge should be drawn. * @param topEdgeType Whether to draw the edge on the top part of the view. * @param bottomEdgeType Whether to draw the edge on the bottom part of the view. */ public void setEdgeVisibility(@EdgeType int topEdgeType, @EdgeType int bottomEdgeType) { mDrawTopEdge = topEdgeType; mDrawBottomEdge = bottomEdgeType; invalidate(); } /** * Draws a line at the top or bottom of the view. This should be called from dispatchDraw() so * it gets drawn on top of the View's children. * * @param canvas The canvas on which to draw. * @param position Where to draw the line: either POSITION_TOP or POSITION_BOTTOM. * @param edgeStrength A value between 0 and 1 indicating the relative size of the line. 0 * means no line at all. 1 means a fully opaque line. * @param edgeType How to draw the line. 
*/ private void drawBoundaryLine( Canvas canvas, int position, float edgeStrength, @EdgeType int edgeType) { if (edgeType == EdgeType.NONE) { return; } else if (edgeType == EdgeType.FADING) { edgeStrength = Math.max(0.0f, Math.min(1.0f, edgeStrength)); } else { edgeStrength = 1.0f; } if (edgeStrength <= 0.0f) return; int adjustedA = (int) (Color.alpha(mSeparatorColor) * edgeStrength); int adjustedR = (int) (Color.red(mSeparatorColor) * edgeStrength); int adjustedG = (int) (Color.green(mSeparatorColor) * edgeStrength); int adjustedB = (int) (Color.blue(mSeparatorColor) * edgeStrength); mSeparatorPaint.setColor(Color.argb(adjustedA, adjustedR, adjustedG, adjustedB)); int left = getScrollX(); int right = left + getRight(); if (position == POSITION_BOTTOM) { int bottom = getScrollY() + getBottom() - getTop(); canvas.drawRect(left, bottom - mSeparatorHeight, right, bottom, mSeparatorPaint); } else if (position == POSITION_TOP) { int top = getScrollY(); canvas.drawRect(left, top, right, top + mSeparatorHeight, mSeparatorPaint); } } }
1,715
1,707
//------------------------------------------------------------------------------ // LogTest.cc // Test Log class. //------------------------------------------------------------------------------ #include "Pre.h" #include "UnitTest++/src/UnitTest++.h" #include "Core/Log.h" #include "Core/Logger.h" using namespace Oryol; class MyLogger : public Logger { OryolClassDecl(MyLogger); /// generic vprint-style method virtual void VPrint(Log::Level l, const char* msg, va_list args) override { printf("In MyLogger::VPrint(): "); vprintf(msg, args); }; }; void test_vinfo(const char* msg, ...) { va_list args; va_start(args, msg); Log::VInfo(msg, args); va_end(args); } void test_log() { Log::Dbg("Dbg log msg %d '%s'\n", 2, "Bla"); Log::Info("Info log msg %f %d\n", 0.1, 4); Log::Warn("Warning log msg %d %d\n", 2, 3); Log::Error("Error log msg %d\n", 10); } TEST(LogTest) { test_log(); Log::SetLogLevel(Log::Level::Dbg); test_log(); Log::SetLogLevel(Log::Level::Info); test_log(); Log::SetLogLevel(Log::Level::Warn); test_log(); Log::SetLogLevel(Log::Level::Error); test_log(); Log::SetLogLevel(Log::Level::None); test_log(); Log::SetLogLevel(Log::Level::Dbg); test_vinfo("Log::VInfo %d %d %d...\n", 1, 2, 3); Log::AddLogger(MyLogger::Create()); test_log(); }
556
852
import FWCore.ParameterSet.Config as cms # # produce kinFit hypothesis with all necessary # ingredients # ## std sequence to perform kinematic fit import TopQuarkAnalysis.TopKinFitter.TtSemiLepKinFitProducer_Muons_cfi kinFitTtSemiLepEventHypothesis = TopQuarkAnalysis.TopKinFitter.TtSemiLepKinFitProducer_Muons_cfi.kinFitTtSemiLepEvent.clone() ## configure kinFit hypothesis from TopQuarkAnalysis.TopJetCombination.TtSemiLepHypKinFit_cfi import * ## make hypothesis makeHypothesis_kinFitTask = cms.Task( kinFitTtSemiLepEventHypothesis, ttSemiLepHypKinFit ) makeHypothesis_kinFit = cms.Sequence(makeHypothesis_kinFitTask)
231
416
<reponame>khauser/SimpleFlatMapper<filename>sfm-converter/src/main/java/org/simpleflatmapper/converter/UncheckedConverter.java package org.simpleflatmapper.converter; public interface UncheckedConverter<I, O> extends ContextualConverter<I, O> { O convert(I in, Context context); }
102
1,368
default_app_config = 'select2_many_to_many.apps.TestApp'
22
1,139
package com.journaldev.groups; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; public class TestNGGroupsExample { @Test(groups = "foo") public void foo() { System.out.println("foo"); } @Test(groups = "bar") public void bar() { System.out.println("bar"); } @Test(groups = { "alpha", "sanity" }) public void alpha() { System.out.println("alpha"); } @Test(groups = { "beta", "integration" }) public void beta() { System.out.println("beta"); } @BeforeClass(groups = "integration") public void beforeIntegrationTests() { System.out.println("Before Running integration test methods"); } @AfterClass(groups = "integration") public void afterIntegrationTests() { System.out.println("Before Running integration test methods"); } }
281
4,551
package org.robolectric.shadows; import android.os.SystemClock; import java.util.concurrent.TimeUnit; import org.robolectric.annotation.LooperMode; public class ShadowSystem { /** * Implements {@link System#nanoTime} through ShadowWrangler. * * @return Current time with nanos. */ @SuppressWarnings("unused") public static long nanoTime() { if (ShadowLooper.looperMode() == LooperMode.Mode.PAUSED) { return TimeUnit.MILLISECONDS.toNanos(SystemClock.uptimeMillis()); } else { return ShadowLegacySystemClock.nanoTime(); } } /** * Implements {@link System#currentTimeMillis} through ShadowWrangler. * * @return Current time with millis. */ @SuppressWarnings("unused") public static long currentTimeMillis() { if (ShadowLooper.looperMode() == LooperMode.Mode.PAUSED) { return SystemClock.uptimeMillis(); } else { return ShadowLegacySystemClock.currentTimeMillis(); } } }
355
2,151
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.graphics; import java.io.InputStream; import java.io.OutputStream; /** * A Picture records drawing calls (via the canvas returned by beginRecording) * and can then play them back into Canvas (via {@link Picture#draw(Canvas)} or * {@link Canvas#drawPicture(Picture)}).For most content (e.g. text, lines, rectangles), * drawing a sequence from a picture can be faster than the equivalent API * calls, since the picture performs its playback without incurring any * method-call overhead. * * <p class="note"><strong>Note:</strong> Prior to API level 23 a picture cannot * be replayed on a hardware accelerated canvas.</p> */ public class Picture { private Canvas mRecordingCanvas; private long mNativePicture; private static final int WORKING_STREAM_STORAGE = 16 * 1024; /** * Creates an empty picture that is ready to record. */ public Picture() { this(nativeConstructor(0)); } /** * Create a picture by making a copy of what has already been recorded in * src. The contents of src are unchanged, and if src changes later, those * changes will not be reflected in this picture. */ public Picture(Picture src) { this(nativeConstructor(src != null ? 
src.mNativePicture : 0)); } private Picture(long nativePicture) { if (nativePicture == 0) { throw new RuntimeException(); } mNativePicture = nativePicture; } @Override protected void finalize() throws Throwable { try { nativeDestructor(mNativePicture); mNativePicture = 0; } finally { super.finalize(); } } /** * To record a picture, call beginRecording() and then draw into the Canvas * that is returned. Nothing we appear on screen, but all of the draw * commands (e.g. {@link Canvas#drawRect(Rect, Paint)}) will be recorded. * To stop recording, call endRecording(). After endRecording() the Canvas * that was returned must no longer be used, and nothing should be drawn * into it. */ public Canvas beginRecording(int width, int height) { long ni = nativeBeginRecording(mNativePicture, width, height); mRecordingCanvas = new RecordingCanvas(this, ni); return mRecordingCanvas; } /** * Call endRecording when the picture is built. After this call, the picture * may be drawn, but the canvas that was returned by beginRecording must not * be used anymore. This is automatically called if {@link Picture#draw} * or {@link Canvas#drawPicture(Picture)} is called. */ public void endRecording() { if (mRecordingCanvas != null) { mRecordingCanvas = null; nativeEndRecording(mNativePicture); } } /** * Get the width of the picture as passed to beginRecording. This * does not reflect (per se) the content of the picture. */ public int getWidth() { return nativeGetWidth(mNativePicture); } /** * Get the height of the picture as passed to beginRecording. This * does not reflect (per se) the content of the picture. */ public int getHeight() { return nativeGetHeight(mNativePicture); } /** * Draw this picture on the canvas. * <p> * Prior to {@link android.os.Build.VERSION_CODES#LOLLIPOP}, this call could * have the side effect of changing the matrix and clip of the canvas * if this picture had imbalanced saves/restores. 
* * <p> * <strong>Note:</strong> This forces the picture to internally call * {@link Picture#endRecording()} in order to prepare for playback. * * @param canvas The picture is drawn to this canvas */ public void draw(Canvas canvas) { if (mRecordingCanvas != null) { endRecording(); } nativeDraw(canvas.getNativeCanvasWrapper(), mNativePicture); } /** * Create a new picture (already recorded) from the data in the stream. This * data was generated by a previous call to writeToStream(). Pictures that * have been persisted across device restarts are not guaranteed to decode * properly and are highly discouraged. * * @see #writeToStream(java.io.OutputStream) * @deprecated The recommended alternative is to not use writeToStream and * instead draw the picture into a Bitmap from which you can persist it as * raw or compressed pixels. */ @Deprecated public static Picture createFromStream(InputStream stream) { return new Picture(nativeCreateFromStream(stream, new byte[WORKING_STREAM_STORAGE])); } /** * Write the picture contents to a stream. The data can be used to recreate * the picture in this or another process by calling createFromStream(...) * The resulting stream is NOT to be persisted across device restarts as * there is no guarantee that the Picture can be successfully reconstructed. * * @see #createFromStream(java.io.InputStream) * @deprecated The recommended alternative is to draw the picture into a * Bitmap from which you can persist it as raw or compressed pixels. 
*/ @Deprecated public void writeToStream(OutputStream stream) { // do explicit check before calling the native method if (stream == null) { throw new NullPointerException(); } if (!nativeWriteToStream(mNativePicture, stream, new byte[WORKING_STREAM_STORAGE])) { throw new RuntimeException(); } } // return empty picture if src is 0, or a copy of the native src private static native long nativeConstructor(long nativeSrcOr0); private static native long nativeCreateFromStream(InputStream stream, byte[] storage); private static native int nativeGetWidth(long nativePicture); private static native int nativeGetHeight(long nativePicture); private static native long nativeBeginRecording(long nativeCanvas, int w, int h); private static native void nativeEndRecording(long nativeCanvas); private static native void nativeDraw(long nativeCanvas, long nativePicture); private static native boolean nativeWriteToStream(long nativePicture, OutputStream stream, byte[] storage); private static native void nativeDestructor(long nativePicture); private static class RecordingCanvas extends Canvas { private final Picture mPicture; public RecordingCanvas(Picture pict, long nativeCanvas) { super(nativeCanvas); mPicture = pict; } @Override public void setBitmap(Bitmap bitmap) { throw new RuntimeException("Cannot call setBitmap on a picture canvas"); } @Override public void drawPicture(Picture picture) { if (mPicture == picture) { throw new RuntimeException("Cannot draw a picture into its recording canvas"); } super.drawPicture(picture); } } }
2,651
364
<reponame>matus-chochlik/oglplus // File include/oglplus/enums/shader_type.ipp // // Automatically generated file, DO NOT modify manually. // Edit the source 'source/enums/oglplus/shader_type.txt' // or the 'source/enums/make_enum.py' script instead. // // Copyright 2010-2019 <NAME>. // Distributed under the Boost Software License, Version 1.0. // See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt // #if OGLPLUS_DOCUMENTATION_ONLY /// VERTEX_SHADER Vertex, /// TESS_CONTROL_SHADER TessControl, /// TESS_EVALUATION_SHADER TessEvaluation, /// GEOMETRY_SHADER Geometry, /// FRAGMENT_SHADER Fragment, /// COMPUTE_SHADER Compute #else // !OGLPLUS_DOCUMENTATION_ONLY #include <oglplus/enums/shader_type_def.ipp> #endif
303
765
<reponame>hyu-iot/gem5 /***************************************************************************** Licensed to Accellera Systems Initiative Inc. (Accellera) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. Accellera licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *****************************************************************************/ /***************************************************************************** sc_fxtype_params.cpp - Original Author: <NAME>, Synopsys, Inc. *****************************************************************************/ /***************************************************************************** MODIFICATION LOG - modifiers, enter your name, affiliation, date and changes you are making here. Name, Affiliation, Date: Description of Modification: *****************************************************************************/ // $Log: sc_fxtype_params.cpp,v $ // Revision 1.1.1.1 2006/12/15 20:20:04 acg // SystemC 2.3 // // Revision 1.3 2006/01/13 18:53:58 acg // <NAME>: added $Log command so that CVS comments are reproduced in // the source. // #include "sysc/datatypes/fx/sc_fxtype_params.h" namespace sc_dt { // ---------------------------------------------------------------------------- // CLASS : sc_fxtype_params // // Fixed-point type parameters class. 
// ---------------------------------------------------------------------------- const std::string sc_fxtype_params::to_string() const { std::string s; char buf[BUFSIZ]; s += "("; std::sprintf( buf, "%d", m_wl ); s += buf; s += ","; std::sprintf( buf, "%d", m_iwl ); s += buf; s += ","; s += sc_dt::to_string( m_q_mode ); s += ","; s += sc_dt::to_string( m_o_mode ); s += ","; std::sprintf( buf, "%d", m_n_bits ); s += buf; s += ")"; return s; } void sc_fxtype_params::print( ::std::ostream& os ) const { os << to_string(); } void sc_fxtype_params::dump( ::std::ostream& os ) const { os << "sc_fxtype_params" << ::std::endl; os << "(" << ::std::endl; os << "wl = " << m_wl << ::std::endl; os << "iwl = " << m_iwl << ::std::endl; os << "q_mode = " << m_q_mode << ::std::endl; os << "o_mode = " << m_o_mode << ::std::endl; os << "n_bits = " << m_n_bits << ::std::endl; os << ")" << ::std::endl; } } // namespace sc_dt // Taf!
950
651
/******************************************************************************
* Copyright (c) Intel Corporation - All rights reserved.                      *
* This file is part of the LIBXSMM library.                                   *
*                                                                             *
* For information on the license, see the LICENSE file.                       *
* Further information: https://github.com/hfp/libxsmm/                        *
* SPDX-License-Identifier: BSD-3-Clause                                       *
******************************************************************************/
/* <NAME>, <NAME> (Intel Corp.)
******************************************************************************/
#ifndef GENERATOR_MATELTWISE_TRANSFORM_COMMON_X86_H
#define GENERATOR_MATELTWISE_TRANSFORM_COMMON_X86_H

#include "generator_common.h"

/* Emits an X-way network of alternating "even"/"odd" unpack/shuffle
 * instructions over a block of vector registers starting at
 * i_vec_reg_src_start, writing to registers starting at i_vec_reg_dst_start.
 * i_in_idx selects the input-register pairing used at each of the i_ways
 * steps, and i_out_offset spaces the destination registers.
 * NOTE(review): the exact interleave semantics are fixed by the opcodes
 * passed via i_even_instr/i_odd_instr -- confirm against the generator's
 * .c implementation before relying on a particular lane layout. */
LIBXSMM_API_INTERN
void libxsmm_generator_transform_Xway_unpack_network_avx_avx512( libxsmm_generated_code* io_generated_code,
                                                                 const char              i_vector_name,
                                                                 const unsigned char*    i_in_idx,
                                                                 const unsigned int      i_vec_reg_src_start,
                                                                 const unsigned int      i_vec_reg_dst_start,
                                                                 const unsigned int      i_out_offset,
                                                                 const unsigned int      i_even_instr,
                                                                 const unsigned int      i_odd_instr,
                                                                 const unsigned int      i_ways );

/* Emits i_ways consecutive vector loads (using instruction i_ld_instr) from
 * the address in GP register i_gp_reg_in with leading dimension i_ld, into
 * vector registers starting at i_vec_reg_dst_start. Only i_valid_ways rows
 * carry real data; a load mask in i_mask_reg is applied when i_use_masking
 * is non-zero. */
LIBXSMM_API_INTERN
void libxsmm_generator_transform_Xway_full_load_avx_avx512( libxsmm_generated_code* io_generated_code,
                                                            const char              i_vector_name,
                                                            const unsigned int      i_gp_reg_in,
                                                            const unsigned int      i_vec_reg_dst_start,
                                                            const unsigned int      i_ld,
                                                            const unsigned int      i_ld_instr,
                                                            const unsigned int      i_ways,
                                                            const unsigned int      i_valid_ways,
                                                            const unsigned int      i_use_masking,
                                                            const unsigned int      i_mask_reg );

/* Emits i_ways consecutive vector stores (using instruction i_st_instr) to
 * the address in GP register i_gp_reg_out with leading dimension i_ld, from
 * vector registers starting at i_vec_reg_src_start. A store mask in
 * i_mask_reg is applied when i_use_masking is non-zero. */
LIBXSMM_API_INTERN
void libxsmm_generator_transform_Xway_full_store_avx_avx512( libxsmm_generated_code* io_generated_code,
                                                             const char              i_vector_name,
                                                             const unsigned int      i_gp_reg_out,
                                                             const unsigned int      i_vec_reg_src_start,
                                                             const unsigned int      i_ld,
                                                             const unsigned int      i_st_instr,
                                                             const unsigned int      i_use_masking,
                                                             const unsigned int      i_mask_reg,
                                                             const unsigned int      i_ways );

#endif /* GENERATOR_MATELTWISE_TRANSFORM_COMMON_X86_H */
2,503
887
<reponame>traversaro/gazebo<filename>gazebo/gui/model/LinkConfig_TEST.cc
/*
 * Copyright (C) 2015 Open Source Robotics Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/

#include "gazebo/gui/ConfigWidget.hh"
#include "gazebo/gui/model/LinkConfig.hh"
#include "gazebo/gui/model/LinkConfig_TEST.hh"
#include "test_config.h"

using namespace gazebo;
using namespace gui;

/////////////////////////////////////////////////
// Checks the widget's defaults right after construction: unit mass and
// diagonal inertia, gravity enabled, all other boolean flags disabled.
void LinkConfig_TEST::Initialization()
{
  LinkConfig lc;
  const ConfigWidget *cw = lc.LinkConfigWidget();

  QVERIFY(cw != NULL);

  QCOMPARE(cw->DoubleWidgetValue("inertial::mass"), 1.0);
  QCOMPARE(cw->DoubleWidgetValue("inertial::ixx"), 1.0);
  QCOMPARE(cw->DoubleWidgetValue("inertial::iyy"), 1.0);
  QCOMPARE(cw->DoubleWidgetValue("inertial::izz"), 1.0);
  QVERIFY(cw->BoolWidgetValue("gravity"));
  QVERIFY(!cw->BoolWidgetValue("self_collide"));
  QVERIFY(!cw->BoolWidgetValue("kinematic"));
  QVERIFY(!cw->BoolWidgetValue("enable_wind"));
}

/////////////////////////////////////////////////
// Verifies Update() copies the boolean flags from an incoming msgs::Link
// message into the corresponding config widgets.
void LinkConfig_TEST::LinkMsgUpdate()
{
  gazebo::gui::LinkConfig lc;
  msgs::LinkPtr linkMsgPtr(new msgs::Link);
  const ConfigWidget *cw = lc.LinkConfigWidget();

  QVERIFY(cw != NULL);

  linkMsgPtr->set_gravity(false);
  linkMsgPtr->set_self_collide(true);
  linkMsgPtr->set_kinematic(true);
  linkMsgPtr->set_enable_wind(true);

  lc.Update(linkMsgPtr);

  QVERIFY(!cw->BoolWidgetValue("gravity"));
  QVERIFY(cw->BoolWidgetValue("self_collide"));
  QVERIFY(cw->BoolWidgetValue("kinematic"));
  QVERIFY(cw->BoolWidgetValue("enable_wind"));
}

/////////////////////////////////////////////////
// Verifies SetPose() round-trips through the "pose" widget.
void LinkConfig_TEST::PoseUpdate()
{
  gazebo::gui::LinkConfig lc;
  const ignition::math::Pose3d pose(5.0, 10.0, 15.0, -0.1, -0.2, -0.3);
  const ConfigWidget *cw = lc.LinkConfigWidget();

  QVERIFY(cw != NULL);

  lc.SetPose(pose);

  ignition::math::Pose3d p = cw->PoseWidgetValue("pose");

  QCOMPARE(p, pose);
}

/////////////////////////////////////////////////
// Verifies SetMass() round-trips through the "inertial::mass" widget.
void LinkConfig_TEST::MassUpdate()
{
  gazebo::gui::LinkConfig lc;
  const ConfigWidget *cw = lc.LinkConfigWidget();

  QVERIFY(cw != NULL);

  lc.SetMass(50.0);

  QCOMPARE(cw->DoubleWidgetValue("inertial::mass"), 50.0);
}

/////////////////////////////////////////////////
// Verifies SetInertiaMatrix() writes all six inertia tensor components
// (ixx, iyy, izz, ixy, ixz, iyz) into their widgets.
void LinkConfig_TEST::InertiaMatrixUpdate()
{
  gazebo::gui::LinkConfig lc;
  const ConfigWidget *cw = lc.LinkConfigWidget();

  QVERIFY(cw != NULL);

  lc.SetInertiaMatrix(1.0, 2.0, 3.0, 4.0, 5.0, 6.0);

  QCOMPARE(cw->DoubleWidgetValue("inertial::ixx"), 1.0);
  QCOMPARE(cw->DoubleWidgetValue("inertial::iyy"), 2.0);
  QCOMPARE(cw->DoubleWidgetValue("inertial::izz"), 3.0);
  QCOMPARE(cw->DoubleWidgetValue("inertial::ixy"), 4.0);
  QCOMPARE(cw->DoubleWidgetValue("inertial::ixz"), 5.0);
  QCOMPARE(cw->DoubleWidgetValue("inertial::iyz"), 6.0);
}

/////////////////////////////////////////////////
// Verifies SetInertialPose() round-trips through the "inertial::pose" widget.
void LinkConfig_TEST::InertialPoseUpdate()
{
  gazebo::gui::LinkConfig lc;
  const ignition::math::Pose3d pose(5.0, 10.0, 15.0, -0.1, -0.2, -0.3);
  const ConfigWidget *cw = lc.LinkConfigWidget();

  QVERIFY(cw != NULL);

  lc.SetInertialPose(pose);

  ignition::math::Pose3d p = cw->PoseWidgetValue("inertial::pose");

  QCOMPARE(p, pose);
  QCOMPARE(p.Pos().X(), pose.Pos().X());
}

// Generate a main function for the test
QTEST_MAIN(LinkConfig_TEST)
1,429
1,109
<reponame>LuShengDong/wasabi<filename>modules/eventlog/src/test/java/com/intuit/wasabi/eventlog/events/AuthorizationChangeEventTest.java
/*******************************************************************************
 * Copyright 2016 Intuit
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package com.intuit.wasabi.eventlog.events;

import com.intuit.wasabi.authenticationobjects.UserInfo;
import com.intuit.wasabi.eventlog.EventLog;
import com.intuit.wasabi.experimentobjects.Application;
import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for {@link AuthorizationChangeEvent}.
 */
public class AuthorizationChangeEventTest {

    /**
     * Verifies the default description mentions the application, both the affected and the
     * invoking user, and the role change ("ADMIN" when a role is granted, "removed" when
     * the new role is empty).
     */
    @Test
    public void testGetDefaultDescription() throws Exception {
        UserInfo affectedUser = UserInfo.from(UserInfo.Username.valueOf("AffectedUser")).withUserId("AffectedUser").withFirstName("Aff").withLastName("Ected").build();
        UserInfo invokingUser = UserInfo.from(UserInfo.Username.valueOf("InvokingUser")).withUserId("InvokingUser").withFirstName("Inv").withLastName("Oking").build();

        AuthorizationChangeEvent event1 = new AuthorizationChangeEvent(invokingUser, Application.Name.valueOf("TestApp"), affectedUser, "", "ADMIN");
        Assert.assertTrue("Description does not contain the application name, was:\n" + event1.getDefaultDescription(),
                event1.getDefaultDescription().contains("TestApp"));
        Assert.assertTrue("Description does not contain the username (affected user), was:\n" + event1.getDefaultDescription(),
                event1.getDefaultDescription().contains(affectedUser.getUsername().toString()));
        Assert.assertTrue("Description does not contain the username (invoking user), was:\n" + event1.getDefaultDescription(),
                event1.getDefaultDescription().contains(invokingUser.getUsername().toString()));
        Assert.assertTrue("Description does not contain the role change, was:\n" + event1.getDefaultDescription(),
                event1.getDefaultDescription().contains("ADMIN"));

        // BUG FIX: the failure messages below previously printed event1's description
        // while the assertions checked event2, which made any failure here misleading.
        AuthorizationChangeEvent event2 = new AuthorizationChangeEvent(invokingUser, Application.Name.valueOf("TestApp"), affectedUser, "", "");
        Assert.assertTrue("Description does not contain the application name, was:\n" + event2.getDefaultDescription(),
                event2.getDefaultDescription().contains("TestApp"));
        Assert.assertTrue("Description does not contain the username (affected user), was:\n" + event2.getDefaultDescription(),
                event2.getDefaultDescription().contains(affectedUser.getUsername().toString()));
        Assert.assertTrue("Description does not contain the username (invoking user), was:\n" + event2.getDefaultDescription(),
                event2.getDefaultDescription().contains(invokingUser.getUsername().toString()));
        Assert.assertTrue("Description does not contain the role deletion (\"removed\"), was:\n" + event2.getDefaultDescription(),
                event2.getDefaultDescription().contains("removed"));
    }

    /**
     * Verifies the application name passed at construction is returned unchanged.
     */
    @Test
    public void testGetApplicationName() throws Exception {
        AuthorizationChangeEvent event = new AuthorizationChangeEvent(Application.Name.valueOf("TestApp"),
                EventLog.SYSTEM_USER, "", "ADMIN");
        Assert.assertEquals(Application.Name.valueOf("TestApp"), event.getApplicationName());
    }

    /**
     * Verifies the change-event properties (invoking user label, before/after values)
     * via the shared {@code AbstractChangeEventTest} helper.
     */
    @Test
    public void testChangeProperties() throws Exception {
        AuthorizationChangeEvent event = new AuthorizationChangeEvent(Application.Name.valueOf("TestApp"),
                EventLog.SYSTEM_USER, "", "ADMIN");
        AbstractChangeEventTest.testValidSystemEvent(event, "System User (system_user)", "", "ADMIN");
    }
}
1,290
518
<filename>manifests/definitions/82.json { "name": "<NAME>", "category": "User Support & Survey", "start_url": "https://www.surveymonkey.com/user/sign-in/", "icons": [ { "src": "https://cdn.filestackcontent.com/Z4YKjQm3QCqtKrLRcu0G", "platform": "browserx" } ], "theme_color": "#00BF6F", "scope": "https://www.surveymonkey.com", "bx_legacy_service_id": "surveymonkey" }
181
1,607
<reponame>groov1kk/assertj-core<gh_stars>1000+
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2022 the original author or authors.
 */
package org.assertj.core.data;

import static java.lang.Math.abs;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static org.assertj.core.util.Preconditions.checkArgument;

import java.time.Duration;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalUnit;
import java.util.Objects;

/**
 * Base class for {@link TemporalOffset} on basis of {@link TemporalUnit}.
 * @since 3.7.0
 */
public abstract class TemporalUnitOffset implements TemporalOffset<Temporal> {

  protected final TemporalUnit unit;
  protected final long value;

  /**
   * Creates a new temporal offset for a given temporal unit.
   * @param value the value of the offset.
   * @param unit temporal unit of the offset.
   * @throws NullPointerException if the given unit is {@code null}.
   * @throws IllegalArgumentException if the given value is negative.
   */
  public TemporalUnitOffset(long value, TemporalUnit unit) {
    requireNonNull(unit);
    checkThatValueIsPositive(value);
    this.value = value;
    this.unit = unit;
  }

  private static void checkThatValueIsPositive(long value) {
    // NOTE(review): the check accepts zero while the message says "greater than zero";
    // the message is kept unchanged because external tests may assert on its text.
    checkArgument(value >= 0, "The value of the offset should be greater than zero");
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public String getBeyondOffsetDifferenceDescription(Temporal temporal1, Temporal temporal2) {
    try {
      return format("%s %s but difference was %s %s", value, unit, getDifference(temporal1, temporal2), unit);
    } catch (@SuppressWarnings("unused") ArithmeticException e) {
      // unit.between overflowed; fall back to reporting the difference as a Duration.
      return format("%s %s but difference was %s", value, unit, getAbsoluteDuration(temporal1, temporal2));
    }
  }

  /**
   * Returns absolute value of the difference according to time unit.
   *
   * @param temporal1 the first {@link Temporal}
   * @param temporal2 the second {@link Temporal}
   * @return absolute value of the difference according to time unit.
   */
  protected long getDifference(Temporal temporal1, Temporal temporal2) {
    return abs(unit.between(temporal1, temporal2));
  }

  /**
   * Returns absolute value of the difference as Duration.
   *
   * @param temporal1 the first {@link Temporal}
   * @param temporal2 the second {@link Temporal}
   * @return absolute value of the difference as Duration.
   */
  protected Duration getAbsoluteDuration(Temporal temporal1, Temporal temporal2) {
    return Duration.between(temporal1, temporal2).abs();
  }

  public TemporalUnit getUnit() {
    return unit;
  }

  @Override
  public int hashCode() {
    // BUG FIX: previously only 'value' participated in hashCode/equals, so offsets
    // with the same value but different units (e.g. 5 SECONDS vs 5 MINUTES) compared
    // equal. Both methods now include the unit, preserving the equals/hashCode
    // contract (equal objects still produce equal hash codes).
    return Objects.hash(value, unit);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) return true;
    if (obj == null) return false;
    if (getClass() != obj.getClass()) return false;
    TemporalUnitOffset other = (TemporalUnitOffset) obj;
    return value == other.value && Objects.equals(unit, other.unit);
  }
}
1,062
1,666
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/libplatform/default-foreground-task-runner.h"

#include "src/base/platform/mutex.h"
#include "src/libplatform/default-platform.h"

namespace v8 {
namespace platform {

// RAII scope that tracks re-entrant task execution. The nesting depth decides
// whether non-nestable tasks may be popped (see HasPoppableTaskInQueue).
DefaultForegroundTaskRunner::RunTaskScope::RunTaskScope(
    std::shared_ptr<DefaultForegroundTaskRunner> task_runner)
    : task_runner_(task_runner) {
  DCHECK_GE(task_runner->nesting_depth_, 0);
  task_runner->nesting_depth_++;
}

DefaultForegroundTaskRunner::RunTaskScope::~RunTaskScope() {
  DCHECK_GT(task_runner_->nesting_depth_, 0);
  task_runner_->nesting_depth_--;
}

DefaultForegroundTaskRunner::DefaultForegroundTaskRunner(
    IdleTaskSupport idle_task_support, TimeFunction time_function)
    : idle_task_support_(idle_task_support), time_function_(time_function) {}

// Marks the runner as terminated and discards all pending work. Any task
// posted afterwards is silently dropped (see PostTaskLocked and friends).
void DefaultForegroundTaskRunner::Terminate() {
  base::MutexGuard guard(&lock_);
  terminated_ = true;

  // Drain the task queues.
  while (!task_queue_.empty()) task_queue_.pop_front();
  while (!delayed_task_queue_.empty()) delayed_task_queue_.pop();
  while (!idle_task_queue_.empty()) idle_task_queue_.pop();
}

// Appends a task (tagged with its nestability) to the main queue and wakes one
// thread blocked in WaitForTaskLocked. Caller must hold lock_, witnessed by
// the MutexGuard parameter. No-op after Terminate().
void DefaultForegroundTaskRunner::PostTaskLocked(std::unique_ptr<Task> task,
                                                 Nestability nestability,
                                                 const base::MutexGuard&) {
  if (terminated_) return;
  task_queue_.push_back(std::make_pair(nestability, std::move(task)));
  event_loop_control_.NotifyOne();
}

void DefaultForegroundTaskRunner::PostTask(std::unique_ptr<Task> task) {
  base::MutexGuard guard(&lock_);
  PostTaskLocked(std::move(task), kNestable, guard);
}

double DefaultForegroundTaskRunner::MonotonicallyIncreasingTime() {
  return time_function_();
}

// Queues a task to become runnable once its absolute deadline
// (now + delay_in_seconds) has passed; see PopTaskFromDelayedQueueLocked.
void DefaultForegroundTaskRunner::PostDelayedTask(std::unique_ptr<Task> task,
                                                  double delay_in_seconds) {
  DCHECK_GE(delay_in_seconds, 0.0);
  base::MutexGuard guard(&lock_);
  if (terminated_) return;
  double deadline = MonotonicallyIncreasingTime() + delay_in_seconds;
  delayed_task_queue_.push(std::make_pair(deadline, std::move(task)));
}

// Only legal when the runner was constructed with IdleTaskSupport::kEnabled.
void DefaultForegroundTaskRunner::PostIdleTask(std::unique_ptr<IdleTask> task) {
  CHECK_EQ(IdleTaskSupport::kEnabled, idle_task_support_);
  base::MutexGuard guard(&lock_);
  if (terminated_) return;
  idle_task_queue_.push(std::move(task));
}

bool DefaultForegroundTaskRunner::IdleTasksEnabled() {
  return idle_task_support_ == IdleTaskSupport::kEnabled;
}

void DefaultForegroundTaskRunner::PostNonNestableTask(
    std::unique_ptr<Task> task) {
  base::MutexGuard guard(&lock_);
  PostTaskLocked(std::move(task), kNonNestable, guard);
}

bool DefaultForegroundTaskRunner::NonNestableTasksEnabled() const {
  return true;
}

// Returns true if PopTaskFromQueue could return a task right now: at the top
// nesting level any task qualifies; inside a running task, only kNestable ones.
bool DefaultForegroundTaskRunner::HasPoppableTaskInQueue() const {
  if (nesting_depth_ == 0) return !task_queue_.empty();
  for (auto it = task_queue_.cbegin(); it != task_queue_.cend(); it++) {
    if (it->first == kNestable) return true;
  }
  return false;
}

std::unique_ptr<Task> DefaultForegroundTaskRunner::PopTaskFromQueue(
    MessageLoopBehavior wait_for_work) {
  base::MutexGuard guard(&lock_);
  // Move delayed tasks that hit their deadline to the main queue.
  std::unique_ptr<Task> task = PopTaskFromDelayedQueueLocked(guard);
  while (task) {
    PostTaskLocked(std::move(task), kNestable, guard);
    task = PopTaskFromDelayedQueueLocked(guard);
  }

  while (!HasPoppableTaskInQueue()) {
    if (wait_for_work == MessageLoopBehavior::kDoNotWait) return {};
    WaitForTaskLocked(guard);
  }

  auto it = task_queue_.begin();
  for (; it != task_queue_.end(); it++) {
    // When the task queue is nested (i.e. popping a task from the queue from
    // within a task), only nestable tasks may run. Otherwise, any task may run.
    if (nesting_depth_ == 0 || it->first == kNestable) break;
  }
  // The loop above always terminates at a poppable entry because
  // HasPoppableTaskInQueue() held on loop exit, under the same lock.
  DCHECK(it != task_queue_.end());

  task = std::move(it->second);
  task_queue_.erase(it);

  return task;
}

// Returns the next delayed task whose deadline has passed, or an empty pointer
// when none is due. Caller must hold lock_ (MutexGuard witness parameter).
std::unique_ptr<Task>
DefaultForegroundTaskRunner::PopTaskFromDelayedQueueLocked(
    const base::MutexGuard&) {
  if (delayed_task_queue_.empty()) return {};

  double now = MonotonicallyIncreasingTime();
  const DelayedEntry& deadline_and_task = delayed_task_queue_.top();
  if (deadline_and_task.first > now) return {};
  // The const_cast here is necessary because there does not exist a clean way
  // to get a unique_ptr out of the priority queue. We provide the priority
  // queue with a custom comparison operator to make sure that the priority
  // queue does not access the unique_ptr. Therefore it should be safe to reset
  // the unique_ptr in the priority queue here. Note that the DelayedEntry is
  // removed from the priority_queue immediately afterwards.
  std::unique_ptr<Task> result =
      std::move(const_cast<DelayedEntry&>(deadline_and_task).second);
  delayed_task_queue_.pop();
  return result;
}

std::unique_ptr<IdleTask> DefaultForegroundTaskRunner::PopTaskFromIdleQueue() {
  base::MutexGuard guard(&lock_);
  if (idle_task_queue_.empty()) return {};

  std::unique_ptr<IdleTask> task = std::move(idle_task_queue_.front());
  idle_task_queue_.pop();

  return task;
}

// Blocks until PostTaskLocked signals new work. lock_ is released while
// waiting and re-acquired before returning (condition-variable semantics).
void DefaultForegroundTaskRunner::WaitForTaskLocked(const base::MutexGuard&) {
  event_loop_control_.Wait(&lock_);
}

}  // namespace platform
}  // namespace v8
1,900
850
from .. utils import TranspileTestCase, BuiltinFunctionTestCase


class BytesTests(TranspileTestCase):
    """Transpile tests specific to the bytes type (none implemented yet)."""
    pass


class BuiltinBytesFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
    """Exercises the built-in bytes() function through the generic
    BuiltinFunctionTestCase machinery."""

    # Built-in names the shared test-case machinery generates tests for.
    functions = ["bytes"]
67
2,406
// Copyright (C) 2018-2021 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // #include <vpu/frontend/frontend.hpp> #include <memory> #include <vpu/utils/numeric.hpp> #include <vpu/utils/profiling.hpp> #include <vpu/stages/stub_stage.hpp> namespace vpu { namespace { class StubPriorBoxStage final : public StageNode { public: using StageNode::StageNode; private: StagePtr cloneImpl() const override { return std::make_shared<StubPriorBoxStage>(*this); } void propagateDataOrderImpl(StageDataInfo<DimsOrder> &orderInfo) override { } void getDataStridesRequirementsImpl(StageDataInfo<StridesRequirement> &stridesInfo) override { } void finalizeDataLayoutImpl() override { } void getBatchSupportInfoImpl(StageDataInfo<BatchSupport> & /*batchInfo*/) override { } void initialCheckImpl() const override { IE_ASSERT(numInputs() == 2); IE_ASSERT(numOutputs() == 1); assertInputsOutputsTypes(this, {{DataType::FP16}, {DataType::FP16}}, {{DataType::FP16}}); } void serializeParamsImpl(BlobSerializer &) const override { VPU_THROW_EXCEPTION << "Must never be called"; } void serializeDataImpl(BlobSerializer &) const override { VPU_THROW_EXCEPTION << "Must never be called"; } }; } // namespace void FrontEnd::parsePriorBox(const Model& model, const ie::CNNLayerPtr& layer, const DataVector& inputs, const DataVector& outputs) const { IE_ASSERT(inputs.size() == 2); IE_ASSERT(outputs.size() == 1); model->addNewStage<StubPriorBoxStage>(layer->name, StageType::StubPriorBox, layer, inputs, outputs); } void FrontEnd::parsePriorBoxClustered(const Model& model, const ie::CNNLayerPtr& layer, const DataVector& inputs, const DataVector& outputs) const { IE_ASSERT(inputs.size() == 2); IE_ASSERT(outputs.size() == 1); model->addNewStage<StubPriorBoxStage>(layer->name, StageType::StubPriorBoxClustered, layer, inputs, outputs); } } // namespace vpu
799
348
{"nom":"Laféline","circ":"1ère circonscription","dpt":"Allier","inscrits":157,"abs":59,"votants":98,"blancs":2,"nuls":3,"exp":93,"res":[{"nuance":"COM","nom":"<NAME>","voix":57},{"nuance":"REM","nom":"<NAME>","voix":36}]}
88
1,639
<gh_stars>1000+ #include<bits/stdc++.h> using namespace std; const int N = 1e5 + 9; int t[N][18], a[N]; void build(int n) { for(int i = 1; i <= n; ++i) t[i][0] = a[i]; for(int k = 1; k < 18; ++k) { for(int i = 1; i + (1 << k) - 1 <= n; ++i) { t[i][k] = min(t[i][k - 1], t[i + (1 << (k - 1))][k - 1]); } } } int query(int l, int r) { int k = 31 - __builtin_clz(r - l + 1); return min(t[l][k], t[r - (1 << k) + 1][k]); } int32_t main() { ios_base::sync_with_stdio(0); cin.tie(0); int n; cin >> n; for(int i = 1; i <= n; i++) cin >> a[i]; build(n); int q; cin >> q; while(q--) { int l, r; cin >> l >> r; ++l; ++r; cout << query(l, r) << '\n'; } return 0; }
433
1,738
/*
 * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
 * its licensors.
 *
 * For complete copyright and license terms please see the LICENSE at the root of this
 * distribution (the "License"). All use of this software is governed by the License,
 * or, if provided, by the license below or the license accompanying this file. Do not
 * remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 */

#include "AssetMemoryAnalyzer_precompiled.h"

#include "FormatUtils.h"

#include "AssetMemoryAnalyzer.h"
#include <AzFramework/StringFunc/StringFunc.h>

namespace AssetMemoryAnalyzer
{
    namespace FormatUtils
    {
        // Formats a code point as "<file>:<line>".
        // NOTE(review): returns a pointer into a function-local static buffer, so
        // the result is overwritten by the next call and the function is neither
        // reentrant nor thread-safe. Callers must copy or consume it immediately.
        const char* FormatCodePoint(const Data::CodePoint& cp)
        {
            static char buff[1024];
            azsnprintf(buff, sizeof(buff), "%s:%d", cp.m_file, cp.m_line);

            return buff;
        }

        // Formats a byte count as kilobytes with two decimal places and
        // digit grouping (e.g. "1,024.00"). Same static-buffer caveat as
        // FormatCodePoint: not reentrant / thread-safe.
        const char* FormatKB(size_t bytes)
        {
            static char buff[32];
            // len - 3 skips the ".xx" suffix so grouping only touches the
            // integer part of the number.
            int len = azsnprintf(buff, sizeof(buff), "%0.2f", bytes / 1024.0f);
            AzFramework::StringFunc::NumberFormatting::GroupDigits(buff, sizeof(buff), len - 3);

            return buff;
        }
    }
}
481
383
<gh_stars>100-1000 #pragma once #include <optional> #include "json.h" using namespace nlohmann; #ifdef _MSC_VER // Microsoft compilers # define GET_ARG_COUNT(...) INTERNAL_EXPAND_ARGS_PRIVATE(INTERNAL_ARGS_AUGMENTER(__VA_ARGS__)) # define INTERNAL_ARGS_AUGMENTER(...) unused, __VA_ARGS__ # define INTERNAL_EXPAND(x) x # define INTERNAL_EXPAND_ARGS_PRIVATE(...) INTERNAL_EXPAND(INTERNAL_GET_ARG_COUNT_PRIVATE(__VA_ARGS__, 69, 68, 67, 66, 65, 64, 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52, 51, 50, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)) # define INTERNAL_GET_ARG_COUNT_PRIVATE(_1_, _2_, _3_, _4_, _5_, _6_, _7_, _8_, _9_, _10_, _11_, _12_, _13_, _14_, _15_, _16_, _17_, _18_, _19_, _20_, _21_, _22_, _23_, _24_, _25_, _26_, _27_, _28_, _29_, _30_, _31_, _32_, _33_, _34_, _35_, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64, _65, _66, _67, _68, _69, _70, count, ...) count #else // Non-Microsoft compilers # define GET_ARG_COUNT(...) INTERNAL_GET_ARG_COUNT_PRIVATE(0, ## __VA_ARGS__, 70, 69, 68, 67, 66, 65, 64, 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52, 51, 50, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0) # define INTERNAL_GET_ARG_COUNT_PRIVATE(_0, _1_, _2_, _3_, _4_, _5_, _6_, _7_, _8_, _9_, _10_, _11_, _12_, _13_, _14_, _15_, _16_, _17_, _18_, _19_, _20_, _21_, _22_, _23_, _24_, _25_, _26_, _27_, _28_, _29_, _30_, _31_, _32_, _33_, _34_, _35_, _36, _37, _38, _39, _40, _41, _42, _43, _44, _45, _46, _47, _48, _49, _50, _51, _52, _53, _54, _55, _56, _57, _58, _59, _60, _61, _62, _63, _64, _65, _66, _67, _68, _69, _70, count, ...) 
count #endif #define STRINGIZE(arg) STRINGIZE1(arg) #define STRINGIZE1(arg) STRINGIZE2(arg) #define STRINGIZE2(arg) #arg #define CONCATENATE(arg1, arg2) CONCATENATE1(arg1, arg2) #define CONCATENATE1(arg1, arg2) CONCATENATE2(arg1, arg2) #define CONCATENATE2(arg1, arg2) arg1##arg2 #define FOR_EACH_0(what) #define FOR_EACH_1(what, x1) what(x1) #define FOR_EACH_2(what, x1, x2)\ what(x1);\ FOR_EACH_1(what, x2) #define FOR_EACH_3(what, x1, x2, x3)\ what(x1);\ FOR_EACH_2(what, x2, x3) #define FOR_EACH_4(what, x1, x2, x3, x4)\ what(x1);\ FOR_EACH_3(what, x2, x3, x4) #define FOR_EACH_5(what, x1, x2, x3, x4, x5)\ what(x1);\ FOR_EACH_4(what, x2, x3, x4, x5) #define FOR_EACH_6(what, x1, x2, x3, x4, x5, x6)\ what(x1);\ FOR_EACH_5(what, x2, x3, x4, x5, x6) #define FOR_EACH_7(what, x1, x2, x3, x4, x5, x6, x7)\ what(x1);\ FOR_EACH_6(what, x2, x3, x4, x5, x6, x7) #define FOR_EACH_8(what, x1, x2, x3, x4, x5, x6, x7, x8)\ what(x1);\ FOR_EACH_7(what, x2, x3, x4, x5, x6, x7, x8) #define FOR_EACH_9(what, x1, x2, x3, x4, x5, x6, x7, x8, x9)\ what(x1);\ FOR_EACH_8(what, x2, x3, x4, x5, x6, x7, x8, x9) #define FOR_EACH_10(what, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)\ what(x1);\ FOR_EACH_9(what, x2, x3, x4, x5, x6, x7, x8, x9, x10) #define FOR_EACH_11(what, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)\ what(x1);\ FOR_EACH_10(what, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) #define FOR_EACH_12(what, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)\ what(x1);\ FOR_EACH_11(what, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) #define FOR_EACH_13(what, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)\ what(x1);\ FOR_EACH_12(what, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) #define FOR_EACH_14(what, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)\ what(x1);\ FOR_EACH_13(what, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) #define VA_ARGS(...) , ##__VA_ARGS__ #define FOR_EACH_(N, what, ...) 
CONCATENATE(FOR_EACH_, N)(what VA_ARGS(__VA_ARGS__)) #define FOR_EACH(what, ...) FOR_EACH_(GET_ARG_COUNT(__VA_ARGS__), what VA_ARGS(__VA_ARGS__)) template<class A> struct json_setter { __forceinline static void set(const A& field, json& j, const char* str) { j[str] = field; } }; template<class A> struct json_setter<std::optional<A>> { __forceinline static void set(const std::optional<A>& field, json& j, const char* str) { if (field) { j[str] = field.value(); } } }; template<class A> struct json_getter { __forceinline static void get(A& field, const json& j, const char* str) { field = j.at(str).get<A>(); } }; template<class A> struct json_getter<std::optional<A>> { __forceinline static void get(std::optional<A>& field, const json& j, const char* str) { if (j.find(str) != j.cend()) { field = j[str].get<A>(); } } }; #define JSON_SET(field) \ json_setter<std::decay<decltype(object.field)>::type>::set(object.field, j, STRINGIZE(field)) #define JSON_GET(field)\ json_getter<std::decay<decltype(object.field)>::type>::get(object.field, j, STRINGIZE(field)) #define JSON_SERIALIZE(CLS, ...)\ inline void to_json(json&j, const CLS& object) \ {\ j = json{};\ FOR_EACH(JSON_SET, __VA_ARGS__);\ }\ #define JSON_DESERIALIZE(CLS, ...)\ inline void from_json(const json&j, CLS& object) \ {\ FOR_EACH(JSON_GET, __VA_ARGS__);\ }\ #define JSON_AUTO(CLS, ...)\ JSON_DESERIALIZE(CLS, __VA_ARGS__)\ JSON_SERIALIZE(CLS, __VA_ARGS__)
3,143
3,212
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.registry.provider.flow; import org.apache.nifi.registry.flow.FlowPersistenceException; import org.apache.nifi.registry.flow.FlowPersistenceProvider; import org.apache.nifi.registry.flow.FlowSnapshotContext; import org.apache.nifi.registry.provider.ProviderConfigurationContext; import org.apache.nifi.registry.provider.ProviderContext; import org.apache.nifi.registry.provider.ProviderCreationException; import org.springframework.jdbc.core.JdbcTemplate; import javax.sql.DataSource; import java.util.ArrayList; import java.util.List; /** * A FlowPersistenceProvider that uses a database table for storage. The intent is to use the same database as the rest * of the application so that all data can be stored together and benefit from any replication/scaling of the database. 
*/ public class DatabaseFlowPersistenceProvider implements FlowPersistenceProvider { private DataSource dataSource; private JdbcTemplate jdbcTemplate; @ProviderContext public void setDataSource(final DataSource dataSource) { this.dataSource = dataSource; this.jdbcTemplate = new JdbcTemplate(this.dataSource); } @Override public void onConfigured(final ProviderConfigurationContext configurationContext) throws ProviderCreationException { // there is no config since we get the DataSource from the framework } @Override public void saveFlowContent(final FlowSnapshotContext context, final byte[] content) throws FlowPersistenceException { final String sql = "INSERT INTO FLOW_PERSISTENCE_PROVIDER (BUCKET_ID, FLOW_ID, VERSION, FLOW_CONTENT) VALUES (?, ?, ?, ?)"; jdbcTemplate.update(sql, context.getBucketId(), context.getFlowId(), context.getVersion(), content); } @Override public byte[] getFlowContent(final String bucketId, final String flowId, final int version) throws FlowPersistenceException { final List<byte[]> results = new ArrayList<>(); final String sql = "SELECT FLOW_CONTENT FROM FLOW_PERSISTENCE_PROVIDER WHERE BUCKET_ID = ? and FLOW_ID = ? and VERSION = ?"; jdbcTemplate.query(sql, new Object[] {bucketId, flowId, version}, (rs) -> { final byte[] content = rs.getBytes("FLOW_CONTENT"); results.add(content); }); if (results.isEmpty()) { return null; } else { return results.get(0); } } @Override public void deleteAllFlowContent(final String bucketId, final String flowId) throws FlowPersistenceException { final String sql = "DELETE FROM FLOW_PERSISTENCE_PROVIDER WHERE BUCKET_ID = ? and FLOW_ID = ?"; jdbcTemplate.update(sql, bucketId, flowId); } @Override public void deleteFlowContent(final String bucketId, final String flowId, final int version) throws FlowPersistenceException { final String sql = "DELETE FROM FLOW_PERSISTENCE_PROVIDER WHERE BUCKET_ID = ? and FLOW_ID = ? and VERSION = ?"; jdbcTemplate.update(sql, bucketId, flowId, version); } }
1,223
5,279
<gh_stars>1000+
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.kinesis;

import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists.newArrayList;

import java.util.List;

/**
 * Filters out records, which were already processed and checkpointed.
 *
 * <p>We need this step, because we can get iterators from Kinesis only with "sequenceNumber"
 * accuracy, not with "subSequenceNumber" accuracy.
 */
class RecordFilter {

  /**
   * Returns the subset of {@code records} that the checkpoint reports as at or before
   * its position, preserving the original order.
   */
  public List<KinesisRecord> apply(List<KinesisRecord> records, ShardCheckpoint checkpoint) {
    List<KinesisRecord> accepted = newArrayList();
    for (KinesisRecord candidate : records) {
      if (!checkpoint.isBeforeOrAt(candidate)) {
        continue;  // already processed under this checkpoint; drop it
      }
      accepted.add(candidate);
    }
    return accepted;
  }
}
457
14,668
// Copyright (c) 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "components/offline_pages/core/offline_clock.h" #include <ostream> #include "base/check.h" #include "base/time/default_clock.h" #include "base/time/time.h" namespace offline_pages { namespace { const base::Clock* custom_clock_ = nullptr; } const base::Clock* OfflineClock() { if (custom_clock_) return custom_clock_; return base::DefaultClock::GetInstance(); } void SetOfflineClockForTesting(const base::Clock* clock) { DCHECK(clock == nullptr || custom_clock_ == nullptr) << "Offline clock is being overridden a second time, which might " "indicate a bug."; custom_clock_ = clock; } base::Time OfflineTimeNow() { return OfflineClock()->Now(); } } // namespace offline_pages
296
460
# Package initializer: re-export the ``project`` helper from the
# ``screenpoint`` submodule so callers can use it at package level.
from . import screenpoint

# Public alias; equivalent to ``screenpoint.project``.
project = screenpoint.project
22
860
/**
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package groovy.lang;

import java.io.IOException;
import java.io.Writer;

/**
 * Represents an object which is capable of writing itself to a text stream
 * in a more efficient format than just creating a toString() representation
 * of itself. This mechanism is particularly useful for templates and such like.
 * <p>
 * It is worth noting that writable implementations often override their
 * toString() implementation as well to allow rendering the same result
 * directly to a String; however this is not required.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 */
public interface Writable {

    /**
     * Writes this object to the given writer.
     * <p>
     * This is used to defer content creation until the point when it is
     * streamed to the output destination. Oftentimes, content will be defined
     * but not necessarily created (as is may be the case with a Closure
     * definition.) In that case, the output is then 'deferred' to the point
     * when it is serialized to the writer. This class may be used whenever an
     * object should be responsible for creating its own textual representation,
     * but creating the entire output as a single String would be inefficient
     * (such as outputting a multi-gigabyte XML document.)
     *
     * @param out the Writer to which this Writable should output its data.
     * @return the Writer that was passed
     * @throws IOException if an error occurred while outputting data to the writer
     */
    Writer writeTo(Writer out) throws IOException;
}
669
1,001
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data


class DescribeSecurityGroupsRequest(RpcRequest):
    """RPC request object for the ECS ``DescribeSecurityGroups`` API (2014-05-26).

    Generated SDK code: each get_/set_ pair mirrors one query parameter of the
    API; do not edit the accessors by hand.
    """

    def __init__(self):
        # Product 'Ecs', API version '2014-05-26', action 'DescribeSecurityGroups'.
        RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeSecurityGroups', 'ecs')
        self.set_method('POST')
        # Wire up regional endpoint resolution when the core SDK supports it.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ResourceOwnerId(self):  # Long
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):  # Long
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_FuzzyQuery(self):  # Boolean
        return self.get_query_params().get('FuzzyQuery')

    def set_FuzzyQuery(self, FuzzyQuery):  # Boolean
        self.add_query_param('FuzzyQuery', FuzzyQuery)

    def get_SecurityGroupId(self):  # String
        return self.get_query_params().get('SecurityGroupId')

    def set_SecurityGroupId(self, SecurityGroupId):  # String
        self.add_query_param('SecurityGroupId', SecurityGroupId)

    def get_IsQueryEcsCount(self):  # Boolean
        return self.get_query_params().get('IsQueryEcsCount')

    def set_IsQueryEcsCount(self, IsQueryEcsCount):  # Boolean
        self.add_query_param('IsQueryEcsCount', IsQueryEcsCount)

    def get_NetworkType(self):  # String
        return self.get_query_params().get('NetworkType')

    def set_NetworkType(self, NetworkType):  # String
        self.add_query_param('NetworkType', NetworkType)

    def get_SecurityGroupName(self):  # String
        return self.get_query_params().get('SecurityGroupName')

    def set_SecurityGroupName(self, SecurityGroupName):  # String
        self.add_query_param('SecurityGroupName', SecurityGroupName)

    def get_PageNumber(self):  # Integer
        return self.get_query_params().get('PageNumber')

    def set_PageNumber(self, PageNumber):  # Integer
        self.add_query_param('PageNumber', PageNumber)

    def get_ResourceGroupId(self):  # String
        return self.get_query_params().get('ResourceGroupId')

    def set_ResourceGroupId(self, ResourceGroupId):  # String
        self.add_query_param('ResourceGroupId', ResourceGroupId)

    def get_NextToken(self):  # String
        return self.get_query_params().get('NextToken')

    def set_NextToken(self, NextToken):  # String
        self.add_query_param('NextToken', NextToken)

    def get_PageSize(self):  # Integer
        return self.get_query_params().get('PageSize')

    def set_PageSize(self, PageSize):  # Integer
        self.add_query_param('PageSize', PageSize)

    def get_Tags(self):  # RepeatList
        return self.get_query_params().get('Tag')

    def set_Tags(self, Tag):  # RepeatList
        # Repeated parameter: flattened as Tag.1.value / Tag.1.Key, Tag.2..., etc.
        for depth1 in range(len(Tag)):
            if Tag[depth1].get('value') is not None:
                self.add_query_param('Tag.' + str(depth1 + 1) + '.value', Tag[depth1].get('value'))
            if Tag[depth1].get('Key') is not None:
                self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))

    def get_DryRun(self):  # Boolean
        return self.get_query_params().get('DryRun')

    def set_DryRun(self, DryRun):  # Boolean
        self.add_query_param('DryRun', DryRun)

    def get_ResourceOwnerAccount(self):  # String
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self, ResourceOwnerAccount):  # String
        self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

    def get_OwnerAccount(self):  # String
        return self.get_query_params().get('OwnerAccount')

    def set_OwnerAccount(self, OwnerAccount):  # String
        self.add_query_param('OwnerAccount', OwnerAccount)

    def get_OwnerId(self):  # Long
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):  # Long
        self.add_query_param('OwnerId', OwnerId)

    def get_SecurityGroupIds(self):  # String
        return self.get_query_params().get('SecurityGroupIds')

    def set_SecurityGroupIds(self, SecurityGroupIds):  # String
        self.add_query_param('SecurityGroupIds', SecurityGroupIds)

    def get_SecurityGroupType(self):  # String
        return self.get_query_params().get('SecurityGroupType')

    def set_SecurityGroupType(self, SecurityGroupType):  # String
        self.add_query_param('SecurityGroupType', SecurityGroupType)

    def get_VpcId(self):  # String
        return self.get_query_params().get('VpcId')

    def set_VpcId(self, VpcId):  # String
        self.add_query_param('VpcId', VpcId)

    def get_MaxResults(self):  # Integer
        return self.get_query_params().get('MaxResults')

    def set_MaxResults(self, MaxResults):  # Integer
        self.add_query_param('MaxResults', MaxResults)
1,909
3,579
package com.querydsl.jpa.domain.sql;

import static com.querydsl.core.types.PathMetadataFactory.forVariable;

import javax.annotation.Generated;

import com.querydsl.core.types.Path;
import com.querydsl.core.types.PathMetadata;
import com.querydsl.core.types.dsl.NumberPath;

import com.querydsl.sql.ColumnMetadata;

/**
 * SEviltype is a Querydsl query type for SEviltype.
 *
 * <p>Generated by the Querydsl SQL code generator — do not edit by hand;
 * regenerate from the database schema instead.
 */
@Generated("com.querydsl.sql.codegen.MetaDataSerializer")
public class SEviltype extends com.querydsl.sql.RelationalPathBase<SEviltype> {

    private static final long serialVersionUID = 1348954496;

    // Default alias for the "eviltype_" table.
    public static final SEviltype eviltype_ = new SEviltype("eviltype_");

    // One NumberPath per table column.
    public final NumberPath<Integer> _asc = createNumber("_asc", Integer.class);

    public final NumberPath<Integer> _desc = createNumber("_desc", Integer.class);

    public final NumberPath<Integer> getClassId = createNumber("getClassId", Integer.class);

    public final NumberPath<Integer> getId = createNumber("getId", Integer.class);

    public final NumberPath<Integer> getMetadataId = createNumber("getMetadataId", Integer.class);

    public final NumberPath<Integer> getTypeId = createNumber("getTypeId", Integer.class);

    public final NumberPath<Integer> hashCodeId = createNumber("hashCodeId", Integer.class);

    public final NumberPath<Integer> id = createNumber("id", Integer.class);

    public final NumberPath<Integer> isnotnullId = createNumber("isnotnullId", Integer.class);

    public final NumberPath<Integer> isnullId = createNumber("isnullId", Integer.class);

    public final NumberPath<Integer> notifyAllId = createNumber("notifyAllId", Integer.class);

    public final NumberPath<Integer> notifyId = createNumber("notifyId", Integer.class);

    public final NumberPath<Integer> toStringId = createNumber("toStringId", Integer.class);

    public final NumberPath<Integer> waitId = createNumber("waitId", Integer.class);

    public final com.querydsl.sql.PrimaryKey<SEviltype> primary = createPrimaryKey(id);

    // Self-referencing foreign keys (each *_id column points back at this table's id).
    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f83516787cd9e = createForeignKey(toStringId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f835114c0ad20 = createForeignKey(_desc, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f835151e065d5 = createForeignKey(waitId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f83517e62bab2 = createForeignKey(notifyAllId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f8351c4df9054 = createForeignKey(getId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f835112489019 = createForeignKey(isnullId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f8351b09c8448 = createForeignKey(getClassId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f835180b69f81 = createForeignKey(notifyId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f8351b71279da = createForeignKey(getTypeId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f8351226ee98f = createForeignKey(hashCodeId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f8351f5ec12fa = createForeignKey(isnotnullId, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f8351f839f62 = createForeignKey(_asc, "id");

    public final com.querydsl.sql.ForeignKey<SEviltype> fkd21f83512d7708c5 = createForeignKey(getMetadataId, "id");

    // Inverse sides of the foreign keys above.
    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f83516787cd9e = createInvForeignKey(id, "toString_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f835114c0ad20 = createInvForeignKey(id, "_desc");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f835151e065d5 = createInvForeignKey(id, "wait_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f83517e62bab2 = createInvForeignKey(id, "notifyAll_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f8351c4df9054 = createInvForeignKey(id, "get_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f835112489019 = createInvForeignKey(id, "isnull_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f8351b09c8448 = createInvForeignKey(id, "getClass_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f835180b69f81 = createInvForeignKey(id, "notify_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f8351b71279da = createInvForeignKey(id, "getType_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f8351226ee98f = createInvForeignKey(id, "hashCode_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f8351f5ec12fa = createInvForeignKey(id, "isnotnull_id");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f8351f839f62 = createInvForeignKey(id, "_asc");

    public final com.querydsl.sql.ForeignKey<SEviltype> _fkd21f83512d7708c5 = createInvForeignKey(id, "getMetadata_id");

    public SEviltype(String variable) {
        super(SEviltype.class, forVariable(variable), "", "eviltype_");
        addMetadata();
    }

    public SEviltype(String variable, String schema, String table) {
        super(SEviltype.class, forVariable(variable), schema, table);
        addMetadata();
    }

    public SEviltype(Path<? extends SEviltype> path) {
        super(path.getType(), path.getMetadata(), "", "eviltype_");
        addMetadata();
    }

    public SEviltype(PathMetadata metadata) {
        super(SEviltype.class, metadata, "", "eviltype_");
        addMetadata();
    }

    // Registers column metadata (name, ordinal index, JDBC type 4 = INTEGER, size).
    public void addMetadata() {
        addMetadata(_asc, ColumnMetadata.named("_asc").withIndex(2).ofType(4).withSize(10));
        addMetadata(_desc, ColumnMetadata.named("_desc").withIndex(3).ofType(4).withSize(10));
        addMetadata(getClassId, ColumnMetadata.named("getClass_id").withIndex(5).ofType(4).withSize(10));
        addMetadata(getId, ColumnMetadata.named("get_id").withIndex(4).ofType(4).withSize(10));
        addMetadata(getMetadataId, ColumnMetadata.named("getMetadata_id").withIndex(6).ofType(4).withSize(10));
        addMetadata(getTypeId, ColumnMetadata.named("getType_id").withIndex(7).ofType(4).withSize(10));
        addMetadata(hashCodeId, ColumnMetadata.named("hashCode_id").withIndex(8).ofType(4).withSize(10));
        addMetadata(id, ColumnMetadata.named("id").withIndex(1).ofType(4).withSize(10).notNull());
        addMetadata(isnotnullId, ColumnMetadata.named("isnotnull_id").withIndex(9).ofType(4).withSize(10));
        addMetadata(isnullId, ColumnMetadata.named("isnull_id").withIndex(10).ofType(4).withSize(10));
        addMetadata(notifyAllId, ColumnMetadata.named("notifyAll_id").withIndex(12).ofType(4).withSize(10));
        addMetadata(notifyId, ColumnMetadata.named("notify_id").withIndex(11).ofType(4).withSize(10));
        addMetadata(toStringId, ColumnMetadata.named("toString_id").withIndex(13).ofType(4).withSize(10));
        addMetadata(waitId, ColumnMetadata.named("wait_id").withIndex(14).ofType(4).withSize(10));
    }

}
2,646
491
/*
 *  Copyright 2004 The WebRTC Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

// Macro helpers layered on top of gtest: EXPECT/ASSERT variants that keep
// re-evaluating an expression (while pumping the current rtc::Thread's
// message queue) until it becomes true or a timeout expires. The
// *_SIMULATED_* variants do the same against a fake clock.

#ifndef RTC_BASE_GUNIT_H_
#define RTC_BASE_GUNIT_H_

#include "rtc_base/fakeclock.h"
#include "rtc_base/logging.h"
#include "rtc_base/thread.h"

#if defined(GTEST_RELATIVE_PATH)
#include "test/gtest.h"
#else
#include "testing/base/public/gunit.h"
#endif

// Wait until "ex" is true, or "timeout" expires.
#define WAIT(ex, timeout)                                     \
  for (int64_t start = rtc::SystemTimeMillis();               \
       !(ex) && rtc::SystemTimeMillis() < start + (timeout);) { \
    rtc::Thread::Current()->ProcessMessages(0);               \
    rtc::Thread::Current()->SleepMs(1);                       \
  }

// This returns the result of the test in res, so that we don't re-evaluate
// the expression in the XXXX_WAIT macros below, since that causes problems
// when the expression is only true the first time you check it.
#define WAIT_(ex, timeout, res)                                   \
  do {                                                            \
    int64_t start = rtc::SystemTimeMillis();                      \
    res = (ex);                                                   \
    while (!res && rtc::SystemTimeMillis() < start + (timeout)) { \
      rtc::Thread::Current()->ProcessMessages(0);                 \
      rtc::Thread::Current()->SleepMs(1);                         \
      res = (ex);                                                 \
    }                                                             \
  } while (0)

// The typical EXPECT_XXXX and ASSERT_XXXXs, but done until true or a timeout.
// One can add failure message by appending "<< msg".
// (The if/else + goto dance preserves gtest's streaming syntax while keeping
// the macro safe inside an unbraced if/else.)
#define EXPECT_TRUE_WAIT(ex, timeout)                     \
  GTEST_AMBIGUOUS_ELSE_BLOCKER_                           \
  if (bool res = true) {                                  \
    WAIT_(ex, timeout, res);                              \
    if (!res)                                             \
      goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__);   \
  } else                                                  \
    GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_TRUE(ex)

#define EXPECT_EQ_WAIT(v1, v2, timeout)                   \
  GTEST_AMBIGUOUS_ELSE_BLOCKER_                           \
  if (bool res = true) {                                  \
    WAIT_(v1 == v2, timeout, res);                        \
    if (!res)                                             \
      goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__);   \
  } else                                                  \
    GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_EQ(v1, v2)

#define ASSERT_TRUE_WAIT(ex, timeout)                     \
  GTEST_AMBIGUOUS_ELSE_BLOCKER_                           \
  if (bool res = true) {                                  \
    WAIT_(ex, timeout, res);                              \
    if (!res)                                             \
      goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__);   \
  } else                                                  \
    GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_TRUE(ex)

#define ASSERT_EQ_WAIT(v1, v2, timeout)                   \
  GTEST_AMBIGUOUS_ELSE_BLOCKER_                           \
  if (bool res = true) {                                  \
    WAIT_(v1 == v2, timeout, res);                        \
    if (!res)                                             \
      goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__);   \
  } else                                                  \
    GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_EQ(v1, v2)

// Version with a "soft" timeout and a margin. This logs if the timeout is
// exceeded, but it only fails if the expression still isn't true after the
// margin time passes.
#define EXPECT_TRUE_WAIT_MARGIN(ex, timeout, margin)                       \
  GTEST_AMBIGUOUS_ELSE_BLOCKER_                                            \
  if (bool res = true) {                                                   \
    WAIT_(ex, timeout, res);                                               \
    if (res)                                                               \
      break;                                                               \
    RTC_LOG(LS_WARNING) << "Expression " << #ex << " still not true after " \
                        << (timeout) << "ms; waiting an additional " << margin \
                        << "ms";                                           \
    WAIT_(ex, margin, res);                                                \
    if (!res)                                                              \
      goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__);                    \
  } else                                                                   \
    GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_TRUE(ex)

// Wait until "ex" is true, or "timeout" expires, using fake clock where
// messages are processed every millisecond.
// TODO(pthatcher): Allow tests to control how many milliseconds to advance.
#define SIMULATED_WAIT(ex, timeout, clock)                    \
  for (int64_t start = rtc::TimeMillis();                     \
       !(ex) && rtc::TimeMillis() < start + (timeout);) {     \
    (clock).AdvanceTime(rtc::TimeDelta::FromMilliseconds(1)); \
  }

// This returns the result of the test in res, so that we don't re-evaluate
// the expression in the XXXX_WAIT macros below, since that causes problems
// when the expression is only true the first time you check it.
#define SIMULATED_WAIT_(ex, timeout, res, clock)                \
  do {                                                          \
    int64_t start = rtc::TimeMillis();                          \
    res = (ex);                                                 \
    while (!res && rtc::TimeMillis() < start + (timeout)) {     \
      (clock).AdvanceTime(rtc::TimeDelta::FromMilliseconds(1)); \
      res = (ex);                                               \
    }                                                           \
  } while (0)

// The typical EXPECT_XXXX, but done until true or a timeout with a fake clock.
#define EXPECT_TRUE_SIMULATED_WAIT(ex, timeout, clock) \
  do {                                                 \
    bool res;                                          \
    SIMULATED_WAIT_(ex, timeout, res, clock);          \
    if (!res) {                                        \
      EXPECT_TRUE(ex);                                 \
    }                                                  \
  } while (0)

#define EXPECT_EQ_SIMULATED_WAIT(v1, v2, timeout, clock)  \
  GTEST_AMBIGUOUS_ELSE_BLOCKER_                           \
  if (bool res = true) {                                  \
    SIMULATED_WAIT_(v1 == v2, timeout, res, clock);       \
    if (!res)                                             \
      goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__);   \
  } else                                                  \
    GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_EQ(v1, v2)

#define ASSERT_TRUE_SIMULATED_WAIT(ex, timeout, clock)    \
  GTEST_AMBIGUOUS_ELSE_BLOCKER_                           \
  if (bool res = true) {                                  \
    SIMULATED_WAIT_(ex, timeout, res, clock);             \
    if (!res)                                             \
      goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__);   \
  } else                                                  \
    GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_TRUE(ex)

#define ASSERT_EQ_SIMULATED_WAIT(v1, v2, timeout, clock)  \
  GTEST_AMBIGUOUS_ELSE_BLOCKER_                           \
  if (bool res = true) {                                  \
    SIMULATED_WAIT_(v1 == v2, timeout, res, clock);       \
    if (!res)                                             \
      goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__);   \
  } else                                                  \
    GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_EQ(v1, v2)

#endif  // RTC_BASE_GUNIT_H_
5,084
1,251
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License.
 */

#ifndef _FA_MULTIMAP_AR_UNIQ_H_
#define _FA_MULTIMAP_AR_UNIQ_H_

#include "FAConfig.h"
#include "FAMultiMapA.h"
#include "FAArray_cont_t.h"
#include "FAMap_judy.h"
#include "FASecurity.h"

namespace BlingFire
{

class FAAllocatorA;

///
/// Array-based implementation of FAMultiMapA.
///
/// Note:
/// 1. Add (one element to the end) and Remove operations are not supported
///
class FAMultiMap_ar_uniq : public FAMultiMapA {

public:
    FAMultiMap_ar_uniq ();
    virtual ~FAMultiMap_ar_uniq ();

public:
    // sets up allocator (call before any usage)
    void SetAllocator (FAAllocatorA * pAlloc);
    // makes map as if it was just constructed
    void Clear ();

/// FAMultiMapA
public:
    // copies up to MaxCount values for Key into pValues; returns the count
    const int Get (
            const int Key,
            __out_ecount_opt(MaxCount) int * pValues,
            const int MaxCount
        ) const;
    // returns the largest value-array size Get can ever report
    const int GetMaxCount () const;
    // returns a pointer to the internal value array for Key (no copy)
    const int Get (const int Key, const int ** ppValues) const;
    // associates Key with the given value array
    void Set (const int Key, const int * pValues, const int ValuesCount);
    // not implemented
    void Add (const int Key, const int Value);
    // iteration helpers over (key, values) pairs
    const int Next (int * pKey, const int ** ppValues) const;
    const int Prev (int * pKey, const int ** ppValues) const;

public:
    // makes Arrays of values sorted and uniq
    void SortUniq ();

private:
    // ensures m_key2idx holds the Key
    inline void ensure (const int Key);
    // calculates hash key
    inline static const int hash_key (const int * pValues, const int Size);
    // compares values with array by index Idx
    inline const bool equal (
            const int Idx,
            const int * pValues,
            const int Size
        ) const;

private:
    // mapping: key -> idx
    FAArray_cont_t < int > m_key2idx;
    // mapping: idx -> array
    FAArray_cont_t < FAArray_cont_t < int > > m_idx2arr;
    // mapping: HashKey (array) -> idx
    FAMap_judy m_hash2idx;
    // keeps max number of elements Get can return
    int m_MaxCount;
    // allocator
    FAAllocatorA * m_pAlloc;
};

}

#endif
933
7,713
// Copyright 2004-present Facebook. All Rights Reserved.

#import <React/RCTViewManager.h>

@class RCTWrapperView;

NS_ASSUME_NONNULL_BEGIN

// View manager that vends RCTWrapperView instances.
@interface RCTWrapperViewManager : RCTViewManager

// Subclass overrides of -view must call the superclass implementation
// (enforced by NS_REQUIRES_SUPER).
- (RCTWrapperView *)view NS_REQUIRES_SUPER;

@end

NS_ASSUME_NONNULL_END
102
6,270
<filename>.changes/2.538.0.json [ { "type": "feature", "category": "Amplify", "description": "This release adds access logs APIs and artifact APIs for AWS Amplify Console." }, { "type": "feature", "category": "ECS", "description": "This release of Amazon Elastic Container Service (Amazon ECS) removes FirelensConfiguration from the DescribeTask output during the FireLens public preview." } ]
164
621
c.ServerApp.port = 8888 # noqa c.ServerApp.token = "" # noqa c.ServerApp.password = "" # noqa c.ServerApp.disable_check_xsrf = True # noqa c.ServerApp.open_browser = False # noqa c.LabApp.open_browser = False # noqa c.LabApp.expose_app_in_browser = True # noqa
109
709
package com.olacabs.jackhammer.utilities; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; import java.util.HashMap; import java.util.List; import java.util.concurrent.*; import com.google.common.collect.Lists; import com.google.inject.Inject; import com.google.inject.name.Named; import com.olacabs.jackhammer.configuration.JackhammerConfiguration; import com.olacabs.jackhammer.db.*; import com.olacabs.jackhammer.security.AES; import com.olacabs.jackhammer.tool.interfaces.sdk.bridge.SdkCommunicator; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.TrueFileFilter; import liquibase.util.file.FilenameUtils; import lombok.extern.slf4j.Slf4j; import com.olacabs.jackhammer.models.*; import com.olacabs.jackhammer.common.Constants; import com.olacabs.jackhammer.common.CustomErrorCodes; import com.olacabs.jackhammer.common.ExceptionMessages; import com.olacabs.jackhammer.exceptions.GitCloneException; import com.olacabs.jackhammer.exceptions.TempDirCreationException; import org.apache.commons.lang3.StringUtils; @Slf4j public class ScanUtil { @Inject SdkCommunicator sdkCommunicator; @Inject @Named(Constants.SCAN_DAO) ScanDAO scanDAO; @Inject @Named(Constants.SCAN_TOOL_DAO) ScanToolDAO scanToolDAO; @Inject @Named(Constants.TOOL_DAO) ToolDAO toolDAO; @Inject @Named(Constants.SCAN_TYPE_DAO) ScanTypeDAO scanTypeDAO; @Inject @Named(Constants.LANGUAGE_DAO) LanguageDAO languageDAO; @Inject @Named(Constants.GIT_DAO) GitDAO gitDAO; @Inject JackhammerConfiguration jackhammerConfiguration; public void startScan(Scan scan) { try { log.info("picking scan with id {} {}", scan.getId()); scan.setStatus(Constants.SCAN_PICKED_STATUS); scanDAO.updateScanStatus(scan); ScanType scanType = scanTypeDAO.findScanTypeById(scan.getScanTypeId()); if (scan.getIsTaggedTools() == false) { Path tempDirPath = null; if (scanType.getIsStatic() || scanType.getIsHardCodeSecret()) { tempDirPath = 
createTempDirectory(); if (!cloneRepo(scan, tempDirPath)) { scan.setStatus(Constants.SCAN_FAILED_STATUS); scan.setStatusReason(Constants.GIT_CLONE_FAILED); scanDAO.updateScanStatusandReason(scan); return; } } tagPlatform(scan, tempDirPath); } if (scan.isSupported() && SdkCommunicator.clients != null && SdkCommunicator.clients.size() > 0) { Boolean toolsTagged = tagScanTools(scan); if (toolsTagged) sdkCommunicator.sendScanRequest(scan); } else if (scan.isSupported() == false) { String failedMessage = (scanType.getIsStatic() || scanType.getIsHardCodeSecret()) && scan.isAccessible() == false ? Constants.STATIC_SCAN_FAILED_MESSAGE : Constants.TOOLS_NOT_SUPPORTED; scan.setStatus(Constants.SCAN_FAILED_STATUS); scan.setStatusReason(failedMessage); scanDAO.updateScanStatusandReason(scan); } else if (SdkCommunicator.clients.size() == 0) { scan.setStatus(Constants.SCAN_QUEUED_STATUS); scanDAO.updateScanStatus(scan); } } catch (Exception e) { log.error("Exception while fetching pending scans", e); } catch (Throwable e) { log.info("Error while sending scans", e); } } public Path createTempDirectory() throws TempDirCreationException { Path tempDirPath; try { tempDirPath = Files.createTempDirectory(Constants.TEMP_DIR_PREFIX); } catch (IOException io) { throw new TempDirCreationException(ExceptionMessages.TEMP_DIR_CREATION_ERROR, null, CustomErrorCodes.TEMP_DIR_CREATION_ERROR); } return tempDirPath; } public Boolean cloneRepo(Scan scan, Path tmpDir) throws GitCloneException { StringBuilder command = getGitCloneProcessBuilderWithCredentials(scan); String gitCommand = command.toString() + Constants.STRING_SPACER + tmpDir.toAbsolutePath().toString(); try { return runCloneCmd(gitCommand); } catch (Exception e) { log.error("Error while cloning the repo", e); return false; } catch (Throwable th) { log.error("Error while cloning the repo", th); return false; } } public void tagPlatform(Scan scan, Path tmpDir) { try { scan.setSupported(false); List<Long> scanToolIds = 
Lists.newArrayList(); ScanType scanType = scanTypeDAO.findScanTypeById(scan.getScanTypeId()); if (scanType.getIsStatic()) { tagStaticPlatform(scan, scanToolIds, tmpDir); } else { if (scanType.getIsHardCodeSecret()) scan.setCloneRequired(true); tagNonStaticPlatform(scan, scanType, scanToolIds); } if (scanType.getIsHardCodeSecret() || scanType.getIsStatic()) { File targetDir = new File(tmpDir.toAbsolutePath().toString()); if (targetDir.exists()) targetDir.delete(); } if (scan.getPlatforms().size() > 0) { scan.setSupported(true); scan.setScanPlatforms(String.join(Constants.COMMA, scan.getPlatforms())); } else { String failedMessage = (scanType.getIsStatic() || scanType.getIsHardCodeSecret()) && scan.isAccessible() == false ? Constants.STATIC_SCAN_FAILED_MESSAGE : Constants.TOOLS_NOT_SUPPORTED; scan.setStatus(Constants.SCAN_FAILED_STATUS); scan.setStatusReason(failedMessage); scanDAO.updateScanStatusandReason(scan); } scanDAO.updatedScanDetails(scan); } catch (Exception e) { log.error("Error while doing tagPlatform.....", e); } catch (Throwable th) { log.error("Error while doing tagPlatform.....", th); } } public Boolean tagScanTools(Scan scan) { List<ScanTool> scanTools = scanToolDAO.getQueuedScanTools(scan.getId()); ScanType scanType = scanTypeDAO.findScanTypeById(scan.getScanTypeId()); if (scanTools.size() == 0) { scanToolDAO.deleteScanTools(scan.getId()); Path tempDirPath = null; if (scanType.getIsStatic() || scanType.getIsHardCodeSecret()) { try { tempDirPath = createTempDirectory(); if (!cloneRepo(scan, tempDirPath)) { scan.setStatus(Constants.SCAN_FAILED_STATUS); scan.setStatusReason(Constants.GIT_CLONE_FAILED); scanDAO.updateScanStatusandReason(scan); return false; } } catch (GitCloneException gce) { log.error("GitCloneException => ", gce); } catch (TempDirCreationException tce) { log.error("TempDirCreationException => ", tce); } } tagPlatform(scan, tempDirPath); } else { for (ScanTool scanTool : scanTools) { Tool tool = toolDAO.get(scanTool.getToolId()); 
scan.addTool(tool); } } if (scanType.getIsStatic() || scanType.getIsHardCodeSecret()) { StringBuilder gitCloneCmd = getGitCloneProcessBuilderWithCredentials(scan); scan.setTarget(gitCloneCmd.toString()); } else { scan.setCloneRequired(false); } if (scanType.getIsMobile()) { scan.setIsMobileScan(true); } else { scan.setIsMobileScan(false); } return true; } private Boolean runCloneCmd(final String command) throws IOException, InterruptedException, ExecutionException { Boolean status; final Duration timeout = Duration.ofMinutes(5); ExecutorService executor = Executors.newSingleThreadExecutor(); final Future<String> handler = executor.submit(new Callable() { @Override public String call() throws Exception { log.info("started cloning the repo....."); Process process = Runtime.getRuntime().exec(command); process.waitFor(); return "Success"; } }); try { handler.get(timeout.toMillis(), TimeUnit.MILLISECONDS); log.info("cloning completed....."); status = true; } catch (TimeoutException e) { handler.cancel(true); status = false; log.info("TimeoutException while cloning the repo....."); } executor.shutdownNow(); return status; } private void tagStaticPlatform(Scan scan, List<Long> scanToolIds, Path tmpDir) { HashMap<String, Language> languagesHash = new HashMap<String, Language>(); List<Language> languageList = languageDAO.getLanguages(); for (Language language : languageList) { languagesHash.put(language.getFileExtension(), language); } File modifiedTempDir = new File(tmpDir.toAbsolutePath().toString()); if (modifiedTempDir != null && modifiedTempDir.list() != null && modifiedTempDir.list().length > 0) { List<File> files = (List<File>) FileUtils.listFiles(modifiedTempDir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE); for (File file : files) { if (file.isFile()) { String fileExtension = FilenameUtils.getExtension(file.getAbsolutePath()); Language language = languagesHash.get(fileExtension); if (language != null) pickStaticPlatforms(scan, language, scanToolIds); } } 
scan.setCloneRequired(true); } else { scan.setAccessible(false); } } private void tagNonStaticPlatform(Scan scan, ScanType scanType, List<Long> scanToolIds) { scan.getPlatforms().add(scanType.getName().toLowerCase()); List<Tool> scanTypeTools = toolDAO.findByScanTypeId(scanType.getId()); for (Tool eachScanTypeTool : scanTypeTools) { if (!scanToolIds.contains(eachScanTypeTool.getId())) { scanToolIds.add(eachScanTypeTool.getId()); scan.addTool(eachScanTypeTool); ScanTool scanTool = new ScanTool(); scanTool.setScanId(scan.getId()); scanTool.setToolId(eachScanTypeTool.getId()); scanToolDAO.insert(scanTool); } } } private void pickStaticPlatforms(Scan scan, Language language, List<Long> scanToolIds) { List<Tool> languageTools = toolDAO.findByLanguageId(language.getId()); if (languageTools.size() == 0) return; scan.getPlatforms().add(language.getName().toLowerCase()); for (Tool eachLanguageTool : languageTools) { if (!scanToolIds.contains(eachLanguageTool.getId())) { scanToolIds.add(eachLanguageTool.getId()); scan.addTool(eachLanguageTool); ScanTool scanTool = new ScanTool(); scanTool.setScanId(scan.getId()); scanTool.setToolId(eachLanguageTool.getId()); scanToolDAO.insert(scanTool); } } } private StringBuilder getGitCloneProcessBuilderWithCredentials(Scan scan) { Git git = gitDAO.get(); String target = scan.getTarget(); String internalUrl = jackhammerConfiguration.getGitConfiguration().getInternalUrl(); String externalUrl = jackhammerConfiguration.getGitConfiguration().getExternalUrl(); if (!StringUtils.isEmpty(internalUrl)) target = target.replace(externalUrl, internalUrl); StringBuilder gitCmd = new StringBuilder(); try { if (git != null) { String privateToken = AES.decrypt(git.getApiAccessToken(), jackhammerConfiguration.getJwtConfiguration().getTokenSigningKey()); StringBuilder targetWithCredentials = new StringBuilder(); String repoUrlWithoutHttps = target.split(Constants.GIT_HTTPS)[1]; targetWithCredentials.append(Constants.GIT_HTTPS); 
targetWithCredentials.append(git.getUserName()); targetWithCredentials.append(Constants.COLON); targetWithCredentials.append(privateToken); targetWithCredentials.append(Constants.AT_THE_RATE); targetWithCredentials.append(repoUrlWithoutHttps); target = targetWithCredentials.toString(); } gitCmd.append(Constants.GIT); gitCmd.append(Constants.STRING_SPACER); gitCmd.append(Constants.CLONE); if (scan.getBranch() != null) { gitCmd.append(Constants.STRING_SPACER); gitCmd.append(Constants.BRANCH_ARG_OPTION); gitCmd.append(Constants.STRING_SPACER); gitCmd.append(scan.getBranch()); } gitCmd.append(Constants.STRING_SPACER); gitCmd.append(target); } catch (Exception e) { log.error("Error while building clone command", e); } catch (Throwable th) { log.error("Error while building git clone command", th); } return gitCmd; } }
6,303
790
/* * @brief Unit test for BinarySearch * @file test_binarysearch.cuh */ #include <examples/core/test_binarysearch.cuh> using namespace gunrock; TEST(utils, BinarySearch) { cudaError_t retval = BinarySearchTest(); EXPECT_EQ(retval, cudaSuccess); }
96
1,036
<gh_stars>1000+ package com.mylhyl.circledialog.view; import android.content.Context; import android.text.TextUtils; import android.view.Gravity; import android.widget.LinearLayout; import android.widget.TextView; import androidx.annotation.Nullable; import com.airbnb.lottie.LottieAnimationView; import com.airbnb.lottie.LottieDrawable; import com.mylhyl.circledialog.internal.BackgroundHelper; import com.mylhyl.circledialog.internal.CircleParams; import com.mylhyl.circledialog.internal.Controller; import com.mylhyl.circledialog.params.DialogParams; import com.mylhyl.circledialog.params.LottieParams; import com.mylhyl.circledialog.view.listener.OnCreateLottieListener; /** * Created by hupei on 2018/7/7. */ final class BodyLottieView extends LinearLayout { private LottieAnimationView mLottieAnimationView; private TextView mTextView; private DialogParams mDialogParams; private LottieParams mLottieParams; private OnCreateLottieListener mOnCreateLottieListener; public BodyLottieView(Context context, CircleParams circleParams) { super(context); init(circleParams); } private void init(CircleParams circleParams) { this.mDialogParams = circleParams.dialogParams; this.mLottieParams = circleParams.lottieParams; this.mOnCreateLottieListener = circleParams.circleListeners.createLottieListener; setOrientation(LinearLayout.VERTICAL); // 如果没有背景色,则使用默认色 int backgroundColor = mLottieParams.backgroundColor != 0 ? 
mLottieParams.backgroundColor : mDialogParams.backgroundColor; BackgroundHelper.handleBodyBackground(this, backgroundColor, circleParams); createLottieView(); createText(); if (mOnCreateLottieListener != null) { mOnCreateLottieListener.onCreateLottieView(mLottieAnimationView, mTextView); } } private void createLottieView() { mLottieAnimationView = new LottieAnimationView(getContext()); int lottieWidth = Controller.dp2px(getContext(), mLottieParams.lottieWidth); int lottieHeight = Controller.dp2px(getContext(), mLottieParams.lottieHeight); LayoutParams layoutParams = new LayoutParams(lottieWidth <= 0 ? LayoutParams.WRAP_CONTENT : lottieWidth, lottieHeight <= 0 ? LayoutParams.WRAP_CONTENT : lottieHeight); int[] margins = mLottieParams.margins; if (margins != null) layoutParams.setMargins(Controller.dp2px(getContext(), margins[0]), Controller.dp2px(getContext(), margins[1]), Controller.dp2px(getContext(), margins[2]), Controller.dp2px(getContext(), margins[3])); layoutParams.gravity = Gravity.CENTER; if (mLottieParams.animationResId != 0) { mLottieAnimationView.setAnimation(mLottieParams.animationResId); } if (!TextUtils.isEmpty(mLottieParams.animationFileName)) { mLottieAnimationView.setAnimation(mLottieParams.animationFileName); } if (!TextUtils.isEmpty(mLottieParams.imageAssetsFolder)) { mLottieAnimationView.setImageAssetsFolder(mLottieParams.imageAssetsFolder); } if (mLottieParams.autoPlay) { mLottieAnimationView.playAnimation(); } if (mLottieParams.loop) { mLottieAnimationView.setRepeatCount(LottieDrawable.INFINITE); } addView(mLottieAnimationView, layoutParams); } @Nullable private void createText() { //构建文本 if (!TextUtils.isEmpty(mLottieParams.text)) { mTextView = new TextView(getContext()); LayoutParams textLayoutParams = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); textLayoutParams.gravity = Gravity.CENTER; int[] textMargins = mLottieParams.textMargins; if (textMargins != null) { 
textLayoutParams.setMargins(Controller.dp2px(getContext(), textMargins[0]), Controller.dp2px(getContext(), textMargins[1]), Controller.dp2px(getContext(), textMargins[2]), Controller.dp2px(getContext(), textMargins[3])); } if (mDialogParams.typeface != null) { mTextView.setTypeface(mDialogParams.typeface); } mTextView.setText(mLottieParams.text); mTextView.setTextSize(mLottieParams.textSize); mTextView.setTextColor(mLottieParams.textColor); mTextView.setTypeface(mTextView.getTypeface(), mLottieParams.styleText); int[] textPadding = mLottieParams.textPadding; if (textPadding != null) { mTextView.setPadding(Controller.dp2px(getContext(), textPadding[0]), Controller.dp2px(getContext(), textPadding[1]), Controller.dp2px(getContext(), textPadding[2]), Controller.dp2px(getContext(), textPadding[3])); } addView(mTextView, textLayoutParams); } } public void refreshText() { if (mLottieParams == null) { return; } if (mLottieAnimationView != null) { if (mLottieParams.animationResId != 0) { mLottieAnimationView.setAnimation(mLottieParams.animationResId); } if (!TextUtils.isEmpty(mLottieParams.animationFileName)) { mLottieAnimationView.setAnimation(mLottieParams.animationFileName); } if (!TextUtils.isEmpty(mLottieParams.imageAssetsFolder)) { mLottieAnimationView.setImageAssetsFolder(mLottieParams.imageAssetsFolder); } mLottieAnimationView.playAnimation(); } if (mTextView != null && !TextUtils.isEmpty(mLottieParams.text)) { mTextView.setText(mLottieParams.text); } } }
2,713
5,169
{ "name": "ViewModelOwners", "version": "1.0.0", "license": { "type": "MIT", "file": "LICENSE" }, "summary": "Protocols that help make your MVVM setup more consistent", "homepage": "http://merowing.info", "social_media_url": "https://twitter.com/merowing_", "authors": { "<NAME>": "<EMAIL>" }, "source": { "git": "https://github.com/krzysztofzablocki/ViewModelOwners.git", "tag": "v1.0.0" }, "platforms": { "ios": "9.0", "osx": "10.10", "tvos": "9.0", "watchos": "2.0" }, "requires_arc": true, "default_subspecs": "Core", "subspecs": [ { "name": "Core", "source_files": "Sources/**/*.swift", "frameworks": "Foundation" } ] }
336
365
from docopt import docopt # noinspection PyListCreation def main(): args = docopt(""" Usage: corpus2svd.sh [options] <corpus> <output_dir> Options: --thr NUM The minimal word count for being in the vocabulary [default: 100] --win NUM Window size [default: 2] --pos Positional contexts --dyn Dynamic context windows --sub NUM Subsampling threshold [default: 0] --del Delete out-of-vocabulary and subsampled placeholders --cds NUM Context distribution smoothing [default: 1.0] --dim NUM Dimensionality of eigenvectors [default: 500] --neg NUM Number of negative samples; subtracts its log from PMI [default: 1] --w+c Use ensemble of word and context vectors --eig NUM Weighted exponent of the eigenvalue matrix [default: 0.5] """) corpus = args['<corpus>'] output_dir = args['<output_dir>'] corpus2pairs_opts = [] corpus2pairs_opts.append('--thr ' + args['--thr']) corpus2pairs_opts.append('--win ' + args['--win']) if args['--pos']: corpus2pairs_opts.append('--pos') if args['--dyn']: corpus2pairs_opts.append('--dyn') corpus2pairs_opts.append('--sub ' + args['--sub']) if args['--del']: corpus2pairs_opts.append('--del') counts2pmi_opts = [] counts2pmi_opts.append('--cds ' + args['--cds']) pmi2svd_opts = [] pmi2svd_opts.append('--dim ' + args['--dim']) pmi2svd_opts.append('--neg ' + args['--neg']) svd2text_opts = [] if args['--w+c']: svd2text_opts.append('--w+c') svd2text_opts.append('--eig ' + args['--eig']) print '@'.join([ corpus, output_dir, ' '.join(corpus2pairs_opts), ' '.join(counts2pmi_opts), ' '.join(pmi2svd_opts), ' '.join(svd2text_opts) ]) if __name__ == '__main__': main()
970
1,968
<reponame>agramonte/corona<gh_stars>1000+ ////////////////////////////////////////////////////////////////////////////// // // This file is part of the Corona game engine. // For overview and more information on licensing please refer to README.md // Home page: https://github.com/coronalabs/corona // Contact: <EMAIL> // ////////////////////////////////////////////////////////////////////////////// #ifndef _AndroidNativePngDecoder_H__ #define _AndroidNativePngDecoder_H__ #include "AndroidBaseNativeImageDecoder.h" #include "AndroidOperationResult.h" // Forward declarations. struct Rtt_Allocator; class AndroidBaseImageDecoder; class AndroidBinaryReader; class NativeToJavaBridge; /// Decodes an image via the "libpng" C library. /// <br> /// Note that this image decoder loads PNGs faster than the AndroidJavaImageDecoder class. class AndroidNativePngDecoder : public AndroidBaseNativeImageDecoder { public: AndroidNativePngDecoder(Rtt_Allocator *allocatorPointer, NativeToJavaBridge *ntjb); AndroidNativePngDecoder(const AndroidBaseImageDecoder &decoder, NativeToJavaBridge *ntjb); virtual ~AndroidNativePngDecoder(); protected: AndroidOperationResult OnDecodeFrom(AndroidBinaryReader &reader); }; #endif // _AndroidNativePngDecoder_H__
353
348
<gh_stars>100-1000 {"nom":"Aubepierre-sur-Aube","circ":"1ère circonscription","dpt":"Haute-Marne","inscrits":169,"abs":90,"votants":79,"blancs":0,"nuls":6,"exp":73,"res":[{"nuance":"REM","nom":"<NAME>","voix":43},{"nuance":"LR","nom":"<NAME>","voix":30}]}
105
814
""" Each PuzzleGenerator has one or more Instances corresponding a to a different input. A "simple" problem like (lambda x: x + "world" == "Hello world") that has no inputs has just one instance. """ import inspect import json from typing import List, Callable, Dict, Set import random import re import sys import traceback import time import abc import utils # The seed used for randomness is important because if a solver has access to this seed it can cheat and # reverse-engineer the solutions to some puzzles. Don't share the seed with AI puzzle solvers :-) _AI_SEED = 12389484322359235125123212243523534510980967133563 DEFAULT_TIMEOUT = 1.0 # seconds def type_check(typ, obj): """ Checks the object is the correct type. Supports only bool, int, float, str, and (possibly nested) lists of these """ type_s = type_str(typ) # convert to string if necessary nest_depth = type_s.count("List") assert type_s.count("[") == nest_depth, "type_check only supports List for now, no Sets, Dicts, Tuples, ..." 
assert type_s.startswith("List[" * nest_depth) and type_s.endswith("]" * nest_depth) base_type = {"bool": bool, "int": int, "float": float, "str": str}[type_s[5 * nest_depth:len(type_s) - nest_depth]] def helper(depth, o): if depth == 0: return type(o) is base_type else: return type(o) is list and all(helper(depth - 1, i) for i in o) return helper(nest_depth, obj) def test_puzzle(f: callable, x, ans_type: str): """Checks if x is of the correct type and makes f return True (literally True, not an integer or whatever) :param f: Puzzle :param x: candidate answer :param ans_tye: :return: """ if not type_check(x, ans_type): raise TypeError return f(x) is True class InterpreterError(Exception): pass def my_exec(cmd, globals=None, locals=None, description='source string'): """ https://stackoverflow.com/questions/28836078/how-to-get-the-line-number-of-an-error-from-exec-or-execfile-in-python """ try: exec(cmd, globals, locals) except SyntaxError as err: error_class = err.__class__.__name__ detail = err.args[0] if err.args else "" line_number = err.lineno except Exception as err: error_class = err.__class__.__name__ detail = err.args[0] if err.args else "" cl, exc, tb = sys.exc_info() line_number = traceback.extract_tb(tb)[-1][1] else: return cmd_str = "\n".join([f"{i + 1}: {x}" for i, x in enumerate(cmd.split("\n"))]) raise InterpreterError("%s at line %d of %s: %s\n%s" % (error_class, line_number, description, detail, cmd_str)) def type_str(ty: type) -> str: """ Convert type ty to string. :param ty: str, typing.List[int] , typing.List[typing.List[bool]], etc. :return: string form of type, "str", "List[int]" , "List[List[bool]]", etc. """ type_str = str(ty).replace("typing.", "") return type_str[8:-2] if type_str.startswith("<class '") else type_str def gen_dump_code(var_name: str, ty: type) -> str: """ create code to output an object of type ty as a string :param var_name: The variable name, like "x" :param ty: str, typing.List[int] , typing.List[typing.List[bool]], etc. 
:return: code that writes the variable to standard out as a json object """ tys = type_str(ty) if tys.startswith("Set["): return "print(json.dumps({k : 1 for k in " + var_name + "})) # write sets as dictionaries\n" return f"print(json.dumps({var_name}))\n" def gen_load_code(var_name: str, ty: type) -> str: """ create code to load an object of type ty as a string :param var_name: The variable name, like "x" :param ty: str, typing.List[int] , typing.List[typing.List[bool]], etc. :return: code that reads the variable from stdin as a json object """ tys = type_str(ty) if tys.startswith("Set["): assert tys.endswith("]") inside = tys[4:-1] ans = f"{var_name} = set(json.load(sys.stdin))) # convert set (stored as json dictionary)" assertions = [f"all(isinstance(x, {inside}) for x in {var_name})"] else: ans = f"{var_name} = json.load(sys.stdin)" num_lists = tys.count("List[") assert tys.startswith("List[" * num_lists) and tys.endswith("]" * num_lists) inside = tys[5 * num_lists: len(tys) - num_lists] if num_lists == 0: assertions = [f"isinstance({var_name}, {inside})"] else: assertions = [f"isinstance({var_name}, list)"] if num_lists == 1: assertions.append(f"all(isinstance(x, {inside}) for x in {var_name})") else: assertions.append(f"all(isinstance(x, list) for x in {var_name})") if num_lists == 2: assertions.append(f"all(isinstance(y, {inside}) for x in {var_name} for y in x)") elif num_lists == 3: assertions += [f"all(isinstance(y, list) for x in {var_name} for y in x)", f"all(isinstance(z, {inside}) for x in {var_name} for y in x for z in y)"] else: assert False, f'Unknown type {tys}' assert inside in ["int", "float", "bool", "str"], f'Unknown type {tys}' return ans + "\n\n" + "\n".join(f"assert {a}, 'Type error: expecting `{tys}`'" for a in assertions) def add_preamble(src): preamble = [] types = [] if "List[" in src: types.append("List") if "Set[" in src: types.append("Set") if types: preamble.append(f"from typing import {','.join(types)}") if "json." 
in src: preamble.append("import json") if "sys." in src: preamble.append("import sys") return "\n".join(preamble) + "\n" * 3 + src if preamble else src def gen_prob_code(var_name: str, var_type: type, prob_src: str, inputs: str): s = f"""{prob_src} {gen_load_code(var_name, var_type)} inputs = {inputs} assert problem({var_name}, **inputs) print("Success!") """ # import inspect # print(inspect.getsource(problem)) return add_preamble(s) def gen_sol_code(var_name: str, var_type: type, sol_src: str, inputs: str): s = f"""{sol_src} inputs = {inputs} {var_name} = solution(**inputs) {gen_dump_code(var_name, var_type)} """ return add_preamble(s) class BuilderRandom(random.Random): """Adds extra random functions useful for building instances.""" def __init__(self, seed=None): self._init_seed = seed super().__init__(seed) def reseed(self): self.seed(self._init_seed) def pseudo_word(self, min_len=1, max_len=20): w = "".join(self.choice(["text", "th", "ch", "qu", *"bcdfghjklmnprstvwxz"]) + self.choice("aeiyou") for _ in range(1 + max_len // 2)) return w[:self.randrange(min_len, max_len + 1)] def heavy_tail_float(self, lower=-1000.0, upper=1000.0, median_dev=1.0): # heavy tailed distribution mean = (lower + upper) / 2.0 trunc = (upper - lower) / 2.0 while True: r = (self.random() ** (-2) - 1) / 3 if self.randrange(2): r = -r x = mean - median_dev * r if abs(x - mean) <= trunc: return x def char(self, chars="0123456789abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ,.:|/;?[]<>-=()+*&^%$#@!"): return self.choice(chars) def string(self, min_len=1, max_len=20): length = self.randrange(min_len, max_len + 1) return "".join(self.char() for _ in range(length)) def get_problems(globs: dict): seen = {PuzzleGenerator} # don't add abstract class PuzzleGenerator ans = [] for v in globs.values(): try: if v in seen: continue else: seen.add(v) except TypeError: continue try: is_prob = isinstance(v, PuzzleGenerator) except TypeError: is_prob = False if is_prob: ans.append(v) else: try: 
is_prob_class = issubclass(v, PuzzleGenerator) except TypeError: is_prob_class = False if is_prob_class: ans.append(v()) return ans def deep_copy(obj): t = type(obj) if t in {tuple, list, set}: return t(deep_copy(x) for x in obj) if t == dict: return {k: deep_copy(v) for k, v in obj.items()} return obj def same_types(obj1, obj2): """ Recursively check that obj1 and obj2 are of the same types. Better than type(obj1) == type(obj2) because it recursively checks inside lists, sets, dicts, and tuples """ t = type(obj1) if t is not type(obj2): return False if t in {list, set, dict}: for iterables in ([(obj1, obj2), (obj1.values(), obj2.values())] if t is dict else [(obj1, obj2)]): lst = [i for o in iterables for i in o] if not all(same_types(lst[0], o) for o in lst[1:]): return False if t is tuple: return len(obj1) == len(obj2) and all(same_types(o1, o2) for o1, o2 in zip(obj1, obj2)) return True # def test_same_types(): # assert same_types(1, 2) # assert same_types({1:[]}, {}) # assert same_types({1:[2,3]}, {4:[5,6]}) # assert not same_types(True, 1) # assert not same_types(1, 2.0) # assert not same_types(1, 2.0) # assert not same_types({1:[2,3]}, {4:[5.,6.]}) # assert not same_types({1:[2,3], 3:[5.]}, {}) # # test_same_types() def homogeneous_type(obj): """ Checks that the type is "homogeneous" in that all lists are of objects of the same type, etc. 
""" return same_types(obj, obj) # def test_homogeneous_types(): # assert homogeneous_type(1) # assert homogeneous_type([1, 2, 3]) # assert homogeneous_type([[[]], [[]]], [[3], [4]]) # assert homogeneous_type({}) # assert not homogeneous_type([1, 2, 3.3]) # assert homogeneous_type([[[]], [[], [4.]]], [[3], []]) # # test_homogeneous_types() def decode(st: str): # small modifications to make json roundtrip work def helper(obj): if type(obj) in [int, str, float, bool]: return obj if type(obj) == list: if len(obj) == 2 and obj[0] == "__SET__:": return set(helper(obj[1])) return [helper(i) for i in obj] if type(obj) == dict: return {json.loads(k): helper(v) for k, v in obj.items()} assert False, f"Unexpected type {type(obj)}" return helper(json.loads(st)) def encode(obj): # small modifications to make json roundtrip work def helper(x): # encodes sets in a json-friendly fashion if type(x) in [int, str, float, bool]: return x if type(x) == list: return [helper(i) for i in x] if type(x) == set: return ["__SET__:", helper({i: 0 for i in x})] if type(x) == dict: return {json.dumps(k): helper(v) for k, v in x.items()} assert False, f"Unexpected type {type(x)}" return json.dumps(helper(obj)) class Instance: def __init__(self, name: str, src: str, sol_header: str, sol_bodies: List[str], multiplier: float): self.name = name # instance name self.src = src self.sol_header = sol_header self.sol_bodies = sol_bodies self.multiplier = multiplier def unindent(docstr): lines = [line for line in docstr.strip().split("\n")] de_indent = None for i in range(1, len(lines)): line = lines[i] if de_indent is None and line.strip(): de_indent = len(line) - len(line.lstrip(" ")) if de_indent and len(line) > de_indent: assert not line[:de_indent].strip(), f"Weird indentation in docstring:\n{docstr}" lines[i] = line[de_indent:] return "\n".join(lines) def get_body(function_src): match = re.search(r"\)\s*:(.*)\n", function_src) assert match and (match.group(1).replace(" ", "") == ""), \ f"Bad solution 
header for, maybe move to next line:\n\n{match.group(1)}\n\nin:\n\n{function_src}" return function_src[match.end():] class Tags(abc.ABC): brute_force = "brute_force" # can be solved by brute force codeforces = "codeforces" # inspired by a codeforces.com problem data_structures = "data_structures" # needs fancy data structures to solve dp = "dp" # dynamic programming famous = "famous" # roughly at the level that there is a Wikipedia page about the topic games = "games" # puzzle describes a game graphs = "graphs" # solution can use graph algorithms like depth-first search, etc. greedy = "greedy" # solution can use a greedy algorithm hard = "hard" # challenging human_eval = "human_eval" # inspired by a problem from the Human Eval dataset unsolved = "unsolved" # unsolved, involves open some unsolved puzzles math = "math" # mainly mathematical reasoning strings = "strings" # involves constructing a string # trees = "trees" # will we use this? trivial = "trivial" # trivial *solution* even if it may require some work to understand what the puzzle is asking Tags._all_tags = {getattr(Tags, k) for k in dir(Tags) if not k.startswith("_")} class PuzzleGenerator: '''PuzzleGenerator is an abstract class for a puzzle generator which builds 1 or more instances. Each problem MUST OVERRIDE sat. Examples from templates/hello.py: class HelloWorld(PuzzleGenerator): """Trivial example, no solutions provided""" @staticmethod def sat(s: str): return s + 'world' == 'Hello world' class BackWorlds(PuzzleGenerator): """Two solutions, no inputs""" @staticmethod def sat(s: str): return s[::-1] + 'world' == 'Hello world' @staticmethod def sol(): return 'olleH ' @staticmethod def sol2(): # solution methods must begin with 'sol' return 'Hello '[::-1] # With other inputs, the default values of the input are used to generate the first instance. 
# You can run Uncat.get_example() to get the inputs, so you can then run # assert Uncat.sat(Uncat.sol(**Uncat.get_example())) class Uncat(PuzzleGenerator): """Simple example with inputs.""" @staticmethod def sat(st: str, a='world', b='Hello world'): return st + a == b @staticmethod def sol(a, b): return b[:len(b)-len(a)] def gen_random(self): b = self.random.pseudo_word() a = b[self.random.randrange(len(b)+1):] self.add({"a": a, "b": b}) ''' tags = [] # add tags, e.g., tags = [Tags.trivial, Tags.math] DEBUG = False # DEBUG = True while making a puzzle makes it run before any other problems skip_example = False # skip the example in the default arguments to sat, so it's not the first instance @staticmethod def sat(ans, *other_inputs): # must override raise NotImplementedError @classmethod def get_example(cls): if not hasattr(cls, "_example"): p_spec = inspect.getfullargspec(cls.sat) if p_spec.defaults: cls._example = dict(zip(p_spec.args[-len(p_spec.defaults):], p_spec.defaults)) else: cls._example = {} cls._example_copy = deep_copy(cls._example) return cls._example @classmethod def subclass_descendents(cls): # finds all problems def descendents(cls): ans = [] for c in cls.__subclasses__(): ans.append(c) ans.extend(descendents(c)) return ans ans = utils.dedup(descendents(cls)) # ans = [cls for cls in ans if cls.sat is not PuzzleGenerator.sat] names = set() for problem in ans: name = problem.__name__ assert name not in names, f"Duplicate problems named `{name}`" names.add(name) return ans @classmethod def debug_problems(cls, target_num_instances=None): defaults = {"target_num_instances": target_num_instances} if target_num_instances else {} all_gens = PuzzleGenerator.subclass_descendents() debug_problems = [cls for cls in all_gens if cls.DEBUG] if debug_problems: for P in debug_problems: P().debug(**defaults) print(f"PuzzleGenerator.DEBUG=True problem(s) succeeded: {[p.__name__ for p in debug_problems]}") print("Next, remove `DEBUG=True` from these classes") else: 
print("Suggestion for debugging: set DEBUG=True on PuzzleGenerator classes to test a single class.") print(f"No DEBUG=True PuzzleGenerator classes found, so testing {len(all_gens):,} classes:") for P in all_gens: P().test(**defaults) print(f"Success on all {len(all_gens):,} problem(s).") print("To make the dataset, run make_dataset.py") print("See https://github.com/microsoft/PythonProgrammingPuzzles/wiki/How-to-add-a-puzzle for more info.") def __init__(self): self.name = self.__class__.__name__ assert len(self.tags) == len(set(self.tags)), "duplicate tags in {self.name}" assert all(t in Tags._all_tags for t in self.tags), f"invalid tag(s) in {self.name}" assert self.sat is not PuzzleGenerator.sat, f"Must override {self.name}.sat" self.sat_src, sat_spec = get_src_spec(self.sat) self.docstring = utils.get_docstring(self.sat_src) self.sat_src = utils.remove_docstring(self.sat_src) assert len(sat_spec.args) > 0, f"{self.name}.sat() takes no arguments!" self.ans_name, *self.arg_names = sat_spec.args assert self.ans_name in sat_spec.annotations, f"Missing type hint for {self.name}.sat({self.ans_name}: ???" self.ans_type = type_str(sat_spec.annotations[self.ans_name]) assert self.ans_type.replace("List[", "").replace("]", "") in "bool float int str".split(), \ f"Answer type for {self.name} must be bool/int/float/str or Lists (or Lists of Lists etc.) 
of those" if not self.__doc__ or self.__doc__ == PuzzleGenerator.__doc__: self.desc = "" else: self.desc = unindent(self.__doc__) self.random = BuilderRandom(seed=self.name) # these are created at Build time self._seen_inputs = None self._inputs = None # inputs to test during build self.instances = None sol_names = [k for k in dir(self) if k.startswith("sol")] self.sols = [getattr(self, k) for k in sol_names] self.sol_bodies = [] for sol in self.sols: # check solution headers and extract bodies sol_src, sol_spec = get_src_spec(sol) assert self.arg_names == sol_spec.args, f"mismatched problem/solution arguments for {self.name}" assert not sol_spec.defaults, f"Don't set default parameter values for {self.name}.sol -- we'll do it" self.sol_bodies.append(get_body(sol_src)) assert set(self.arg_names) == set(self.get_example()), f"Bad {self.name} example" for v, val in self.get_example().items(): if not homogeneous_type(val): utils.warn(f"Non-homogeneous type for example var {v} in {self.name}") # check that sat and sol's are @staticmethod's mro_dict = {} for mro in inspect.getmro(self.__class__)[::-1]: mro_dict.update(mro.__dict__) assert all(isinstance(mro_dict[k], staticmethod) for k in ["sat"] + sol_names), \ f"{self.name} `sat` and `sol` must be defined with @staticmethod" def test_input(self, name, inp, test: bool, multiplier: float, already_tested={}): """Check if the input has been tested already. If not, assert that the solution(s) satisfy the given inputs. Do a round-trip json encoding/decoding to mimic the actual test. Ideally this could be done by running a protected process (like in evaluating programming contest submissions) but that is much slower. Since this is a test we authored presumably it has no evil code. 
Returns the new instance and number of solutions actually tested (that were not in cache)""" num_tested = 0 new_sat_src = create_sat(self.sat_src, self.ans_name, self.ans_type, self.arg_names, inp) sol_header = create_sol_header(inp) instance = Instance( name, new_sat_src, sol_header, self.sol_bodies if test else [], multiplier ) for sol_body, sol_func in zip(instance.sol_bodies, self.sols): if new_sat_src in already_tested and sol_body in already_tested[new_sat_src]: continue # skip num_tested += 1 time0 = time.perf_counter() env = dict(List=List) if self.DEBUG: # In debug mode just run the darn tests answer = sol_func(**inp) else: try: my_exec( instance.sol_header + " \n" + sol_body + "\n" + "answer = sol()", env, description=instance.name ) except Exception: sol_func(**inp) utils.error("Strange, failed test in exec but passed without exec") raise answer = env["answer"] assert answer is not None, "sol returned None" assert type_check(self.ans_type, answer), f"Solution returned wrong type for {self.name}" if self.DEBUG: assert self.sat(answer, **inp) is True, f"Puzzle {self.name} didn't return True on `{inp}`" else: assert answer == decode(encode(answer)) try: env2 = dict(answer=answer, List=List) # in case we screwed up env my_exec(instance.src + "\n" + "assert sat(answer) is True", env2, description=self.name) except Exception: assert self.sat(answer, **inp) is True, \ f"Puzzle {instance.name} didn't return True on `{inp}`" utils.error("Strange, failed test in exec but passed without exec") raise dur = time.perf_counter() - time0 if dur > DEFAULT_TIMEOUT * multiplier: utils.warn(f"Took {dur}s to test {instance.name} (multiplier={multiplier})") return instance, num_tested def num_generated_so_far(self): """ Call this function during gen/gen_random to see how many unique puzzle instances have been generated so far. 
""" return len(self._inputs) def build(self, target_num_instances, already_tested={}, max_random_attempts=100, force_trivial_test=False): self.check_for_trivial_solutions(force_trivial_test, already_tested) self._seen_inputs = set() self._inputs = [] # for recording the inputs to test self.random.reseed() start_time = time.perf_counter() if not self.skip_example: self.add(self.get_example()) if target_num_instances > len(self._inputs): self.gen(target_num_instances - len(self._inputs)) while len(self._inputs) < target_num_instances: n = len(self._inputs) for _ in range(max_random_attempts): self.gen_random() if n != len(self._inputs): # added a problem assert len(self._inputs) == n + 1, f"{self.name}.gen_random() generated more than one instance" break if len(self._inputs) == n: # failed max_random_attempts, give up break self._inputs = self._inputs[:target_num_instances] num_tested = 0 self.instances = [] for inp, test, multiplier in self._inputs: instance, n = self.test_input(f"{self.name}:{len(self.instances)}", inp, test, multiplier, already_tested) self.instances.append(instance) num_tested += n build_time = time.perf_counter() - start_time assert self._example_copy == self._example, f"Puzzle {self.name} changed inputs" if num_tested: utils.info(f"Actually tested {num_tested:,}/{len(self.instances):,} " f"instances of {self.name} in {build_time:.1f}s") self._seen_inputs = None self._inputs = None # for recording the inputs to test def check_for_trivial_solutions(self, force, already_tested): # check for trivial solutions example = self.get_example() src = create_sat(self.sat_src, self.ans_name, self.ans_type, self.arg_names, example) if (not force and src in already_tested) or not hasattr(self, "sol"): return utils.info(f"Checking for trivial solutions to {self.name}") time0 = time.perf_counter() ans = self.sol(**example) if type(ans) == int: if ans in range(-1000, 1000): tests = [ans] else: tests = [] elif type(ans) == str: if len(ans) <= 1: tests = [ans] 
else: tests = ["cat", "dog", "aa", "ab", "foo", "bar", "baz"] elif type(ans) == float: tests = [-100.0, -10.0, -2.0, -1.0, -0.5, -0.1, 0.0, 0.1, 0.5, 1.0, 2.0, 10.0, 100.0] elif type(ans) == bool: tests = [True, False] elif type(ans) == list: if len(ans) == 0: tests = [ans] else: el = list(ans)[0] if type(el) == int: base = list(range(-3, 4)) elif type(el) == str: base = ["a", "b", "foo", "bar", "baz"] elif type(el) == bool: base = [True, False] elif type(el) == float: base = [-1.0, -0.1, 0.0, 0.1, 0.5, 1.0, 2.0] else: # print(f"Can't create trivial instances fitting pattern `{ans}`"[:1000]) base = [] from itertools import product tests = [] for r in range(6): tests.extend(list(p) for p in product(base, repeat=r)) else: print(f"Can't check for types, unexpected type `{type(ans)}`") tests = [] for t in tests: try: assert self.sat(t, **example) except: continue utils.warn(f"`{self.name}` in file `{self.__module__.split('.')[-1]}` " f"has trivial solution `{t}`") break dur = time.perf_counter() - time0 if dur > 1.0: # warn if above one second utils.warn(f"Took {dur:.1f}s to test for trivial solutions to `{self.name}`") def gen(self, target_num_instances): pass def gen_random(self): pass def add(self, inp: dict, test=True, multiplier=1.0): s = str(inp) if s in self._seen_inputs: return # duplicate problem else: self._seen_inputs.add(s) assert set(inp) == set(self.arg_names), f"Instance #{self.num_generated_so_far()} keys mismatch in {self.name}" example = self.get_example() for k in inp: v1, v2 = example[k], inp[k] if not same_types(v1, v2): utils.warn(f"Instance #{self.num_generated_so_far()} variable `{k}` type mismatch in {self.name}") self._inputs.append((inp, test, multiplier)) def debug(self, target_num_instances=10000): print(f"Debugging {self.name}") old_debug = self.DEBUG self.DEBUG = True self.build(target_num_instances, force_trivial_test=True) self.DEBUG = old_debug def get_src_spec(f: Callable): try: src = inspect.getsource(f) spec = 
inspect.getfullargspec(f) except OSError: utils.error("Cannot use inspect, happens in some interpreters... Try running in ipython.") raise de_indent = min([len(line) - len(line.lstrip(" ")) for line in src.splitlines() if line.strip()]) src = "\n".join([line[de_indent:] for line in src.splitlines()]).strip() if src.startswith("@staticmethod"): src = src[len("@staticmethod"):].strip() assert src.startswith("def ") return src, spec def create_sol_header(defaults, function_name="sol"): # could add types here if needed ans = f"def {function_name}(" ans += ", ".join(f'{var}={utils.stringify(default)}' for var, default in defaults.items()) ans += "):" return ans def create_sat(src, ans_name, ans_type, args, defaults, function_name="sat"): assert set(defaults) == set(args), f"Add error: defaults don't match args {args} in {src}" ans = f"def {function_name}({ans_name}: {ans_type}" if args: ans += ", " + ", ".join(f"{v_name}={utils.stringify(defaults[v_name])}" for v_name in args) ans += "):\n" ans += get_body(src) return ans def get_func_name(src): assert src.startswith("def ") return src[4:src.index("(")]
13,348
510
from pathlib import Path

import matplotlib.pyplot as plt
import torch

from ..data_tools import ImageDirectoryDataset, get_transform
from ..gan import ProGAN
from ..networks import Discriminator, Generator
from .utils import device


def test_pro_gan_progressive_downsample_batch() -> None:
    """Visual + shape check for ``ProGAN.progressive_downsample_batch``.

    Builds a random 4x3x1024x1024 batch clamped to [0, 1], downsamples it
    to every resolution from 4x4 (res_log2=2) up to 512x512 (res_log2=9),
    plots each result, and asserts that batch/channel dimensions are kept
    while the spatial dimensions become 2**res_log2.

    NOTE(review): calls plt.show() at the end, so this is an interactive /
    manual inspection test rather than a pure CI assertion.
    """
    # Random image batch at full 1024x1024 resolution, clamped into the
    # valid [0, 1] image range.
    batch = torch.randn((4, 3, 1024, 1024)).to(device)
    batch = torch.clamp(batch, min=0, max=1)
    # depth=10 -> networks sized for 2**10 = 1024-pixel images.
    progan = ProGAN(Generator(10), Discriminator(10), device=device)
    for res_log2 in range(2, 10):
        # alpha near 0: presumably selects almost entirely the
        # lower-resolution branch of the progressive fade-in -- confirm
        # against ProGAN.progressive_downsample_batch.
        modified_batch = progan.progressive_downsample_batch(
            batch, depth=res_log2, alpha=0.001
        )
        print(f"Downsampled batch at res_log2 {res_log2}: {modified_batch.shape}")
        plt.figure()
        plt.title(f"Image at resolution: {int(2 ** res_log2)}x{int(2 ** res_log2)}")
        plt.imshow(modified_batch.permute((0, 2, 3, 1))[0].cpu().numpy())
        # Spatial size must follow the requested depth; batch and channel
        # dimensions must be untouched.
        assert modified_batch.shape == (
            batch.shape[0],
            batch.shape[1],
            int(2 ** res_log2),
            int(2 ** res_log2),
        )
    # Show the original full-resolution image for visual comparison.
    plt.figure()
    plt.title(f"Image at resolution: {1024}x{1024}")
    plt.imshow(batch.permute((0, 2, 3, 1))[0].cpu().numpy())
    plt.show()


def test_pro_gan_train() -> None:
    """Smoke-test a short progressive training run on a local CIFAR-10 dump.

    NOTE(review): depends on a machine-specific dataset path
    (/media/deepstorage01/...), so this only runs on the original author's
    machine -- parameterize the path before using it in CI.
    """
    depth = 4
    progan = ProGAN(Generator(depth), Discriminator(depth), device=device)
    progan.train(
        dataset=ImageDirectoryDataset(
            Path("/media/deepstorage01/datasets_external/cifar_10/cifar/images"),
            transform=get_transform(
                new_size=(int(2 ** depth), int(2 ** depth)), flip_horizontal=False
            ),
            rec_dir=False,
        ),
        # Three progressive stages: 100 epochs each, batch size 256, with a
        # 50% fade-in portion per stage.
        epochs=[100 for _ in range(3)],
        batch_sizes=[256, 256, 256],
        fade_in_percentages=[50 for _ in range(3)],
        save_dir=Path("./test_train"),
        num_samples=64,
        feedback_factor=10,
    )
    print("test_finished")
860
325
package com.box.l10n.mojito.cli.command;

import com.box.l10n.mojito.cli.CLITestBase;
import com.box.l10n.mojito.entity.Repository;
import com.box.l10n.mojito.io.Files;
import com.box.l10n.mojito.rest.client.AssetClient;
import com.box.l10n.mojito.rest.client.RepositoryClient;
import com.box.l10n.mojito.service.repository.RepositoryRepository;
import com.box.l10n.mojito.service.tm.TMTextUnitVariantRepository;
import com.google.common.base.Strings;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.io.File;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * By default this run on HSQL, so point it to proper configuration to run with MySQL for example by changing the
 * pom.xml to use webapp configuration instead of cli's.
 */
public class PerformanceSanboxTest extends CLITestBase {

    /**
     * logger
     */
    static Logger logger = LoggerFactory.getLogger(PerformanceSanboxTest.class);

    // Number of text units written into each generated .properties input file.
    static int NUMBER_OF_TEXTUNITS = 10000;

    @Autowired
    RepositoryClient repositoryClient;

    @Autowired
    RepositoryRepository repositoryRepository;

    @Autowired
    AssetClient assetClient;

    @Autowired
    TMTextUnitVariantRepository tmTextUnitVariantRepository;

    // Guard flag: the performance tests only run when
    // -Dtest.l10n.cli.performance=true is set (enforced in before()).
    @Value("${test.l10n.cli.performance:false}")
    boolean runPerformance;

    // String repoName = "perfclitest-a-" + NUMBER_OF_TEXTUNITS;
    String repoName = "perf-10k";

    /** Skips every test in this class unless the performance flag is enabled. */
    @Before
    public void before() {
        Assume.assumeTrue(runPerformance);
    }

    /** Creates (or reuses) the performance repository, nothing else. */
    @Test
    public void createRepository() throws Exception {
        getOrCreateRepository();
    }

    /** Times a full push of NUMBER_OF_TEXTUNITS text units from a single generated file. */
    @Test
    public void push() throws Exception {
        Repository repository = getOrCreateRepository();
        generateInputFiles(getTargetTestDir("input"), 0, NUMBER_OF_TEXTUNITS);
        getL10nJCommander().run("push", "-r", repository.getName(),
                "-s", getTargetTestDir("input").getAbsolutePath(),
                "--filter-options", "0"
        );
    }

    /**
     * Times a push of NUMBER_OF_TEXTUNITS + 20 text units; meant to be run after
     * push() so only the small diff (20 new units) is actually new on the server.
     */
    @Test
    public void pushSmallDiff() throws Exception {
        Repository repository = getOrCreateRepository();
        generateInputFiles(getTargetTestDir("input"), 0, NUMBER_OF_TEXTUNITS + 20);
        getL10nJCommander().run("push", "-r", repository.getName(),
                "-s", getTargetTestDir("input").getAbsolutePath(),
                "--filter-options", "0"
        );
    }

    /**
     * Exercises the extract / extract-diff flow: pushes a master branch, runs local
     * extractions for master and two branches that differ slightly from master,
     * then pushes only the computed diffs to branch1/branch2.
     */
    @Test
    public void extractDiff() throws Exception {
        Repository repository = getOrCreateRepository();
        // branch1 drops nothing but appends 5 extra units; branch2 drops the
        // first 5 units and appends a different 5 extra units.
        generateInputFiles(getTargetTestDir("input/master"), 0, NUMBER_OF_TEXTUNITS);
        generateInputFiles(getTargetTestDir("input/branch1"), 0, NUMBER_OF_TEXTUNITS, NUMBER_OF_TEXTUNITS, NUMBER_OF_TEXTUNITS + 5, 0);
        generateInputFiles(getTargetTestDir("input/branch2"), 5, NUMBER_OF_TEXTUNITS, NUMBER_OF_TEXTUNITS + 5, NUMBER_OF_TEXTUNITS + 10, 0);

        getL10nJCommander().run("push", "-r", repository.getName(),
                "-s", getTargetTestDir("input/master").getAbsolutePath(),
                "-b", "master",
                "--filter-options", "0"
        );

        getL10nJCommander().run("extract",
                "-s", getTargetTestDir("input/master").getAbsolutePath(),
                "-o", getTargetTestDir("extractions").getAbsolutePath(),
                "-n", "master",
                "-fo", "sometestoption=value1");

        getL10nJCommander().run("extract",
                "-s", getTargetTestDir("input/branch1").getAbsolutePath(),
                "-o", getTargetTestDir("extractions").getAbsolutePath(),
                "-n", "branch1",
                "-fo", "sometestoption=value1");

        getL10nJCommander().run("extract",
                "-s", getTargetTestDir("input/branch2").getAbsolutePath(),
                "-o", getTargetTestDir("extractions").getAbsolutePath(),
                "-n", "branch2",
                "-fo", "sometestoption=value1");

        getL10nJCommander().run("extract-diff",
                "-i", getTargetTestDir("extractions").getAbsolutePath(),
                "-o", getTargetTestDir("extraction-diffs").getAbsolutePath(),
                "-c", "branch1",
                "-b", "master",
                "--push-to", repository.getName(),
                "--push-to-branch", "branch1");

        getL10nJCommander().run("extract-diff",
                "-i", getTargetTestDir("extractions").getAbsolutePath(),
                "-o", getTargetTestDir("extraction-diffs").getAbsolutePath(),
                "-c", "branch2",
                "-b", "master",
                "--push-to", repository.getName(),
                "--push-to-branch", "branch2");
    }

    /**
     * Pushes 10 units, then 100, then 10 again on the same branch to exercise
     * text units flipping between used and unused states.
     */
    @Test
    public void changeUsed() throws Exception {
        Repository repository = getOrCreateRepository();
        generateInputFiles(getTargetTestDir("input/master-10"), 0, 10);
        generateInputFiles(getTargetTestDir("input/master-100"), 0, 100);

        getL10nJCommander().run("push", "-r", repository.getName(),
                "-s", getTargetTestDir("input/master-10").getAbsolutePath(),
                "-b", "master",
                "--filter-options", "0"
        );

        getL10nJCommander().run("push", "-r", repository.getName(),
                "-s", getTargetTestDir("input/master-100").getAbsolutePath(),
                "-b", "master",
                "--filter-options", "0"
        );

        getL10nJCommander().run("push", "-r", repository.getName(),
                "-s", getTargetTestDir("input/master-10").getAbsolutePath(),
                "-b", "master",
                "--filter-options", "0"
        );
    }

    /**
     * Looks up the performance repository by name, creating it with a fixed set
     * of locales (fr-FR, fr-CA inheriting fr-FR, ja-JP) on first use.
     * NOTE(review): repositoryService is not declared in this class -- presumably
     * inherited from CLITestBase; confirm before refactoring.
     */
    Repository getOrCreateRepository() throws Exception {
        Repository repository = repositoryRepository.findByName(repoName);

        if (repository == null) {
            repository = repositoryService.createRepository(repoName, repoName + " description", null, false);
            repositoryService.addRepositoryLocale(repository, "fr-FR");
            repositoryService.addRepositoryLocale(repository, "fr-CA", "fr-FR", false);
            repositoryService.addRepositoryLocale(repository, "ja-JP");
        }
        return repository;
    }

    /**
     * Writes master/branch1/branch2 input files to a hard-coded local path for
     * manual benchmarking with the demojito CLI (see command log below).
     * NOTE(review): machine-specific path (/Users/jeanaurambault/...) -- only
     * useful on the original author's machine.
     */
    @Test
    public void generate() {
        generateInputFiles(new File("/Users/jeanaurambault/tmp/" + repoName + "/master"), 0, NUMBER_OF_TEXTUNITS);
        generateInputFiles(new File("/Users/jeanaurambault/tmp/" + repoName + "/branch1"), 0, NUMBER_OF_TEXTUNITS, NUMBER_OF_TEXTUNITS, NUMBER_OF_TEXTUNITS + 5, 0);
        generateInputFiles(new File("/Users/jeanaurambault/tmp/" + repoName + "/branch2"), 0, NUMBER_OF_TEXTUNITS, NUMBER_OF_TEXTUNITS + 10, NUMBER_OF_TEXTUNITS + 15, 0);

        // demojito repo-create -n perf-10k -l cs-CZ da-DK de-DE el-GR en-GB es-AR es-ES es-MX fi-FI fr-FR hi-IN hu-HU id-ID it-IT ja-JP ko-KR ms-MY nb-NO nl-NL pl-PL pt-BR pt-PT ro-RO ru-RU sk-SK sv-SE th-TH tl-PH tr-TR uk-UA vi-VN

        // first push - 38 sec
        // time demojito push -r perf-10k -s master

        // branch 1 full push with small diff
        // demojito push -r perf-10k -s branch1 -b branch1

        // update branch 1 using diff
        // demojito extract -s master -n master; demojito extract -s branch1 -n branch1 ; demojito extract-diff -b master -c branch1 -p perf-10k -pb branch1 -pbc jeanaurambault
        // demojito extract -s master -n master; demojito extract -s branch2 -n branch2 ; time demojito extract-diff -b master -c branch2 -p perf-10k -pb branch2 -pbc jeanaurambault

        // demojito pull -r perf-10k -s master
    }

    /** Writes 100 small asset files (10 units each) for many-asset benchmarking. */
    @Test
    public void generateManyAssets() {
        for (int i = 0; i < 100; i++) {
            generateInputFiles(new File("/Users/jeanaurambault/tmp/manyAsset/master"), 0, 10, 0, 0, i);
        }
    }

    /** Convenience overload: one index range, file suffix 0. */
    void generateInputFiles(File inputsDirectory, int startIdxRange1, int endIdxRange1) {
        generateInputFiles(inputsDirectory, startIdxRange1, endIdxRange1, 0, 0, 0);
    }

    /**
     * Generates a Java .properties file named performance-{filenameIdx}.properties
     * containing one commented, padded entry per index in the two half-open
     * ranges [startIdxRange1, endIdxRange1) and [startIdxRange2, endIdxRange2).
     * Padding ('a' runs) inflates comment/value length to mimic realistic sizes.
     */
    void generateInputFiles(File inputsDirectory, int startIdxRange1, int endIdxRange1, int startIdxRange2, int endIdxRange2, int filenameIdx) {
        String fileContent = IntStream.concat(IntStream.range(startIdxRange1, endIdxRange1), IntStream.range(startIdxRange2, endIdxRange2)).mapToObj(idx -> {
            return String.format("# %s\n%s=%s\n\n",
                    "comment-" + idx + "-" + Strings.padStart("", 50, 'a'),
                    "name-" + idx,
                    "value-" + idx + "-" + Strings.padStart("", 30, 'a'));
        }).collect(Collectors.joining());

        Files.createDirectories(inputsDirectory.toPath());
        Files.write(inputsDirectory.toPath().resolve("performance-" + filenameIdx + ".properties"), fileContent);
    }
}
3,856
12,718
/**
 * This file has no copyright assigned and is placed in the Public Domain.
 * This file is part of the mingw-w64 runtime package.
 * No warranty is given; refer to the file DISCLAIMER.PD within this package.
 */
#include <math.h>

/*
 * rintf - round x to an integral value according to the current x87
 * rounding mode (round-to-nearest-even by default), per the C99 rintf()
 * contract.
 *
 * Implemented with the x87 `frndint` instruction via extended inline asm
 * ("=t"/"0" place x in and read the result from st(0)), so this
 * translation unit only builds for x86/x86-64 targets.
 */
float rintf (float x)
{
  float retval = 0.0F;
  __asm__ __volatile__ ("frndint;": "=t" (retval) : "0" (x));
  return retval;
}
143
3,702
// Copyright (c) YugaByte, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except // in compliance with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License // is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express // or implied. See the License for the specific language governing permissions and limitations // under the License. // #include "yb/yql/pggate/pg_table.h" namespace yb { namespace pggate { PgTable::PgTable(const PgTableDescPtr& desc) : desc_(desc), columns_(std::make_shared<std::vector<PgColumn>>()) { if (!desc_) { return; } size_t num_columns = desc_->num_columns(); columns_->reserve(num_columns + 1); for (size_t i = 0; i != num_columns; ++i) { columns_->emplace_back(desc->schema(), i); } columns_->emplace_back(desc_->schema(), num_columns); } Result<PgColumn&> PgTable::ColumnForAttr(int attr_num) { return (*columns_)[VERIFY_RESULT(desc_->FindColumn(attr_num))]; } PgColumn& PgTable::ColumnForIndex(size_t index) { CHECK_LT(index + 1, columns_->size()); return (*columns_)[index]; } } // namespace pggate } // namespace yb
460
1,162
<filename>KafkaCenter-Core/src/test/java/org/nesc/ec/bigdata/email/EmailTest.java<gh_stars>1000+ package org.nesc.ec.bigdata.email; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; import org.nesc.ec.bigdata.Application; import org.nesc.ec.bigdata.service.EmailService; /** * @author Reason.H.Duan * @version 1.0 * @date 3/29/2019 */ @RunWith(SpringRunner.class) @SpringBootTest(classes={Application.class}) public class EmailTest { @Autowired EmailService emailService; /** * Test send email. * Email服务已升级,需要传入特定的参数<br> * 2020/03/04 */ @Test public void testSendEmail() { // emailService.sendDefaultTitleMail("<EMAIL>", "KafkaCenter test send email."); } }
370
852
<filename>DQMOffline/JetMET/test/cruzet09_PromptAna.py<gh_stars>100-1000 # import FWCore.ParameterSet.Config as cms process = cms.Process("test") process.load("CondCore.DBCommon.CondDBSetup_cfi") # # DQM # process.load("DQMServices.Core.DQM_cfg") process.load("DQMServices.Components.MEtoEDMConverter_cfi") # the task - JetMET objects process.load("DQMOffline.JetMET.jetMETDQMOfflineSourceCosmic_cff") process.jetMETAnalyzer.OutputMEsInRootFile = cms.bool(True) process.jetMETAnalyzer.OutputFileName = cms.string('jetMETMonitoring_cruzet100945.root') process.jetMETAnalyzer.DoJetPtAnalysis = cms.untracked.bool(True) process.jetMETAnalyzer.caloMETAnalysis.allSelection = cms.bool(True) process.jetMETAnalyzer.caloMETNoHFAnalysis.allSelection = cms.bool(True) process.jetMETAnalyzer.caloMETHOAnalysis.allSelection = cms.bool(True) process.jetMETAnalyzer.caloMETNoHFHOAnalysis.allSelection = cms.bool(True) #process.jetMETAnalyzer.caloMETAnalysis.verbose = cms.int32(1) # the task - JetMET trigger process.load("DQMOffline.Trigger.JetMETHLTOfflineSource_cfi") # check # of bins process.load("DQMServices.Components.DQMStoreStats_cfi") # for igprof #process.IgProfService = cms.Service("IgProfService", # reportFirstEvent = cms.untracked.int32(0), # reportEventInterval = cms.untracked.int32(25), # reportToFileAtPostEvent = cms.untracked.string("| gzip -c > igdqm.%I.gz") #) process.source = cms.Source("PoolSource", fileNames = cms.untracked.vstring( '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/FA72B935-0960-DE11-A902-000423D98DB4.root' #'/store/data/Commissioning09/Calo/RECO/v3/000/100/945/0C547BAF-0C60-DE11-83C3-000423D98868.root' #'/store/data/CRUZET09/Calo/RECO/v1/000/098/154/EADF3BE3-BE4F-DE11-8BB8-000423D9870C.root' #'file:/tmp/hatake/EADF3BE3-BE4F-DE11-8BB8-000423D9870C.root' ) ) # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/FA72B935-0960-DE11-A902-000423D98DB4.root', # 
'/store/data/Commissioning09/Calo/RECO/v3/000/100/945/EA597588-0F60-DE11-938A-001D09F251B8.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/B80B1DA7-0560-DE11-8850-000423D6CA02.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/A2A21790-0A60-DE11-8231-001617E30D40.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/94D38392-0A60-DE11-8DE9-0019DB2F3F9A.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/8C1F34B5-0C60-DE11-9413-000423D985B0.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/6243D9DC-0960-DE11-8104-001617C3B5F4.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/52F63D01-2460-DE11-8DB4-001D09F24489.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/4EDF8E52-0B60-DE11-A6CB-000423D98930.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/301E3B65-0D60-DE11-B51C-000423D94908.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/22E1ECD9-0E60-DE11-B9EB-001D09F28D54.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/22BEF6AE-0C60-DE11-B76D-001617C3B6C6.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/1EA1811B-0E60-DE11-A6F0-001617C3B76E.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/1CE0FB64-0D60-DE11-9F26-000423D98BE8.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/1AA3E288-0F60-DE11-8900-001D09F24399.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/12CCE9D9-0E60-DE11-95FD-001D09F2A690.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/10075189-0F60-DE11-B1E5-001D09F2932B.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/0C547BAF-0C60-DE11-83C3-000423D98868.root', # '/store/data/Commissioning09/Calo/RECO/v3/000/100/945/06B387FF-0B60-DE11-94A9-000423D95220.root' process.source.inputCommands = cms.untracked.vstring('keep *', 'drop *_MEtoEDMConverter_*_*') process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32( 1000 ) ) process.Timing = 
cms.Service("Timing") process.MessageLogger = cms.Service("MessageLogger", debugModules = cms.untracked.vstring('jetMETAnalyzer'), cout = cms.untracked.PSet( default = cms.untracked.PSet( limit = cms.untracked.int32(0) ), jetMETAnalyzer = cms.untracked.PSet( limit = cms.untracked.int32(100) ), noLineBreaks = cms.untracked.bool(True), DEBUG = cms.untracked.PSet( limit = cms.untracked.int32(0) ), #FwkJob = cms.untracked.PSet( # limit = cms.untracked.int32(0) #), threshold = cms.untracked.string('DEBUG') ), categories = cms.untracked.vstring('jetMETAnalyzer'), destinations = cms.untracked.vstring('cout') ) process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) ) process.FEVT = cms.OutputModule("PoolOutputModule", outputCommands = cms.untracked.vstring('keep *_MEtoEDMConverter_*_*'), #outputCommands = cms.untracked.vstring('keep *'), fileName = cms.untracked.string('reco_DQM_cruzet100945.root') ) process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) ## default is false ) #process.load('Configuration/StandardSequences/EDMtoMEAtRunEnd_cff') #process.load("DQMOffline.JetMET.jetMETDQMStoreClean_cff"); process.p = cms.Path(process.jetMETHLTOfflineSource * process.jetMETDQMOfflineSourceCosmic * process.MEtoEDMConverter * process.dqmStoreStats) process.outpath = cms.EndPath(process.FEVT) process.DQM.collectorHost = ''
2,810
350
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Tests for the API /deploy_templates/ methods. """ import datetime from http import client as http_client from unittest import mock from urllib import parse as urlparse from oslo_config import cfg from oslo_utils import timeutils from oslo_utils import uuidutils from ironic.api.controllers import base as api_base from ironic.api.controllers import v1 as api_v1 from ironic.api.controllers.v1 import notification_utils from ironic.common import exception from ironic import objects from ironic.objects import fields as obj_fields from ironic.tests.unit.api import base as test_api_base from ironic.tests.unit.api import utils as test_api_utils from ironic.tests.unit.objects import utils as obj_utils def _obj_to_api_step(obj_step): """Convert a deploy step in 'object' form to one in 'API' form.""" return { 'interface': obj_step['interface'], 'step': obj_step['step'], 'args': obj_step['args'], 'priority': obj_step['priority'], } class BaseDeployTemplatesAPITest(test_api_base.BaseApiTest): headers = {api_base.Version.string: str(api_v1.max_version())} invalid_version_headers = {api_base.Version.string: '1.54'} class TestListDeployTemplates(BaseDeployTemplatesAPITest): def test_empty(self): data = self.get_json('/deploy_templates', headers=self.headers) self.assertEqual([], data['deploy_templates']) def test_one(self): template = obj_utils.create_test_deploy_template(self.context) data = self.get_json('/deploy_templates', headers=self.headers) 
# NOTE(review): this chunk begins mid-method; the statements below belong to a
# collapsed-listing GET test whose "def" line precedes this chunk.
        self.assertEqual(1, len(data['deploy_templates']))
        self.assertEqual(template.uuid, data['deploy_templates'][0]['uuid'])
        self.assertEqual(template.name, data['deploy_templates'][0]['name'])
        # Collapsed (non-detail) listings must omit the heavyweight fields.
        self.assertNotIn('steps', data['deploy_templates'][0])
        self.assertNotIn('extra', data['deploy_templates'][0])

    def test_get_one(self):
        # GET by UUID returns the full representation, including every step.
        template = obj_utils.create_test_deploy_template(self.context)
        data = self.get_json('/deploy_templates/%s' % template.uuid,
                             headers=self.headers)
        self.assertEqual(template.uuid, data['uuid'])
        self.assertEqual(template.name, data['name'])
        self.assertEqual(template.extra, data['extra'])
        for t_dict_step, t_step in zip(data['steps'], template.steps):
            self.assertEqual(t_dict_step['interface'], t_step['interface'])
            self.assertEqual(t_dict_step['step'], t_step['step'])
            self.assertEqual(t_dict_step['args'], t_step['args'])
            self.assertEqual(t_dict_step['priority'], t_step['priority'])

    def test_get_one_with_json(self):
        # A ".json" suffix on the resource URL is accepted.
        template = obj_utils.create_test_deploy_template(self.context)
        data = self.get_json('/deploy_templates/%s.json' % template.uuid,
                             headers=self.headers)
        self.assertEqual(template.uuid, data['uuid'])

    def test_get_one_with_suffix(self):
        # A name ending in a digit must not be confused with a ".json" suffix.
        template = obj_utils.create_test_deploy_template(self.context,
                                                         name='CUSTOM_DT1')
        data = self.get_json('/deploy_templates/%s' % template.uuid,
                             headers=self.headers)
        self.assertEqual(template.uuid, data['uuid'])

    def test_get_one_custom_fields(self):
        template = obj_utils.create_test_deploy_template(self.context)
        fields = 'name,steps'
        data = self.get_json(
            '/deploy_templates/%s?fields=%s' % (template.uuid, fields),
            headers=self.headers)
        # We always append "links"
        self.assertCountEqual(['name', 'steps', 'links'], data)

    def test_get_collection_custom_fields(self):
        fields = 'uuid,steps'
        for i in range(3):
            obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % i)
        data = self.get_json(
            '/deploy_templates?fields=%s' % fields,
            headers=self.headers)
        self.assertEqual(3, len(data['deploy_templates']))
        for template in data['deploy_templates']:
            # We always append "links"
            self.assertCountEqual(['uuid', 'steps', 'links'], template)

    def test_get_custom_fields_invalid_fields(self):
        # Unknown field names in ?fields= produce a 400 naming the field.
        template = obj_utils.create_test_deploy_template(self.context)
        fields = 'uuid,spongebob'
        response = self.get_json(
            '/deploy_templates/%s?fields=%s' % (template.uuid, fields),
            headers=self.headers, expect_errors=True)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertIn('spongebob', response.json['error_message'])

    def test_get_all_invalid_api_version(self):
        # Endpoint is hidden (404) below the minimum supported microversion.
        obj_utils.create_test_deploy_template(self.context)
        response = self.get_json('/deploy_templates',
                                 headers=self.invalid_version_headers,
                                 expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)

    def test_get_one_invalid_api_version(self):
        template = obj_utils.create_test_deploy_template(self.context)
        response = self.get_json(
            '/deploy_templates/%s' % (template.uuid),
            headers=self.invalid_version_headers, expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)

    def test_detail_query(self):
        # ?detail=True listings include the heavyweight fields.
        template = obj_utils.create_test_deploy_template(self.context)
        data = self.get_json('/deploy_templates?detail=True',
                             headers=self.headers)
        self.assertEqual(template.uuid, data['deploy_templates'][0]['uuid'])
        self.assertIn('name', data['deploy_templates'][0])
        self.assertIn('steps', data['deploy_templates'][0])
        self.assertIn('extra', data['deploy_templates'][0])

    def test_detail_query_false(self):
        # ?detail=False is equivalent to omitting the parameter.
        obj_utils.create_test_deploy_template(self.context)
        data1 = self.get_json('/deploy_templates', headers=self.headers)
        data2 = self.get_json(
            '/deploy_templates?detail=False', headers=self.headers)
        self.assertEqual(data1['deploy_templates'], data2['deploy_templates'])

    def test_detail_using_query_false_and_fields(self):
        # detail=False may be combined with an explicit field selection.
        obj_utils.create_test_deploy_template(self.context)
        data = self.get_json(
            '/deploy_templates?detail=False&fields=steps',
            headers=self.headers)
        self.assertIn('steps', data['deploy_templates'][0])
        self.assertNotIn('uuid', data['deploy_templates'][0])
        self.assertNotIn('extra', data['deploy_templates'][0])

    def test_detail_using_query_and_fields(self):
        # detail=True together with ?fields= is rejected.
        obj_utils.create_test_deploy_template(self.context)
        response = self.get_json(
            '/deploy_templates?detail=True&fields=name', headers=self.headers,
            expect_errors=True)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)

    def test_many(self):
        templates = []
        for id_ in range(5):
            template = obj_utils.create_test_deploy_template(
                self.context, uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates', headers=self.headers)
        self.assertEqual(len(templates), len(data['deploy_templates']))
        uuids = [n['uuid'] for n in data['deploy_templates']]
        self.assertCountEqual(templates, uuids)

    def test_links(self):
        # Each resource carries a "self" and a "bookmark" link.
        uuid = uuidutils.generate_uuid()
        obj_utils.create_test_deploy_template(self.context, uuid=uuid)
        data = self.get_json('/deploy_templates/%s' % uuid,
                             headers=self.headers)
        self.assertIn('links', data)
        self.assertEqual(2, len(data['links']))
        self.assertIn(uuid, data['links'][0]['href'])
        for link in data['links']:
            bookmark = link['rel'] == 'bookmark'
            self.assertTrue(self.validate_link(link['href'],
                                               bookmark=bookmark,
                                               headers=self.headers))

    def test_collection_links(self):
        # An explicit ?limit= produces a "next" URL whose marker is the last
        # returned item.
        templates = []
        for id_ in range(5):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates/?limit=3',
                             headers=self.headers)
        self.assertEqual(3, len(data['deploy_templates']))
        next_marker = data['deploy_templates'][-1]['uuid']
        self.assertIn(next_marker, data['next'])

    def test_collection_links_default_limit(self):
        # The configured api.max_limit caps pages even without ?limit=.
        cfg.CONF.set_override('max_limit', 3, 'api')
        templates = []
        for id_ in range(5):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates', headers=self.headers)
        self.assertEqual(3, len(data['deploy_templates']))
        next_marker = data['deploy_templates'][-1]['uuid']
        self.assertIn(next_marker, data['next'])

    def test_collection_links_custom_fields(self):
        # The field selection must be preserved in the "next" URL.
        cfg.CONF.set_override('max_limit', 3, 'api')
        templates = []
        fields = 'uuid,steps'
        for i in range(5):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % i)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates?fields=%s' % fields,
                             headers=self.headers)
        self.assertEqual(3, len(data['deploy_templates']))
        next_marker = data['deploy_templates'][-1]['uuid']
        self.assertIn(next_marker, data['next'])
        self.assertIn('fields', data['next'])

    def test_get_collection_pagination_no_uuid(self):
        # Pagination works even when 'uuid' is excluded from ?fields=.
        fields = 'name'
        limit = 2
        templates = []
        for id_ in range(3):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template)
        data = self.get_json(
            '/deploy_templates?fields=%s&limit=%s' % (fields, limit),
            headers=self.headers)
        self.assertEqual(limit, len(data['deploy_templates']))
        self.assertIn('marker=%s' % templates[limit - 1].uuid, data['next'])

    def test_sort_key(self):
        templates = []
        for id_ in range(3):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates?sort_key=uuid',
                             headers=self.headers)
        uuids = [n['uuid'] for n in data['deploy_templates']]
        self.assertEqual(sorted(templates), uuids)

    def test_sort_key_invalid(self):
        # Non-sortable columns are rejected with a 400 naming the key.
        invalid_keys_list = ['extra', 'foo', 'steps']
        for invalid_key in invalid_keys_list:
            path = '/deploy_templates?sort_key=%s' % invalid_key
            response = self.get_json(path, expect_errors=True,
                                     headers=self.headers)
            self.assertEqual(http_client.BAD_REQUEST, response.status_int)
            self.assertEqual('application/json', response.content_type)
            self.assertIn(invalid_key, response.json['error_message'])

    def _test_sort_key_allowed(self, detail=False):
        """Create templates in reverse name order; verify sort_key=name."""
        template_uuids = []
        for id_ in range(3, 0, -1):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            template_uuids.append(template.uuid)
        template_uuids.reverse()
        url = '/deploy_templates?sort_key=name&detail=%s' % str(detail)
        data = self.get_json(url, headers=self.headers)
        data_uuids = [p['uuid'] for p in data['deploy_templates']]
        self.assertEqual(template_uuids, data_uuids)

    def test_sort_key_allowed(self):
        self._test_sort_key_allowed()

    def test_detail_sort_key_allowed(self):
        self._test_sort_key_allowed(detail=True)

    def test_sensitive_data_masked(self):
        # Step argument values named 'password' are masked in responses.
        template = obj_utils.get_test_deploy_template(self.context)
        template.steps[0]['args']['password'] = '<PASSWORD>'
        template.create()
        data = self.get_json('/deploy_templates/%s' % template.uuid,
                             headers=self.headers)
        self.assertEqual("******", data['steps'][0]['args']['password'])


@mock.patch.object(objects.DeployTemplate, 'save', autospec=True)
class TestPatch(BaseDeployTemplatesAPITest):
    """Tests for PATCH /v1/deploy_templates/<ident> (JSON-patch updates).

    DeployTemplate.save is mocked for the whole class, so every test method
    receives a trailing ``mock_save`` argument.
    """

    def setUp(self):
        super(TestPatch, self).setUp()
        self.template = obj_utils.create_test_deploy_template(
            self.context, name='CUSTOM_DT1')

    def _test_update_ok(self, mock_save, patch):
        """Apply a patch that must succeed (200) and trigger one save()."""
        response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
                                   patch, headers=self.headers)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        mock_save.assert_called_once_with(mock.ANY)
        return response

    def _test_update_bad_request(self, mock_save, patch, error_msg=None):
        """Apply a patch that must fail with 400 and never reach save()."""
        response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
                                   patch, expect_errors=True,
                                   headers=self.headers)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.BAD_REQUEST, response.status_code)
        self.assertTrue(response.json['error_message'])
        if error_msg:
            self.assertIn(error_msg, response.json['error_message'])
        self.assertFalse(mock_save.called)
        return response

    @mock.patch.object(notification_utils, '_emit_api_notification',
                       autospec=True)
    def test_update_by_id(self, mock_notify, mock_save):
        # A successful update emits start/end notifications at INFO level.
        name = 'CUSTOM_DT2'
        patch = [{'path': '/name', 'value': name, 'op': 'add'}]
        response = self._test_update_ok(mock_save, patch)
        self.assertEqual(name, response.json['name'])
        mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'update',
                                      obj_fields.NotificationLevel.INFO,
                                      obj_fields.NotificationStatus.START),
                                      mock.call(mock.ANY, mock.ANY, 'update',
                                      obj_fields.NotificationLevel.INFO,
                                      obj_fields.NotificationStatus.END)])

    def test_update_by_name(self, mock_save):
        # Templates are addressable by name as well as by UUID.
        steps = [{
            'interface': 'bios',
            'step': 'apply_configuration',
            'args': {'foo': 'bar'},
            'priority': 42
        }]
        patch = [{'path': '/steps', 'value': steps, 'op': 'replace'}]
        response = self.patch_json('/deploy_templates/%s' % self.template.name,
                                   patch, headers=self.headers)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        mock_save.assert_called_once_with(mock.ANY)
        self.assertEqual(steps, response.json['steps'])

    def test_update_by_name_with_json(self, mock_save):
        interface = 'bios'
        path = '/deploy_templates/%s.json' % self.template.name
        response = self.patch_json(path,
                                   [{'path': '/steps/0/interface',
                                     'value': interface, 'op': 'replace'}],
                                   headers=self.headers)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        self.assertEqual(interface, response.json['steps'][0]['interface'])

    def test_update_name_standard_trait(self, mock_save):
        # Standard (non-CUSTOM_) trait names are also accepted.
        name = 'HW_CPU_X86_VMX'
        patch = [{'path': '/name', 'value': name, 'op': 'replace'}]
        response = self._test_update_ok(mock_save, patch)
        self.assertEqual(name, response.json['name'])

    def test_update_name_custom_trait(self, mock_save):
        name = 'CUSTOM_DT2'
        patch = [{'path': '/name', 'value': name, 'op': 'replace'}]
        response = self._test_update_ok(mock_save, patch)
        self.assertEqual(name, response.json['name'])

    def test_update_invalid_name(self, mock_save):
        self._test_update_bad_request(
            mock_save,
            [{'path': '/name', 'value': 'aa:bb_cc', 'op': 'replace'}],
            "'aa:bb_cc' does not match '^CUSTOM_[A-Z0-9_]+$'")

    def test_update_by_id_invalid_api_version(self, mock_save):
        name = 'CUSTOM_DT2'
        headers = self.invalid_version_headers
        response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
                                   [{'path': '/name', 'value': name,
                                     'op': 'add'}],
                                   headers=headers, expect_errors=True)
        self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
        self.assertFalse(mock_save.called)

    def test_update_by_name_old_api_version(self, mock_save):
        name = 'CUSTOM_DT2'
        response = self.patch_json('/deploy_templates/%s' % self.template.name,
                                   [{'path': '/name', 'value': name,
                                     'op': 'add'}],
                                   expect_errors=True)
        self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
        self.assertFalse(mock_save.called)

    def test_update_not_found(self, mock_save):
        name = 'CUSTOM_DT2'
        uuid = uuidutils.generate_uuid()
        response = self.patch_json('/deploy_templates/%s' % uuid,
                                   [{'path': '/name', 'value': name,
                                     'op': 'add'}],
                                   expect_errors=True, headers=self.headers)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)
        self.assertTrue(response.json['error_message'])
        self.assertFalse(mock_save.called)

    @mock.patch.object(notification_utils, '_emit_api_notification',
                       autospec=True)
    def test_replace_name_already_exist(self, mock_notify, mock_save):
        # A name collision surfaces as 409 CONFLICT and an ERROR notification.
        name = 'CUSTOM_DT2'
        obj_utils.create_test_deploy_template(self.context,
                                              uuid=uuidutils.generate_uuid(),
                                              name=name)
        mock_save.side_effect = exception.DeployTemplateAlreadyExists(
            uuid=self.template.uuid)
        response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
                                   [{'path': '/name', 'value': name,
                                     'op': 'replace'}],
                                   expect_errors=True, headers=self.headers)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.CONFLICT, response.status_code)
        self.assertTrue(response.json['error_message'])
        mock_save.assert_called_once_with(mock.ANY)
        mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'update',
                                      obj_fields.NotificationLevel.INFO,
                                      obj_fields.NotificationStatus.START),
                                      mock.call(mock.ANY, mock.ANY, 'update',
                                      obj_fields.NotificationLevel.ERROR,
                                      obj_fields.NotificationStatus.ERROR)])

    def test_replace_invalid_name_too_long(self, mock_save):
        name = 'CUSTOM_' + 'X' * 249
        patch = [{'path': '/name', 'op': 'replace', 'value': name}]
        self._test_update_bad_request(
            mock_save, patch, "'%s' is too long" % name)

    def test_replace_invalid_name_not_a_trait(self, mock_save):
        name = 'not-a-trait'
        patch = [{'path': '/name', 'op': 'replace', 'value': name}]
        self._test_update_bad_request(
            mock_save, patch,
            "'not-a-trait' does not match '^CUSTOM_[A-Z0-9_]+$'")

    def test_replace_invalid_name_none(self, mock_save):
        patch = [{'path': '/name', 'op': 'replace', 'value': None}]
        self._test_update_bad_request(
            mock_save, patch, "None is not of type 'string'")

    def test_replace_duplicate_step(self, mock_save):
        # interface & step combination must be unique.
        steps = [
            {
                'interface': 'raid',
                'step': 'create_configuration',
                'args': {'foo': '%d' % i},
                'priority': i,
            }
            for i in range(2)
        ]
        patch = [{'path': '/steps', 'op': 'replace', 'value': steps}]
        self._test_update_bad_request(
            mock_save, patch, "Duplicate deploy steps")

    def test_replace_invalid_step_interface_fail(self, mock_save):
        step = {
            'interface': 'foo',
            'step': 'apply_configuration',
            'args': {'foo': 'bar'},
            'priority': 42
        }
        patch = [{'path': '/steps/0', 'op': 'replace', 'value': step}]
        self._test_update_bad_request(
            mock_save, patch, "'foo' is not one of")

    def test_replace_non_existent_step_fail(self, mock_save):
        step = {
            'interface': 'bios',
            'step': 'apply_configuration',
            'args': {'foo': 'bar'},
            'priority': 42
        }
        patch = [{'path': '/steps/1', 'op': 'replace', 'value': step}]
        self._test_update_bad_request(mock_save, patch)

    def test_replace_empty_step_list_fail(self, mock_save):
        # A template must always retain at least one step.
        patch = [{'path': '/steps', 'op': 'replace', 'value': []}]
        self._test_update_bad_request(
            mock_save, patch, '[] is too short')

    def _test_remove_not_allowed(self, mock_save, field, error_msg=None):
        """Removing a top-level field must be rejected with 400."""
        patch = [{'path': '/%s' % field, 'op': 'remove'}]
        self._test_update_bad_request(mock_save, patch, error_msg)

    def test_remove_uuid(self, mock_save):
        self._test_remove_not_allowed(
            mock_save, 'uuid', "Cannot patch /uuid")

    def test_remove_name(self, mock_save):
        self._test_remove_not_allowed(
            mock_save, 'name', "'name' is a required property")

    def test_remove_steps(self, mock_save):
        self._test_remove_not_allowed(
            mock_save, 'steps', "'steps' is a required property")

    def test_remove_foo(self, mock_save):
        self._test_remove_not_allowed(mock_save, 'foo')

    def test_replace_step_invalid_interface(self, mock_save):
        patch = [{'path': '/steps/0/interface', 'op': 'replace',
                  'value': 'foo'}]
        self._test_update_bad_request(
            mock_save, patch, "'foo' is not one of")

    def test_replace_multi(self, mock_save):
        steps = [
            {
                'interface': 'raid',
                'step': 'create_configuration%d' % i,
                'args': {},
                'priority': 10,
            }
            for i in range(3)
        ]
        template = obj_utils.create_test_deploy_template(
            self.context, uuid=uuidutils.generate_uuid(),
            name='CUSTOM_DT2', steps=steps)
        # mutate steps so we replace all of them
        for step in steps:
            step['priority'] = step['priority'] + 1
        patch = []
        for i, step in enumerate(steps):
            patch.append({'path': '/steps/%s' % i,
                          'value': step,
                          'op': 'replace'})
        response = self.patch_json('/deploy_templates/%s' % template.uuid,
                                   patch, headers=self.headers)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        self.assertEqual(steps, response.json['steps'])
        mock_save.assert_called_once_with(mock.ANY)

    def test_remove_multi(self, mock_save):
        steps = [
            {
                'interface': 'raid',
                'step': 'create_configuration%d' % i,
                'args': {},
                'priority': 10,
            }
            for i in range(3)
        ]
        template = obj_utils.create_test_deploy_template(
            self.context, uuid=uuidutils.generate_uuid(),
            name='CUSTOM_DT2', steps=steps)
        # Removing one step from the collection
        steps.pop(1)
        response = self.patch_json('/deploy_templates/%s' % template.uuid,
                                   [{'path': '/steps/1', 'op': 'remove'}],
                                   headers=self.headers)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        self.assertEqual(steps, response.json['steps'])
        mock_save.assert_called_once_with(mock.ANY)

    def test_remove_non_existent_property_fail(self, mock_save):
        patch = [{'path': '/non-existent', 'op': 'remove'}]
        self._test_update_bad_request(mock_save, patch)

    def test_remove_non_existent_step_fail(self, mock_save):
        patch = [{'path': '/steps/1', 'op': 'remove'}]
        self._test_update_bad_request(mock_save, patch)

    def test_remove_only_step_fail(self, mock_save):
        patch = [{'path': '/steps/0', 'op': 'remove'}]
        self._test_update_bad_request(
            mock_save, patch, "[] is too short")

    def test_remove_non_existent_step_property_fail(self, mock_save):
        patch = [{'path': '/steps/0/non-existent', 'op': 'remove'}]
        self._test_update_bad_request(mock_save, patch)

    def test_add_root_non_existent(self, mock_save):
        patch = [{'path': '/foo', 'value': 'bar', 'op': 'add'}]
        self._test_update_bad_request(
            mock_save, patch, "Cannot patch /foo")

    def test_add_too_high_index_step_fail(self, mock_save):
        step = {
            'interface': 'bios',
            'step': 'apply_configuration',
            'args': {'foo': 'bar'},
            'priority': 42
        }
        patch = [{'path': '/steps/2', 'op': 'add', 'value': step}]
        self._test_update_bad_request(mock_save, patch)

    def test_add_multi(self, mock_save):
        # New steps are inserted before the pre-existing step, which ends up
        # last in the resulting list.
        steps = [
            {
                'interface': 'raid',
                'step': 'create_configuration%d' % i,
                'args': {},
                'priority': 10,
            }
            for i in range(3)
        ]
        patch = []
        for i, step in enumerate(steps):
            patch.append({'path': '/steps/%d' % i,
                          'value': step,
                          'op': 'add'})
        response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
                                   patch, headers=self.headers)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        self.assertEqual(steps, response.json['steps'][:-1])
        self.assertEqual(_obj_to_api_step(self.template.steps[0]),
                         response.json['steps'][-1])
        mock_save.assert_called_once_with(mock.ANY)


class TestPost(BaseDeployTemplatesAPITest):
    """Tests for POST /v1/deploy_templates (resource creation)."""

    @mock.patch.object(notification_utils, '_emit_api_notification',
                       autospec=True)
    @mock.patch.object(timeutils, 'utcnow', autospec=True)
    def test_create(self, mock_utcnow, mock_notify):
        tdict = test_api_utils.post_get_test_deploy_template()
        test_time = datetime.datetime(2000, 1, 1, 0, 0)
        mock_utcnow.return_value = test_time
        response = self.post_json('/deploy_templates', tdict,
                                  headers=self.headers)
        self.assertEqual(http_client.CREATED, response.status_int)
        result = self.get_json('/deploy_templates/%s' % tdict['uuid'],
                               headers=self.headers)
        self.assertEqual(tdict['uuid'], result['uuid'])
        self.assertFalse(result['updated_at'])
        return_created_at = timeutils.parse_isotime(
            result['created_at']).replace(tzinfo=None)
        self.assertEqual(test_time, return_created_at)
        # Check location header
        self.assertIsNotNone(response.location)
        expected_location = '/v1/deploy_templates/%s' % tdict['uuid']
        self.assertEqual(expected_location,
                         urlparse.urlparse(response.location).path)
        mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'create',
                                      obj_fields.NotificationLevel.INFO,
                                      obj_fields.NotificationStatus.START),
                                      mock.call(mock.ANY, mock.ANY, 'create',
                                      obj_fields.NotificationLevel.INFO,
                                      obj_fields.NotificationStatus.END)])

    def test_create_invalid_api_version(self):
        tdict = test_api_utils.post_get_test_deploy_template()
        response = self.post_json(
            '/deploy_templates', tdict,
            headers=self.invalid_version_headers, expect_errors=True)
        self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)

    def test_create_doesnt_contain_id(self):
        # The database layer must never receive the internal 'id' column.
        with mock.patch.object(
                self.dbapi, 'create_deploy_template',
                wraps=self.dbapi.create_deploy_template) as mock_create:
            tdict = test_api_utils.post_get_test_deploy_template()
            self.post_json('/deploy_templates', tdict, headers=self.headers)
            self.get_json('/deploy_templates/%s' % tdict['uuid'],
                          headers=self.headers)
            mock_create.assert_called_once_with(mock.ANY)
            # Check that 'id' is not in first arg of positional args
            self.assertNotIn('id', mock_create.call_args[0][0])

    @mock.patch.object(notification_utils.LOG, 'exception', autospec=True)
    @mock.patch.object(notification_utils.LOG, 'warning', autospec=True)
    def test_create_generate_uuid(self, mock_warn, mock_except):
        # When no uuid is supplied the server generates one, without logging
        # warnings or exceptions.
        tdict = test_api_utils.post_get_test_deploy_template()
        del tdict['uuid']
        response = self.post_json('/deploy_templates', tdict,
                                  headers=self.headers)
        result = self.get_json('/deploy_templates/%s' % response.json['uuid'],
                               headers=self.headers)
        self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
        self.assertFalse(mock_warn.called)
        self.assertFalse(mock_except.called)

    @mock.patch.object(notification_utils, '_emit_api_notification',
                       autospec=True)
    @mock.patch.object(objects.DeployTemplate, 'create', autospec=True)
    def test_create_error(self, mock_create, mock_notify):
        # A failing create() emits start then ERROR notifications.
        mock_create.side_effect = Exception()
        tdict = test_api_utils.post_get_test_deploy_template()
        self.post_json('/deploy_templates', tdict, headers=self.headers,
                       expect_errors=True)
        mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'create',
                                      obj_fields.NotificationLevel.INFO,
                                      obj_fields.NotificationStatus.START),
                                      mock.call(mock.ANY, mock.ANY, 'create',
                                      obj_fields.NotificationLevel.ERROR,
                                      obj_fields.NotificationStatus.ERROR)])

    def _test_create_ok(self, tdict):
        """POST the body and assert 201 CREATED."""
        response = self.post_json('/deploy_templates', tdict,
                                  headers=self.headers)
        self.assertEqual(http_client.CREATED, response.status_int)

    def _test_create_bad_request(self, tdict, error_msg):
        """POST the body and assert a 400 whose message contains error_msg."""
        response = self.post_json('/deploy_templates', tdict,
                                  expect_errors=True, headers=self.headers)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])
        self.assertIn(error_msg, response.json['error_message'])

    def test_create_long_name(self):
        # 255 characters (CUSTOM_ + 248) is the longest accepted name.
        name = 'CUSTOM_' + 'X' * 248
        tdict = test_api_utils.post_get_test_deploy_template(name=name)
        self._test_create_ok(tdict)

    def test_create_standard_trait_name(self):
        name = 'HW_CPU_X86_VMX'
        tdict = test_api_utils.post_get_test_deploy_template(name=name)
        self._test_create_ok(tdict)

    def test_create_name_invalid_too_long(self):
        # One character over the limit of test_create_long_name.
        name = 'CUSTOM_' + 'X' * 249
        tdict = test_api_utils.post_get_test_deploy_template(name=name)
        self._test_create_bad_request(
            tdict, "'%s' is too long" % name)

    def test_create_name_invalid_not_a_trait(self):
        name = 'not-a-trait'
        tdict = test_api_utils.post_get_test_deploy_template(name=name)
        self._test_create_bad_request(
            tdict, "'not-a-trait' does not match '^CUSTOM_[A-Z0-9_]+$'")

    def test_create_steps_invalid_duplicate(self):
        steps = [
            {
                'interface': 'raid',
                'step': 'create_configuration',
                'args': {'foo': '%d' % i},
                'priority': i,
            }
            for i in range(2)
        ]
        tdict = test_api_utils.post_get_test_deploy_template(steps=steps)
        self._test_create_bad_request(tdict, "Duplicate deploy steps")

    def _test_create_no_mandatory_field(self, field):
        """Omitting a required top-level field must yield a 400."""
        tdict = test_api_utils.post_get_test_deploy_template()
        del tdict[field]
        self._test_create_bad_request(tdict, "is a required property")

    def test_create_no_mandatory_field_name(self):
        self._test_create_no_mandatory_field('name')

    def test_create_no_mandatory_field_steps(self):
        self._test_create_no_mandatory_field('steps')

    def _test_create_no_mandatory_step_field(self, field):
        """Omitting a required field of the first step must yield a 400."""
        tdict = test_api_utils.post_get_test_deploy_template()
        del tdict['steps'][0][field]
        self._test_create_bad_request(tdict, "is a required property")

    def test_create_no_mandatory_step_field_interface(self):
        self._test_create_no_mandatory_step_field('interface')

    def test_create_no_mandatory_step_field_step(self):
        self._test_create_no_mandatory_step_field('step')

    def test_create_no_mandatory_step_field_args(self):
        self._test_create_no_mandatory_step_field('args')

    def test_create_no_mandatory_step_field_priority(self):
        self._test_create_no_mandatory_step_field('priority')

    def _test_create_invalid_field(self, field, value, error_msg):
        """Setting a top-level field to an invalid value must yield a 400."""
        tdict = test_api_utils.post_get_test_deploy_template()
        tdict[field] = value
        self._test_create_bad_request(tdict, error_msg)

    def test_create_invalid_field_name(self):
        self._test_create_invalid_field(
            'name', 42, "42 is not of type 'string'")

    def test_create_invalid_field_name_none(self):
        self._test_create_invalid_field(
            'name', None, "None is not of type 'string'")

    def test_create_invalid_field_steps(self):
        self._test_create_invalid_field(
            'steps', {}, "{} is not of type 'array'")

    def test_create_invalid_field_empty_steps(self):
        self._test_create_invalid_field(
            'steps', [], "[] is too short")

    def test_create_invalid_field_extra(self):
        self._test_create_invalid_field(
            'extra', 42, "42 is not of type 'object'")

    def test_create_invalid_field_foo(self):
        self._test_create_invalid_field(
            'foo', 'bar',
            "Additional properties are not allowed ('foo' was unexpected)")

    def _test_create_invalid_step_field(self, field, value, error_msg=None):
        """Setting a step field to an invalid value must yield a 400."""
        tdict = test_api_utils.post_get_test_deploy_template()
        tdict['steps'][0][field] = value
        if error_msg is None:
            error_msg = "Deploy template invalid: "
        self._test_create_bad_request(tdict, error_msg)

    def test_create_invalid_step_field_interface1(self):
        self._test_create_invalid_step_field(
            'interface', [3], "[3] is not of type 'string'")

    def test_create_invalid_step_field_interface2(self):
        self._test_create_invalid_step_field(
            'interface', 'foo', "'foo' is not one of")

    def test_create_invalid_step_field_step(self):
        self._test_create_invalid_step_field(
            'step', 42, "42 is not of type 'string'")

    def test_create_invalid_step_field_args1(self):
        self._test_create_invalid_step_field(
            'args', 'not a dict', "'not a dict' is not of type 'object'")

    def test_create_invalid_step_field_args2(self):
        self._test_create_invalid_step_field(
            'args', [], "[] is not of type 'object'")

    def test_create_invalid_step_field_priority(self):
        self._test_create_invalid_step_field(
            'priority', 'not a number',
            "'not a number' is not of type 'integer'")

    def test_create_invalid_step_field_negative_priority(self):
        self._test_create_invalid_step_field(
            'priority', -1, "-1 is less than the minimum of 0")

    def test_create_invalid_step_field_foo(self):
        self._test_create_invalid_step_field(
            'foo', 'bar',
            "Additional properties are not allowed ('foo' was unexpected)")

    def test_create_step_string_priority(self):
        # String priorities that parse as integers are accepted.
        tdict = test_api_utils.post_get_test_deploy_template()
        tdict['steps'][0]['priority'] = '42'
        self._test_create_ok(tdict)

    def test_create_complex_step_args(self):
        # Nested structures are valid step arguments.
        tdict = test_api_utils.post_get_test_deploy_template()
        tdict['steps'][0]['args'] = {'foo': [{'bar': 'baz'}]}
        self._test_create_ok(tdict)


@mock.patch.object(objects.DeployTemplate, 'destroy', autospec=True)
class TestDelete(BaseDeployTemplatesAPITest):
    """Tests for DELETE /v1/deploy_templates/<ident>.

    DeployTemplate.destroy is mocked for the whole class, so every test
    method receives a trailing mock argument.
    """

    def setUp(self):
        super(TestDelete, self).setUp()
        self.template = obj_utils.create_test_deploy_template(self.context)

    @mock.patch.object(notification_utils, '_emit_api_notification',
                       autospec=True)
    def test_delete_by_uuid(self, mock_notify, mock_destroy):
        self.delete('/deploy_templates/%s' % self.template.uuid,
                    headers=self.headers)
        mock_destroy.assert_called_once_with(mock.ANY)
        mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'delete',
                                      obj_fields.NotificationLevel.INFO,
                                      obj_fields.NotificationStatus.START),
                                      mock.call(mock.ANY, mock.ANY, 'delete',
                                      obj_fields.NotificationLevel.INFO,
                                      obj_fields.NotificationStatus.END)])

    def test_delete_by_uuid_with_json(self, mock_destroy):
        self.delete('/deploy_templates/%s.json' % self.template.uuid,
                    headers=self.headers)
        mock_destroy.assert_called_once_with(mock.ANY)

    def test_delete_by_name(self, mock_destroy):
        self.delete('/deploy_templates/%s' % self.template.name,
                    headers=self.headers)
        mock_destroy.assert_called_once_with(mock.ANY)

    def test_delete_by_name_with_json(self, mock_destroy):
        self.delete('/deploy_templates/%s.json' % self.template.name,
                    headers=self.headers)
        mock_destroy.assert_called_once_with(mock.ANY)

    def test_delete_invalid_api_version(self, mock_dpt):
        response = self.delete('/deploy_templates/%s' % self.template.uuid,
                               expect_errors=True,
                               headers=self.invalid_version_headers)
        self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)

    def test_delete_old_api_version(self, mock_dpt):
        # Names like CUSTOM_1 were not valid in API 1.1, but the check should
        # go after the microversion check.
        response = self.delete('/deploy_templates/%s' % self.template.name,
                               expect_errors=True)
        self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)

    def test_delete_by_name_non_existent(self, mock_dpt):
        res = self.delete('/deploy_templates/%s' % 'blah', expect_errors=True,
                          headers=self.headers)
        self.assertEqual(http_client.NOT_FOUND, res.status_code)
20,508
6,283
<gh_stars>1000+ #pragma once #include <i3ipc++/ipc.hpp> #include "common.hpp" #include "x11/extensions/randr.hpp" POLYBAR_NS class connection; namespace i3_util { using connection_t = i3ipc::connection; using workspace_t = i3ipc::workspace_t; const auto ws_numsort = [](shared_ptr<workspace_t> a, shared_ptr<workspace_t> b) { return a->num < b->num; }; vector<shared_ptr<workspace_t>> workspaces(const connection_t& conn, const string& output = "", const bool show_urgent = false); shared_ptr<workspace_t> focused_workspace(const connection_t&); vector<xcb_window_t> root_windows(connection& conn, const string& output_name = ""); bool restack_to_root(connection& conn, const xcb_window_t win); } namespace { inline bool operator==(i3_util::workspace_t& a, i3_util::workspace_t& b) { return a.num == b.num && a.output == b.output; } inline bool operator!=(i3_util::workspace_t& a, i3_util::workspace_t& b) { return !(a == b); } } POLYBAR_NS_END
389
686
<gh_stars>100-1000 package hello; import io.micronaut.http.annotation.Controller; import io.micronaut.http.annotation.Get; import io.reactivex.Single; @Controller("/") public class HelloController { @Get("/") public Single<String> greet() { return Single.just("Hello World!"); } }
113
2,027
def exponential_backoff(attempts, scaling_factor, base):
    """Compute the time at which the next retry should happen.

    The raw backoff grows as ``5 * (2**attempts - 1)``; it is then multiplied
    by ``scaling_factor``, divided by 10 (true division, so the result is a
    float for integer inputs), and offset by ``base``.

    :param attempts: number of attempts made so far.
    :param scaling_factor: multiplier applied to the raw backoff value.
    :param base: offset added to the scaled delay (e.g. a timestamp).
    :return: the computed retry time.
    """
    doubling_term = pow(2, attempts) - 1
    scaled_backoff = 5 * doubling_term * scaling_factor
    return scaled_backoff / 10 + base
90
57,637
<gh_stars>1000+ [ { "id": "author-1", "name": "Novikov", "firstName": "Mikhail", "email": "<EMAIL>", "picture": "../images/gatsby-astronaut.png" }, { "id": "author-2", "name": "Probst", "firstName": "Stefan", "email": "<EMAIL>", "picture": "../images/gatsby-astronaut.png" } ]
162