max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
868
<gh_stars>100-1000 /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.server; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.Inet6Address; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.URL; import java.net.URLConnection; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.activemq.artemis.logs.ActiveMQUtilLogger; import org.apache.activemq.artemis.utils.ActiveMQThreadFactory; import org.apache.activemq.artemis.utils.Env; import org.apache.activemq.artemis.utils.collections.ConcurrentHashSet; import org.jboss.logging.Logger; /** * This will use {@link InetAddress#isReachable(int)} to determine if the network is alive. * It will have a set of addresses, and if any address is reached the network will be considered alive. 
*/ public class NetworkHealthCheck extends ActiveMQScheduledComponent { private static final Logger logger = Logger.getLogger(NetworkHealthCheck.class); private final Set<ActiveMQComponent> componentList = new ConcurrentHashSet<>(); private final Set<String> addresses = new ConcurrentHashSet<>(); private final Set<URL> urls = new ConcurrentHashSet<>(); private NetworkInterface networkInterface; public static final String IPV6_DEFAULT_COMMAND = "ping6 -c 1 %2$s"; public static final String IPV4_DEFAULT_COMMAND = Env.isMacOs() ? "ping -c 1 -t %d %s" : "ping -c 1 -w %d %s"; private String ipv4Command = IPV4_DEFAULT_COMMAND; private String ipv6Command = IPV6_DEFAULT_COMMAND; // To be used on tests. As we use the loopback as a valid address on tests. private boolean ignoreLoopback = false; private boolean ownShutdown = false; /** * The timeout to be used on isReachable */ private int networkTimeout; public NetworkHealthCheck() { this(null, 1000, 1000); } public NetworkHealthCheck(String nicName, long checkPeriod, int networkTimeout) { super(null, null, checkPeriod, TimeUnit.MILLISECONDS, false); this.networkTimeout = networkTimeout; this.setNICName(nicName); } public NetworkHealthCheck setNICName(String nicName) { NetworkInterface netToUse; try { if (nicName != null) { netToUse = NetworkInterface.getByName(nicName); } else { netToUse = null; } } catch (Exception e) { ActiveMQUtilLogger.LOGGER.failedToSetNIC(e, nicName); netToUse = null; } this.networkInterface = netToUse; return this; } public boolean isIgnoreLoopback() { return ignoreLoopback; } public NetworkHealthCheck setIgnoreLoopback(boolean ignoreLoopback) { this.ignoreLoopback = ignoreLoopback; return this; } public Set<String> getAddresses() { return addresses; } public Set<URL> getUrls() { return urls; } public String getNICName() { if (networkInterface != null) { return networkInterface.getName(); } else { return null; } } public NetworkHealthCheck parseAddressList(String addressList) { if (addressList != 
null) { String[] addresses = addressList.split(","); for (String address : addresses) { if (!address.trim().isEmpty()) { try { String strAddress = address.trim(); this.addAddress(strAddress); } catch (Exception e) { ActiveMQUtilLogger.LOGGER.failedToParseAddressList(e, addressList); } } } } return this; } public NetworkHealthCheck parseURIList(String addressList) { if (addressList != null) { String[] addresses = addressList.split(","); for (String address : addresses) { if (!address.trim().isEmpty()) { try { this.addURL(new URL(address.trim())); } catch (Exception e) { ActiveMQUtilLogger.LOGGER.failedToParseUrlList(e, addressList); } } } } return this; } @Override protected ActiveMQThreadFactory getThreadFactory() { return new ActiveMQThreadFactory("NetworkChecker", "Network-Checker-", false, getThisClassLoader()); } private ClassLoader getThisClassLoader() { return AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() { @Override public ClassLoader run() { return NetworkHealthCheck.this.getClass().getClassLoader(); } }); } public int getNetworkTimeout() { return networkTimeout; } @Override public synchronized NetworkHealthCheck setPeriod(long period) { super.setPeriod(period); return this; } @Override public synchronized NetworkHealthCheck setTimeUnit(TimeUnit timeUnit) { super.setTimeUnit(timeUnit); return this; } public NetworkHealthCheck setNetworkTimeout(int networkTimeout) { this.networkTimeout = networkTimeout; return this; } public NetworkHealthCheck addComponent(ActiveMQComponent component) { componentList.add(component); checkStart(); return this; } public NetworkHealthCheck clearComponents() { componentList.clear(); return this; } public NetworkHealthCheck addAddress(String straddress) { InetAddress address = internalCheck(straddress); if (address == null) { ActiveMQUtilLogger.LOGGER.addressWasntReacheable(straddress); } if (!ignoreLoopback && address != null && address.isLoopbackAddress()) { 
ActiveMQUtilLogger.LOGGER.addressloopback(straddress); } else { addresses.add(straddress); checkStart(); } return this; } public NetworkHealthCheck removeAddress(String straddress) { addresses.remove(straddress); return this; } public NetworkHealthCheck clearAddresses() { addresses.clear(); return this; } public NetworkHealthCheck addURL(URL url) { if (!check(url)) { ActiveMQUtilLogger.LOGGER.urlWasntReacheable(url.toString()); } urls.add(url); checkStart(); return this; } public NetworkHealthCheck removeURL(URL url) { urls.remove(url); return this; } public NetworkHealthCheck clearURL() { urls.clear(); return this; } public String getIpv4Command() { return ipv4Command; } public NetworkHealthCheck setIpv4Command(String ipv4Command) { this.ipv4Command = ipv4Command; return this; } public String getIpv6Command() { return ipv6Command; } public NetworkHealthCheck setIpv6Command(String ipv6Command) { this.ipv6Command = ipv6Command; return this; } private void checkStart() { if (!isStarted() && (!addresses.isEmpty() || !urls.isEmpty()) && !componentList.isEmpty()) { try { this.run(); // run the first check immediately, this is to immediately shutdown the server if there's no net } finally { start(); } } } @Override public void run() { boolean healthy = check(); if (healthy) { for (ActiveMQComponent component : componentList) { if (!component.isStarted() && ownShutdown) { try { ActiveMQUtilLogger.LOGGER.startingService(component.toString()); component.start(); } catch (Exception e) { ActiveMQUtilLogger.LOGGER.errorStartingComponent(e, component.toString()); } } ownShutdown = false; } } else { for (ActiveMQComponent component : componentList) { if (component.isStarted()) { ownShutdown = true; try { ActiveMQUtilLogger.LOGGER.stoppingService(component.toString()); component.stop(); } catch (Exception e) { ActiveMQUtilLogger.LOGGER.errorStoppingComponent(e, component.toString()); } } } } } /** * @return true if no checks were done or if one address/url responds; false if all 
addresses/urls fail */ public boolean check() { if (isEmpty()) { return true; } for (String address : addresses) { if (check(address)) { return true; } } for (URL url : urls) { if (check(url)) { return true; } } return false; } public boolean check(String straddress) { if (straddress == null) { return false; } return internalCheck(straddress) != null; } private InetAddress internalCheck(String straddress) { try { InetAddress address = InetAddress.getByName(straddress); address = InetAddress.getByName(address.getHostName()); if (check(address)) { return address; } else { return null; } } catch (Exception e) { ActiveMQUtilLogger.LOGGER.failedToCheckAddress(e, straddress); return null; } } public boolean check(InetAddress address) throws IOException, InterruptedException { if (!hasCustomPingCommand() && isReachable(address)) { if (logger.isTraceEnabled()) { logger.tracef(address + " OK"); } return true; } else { return purePing(address); } } protected boolean isReachable(InetAddress address) throws IOException { return address.isReachable(networkInterface, 0, networkTimeout); } public boolean purePing(InetAddress address) throws IOException, InterruptedException { long timeout = Math.max(1, TimeUnit.MILLISECONDS.toSeconds(networkTimeout)); // it did not work with a simple isReachable, it could be because there's no root access, so we will try ping executable if (logger.isTraceEnabled()) { logger.trace("purePing on canonical address " + address.getCanonicalHostName()); } ProcessBuilder processBuilder; if (address instanceof Inet6Address) { processBuilder = buildProcess(ipv6Command, timeout, address.getCanonicalHostName()); } else { processBuilder = buildProcess(ipv4Command, timeout, address.getCanonicalHostName()); } Process pingProcess = processBuilder.start(); readStream(pingProcess.getInputStream(), false); readStream(pingProcess.getErrorStream(), true); return pingProcess.waitFor() == 0; } private ProcessBuilder buildProcess(String expressionCommand, long timeout, 
String host) { String command = String.format(expressionCommand, timeout, host); if (logger.isDebugEnabled()) { logger.debug("executing ping:: " + command); } ProcessBuilder builder = new ProcessBuilder(command.split(" ")); return builder; } private void readStream(InputStream stream, boolean error) throws IOException { BufferedReader reader = new BufferedReader(new InputStreamReader(stream)); String inputLine; while ((inputLine = reader.readLine()) != null) { if (error) { ActiveMQUtilLogger.LOGGER.failedToReadFromStream(inputLine); } else { logger.debug(inputLine); } } reader.close(); } public boolean check(URL url) { if (url == null) { return false; } try { URLConnection connection = url.openConnection(); connection.setReadTimeout(networkTimeout); InputStream is = connection.getInputStream(); is.close(); return true; } catch (Exception e) { ActiveMQUtilLogger.LOGGER.failedToCheckURL(e, url.toString()); return false; } } public boolean isEmpty() { return addresses.isEmpty() && urls.isEmpty(); } public boolean hasCustomPingCommand() { return !getIpv4Command().equals(IPV4_DEFAULT_COMMAND) || !getIpv6Command().equals(IPV6_DEFAULT_COMMAND); } }
5,134
1,707
<filename>code/Modules/Gfx/private/resourceBase.cc<gh_stars>1000+ //------------------------------------------------------------------------------ // resourceBase.cc //------------------------------------------------------------------------------ #include "Pre.h" #include "resourceBase.h" namespace Oryol { namespace _priv { //------------------------------------------------------------------------------ void shaderBase::Clear() { this->Setup = ShaderSetup(); } //------------------------------------------------------------------------------ void textureBase::Clear() { this->Setup = TextureSetup(); this->textureAttrs = TextureAttrs(); } //------------------------------------------------------------------------------ void meshBase::Clear() { this->Setup = MeshSetup(); this->vertexBufferAttrs = VertexBufferAttrs(); this->indexBufferAttrs = IndexBufferAttrs(); this->primGroups.Fill(PrimitiveGroup()); this->numPrimGroups = 0; } //------------------------------------------------------------------------------ void pipelineBase::Clear() { this->Setup = PipelineSetup(); this->shd = nullptr; } //------------------------------------------------------------------------------ void renderPassBase::Clear() { this->Setup = PassSetup(); this->colorTextures.Fill(nullptr); this->depthStencilTexture = nullptr; } } // namespace _priv } // namespace Oryol
344
453
<gh_stars>100-1000 #define INPUT_SIZE 8 #define VALUE_SIZE 256 extern void histogram(int in[INPUT_SIZE], int hist[VALUE_SIZE]);
48
3,227
/*! \ingroup PkgAlgebraicFoundationsAlgebraicStructuresConcepts \cgalConcept A model of `FractionTraits` is associated with a type `Type`. In case the associated type is a `Fraction`, a model of `FractionTraits` provides the relevant functionality for decomposing and re-composing as well as the numerator and denominator type. \cgalHasModel `CGAL::Fraction_traits<T>` \sa `FractionTraits_::Decompose` \sa `FractionTraits_::Compose` \sa `FractionTraits_::CommonFactor` */ class FractionTraits { public: /// \name Types /// @{ /*! The associated type */ typedef unspecified_type Type; /*! Tag indicating whether the associated type is a fraction and can be decomposed into a numerator and denominator. This is either `CGAL::Tag_true` or `CGAL::Tag_false`. */ typedef unspecified_type Is_fraction; /*! The type to represent the numerator. This is undefined in case the associated type is not a fraction. */ typedef unspecified_type Numerator_type ; /*! The (simpler) type to represent the denominator. This is undefined in case the associated type is not a fraction. */ typedef unspecified_type Denominator_type; /// @} /// \name Functors /// In case `Type` is not a `Fraction` all functors are `Null_functor`. /// @{ /*! A model of FractionTraits_::Compose. */ typedef unspecified_type Compose; /*! A model of FractionTraits_::Decompose. */ typedef unspecified_type Decompose; /*! A model of FractionTraits_::CommonFactor. */ typedef unspecified_type Common_factor; /// @} }; /* end FractionTraits */ namespace FractionTraits_ { /*! \ingroup PkgAlgebraicFoundationsAlgebraicStructuresConcepts \cgalConcept Functor decomposing a `Fraction` into its numerator and denominator. \sa `Fraction` \sa `FractionTraits` \sa `FractionTraits_::Compose` \sa `FractionTraits_::CommonFactor` */ class Decompose { public: /// \name Operations /// @{ /*! decompose \f$ f\f$ into numerator \f$ n\f$ and denominator \f$ d\f$. 
*/ void operator()( FractionTraits::Type f, FractionTraits::Numerator_type & n, FractionTraits::Denominator_type & d); /// @} }; /* end Decompose */ /*! \ingroup PkgAlgebraicFoundationsAlgebraicStructuresConcepts \cgalConcept `AdaptableBinaryFunction`, returns the fraction of its arguments. \cgalRefines `AdaptableBinaryFunction` \sa `Fraction` \sa `FractionTraits` \sa `FractionTraits_::Decompose` \sa `FractionTraits_::CommonFactor` */ class Compose { public: /// \name Types /// @{ /*! */ typedef FractionTraits::Type result_type; /*! */ typedef FractionTraits::Numerator_type first_argument_type; /*! */ typedef FractionTraits::Denominator_type second_argument_type; /// @} /// \name Operations /// @{ /*! return the fraction \f$ n/d\f$. */ result_type operator()(first_argument_type n, second_argument_type d); /// @} }; /* end Compose */ /*! \ingroup PkgAlgebraicFoundationsAlgebraicStructuresConcepts \cgalConcept `AdaptableBinaryFunction`, finds great common factor of denominators. This can be considered as a relaxed version of `AlgebraicStructureTraits_::Gcd`, this is needed because it is not guaranteed that `FractionTraits::Denominator_type` is a model of `UniqueFactorizationDomain`. \cgalRefines `AdaptableBinaryFunction` \sa `Fraction` \sa `FractionTraits` \sa `FractionTraits_::Decompose` \sa `FractionTraits_::Compose` \sa `AlgebraicStructureTraits_::Gcd` */ class CommonFactor { public: /// \name Types /// @{ /*! */ typedef FractionTraits::Denominator_type result_type; /*! */ typedef FractionTraits::Denominator_type first_argument_type; /*! */ typedef FractionTraits::Denominator_type second_argument_type; /// @} /// \name Operations /// @{ /*! return a great common factor of \f$ d1\f$ and \f$ d2\f$. Note: <TT>operator()(0,0) = 0</TT> */ result_type operator()(first_argument_type d1, second_argument_type d2); /// @} }; /* end CommonFactor */ } /* end of namespace FractionTraits_ */
1,388
675
/********************************************************************** Copyright (c) 2016 Advanced Micro Devices, Inc. All rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
********************************************************************/ #ifndef FATNODE_BVH_TRANSLATOR_H #define FATNODE_BVH_TRANSLATOR_H #include <map> #include "radeon_rays.h" #include "../accelerator/bvh.h" #include "math/matrix.h" #include "math/quaternion.h" #include "math/float3.h" #include "../util/perfect_hash_map.h" namespace RadeonRays { /// Fatnode translator transforms regular binary BVH into the form where: /// * Each node contains bounding boxes of its children /// * Both children follow parent node in the layout (breadth first) /// * No parent informantion is stored for the node => stacked traversal only /// class FatNodeBvhTranslator { public: struct Face { // Up to 3 indices int idx[3]; // Shape index int shapeidx; // Primitive ID within the mesh int id; }; // Constructor FatNodeBvhTranslator() = default; // Fat BVH node // Encoding: // xbound.pmin.w == -1.f if x-child is an internal node otherwise triangle index // struct Node { union { struct { // Node's bounding box bbox bounds[2]; }s0; struct { // If node is a leaf we keep vertex indices here int i0, i1, i2; // Address of a left child int child0; // Shape ID int shape_id; // Primitive ID int prim_id; // Address of a right child int child1; }s1; }; Node() : s0() { } }; //void Flush(); void Process(Bvh& bvh); void InjectIndices(Face const* faces); //void Process(Bvh const** bvhs, int const* offsets, int numbvhs); //void UpdateTopLevel(Bvh const& bvh); std::vector<Node> nodes_; std::vector<int> extra_; std::vector<int> roots_; std::vector<int> indices_; std::vector<int> addresses_; int nodecnt_ = 0; int root_ = 0; std::unique_ptr<PerfectHashMap<int, int>> m_hash_map; int max_idx_; private: int ProcessRootNode(Bvh::Node const* node); //int ProcessNode(Bvh::Node const* n, int offset); FatNodeBvhTranslator(FatNodeBvhTranslator const&) = delete; FatNodeBvhTranslator& operator =(FatNodeBvhTranslator const&) = delete; }; } #endif // PLAIN_BVH_TRANSLATOR_H
1,691
2,151
<gh_stars>1000+ // Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ui/views/message_center/popups_only_ui_delegate.h" #include <stddef.h> #include "base/macros.h" #include "base/strings/stringprintf.h" #include "base/strings/utf_string_conversions.h" #include "build/build_config.h" #include "content/public/test/test_utils.h" #include "ui/message_center/message_center.h" #include "ui/message_center/notification_list.h" #include "ui/message_center/public/cpp/message_center_constants.h" #include "ui/message_center/public/cpp/notification.h" #include "ui/message_center/public/cpp/notification_delegate.h" #include "ui/message_center/public/cpp/notification_types.h" #include "ui/message_center/ui_controller.h" #include "ui/message_center/views/message_popup_collection.h" #include "ui/views/controls/label.h" #include "ui/views/layout/fill_layout.h" #include "ui/views/test/widget_test.h" #include "ui/views/view.h" #include "ui/views/widget/widget.h" using message_center::MessageCenter; using message_center::Notification; using message_center::NotifierId; namespace { class PopupsOnlyUiDelegateTest : public views::test::WidgetTest { public: PopupsOnlyUiDelegateTest() {} ~PopupsOnlyUiDelegateTest() override {} void SetUp() override { views::test::WidgetTest::SetUp(); test_views_delegate()->set_use_desktop_native_widgets(true); MessageCenter::Initialize(); } void TearDown() override { MessageCenter::Get()->RemoveAllNotifications( false, MessageCenter::RemoveType::ALL); for (views::Widget* widget : GetAllWidgets()) widget->CloseNow(); MessageCenter::Shutdown(); views::test::WidgetTest::TearDown(); } protected: void AddNotification(const std::string& id) { auto notification = std::make_unique<Notification>( message_center::NOTIFICATION_TYPE_SIMPLE, id, base::ASCIIToUTF16("Test Web Notification"), base::ASCIIToUTF16("Notification message 
body."), gfx::Image(), base::ASCIIToUTF16("Some Chrome extension"), GURL("chrome-extension://abbccedd"), NotifierId(NotifierId::APPLICATION, id), message_center::RichNotificationData(), nullptr); MessageCenter::Get()->AddNotification(std::move(notification)); } void UpdateNotification(const std::string& id) { auto notification = std::make_unique<Notification>( message_center::NOTIFICATION_TYPE_SIMPLE, id, base::ASCIIToUTF16("Updated Test Web Notification"), base::ASCIIToUTF16("Notification message body."), gfx::Image(), base::ASCIIToUTF16("Some Chrome extension"), GURL("chrome-extension://abbccedd"), NotifierId(NotifierId::APPLICATION, id), message_center::RichNotificationData(), nullptr); MessageCenter::Get()->UpdateNotification(id, std::move(notification)); } void RemoveNotification(const std::string& id) { MessageCenter::Get()->RemoveNotification(id, false); } bool HasNotification(const std::string& id) { return !!MessageCenter::Get()->FindVisibleNotificationById(id); } private: DISALLOW_COPY_AND_ASSIGN(PopupsOnlyUiDelegateTest); }; TEST_F(PopupsOnlyUiDelegateTest, WebNotificationPopupBubble) { auto delegate = std::make_unique<PopupsOnlyUiDelegate>(); // Adding a notification should show the popup bubble. AddNotification("id1"); EXPECT_TRUE(delegate->GetUiControllerForTesting()->popups_visible()); // Updating a notification should not hide the popup bubble. AddNotification("id2"); UpdateNotification("id2"); EXPECT_TRUE(delegate->GetUiControllerForTesting()->popups_visible()); // Removing the first notification should not hide the popup bubble. RemoveNotification("id1"); EXPECT_TRUE(delegate->GetUiControllerForTesting()->popups_visible()); // Removing the visible notification should hide the popup bubble. 
RemoveNotification("id2"); EXPECT_FALSE(delegate->GetUiControllerForTesting()->popups_visible()); delegate->HidePopups(); } TEST_F(PopupsOnlyUiDelegateTest, ManyPopupNotifications) { auto delegate = std::make_unique<PopupsOnlyUiDelegate>(); // Add the max visible popup notifications +1, ensure the correct num visible. size_t notifications_to_add = message_center::kMaxVisiblePopupNotifications + 1; for (size_t i = 0; i < notifications_to_add; ++i) { std::string id = base::StringPrintf("id%d", static_cast<int>(i)); AddNotification(id); } EXPECT_TRUE(delegate->GetUiControllerForTesting()->popups_visible()); MessageCenter* message_center = delegate->message_center(); EXPECT_EQ(notifications_to_add, message_center->NotificationCount()); message_center::NotificationList::PopupNotifications popups = message_center->GetPopupNotifications(); EXPECT_EQ(message_center::kMaxVisiblePopupNotifications, popups.size()); } } // namespace
1,696
14,668
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef IOS_CHROME_BROWSER_UI_INFOBARS_BANNERS_INFOBAR_BANNER_ACCESSIBILITY_UTIL_H_ #define IOS_CHROME_BROWSER_UI_INFOBARS_BANNERS_INFOBAR_BANNER_ACCESSIBILITY_UTIL_H_ #import <UIKit/UIKit.h> // Updates the accessibility of the presenting view controller so that VoiceOver // users have the ability to select other elements while the banner is // presented. This should be called after the banner's presentation is // finished. |presenting_view_controller| and |banner_view| must not be nil. void UpdateBannerAccessibilityForPresentation( UIViewController* presenting_view_controller, UIView* banner_view); // Removes the banner view from |presenting_view_controller|'s accessibility // elements. This should be called after the banner's dismissal is finished. // |presenting_view_controller| must not be nil. void UpdateBannerAccessibilityForDismissal( UIViewController* presenting_view_controller); #endif // IOS_CHROME_BROWSER_UI_INFOBARS_BANNERS_INFOBAR_BANNER_ACCESSIBILITY_UTIL_H_
370
887
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.geekbang.thinking.in.spring.annotation; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.ComponentScan; import org.springframework.stereotype.Component; /** * {@link Component} 扫描示例 * * @author <a href="mailto:<EMAIL>">Mercy</a> * @see Component * @see ComponentScan * @since */ // basePackages() @AliasFor value() // value() @AliasFor basePackages() @MyComponentScan2(basePackages = "org.geekbang.thinking.in.spring.annotation") // 指定 Class-Path(s) //@ComponentScan(value = "org.geekbang.thinking.in.spring.annotation") // 指定 Class-Path(s) public class ComponentScanDemo { public static void main(String[] args) { AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); // 注册 Configuration Class context.register(ComponentScanDemo.class); // 启动 Spring 应用上下文 context.refresh(); // 依赖查找 TestClass Bean // TestClass 标注 @MyComponent2 // @MyComponent2 <- @MyComponent <- @Component // 从 Spring 4.0 开始支持多层次 @Component "派生" TestClass testClass = context.getBean(TestClass.class); // Annotation -> AnnotationAttributes(Map) System.out.println(testClass); // 关闭 Spring 应用上下文 context.close(); } }
742
457
package denominator; import java.io.Closeable; import java.io.IOException; import javax.inject.Inject; /** * represents the connection between a {@link DNSApi} interface and the {@link Provider} that * implements it. */ public class DNSApiManager implements Closeable { private final Provider provider; private final DNSApi api; private final CheckConnection checkConnection; private final Closeable closer; @Inject DNSApiManager(Provider provider, DNSApi api, CheckConnection checkConnection, Closeable closer) { this.provider = provider; this.api = api; this.checkConnection = checkConnection; this.closer = closer; } /** * the currently configured {@link DNSApi} */ public DNSApi api() { return api; } /** * Get the provider associated with this instance */ public Provider provider() { return provider; } /** * Returns true, if api commands are likely to succeed. * * @see CheckConnection */ public boolean checkConnection() { return checkConnection.ok(); } /** * closes resources associated with the connections, such as thread pools or open files. */ @Override public void close() throws IOException { closer.close(); } @Override public String toString() { return provider.toString(); } }
399
1,826
package com.vladsch.flexmark.ext.definition.internal; import com.vladsch.flexmark.ext.definition.DefinitionExtension; import com.vladsch.flexmark.parser.Parser; import com.vladsch.flexmark.parser.ParserEmulationProfile; import com.vladsch.flexmark.util.data.DataHolder; @SuppressWarnings("WeakerAccess") class DefinitionOptions { final public int markerSpaces; final public boolean tildeMarker; final public boolean colonMarker; final public ParserEmulationProfile myParserEmulationProfile; final public boolean autoLoose; final public boolean autoLooseOneLevelLists; final public boolean looseOnPrevLooseItem; final public boolean looseWhenHasLooseSubItem; final public boolean looseWhenHasTrailingBlankLine; final public boolean looseWhenBlankFollowsItemParagraph; final public boolean doubleBlankLineBreaksList; final public int codeIndent; final public int itemIndent; final public int newItemCodeIndent; public DefinitionOptions(DataHolder options) { markerSpaces = DefinitionExtension.MARKER_SPACES.get(options); tildeMarker = DefinitionExtension.TILDE_MARKER.get(options); colonMarker = DefinitionExtension.COLON_MARKER.get(options); myParserEmulationProfile = Parser.PARSER_EMULATION_PROFILE.get(options); autoLoose = Parser.LISTS_AUTO_LOOSE.get(options); autoLooseOneLevelLists = Parser.LISTS_AUTO_LOOSE_ONE_LEVEL_LISTS.get(options); looseOnPrevLooseItem = Parser.LISTS_LOOSE_WHEN_PREV_HAS_TRAILING_BLANK_LINE.get(options); looseWhenBlankFollowsItemParagraph = Parser.LISTS_LOOSE_WHEN_BLANK_LINE_FOLLOWS_ITEM_PARAGRAPH.get(options); looseWhenHasLooseSubItem = Parser.LISTS_LOOSE_WHEN_HAS_LOOSE_SUB_ITEM.get(options); looseWhenHasTrailingBlankLine = Parser.LISTS_LOOSE_WHEN_HAS_TRAILING_BLANK_LINE.get(options); codeIndent = Parser.LISTS_CODE_INDENT.get(options); itemIndent = Parser.LISTS_ITEM_INDENT.get(options); newItemCodeIndent = Parser.LISTS_NEW_ITEM_CODE_INDENT.get(options); doubleBlankLineBreaksList = DefinitionExtension.DOUBLE_BLANK_LINE_BREAKS_LIST.get(options); } }
828
2,023
""" This modules provides a lightweight API to access Excel data. There are many ways to read Excel data, including ODBC. This module uses ADODB and has the advantage of only requiring a file name and a sheet name (no setup required). """ import win32com.client class ExcelDocument(object): """ Represents an opened Excel document. """ def __init__(self,filename): self.connection = win32com.client.Dispatch('ADODB.Connection') self.connection.Open( 'PROVIDER=Microsoft.Jet.OLEDB.4.0;'+ 'DATA SOURCE=%s'%filename+ ';Extended Properties="Excel 8.0;HDR=1;IMEX=1"' ) def sheets(self): """ Returns a list of the name of the sheets found in the document. """ result = [] recordset = self.connection.OpenSchema(20) while not recordset.EOF: result.append(recordset.Fields[2].Value) recordset.MoveNext() recordset.Close() del recordset return result def sheet(self,name,encoding=None,order_by=None): """ Returns a sheet object by name. Use sheets() to obtain a list of valid names. encoding is a character encoding name which is used to encode the unicode strings returned by Excel, so that you get plain Python strings. """ return ExcelSheet(self,name,encoding,order_by) def __del__(self): self.close() def close(self): """ Closes the Excel document. It is automatically called when the object is deleted. """ try: self.connection.Close() del self.connection except: pass def strip(value): """ Strip the input value if it is a string and returns None if it had only whitespaces """ if isinstance(value,basestring): value = value.strip() if len(value)==0: return None return value class ExcelSheet(object): """ Represents an Excel sheet from a document, gives methods to obtain column names and iterate on its content. 
""" def __init__(self,document,name,encoding,order_by): self.document = document self.name = name self.order_by = order_by if encoding: def encoder(value): if isinstance(value,unicode): value = value.strip() if len(value)==0: return None else: return value.encode(encoding) elif isinstance(value,str): value = value.strip() if len(value)==0: return None else: return value else: return value self.encoding = encoder else: self.encoding = strip def columns(self): """ Returns a list of column names for the sheet. """ recordset = win32com.client.Dispatch('ADODB.Recordset') recordset.Open(u'SELECT * FROM [%s]'%self.name,self.document.connection,0,1) try: return [self.encoding(field.Name) for field in recordset.Fields] finally: recordset.Close() del recordset def __iter__(self): """ Returns a paged iterator by default. See paged(). """ return self.paged() def naive(self): """ Returns an iterator on the data contained in the sheet. Each row is returned as a dictionary with row headers as keys. """ # SLOW algorithm ! A lot of COM calls are performed. recordset = win32com.client.Dispatch('ADODB.Recordset') if self.order_by: recordset.Open(u'SELECT * FROM [%s] ORDER BY %s'%(self.name,self.order_by),self.document.connection,0,1) else: recordset.Open(u'SELECT * FROM [%s]'%self.name,self.document.connection,0,1) try: while not recordset.EOF: source = {} for field in recordset.Fields: source[self.encoding(field.Name)] = self.encoding(field.Value) yield source recordset.MoveNext() recordset.Close() del recordset except: # cannot use "finally" here because Python doesn't want # a "yield" statement inside a "try...finally" block. recordset.Close() del recordset raise def paged(self,pagesize=128): """ Returns an iterator on the data contained in the sheet. Each row is returned as a dictionary with row headers as keys. pagesize is the size of the buffer of rows ; it is an implementation detail but could have an impact on the speed of the iterator. 
Use pagesize=-1 to buffer the whole sheet in memory. """ # FAST algorithm ! It is about 10x faster than the naive algorithm # thanks to the use of GetRows, which dramatically decreases the number # of COM calls. recordset = win32com.client.Dispatch('ADODB.Recordset') if self.order_by: recordset.Open(u'SELECT * FROM [%s] ORDER BY %s'%(self.name,self.order_by),self.document.connection,0,1) else: recordset.Open(u'SELECT * FROM [%s]'%self.name,self.document.connection,0,1) try: fields = [self.encoding(field.Name) for field in recordset.Fields] ok = True while ok: # Thanks to <NAME> for the transposing tip rows = zip(*recordset.GetRows(pagesize)) if recordset.EOF: # close the recordset as soon as possible recordset.Close() recordset = None ok = False for row in rows: yield dict(zip(fields, map(self.encoding,row))) except: if recordset is not None: recordset.Close() del recordset raise
2,912
9,106
// Copyright (c) Microsoft Corporation. // Licensed under the MIT License. #pragma once #define NOMINMAX #include <windows.h> #include <winrt/Microsoft.ReactNative.h> #include <winrt/Windows.Data.Json.h> #include <winrt/Windows.Foundation.Collections.h> #include <winrt/Windows.Foundation.h> #include <winrt/Windows.Storage.Streams.h>
132
1,781
package com.marshalchen.common.uimodule.passcodelock; import android.app.Activity; import android.app.Application; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.PreferenceManager; import android.util.Base64; import java.util.Arrays; import java.util.Date; import javax.crypto.Cipher; import javax.crypto.SecretKey; import javax.crypto.SecretKeyFactory; import javax.crypto.spec.DESKeySpec; public class DefaultAppLock extends AbstractAppLock { private Application currentApp; //Keep a reference to the app that invoked the locker private SharedPreferences settings; private Date lostFocusDate; //Add back-compatibility private static final String OLD_PASSWORD_SALT = "<PASSWORD>"; private static final String OLD_APP_LOCK_PASSWORD_PREF_KEY = "wp_app_lock_password_key"; private static final String PASSWORD_PREFERENCE_KEY="passcode_lock_prefs_password_key"; private static final String PASSWORD_SALT="<PASSWORD>!"; private static final String PASSWORD_ENC_SECRET="5-maggio-2002-Karel-Poborsky"; public DefaultAppLock(Application currentApp) { super(); SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(currentApp); this.settings = settings; this.currentApp = currentApp; } public void enable(){ if (android.os.Build.VERSION.SDK_INT < 14) return; if( isPasswordLocked() ) { currentApp.unregisterActivityLifecycleCallbacks(this); currentApp.registerActivityLifecycleCallbacks(this); } } public void disable( ){ if (android.os.Build.VERSION.SDK_INT < 14) return; currentApp.unregisterActivityLifecycleCallbacks(this); } public void forcePasswordLock(){ lostFocusDate = null; } public boolean verifyPassword( String password ){ String storedPassword = ""; if (settings.contains(OLD_APP_LOCK_PASSWORD_PREF_KEY)) { //add back-compatibility //Check if the old value is available storedPassword = settings.getString(OLD_APP_LOCK_PASSWORD_PREF_KEY, ""); password = OLD_PASSWORD_SALT + password + 
OLD_PASSWORD_<PASSWORD>; password = StringUtils.get<PASSWORD>(password); } else if (settings.contains(PASSWORD_PREFERENCE_KEY)) { //read the password from the new key storedPassword = settings.getString(PASSWORD_PREFERENCE_KEY, ""); storedPassword = decryptPassword(storedPassword); password = PASSWORD_<PASSWORD>T + password + PASSWORD_<PASSWORD>; } if( password.equalsIgnoreCase(storedPassword) ) { lostFocusDate = new Date(); return true; } else { return false; } } public boolean setPassword(String password){ SharedPreferences.Editor editor = settings.edit(); if(password == null) { editor.remove(OLD_APP_LOCK_PASSWORD_PREF_KEY); editor.remove(PASSWORD_PREFERENCE_KEY); editor.commit(); this.disable(); } else { password = <PASSWORD> + password + <PASSWORD>; password = <PASSWORD>(password); editor.putString(PASSWORD_PREFERENCE_KEY, password); editor.remove(OLD_APP_LOCK_PASSWORD_PREF_KEY); editor.commit(); this.enable(); } return true; } //Check if we need to show the lock screen at startup public boolean isPasswordLocked(){ if (settings.contains(OLD_APP_LOCK_PASSWORD_PREF_KEY)) //Check if the old value is available return true; if (settings.contains(PASSWORD_PREFERENCE_KEY)) return true; return false; } private String encryptPassword(String clearText) { try { DESKeySpec keySpec = new DESKeySpec( PASSWORD_ENC_SECRET.getBytes("UTF-8")); SecretKeyFactory keyFactory = SecretKeyFactory.getInstance("DES"); SecretKey key = keyFactory.generateSecret(keySpec); Cipher cipher = Cipher.getInstance("DES"); cipher.init(Cipher.ENCRYPT_MODE, key); String encrypedPwd = Base64.encodeToString(cipher.doFinal(clearText .getBytes("UTF-8")), Base64.DEFAULT); return encrypedPwd; } catch (Exception e) { } return clearText; } private String decryptPassword(String encryptedPwd) { try { DESKeySpec keySpec = new DESKeySpec(PASSWORD_ENC_SECRET.getBytes("UTF-8")); SecretKeyFactory keyFactory = SecretKeyFactory.getInstance("DES"); SecretKey key = keyFactory.generateSecret(keySpec); byte[] 
encryptedWithoutB64 = Base64.decode(encryptedPwd, Base64.DEFAULT); Cipher cipher = Cipher.getInstance("DES"); cipher.init(Cipher.DECRYPT_MODE, key); byte[] plainTextPwdBytes = cipher.doFinal(encryptedWithoutB64); return new String(plainTextPwdBytes); } catch (Exception e) { } return encryptedPwd; } private boolean mustShowUnlockSceen() { if( isPasswordLocked() == false) return false; if( lostFocusDate == null ) return true; //first startup or when we forced to show the password int currentTimeOut = lockTimeOut; //get a reference to the current password timeout and reset it to default lockTimeOut = DEFAULT_TIMEOUT; Date now = new Date(); long now_ms = now.getTime(); long lost_focus_ms = lostFocusDate.getTime(); int secondsPassed = (int) (now_ms - lost_focus_ms)/(1000); secondsPassed = Math.abs(secondsPassed); //Make sure changing the clock on the device to a time in the past doesn't by-pass PIN Lock if (secondsPassed >= currentTimeOut) { lostFocusDate = null; return true; } return false; } @Override public void onActivityPaused(Activity arg0) { if( arg0.getClass() == PasscodeUnlockActivity.class ) return; if( ( this.appLockDisabledActivities != null ) && Arrays.asList(this.appLockDisabledActivities).contains( arg0.getClass().getName() ) ) return; lostFocusDate = new Date(); } @Override public void onActivityResumed(Activity arg0) { if( arg0.getClass() == PasscodeUnlockActivity.class ) return; if( ( this.appLockDisabledActivities != null ) && Arrays.asList(this.appLockDisabledActivities).contains( arg0.getClass().getName() ) ) return; if(mustShowUnlockSceen()) { //uhhh ohhh! 
Intent i = new Intent(arg0.getApplicationContext(), PasscodeUnlockActivity.class); i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); arg0.getApplication().startActivity(i); return; } } @Override public void onActivityCreated(Activity arg0, Bundle arg1) { } @Override public void onActivityDestroyed(Activity arg0) { } @Override public void onActivitySaveInstanceState(Activity arg0, Bundle arg1) { } @Override public void onActivityStarted(Activity arg0) { } @Override public void onActivityStopped(Activity arg0) { } }
3,017
315
#include <iostream> #include <assert.h> using namespace std; int gcdExtended(int a, int b, int* x, int* y) { // Base Condition (Special Case) if (a == 0) { *x = 0; *y = 1; return b; } // Call the function recursively int x1, y1; int gcd = gcdExtended(b % a, a, &x1, &y1); // Update x1 and y1 using results of recursive call *x = y1 - (b / a) * x1; *y = x1; return gcd; } int main() { int x, y, a = 60, b = 15; int g = gcdExtended(a, b, &x, &y); assert(g == 15); // Test the function cout << "gcd(" << a << ", " << b << ") = " << g << endl; return 0; }
315
493
/** * Copyright (C) 2016 Turi * All rights reserved. * * This software may be modified and distributed under the terms * of the BSD license. See the LICENSE file for details. */ #ifndef PROCESS_UTIL_HPP #define PROCESS_UTIL_HPP #include<string> #include<boost/optional.hpp> namespace graphlab { size_t get_parent_pid(); size_t get_my_pid(); void wait_for_parent_exit(size_t parent_pid); /* * Returns true if process is running */ bool is_process_running(size_t pid); /* * Returns the environment variable's value * * Note: on windows, the length of the return * value is limited to 65534. */ boost::optional<std::string> getenv_str(const char* variable_name); } // namespace graphlab #endif // PROCESS_UTIL_HPP
240
648
{"resourceType":"DataElement","id":"Goal.author","meta":{"lastUpdated":"2015-10-24T07:41:03.495+11:00"},"url":"http://hl7.org/fhir/DataElement/Goal.author","status":"draft","experimental":true,"stringency":"fully-specified","element":[{"path":"Goal.author","short":"Who's responsible for creating Goal?","definition":"Indicates whose goal this is - patient goal, practitioner goal, etc.","comments":"This is the individual reponsible for establishing the goal, not necessarily who recorded it. (For that, use the Provenance resource.).","min":0,"max":"1","type":[{"code":"Reference","profile":["http://hl7.org/fhir/StructureDefinition/Patient"]},{"code":"Reference","profile":["http://hl7.org/fhir/StructureDefinition/Practitioner"]},{"code":"Reference","profile":["http://hl7.org/fhir/StructureDefinition/RelatedPerson"]}],"isSummary":true,"mapping":[{"identity":"w5","map":"who.actor"}]}]}
249
9,734
<reponame>timkpaine/arrow<gh_stars>1000+ // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #include "arrow/util/formatting.h" #include "arrow/util/config.h" #include "arrow/util/double_conversion.h" #include "arrow/util/logging.h" namespace arrow { using util::double_conversion::DoubleToStringConverter; static constexpr int kMinBufferSize = DoubleToStringConverter::kBase10MaximalLength + 1; namespace internal { namespace detail { const char digit_pairs[] = "0001020304050607080910111213141516171819" "2021222324252627282930313233343536373839" "4041424344454647484950515253545556575859" "6061626364656667686970717273747576777879" "8081828384858687888990919293949596979899"; } // namespace detail struct FloatToStringFormatter::Impl { Impl() : converter_(DoubleToStringConverter::EMIT_POSITIVE_EXPONENT_SIGN, "inf", "nan", 'e', -6, 10, 6, 0) {} Impl(int flags, const char* inf_symbol, const char* nan_symbol, char exp_character, int decimal_in_shortest_low, int decimal_in_shortest_high, int max_leading_padding_zeroes_in_precision_mode, int max_trailing_padding_zeroes_in_precision_mode) : converter_(flags, inf_symbol, nan_symbol, exp_character, decimal_in_shortest_low, decimal_in_shortest_high, max_leading_padding_zeroes_in_precision_mode, 
max_trailing_padding_zeroes_in_precision_mode) {} DoubleToStringConverter converter_; }; FloatToStringFormatter::FloatToStringFormatter() : impl_(new Impl()) {} FloatToStringFormatter::FloatToStringFormatter( int flags, const char* inf_symbol, const char* nan_symbol, char exp_character, int decimal_in_shortest_low, int decimal_in_shortest_high, int max_leading_padding_zeroes_in_precision_mode, int max_trailing_padding_zeroes_in_precision_mode) : impl_(new Impl(flags, inf_symbol, nan_symbol, exp_character, decimal_in_shortest_low, decimal_in_shortest_high, max_leading_padding_zeroes_in_precision_mode, max_trailing_padding_zeroes_in_precision_mode)) {} FloatToStringFormatter::~FloatToStringFormatter() {} int FloatToStringFormatter::FormatFloat(float v, char* out_buffer, int out_size) { DCHECK_GE(out_size, kMinBufferSize); // StringBuilder checks bounds in debug mode for us util::double_conversion::StringBuilder builder(out_buffer, out_size); bool result = impl_->converter_.ToShortestSingle(v, &builder); DCHECK(result); ARROW_UNUSED(result); return builder.position(); } int FloatToStringFormatter::FormatFloat(double v, char* out_buffer, int out_size) { DCHECK_GE(out_size, kMinBufferSize); util::double_conversion::StringBuilder builder(out_buffer, out_size); bool result = impl_->converter_.ToShortest(v, &builder); DCHECK(result); ARROW_UNUSED(result); return builder.position(); } } // namespace internal } // namespace arrow
1,314
3,976
<reponame>saurabh896/python-1<gh_stars>1000+ #!/usr/bin/python # coding=utf-8 """ 纯文本文件 numbers.txt, 里面的内容(包括方括号)如下所示: 请将上述内容写到 numbers.xls 文件中,如下图所示: """ import os import json import xlwt def read_txt(path): with open(path, 'r') as f: text = f.read().decode('utf-8') text_json = json.loads(text) return text_json def save_into_excel(content_dict, excel_name): wb = xlwt.Workbook() ws = wb.add_sheet("numbers", cell_overwrite_ok=True) row = 0 col = 0 for i in content_dict: for k in i: ws.write(row, col, k) col += 1 row += 1 col = 0 wb.save(excel_name) if __name__ == "__main__": read_content = read_txt(os.path.join(os.path.split(__file__)[0], 'numbers.txt')) save_into_excel(read_content, 'numbers.xls')
465
812
<filename>src/edu/stanford/nlp/sempre/tables/serialize/TableReader.java package edu.stanford.nlp.sempre.tables.serialize; import java.io.*; import java.util.*; import au.com.bytecode.opencsv.CSVReader; import edu.stanford.nlp.sempre.tables.StringNormalizationUtils; import fig.basic.LogInfo; /** * Read a table in either CSV or TSV format. * * For CSV, this class is just a wrapper for OpenCSV. * Escape sequences for CSV: * - \\ => \ * - \" or "" => " * Each cell can be quoted inside "...". Embed newlines must be quoted. * * For TSV, each line must represent one table row (no embed newlines). * Escape sequences for TSV (custom): * - \n => [newline] * - \\ => \ * - \p => | * * @author ppasupat */ public class TableReader implements Closeable, Iterable<String[]> { enum DataType { CSV, TSV, UNKNOWN } CSVReader csvReader = null; List<String[]> tsvData = null; public TableReader(String filename) throws IOException { switch (guessDataType(filename)) { case CSV: csvReader = new CSVReader(new FileReader(filename)); break; case TSV: parseTSV(filename); break; default: throw new RuntimeException("Unknown data type for " + filename); } } private DataType guessDataType(String filename) { if (filename.endsWith(".csv")) return DataType.CSV; else if (filename.endsWith(".tsv")) return DataType.TSV; // Guess from the first line of the file try (BufferedReader reader = new BufferedReader(new FileReader(filename))) { String line = reader.readLine(); if (line.contains("\t")) return DataType.TSV; else if (line.contains(",") || line.startsWith("\"")) return DataType.CSV; } catch (IOException e) { throw new RuntimeException("Unknown data type for " + filename); } return DataType.UNKNOWN; } private void parseTSV(String filename) { try (BufferedReader reader = new BufferedReader(new FileReader(filename))) { String line; tsvData = new ArrayList<>(); while ((line = reader.readLine()) != null) { String[] fields = line.split("\t", -1); // Include trailing spaces for (int i = 0; i < fields.length; 
i++) fields[i] = StringNormalizationUtils.unescapeTSV(fields[i]); tsvData.add(fields); } } catch (IOException e) { throw new RuntimeException(e); } } @Override public Iterator<String[]> iterator() { if (csvReader != null) return csvReader.iterator(); else return tsvData.iterator(); } @Override public void close() throws IOException { if (csvReader != null) csvReader.close(); } // ============================================================ // Test // ============================================================ public static void main(String[] args) { String filename = "t/csv/200-csv/0.tsv"; LogInfo.logs("%s", filename); try (TableReader tableReader = new TableReader(filename)) { for (String[] x : tableReader) { LogInfo.begin_track("ROW"); for (String y : x) LogInfo.logs("|%s|", y); LogInfo.end_track(); } } catch (Exception e) { e.printStackTrace(); } } }
1,206
390
import re import execjs import requests js_code = """l=new Array(-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,62,-1,-1,-1,63,52,53,54,55,56,57,58,59,60,61,-1,-1,-1,-1,-1,-1,-1,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,-1,-1,-1,-1,-1,-1,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,-1,-1,-1,-1,-1);function u(t){var e,o,n,a,i,r,s;for(r=t.length,i=0,s="";i<r;){for(;e=l[255&t.charCodeAt(i++)],i<r&&-1==e;);if(-1==e)break;for(;o=l[255&t.charCodeAt(i++)],i<r&&-1==o;);if(-1==o)break;s+=String.fromCharCode(e<<2|(48&o)>>4);do{if(61==(n=255&t.charCodeAt(i++)))return s;n=l[n]}while(i<r&&-1==n);if(-1==n)break;s+=String.fromCharCode((15&o)<<4|(60&n)>>2);do{if(61==(a=255&t.charCodeAt(i++)))return s;a=l[a]}while(i<r&&-1==a);if(-1==a)break;s+=String.fromCharCode((3&n)<<6|a)}return s}""" js = execjs.compile(js_code) def get(url: str) -> dict: """ videos """ data = {} headers = { "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36" } rep = requests.get(url, headers=headers, timeout=10) if rep.status_code != 200: return {"msg": "获取失败"} enc_video_url = re.findall(r"video_url: '(.*?)',", rep.text)[0] video_url = "https:" + js.call("u", (enc_video_url,)) data["videos"] = [video_url] return data if __name__ == "__main__": print(get(input("url: ")))
833
1,333
<reponame>a6401040/Moss<filename>moss-client/moss-client-common/src/main/java/org/xujin/moss/client/endpoint/dependency/central/DefaultMavenSearchBuilder.java package org.xujin.moss.client.endpoint.dependency.central; import java.io.InputStream; import org.xujin.moss.client.endpoint.dependency.MavenSearchBuilder; import org.xujin.moss.client.endpoint.dependency.util.JsonMapper; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.extern.slf4j.Slf4j; @Slf4j public class DefaultMavenSearchBuilder implements MavenSearchBuilder { @Override public String getPomUrl(InputStream is, String endsWith) throws Exception { ObjectMapper objectMapper = JsonMapper.defaultMapper().getMapper(); SearchResult results = objectMapper.readValue(is, SearchResult.class); if (results.getResponse() != null && results.getResponse().getDocs() != null && results.getResponse().getDocs().length > 0) { PomDoc pomInfo = results.getResponse().getDocs()[0]; String pomUrl = "https://search.maven.org/remotecontent?filepath=" + // pomInfo.getG().replace('.', '/') + "/" + // pomInfo.getA() + "/" + pomInfo.getV() + "/" + pomInfo.getA() + "-" + pomInfo.getV() + endsWith; log.debug(pomUrl); return pomUrl; } return null; } @Override public String getSearchUrl(String[] av) { String searchUrl = "http://search.maven.org/solrsearch/select?q=a:%22"; if (av.length > 2) { searchUrl += av[0] + "%22%20AND%20v:%22" + av[1] + "%22%20AND%20g:%22" + av[2] + "%22&rows=1&wt=json"; } else { searchUrl += av[0] + "%22%20AND%20v:%22" + av[1] + "%22&rows=1&wt=json"; } return searchUrl; } }
791
831
/* * Copyright (C) 2020 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.tools.idea.avdmanager; import com.android.repository.io.FileUtilKt; import com.android.sdklib.devices.Storage; import com.android.sdklib.devices.Storage.Unit; import com.android.sdklib.internal.avd.AvdInfo; import com.google.common.annotations.VisibleForTesting; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; import com.intellij.openapi.diagnostic.Logger; import com.intellij.ui.table.TableView; import com.intellij.util.concurrency.AppExecutorUtil; import com.intellij.util.concurrency.EdtExecutorService; import java.nio.file.Path; import java.nio.file.Paths; import java.util.concurrent.Executor; import java.util.function.Supplier; import javax.swing.table.AbstractTableModel; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** * A value in the Size on Disk column in the Android Virtual Device Manager table. The primary purpose of this class is to encapsulate the * calculation of the virtual device size on a background thread. The future will notify the table when the calculation is done. 
*/ final class SizeOnDisk implements Comparable<SizeOnDisk> { @VisibleForTesting static final int MODEL_COLUMN_INDEX = 7; /** * The actual string shown in the column */ @NotNull private String myString; private long myValue; @NotNull private final ListenableFuture<Long> myFuture; SizeOnDisk(@NotNull AvdInfo device, @NotNull TableView<AvdInfo> table) { this(device, table, () -> getSize(Paths.get(device.getDataFolderPath())), EdtExecutorService.getInstance()); } @NotNull private static ListenableFuture<Long> getSize(@NotNull Path path) { return MoreExecutors.listeningDecorator(AppExecutorUtil.getAppExecutorService()).submit(() -> FileUtilKt.recursiveSize(path)); } @VisibleForTesting SizeOnDisk(@NotNull AvdInfo device, @NotNull TableView<AvdInfo> table, @NotNull ListenableFuture<Long> future) { this(device, table, () -> future, MoreExecutors.directExecutor()); } private SizeOnDisk(@NotNull AvdInfo device, @NotNull TableView<AvdInfo> table, @NotNull Supplier<? extends ListenableFuture<Long>> futureSupplier, @NotNull Executor executor) { myString = "Calculating..."; myValue = -1; myFuture = futureSupplier.get(); addCallback(executor, new FutureCallback<Long>() { @Override public void onSuccess(@Nullable Long value) { assert value != null; Storage storage = new Storage(value); myString = SizeOnDisk.toString(storage); myValue = storage.getSize(); ((AbstractTableModel)table.getModel()).fireTableCellUpdated(table.getItems().indexOf(device), MODEL_COLUMN_INDEX); } @Override public void onFailure(@NotNull Throwable throwable) { Logger.getInstance(SizeOnDisk.class).warn(throwable); myString = "Failed to calculate"; myValue = Long.MAX_VALUE; ((AbstractTableModel)table.getModel()).fireTableCellUpdated(table.getItems().indexOf(device), MODEL_COLUMN_INDEX); } }); } private void addCallback(@NotNull Executor executor, @NotNull FutureCallback<Long> callback) { Futures.addCallback(myFuture, callback, executor); } @NotNull @VisibleForTesting static String toString(@NotNull Storage 
storage) { double value = storage.getPreciseSizeAsUnit(Unit.MiB); Object unitSymbol = "MB"; if (value >= 1024) { value = storage.getPreciseSizeAsUnit(Unit.GiB); unitSymbol = "GB"; } return String.format(value > 9.94 ? "%.0f %s" : "%.1f %s", value, unitSymbol); } @NotNull @Override public String toString() { return myString; } @Override public int compareTo(@NotNull SizeOnDisk sizeOnDisk) { return Long.compare(myValue, sizeOnDisk.myValue); } }
1,577
2,023
import inspect, _ast class Autoslots_meta(type): """ Looks for assignments in __init__ and creates a __slot__ variable for all the instance attributes in the assignment. Assumes that all assignments in __init__ are of the form: self.attr = <value> """ def __new__(cls, name, bases, dct): slots = dct.get('__slots__', []) orig_slots = [] for base in bases: if hasattr(base, "__slots__"): orig_slots += base.__slots__ if '__init__' in dct: init = dct['__init__'] initproc = type.__new__(cls, name, bases, dct) initproc_source = inspect.getsource(initproc) ast = compile(initproc_source, "dont_care", 'exec', _ast.PyCF_ONLY_AST) classdef = ast.body[0] stmts = classdef.body for declaration in stmts: if isinstance(declaration, _ast.FunctionDef): name1 = declaration.name if name1 == '__init__': # delete this line if you do not initialize all instance variables in __init__ initbody = declaration.body for statement in initbody: if isinstance(statement, _ast.Assign): for target in statement.targets: name1 = target.attr if name1 not in orig_slots: slots.append(name1) if slots: dct['__slots__'] = slots return type.__new__(cls, name, bases, dct) class Autoslots(object): __metaclass__ = Autoslots_meta class TestClass(Autoslots): def __init__(self): self.a = 1 self.b = 2 def t(self): pass
595
419
#include "Transform.h" //------------------------------------------------------------------------- namespace KRG { Transform const Transform::Identity = Transform( Quaternion( 0, 0, 0, 1 ), Vector( 0, 0, 0, 1 ), Vector( 1, 1, 1, 0 ) ); //------------------------------------------------------------------------- void Transform::SanitizeScaleValues() { float sx = m_scale.GetX(); float sy = m_scale.GetY(); float sz = m_scale.GetZ(); // Remove variance from values //------------------------------------------------------------------------- float averageScaleValue = sx + sy + sz; averageScaleValue /= 3.0f; float maxDeviation = Math::Max( Math::Abs( sx - averageScaleValue ), Math::Abs( sy - averageScaleValue ) ); maxDeviation = Math::Max( maxDeviation, Math::Abs( sz - averageScaleValue ) ); if ( maxDeviation < Math::LargeEpsilon ) { sx = sy = sz = averageScaleValue; } // If nearly 1 - set to exactly 1 //------------------------------------------------------------------------- if ( Math::IsNearEqual( sx, 1.0f, Math::LargeEpsilon ) ) { sx = 1.0f; } if ( Math::IsNearEqual( sy, 1.0f, Math::LargeEpsilon ) ) { sy = 1.0f; } if ( Math::IsNearEqual( sz, 1.0f, Math::LargeEpsilon ) ) { sz = 1.0f; } // Super rough rounding to 4 decimal places //------------------------------------------------------------------------- sx = Math::Round( sx * 1000 ) / 1000.0f; sy = Math::Round( sy * 1000 ) / 1000.0f; sz = Math::Round( sz * 1000 ) / 1000.0f; //------------------------------------------------------------------------- m_scale = Vector( sx, sy, sz, 0.0f ); } }
804
507
# terrascript/resource/Mongey/middesk.py # Automatically generated by tools/makecode.py (24-Sep-2021 15:21:39 UTC) import terrascript class middesk_webhook(terrascript.Resource): pass __all__ = [ "middesk_webhook", ]
90
1,338
/*
	Copyright (c) 2002, <NAME>

	Part of Radeon driver

	BusMemory Control registers
*/

#ifndef _MEMCNTRL_REGS_H
#define _MEMCNTRL_REGS_H

/* AGP aperture base address */
#define RADEON_AGP_BASE                     0x0170

/* Memory controller configuration (channel layout differs per ASIC family) */
#define RADEON_MEM_CNTL                     0x0140
#       define RADEON_MEM_NUM_CHANNELS_MASK 0x01
#       define RADEON_MEM_USE_B_CH_ONLY     (1<<1)
#       define RV100_HALF_MODE              (1<<3)
#       define R300_MEM_NUM_CHANNELS_MASK   0x03
#       define R300_MEM_USE_CD_CH_ONLY      (1<<2)

/* Memory-controller address ranges for AGP and local frame buffer */
#define RADEON_MC_AGP_LOCATION              0x014c
#define RADEON_MC_FB_LOCATION               0x0148

#define RADEON_MEM_INIT_LAT_TIMER           0x0154

/* SDRAM mode register; bit 30 distinguishes SDR from DDR memory */
#define RADEON_MEM_SDRAM_MODE_REG           0x0158
#       define RADEON_MEM_CFG_TYPE_MASK     (1 << 30)
#       define RADEON_MEM_CFG_SDR           (0 << 30)
#       define RADEON_MEM_CFG_DDR           (1 << 30)

#define RADEON_NB_TOM                       0x015c

/* Display surface base addresses (CRTC1, CRTC2, overlay) */
#define RADEON_DISPLAY_BASE_ADDRESS         0x023c
#define RADEON_CRTC2_DISPLAY_BASE_ADDRESS   0x033c
#define RADEON_OV0_BASE_ADDRESS             0x043c

/* Graphics FIFO/buffer control for the first display pipe */
#define RADEON_GRPH_BUFFER_CNTL             0x02f0
#       define RADEON_GRPH_START_REQ_MASK         (0x7f)
#       define RADEON_GRPH_START_REQ_SHIFT        0
#       define RADEON_GRPH_STOP_REQ_MASK          (0x7f<<8)
#       define RADEON_GRPH_STOP_REQ_SHIFT         8
#       define RADEON_GRPH_CRITICAL_POINT_MASK    (0x7f<<16)
#       define RADEON_GRPH_CRITICAL_POINT_SHIFT   16
#       define RADEON_GRPH_CRITICAL_CNTL          (1<<28)
#       define RADEON_GRPH_BUFFER_SIZE            (1<<29)
#       define RADEON_GRPH_CRITICAL_AT_SOF        (1<<30)
#       define RADEON_GRPH_STOP_CNTL              (1<<31)

/* Graphics FIFO/buffer control for the second display pipe (mirror of above) */
#define RADEON_GRPH2_BUFFER_CNTL            0x03f0
#       define RADEON_GRPH2_START_REQ_MASK        (0x7f)
#       define RADEON_GRPH2_START_REQ_SHIFT       0
#       define RADEON_GRPH2_STOP_REQ_MASK         (0x7f<<8)
#       define RADEON_GRPH2_STOP_REQ_SHIFT        8
#       define RADEON_GRPH2_CRITICAL_POINT_MASK   (0x7f<<16)
#       define RADEON_GRPH2_CRITICAL_POINT_SHIFT  16
#       define RADEON_GRPH2_CRITICAL_CNTL         (1<<28)
#       define RADEON_GRPH2_BUFFER_SIZE           (1<<29)
#       define RADEON_GRPH2_CRITICAL_AT_SOF       (1<<30)
#       define RADEON_GRPH2_STOP_CNTL             (1<<31)

#endif
1,460
362
package net.ripe.db.whois.update.handler.validator.personrole; import com.google.common.collect.ImmutableList; import net.ripe.db.whois.common.domain.CIString; import net.ripe.db.whois.common.rpsl.AttributeType; import net.ripe.db.whois.common.rpsl.ObjectType; import net.ripe.db.whois.common.rpsl.RpslAttribute; import net.ripe.db.whois.update.domain.Action; import net.ripe.db.whois.update.domain.PreparedUpdate; import net.ripe.db.whois.update.domain.UpdateContext; import net.ripe.db.whois.update.domain.UpdateMessages; import net.ripe.db.whois.update.handler.validator.BusinessRuleValidator; import org.springframework.stereotype.Component; import java.util.List; @Component public class SelfReferencePreventionValidator implements BusinessRuleValidator { private static final ImmutableList<Action> ACTIONS = ImmutableList.of(Action.CREATE, Action.MODIFY); private static final ImmutableList<ObjectType> TYPES = ImmutableList.of(ObjectType.ROLE); @Override public void validate(final PreparedUpdate update, final UpdateContext updateContext) { errorOnSelfReference(update, updateContext, AttributeType.ADMIN_C); errorOnSelfReference(update, updateContext, AttributeType.TECH_C); } private void errorOnSelfReference(final PreparedUpdate update, final UpdateContext updateContext, final AttributeType attributeType) { final List<RpslAttribute> submittedAttributes = update.getUpdate().getSubmittedObject().findAttributes(attributeType); final CIString submittedNicHdl = update.getUpdate().getSubmittedObject().getValueForAttribute(AttributeType.NIC_HDL); for (final RpslAttribute attribute : submittedAttributes) { if (attribute.getCleanValues().contains(submittedNicHdl)) { updateContext.addMessage(update, attribute, UpdateMessages.selfReferenceError(attributeType)); } } } @Override public ImmutableList<Action> getActions() { return ACTIONS; } @Override public ImmutableList<ObjectType> getTypes() { return TYPES; } }
712
338
package com.camnter.newlife.ui.activity.tabalphaview; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.view.ViewPager; import com.camnter.newlife.R; import com.camnter.newlife.core.activity.BaseAppCompatActivity; import com.camnter.newlife.ui.fragment.tabalphaindicatorfragment.TabAlphaFragment; import com.camnter.newlife.widget.alphaview.TabAlphaIndicator; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Description:TabAlphaViewActivity * Created by:CaMnter */ public class TabAlphaViewActivity extends BaseAppCompatActivity { /** * Fill in layout id * * @return layout id */ @Override protected int getLayoutId() { return R.layout.activity_tab_alpha_view; } /** * Initialize the view in the layout * * @param savedInstanceState savedInstanceState */ @Override protected void initViews(Bundle savedInstanceState) { final ViewPager viewPager = (ViewPager) this.findViewById(R.id.tab_alpha_view_pager); viewPager.setAdapter(new PagerAdapter(this.getSupportFragmentManager(), "微信", "发现", "我")); final TabAlphaIndicator tabAlphaIndicator = (TabAlphaIndicator) this.findViewById( R.id.tab_alpha_indicator); tabAlphaIndicator.setViewPager(viewPager); } /** * Initialize the View of the listener */ @Override protected void initListeners() { } /** * Initialize the Activity data */ @Override protected void initData() { } private class PagerAdapter extends FragmentPagerAdapter { private List<String> contentList = new ArrayList<>(); private List<Fragment> fragments = new ArrayList<>(); public PagerAdapter(FragmentManager fm, String... content) { super(fm); this.contentList.addAll(Arrays.asList(content)); // noinspection Convert2streamapi for (String element : this.contentList) { this.fragments.add(TabAlphaFragment.newInstance(element)); } } /** * Return the Fragment associated with a specified position. 
*/ @Override public Fragment getItem(int position) { return this.fragments.get(position); } /** * Return the number of views available. */ @Override public int getCount() { return this.fragments.size(); } } }
1,044
1,900
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ehcache.clustered.common.internal.messages;

/**
 * Builds {@link StateRepositoryOpMessage} instances that are all scoped to a
 * single (cache, map) pair, so callers do not repeat the identifiers on every
 * message.
 */
public class StateRepositoryMessageFactory {

  private final String cacheIdentifier;
  private final String mapIdentifier;

  /**
   * @param cacheId identifier of the cache the messages target
   * @param mapId   identifier of the state-repository map within that cache
   */
  public StateRepositoryMessageFactory(String cacheId, String mapId) {
    this.cacheIdentifier = cacheId;
    this.mapIdentifier = mapId;
  }

  /** Creates a GET message for the given key. */
  public StateRepositoryOpMessage getMessage(Object key) {
    return new StateRepositoryOpMessage.GetMessage(cacheIdentifier, mapIdentifier, key);
  }

  /** Creates a PUT_IF_ABSENT message for the given key/value pair. */
  public StateRepositoryOpMessage putIfAbsentMessage(Object key, Object value) {
    return new StateRepositoryOpMessage.PutIfAbsentMessage(cacheIdentifier, mapIdentifier, key, value);
  }

  /** Creates an ENTRY_SET message for the whole map. */
  public StateRepositoryOpMessage entrySetMessage() {
    return new StateRepositoryOpMessage.EntrySetMessage(cacheIdentifier, mapIdentifier);
  }

}
379
1,110
import copy

import tensorflow as tf

from onnx_tf.handlers.backend_handler import BackendHandler
from onnx_tf.handlers.handler import onnx_op
from onnx_tf.handlers.handler import tf_func
from .math_mixin import ReductionMixin
from onnx_tf.common.tf_helper import tf_shape


@onnx_op("ReduceSum")
@tf_func(tf.reduce_sum)
class ReduceSum(ReductionMixin, BackendHandler):
  """Backend handler mapping the ONNX ``ReduceSum`` op onto ``tf.reduce_sum``."""

  @classmethod
  def version_1(cls, node, **kwargs):
    # Opset 1: axes come in as a node attribute; shared ReductionMixin logic applies.
    return cls._common(node, **kwargs)

  @classmethod
  def version_11(cls, node, **kwargs):
    # Opset 11: same attribute-based axes handling as opset 1.
    return cls._common(node, **kwargs)

  @classmethod
  def version_13(cls, node, **kwargs):
    # Opset 13: axes moved from an attribute to an optional second *input*
    # tensor, and a `noop_with_empty_axes` attribute was added.
    x = kwargs["tensor_dict"][node.inputs[0]]
    attrs = copy.deepcopy(node.attrs)
    noop_with_empty_axes = attrs.pop("noop_with_empty_axes", 0)
    axis = None
    if len(node.inputs) > 1:
      axes = kwargs["tensor_dict"][node.inputs[1]]
      axes_shape = tf_shape(axes)
      if len(axes_shape) > 1:
        # Rank > 1 axes tensor: pass it through unchanged.
        axis = axes
      else:
        # 1-D axes: unwrap a single axis; an empty tensor means "no axes given".
        axis = axes[0] if axes_shape[0] != 0 else axis

    # return the input tensor when axis is None and noop_with_empty_axes is True
    if axis is None and noop_with_empty_axes:
      return [x]

    attrs["axis"] = axis
    # https://github.com/onnx/onnx/issues/585
    # ONNX keepdims is an int flag; tf.reduce_sum expects a bool (default True).
    attrs["keepdims"] = attrs.pop("keepdims", 1) == 1
    return [
        cls.make_tensor_from_onnx_node(node, inputs=[x], attrs=attrs, **kwargs)
    ]
589
1,738
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
// Original file Copyright Crytek GMBH or its affiliates, used under license.

#pragma once

////////////////////////////////////////////////////////////////////////////
//  Crytek Engine Source File.
//  (c) 2001 - 2013 Crytek GmbH
// -------------------------------------------------------------------------
//  File name:   BevelTool.h
//  Created:     Oct/7/2013 by Jaesik.
////////////////////////////////////////////////////////////////////////////

#include "Tools/Select/SelectTool.h"

// Designer tool that bevels the selected edges/vertices of a brush: spreads the
// selection outward, then builds the edge/side/apex polygons, with optional
// subdivision. The PP0_/PP1_/PP2_ prefixes mark the phase each helper runs in.
class BevelTool : public SelectTool
{
public:
    BevelTool(CD::EDesignerTool tool)
        : SelectTool(tool)
        , m_nMousePrevY(0)
        , m_fDelta(0)
    {
    }

    void OnLButtonDown(CViewport* view, UINT nFlags, const QPoint& point) override;
    void OnMouseMove(CViewport* view, UINT nFlags, const QPoint& point) override;
    bool OnKeyDown(CViewport* view, uint32 nKeycode, uint32 nRepCnt, uint32 nFlags) override;
    void Display(DisplayContext& dc) override;

    void Enter() override;
    void Leave() override;

private:

    // Pairs each spread (pushed-out) vertex with its originating apex vertex.
    typedef std::vector< std::pair<BrushVec3, BrushVec3> > MapBewteenSpreadedVertexAndApex;
    // Element index -> edges belonging to that selection element.
    typedef std::map<int, std::vector<BrushEdge3D> > MapBetweenElementIndexAndEdges;
    // Element index -> the polygon the element came from (sic: "Orignial").
    typedef std::map<int, CD::PolygonPtr > MapBetweenElementIndexAndOrignialPolygon;

    // Working state gathered while spreading the selection (reset per operation).
    struct SMappingInfo
    {
        void Reset()
        {
            mapSpreadedVertex2Apex.clear();
            mapElementIdx2Edges.clear();
            mapElementIdx2OriginalPolygon.clear();
            vertexSetToMakePolygon.clear();
        }

        MapBewteenSpreadedVertexAndApex mapSpreadedVertex2Apex;
        MapBetweenElementIndexAndEdges mapElementIdx2Edges;
        MapBetweenElementIndexAndOrignialPolygon mapElementIdx2OriginalPolygon;
        std::set<BrushVec3> vertexSetToMakePolygon;
    };

    // First - Polygon, Second - EdgeIndex
    typedef std::pair<CD::PolygonPtr, int> EdgeIdentifier;

    // Intermediate polygons and edge->vertex mappings handed from one phase to the next.
    struct SResultForNextPhase
    {
        void Reset()
        {
            mapBetweenEdgeIdToApex.clear();
            mapBetweenEdgeIdToVertex.clear();
            middlePhaseEdgePolygons.clear();
            middlePhaseSidePolygons.clear();
            middlePhaseBottomPolygons.clear();
            middlePhaseApexPolygons.clear();
        }
        std::map<EdgeIdentifier, BrushVec3> mapBetweenEdgeIdToApex;
        std::map<EdgeIdentifier, BrushVec3> mapBetweenEdgeIdToVertex;
        std::vector<CD::PolygonPtr> middlePhaseEdgePolygons;
        std::vector<CD::PolygonPtr> middlePhaseSidePolygons;
        std::vector<CD::PolygonPtr> middlePhaseBottomPolygons;
        std::vector<CD::PolygonPtr> middlePhaseApexPolygons;
    };
    SResultForNextPhase m_ResultForSecondPhase;

    // Phase 0: capture the original model/selection; optionally spread edges immediately.
    bool PP0_Initialize(bool bSpreadEdge = false);
    void PP0_SpreadEdges(int offset, bool bSpreadEdge = true);

    // Phase 1: push the selected edges/vertices outward and build the new polygons.
    bool PP1_PushEdgesAndVerticesOut(SResultForNextPhase& outResultForNextPhase, SMappingInfo& outMappingInfo);
    void PP1_MakeEdgePolygons(const SMappingInfo& mappingInfo, SResultForNextPhase& outResultForNextPhase);

    // Phase 2: resolve the apex position for each edge of an edge polygon.
    void PP2_MapBetweenEdgeIdToApexPos(
        const SMappingInfo& mappingInfo,
        CD::PolygonPtr pEdgePolygon,
        const BrushEdge3D& sideEdge0,
        const BrushEdge3D& sideEdge1,
        SResultForNextPhase& outResultForNextPhase);

    void PP1_MakeApexPolygons(const SMappingInfo& mappingInfo, SResultForNextPhase& outResultForNextPhase);

    void PP0_SubdivideSpreadedEdge(int nSubdivideNum);

    // An edge plus the intermediate vertex pairs produced by subdividing it.
    struct SInfoForSubdivingApexPolygon
    {
        BrushEdge3D edge;
        std::vector< std::pair<BrushVec3, BrushVec3> > vIntermediate;
    };
    void PP1_SubdivideApexPolygon(int nSubdivideNum, const std::vector<SInfoForSubdivingApexPolygon>& infoForSubdividingApexPolygonList);

private:

    // How many edges in the element list touch the given vertex.
    int GetEdgeCountHavingVertexInElementList(const BrushVec3& vertex, const ElementManager& elementList) const;
    // Index of the subdivision-info entry matching edge e, or a not-found sentinel.
    int FindCorrespondingEdge(const BrushEdge3D& e, const std::vector<SInfoForSubdivingApexPolygon>& infoForSubdividingApexPolygonList) const;

    std::vector<CD::PolygonPtr> CreateFirstOddSubdividedApexPolygons(const std::vector<const SInfoForSubdivingApexPolygon*>& subdividedEdges);
    std::vector<CD::PolygonPtr> CreateFirstEvenSubdividedApexPolygons(const std::vector<const SInfoForSubdivingApexPolygon*>& subdividedEdges);

    // Current interaction mode driven by mouse/keyboard input.
    enum EBevelMode
    {
        eBevelMode_Nothing,
        eBevelMode_Spread,
        eBevelMode_Divide,
    };

    EBevelMode m_BevelMode;

    // Snapshot of the model and selection taken at PP0_Initialize so the
    // operation can be recomputed/cancelled non-destructively.
    _smart_ptr<CD::Model> m_pOriginalModel;
    ElementManager m_OriginalSelectedElements;
    std::vector<CD::PolygonPtr> m_OriginalPolygons;

    // Last mouse Y and the accumulated drag delta controlling the bevel amount.
    int m_nMousePrevY;
    BrushFloat m_fDelta;
    int m_nDividedNumber;
};
1,944
5,813
<reponame>RomaKoks/druid<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.security.authorization; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.metadata.MetadataStorageTablesConfig; import org.apache.druid.metadata.TestDerbyConnector; import org.apache.druid.security.basic.BasicAuthCommonCacheConfig; import org.apache.druid.security.basic.BasicAuthUtils; import org.apache.druid.security.basic.authorization.BasicRoleBasedAuthorizer; import org.apache.druid.security.basic.authorization.db.updater.CoordinatorBasicAuthorizerMetadataStorageUpdater; import org.apache.druid.security.basic.authorization.endpoint.BasicAuthorizerResource; import org.apache.druid.security.basic.authorization.endpoint.CoordinatorBasicAuthorizerResourceHandler; import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerGroupMapping; import 
org.apache.druid.security.basic.authorization.entity.BasicAuthorizerGroupMappingFull; import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerPermission; import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerRole; import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerRoleFull; import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerRoleSimplifiedPermissions; import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerUser; import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerUserFull; import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerUserFullSimplifiedPermissions; import org.apache.druid.server.security.Action; import org.apache.druid.server.security.AuthValidator; import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.server.security.Resource; import org.apache.druid.server.security.ResourceAction; import org.apache.druid.server.security.ResourceType; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Response; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; @RunWith(MockitoJUnitRunner.class) public class CoordinatorBasicAuthorizerResourceTest { private static final String AUTHORIZER_NAME = "test"; private static final String AUTHORIZER_NAME2 = "test2"; private static final String AUTHORIZER_NAME3 = "test3"; @Rule public ExpectedException expectedException = ExpectedException.none(); @Rule public final 
TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule(); @Mock private AuthValidator authValidator; @Mock private HttpServletRequest req; private TestDerbyConnector connector; private MetadataStorageTablesConfig tablesConfig; private BasicAuthorizerResource resource; private CoordinatorBasicAuthorizerMetadataStorageUpdater storageUpdater; @Before public void setUp() { connector = derbyConnectorRule.getConnector(); tablesConfig = derbyConnectorRule.metadataTablesConfigSupplier().get(); connector.createConfigTable(); AuthorizerMapper authorizerMapper = new AuthorizerMapper( ImmutableMap.of( AUTHORIZER_NAME, new BasicRoleBasedAuthorizer( null, AUTHORIZER_NAME, null, null, null, null, null, null ), AUTHORIZER_NAME2, new BasicRoleBasedAuthorizer( null, AUTHORIZER_NAME2, null, null, null, null, null, null ), AUTHORIZER_NAME3, new BasicRoleBasedAuthorizer( null, AUTHORIZER_NAME3, null, null, "adminGroupMapping", null, null, null ) ) ); storageUpdater = new CoordinatorBasicAuthorizerMetadataStorageUpdater( authorizerMapper, connector, tablesConfig, new BasicAuthCommonCacheConfig(null, null, null, null), new ObjectMapper(new SmileFactory()), new NoopBasicAuthorizerCacheNotifier(), null ); resource = new BasicAuthorizerResource( new CoordinatorBasicAuthorizerResourceHandler( storageUpdater, authorizerMapper, new ObjectMapper(new SmileFactory()) ), authValidator ); storageUpdater.start(); } @After public void tearDown() { storageUpdater.stop(); } @Test public void testSeparateDatabaseTables() { Response response = resource.getAllUsers(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals( ImmutableSet.of(BasicAuthUtils.ADMIN_NAME, BasicAuthUtils.INTERNAL_USER_NAME), response.getEntity() ); response = resource.getAllUsers(req, AUTHORIZER_NAME2); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals( ImmutableSet.of(BasicAuthUtils.ADMIN_NAME, BasicAuthUtils.INTERNAL_USER_NAME), 
response.getEntity() ); response = resource.getAllGroupMappings(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals( ImmutableSet.of(), response.getEntity() ); response = resource.getAllGroupMappings(req, AUTHORIZER_NAME2); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals( ImmutableSet.of(), response.getEntity() ); response = resource.getAllGroupMappings(req, AUTHORIZER_NAME3); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals( ImmutableSet.of("adminGroupMapping"), response.getEntity() ); resource.createUser(req, AUTHORIZER_NAME, "druid"); resource.createUser(req, AUTHORIZER_NAME, "druid2"); resource.createUser(req, AUTHORIZER_NAME, "druid3"); resource.createGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", new BasicAuthorizerGroupMapping("druidGroupMapping", "", new HashSet<>())); resource.createGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", new BasicAuthorizerGroupMapping("druid2GroupMapping", "", new HashSet<>())); resource.createGroupMapping(req, AUTHORIZER_NAME, "druid3GroupMapping", new BasicAuthorizerGroupMapping("druid3GroupMapping", "", new HashSet<>())); resource.createUser(req, AUTHORIZER_NAME2, "druid4"); resource.createUser(req, AUTHORIZER_NAME2, "druid5"); resource.createUser(req, AUTHORIZER_NAME2, "druid6"); resource.createGroupMapping(req, AUTHORIZER_NAME2, "druid4GroupMapping", new BasicAuthorizerGroupMapping("druid4GroupMapping", "", new HashSet<>())); resource.createGroupMapping(req, AUTHORIZER_NAME2, "druid5GroupMapping", new BasicAuthorizerGroupMapping("druid5GroupMapping", "", new HashSet<>())); resource.createGroupMapping(req, AUTHORIZER_NAME2, "druid6GroupMapping", new BasicAuthorizerGroupMapping("druid6GroupMapping", "", new HashSet<>())); Set<String> expectedUsers = ImmutableSet.of( BasicAuthUtils.ADMIN_NAME, BasicAuthUtils.INTERNAL_USER_NAME, "druid", "druid2", "druid3" ); Set<String> expectedUsers2 = ImmutableSet.of( BasicAuthUtils.ADMIN_NAME, 
BasicAuthUtils.INTERNAL_USER_NAME, "druid4", "druid5", "druid6" ); response = resource.getAllUsers(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUsers, response.getEntity()); response = resource.getAllUsers(req, AUTHORIZER_NAME2); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUsers2, response.getEntity()); Set<String> expectedGroupMappings = ImmutableSet.of( "druidGroupMapping", "druid2GroupMapping", "druid3GroupMapping" ); Set<String> expectedGroupMappings2 = ImmutableSet.of( "druid4GroupMapping", "druid5GroupMapping", "druid6GroupMapping" ); response = resource.getAllGroupMappings(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedGroupMappings, response.getEntity()); response = resource.getAllGroupMappings(req, AUTHORIZER_NAME2); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedGroupMappings2, response.getEntity()); } @Test public void testInvalidAuthorizer() { Response response = resource.getAllUsers(req, "invalidName"); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals( errorMapWithMsg("Basic authorizer with name [invalidName] does not exist."), response.getEntity() ); } @Test public void testGetAllUsers() { Response response = resource.getAllUsers(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals( ImmutableSet.of(BasicAuthUtils.ADMIN_NAME, BasicAuthUtils.INTERNAL_USER_NAME), response.getEntity() ); resource.createUser(req, AUTHORIZER_NAME, "druid"); resource.createUser(req, AUTHORIZER_NAME, "druid2"); resource.createUser(req, AUTHORIZER_NAME, "druid3"); Set<String> expectedUsers = ImmutableSet.of( BasicAuthUtils.ADMIN_NAME, BasicAuthUtils.INTERNAL_USER_NAME, "druid", "druid2", "druid3" ); response = resource.getAllUsers(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUsers, response.getEntity()); 
} @Test public void testGetAllGroupMappings() { Response response = resource.getAllGroupMappings(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals( ImmutableSet.of(), response.getEntity() ); resource.createGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", new BasicAuthorizerGroupMapping("druidGroupMapping", "", new HashSet<>())); resource.createGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", new BasicAuthorizerGroupMapping("druid2GroupMapping", "", new HashSet<>())); resource.createGroupMapping(req, AUTHORIZER_NAME, "druid3GroupMapping", new BasicAuthorizerGroupMapping("druid3GroupMapping", "", new HashSet<>())); Set<String> expectedGroupMappings = ImmutableSet.of( "druidGroupMapping", "druid2GroupMapping", "druid3GroupMapping" ); response = resource.getAllGroupMappings(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedGroupMappings, response.getEntity()); } @Test public void testGetAllRoles() { Response response = resource.getAllRoles(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals( ImmutableSet.of(BasicAuthUtils.ADMIN_NAME, BasicAuthUtils.INTERNAL_USER_NAME), response.getEntity() ); resource.createRole(req, AUTHORIZER_NAME, "druid"); resource.createRole(req, AUTHORIZER_NAME, "druid2"); resource.createRole(req, AUTHORIZER_NAME, "druid3"); Set<String> expectedRoles = ImmutableSet.of( BasicAuthUtils.ADMIN_NAME, BasicAuthUtils.INTERNAL_USER_NAME, "druid", "druid2", "druid3" ); response = resource.getAllRoles(req, AUTHORIZER_NAME); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoles, response.getEntity()); } @Test public void testCreateDeleteUser() { Response response = resource.createUser(req, AUTHORIZER_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); response = resource.getUser(req, AUTHORIZER_NAME, "druid", null, null); Assert.assertEquals(200, response.getStatus()); 
BasicAuthorizerUser expectedUser = new BasicAuthorizerUser( "druid", ImmutableSet.of() ); Assert.assertEquals(expectedUser, response.getEntity()); response = resource.deleteUser(req, AUTHORIZER_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); response = resource.deleteUser(req, AUTHORIZER_NAME, "druid"); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("User [druid] does not exist."), response.getEntity()); response = resource.getUser(req, AUTHORIZER_NAME, "druid", null, null); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("User [druid] does not exist."), response.getEntity()); } @Test public void testCreateDeleteGroupMapping() { Response response = resource.createGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", new BasicAuthorizerGroupMapping("druidGroupMapping", "", new HashSet<>())); Assert.assertEquals(200, response.getStatus()); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerGroupMapping expectedGroupMapping = new BasicAuthorizerGroupMapping( "druidGroupMapping", "", ImmutableSet.of() ); Assert.assertEquals(expectedGroupMapping, response.getEntity()); response = resource.deleteGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping"); Assert.assertEquals(200, response.getStatus()); response = resource.deleteGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping"); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("Group mapping [druidGroupMapping] does not exist."), response.getEntity()); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", null); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("Group mapping [druidGroupMapping] does not exist."), response.getEntity()); } @Test public void testCreateDeleteRole() { Response response = resource.createRole(req, AUTHORIZER_NAME, 
"druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerRole expectedRole = new BasicAuthorizerRole("druidRole", ImmutableList.of()); Assert.assertEquals(expectedRole, response.getEntity()); response = resource.deleteRole(req, AUTHORIZER_NAME, "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.deleteRole(req, AUTHORIZER_NAME, "druidRole"); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("Role [druidRole] does not exist."), response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("Role [druidRole] does not exist."), response.getEntity()); } @Test public void testUserRoleAssignment() { Response response = resource.createRole(req, AUTHORIZER_NAME, "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.createUser(req, AUTHORIZER_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToUser(req, AUTHORIZER_NAME, "druid", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.getUser(req, AUTHORIZER_NAME, "druid", null, null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerUser expectedUser = new BasicAuthorizerUser( "druid", ImmutableSet.of("druidRole") ); Assert.assertEquals(expectedUser, response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerRole expectedRole = new BasicAuthorizerRole("druidRole", ImmutableList.of()); Assert.assertEquals(expectedRole, response.getEntity()); response = resource.unassignRoleFromUser(req, AUTHORIZER_NAME, "druid", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = 
resource.getUser(req, AUTHORIZER_NAME, "druid", null, null); Assert.assertEquals(200, response.getStatus()); expectedUser = new BasicAuthorizerUser( "druid", ImmutableSet.of() ); Assert.assertEquals(expectedUser, response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRole, response.getEntity()); } @Test public void testGroupMappingRoleAssignment() { Response response = resource.createRole(req, AUTHORIZER_NAME, "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.createGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", new BasicAuthorizerGroupMapping("druidGroupMapping", "", new HashSet<>())); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerGroupMapping expectedGroupMapping = new BasicAuthorizerGroupMapping( "druidGroupMapping", "", ImmutableSet.of("druidRole") ); Assert.assertEquals(expectedGroupMapping, response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerRole expectedRole = new BasicAuthorizerRole("druidRole", ImmutableList.of()); Assert.assertEquals(expectedRole, response.getEntity()); response = resource.unassignRoleFromGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", null); Assert.assertEquals(200, response.getStatus()); expectedGroupMapping = new BasicAuthorizerGroupMapping( "druidGroupMapping", "", ImmutableSet.of() ); Assert.assertEquals(expectedGroupMapping, 
response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRole, response.getEntity()); } @Test public void testDeleteAssignedRole() { Response response = resource.createRole(req, AUTHORIZER_NAME, "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.createUser(req, AUTHORIZER_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); response = resource.createUser(req, AUTHORIZER_NAME, "druid2"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToUser(req, AUTHORIZER_NAME, "druid", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToUser(req, AUTHORIZER_NAME, "druid2", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.createGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", new BasicAuthorizerGroupMapping("druidGroupMapping", "", new HashSet<>())); Assert.assertEquals(200, response.getStatus()); response = resource.createGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", new BasicAuthorizerGroupMapping("druid2GroupMapping", "", new HashSet<>())); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.getUser(req, AUTHORIZER_NAME, "druid", null, null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerUser expectedUser = new BasicAuthorizerUser( "druid", ImmutableSet.of("druidRole") ); Assert.assertEquals(expectedUser, response.getEntity()); response = resource.getUser(req, AUTHORIZER_NAME, "druid2", null, null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerUser 
expectedUser2 = new BasicAuthorizerUser( "druid2", ImmutableSet.of("druidRole") ); Assert.assertEquals(expectedUser2, response.getEntity()); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerGroupMapping expectedGroupMapping = new BasicAuthorizerGroupMapping( "druidGroupMapping", "", ImmutableSet.of("druidRole") ); Assert.assertEquals(expectedGroupMapping, response.getEntity()); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerGroupMapping expectedGroupMapping2 = new BasicAuthorizerGroupMapping( "druid2GroupMapping", "", ImmutableSet.of("druidRole") ); Assert.assertEquals(expectedGroupMapping2, response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerRole expectedRole = new BasicAuthorizerRole("druidRole", ImmutableList.of()); Assert.assertEquals(expectedRole, response.getEntity()); response = resource.deleteRole(req, AUTHORIZER_NAME, "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.getUser(req, AUTHORIZER_NAME, "druid", null, null); Assert.assertEquals(200, response.getStatus()); expectedUser = new BasicAuthorizerUser( "druid", ImmutableSet.of() ); Assert.assertEquals(expectedUser, response.getEntity()); response = resource.getUser(req, AUTHORIZER_NAME, "druid2", null, null); Assert.assertEquals(200, response.getStatus()); expectedUser2 = new BasicAuthorizerUser( "druid2", ImmutableSet.of() ); Assert.assertEquals(expectedUser2, response.getEntity()); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", null); Assert.assertEquals(200, response.getStatus()); expectedGroupMapping = new BasicAuthorizerGroupMapping( "druidGroupMapping", "", ImmutableSet.of() ); Assert.assertEquals(expectedGroupMapping, 
response.getEntity()); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", null); Assert.assertEquals(200, response.getStatus()); expectedGroupMapping2 = new BasicAuthorizerGroupMapping( "druid2GroupMapping", "", ImmutableSet.of() ); Assert.assertEquals(expectedGroupMapping2, response.getEntity()); } @Test public void testRolesAndPerms() { Response response = resource.createRole(req, AUTHORIZER_NAME, "druidRole"); Assert.assertEquals(200, response.getStatus()); List<ResourceAction> perms = ImmutableList.of( new ResourceAction(new Resource("A", ResourceType.DATASOURCE), Action.READ), new ResourceAction(new Resource("B", ResourceType.DATASOURCE), Action.WRITE), new ResourceAction(new Resource("C", ResourceType.CONFIG), Action.WRITE) ); response = resource.setRolePermissions(req, AUTHORIZER_NAME, "druidRole", perms); Assert.assertEquals(200, response.getStatus()); response = resource.setRolePermissions(req, AUTHORIZER_NAME, "wrongRole", perms); Assert.assertEquals(400, response.getStatus()); Assert.assertEquals(errorMapWithMsg("Role [wrongRole] does not exist."), response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerRole expectedRole = new BasicAuthorizerRole("druidRole", BasicAuthorizerPermission.makePermissionList(perms)); Assert.assertEquals(expectedRole, response.getEntity()); List<ResourceAction> newPerms = ImmutableList.of( new ResourceAction(new Resource("D", ResourceType.DATASOURCE), Action.READ), new ResourceAction(new Resource("B", ResourceType.DATASOURCE), Action.WRITE), new ResourceAction(new Resource("F", ResourceType.CONFIG), Action.WRITE) ); response = resource.setRolePermissions(req, AUTHORIZER_NAME, "druidRole", newPerms); Assert.assertEquals(200, response.getStatus()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(200, response.getStatus()); expectedRole = new 
BasicAuthorizerRole("druidRole", BasicAuthorizerPermission.makePermissionList(newPerms)); Assert.assertEquals(expectedRole, response.getEntity()); response = resource.setRolePermissions(req, AUTHORIZER_NAME, "druidRole", null); Assert.assertEquals(200, response.getStatus()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, null); Assert.assertEquals(200, response.getStatus()); expectedRole = new BasicAuthorizerRole("druidRole", null); Assert.assertEquals(expectedRole, response.getEntity()); } @Test public void testUsersGroupMappingsRolesAndPerms() { Response response = resource.createUser(req, AUTHORIZER_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); response = resource.createUser(req, AUTHORIZER_NAME, "druid2"); Assert.assertEquals(200, response.getStatus()); response = resource.createGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", new BasicAuthorizerGroupMapping("druidGroupMapping", "", new HashSet<>())); Assert.assertEquals(200, response.getStatus()); response = resource.createGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", new BasicAuthorizerGroupMapping("druid2GroupMapping", "", new HashSet<>())); Assert.assertEquals(200, response.getStatus()); response = resource.createRole(req, AUTHORIZER_NAME, "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.createRole(req, AUTHORIZER_NAME, "druidRole2"); Assert.assertEquals(200, response.getStatus()); List<ResourceAction> perms = ImmutableList.of( new ResourceAction(new Resource("A", ResourceType.DATASOURCE), Action.READ), new ResourceAction(new Resource("B", ResourceType.DATASOURCE), Action.WRITE), new ResourceAction(new Resource("C", ResourceType.CONFIG), Action.WRITE) ); List<ResourceAction> perms2 = ImmutableList.of( new ResourceAction(new Resource("D", ResourceType.STATE), Action.READ), new ResourceAction(new Resource("E", ResourceType.DATASOURCE), Action.WRITE), new ResourceAction(new Resource("F", ResourceType.CONFIG), 
Action.WRITE) ); response = resource.setRolePermissions(req, AUTHORIZER_NAME, "druidRole", perms); Assert.assertEquals(200, response.getStatus()); response = resource.setRolePermissions(req, AUTHORIZER_NAME, "druidRole2", perms2); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToUser(req, AUTHORIZER_NAME, "druid", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToUser(req, AUTHORIZER_NAME, "druid", "druidRole2"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToUser(req, AUTHORIZER_NAME, "druid2", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToUser(req, AUTHORIZER_NAME, "druid2", "druidRole2"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", "druidRole2"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.assignRoleToGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", "druidRole2"); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerRole expectedRole = new BasicAuthorizerRole("druidRole", BasicAuthorizerPermission.makePermissionList(perms)); BasicAuthorizerRole expectedRole2 = new BasicAuthorizerRole("druidRole2", BasicAuthorizerPermission.makePermissionList(perms2)); Set<BasicAuthorizerRole> expectedRoles = Sets.newHashSet(expectedRole, expectedRole2); BasicAuthorizerUserFull expectedUserFull = new BasicAuthorizerUserFull("druid", expectedRoles); response = resource.getUser(req, AUTHORIZER_NAME, "druid", "", null); Assert.assertEquals(200, response.getStatus()); 
Assert.assertEquals(expectedUserFull, response.getEntity()); BasicAuthorizerUserFullSimplifiedPermissions expectedUserFullSimplifiedPermissions = new BasicAuthorizerUserFullSimplifiedPermissions( "druid", BasicAuthorizerRoleSimplifiedPermissions.convertRoles(expectedRoles) ); response = resource.getUser(req, AUTHORIZER_NAME, "druid", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFullSimplifiedPermissions, response.getEntity()); BasicAuthorizerUserFull expectedUserFull2 = new BasicAuthorizerUserFull("druid2", expectedRoles); response = resource.getUser(req, AUTHORIZER_NAME, "druid2", "", null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFull2, response.getEntity()); BasicAuthorizerUserFullSimplifiedPermissions expectedUserFullSimplifiedPermissions2 = new BasicAuthorizerUserFullSimplifiedPermissions( "druid2", BasicAuthorizerRoleSimplifiedPermissions.convertRoles(expectedRoles) ); response = resource.getUser(req, AUTHORIZER_NAME, "druid2", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFullSimplifiedPermissions2, response.getEntity()); BasicAuthorizerGroupMappingFull expectedGroupMappingFull = new BasicAuthorizerGroupMappingFull("druidGroupMapping", "", expectedRoles); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedGroupMappingFull, response.getEntity()); BasicAuthorizerGroupMappingFull expectedGroupMappingFull2 = new BasicAuthorizerGroupMappingFull("druid2GroupMapping", "", expectedRoles); response = resource.getGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedGroupMappingFull2, response.getEntity()); Set<String> expectedUserSet = Sets.newHashSet("druid", "druid2"); Set<String> expectedGroupMappingSet = Sets.newHashSet("druidGroupMapping", 
"druid2GroupMapping"); BasicAuthorizerRoleFull expectedRoleFull = new BasicAuthorizerRoleFull( "druidRole", expectedUserSet, expectedGroupMappingSet, BasicAuthorizerPermission.makePermissionList(perms) ); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", "", null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoleFull, response.getEntity()); BasicAuthorizerRoleSimplifiedPermissions expectedRoleSimplifiedPerms = new BasicAuthorizerRoleSimplifiedPermissions( "druidRole", expectedUserSet, perms ); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoleSimplifiedPerms, response.getEntity()); expectedRoleSimplifiedPerms = new BasicAuthorizerRoleSimplifiedPermissions( "druidRole", null, perms ); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", null, ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoleSimplifiedPerms, response.getEntity()); BasicAuthorizerRoleFull expectedRoleFull2 = new BasicAuthorizerRoleFull( "druidRole2", expectedUserSet, expectedGroupMappingSet, BasicAuthorizerPermission.makePermissionList(perms2) ); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole2", "", null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoleFull2, response.getEntity()); BasicAuthorizerRoleSimplifiedPermissions expectedRoleSimplifiedPerms2 = new BasicAuthorizerRoleSimplifiedPermissions( "druidRole2", expectedUserSet, perms2 ); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole2", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoleSimplifiedPerms2, response.getEntity()); expectedRoleSimplifiedPerms2 = new BasicAuthorizerRoleSimplifiedPermissions( "druidRole2", null, perms2 ); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole2", null, ""); Assert.assertEquals(200, response.getStatus()); 
Assert.assertEquals(expectedRoleSimplifiedPerms2, response.getEntity()); perms = ImmutableList.of( new ResourceAction(new Resource("A", ResourceType.DATASOURCE), Action.READ), new ResourceAction(new Resource("C", ResourceType.CONFIG), Action.WRITE) ); perms2 = ImmutableList.of( new ResourceAction(new Resource("E", ResourceType.DATASOURCE), Action.WRITE) ); response = resource.setRolePermissions(req, AUTHORIZER_NAME, "druidRole", perms); Assert.assertEquals(200, response.getStatus()); response = resource.setRolePermissions(req, AUTHORIZER_NAME, "druidRole2", perms2); Assert.assertEquals(200, response.getStatus()); expectedRole = new BasicAuthorizerRole("druidRole", BasicAuthorizerPermission.makePermissionList(perms)); expectedRole2 = new BasicAuthorizerRole("druidRole2", BasicAuthorizerPermission.makePermissionList(perms2)); expectedRoles = Sets.newHashSet(expectedRole, expectedRole2); expectedUserFull = new BasicAuthorizerUserFull("druid", expectedRoles); expectedUserFull2 = new BasicAuthorizerUserFull("druid2", expectedRoles); expectedUserFullSimplifiedPermissions = new BasicAuthorizerUserFullSimplifiedPermissions( "druid", BasicAuthorizerRoleSimplifiedPermissions.convertRoles(expectedRoles) ); expectedUserFullSimplifiedPermissions2 = new BasicAuthorizerUserFullSimplifiedPermissions( "druid2", BasicAuthorizerRoleSimplifiedPermissions.convertRoles(expectedRoles) ); response = resource.getUser(req, AUTHORIZER_NAME, "druid", "", null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFull, response.getEntity()); response = resource.getUser(req, AUTHORIZER_NAME, "druid", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFullSimplifiedPermissions, response.getEntity()); response = resource.getUser(req, AUTHORIZER_NAME, "druid2", "", null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFull2, response.getEntity()); response = resource.getUser(req, AUTHORIZER_NAME, 
"druid2", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFullSimplifiedPermissions2, response.getEntity()); response = resource.unassignRoleFromUser(req, AUTHORIZER_NAME, "druid", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.unassignRoleFromUser(req, AUTHORIZER_NAME, "druid2", "druidRole2"); Assert.assertEquals(200, response.getStatus()); response = resource.unassignRoleFromGroupMapping(req, AUTHORIZER_NAME, "druidGroupMapping", "druidRole"); Assert.assertEquals(200, response.getStatus()); response = resource.unassignRoleFromGroupMapping(req, AUTHORIZER_NAME, "druid2GroupMapping", "druidRole2"); Assert.assertEquals(200, response.getStatus()); expectedUserFull = new BasicAuthorizerUserFull("druid", Sets.newHashSet(expectedRole2)); expectedUserFull2 = new BasicAuthorizerUserFull("druid2", Sets.newHashSet(expectedRole)); expectedRoleFull = new BasicAuthorizerRoleFull( "druidRole", Sets.newHashSet("druid2"), Sets.newHashSet("druid2GroupMapping"), BasicAuthorizerPermission.makePermissionList(perms) ); expectedRoleFull2 = new BasicAuthorizerRoleFull( "druidRole2", Sets.newHashSet("druid"), Sets.newHashSet("druidGroupMapping"), BasicAuthorizerPermission.makePermissionList(perms2) ); expectedUserFullSimplifiedPermissions = new BasicAuthorizerUserFullSimplifiedPermissions( "druid", BasicAuthorizerRoleSimplifiedPermissions.convertRoles(expectedUserFull.getRoles()) ); expectedUserFullSimplifiedPermissions2 = new BasicAuthorizerUserFullSimplifiedPermissions( "druid2", BasicAuthorizerRoleSimplifiedPermissions.convertRoles(expectedUserFull2.getRoles()) ); expectedRoleSimplifiedPerms = new BasicAuthorizerRoleSimplifiedPermissions(expectedRoleFull); expectedRoleSimplifiedPerms2 = new BasicAuthorizerRoleSimplifiedPermissions(expectedRoleFull2); response = resource.getUser(req, AUTHORIZER_NAME, "druid", "", null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFull, 
response.getEntity()); response = resource.getUser(req, AUTHORIZER_NAME, "druid", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFullSimplifiedPermissions, response.getEntity()); response = resource.getUser(req, AUTHORIZER_NAME, "druid2", "", null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFull2, response.getEntity()); response = resource.getUser(req, AUTHORIZER_NAME, "druid2", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedUserFullSimplifiedPermissions2, response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", "", null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoleFull, response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoleSimplifiedPerms, response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole2", "", null); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoleFull2, response.getEntity()); response = resource.getRole(req, AUTHORIZER_NAME, "druidRole2", "", ""); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(expectedRoleSimplifiedPerms2, response.getEntity()); } @Test public void testConcurrentUpdate() { final int testMultiple = 100; // setup a user and the roles Response response = resource.createUser(req, AUTHORIZER_NAME, "druid"); Assert.assertEquals(200, response.getStatus()); List<ResourceAction> perms = ImmutableList.of( new ResourceAction(new Resource("A", ResourceType.DATASOURCE), Action.READ), new ResourceAction(new Resource("B", ResourceType.DATASOURCE), Action.WRITE), new ResourceAction(new Resource("C", ResourceType.CONFIG), Action.WRITE) ); for (int i = 0; i < testMultiple; i++) { String roleName = "druidRole-" + i; response = resource.createRole(req, AUTHORIZER_NAME, roleName); 
Assert.assertEquals(200, response.getStatus()); response = resource.setRolePermissions(req, AUTHORIZER_NAME, roleName, perms); Assert.assertEquals(200, response.getStatus()); } ExecutorService exec = Execs.multiThreaded(testMultiple, "thread---"); int[] responseCodesAssign = new int[testMultiple]; // assign 'testMultiple' roles to the user concurrently List<Callable<Void>> addRoleCallables = new ArrayList<>(); for (int i = 0; i < testMultiple; i++) { final int innerI = i; String roleName = "druidRole-" + i; addRoleCallables.add( () -> { Response response12 = resource.assignRoleToUser(req, AUTHORIZER_NAME, "druid", roleName); responseCodesAssign[innerI] = response12.getStatus(); return null; } ); } try { List<Future<Void>> futures = exec.invokeAll(addRoleCallables); for (Future future : futures) { future.get(); } } catch (Exception e) { throw new RuntimeException(e); } // the API can return !200 if the update attempt fails by exhausting retries because of // too much contention from other conflicting requests, make sure that we don't get any successful requests // that didn't actually take effect Set<String> roleNames = getRoleNamesAssignedToUser("druid"); for (int i = 0; i < testMultiple; i++) { String roleName = "druidRole-" + i; if (responseCodesAssign[i] == 200 && !roleNames.contains(roleName)) { Assert.fail( StringUtils.format("Got response status 200 for assigning role [%s] but user did not have role.", roleName) ); } } // Now unassign the roles concurrently List<Callable<Void>> removeRoleCallables = new ArrayList<>(); int[] responseCodesRemove = new int[testMultiple]; for (int i = 0; i < testMultiple; i++) { final int innerI = i; String roleName = "druidRole-" + i; removeRoleCallables.add( () -> { Response response1 = resource.unassignRoleFromUser(req, AUTHORIZER_NAME, "druid", roleName); responseCodesRemove[innerI] = response1.getStatus(); return null; } ); } try { List<Future<Void>> futures = exec.invokeAll(removeRoleCallables); for (Future future : 
futures) { future.get(); } } catch (Exception e) { throw new RuntimeException(e); } roleNames = getRoleNamesAssignedToUser("druid"); for (int i = 0; i < testMultiple; i++) { String roleName = "druidRole-" + i; if (responseCodesRemove[i] == 200 && roleNames.contains(roleName)) { Assert.fail( StringUtils.format("Got response status 200 for removing role [%s] but user still has role.", roleName) ); } } } private Set<String> getRoleNamesAssignedToUser( String user ) { Response response = resource.getUser(req, AUTHORIZER_NAME, user, "", null); Assert.assertEquals(200, response.getStatus()); BasicAuthorizerUserFull userFull = (BasicAuthorizerUserFull) response.getEntity(); Set<String> roleNames = new HashSet<>(); for (BasicAuthorizerRole role : userFull.getRoles()) { roleNames.add(role.getName()); } return roleNames; } private static Map<String, String> errorMapWithMsg(String errorMsg) { return ImmutableMap.of("error", errorMsg); } }
16,440
463
package java8testing;

/**
 * Minimal service facade that delegates its single business operation to an
 * injected {@link Collaborator}.
 */
public final class BusinessService {

    /** Delegate that performs the actual work; injected once, never reassigned. */
    private final Collaborator collaborator;

    /**
     * Creates the service around the collaborator it will delegate to.
     *
     * @param collaborator the delegate used by {@link #performBusinessOperation(int)}
     */
    public BusinessService(Collaborator collaborator) {
        this.collaborator = collaborator;
    }

    /**
     * Returns the collaborator this service was constructed with.
     *
     * @return the injected collaborator
     */
    public Collaborator getCollaborator() {
        return collaborator;
    }

    /**
     * Performs the business operation: increments {@code value} by one and
     * forwards it to the collaborator.
     *
     * @param value the raw input value
     * @return whatever the collaborator produces for {@code value + 1}
     */
    public String performBusinessOperation(int value) {
        final int adjusted = value + 1;
        return collaborator.doSomething(adjusted);
    }
}
106
630
<filename>deps/boyer-moore-horspool/StreamBoyerMooreHorspool.h /* * Copyright (c) 2010 Phusion v.o.f. * https://github.com/FooBarWidget/boyer-moore-horspool * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ #ifndef _STREAM_BOYER_MOORE_HORSPOOL_ #define _STREAM_BOYER_MOORE_HORSPOOL_ /* * Boyer-Moore-Horspool string search algorithm implementation with streaming support. * Most string search algorithm implementations require the entire haystack data to * be in memory. In contrast, this implementation allows one to feed the haystack data * piece-of-piece in a "streaming" manner. * * This implementation is optimized for both speed and memory usage. * Other than the memory needed for the context structure, it does not perform any * additional memory allocations (except for minimal usage of the stack). 
The context * structure, which contains the Boyer-Moore-Horspool occurance table and various * state information, is is organized in such a way that it can be allocated with a * single memory allocation action, regardless of the length of the needle. * Its inner loop also deviates a little bit from the original algorithm: the original * algorithm matches data right-to-left, but this implementation first matches the * rightmost character, then matches the data left-to-right, thereby incorporating * some ideas from "Tuning the Boyer-Moore-Horspool String Searching Algorithm" by * <NAME>, 1992. It uses memcmp() for this left-to-right match which is typically * heavily optimized. * * A few more notes: * - This code can be used for searching an arbitrary binary needle in an arbitrary binary * haystack. It is not limited to text. * - Boyer-Moore-Horspool works best for long needles. Generally speaking, the longer the * needle the faster the algorithm becomes. Thus, this implementation makes no effort * at being fast at searching single-character needles or even short needles (say, * less than 5 characters). You should just use memchr() and memmem() for that; those * functions are usually heavily optimized (e.g. by using tricks like searching 4 bytes * at the same time by treating data as an array of integers) and will probably be * *much* faster than this code at searching short needles. * - You can further tweak this code to favor either memory usage or performance. * See the typedef for sbmh_size_t for more information. * * * == Basic usage * * 1. Allocate a StreamBMH structure either on the stack (alloca) or on the heap. * It must be at least SBMH_SIZE(needle_len) bytes big. * The maximum supported needle size depends on the definition of sbmh_size_t. See * its typedef for more information. * * This structure contains haystack search state information and callback * information. The section 'Reuse' explains why this is important. * * 2. 
Allocate a StreamBMH_Occ structure somewhere. * This structure contains the Boyer-Moore-Horspool occurrance table. The section * 'Reuse' explains why this is important. * * 3. Initialize both structures with sbmh_init(). The structures are now usable for * searching the given needle, and only the given needle. * You must ensure that the StreamBMH structure has at least SBMH_SIZE(needle_len) * bytes of space, otherwise sbmh_init() will overwrite too much memory. * sbmh_init() does NOT make a copy of the needle data. * * 4. Feed haystack data using sbmh_feed(). You must pass it the same needle that you * passed to sbmh_init(), and the same StreamBMH and StreamBMH_Occ structures. * This library does not store a pointer to the needle passed to * sbmh_init() for memory efficiency reasons: the caller already has a pointer * to the needle data so there's no need for us to store it. * * sbmh_feed() returns the number of bytes that has been analyzed: * * - If the needle has now been found then the position of the last needle character * in the currently fed data will be returned: all data until the end of the needle * has been analyzed, but no more. Additionally, the 'found' field in the context * structure will be set to true. * - If the needle hasn't been found yet, then the size of the currently fed data * will be returned: all fed data has been analyzed. * - If the needle was already found, then any additional call to sbmh_feed() * will cause it to return 0: nothing in the fed data is analyzed. * * There's no need deinitialize the StreamBMH/StreamBMH_Occ structures. Just free their * memory. * * * == Convenience * * There's a convenience macro, SBMH_ALLOC_AND_INIT(), for combining steps 1 and 2. * It accepts a NULL-terminated needle and allocates the StreamBMH structure using * malloc(): * * struct StreamBMH *ctx; * SBMH_ALLOC_AND_INIT(ctx, occ, "my needle"); * if (ctx == NULL) { * // error... * } * ... 
* free(ctx); * * * == Reusing: finding the same needle in a different haystack * * You can reuse the StreamBMH structure and the StreamBMH_Occ structure for * finding the same needle in a different haystack. * * StreamBMH contains the haystack search state. It must be reset every time * you want to search in a new haystack. Call sbmh_reset() to do so. * * The StreamBMH_Occ structure must not be changed because it only contains * needle-specific preparation data, not haystack-specific state. You can * just reuse the old StreamBMH_Occ structure. * * You can then call sbmh_feed() to analyze haystack data. * * * == Reusing: finding a different needle * * You can reuse an existing StreamBMH/StreamBMH_Occ structure for finding a * *different* needle as well. Call sbmh_init() to re-initialize both structures * for use with a different needle. * However you must make sure that the StreamBMH structure is at least * SBMH_SIZE(new_needle_len) bytes big. * * * == Multithreading * * Once initialized, it is safe to share a StreamBMH_Occ structure and the * needle among multiple threads as long as they don't modify either of these. * Each thread must however have its own StreamBMH structure. * * * == Recognition of non-needle data * * The 'callback' field in the StreamBMH structure can be used for recognizing non-needle * data. This is especially useful for things like multipart MIME parsers where you're * interested in all data except for the needle. * * This callback is initially set to NULL by sbmh_init(). sbmh_reset() does not set it. * When set, sbmh_feed() will call this callback with any data that is determined to not * contain the needle. StreamBMH also has a 'user_data' field. You can set it to any * value for your own use; this code do not use it at all. * * The data passed to the callback can be either part of the data in sbmh_feed()'s * 'data' argument, or it can be part of the StreamBMH lookbehind buffer. 
 * If the latter is the case, then consider the data only valid within the callback: once the
 * callback has finished, this code can do arbitrary things to the lookbehind buffer,
 * so to preserve that data you must make your own copy.
 */

/* This implementation is based on sample code originally written by Joel
 * Yliluoma <<EMAIL>>, licensed under MIT.
 */

// We assume that other compilers support the 'restrict' keyword.
#ifdef __GNUC__
	#ifndef G_GNUC_RESTRICT
		#if defined (__GNUC__) && (__GNUC__ >= 4)
			#define G_GNUC_RESTRICT __restrict__
		#else
			#define G_GNUC_RESTRICT
		#endif
	#endif
	#ifndef restrict
		#define restrict G_GNUC_RESTRICT
	#endif
#endif

/* Branch prediction hints; compile down to the bare expression on non-GCC. */
#ifndef likely
	#ifdef __GNUC__
		#define likely(expr) __builtin_expect((expr), 1)
		#define unlikely(expr) __builtin_expect((expr), 0)
	#else
		#define likely(expr) expr
		#define unlikely(expr) expr
	#endif
#endif

#include <sys/types.h>
#include <string.h>
#include <assert.h>

/* Tiny boolean type so the header does not depend on <stdbool.h>. */
typedef enum {
	sbmh_false,
	sbmh_true
} sbmh_bool;

/* Cast helpers: make the signed/unsigned conversions explicit at call sites. */
#define cast_scast_size_t(v) ((ssize_t)(v))
#define cast_size_t(v) ((size_t)(v))
#define sbmh_cast_size_t(v) ((sbmh_size_t)(v))

// namespace Passenger {

struct StreamBMH;

/*
 * sbmh_size_t is a type for representing the needle length. It should be unsigned;
 * it makes no sense for it not to be.
 * By default it's typedef'ed to 'unsigned short', which is a 16-bit integer on most
 * platforms, allowing us to support needles up to about 64 KB. This ought to be enough
 * for most people. In the odd situation that you're dealing with extremely large
 * needles, you can typedef this to 'unsigned int' or even 'unsigned long long'.
 *
 * Its typedef slightly affects performance. Benchmarks on OS X Snow Leopard (x86_64)
 * have shown that typedeffing this to size_t (64-bit integer) makes the benchmark
 * 4-8% faster at the cost of 4 times more memory usage per StreamBMH_Occ structure.
 * Consider changing the typedef depending on your needs.
 */
typedef unsigned short sbmh_size_t;

/* Callback invoked by sbmh_feed() with data determined NOT to contain the needle.
 * See the usage documentation at the top of this file. */
typedef void (*sbmh_data_cb)(const struct StreamBMH *ctx, const unsigned char *data, size_t len);

/* Needle-only preparation data: the Boyer-Moore-Horspool occurrence
 * ("bad character shift") table. Read-only after sbmh_init(), so it can be
 * shared across threads and across haystacks. */
struct StreamBMH_Occ {
	sbmh_size_t occ[256];
};

/* Per-haystack search state. Must be allocated with at least
 * SBMH_SIZE(needle_len) bytes to leave room for the trailing lookbehind buffer. */
struct StreamBMH {
	/***** Public but read-only fields *****/
	sbmh_bool found;

	/***** Public fields; feel free to populate *****/
	sbmh_data_cb callback;
	void *user_data;

	/***** Internal fields, do not access. *****/
	sbmh_size_t lookbehind_size;
	/* After this field comes a 'lookbehind' field whose size is determined
	 * by the allocator (e.g. SBMH_ALLOC_AND_INIT).
	 * Algorithm uses at most needle_len - 1 bytes of space in lookbehind buffer.
	 */
};

#define SBMH_SIZE(needle_len) (sizeof(struct StreamBMH) + (needle_len) - 1)
#define SBMH_ALLOC_AND_INIT(sbmh, occ, needle) \
	do { \
		size_t needle_len = strlen((const char *) needle); \
		sbmh = (struct StreamBMH *) malloc(SBMH_SIZE(needle_len)); \
		sbmh_init(sbmh, occ, (const unsigned char *) needle, needle_len); \
	} while (0)

/* Flip to '#if 1' to enable verbose tracing (requires C++, uses std::string). */
#if 0
	#include <string>
	#include <cstdio>
	#define SBMH_DEBUG(format) printf(format)
	#define SBMH_DEBUG1(format, arg1) printf(format, arg1)
	#define SBMH_DEBUG2(format, arg1, arg2) printf(format, arg1, arg2)
#else
	#define SBMH_DEBUG(format) do { /* nothing */ } while (0)
	#define SBMH_DEBUG1(format, arg1) do { /* nothing */ } while (0)
	#define SBMH_DEBUG2(format, arg1, arg2) do { /* nothing */ } while (0)
#endif

/* Accessor for the lookbehind field (stored directly after the struct). */
#define _SBMH_LOOKBEHIND(ctx) ((unsigned char *) ctx + sizeof(struct StreamBMH))

/* Reset haystack-search state so the same needle can be searched in a new
 * haystack. Deliberately leaves 'callback' and 'user_data' untouched. */
static inline void
sbmh_reset(struct StreamBMH *restrict ctx)
{
	ctx->found = sbmh_false;
	ctx->lookbehind_size = 0;
}

/* Initialize the search state (ctx) and/or the occurrence table (occ) for the
 * given needle. Either pointer may be NULL to initialize only the other one.
 * No copy of the needle is made; the caller must pass the identical needle to
 * every subsequent sbmh_feed() call. */
static inline void
sbmh_init(struct StreamBMH *restrict ctx, struct StreamBMH_Occ *restrict occ,
	const unsigned char *restrict needle, sbmh_size_t needle_len)
{
	sbmh_size_t i;
	unsigned int j;

	if (ctx != NULL) {
		sbmh_reset(ctx);
		ctx->callback = NULL;
		ctx->user_data = NULL;
	}

	if (occ != NULL) {
		assert(needle_len > 0);

		/* Initialize occurrence table: default shift is the full needle length. */
		for (j = 0; j < 256; j++) {
			occ->occ[j] = needle_len;
		}

		/* Populate occurrence table with analysis of the needle,
		 * ignoring last letter.
		 */
		if (needle_len >= 1) {
			for (i = 0; i < needle_len - 1; i++) {
				occ->occ[needle[i]] = needle_len - 1 - i;
			}
		}
	}
}

/* Look up the haystack byte at virtual position 'pos': negative positions
 * index the lookbehind buffer (counted from its end), non-negative positions
 * index the current 'data' chunk. */
static inline char
sbmh_lookup_char(const struct StreamBMH *restrict ctx,
	const unsigned char *restrict data, ssize_t pos)
{
	if (pos < 0) {
		return _SBMH_LOOKBEHIND(ctx)[ctx->lookbehind_size + pos];
	} else {
		return data[pos];
	}
}

/* Compare 'len' needle bytes against the haystack starting at virtual position
 * 'pos'; the compared span may straddle the lookbehind buffer and 'data'. */
static inline sbmh_bool
sbmh_memcmp(const struct StreamBMH *restrict ctx,
	const unsigned char *restrict needle,
	const unsigned char *restrict data,
	ssize_t pos, sbmh_size_t len)
{
	ssize_t i = 0;

	while (i < cast_scast_size_t(len)) {
		unsigned char data_ch = sbmh_lookup_char(ctx, data, pos + i);
		unsigned char needle_ch = needle[i];

		if (data_ch == needle_ch) {
			i++;
		} else {
			return sbmh_false;
		}
	}
	return sbmh_true;
}

/* Feed the next chunk of haystack data. Returns the number of bytes analyzed;
 * see the contract described in the header comment at the top of this file. */
static inline size_t
sbmh_feed(struct StreamBMH *restrict ctx, const struct StreamBMH_Occ *restrict occtable,
	const unsigned char *restrict needle, sbmh_size_t needle_len,
	const unsigned char *restrict data, size_t len)
{
	SBMH_DEBUG1("\n[sbmh] feeding: (%s)\n", std::string((const char *) data, len).c_str());

	if (ctx->found) {
		return 0;
	}

	/* Positive: points to a position in 'data'
	 *    pos == 3 points to data[3]
	 * Negative: points to a position in the lookbehind buffer
	 *    pos == -2 points to lookbehind[lookbehind_size - 2]
	 */
	ssize_t pos = -ctx->lookbehind_size;
	unsigned char last_needle_char = needle[needle_len - 1];
	const sbmh_size_t *occ = occtable->occ;
	unsigned char *lookbehind = _SBMH_LOOKBEHIND(ctx);

	if (pos < 0) {
		SBMH_DEBUG2("[sbmh] considering lookbehind: (%s)(%s)\n",
			std::string((const char *) lookbehind, ctx->lookbehind_size).c_str(),
			std::string((const char *) data, len).c_str());

		/* Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
		 * search with character lookup code that considers both the
		 * lookbehind buffer and the current round's haystack data.
		 *
		 * Loop until
		 *   there is a match.
		 * or until
		 *   we've moved past the position that requires the
		 *   lookbehind buffer. In this case we switch to the
		 *   optimized loop.
		 * or until
		 *   the character to look at lies outside the haystack.
		 */
		while (pos < 0 && pos <= cast_scast_size_t(len) - cast_scast_size_t(needle_len)) {
			unsigned char ch = sbmh_lookup_char(ctx, data, pos + needle_len - 1);

			if (ch == last_needle_char
			 && sbmh_memcmp(ctx, needle, data, pos, needle_len - 1)) {
				ctx->found = sbmh_true;
				ctx->lookbehind_size = 0;
				/* NOTE(review): lookbehind_size was just zeroed, so the guard
				 * below reduces to 'pos > 0', which is always false inside this
				 * pos < 0 loop — the callback appears unreachable on this path.
				 * Confirm intended callback delivery against upstream before
				 * relying on it here. */
				if (pos > -ctx->lookbehind_size && ctx->callback != NULL) {
					ctx->callback(ctx, lookbehind, ctx->lookbehind_size + pos);
				}
				SBMH_DEBUG1("[sbmh] found using lookbehind; end = %d\n",
					int(pos + needle_len));
				return pos + needle_len;
			} else {
				pos += occ[ch];
			}
		}

		// No match.

		if (pos < 0) {
			/* There's too few data for Boyer-Moore-Horspool to run,
			 * so let's use a different algorithm to skip as much as
			 * we can.
			 * Forward pos until
			 *   the trailing part of lookbehind + data
			 *   looks like the beginning of the needle
			 * or until
			 *   pos == 0
			 */
			SBMH_DEBUG1("[sbmh] inconclusive; pos = %d\n", (int) pos);
			while (pos < 0 && !sbmh_memcmp(ctx, needle, data, pos, len - pos)) {
				pos++;
			}
			SBMH_DEBUG1("[sbmh] managed to skip to pos = %d\n", (int) pos);
		}

		if (pos >= 0) {
			/* Discard lookbehind buffer. */
			SBMH_DEBUG("[sbmh] no match; discarding lookbehind\n");
			if (ctx->callback != NULL) {
				ctx->callback(ctx, lookbehind, ctx->lookbehind_size);
			}
			ctx->lookbehind_size = 0;
		} else {
			/* Cut off part of the lookbehind buffer that has
			 * been processed and append the entire haystack
			 * into it.
			 */
			sbmh_size_t bytesToCutOff = sbmh_cast_size_t(cast_scast_size_t(ctx->lookbehind_size) + pos);

			if (bytesToCutOff > 0 && ctx->callback != NULL) {
				// The cut off data is guaranteed not to contain the needle.
				ctx->callback(ctx, lookbehind, bytesToCutOff);
			}

			memmove(lookbehind,
				lookbehind + bytesToCutOff,
				ctx->lookbehind_size - bytesToCutOff);
			ctx->lookbehind_size -= bytesToCutOff;

			/* The remaining lookbehind plus the whole chunk must still fit in
			 * the lookbehind buffer (at most needle_len - 1 bytes). */
			assert(cast_scast_size_t(ctx->lookbehind_size + len) < cast_scast_size_t(needle_len));
			memcpy(lookbehind + ctx->lookbehind_size, data, len);
			ctx->lookbehind_size += len;
			SBMH_DEBUG1("[sbmh] update lookbehind -> (%s)\n",
				std::string((const char *) lookbehind, ctx->lookbehind_size).c_str());
			return len;
		}
	}

	assert(pos >= 0);
	assert(ctx->lookbehind_size == 0);

	SBMH_DEBUG1("[sbmh] starting from pos = %d\n", (int) pos);

	/* Lookbehind buffer is now empty. Perform Boyer-Moore-Horspool
	 * search with optimized character lookup code that only considers
	 * the current round's haystack data.
	 */
	while (likely( pos <= cast_scast_size_t(len) - cast_scast_size_t(needle_len) )) {
		unsigned char ch = data[pos + needle_len - 1];

		if (unlikely(
		        unlikely( ch == last_needle_char )
		     && unlikely( *(data + pos) == needle[0] )
		     && unlikely( memcmp(needle, data + pos, needle_len - 1) == 0 )
		)) {
			SBMH_DEBUG1("[sbmh] found at position %d\n", (int) pos);
			ctx->found = sbmh_true;
			if (pos > 0 && ctx->callback != NULL) {
				ctx->callback(ctx, data, pos);
			}
			return pos + needle_len;
		} else {
			pos += occ[ch];
		}
	}

	/* There was no match. If there's trailing haystack data that we cannot
	 * match yet using the Boyer-Moore-Horspool algorithm (because the trailing
	 * data is less than the needle size) then match using a modified
	 * algorithm that starts matching from the beginning instead of the end.
	 * Whatever trailing data is left after running this algorithm is added to
	 * the lookbehind buffer.
	 */
	SBMH_DEBUG("[sbmh] no match\n");
	if (cast_size_t(pos) < len) {
		while (cast_size_t(pos) < len
		    && (
		           data[pos] != needle[0]
		        || memcmp(data + pos, needle, len - pos) != 0
		    )) {
			pos++;
		}
		if (cast_size_t(pos) < len) {
			memcpy(lookbehind, data + pos, len - pos);
			ctx->lookbehind_size = len - pos;
			SBMH_DEBUG2("[sbmh] adding %d trailing bytes to lookbehind -> (%s)\n",
				int(len - pos),
				std::string((const char *) lookbehind, ctx->lookbehind_size).c_str());
		}
	}

	/* Everything until pos is guaranteed not to contain needle data. */
	if (pos > 0 && ctx->callback != NULL) {
		size_t m = cast_size_t(pos) < len ? cast_size_t(pos) : len;
		ctx->callback(ctx, data, m);
	}

	return len;
}

// } // namespace Passenger

#endif /* _STREAM_BOYER_MOORE_HORSPOOL_ */
6,401
892
<reponame>github/advisory-database<filename>advisories/unreviewed/2022/05/GHSA-vr23-5gw8-f36f/GHSA-vr23-5gw8-f36f.json<gh_stars>100-1000 { "schema_version": "1.2.0", "id": "GHSA-vr23-5gw8-f36f", "modified": "2022-05-13T01:04:36Z", "published": "2022-05-13T01:04:36Z", "aliases": [ "CVE-2014-7959" ], "details": "SQL injection vulnerability in admin/htaccess/bpsunlock.php in the BulletProof Security plugin before .51.1 for WordPress allows remote authenticated users to execute arbitrary SQL commands via the tableprefix parameter.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-7959" }, { "type": "WEB", "url": "https://wordpress.org/plugins/bulletproof-security/changelog/" }, { "type": "WEB", "url": "http://packetstormsecurity.com/files/128977/WordPress-Bulletproof-Security-.51-XSS-SQL-Injection-SSRF.html" }, { "type": "WEB", "url": "http://www.securityfocus.com/archive/1/533904/100/0/threaded" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/70918" } ], "database_specific": { "cwe_ids": [ "CWE-89" ], "severity": "MODERATE", "github_reviewed": false } }
590
1,765
<gh_stars>1000+ package io.envoyproxy.pgv; import io.envoyproxy.pvg.cases.Enum; import org.junit.Test; import static org.assertj.core.api.Assertions.assertThatThrownBy; public class RequiredValidationTest { @Test public void requiredWorks() throws ValidationException { // Present RequiredValidation.required("x", Enum.Outer.getDefaultInstance()); // Absent assertThatThrownBy(() -> RequiredValidation.required("x", null)).isInstanceOf(ValidationException.class); } }
186
549
#include "bitmap.h"

#include <stdio.h>
#include <string.h>

// Bit-per-entry allocation bitmap backed by caller-provided storage.
// All entries start out marked USED (0xff); callers are expected to release
// the usable ranges via set_free() before allocating.
// NOTE(review): only size / 8 bytes are initialized, so a 'size' that is not
// a multiple of 8 leaves the trailing partial byte uninitialized — confirm
// callers always pass a multiple of 8.
bitmap::bitmap(uint8_t *data, size_t size) : bitmap_size(size)
{
    memset(data, 0xff, size / 8);
    buffer = data;
    last_free = 0;
}

// Mark entry 'idx' as used (true) or free (false).
// Valid indices are [0, bitmap_size); out-of-range indices are rejected.
// (Fixed: the bound check was 'idx > bitmap_size', which let idx == bitmap_size
// touch a byte past the initialized region. Also fixed printf specifiers:
// size_t must be printed with %zu, not %i.)
void bitmap::set(size_t idx, bool value)
{
    if (idx >= bitmap_size)
    {
        printf("[error] bitmap: trying to set out of bound of the bitmap: %zu> %zu\n", idx, bitmap_size);
        plug_error_quit(0);
        return;
    }

    size_t bit = idx % 8;
    size_t byte = idx / 8;

    if (value)
    {
        buffer[byte] |= (1 << (bit));
    }
    else
    {
        buffer[byte] &= ~(1 << (bit));
    }
}

// Return whether entry 'idx' is used. Out-of-range indices are rejected
// (same off-by-one and format-specifier fixes as set()).
bool bitmap::get(size_t idx) const
{
    if (idx >= bitmap_size)
    {
        printf("[error] bitmap: trying to get out of bound of the bitmap: %zu> %zu\n", idx, bitmap_size);
        plug_error_quit(0);
        return false;
    }

    size_t bit = idx % 8;
    size_t byte = idx / 8;

    return (buffer[byte] & (1 << (bit)));
}

// Find a run of 'length' consecutive free entries, scanning from last_free.
// Returns the first index of the run, or 0 on failure (index 0 is reserved as
// the error sentinel and is deliberately skipped by the scan).
// If the scan from last_free fails, it wraps once and retries from the start.
size_t bitmap::find_free(size_t length)
{
    size_t current_founded_length = 0;
    size_t current_founded_idx = 0;

    for (size_t i = last_free; i < bitmap_size; i++)
    {
        if (i == 0)
        {
            // Index 0 doubles as the "no entry" sentinel; never hand it out.
            continue;
        }
        if (!get(i))
        {
            if (current_founded_length == 0)
            {
                current_founded_idx = i;
            }
            current_founded_length++;
        }
        else
        {
            // Run interrupted by a used entry; start over.
            current_founded_length = 0;
            current_founded_idx = 0;
        }
        if (current_founded_length == length)
        {
            // Remember where this run ends so the next search resumes there.
            last_free = current_founded_idx + current_founded_length;
            return current_founded_idx;
        }
    }

    if (last_free == 0)
    {
        // Already scanned from the beginning: the bitmap is exhausted.
        printf("[error] bitmap: no free bitmap entry\n");
        plug_error_quit(0);
        return 0;
    }
    else
    {
        // Wrap around and retry once from the start of the bitmap.
        last_free = 0;
        return find_free(length);
    }
}

// Allocate 'length' consecutive entries; returns the first index or 0 on failure.
size_t bitmap::alloc(size_t length)
{
    size_t v = find_free(length);

    if (v == 0)
    {
        printf("[error] bitmap: can't allocate block count %zu\n", length);
        return 0;
    }

    if (set_used(v, length) == 0)
    {
        printf("[error] bitmap: can't allocate block count %zu\n", length);
        return 0;
    }

    return v;
}

// Release 'length' entries starting at 'idx'; double-frees are fatal.
// Delegates to the forced variant with forced == false (identical behavior,
// removes the previously duplicated loop).
size_t bitmap::set_free(size_t idx, size_t length)
{
    return set_free(idx, length, false);
}

// Release 'length' entries starting at 'idx'. When 'forced' is false a
// double-free is fatal; when true, already-free entries are tolerated.
size_t bitmap::set_free(size_t idx, size_t length, bool forced)
{
    for (size_t i = 0; i < length; i++)
    {
        if (!(forced) && get(idx + i) == false)
        {
            printf("freeing already free block: %zx while freeing from %zx - %zx (size: %zx)\n", idx + i, idx, idx + length, length);
            plug_error_quit(0);
            while (true)
            {
            };
        }
        set(idx + i, false);
    }
    // Freed space is a good place to resume the next free-run search.
    last_free = idx;
    return 1;
}

// Mark 'length' entries starting at 'idx' as used; double-allocation is fatal.
size_t bitmap::set_used(size_t idx, size_t length)
{
    for (size_t i = 0; i < length; i++)
    {
        if (get(idx + i) == true)
        {
            printf("setting already set block: %zx while setting from %zx - %zx (size: %zx) \n", idx + i, idx, idx + length, length);
            plug_error_quit(0);
        }
        set(idx + i, true);
    }
    return 1;
}
1,841
471
/*
 * Copyright 2012 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.greghaines.jesque.worker;

/**
 * JobExecutor is an object that executes jobs.
 * <p>
 * Implementations create jobs via the configured {@link JobFactory}, report
 * failures to the configured {@link ExceptionHandler}, and move through the
 * lifecycle described by {@link State}.
 */
public interface JobExecutor {

    /**
     * States of the job executor. The expected lifecycle is
     * {@code NEW -> RUNNING -> SHUTDOWN} (or {@code SHUTDOWN_IMMEDIATE}).
     */
    public enum State {
        /**
         * The JobExecutor has not started running.
         */
        NEW,
        /**
         * The JobExecutor is currently running.
         */
        RUNNING,
        /**
         * The JobExecutor has shutdown.
         */
        SHUTDOWN,
        /**
         * The JobExecutor has shutdown, interrupting running jobs.
         */
        SHUTDOWN_IMMEDIATE;
    }

    /**
     * The job factory used to materialize jobs before execution.
     * @return the job factory
     */
    JobFactory getJobFactory();

    /**
     * The current exception handler.
     * @return the current exception handler
     */
    ExceptionHandler getExceptionHandler();

    /**
     * Set this JobExecutor's exception handler to the given handler.
     * @param exceptionHandler the exception handler to use
     */
    void setExceptionHandler(ExceptionHandler exceptionHandler);

    /**
     * Shutdown this JobExecutor.
     * @param now if true, an effort will be made to stop any job in progress
     */
    void end(boolean now);

    /**
     * Returns whether this JobExecutor is either shutdown or in the process of shutting down.
     * @return true if this JobExecutor is either shutdown or in the process of shutting down
     */
    boolean isShutdown();

    /**
     * Returns whether this JobExecutor is currently processing a job.
     * @return true if this JobExecutor is currently processing a job
     */
    boolean isProcessingJob();

    /**
     * Wait for this JobExecutor to complete. A timeout of 0 means to wait forever.
     * This method will only return after a thread has called {@link #end(boolean)}.
     * @param millis the time to wait in milliseconds
     * @throws InterruptedException if any thread has interrupted the current thread
     */
    void join(long millis) throws InterruptedException;
}
884
32,544
package com.baeldung.msf4j.msf4japi;

import java.util.ArrayList;
import java.util.List;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;

import com.google.gson.Gson;

/**
 * JAX-RS resource exposing CRUD operations over an in-memory menu of
 * {@link Meal}s, addressed by list index.
 * <p>
 * Fix: index-based endpoints previously called {@code meals.get(id)} without
 * validation, so an out-of-range id raised {@code IndexOutOfBoundsException}
 * (surfacing as HTTP 500). They now return 404 Not Found instead.
 */
@Path("/menu")
public class MenuService {

    // In-memory store seeded with a single meal.
    // NOTE(review): plain ArrayList is not thread-safe and JAX-RS resources
    // may be invoked concurrently — confirm the deployment is single-threaded
    // or switch to a concurrent collection.
    private List<Meal> meals = new ArrayList<Meal>();

    public MenuService() {
        meals.add(new Meal("Java beans", 42.0f));
    }

    /** Returns whether {@code id} addresses an existing meal (a valid list index). */
    private boolean exists(int id) {
        return id >= 0 && id < meals.size();
    }

    /** Shorthand for an empty 404 Not Found response. */
    private static Response notFound() {
        return Response.status(Response.Status.NOT_FOUND).build();
    }

    /** Lists all meals. */
    @GET
    @Path("/")
    @Produces({ "application/json" })
    public Response index() {
        return Response.ok()
            .entity(meals)
            .build();
    }

    /** Returns the meal at index {@code id}, or 404 when the index is out of range. */
    @GET
    @Path("/{id}")
    @Produces({ "application/json" })
    public Response meal(@PathParam("id") int id) {
        if (!exists(id)) {
            return notFound();
        }
        return Response.ok()
            .entity(meals.get(id))
            .build();
    }

    /** Appends a new meal and echoes it back. */
    @POST
    @Path("/")
    @Consumes("application/json")
    @Produces({ "application/json" })
    public Response create(Meal meal) {
        meals.add(meal);
        return Response.ok()
            .entity(meal)
            .build();
    }

    /** Replaces the meal at index {@code id}; 404 when the index is out of range. */
    @PUT
    @Path("/{id}")
    @Consumes("application/json")
    @Produces({ "application/json" })
    public Response update(@PathParam("id") int id, Meal meal) {
        if (!exists(id)) {
            return notFound();
        }
        meals.set(id, meal);
        return Response.ok()
            .entity(meal)
            .build();
    }

    /** Removes and returns the meal at index {@code id}; 404 when out of range. */
    @DELETE
    @Path("/{id}")
    @Produces({ "application/json" })
    public Response delete(@PathParam("id") int id) {
        if (!exists(id)) {
            return notFound();
        }
        // List.remove(int) returns the removed element; no separate get() needed.
        Meal meal = meals.remove(id);
        return Response.ok()
            .entity(meal)
            .build();
    }
}
839
1,428
#include<stdio.h>

/* Computes x! (factorial). Returns 1 for x <= 1, including negative x, which
 * previously caused unbounded recursion. The result overflows int for x > 12
 * on platforms with 32-bit int. */
int fac(int x);

int main()
{
    int n, m;

    printf("enter a number");
    /* scanf can fail on non-numeric input; bail out rather than use garbage. */
    if (scanf("%d", &n) != 1)
    {
        return 1;
    }
    m = fac(n);
    printf("%d", m);
    return 0;
}

int fac(int x)
{
    /* Iterative product: avoids deep recursion and terminates for any input
     * (the loop body simply never runs when x < 2). */
    int z = 1;
    int i;
    for (i = 2; i <= x; i++)
    {
        z = z * i;
    }
    return z;
}
171
1,702
package com.example.session;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Spring Boot entry point for the JDBC-backed HTTP session sample.
 */
@SpringBootApplication
public class SessionJdbcApplication {

    /**
     * Boots the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        // SpringApplication.run declares no checked exceptions, so the former
        // "throws Throwable" clause was superfluous and has been removed.
        SpringApplication.run(SessionJdbcApplication.class, args);
    }
}
92
348
{"nom":"Tharaux","circ":"4ème circonscription","dpt":"Gard","inscrits":68,"abs":19,"votants":49,"blancs":16,"nuls":2,"exp":31,"res":[{"nuance":"REM","nom":"Mme <NAME>","voix":28},{"nuance":"FN","nom":"Mme <NAME>","voix":3}]}
92
500
#! /usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Estimate joint feature vector of the speaker pair using GMM

Pipeline: read per-utterance mcep/npow features for both speakers, iteratively
DTW-align them (refining the alignment with an intermediate GMM between
iterations), build joint static+delta vectors, align codeap with the resulting
time-warping functions, and save everything to HDF5 under the pair directory.
"""

import argparse
import os
import sys

from sprocket.model.GMM import GMMConvertor, GMMTrainer
from sprocket.util import HDF5, estimate_twf, melcd
from sprocket.util import static_delta, align_data

from yml import SpeakerYML, PairYML
from misc import read_feats, extsddata, transform_jnt


def get_alignment(odata, onpow, tdata, tnpow,
                  opow=-20, tpow=-20, sd=0, cvdata=None,
                  given_twf=None, otflag=None, distance='melcd'):
    """Get alignment between original and target

    Parameters
    ----------
    odata : array, shape (`T`, `dim`)
        Acoustic feature vector of original
    onpow : array, shape (`T`)
        Normalized power vector of original
    tdata : array, shape (`T`, `dim`)
        Acoustic feature vector of target
    tnpow : array, shape (`T`)
        Normalized power vector of target
    opow : float, optional,
        Power threshold of original
        Default set to -20
    tpow : float, optional,
        Power threshold of target
        Default set to -20
    sd : int , optional,
        Start dimension to be used for alignment
        Default set to 0
    cvdata : array, shape (`T`, `dim`), optional,
        Converted original data
        Default set to None
    given_twf : array, shape (`T_new`, `dim * 2`), optional,
        Alignment given twf
        Default set to None
    otflag : str, optional
        Alignment into the length of specification
        'org' : alignment into original length
        'tar' : alignment into target length
        Default set to None
    distance : str,
        Distance function to be used
        Default set to 'melcd'

    Returns
    -------
    jdata : array, shape (`T_new` `dim * 2`)
        Joint static and delta feature vector
    twf : array, shape (`T_new` `dim * 2`)
        Time warping function
    mcd : float,
        Mel-cepstrum distortion between arrays
    """
    # Append delta features and drop silent frames below the power thresholds.
    oexdata = extsddata(odata[:, sd:], onpow,
                        power_threshold=opow)
    texdata = extsddata(tdata[:, sd:], tnpow,
                        power_threshold=tpow)

    # When converted data is supplied (iterations > 1), the DTW path is
    # estimated on converted-vs-target instead of original-vs-target.
    if cvdata is None:
        align_odata = oexdata
    else:
        cvexdata = extsddata(cvdata, onpow,
                             power_threshold=opow)
        align_odata = cvexdata

    # Reuse a caller-provided warping function when available (e.g. to align
    # codeap with the path computed on mcep).
    if given_twf is None:
        twf = estimate_twf(align_odata, texdata,
                           distance=distance, otflag=otflag)
    else:
        twf = given_twf

    # Note: the joint vector is always built from the *original* features,
    # even when the path was estimated on converted data.
    jdata = align_data(oexdata, texdata, twf)
    mcd = melcd(align_odata[twf[0]], texdata[twf[1]])

    return jdata, twf, mcd


def align_feature_vectors(odata, onpows, tdata, tnpows, pconf,
                          opow=-100, tpow=-100, itnum=3, sd=0,
                          given_twfs=None, otflag=None):
    """Get alignment to create joint feature vector

    Parameters
    ----------
    odata : list, (`num_files`)
        List of original feature vectors
    onpows : list , (`num_files`)
        List of original npows
    tdata : list, (`num_files`)
        List of target feature vectors
    tnpows : list , (`num_files`)
        List of target npows
    pconf : PairYML,
        Pair-dependent configuration (GMM hyper-parameters)
    opow : float, optional,
        Power threshold of original
        Default set to -100
    tpow : float, optional,
        Power threshold of target
        Default set to -100
    itnum : int , optional,
        The number of iteration
        Default set to 3
    sd : int , optional,
        Start dimension of feature vector to be used for alignment
        Default set to 0
    given_twfs : array, shape (`T_new` `dim * 2`)
        Use given alignment while 1st iteration
        Default set to None
    otflag : str, optional
        Alignment into the length of specification
        'org' : alignment into original length
        'tar' : alignment into target length
        Default set to None

    Returns
    -------
    jfvs : list,
        List of joint feature vectors
    twfs : list,
        List of time warping functions
    """
    num_files = len(odata)
    cvgmm, cvdata = None, None
    for it in range(1, itnum + 1):
        print('{}-th joint feature extraction starts.'.format(it))
        twfs, jfvs = [], []
        for i in range(num_files):
            if it == 1 and given_twfs is not None:
                # First pass may reuse externally supplied warping functions.
                gtwf = given_twfs[i]
            else:
                gtwf = None
            if it > 1:
                # From the 2nd pass on, refine alignment using conversion by
                # the GMM trained on the previous pass's joint vectors.
                cvdata = cvgmm.convert(static_delta(odata[i][:, sd:]),
                                       cvtype=pconf.GMM_mcep_cvtype)
            jdata, twf, mcd = get_alignment(odata[i],
                                            onpows[i],
                                            tdata[i],
                                            tnpows[i],
                                            opow=opow,
                                            tpow=tpow,
                                            sd=sd,
                                            cvdata=cvdata,
                                            given_twf=gtwf,
                                            otflag=otflag)
            twfs.append(twf)
            jfvs.append(jdata)
            print('distortion [dB] for {}-th file: {}'.format(i + 1, mcd))
        jnt_data = transform_jnt(jfvs)

        if it != itnum:
            # train GMM, if not final iteration
            datagmm = GMMTrainer(n_mix=pconf.GMM_mcep_n_mix,
                                 n_iter=pconf.GMM_mcep_n_iter,
                                 covtype=pconf.GMM_mcep_covtype)
            datagmm.train(jnt_data)
            cvgmm = GMMConvertor(n_mix=pconf.GMM_mcep_n_mix,
                                 covtype=pconf.GMM_mcep_covtype)
            cvgmm.open_from_param(datagmm.param)
        it += 1  # NOTE(review): no-op — 'it' is reassigned by the for loop on the next pass; safe to remove

    return jfvs, twfs


def main(*argv):
    """CLI entry point: estimate and save joint features for a speaker pair."""
    argv = argv if argv else sys.argv[1:]
    # Options for python
    description = 'estimate joint feature of source and target speakers'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('org_yml', type=str,
                        help='Yml file of the original speaker')
    parser.add_argument('tar_yml', type=str,
                        help='Yml file of the target speaker')
    parser.add_argument('pair_yml', type=str,
                        help='Yml file of the speaker pair')
    parser.add_argument('org_list_file', type=str,
                        help='List file of original speaker')
    parser.add_argument('tar_list_file', type=str,
                        help='List file of target speaker')
    parser.add_argument('pair_dir', type=str,
                        help='Directory path of h5 files')
    args = parser.parse_args(argv)

    # read speaker-dependent yml files
    oconf = SpeakerYML(args.org_yml)
    tconf = SpeakerYML(args.tar_yml)

    # read pair-dependent yml file
    pconf = PairYML(args.pair_yml)

    # read source and target features from HDF file
    h5_dir = os.path.join(args.pair_dir, 'h5')
    org_mceps = read_feats(args.org_list_file, h5_dir, ext='mcep')
    org_npows = read_feats(args.org_list_file, h5_dir, ext='npow')
    tar_mceps = read_feats(args.tar_list_file, h5_dir, ext='mcep')
    tar_npows = read_feats(args.tar_list_file, h5_dir, ext='npow')
    # Both speakers must provide the same number of parallel utterances.
    assert len(org_mceps) == len(tar_mceps)
    assert len(org_npows) == len(tar_npows)
    assert len(org_mceps) == len(org_npows)

    # dtw between original and target w/o 0th and silence
    print('## Alignment mcep w/o 0-th and silence ##')
    jmceps, twfs = align_feature_vectors(org_mceps,
                                         org_npows,
                                         tar_mceps,
                                         tar_npows,
                                         pconf,
                                         opow=oconf.power_threshold,
                                         tpow=tconf.power_threshold,
                                         itnum=pconf.jnt_n_iter,
                                         sd=1,
                                         )
    jnt_mcep = transform_jnt(jmceps)

    # create joint feature for codeap using given twfs
    print('## Alignment codeap using given twf ##')
    org_codeaps = read_feats(args.org_list_file, h5_dir, ext='codeap')
    tar_codeaps = read_feats(args.tar_list_file, h5_dir, ext='codeap')
    jcodeaps = []
    for i in range(len(org_codeaps)):
        # extract codeap joint feature vector
        jcodeap, _, _ = get_alignment(org_codeaps[i],
                                      org_npows[i],
                                      tar_codeaps[i],
                                      tar_npows[i],
                                      opow=oconf.power_threshold,
                                      tpow=tconf.power_threshold,
                                      given_twf=twfs[i])
        jcodeaps.append(jcodeap)
    jnt_codeap = transform_jnt(jcodeaps)

    # save joint feature vectors
    jnt_dir = os.path.join(args.pair_dir, 'jnt')
    os.makedirs(jnt_dir, exist_ok=True)
    jntpath = os.path.join(jnt_dir, 'it' + str(pconf.jnt_n_iter) + '_jnt.h5')
    jnth5 = HDF5(jntpath, mode='a')
    jnth5.save(jnt_mcep, ext='mcep')
    jnth5.save(jnt_codeap, ext='codeap')
    jnth5.close()

    # save twfs
    twf_dir = os.path.join(args.pair_dir, 'twf')
    os.makedirs(twf_dir, exist_ok=True)
    with open(args.org_list_file, 'r') as fp:
        for line, twf in zip(fp, twfs):
            f = os.path.basename(line.rstrip())
            twfpath = os.path.join(
                twf_dir, 'it' + str(pconf.jnt_n_iter) + '_' + f + '.h5')
            twfh5 = HDF5(twfpath, mode='a')
            twfh5.save(twf, ext='twf')
            twfh5.close()


if __name__ == '__main__':
    main()
5,163
1,233
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Generic class for determining the desired host operating system."""

from absl import flags

from glazier.lib.spec import flags as flag_spec

# Maps each valid --glazier_spec flag value to the module implementing it.
SPEC_OPTS = {
    'flag': flag_spec,
}

FLAGS = flags.FLAGS
flags.DEFINE_enum(
    'glazier_spec', 'flag', list(SPEC_OPTS.keys()),
    ('Which host specification module to use for determining host features '
     'like Hostname and OS.'))


class UnknownSpec(Exception):
  """Raised when --glazier_spec names a module not present in SPEC_OPTS."""


def GetModule():
  """Returns the host-spec module selected by the --glazier_spec flag.

  Returns:
    The module registered in SPEC_OPTS for the current flag value.

  Raises:
    UnknownSpec: if the flag value has no entry in SPEC_OPTS.
  """
  try:
    return SPEC_OPTS[FLAGS.glazier_spec]
  except KeyError as e:
    # Chain the original KeyError so the traceback shows the failed lookup.
    raise UnknownSpec(FLAGS.glazier_spec) from e
342
409
<reponame>JensUweUlrich/seqan // ========================================================================== // SeqAn - The Library for Sequence Analysis // ========================================================================== // Copyright (c) 2006-2018, <NAME>, FU Berlin // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of Knut Reinert or the FU Berlin nor the names of // its contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL KNUT REINERT OR THE FU BERLIN BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY // OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH // DAMAGE. 
//
// ==========================================================================
// Author: <NAME> <[email protected]>
// Author: <NAME> <[email protected]>
// ==========================================================================
// Reading routines for VCF: header parsing ("##key=value" lines and the
// "#CHROM" sample-name line) and per-record parsing of data lines.

#ifndef SEQAN_INCLUDE_SEQAN_VCF_READ_VCF_H_
#define SEQAN_INCLUDE_SEQAN_VCF_READ_VCF_H_

namespace seqan {

// ============================================================================
// Tags, Classes, Enums
// ============================================================================

// ----------------------------------------------------------------------------
// Tag Vcf
// ----------------------------------------------------------------------------

/*!
 * @tag FileFormats#Vcf
 * @headerfile <seqan/vcf_io.h>
 * @brief Variant calling format file.
 *
 * @signature typedef Tag<Vcf_> Vcf;
 */

struct Vcf_;
typedef Tag<Vcf_> Vcf;

// ============================================================================
// Functions
// ============================================================================

// ----------------------------------------------------------------------------
// Function readRecord() [VcfHeader]
// ----------------------------------------------------------------------------

// Extracts the ID value from a "##contig=<...>" header value (e.g.
// "<ID=chr1,length=...>") and registers it in the context's contig-name
// cache so records can later resolve their rID.
template <typename TNameStore, typename TNameStoreCache, typename TStorageSpec, typename TString>
inline void
_readVcfContig(VcfIOContext<TNameStore, TNameStoreCache, TStorageSpec> & context,
               TString const & headerValue)
{
    typedef OrFunctor<EqualsChar<','>, EqualsChar<'>'> > IsCommaOrGt;
    typedef typename DirectionIterator<TString const, Input>::Type TIter;

    TIter headerIter = directionIterator(headerValue, Input());
    CharString &buffer = context.buffer;
    skipOne(headerIter, EqualsChar<'<'>());

    // Seek contig ID key.
    while (!atEnd(headerIter))
    {
        clear(buffer);
        readUntil(buffer, headerIter, EqualsChar<'='>());
        if (buffer == "ID")
            break;
        // Not the ID key: skip this key's value up to the next ',' or '>'.
        skipUntil(headerIter, IsCommaOrGt());
        skipOne(headerIter);
    }
    if (atEnd(headerIter))
        SEQAN_THROW(ParseError("Contig ID key not found in header."));

    // Read contig ID value.
    clear(buffer);
    skipOne(headerIter, EqualsChar<'='>());
    readUntil(buffer, headerIter, IsCommaOrGt());
    if (empty(buffer))
        SEQAN_THROW(ParseError("Contig ID value not found in header."));
    appendName(contigNamesCache(context), buffer);
}

// Reads the complete VCF header: every leading line starting with '#'.
// "##key=value" lines become VcfHeaderRecord entries (contig lines also feed
// the contig-name cache); the final "#CHROM..." line yields the sample names.
template <typename TForwardIter, typename TNameStore, typename TNameStoreCache, typename TStorageSpec>
inline void
readHeader(VcfHeader & header,
           VcfIOContext<TNameStore, TNameStoreCache, TStorageSpec> & context,
           TForwardIter & iter,
           Vcf const & /*tag*/)
{
    clear(header);
    CharString &buffer = context.buffer;
    VcfHeaderRecord record;

    while (!atEnd(iter) && value(iter) == '#')
    {
        skipOne(iter);
        clear(buffer);
        if (value(iter) == '#')
        {
            // Is header line.
            skipOne(iter);
            clear(record);

            // Read header key.
            readUntil(record.key, iter, OrFunctor<EqualsChar<'='>, AssertFunctor<NotFunctor<IsNewline>, ParseError, Vcf> >());
            // Skip '='.
            skipOne(iter);
            // Read header value.
            readLine(record.value, iter);
            appendValue(header, record);

            // Parse out name if headerRecord is a contig field.
            if (record.key == "contig")
                _readVcfContig(context, record.value);
        }
        else
        {
            // Is line "#CHROM\t...".
            readLine(buffer, iter);
            if (!startsWith(buffer, "CHROM"))
                SEQAN_THROW(ParseError("Invalid line with samples."));

            // Split line, get sample names.
            StringSet<CharString> fields;
            strSplit(fields, buffer, IsTab());
            if (length(fields) < 8u)
                SEQAN_THROW(ParseError("Not enough fields."));

            // Get sample names.  Column 8 may be the literal "FORMAT"
            // heading rather than a sample; skip it in that case.
            for (unsigned i = 8; i < length(fields); ++i)
            {
                if(i == 8 && fields[i] == "FORMAT")
                    continue;
                appendName(sampleNamesCache(context), fields[i]);
            }
        }
    }
}

// ----------------------------------------------------------------------------
// Function readRecord() [VcfRecord]
// ----------------------------------------------------------------------------

// Read record, updating list of known sequences if new one occurs.
template <typename TForwardIter, typename TNameStore, typename TNameStoreCache, typename TStorageSpec>
inline void
readRecord(VcfRecord & record,
           VcfIOContext<TNameStore, TNameStoreCache, TStorageSpec> & context,
           TForwardIter & iter,
           Vcf const & /*tag*/)
{
    clear(record);
    CharString &buffer = context.buffer;

    // get the next line on the buffer.
    clear(buffer);
    readLine(buffer, iter);

    // Split line, get field and sample values.
    // The first 8(9) columns are fields and the rest are values for samples
    //"#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT")
    StringSet<CharString> field_values;
    strSplit(field_values, buffer, IsTab(), false);

    unsigned numSamples = length(sampleNames(context));
    if (length(field_values) < 8u + numSamples)
        SEQAN_THROW(ParseError("Not enough values in a line."));

    record.rID = nameToId(contigNamesCache(context), field_values[0]);
    record.beginPos = lexicalCast<int32_t>(field_values[1]) - 1; // Translate from 1-based to 0-based.
    record.id = field_values[2];
    record.ref = field_values[3];
    record.alt = field_values[4];

    // "." means the quality value is missing.
    if (field_values[5] == ".")
        record.qual = VcfRecord::MISSING_QUAL();
    else
        lexicalCastWithException(record.qual, field_values[5]);

    record.filter = field_values[6];
    record.info = field_values[7];

    //check if we have a spare column for FORMAT
    unsigned samplesColStart = 8;
    if (length(field_values) > 8u + numSamples) // we have extra column for FORMAT
    {
        record.format = field_values[8];
        samplesColStart = 9;
    }

    // Get sample name values .
    for (unsigned i = samplesColStart; i < length(field_values); ++i)
    {
        appendValue(record.genotypeInfos, field_values[i]);
    }
}

}  // namespace seqan

#endif  // #ifndef SEQAN_INCLUDE_SEQAN_VCF_READ_VCF_H_
3,053
21,684
<filename>src/rpc/directory/map_read_manager.cc
// Copyright 2010-2014 RethinkDB, all rights reserved.
#include "rpc/directory/map_read_manager.tcc"

// Explicit instantiations of directory_map_read_manager_t for every
// key/value pair used in the codebase, so the template definitions in the
// .tcc are compiled once here instead of in every including TU.

/* for unit tests */
template class directory_map_read_manager_t<int, int>;

#include "clustering/table_manager/table_metadata.hpp"
template class directory_map_read_manager_t<
    namespace_id_t, table_manager_bcard_t>;

#include "clustering/query_routing/metadata.hpp"
template class directory_map_read_manager_t<
    std::pair<namespace_id_t, uuid_u>, table_query_bcard_t>;

template class directory_map_read_manager_t<
    server_id_t, empty_value_t>;
222
12,278
// Copyright (C) 2009-2012 <NAME> // Distributed under the Boost Software License, Version 1.0 // (see accompanying file LICENSE_1_0.txt or a copy at // http://www.boost.org/LICENSE_1_0.txt) // Home at http://www.boost.org/libs/local_function #include <boost/config.hpp> #ifdef BOOST_NO_CXX11_LAMBDAS # error "requires lambda functions" #else #include <cassert> int main(void) { //[const_block_cxx11_lambda int x = 1, y = 2; const decltype(x)& const_x = x; // Constant so cannot be modified const decltype(y)& const_y = y; // and reference so no copy. [&const_x, &const_y]() { // Lambda functions (C++11 only). assert(const_x = const_y); // Unfortunately, `const_` names. }(); //] return 0; } #endif // LAMBDAS
307
677
/* * Copyright (C) 2014 Apple Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

#pragma once

#if ENABLE(FTL_JIT)

#include "DFGNode.h"
#include "DataFormat.h"
#include "FTLAbbreviatedTypes.h"
#include "FTLRecoveryOpcode.h"

namespace JSC { namespace FTL {

// Describes how to recover the value of a DFG node that is not directly
// available: the value is recomputed as "left <opcode> right" and holds the
// given data format.  Plain immutable value type; dump() is defined in the
// corresponding .cpp.
class AvailableRecovery {
public:
    // Default-constructs an empty recovery (no node, DataFormatNone).
    AvailableRecovery()
        : m_node(0)
        , m_format(DataFormatNone)
        , m_opcode(AddRecovery)
        , m_left(0)
        , m_right(0)
    {
    }
    
    AvailableRecovery(DFG::Node* node, RecoveryOpcode opcode, LValue left, LValue right, DataFormat format)
        : m_node(node)
        , m_format(format)
        , m_opcode(opcode)
        , m_left(left)
        , m_right(right)
    {
    }
    
    // Read-only accessors for the recovery description.
    DFG::Node* node() const { return m_node; }
    DataFormat format() const { return m_format; }
    RecoveryOpcode opcode() const { return m_opcode; }
    LValue left() const { return m_left; }
    LValue right() const { return m_right; }
    
    void dump(PrintStream&) const;
    
private:
    DFG::Node* m_node;
    DataFormat m_format;
    RecoveryOpcode m_opcode;
    LValue m_left;
    LValue m_right;
};

} } // namespace JSC::FTL

#endif // ENABLE(FTL_JIT)
890
76,518
<gh_stars>1000+
#pragma once

#include <vector>
#include <string>
#include <fstream>
#include <filesystem>
#include <unordered_map>

#include <Windows.h>

// Report generators: each writes its output into the given temporary
// directory.  (NOTE(review): exact report contents inferred from names —
// confirm against the implementations.)

// Presumably dumps relevant registry state into tmpDir.
void ReportRegistry(const std::filesystem::path& tmpDir);

// Presumably dumps compatibility-tab information into tmpDir.
void ReportCompatibilityTab(const std::filesystem::path& tmpDir);
94
1,109
<gh_stars>1000+ package org.cboard.services.job; import org.cboard.pojo.DashboardJob; import org.quartz.Job; import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException; import org.quartz.SchedulerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; /** * Created by yfyuan on 2017/2/20. */ public class MailJobExecutor implements Job { private Logger LOG = LoggerFactory.getLogger(this.getClass()); @Override public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException { try { JobService jobService = ((ApplicationContext) jobExecutionContext.getScheduler().getContext().get("applicationContext")).getBean(JobService.class); jobService.sendMail((DashboardJob) jobExecutionContext.getMergedJobDataMap().get("job")); } catch (SchedulerException e) { LOG.error("", e); } } }
347
310
package com.ilscipio.scipio.cms.template;

import java.util.HashMap;
import java.util.Map;

import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilGenerics;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.service.LocalDispatcher;
import org.ofbiz.service.ServiceUtil;

import com.ilscipio.scipio.cms.CmsServiceUtil;
import com.ilscipio.scipio.cms.ServiceErrorFormatter;
import com.ilscipio.scipio.cms.ServiceErrorFormatter.FormattedError;

/**
 * CMS script-template service implementations (create/update, copy, read,
 * delete).  Each method follows the OFBiz service contract: it takes a
 * DispatchContext plus the service context map and returns a success, error
 * or failure result map.  All exceptions are funneled through
 * {@link ServiceErrorFormatter} so callers get a formatted service error.
 */
public abstract class CmsScriptTemplateServices {

    private static final Debug.OfbizLogger module = Debug.getOfbizLogger(java.lang.invoke.MethodHandles.lookup().lookupClass());
    private static final ServiceErrorFormatter errorFmt = CmsServiceUtil.getErrorFormatter().specialize().setDefaultLogMsgGeneral("Script Template Error").build();

    protected CmsScriptTemplateServices() {
    }

    /**
     * Creates a script template when no scriptTemplateId is given, otherwise
     * updates the existing one.  Returns the (new or existing) id under
     * "scriptTemplateId".
     */
    public static Map<String, Object> createUpdateScriptTemplate(DispatchContext dctx, Map<String, ?> context) {
        Map<String, Object> result = ServiceUtil.returnSuccess();
        Delegator delegator = dctx.getDelegator();
        LocalDispatcher dispatcher = dctx.getDispatcher();
        try {
            GenericValue userLogin = CmsServiceUtil.getUserLoginOrSystem(dctx, context);
            //Debug.logInfo("createUpdateAsset triggered",module);
            String scriptTemplateId = (String) context.get("scriptTemplateId");

            // Create empty template
            Map<String, Object> fields = ServiceUtil.setServiceFields(dispatcher, "cmsCreateUpdateScriptTemplate",
                    UtilGenerics.<String, Object> checkMap(context), userLogin, null, null);
            CmsScriptTemplate scriptTmpl = null;
            if (UtilValidate.isNotEmpty(scriptTemplateId)) {
                // Update path: the template must already exist.
                scriptTmpl = CmsScriptTemplate.getWorker().findByIdAlways(delegator, scriptTemplateId, false);
                // NOTE(review): "createdBy" is set on the UPDATE branch while
                // "lastUpdatedBy" is set on the CREATE branch below — these
                // look swapped; confirm against the entity's audit-field
                // semantics before changing.
                fields.put("createdBy", (String) userLogin.get("userLoginId"));
                // NOTE: 2016-12: IMPORTANT: EVERY TIME THERE IS A BODY OR LOCATION UPDATE OPERATION,
                // and standalone is not explicit false, we SWITCH standalone from N to Y,
                // so we NEVER delete user's changes automatically.
                if (UtilValidate.isEmpty((String) fields.get("standalone"))) {
                    fields.put("standalone", "Y");
                }
                scriptTmpl.update(fields);
            } else {
                fields.put("lastUpdatedBy", (String) userLogin.get("userLoginId"));
                scriptTmpl = new CmsScriptTemplate(delegator, fields);
            }
            scriptTmpl.store();
            result.put("scriptTemplateId", scriptTmpl.getId());
        } catch (Exception e) {
            FormattedError err = errorFmt.format(e, context);
            Debug.logError(err.getEx(), err.getLogMsg(), module);
            return err.returnError();
        }
        return result;
    }

    /**
     * Copies srcScriptTemplateId into a new template, recording the copying
     * party (when a userLogin is present) and applying the given
     * templateName/description overrides.
     */
    public static Map<String, Object> copyScriptTemplate(DispatchContext dctx, Map<String, ?> context) {
        Delegator delegator = dctx.getDelegator();
        Map<String, Object> copyArgs = new HashMap<>();
        GenericValue userLogin = (GenericValue) context.get("userLogin");
        if (userLogin != null) {
            copyArgs.put("copyCreatorId", userLogin.get("partyId"));
        }
        try {
            String srcScriptTemplateId = (String) context.get("srcScriptTemplateId");
            CmsScriptTemplate srcScriptTmpl = CmsScriptTemplate.getWorker().findByIdAlways(delegator, srcScriptTemplateId, false);
            CmsScriptTemplate scriptTmpl = srcScriptTmpl.copy(copyArgs);
            scriptTmpl.update(UtilMisc.toHashMapWithKeys(context, "templateName", "description"));
            scriptTmpl.store();
            Map<String, Object> result = ServiceUtil.returnSuccess();
            result.put("scriptTemplateId", scriptTmpl.getId());
            return result;
        } catch (Exception e) {
            FormattedError err = errorFmt.format(e, context);
            Debug.logError(err.getEx(), err.getLogMsg(), module);
            return err.returnError();
        }
    }

    /** Updates metadata fields of an existing script template from context. */
    public static Map<String, Object> updateScriptTemplateInfo(DispatchContext dctx, Map<String, ?> context) {
        Map<String, Object> result = ServiceUtil.returnSuccess();
        Delegator delegator = dctx.getDelegator();
        try {
            String scriptTemplateId = (String) context.get("scriptTemplateId");
            CmsScriptTemplate scriptTmp = CmsScriptTemplate.getWorker().findByIdAlways(delegator, scriptTemplateId, false);
            scriptTmp.update(context);
            scriptTmp.store();
        } catch (Exception e) {
            FormattedError err = errorFmt.format(e, context);
            Debug.logError(err.getEx(), err.getLogMsg(), module);
            return err.returnError();
        }
        return result;
    }

    /**
     * Looks up a script template by id and returns it both as the wrapper
     * object ("scriptTemplate") and its entity value ("scriptTemplateValue").
     * Unlike the other services this returns a FAILURE (not error) on
     * exception.
     */
    public static Map<String, Object> getScriptTemplate(DispatchContext dctx, Map<String, ?> context) {
        Map<String, Object> result = ServiceUtil.returnSuccess();
        Delegator delegator = dctx.getDelegator();
        try {
            String scriptTemplateId = (String) context.get("scriptTemplateId");
            CmsScriptTemplate scriptTemplate = null;
            scriptTemplate = CmsScriptTemplate.getWorker().findByIdAlways(delegator, scriptTemplateId, false);
            // FIXME: why is scriptTemplate GenericValue here?
            result.put("scriptTemplateValue", scriptTemplate.getEntity());
            result.put("scriptTemplate", scriptTemplate);
        } catch (Exception e) {
            FormattedError err = errorFmt.format(e, context);
            Debug.logError(err.getEx(), err.getLogMsg(), module);
            return err.returnFailure();
        }
        return result;
    }

    /** Unconditionally removes the script template with the given id. */
    public static Map<String, Object> deleteScriptTemplate(DispatchContext dctx, Map<String, ?> context) {
        Map<String, Object> result = ServiceUtil.returnSuccess();
        Delegator delegator = dctx.getDelegator();
        try {
            String scriptTemplateId = (String) context.get("scriptTemplateId");
            CmsScriptTemplate template = CmsScriptTemplate.getWorker().findByIdAlways(delegator, scriptTemplateId, false);
            template.remove();
        } catch (Exception e) {
            FormattedError err = errorFmt.format(e, context);
            Debug.logError(err.getEx(), err.getLogMsg(), module);
            return err.returnError();
        }
        return result;
    }

    /** Removes the script template only if nothing references it anymore. */
    public static Map<String, Object> deleteScriptTemplateIfOrphan(DispatchContext dctx, Map<String, ?> context) {
        Map<String, Object> result = ServiceUtil.returnSuccess();
        Delegator delegator = dctx.getDelegator();
        try {
            String scriptTemplateId = (String) context.get("scriptTemplateId");
            CmsScriptTemplate template = CmsScriptTemplate.getWorker().findByIdAlways(delegator, scriptTemplateId, false);
            template.removeIfOrphan();
        } catch (Exception e) {
            FormattedError err = errorFmt.format(e, context);
            Debug.logError(err.getEx(), err.getLogMsg(), module);
            return err.returnError();
        }
        return result;
    }
}
2,994
777
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "platform/graphics/paint/TransformPaintPropertyNode.h" namespace blink { TransformPaintPropertyNode* TransformPaintPropertyNode::root() { DEFINE_STATIC_REF(TransformPaintPropertyNode, root, (TransformPaintPropertyNode::create( nullptr, TransformationMatrix(), FloatPoint3D()))); return root; } String TransformPaintPropertyNode::toString() const { return String::format( "parent=%p transform=%s origin=%s flattensInheritedTransform=%s " "renderingContextId=%x directCompositingReasons=%s " "compositorElementId=(%d, " "%d)", m_parent.get(), m_matrix.toString().ascii().data(), m_origin.toString().ascii().data(), m_flattensInheritedTransform ? "yes" : "no", m_renderingContextId, compositingReasonsAsString(m_directCompositingReasons).ascii().data(), m_compositorElementId.primaryId, m_compositorElementId.secondaryId); } } // namespace blink
416
14,668
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_EXTENSIONS_API_IDLTEST_IDLTEST_API_H_ #define CHROME_BROWSER_EXTENSIONS_API_IDLTEST_IDLTEST_API_H_ #include "extensions/browser/extension_function.h" class IdltestSendArrayBufferFunction : public ExtensionFunction { public: DECLARE_EXTENSION_FUNCTION("idltest.sendArrayBuffer", IDLTEST_SENDARRAYBUFFER) protected: ~IdltestSendArrayBufferFunction() override {} ResponseAction Run() override; }; class IdltestSendArrayBufferViewFunction : public ExtensionFunction { public: DECLARE_EXTENSION_FUNCTION("idltest.sendArrayBufferView", IDLTEST_SENDARRAYBUFFERVIEW) protected: ~IdltestSendArrayBufferViewFunction() override {} ResponseAction Run() override; }; class IdltestGetArrayBufferFunction : public ExtensionFunction { public: DECLARE_EXTENSION_FUNCTION("idltest.getArrayBuffer", IDLTEST_GETARRAYBUFFER) protected: ~IdltestGetArrayBufferFunction() override {} ResponseAction Run() override; }; #endif // CHROME_BROWSER_EXTENSIONS_API_IDLTEST_IDLTEST_API_H_
414
5,846
<gh_stars>1000+ { "$schema": "https://raw.githubusercontent.com/aspnet/BuildTools/master/tools/korebuild.schema.json", "channel": "master" }
55
4,348
/***
 Copyright (c) 2013-2015 CommonsWare, LLC
 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License. You may obtain a copy
 of the License at http://www.apache.org/licenses/LICENSE-2.0. Unless required
 by applicable law or agreed to in writing, software distributed under the
 License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
 OF ANY KIND, either express or implied. See the License for the specific
 language governing permissions and limitations under the License.

 Covered in detail in the book _The Busy Coder's Guide to Android Development_
 https://commonsware.com/Android
 */

package com.commonsware.android.databind.basic;

import android.databinding.BindingAdapter;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.Toast;
import com.commonsware.android.databind.basic.databinding.RowBinding;
import com.squareup.picasso.Picasso;
import java.util.ArrayList;
import java.util.HashMap;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;

/**
 * Fragment that loads Stack Overflow "android" questions via Retrofit and
 * displays them in a RecyclerView using data binding.  The "refresh" action
 * re-fetches the already-shown questions by id and updates them in place.
 */
public class QuestionsFragment extends RecyclerViewFragment {
  // Questions in display order, plus an id->Question index used when
  // applying refreshed data.
  private ArrayList<Question> questions=new ArrayList<Question>();
  private HashMap<String, Question> questionMap=new HashMap<String, Question>();
  // Retrofit client for the Stack Exchange API.
  Retrofit retrofit=
    new Retrofit.Builder()
      .baseUrl("https://api.stackexchange.com")
      .addConverterFactory(GsonConverterFactory.create())
      .build();
  StackOverflowInterface so=
    retrofit.create(StackOverflowInterface.class);

  // Data-binding adapter: loads app:imageUrl into the ImageView with
  // Picasso, using the given placeholder and error drawables.
  @BindingAdapter({"app:imageUrl", "app:placeholder", "app:error"})
  public static void bindImageView(ImageView iv, String url,
                                   Drawable placeholder, Drawable error) {
    Picasso.with(iv.getContext())
      .load(url)
      .fit()
      .centerCrop()
      .placeholder(placeholder)
      .error(error)
      .into(iv);
  }

  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // Retained so the loaded question list survives configuration changes.
    setRetainInstance(true);
    setHasOptionsMenu(true);
  }

  @Override
  public void onViewCreated(View view, Bundle savedInstanceState) {
    super.onViewCreated(view, savedInstanceState);

    setLayoutManager(new LinearLayoutManager(getActivity()));

    // Kick off the initial (asynchronous) load of questions.
    so.questions("android").enqueue(new Callback<SOQuestions>() {
      @Override
      public void onResponse(Call<SOQuestions> call,
                             Response<SOQuestions> response) {
        for (Item item : response.body().items) {
          Question question=new Question(item);

          questions.add(question);
          questionMap.put(question.id, question);
        }

        setAdapter(new QuestionsAdapter(questions));
      }

      @Override
      public void onFailure(Call<SOQuestions> call, Throwable t) {
        onError(t);
      }
    });
  }

  @Override
  public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
    inflater.inflate(R.menu.actions, menu);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId()==R.id.refresh) {
      updateQuestions();
    }

    return(super.onOptionsItemSelected(item));
  }

  // Re-fetches all currently shown questions (by semicolon-joined id list)
  // and updates the matching Question objects in place.
  private void updateQuestions() {
    ArrayList<String> idList=new ArrayList<String>();

    for (Question question : questions) {
      idList.add(question.id);
    }

    String ids=TextUtils.join(";", idList);

    so.update(ids).enqueue(new Callback<SOQuestions>() {
      @Override
      public void onResponse(Call<SOQuestions> call,
                             Response<SOQuestions> response) {
        for (Item item : response.body().items) {
          Question question=questionMap.get(item.id);

          if (question!=null) {
            question.updateFromItem(item);
          }
        }
      }

      @Override
      public void onFailure(Call<SOQuestions> call, Throwable t) {
        onError(t);
      }
    });
  }

  // Shared failure path for both Retrofit calls: toast and log.
  private void onError(Throwable error) {
    Toast.makeText(getActivity(), error.getMessage(),
                   Toast.LENGTH_LONG).show();

    Log.e(getClass().getSimpleName(),
          "Exception from Retrofit request to StackOverflow", error);
  }

  // RecyclerView adapter binding each Question into a data-bound row.
  class QuestionsAdapter extends RecyclerView.Adapter<QuestionController> {
    private final ArrayList<Question> questions;

    QuestionsAdapter(ArrayList<Question> questions) {
      this.questions=questions;
    }

    @Override
    public QuestionController onCreateViewHolder(ViewGroup parent,
                                                 int viewType) {
      RowBinding rowBinding=
        RowBinding.inflate(getActivity().getLayoutInflater(),
                           parent, false);

      return(new QuestionController(rowBinding, this));
    }

    @Override
    public void onBindViewHolder(QuestionController holder,
                                 int position) {
      holder.bindModel(getItem(position));
    }

    @Override
    public int getItemCount() {
      return(questions.size());
    }

    Question getItem(int position) {
      return(questions.get(position));
    }
  }
}
2,179
511
// ---- GeneratedTypes.java ----------------- // This is a generated file. Not intended for manual editing. package generated; import com.intellij.psi.tree.IElementType; import com.intellij.psi.tree.TokenSet; public interface GeneratedTypes { IElementType ANOTHER_THREE_TOKENS = new IElementType("ANOTHER_THREE_TOKENS", null); IElementType FAST_CHOICE = new IElementType("FAST_CHOICE", null); IElementType FIVE_TOKENS_CHOICE = new IElementType("FIVE_TOKENS_CHOICE", null); IElementType FOUR_TOKENS_CHOICE = new IElementType("FOUR_TOKENS_CHOICE", null); IElementType PARENTHESIZED_CHOICE = new IElementType("PARENTHESIZED_CHOICE", null); IElementType REPEATING_TOKENS_CHOICE = new IElementType("REPEATING_TOKENS_CHOICE", null); IElementType SMART_CHOICE = new IElementType("SMART_CHOICE", null); IElementType SOME = new IElementType("SOME", null); IElementType TEN_TOKENS_CHOICE = new IElementType("TEN_TOKENS_CHOICE", null); IElementType THREE_TOKENS_CHOICE = new IElementType("THREE_TOKENS_CHOICE", null); IElementType THREE_TOKENS_IN_ANOTHER_ORDER = new IElementType("THREE_TOKENS_IN_ANOTHER_ORDER", null); IElementType A = new IElementType("A", null); IElementType B = new IElementType("B", null); IElementType C = new IElementType("C", null); IElementType D = new IElementType("D", null); IElementType E = new IElementType("E", null); IElementType F = new IElementType("F", null); IElementType G = new IElementType("G", null); IElementType H = new IElementType("H", null); IElementType I = new IElementType("I", null); IElementType J = new IElementType("J", null); IElementType P0 = new IElementType("P0", null); IElementType P1 = new IElementType("P1", null); IElementType P2 = new IElementType("P2", null); IElementType P3 = new IElementType("P3", null); IElementType S = new IElementType("S", null); interface TokenSets { TokenSet ANOTHER_THREE_TOKENS_TOKENS = TokenSet.create(A, B, D); TokenSet FAST_CHOICE_TOKENS = TokenSet.create(A, B, F); TokenSet FIVE_TOKENS_CHOICE_TOKENS = 
TokenSet.create( A, B, C, D, E ); TokenSet FOUR_TOKENS_CHOICE_TOKENS = TokenSet.create(A, B, C, D); TokenSet PARENTHESIZED_CHOICE_TOKENS = TokenSet.create(A, B, C); TokenSet PRIVATE_CHOICE_TOKENS = TokenSet.create(P0, P1, P2, P3); TokenSet REPEATING_TOKENS_CHOICE_TOKENS = FOUR_TOKENS_CHOICE_TOKENS; TokenSet SMART_CHOICE_TOKENS = TokenSet.create(A, B, S); TokenSet TEN_TOKENS_CHOICE_TOKENS = TokenSet.create( A, B, C, D, E, F, G, H, I, J ); TokenSet THREE_TOKENS_CHOICE_TOKENS = PARENTHESIZED_CHOICE_TOKENS; TokenSet THREE_TOKENS_IN_ANOTHER_ORDER_TOKENS = PARENTHESIZED_CHOICE_TOKENS; } }
1,080
852
## import skeleton process from PhysicsTools.PatAlgos.patTemplate_cfg import cms, process, patAlgosToolsTask #process.Tracer = cms.Service("Tracer") process.load("PhysicsTools.PatAlgos.producersLayer1.patCandidates_cff") patAlgosToolsTask.add(process.patCandidatesTask) #Temporary customize to the unit tests that fail due to old input samples process.patTaus.skipMissingTauID = True process.load("PhysicsTools.PatAlgos.selectionLayer1.selectedPatCandidates_cff") patAlgosToolsTask.add(process.selectedPatCandidatesTask) process.load("SimGeneral.HepPDTESSource.pythiapdt_cfi") process.load("RecoVertex.AdaptiveVertexFinder.inclusiveVertexing_cff") patAlgosToolsTask.add(process.inclusiveVertexingTask) patAlgosToolsTask.add(process.inclusiveCandidateVertexingTask) patAlgosToolsTask.add(process.inclusiveCandidateVertexingCvsLTask) process.load("PhysicsTools.PatAlgos.slimming.slimming_cff") patAlgosToolsTask.add(process.slimmingTask) from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeCommon, miniAOD_customizeMC miniAOD_customizeCommon(process) miniAOD_customizeMC(process) ## ------------------------------------------------------ # In addition you usually want to change the following # parameters: ## ------------------------------------------------------ # # process.GlobalTag.globaltag = ... 
## (according to https://twiki.cern.ch/twiki/bin/view/CMS/SWGuideFrontierConditions) # ## from PhysicsTools.PatAlgos.patInputFiles_cff import filesRelValZeeGENSIMRECO process.source.fileNames = filesRelValZeeGENSIMRECO #from PhysicsTools.PatAlgos.patInputFiles_cff import filesRelValTTbarGENSIMRECO #process.source.fileNames = filesRelValTTbarGENSIMRECO # ## process.maxEvents.input = 500 # ## process.out.outputCommands = process.MicroEventContentMC.outputCommands from PhysicsTools.PatAlgos.slimming.miniAOD_tools import miniAOD_customizeOutput miniAOD_customizeOutput(process.out) # ## # process.options.wantSummary = False ## (to suppress the long output at the end of the job) # ## process.out.fileName = 'patMiniAOD_standard.root' #
887
5,305
<gh_stars>1000+
/**
 * Annotation carrying an optional array of string values (empty by default).
 */
public @interface Annot_1 {
    String[] values() default {};
}
28
2,151
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "ash/shelf/app_list_shelf_item_delegate.h"

#include <utility>

#include "ash/app_list/app_list_controller_impl.h"
#include "ash/public/cpp/app_list/app_list_constants.h"
#include "ash/public/cpp/app_list/app_list_features.h"
#include "ash/public/cpp/shelf_model.h"
#include "ash/public/cpp/shell_window_ids.h"
#include "ash/shell.h"
#include "ash/wm/mru_window_tracker.h"
#include "ash/wm/overview/window_selector_controller.h"
#include "ash/wm/splitview/split_view_controller.h"
#include "ash/wm/tablet_mode/tablet_mode_controller.h"
#include "ash/wm/window_state.h"

namespace ash {

// Shelf-item delegate for the app-list (launcher) button; registers itself
// under the well-known kAppListId shelf ID.
AppListShelfItemDelegate::AppListShelfItemDelegate()
    : ShelfItemDelegate(ShelfID(kAppListId)) {}

AppListShelfItemDelegate::~AppListShelfItemDelegate() = default;

// Responds to the app-list shelf button being activated.
// Outside tablet-mode home-launcher: toggles the app list on the given
// display and reports SHELF_ACTION_APP_LIST_SHOWN to |callback|.
// With the home launcher enabled: exits overview and split view, then
// minimizes every window outside the app-list container so the launcher
// becomes visible.
// NOTE(review): in the home-launcher branch |callback| is never run —
// confirm callers tolerate a dropped callback.
void AppListShelfItemDelegate::ItemSelected(std::unique_ptr<ui::Event> event,
                                            int64_t display_id,
                                            ShelfLaunchSource source,
                                            ItemSelectedCallback callback) {
  if (!Shell::Get()
           ->app_list_controller()
           ->IsHomeLauncherEnabledInTabletMode()) {
    Shell::Get()->app_list_controller()->ToggleAppList(
        display_id, app_list::kShelfButton, event->time_stamp());
    std::move(callback).Run(SHELF_ACTION_APP_LIST_SHOWN, base::nullopt);
    return;
  }
  // End overview mode.
  if (Shell::Get()->window_selector_controller()->IsSelecting())
    Shell::Get()->window_selector_controller()->ToggleOverview();
  // End split view mode.
  if (Shell::Get()->split_view_controller()->IsSplitViewModeActive())
    Shell::Get()->split_view_controller()->EndSplitView();
  // Minimize all windows that aren't the app list.
  aura::Window* app_list_container =
      Shell::Get()->GetPrimaryRootWindow()->GetChildById(
          kShellWindowId_AppListTabletModeContainer);
  aura::Window::Windows windows =
      Shell::Get()->mru_window_tracker()->BuildWindowListIgnoreModal();
  for (auto* window : windows) {
    if (!app_list_container->Contains(window) &&
        !wm::GetWindowState(window)->IsMinimized()) {
      wm::GetWindowState(window)->Minimize();
    }
  }
}

// The app-list button exposes no context/application menu commands.
void AppListShelfItemDelegate::ExecuteCommand(bool from_context_menu,
                                              int64_t command_id,
                                              int32_t event_flags,
                                              int64_t display_id) {
  // This delegate does not show custom context or application menu items.
  NOTIMPLEMENTED();
}

// Nothing to close: the app list is not a window owned by this delegate.
void AppListShelfItemDelegate::Close() {}

}  // namespace ash
1,178
1,601
<reponame>ryankurte/codechecker<filename>web/client/codechecker_client/helpers/base.py
# -------------------------------------------------------------------------
#
#  Part of the CodeChecker project, under the Apache License v2.0 with
#  LLVM Exceptions. See LICENSE for license information.
#  SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# -------------------------------------------------------------------------
"""
Base Helper class for Thrift api calls.
"""

import sys

from thrift.transport import THttpClient
from thrift.protocol import TJSONProtocol

from codechecker_client.credential_manager import SESSION_COOKIE_NAME
from codechecker_client.product import create_product_url
from codechecker_common.logger import get_logger

LOG = get_logger('system')


class BaseClientHelper:
    """Shared plumbing for Thrift-over-HTTP clients: builds the transport
    and JSON protocol for a product URL and manages the session cookie."""

    def __init__(self, protocol, host, port, uri,
                 session_token=None, get_new_token=None):
        """
        @param protocol: URL scheme ('http' or 'https') passed to
               create_product_url.
        @param host/port/uri: components of the product endpoint.
        @param session_token: optional token installed as a session cookie.
        @param get_new_token: a function which can generate a new token.
        """
        self.__host = host
        self.__port = port
        url = create_product_url(protocol, host, port, uri)

        # Stays None if THttpClient construction fails (see below).
        self.transport = None

        try:
            self.transport = THttpClient.THttpClient(url)
        except ValueError:
            # Initializing THttpClient may raise an exception if proxy
            # settings are used but the port number is not a valid integer.
            pass

        # Thrift does not handle the use case when an invalid proxy format
        # is used (e.g.: no schema is specified). For this reason we need
        # to verify the proxy format on our side.
        self._validate_proxy_format()

        self.protocol = TJSONProtocol.TJSONProtocol(self.transport)
        # Concrete subclasses install the generated Thrift client here.
        self.client = None
        self.get_new_token = get_new_token
        self._set_token(session_token)

    def _validate_proxy_format(self):
        """
        Validate the proxy settings.
        If the proxy settings are invalid, it will print an error message
        and stop the program (sys.exit(1)).
        """
        # A usable direct (non-proxy) transport needs no validation.
        if self.transport and not self.transport.using_proxy():
            return

        # Either construction failed above or the proxy host/port are
        # malformed; both are unrecoverable for the client.
        if not self.transport or not self.transport.host or \
                not isinstance(self.transport.port, int):
            LOG.error("Invalid proxy format! Check your "
                      "HTTP_PROXY/HTTPS_PROXY environment variables if "
                      "these are in the right format: "
                      "'http[s]://host:port'.")
            sys.exit(1)

    def _set_token(self, session_token):
        """ Set the given token in the transport layer. """
        if not session_token:
            return

        headers = {'Cookie': SESSION_COOKIE_NAME + '=' + session_token}
        self.transport.setCustomHeaders(headers)

    def _reset_token(self):
        """ Get a new token and update the transport layer. """
        if not self.get_new_token:
            return

        # get_new_token() function connects to a remote server to get a new
        # session token.
        session_token = self.get_new_token()
        self._set_token(session_token)
1,219
321
/**
 * Most of the code in the Qalingo project is copyrighted Hoteia and licensed
 * under the Apache License Version 2.0 (release version 0.8.0)
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 *                   Copyright (c) Hoteia, 2012-2014
 * http://www.hoteia.com - http://twitter.com/hoteia - <EMAIL>
 *
 */
package org.hoteia.qalingo.core.web.mvc.viewbean;

import java.util.ArrayList;
import java.util.List;

/**
 * View bean carrying a customer's address list for rendering, together with
 * the URL the page should navigate back to.
 */
public class CustomerAddressListViewBean extends AbstractViewBean {

    /**
     * Generated UID
     */
    private static final long serialVersionUID = 3367453428667832074L;

    /** URL of the page to return to from the address list. */
    private String backUrl;

    /** Addresses to display; never null, starts empty. */
    private List<CustomerAddressViewBean> customerAddressList = new ArrayList<>();

    public String getBackUrl() {
        return backUrl;
    }

    public void setBackUrl(final String backUrl) {
        this.backUrl = backUrl;
    }

    public List<CustomerAddressViewBean> getCustomerAddressList() {
        return customerAddressList;
    }

    public void setCustomerAddressList(final List<CustomerAddressViewBean> customerAddressList) {
        this.customerAddressList = customerAddressList;
    }

}
442
915
package org.nzbhydra.historystats.stats;

import lombok.Data;

import java.time.DayOfWeek;
import java.time.format.TextStyle;
import java.util.Locale;

/**
 * Holds an aggregated count keyed by a short US-English weekday name
 * (e.g. "Mon"). Accessors are generated by Lombok's {@code @Data}.
 */
@Data
public class CountPerDayOfWeek {

    // Short display name of the weekday, e.g. "Mon".
    private String day = null;
    // Aggregated count for that weekday.
    private Integer count = null;

    /**
     * @param dayIndex 1 for "Mon", 2 for "Tue", etc.
     * @param counter  value stored as the count (may be null)
     */
    public CountPerDayOfWeek(int dayIndex, Integer counter) {
        this.count = counter;
        final DayOfWeek dayOfWeek = DayOfWeek.of(dayIndex);
        this.day = dayOfWeek.getDisplayName(TextStyle.SHORT, Locale.US);
    }
}
194
1,059
/* SPDX-License-Identifier: BSD-2-Clause */ #pragma once #include <tilck/common/basic_defs.h> #include <tilck/kernel/bintree.h> struct debug_kmalloc_heap_info { ulong vaddr; size_t size; size_t mem_allocated; size_t min_block_size; size_t alloc_block_size; int region; }; struct kmalloc_small_heaps_stats { int tot_count; int peak_count; int not_full_count; int peak_not_full_count; int empty_count; int lifetime_created_heaps_count; }; struct debug_kmalloc_chunks_ctx { struct bintree_walk_ctx ctx; }; struct debug_kmalloc_stats { struct kmalloc_small_heaps_stats small_heaps; size_t chunk_sizes_count; }; bool debug_kmalloc_get_heap_info(int heap_num, struct debug_kmalloc_heap_info *i); void debug_kmalloc_get_heap_info_by_ptr(struct kmalloc_heap *h, struct debug_kmalloc_heap_info *i); void debug_kmalloc_get_stats(struct debug_kmalloc_stats *stats); void debug_kmalloc_chunks_stats_start_read(struct debug_kmalloc_chunks_ctx *ctx); bool debug_kmalloc_chunks_stats_next(struct debug_kmalloc_chunks_ctx *ctx, size_t *size, size_t *count); /* Leak-detector and kmalloc logging */ void debug_kmalloc_start_leak_detector(bool save_metadata); void debug_kmalloc_stop_leak_detector(bool show_leaks); void debug_kmalloc_start_log(void); void debug_kmalloc_stop_log(void);
626
15,577
<gh_stars>1000+
#include <Functions/FunctionUnixTimestamp64.h>
#include <Functions/FunctionFactory.h>

namespace DB
{

// Registers the SQL function fromUnixTimestamp64Milli with the factory.
// The FunctionFromUnixTimestamp64 instance is constructed with scale 3,
// i.e. three fractional decimal digits (milliseconds), matching the name.
void registerFromUnixTimestamp64Milli(FunctionFactory & factory)
{
    factory.registerFunction("fromUnixTimestamp64Milli",
        [](ContextPtr){ return std::make_unique<FunctionToOverloadResolverAdaptor>(
            std::make_shared<FunctionFromUnixTimestamp64>(3, "fromUnixTimestamp64Milli")); });
}

}
150
364
<filename>hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/delete/job/MultiUrlJobParameterValidatorTest.java
package ca.uhn.fhir.jpa.delete.job;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterUtil;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@link MultiUrlJobParameterValidator}: verifies that job
 * parameters holding match URLs validate when the resource type is supported
 * by the DAO registry, and fail with a clear message when it is not.
 */
@ExtendWith(MockitoExtension.class)
class MultiUrlJobParameterValidatorTest {
	static final FhirContext ourFhirContext = FhirContext.forR4Cached();

	@Mock
	MatchUrlService myMatchUrlService;
	@Mock
	DaoRegistry myDaoRegistry;

	// System under test; rebuilt before each test with fresh mocks.
	MultiUrlJobParameterValidator mySvc;

	@BeforeEach
	public void initMocks() {
		mySvc = new MultiUrlJobParameterValidator(myMatchUrlService, myDaoRegistry);
	}

	/** Two valid Patient URLs with a supported type: validate() must pass. */
	@Test
	public void testValidate() throws JobParametersInvalidException, JsonProcessingException {
		// setup
		JobParameters parameters = MultiUrlJobParameterUtil.buildJobParameters("Patient?address=memory", "Patient?name=smith");
		ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap(), RequestPartitionId.defaultPartition());
		when(myMatchUrlService.getResourceSearch(anyString(), any())).thenReturn(resourceSearch);
		when(myDaoRegistry.isResourceTypeSupported("Patient")).thenReturn(true);

		// execute
		mySvc.validate(parameters);

		// verify: one resolution per URL
		verify(myMatchUrlService, times(2)).getResourceSearch(anyString(), any());
	}

	/** Unsupported resource type: validate() must throw with a helpful message. */
	@Test
	public void testValidateBadType() throws JobParametersInvalidException, JsonProcessingException {
		JobParameters parameters = MultiUrlJobParameterUtil.buildJobParameters("Patient?address=memory");
		ResourceSearch resourceSearch = new ResourceSearch(ourFhirContext.getResourceDefinition("Patient"), new SearchParameterMap(), RequestPartitionId.defaultPartition());
		when(myMatchUrlService.getResourceSearch(anyString(), any())).thenReturn(resourceSearch);
		when(myDaoRegistry.isResourceTypeSupported("Patient")).thenReturn(false);

		try {
			mySvc.validate(parameters);
			fail();
		} catch (JobParametersInvalidException e) {
			assertEquals("The resource type Patient is not supported on this server.", e.getMessage());
		}
	}
}
1,041
2,151
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/window_manager_service.h" #include <memory> #include <utility> #include "ash/mojo_interface_factory.h" #include "ash/network_connect_delegate_mus.h" #include "ash/shell.h" #include "ash/system/power/power_status.h" #include "ash/window_manager.h" #include "base/bind.h" #include "base/threading/thread_task_runner_handle.h" #include "chromeos/audio/cras_audio_handler.h" #include "chromeos/cryptohome/system_salt_getter.h" #include "chromeos/dbus/dbus_thread_manager.h" #include "chromeos/dbus/power_policy_controller.h" #include "chromeos/network/network_connect.h" #include "chromeos/network/network_handler.h" #include "chromeos/system/fake_statistics_provider.h" #include "device/bluetooth/bluetooth_adapter_factory.h" #include "device/bluetooth/dbus/bluez_dbus_manager.h" #include "services/service_manager/public/cpp/connector.h" #include "services/service_manager/public/cpp/service_context.h" #include "services/ui/common/accelerator_util.h" #include "ui/aura/env.h" #include "ui/aura/mus/window_tree_client.h" #include "ui/events/event.h" #include "ui/views/mus/aura_init.h" namespace ash { WindowManagerService::WindowManagerService(bool show_primary_host_on_connect) : show_primary_host_on_connect_(show_primary_host_on_connect) {} WindowManagerService::~WindowManagerService() { // Verify that we created a WindowManager before attempting to tear everything // down. In some fast running tests OnStart may never have been called. if (!window_manager_.get()) return; // Destroy the WindowManager while still valid. This way we ensure // OnWillDestroyRootWindowController() is called (if it hasn't been already). window_manager_.reset(); statistics_provider_.reset(); ShutdownComponents(); } service_manager::Connector* WindowManagerService::GetConnector() { return context() ? 
context()->connector() : nullptr; } void WindowManagerService::InitWindowManager( std::unique_ptr<aura::WindowTreeClient> window_tree_client, bool init_network_handler) { // Tests may have already set the WindowTreeClient. if (!aura::Env::GetInstance()->HasWindowTreeClient()) aura::Env::GetInstance()->SetWindowTreeClient(window_tree_client.get()); InitializeComponents(init_network_handler); // TODO(jamescook): Refactor StatisticsProvider so we can get just the data // we need in ash. Right now StatisticsProviderImpl launches the crossystem // binary to get system data, which we don't want to do twice on startup. statistics_provider_.reset( new chromeos::system::ScopedFakeStatisticsProvider()); statistics_provider_->SetMachineStatistic("initial_locale", "en-US"); statistics_provider_->SetMachineStatistic("keyboard_layout", ""); window_manager_->Init(std::move(window_tree_client), /*initial_display_prefs=*/nullptr); } void WindowManagerService::InitializeComponents(bool init_network_handler) { // Must occur after mojo::ApplicationRunner has initialized AtExitManager, but // before WindowManager::Init(). Tests might initialize their own instance. if (!chromeos::DBusThreadManager::IsInitialized()) { chromeos::DBusThreadManager::Initialize( chromeos::DBusThreadManager::kShared); dbus_thread_manager_initialized_ = true; } chromeos::PowerPolicyController::Initialize( chromeos::DBusThreadManager::Get()->GetPowerManagerClient()); // See ChromeBrowserMainPartsChromeos for ordering details. bluez::BluezDBusManager::Initialize( chromeos::DBusThreadManager::Get()->GetSystemBus(), chromeos::DBusThreadManager::Get()->IsUsingFakes()); if (init_network_handler && !chromeos::NetworkHandler::IsInitialized()) { chromeos::NetworkHandler::Initialize(); network_handler_initialized_ = true; } network_connect_delegate_.reset(new NetworkConnectDelegateMus()); chromeos::NetworkConnect::Initialize(network_connect_delegate_.get()); // TODO(jamescook): Initialize real audio handler. 
chromeos::CrasAudioHandler::InitializeForTesting(); chromeos::SystemSaltGetter::Initialize(); } void WindowManagerService::ShutdownComponents() { // NOTE: PowerStatus is shutdown by Shell. chromeos::SystemSaltGetter::Shutdown(); chromeos::CrasAudioHandler::Shutdown(); chromeos::NetworkConnect::Shutdown(); network_connect_delegate_.reset(); // We may not have started the NetworkHandler. if (network_handler_initialized_) chromeos::NetworkHandler::Shutdown(); device::BluetoothAdapterFactory::Shutdown(); bluez::BluezDBusManager::Shutdown(); chromeos::PowerPolicyController::Shutdown(); if (dbus_thread_manager_initialized_) chromeos::DBusThreadManager::Shutdown(); } void WindowManagerService::OnStart() { mojo_interface_factory::RegisterInterfaces( &registry_, base::ThreadTaskRunnerHandle::Get()); const bool register_path_provider = running_standalone_; aura_init_ = views::AuraInit::Create( context()->connector(), context()->identity(), "ash_service_resources.pak", "ash_service_resources_200.pak", nullptr, views::AuraInit::Mode::AURA_MUS_WINDOW_MANAGER, register_path_provider); if (!aura_init_) { context()->QuitNow(); return; } window_manager_ = std::make_unique<WindowManager>( context()->connector(), show_primary_host_on_connect_); const bool automatically_create_display_roots = false; std::unique_ptr<aura::WindowTreeClient> window_tree_client = aura::WindowTreeClient::CreateForWindowManager( context()->connector(), window_manager_.get(), window_manager_.get(), automatically_create_display_roots); const bool init_network_handler = true; InitWindowManager(std::move(window_tree_client), init_network_handler); } void WindowManagerService::OnBindInterface( const service_manager::BindSourceInfo& source_info, const std::string& interface_name, mojo::ScopedMessagePipeHandle interface_pipe) { registry_.BindInterface(interface_name, std::move(interface_pipe)); } } // namespace ash
1,953
1,159
"""
Dist Plot Bokeh
===============
_thumb: .2, .8
"""
import bokeh.plotting as bkp
import numpy as np
from bokeh.layouts import row

import arviz as az

# 1000 draws each from a Poisson(4) (discrete) and a standard normal
# (continuous) distribution, to show plot_dist handling both kinds.
a = np.random.poisson(4, 1000)
b = np.random.normal(0, 1, 1000)

# Shared figure settings; the "webgl" backend offloads rendering to the GPU.
figure_kwargs = dict(height=500, width=500, output_backend="webgl")
ax_poisson = bkp.figure(**figure_kwargs)
ax_normal = bkp.figure(**figure_kwargs)

# Draw each sample on its own figure; show=False defers display so both
# figures can be combined into a single row layout below.
az.plot_dist(a, color="black", label="Poisson", ax=ax_poisson, backend="bokeh", show=False)
az.plot_dist(b, color="red", label="Gaussian", ax=ax_normal, backend="bokeh", show=False)

ax = row(ax_poisson, ax_normal)

# Only open a browser window when the global arviz rcParam asks for it
# (kept off in docs/CI builds).
if az.rcParams["plot.bokeh.show"]:
    bkp.show(ax)
265
2,232
<filename>src/library/delayed_abstraction.h
/*
Copyright (c) 2016 Microsoft Corporation. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.

Author: <NAME>
*/
#pragma once
#include "kernel/environment.h"

namespace lean {
/* Constructors for delayed-abstraction terms: a macro wrapping `e` that
   records substitutions `h_i := e_i` to be applied later to the
   metavariables occurring in `e`. */
expr mk_delayed_abstraction(expr const & e, buffer<name> const & ns);
expr mk_delayed_abstraction(expr const & e, name const & n);

/* Returns true iff `e` is a delayed-abstraction macro. */
bool is_delayed_abstraction(expr const & e);

/* Accessors for the wrapped expression and the (name, expr) substitution
   pairs stored in a delayed-abstraction macro. */
expr const & get_delayed_abstraction_expr(expr const & e);
void get_delayed_abstraction_info(expr const & e, buffer<name> & ns, buffer<expr> & es);

/* Given a delayed abstraction `[delayed t, h_1 := e_1, ..., h_n := e_n]`,
   push the delayed substitutions `h_i := e_i` to the metavariables occurring in `t`.
   Remark: if `t` is a metavariable, then we just return `e`. */
expr push_delayed_abstraction(expr const & e);

/* Append the new delayed substitutions `ns[i] := es[i]` to the metavariables occurring in `e`.
   \pre ns.size() == es.size() */
expr append_delayed_abstraction(expr const & e, buffer<name> const & ns, buffer<expr> const & es);

/* Create e{ls[0] := ls[0], ..., ls[n-1] := ls[n-1]}
   \pre is_metavar(e)
   \pre for all x in ls, is_local(x) */
expr mk_delayed_abstraction_with_locals(expr const & e, buffer<expr> const & ls);

/* Create e{ns[0] := vs[0], ... ls[n-1] := vs[n-1]}
   \pre is_metavar(e)
   \pre ns.size() == es.size()
   \pre !ns.empty() */
expr mk_delayed_abstraction(expr const & e, buffer<name> const & ns, buffer<expr> const & vs);

class metavar_context;

/* Similar to abstract_locals, but create a delayed_abstraction macro around
   metavariables occurring in \c e. */
expr delayed_abstract_locals(metavar_context const & mctx, expr const & e, unsigned nlocals, expr const * locals);

/* Module init/finalize hooks (register/unregister the macro kind). */
void initialize_delayed_abstraction();
void finalize_delayed_abstraction();
}
664
565
/* +----------------------------------------------------------------------+ | PHP Version 7 | +----------------------------------------------------------------------+ | Copyright (c) 1997-2016 The PHP Group | +----------------------------------------------------------------------+ | This source file is subject to version 3.01 of the PHP license, | | that is bundled with this package in the file LICENSE, and is | | available through the world-wide-web at the following url: | | http://www.php.net/license/3_01.txt | | If you did not receive a copy of the PHP license and are unable to | | obtain it through the world-wide-web, please send a note to | | <EMAIL> so we can mail you a copy immediately. | +----------------------------------------------------------------------+ | Author: krakjoe | +----------------------------------------------------------------------+ */ #ifndef HAVE_PHP_UI_COLOR #define HAVE_PHP_UI_COLOR #include <ui.h> #include "php.h" #include <classes/exceptions.h> #include <classes/control.h> #include <classes/color.h> zend_object_handlers php_std_handlers; zend_object_handlers php_ui_color_handlers; zend_class_entry *uiDrawColor_ce; zend_bool php_ui_color_set(zval *color, double *r, double *g, double *b, double *a) { if (Z_TYPE_P(color) == IS_OBJECT) { php_ui_color_t *object; if (!instanceof_function(Z_OBJCE_P(color), uiDrawColor_ce)) { return 0; } object = php_ui_color_fetch(color); *r = object->r; *g = object->g; *b = object->b; *a = object->a; return 1; } if (Z_TYPE_P(color) == IS_LONG || Z_TYPE_P(color) == IS_DOUBLE) { uint32_t components = zval_get_double(color); *r = ((double) (uint8_t) ((components >> 24) & 0xFF)) / 255; *g = ((double) (uint8_t) ((components >> 16) & 0xFF)) / 255; *b = ((double) (uint8_t) ((components >> 8) & 0xFF)) / 255; *a = ((double) (uint8_t) (components & 0xFF)) / 255; return 1; } return 0; } /* {{{ */ static zval* php_ui_color_read(zval *object, zval *member, int type, void **cache, zval *rv) { php_ui_color_t *color = 
php_ui_color_fetch(object); if (Z_TYPE_P(member) != IS_STRING) { goto php_std_read_color; } if (Z_STRLEN_P(member) != 1) { goto php_std_read_color; } #define php_ui_color_guard() do { \ if (type == BP_VAR_RW || type == BP_VAR_W) { \ php_ui_exception( \ "Failed to fetch reference to %s, not allowed", Z_STRVAL_P(member)); \ return &EG(uninitialized_zval); \ } \ } while(0) switch (Z_STRVAL_P(member)[0]) { case 'r': case 'R': php_ui_color_guard(); ZVAL_DOUBLE(rv, color->r); return rv; case 'g': case 'G': php_ui_color_guard(); ZVAL_DOUBLE(rv, color->g); return rv; case 'b': case 'B': php_ui_color_guard(); ZVAL_DOUBLE(rv, color->b); return rv; case 'a': case 'A': php_ui_color_guard(); ZVAL_DOUBLE(rv, color->a); return rv; } #undef php_ui_color_guard php_std_read_color: return php_std_handlers.read_property(object, member, type, cache, rv); } /* }}} */ /* {{{ */ zval* php_ui_color_noref(zval *object, zval *member, int type, void **cache) { return NULL; } /* }}} */ /* {{{ */ void php_ui_color_write(zval *object, zval *member, zval *value, void **cache) { php_ui_color_t *color = php_ui_color_fetch(object); if (Z_TYPE_P(member) != IS_STRING) { goto php_std_write_color; } if (Z_STRLEN_P(member) != 1) { goto php_std_write_color; } switch (Z_STRVAL_P(member)[0]) { case 'r': case 'R': color->r = zval_get_double(value); return; case 'g': case 'G': color->g = zval_get_double(value); return; case 'b': case 'B': color->b = zval_get_double(value); return; case 'a': case 'A': color->a = zval_get_double(value); return; } php_std_write_color: php_std_handlers.write_property(object, member, value, cache); } /* }}} */ zend_object* php_ui_color_create(zend_class_entry *ce) { php_ui_color_t *color = (php_ui_color_t*) ecalloc(1, sizeof(php_ui_color_t) + zend_object_properties_size(ce)); zend_object_std_init(&color->std, ce); object_properties_init(&color->std, ce); color->std.handlers = &php_ui_color_handlers; color->a = 1; return &color->std; } HashTable* php_ui_color_debug(zval *object, 
int *is_temp) { php_ui_color_t *color = php_ui_color_fetch(object); HashTable *table, *std; zval tmp; int is_std_temp = 0; *is_temp = 1; ALLOC_HASHTABLE(table); zend_hash_init(table, 4, NULL, ZVAL_PTR_DTOR, 0); std = php_std_handlers.get_debug_info(object, &is_std_temp); if (std) { zend_hash_merge(table, std, zval_add_ref, 1); if (is_std_temp) { zend_hash_destroy(std); FREE_HASHTABLE(std); } } ZVAL_DOUBLE(&tmp, color->r); zend_hash_str_update(table, "r", sizeof("r")-1, &tmp); ZVAL_DOUBLE(&tmp, color->g); zend_hash_str_update(table, "g", sizeof("g")-1, &tmp); ZVAL_DOUBLE(&tmp, color->b); zend_hash_str_update(table, "b", sizeof("b")-1, &tmp); ZVAL_DOUBLE(&tmp, color->a); zend_hash_str_update(table, "a", sizeof("a")-1, &tmp); return table; } ZEND_BEGIN_ARG_INFO_EX(php_ui_color_construct_info, 0, 0, 0) ZEND_ARG_INFO(0, rgba) ZEND_END_ARG_INFO() /* {{{ proto DrawColor DrawColor::__construct([int|double rgba = 0]) */ PHP_METHOD(DrawColor, __construct) { php_ui_color_t *color = php_ui_color_fetch(getThis()); zval *rgba = NULL; if (zend_parse_parameters_throw(ZEND_NUM_ARGS(), "|z", &rgba) != SUCCESS) { return; } if (ZEND_NUM_ARGS() > 0) { php_ui_color_set(rgba, &color->r, &color->g, &color->b, &color->a); } } /* }}} */ ZEND_BEGIN_ARG_INFO_EX(php_ui_color_set_channel_info, 0, 0, 2) ZEND_ARG_TYPE_INFO(0, channel, IS_LONG, 0) ZEND_ARG_TYPE_INFO(0, value, IS_DOUBLE, 0) ZEND_END_ARG_INFO() /* {{{ proto void DrawColor::setChannel(int channel, double value) */ PHP_METHOD(DrawColor, setChannel) { php_ui_color_t *color = php_ui_color_fetch(getThis()); zend_long channel = 0; double value = 0; if (zend_parse_parameters_throw(ZEND_NUM_ARGS(), "ld", &channel, &value) != SUCCESS) { return; } switch (channel) { case PHP_UI_COLOR_RED: color->r = value; break; case PHP_UI_COLOR_GREEN: color->g = value; break; case PHP_UI_COLOR_BLUE: color->b = value; break; case PHP_UI_COLOR_ALPHA: color->a = value; break; } } /* }}} */ #if PHP_VERSION_ID >= 70200 
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(php_ui_color_get_channel_info, 0, 0, IS_DOUBLE, 1) #else ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(php_ui_color_get_channel_info, 0, 0, IS_DOUBLE, NULL, 1) #endif ZEND_ARG_TYPE_INFO(0, channel, IS_LONG, 0) ZEND_END_ARG_INFO() /* {{{ proto double DrawColor::getChannel(int channel) */ PHP_METHOD(DrawColor, getChannel) { php_ui_color_t *color = php_ui_color_fetch(getThis()); zend_long channel = 0; if (zend_parse_parameters_throw(ZEND_NUM_ARGS(), "l", &channel) != SUCCESS) { return; } switch (channel) { case PHP_UI_COLOR_RED: RETURN_DOUBLE(color->r); case PHP_UI_COLOR_GREEN: RETURN_DOUBLE(color->g); case PHP_UI_COLOR_BLUE: RETURN_DOUBLE(color->b); case PHP_UI_COLOR_ALPHA: RETURN_DOUBLE(color->a); } RETURN_DOUBLE(-1); } /* }}} */ /* {{{ */ const zend_function_entry php_ui_color_methods[] = { PHP_ME(DrawColor, __construct, php_ui_color_construct_info, ZEND_ACC_PUBLIC) PHP_ME(DrawColor, setChannel, php_ui_color_set_channel_info, ZEND_ACC_PUBLIC) PHP_ME(DrawColor, getChannel, php_ui_color_get_channel_info, ZEND_ACC_PUBLIC) PHP_FE_END }; /* }}} */ /* {{{ */ PHP_MINIT_FUNCTION(UI_DrawColor) { zend_class_entry ce; INIT_NS_CLASS_ENTRY(ce, "UI\\Draw", "Color", php_ui_color_methods); uiDrawColor_ce = zend_register_internal_class(&ce); uiDrawColor_ce->create_object = php_ui_color_create; zend_declare_property_double(uiDrawColor_ce, ZEND_STRL("r"), 0.0, ZEND_ACC_PUBLIC); zend_declare_property_double(uiDrawColor_ce, ZEND_STRL("g"), 0.0, ZEND_ACC_PUBLIC); zend_declare_property_double(uiDrawColor_ce, ZEND_STRL("b"), 0.0, ZEND_ACC_PUBLIC); zend_declare_property_double(uiDrawColor_ce, ZEND_STRL("a"), 0.0, ZEND_ACC_PUBLIC); zend_declare_class_constant_long(uiDrawColor_ce, ZEND_STRL("Red"), PHP_UI_COLOR_RED); zend_declare_class_constant_long(uiDrawColor_ce, ZEND_STRL("Green"), PHP_UI_COLOR_GREEN); zend_declare_class_constant_long(uiDrawColor_ce, ZEND_STRL("Blue"), PHP_UI_COLOR_BLUE); zend_declare_class_constant_long(uiDrawColor_ce, 
ZEND_STRL("Alpha"), PHP_UI_COLOR_ALPHA); memcpy(&php_std_handlers, zend_get_std_object_handlers(), sizeof(zend_object_handlers)); memcpy(&php_ui_color_handlers, &php_std_handlers, sizeof(zend_object_handlers)); php_ui_color_handlers.offset = XtOffsetOf(php_ui_color_t, std); php_ui_color_handlers.get_debug_info = php_ui_color_debug; php_ui_color_handlers.read_property = php_ui_color_read; php_ui_color_handlers.get_property_ptr_ptr = php_ui_color_noref; php_ui_color_handlers.write_property = php_ui_color_write; return SUCCESS; } /* }}} */ #endif
4,063
1,039
#if !defined(SIMDE_X86_AVX512_ROL_H) #define SIMDE_X86_AVX512_ROL_H #include "types.h" #include "mov.h" #include "or.h" #include "srli.h" #include "slli.h" #include "../avx2.h" HEDLEY_DIAGNOSTIC_PUSH SIMDE_DISABLE_UNWANTED_DIAGNOSTICS SIMDE_BEGIN_DECLS_ #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm_rol_epi32(a, imm8) _mm_rol_epi32(a, imm8) #else SIMDE_FUNCTION_ATTRIBUTES simde__m128i simde_mm_rol_epi32 (simde__m128i a, int imm8) SIMDE_REQUIRE_CONSTANT_RANGE (imm8, 0, 255) { simde__m128i_private r_, a_ = simde__m128i_to_private(a); #if defined(SIMDE_POWER_ALTIVEC_P6_NATIVE) r_.altivec_i32 = vec_rl(a_.altivec_i32, vec_splats(HEDLEY_STATIC_CAST(unsigned int, imm8))); #elif defined(SIMDE_VECTOR_SUBSCRIPT_SCALAR) switch (imm8 & 31) { case 0: r_ = a_; break; default: r_.u32 = (a_.u32 << (imm8 & 31)) | (a_.u32 >> (32 - (imm8 & 31))); break; } #else switch (imm8 & 31) { case 0: r_ = a_; break; default: SIMDE_VECTORIZE for (size_t i = 0 ; i < (sizeof(r_.u32) / sizeof(r_.u32[0])) ; i++) { r_.u32[i] = (a_.u32[i] << (imm8 & 31)) | (a_.u32[i] >> (32 - (imm8 & 31))); } break; } #endif return simde__m128i_from_private(r_); } #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm_rol_epi32 #define _mm_rol_epi32(a, imm8) simde_mm_rol_epi32(a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm_mask_rol_epi32(src, k, a, imm8) _mm_mask_rol_epi32(src, k, a, imm8) #else #define simde_mm_mask_rol_epi32(src, k, a, imm8) simde_mm_mask_mov_epi32(src, k, simde_mm_rol_epi32(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm_mask_rol_epi32 #define _mm_mask_rol_epi32(src, k, a, imm8) simde_mm_mask_rol_epi32(src, k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm_maskz_rol_epi32(k, a, 
imm8) _mm_maskz_rol_epi32(k, a, imm8) #else #define simde_mm_maskz_rol_epi32(k, a, imm8) simde_mm_maskz_mov_epi32(k, simde_mm_rol_epi32(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm_maskz_rol_epi32 #define _mm_maskz_rol_epi32(src, k, a, imm8) simde_mm_maskz_rol_epi32(src, k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm256_rol_epi32(a, imm8) _mm256_rol_epi32(a, imm8) #else SIMDE_FUNCTION_ATTRIBUTES simde__m256i simde_mm256_rol_epi32 (simde__m256i a, int imm8) SIMDE_REQUIRE_CONSTANT_RANGE (imm8, 0, 255) { simde__m256i_private r_, a_ = simde__m256i_to_private(a); #if defined(SIMDE_POWER_ALTIVEC_P6_NATIVE) for (size_t i = 0 ; i < (sizeof(r_.m128i_private) / sizeof(r_.m128i_private[0])) ; i++) { r_.m128i_private[i].altivec_i32 = vec_rl(a_.m128i_private[i].altivec_i32, vec_splats(HEDLEY_STATIC_CAST(unsigned int, imm8))); } #elif defined(SIMDE_VECTOR_SUBSCRIPT_SCALAR) switch (imm8 & 31) { case 0: r_ = a_; break; default: r_.u32 = (a_.u32 << (imm8 & 31)) | (a_.u32 >> (32 - (imm8 & 31))); break; } #else switch (imm8 & 31) { case 0: r_ = a_; break; default: SIMDE_VECTORIZE for (size_t i = 0 ; i < (sizeof(r_.u32) / sizeof(r_.u32[0])) ; i++) { r_.u32[i] = (a_.u32[i] << (imm8 & 31)) | (a_.u32[i] >> (32 - (imm8 & 31))); } break; } #endif return simde__m256i_from_private(r_); } #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm256_rol_epi32 #define _mm256_rol_epi32(a, imm8) simde_mm256_rol_epi32(a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm256_mask_rol_epi32(src, k, a, imm8) _mm256_mask_rol_epi32(src, k, a, imm8) #else #define simde_mm256_mask_rol_epi32(src, k, a, imm8) simde_mm256_mask_mov_epi32(src, k, simde_mm256_rol_epi32(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && 
defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm256_mask_rol_epi32 #define _mm256_mask_rol_epi32(src, k, a, imm8) simde_mm256_mask_rol_epi32(src, k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm256_maskz_rol_epi32(k, a, imm8) _mm256_maskz_rol_epi32(k, a, imm8) #else #define simde_mm256_maskz_rol_epi32(k, a, imm8) simde_mm256_maskz_mov_epi32(k, simde_mm256_rol_epi32(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm256_maskz_rol_epi32 #define _mm256_maskz_rol_epi32(k, a, imm8) simde_mm256_maskz_rol_epi32(k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) #define simde_mm512_rol_epi32(a, imm8) _mm512_rol_epi32(a, imm8) #else SIMDE_FUNCTION_ATTRIBUTES simde__m512i simde_mm512_rol_epi32 (simde__m512i a, int imm8) SIMDE_REQUIRE_CONSTANT_RANGE (imm8, 0, 255) { simde__m512i_private r_, a_ = simde__m512i_to_private(a); #if defined(SIMDE_POWER_ALTIVEC_P6_NATIVE) for (size_t i = 0 ; i < (sizeof(r_.m128i_private) / sizeof(r_.m128i_private[0])) ; i++) { r_.m128i_private[i].altivec_i32 = vec_rl(a_.m128i_private[i].altivec_i32, vec_splats(HEDLEY_STATIC_CAST(unsigned int, imm8))); } #elif defined(SIMDE_VECTOR_SUBSCRIPT_SCALAR) switch (imm8 & 31) { case 0: r_ = a_; break; default: r_.u32 = (a_.u32 << (imm8 & 31)) | (a_.u32 >> (32 - (imm8 & 31))); break; } #else switch (imm8 & 31) { case 0: r_ = a_; break; default: SIMDE_VECTORIZE for (size_t i = 0 ; i < (sizeof(r_.u32) / sizeof(r_.u32[0])) ; i++) { r_.u32[i] = (a_.u32[i] << (imm8 & 31)) | (a_.u32[i] >> (32 - (imm8 & 31))); } break; } #endif return simde__m512i_from_private(r_); } #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) #undef _mm512_rol_epi32 #define _mm512_rol_epi32(a, imm8) simde_mm512_rol_epi32(a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) #define simde_mm512_mask_rol_epi32(src, k, a, imm8) _mm512_mask_rol_epi32(src, k, a, imm8) #else 
#define simde_mm512_mask_rol_epi32(src, k, a, imm8) simde_mm512_mask_mov_epi32(src, k, simde_mm512_rol_epi32(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) #undef _mm512_mask_rol_epi32 #define _mm512_mask_rol_epi32(src, k, a, imm8) simde_mm512_mask_rol_epi32(src, k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) #define simde_mm512_maskz_rol_epi32(k, a, imm8) _mm512_maskz_rol_epi32(k, a, imm8) #else #define simde_mm512_maskz_rol_epi32(k, a, imm8) simde_mm512_maskz_mov_epi32(k, simde_mm512_rol_epi32(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) #undef _mm512_maskz_rol_epi32 #define _mm512_maskz_rol_epi32(k, a, imm8) simde_mm512_maskz_rol_epi32(k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm_rol_epi64(a, imm8) _mm_rol_epi64(a, imm8) #else SIMDE_FUNCTION_ATTRIBUTES simde__m128i simde_mm_rol_epi64 (simde__m128i a, int imm8) SIMDE_REQUIRE_CONSTANT_RANGE (imm8, 0, 255) { simde__m128i_private r_, a_ = simde__m128i_to_private(a); #if defined(SIMDE_POWER_ALTIVEC_P8_NATIVE) r_.altivec_i64 = vec_rl(a_.altivec_i64, vec_splats(HEDLEY_STATIC_CAST(unsigned long long, imm8))); #elif defined(SIMDE_VECTOR_SUBSCRIPT_SCALAR) switch (imm8 & 63) { case 0: r_ = a_; break; default: r_.u64 = (a_.u64 << (imm8 & 63)) | (a_.u64 >> (64 - (imm8 & 63))); break; } #else switch (imm8 & 63) { case 0: r_ = a_; break; default: SIMDE_VECTORIZE for (size_t i = 0 ; i < (sizeof(r_.u64) / sizeof(r_.u64[0])) ; i++) { r_.u64[i] = (a_.u64[i] << (imm8 & 63)) | (a_.u64[i] >> (64 - (imm8 & 63))); } break; } #endif return simde__m128i_from_private(r_); } #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm_rol_epi64 #define _mm_rol_epi64(a, imm8) simde_mm_rol_epi64(a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm_mask_rol_epi64(src, k, a, imm8) _mm_mask_rol_epi64(src, 
k, a, imm8) #else #define simde_mm_mask_rol_epi64(src, k, a, imm8) simde_mm_mask_mov_epi64(src, k, simde_mm_rol_epi64(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm_mask_rol_epi64 #define _mm_mask_rol_epi64(src, k, a, imm8) simde_mm_mask_rol_epi64(src, k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm_maskz_rol_epi64(k, a, imm8) _mm_maskz_rol_epi64(k, a, imm8) #else #define simde_mm_maskz_rol_epi64(k, a, imm8) simde_mm_maskz_mov_epi64(k, simde_mm_rol_epi64(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm_maskz_rol_epi64 #define _mm_maskz_rol_epi64(k, a, imm8) simde_mm_maskz_rol_epi64(k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm256_rol_epi64(a, imm8) _mm256_rol_epi64(a, imm8) #else SIMDE_FUNCTION_ATTRIBUTES simde__m256i simde_mm256_rol_epi64 (simde__m256i a, int imm8) SIMDE_REQUIRE_CONSTANT_RANGE (imm8, 0, 255) { simde__m256i_private r_, a_ = simde__m256i_to_private(a); #if defined(SIMDE_POWER_ALTIVEC_P8_NATIVE) for (size_t i = 0 ; i < (sizeof(r_.m128i_private) / sizeof(r_.m128i_private[0])) ; i++) { r_.m128i_private[i].altivec_i64 = vec_rl(a_.m128i_private[i].altivec_i64, vec_splats(HEDLEY_STATIC_CAST(unsigned long long, imm8))); } #elif defined(SIMDE_VECTOR_SUBSCRIPT_SCALAR) switch (imm8 & 63) { case 0: r_ = a_; break; default: r_.u64 = (a_.u64 << (imm8 & 63)) | (a_.u64 >> (64 - (imm8 & 63))); break; } #else switch (imm8 & 63) { case 0: r_ = a_; break; default: SIMDE_VECTORIZE for (size_t i = 0 ; i < (sizeof(r_.u64) / sizeof(r_.u64[0])) ; i++) { r_.u64[i] = (a_.u64[i] << (imm8 & 63)) | (a_.u64[i] >> (64 - (imm8 & 63))); } break; } #endif return simde__m256i_from_private(r_); } #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && 
defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm256_rol_epi64 #define _mm256_rol_epi64(a, imm8) simde_mm256_rol_epi64(a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm256_mask_rol_epi64(src, k, a, imm8) _mm256_mask_rol_epi64(src, k, a, imm8) #else #define simde_mm256_mask_rol_epi64(src, k, a, imm8) simde_mm256_mask_mov_epi64(src, k, simde_mm256_rol_epi64(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm256_mask_rol_epi64 #define _mm256_mask_rol_epi64(src, k, a, imm8) simde_mm256_mask_rol_epi64(src, k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) && defined(SIMDE_X86_AVX512VL_NATIVE) #define simde_mm256_maskz_rol_epi64(k, a, imm8) _mm256_maskz_rol_epi64(k, a, imm8) #else #define simde_mm256_maskz_rol_epi64(k, a, imm8) simde_mm256_maskz_mov_epi64(k, simde_mm256_rol_epi64(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) && defined(SIMDE_X86_AVX512VL_ENABLE_NATIVE_ALIASES) #undef _mm256_maskz_rol_epi64 #define _mm256_maskz_rol_epi64(k, a, imm8) simde_mm256_maskz_rol_epi64(k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) #define simde_mm512_rol_epi64(a, imm8) _mm512_rol_epi64(a, imm8) #else SIMDE_FUNCTION_ATTRIBUTES simde__m512i simde_mm512_rol_epi64 (simde__m512i a, int imm8) SIMDE_REQUIRE_CONSTANT_RANGE (imm8, 0, 255) { simde__m512i_private r_, a_ = simde__m512i_to_private(a); #if defined(SIMDE_POWER_ALTIVEC_P8_NATIVE) for (size_t i = 0 ; i < (sizeof(r_.m128i_private) / sizeof(r_.m128i_private[0])) ; i++) { r_.m128i_private[i].altivec_i64 = vec_rl(a_.m128i_private[i].altivec_i64, vec_splats(HEDLEY_STATIC_CAST(unsigned long long, imm8))); } #elif defined(SIMDE_VECTOR_SUBSCRIPT_SCALAR) switch (imm8 & 63) { case 0: r_ = a_; break; default: r_.u64 = (a_.u64 << (imm8 & 63)) | (a_.u64 >> (64 - (imm8 & 63))); break; } #else switch (imm8 & 63) { case 0: r_ = a_; break; default: 
SIMDE_VECTORIZE for (size_t i = 0 ; i < (sizeof(r_.u64) / sizeof(r_.u64[0])) ; i++) { r_.u64[i] = (a_.u64[i] << (imm8 & 63)) | (a_.u64[i] >> (64 - (imm8 & 63))); } break; } #endif return simde__m512i_from_private(r_); } #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) #undef _mm512_rol_epi64 #define _mm512_rol_epi64(a, imm8) simde_mm512_rol_epi64(a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) #define simde_mm512_mask_rol_epi64(src, k, a, imm8) _mm512_mask_rol_epi64(src, k, a, imm8) #else #define simde_mm512_mask_rol_epi64(src, k, a, imm8) simde_mm512_mask_mov_epi64(src, k, simde_mm512_rol_epi64(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) #undef _mm512_mask_rol_epi64 #define _mm512_mask_rol_epi64(src, k, a, imm8) simde_mm512_mask_rol_epi64(src, k, a, imm8) #endif #if defined(SIMDE_X86_AVX512F_NATIVE) #define simde_mm512_maskz_rol_epi64(k, a, imm8) _mm512_maskz_rol_epi64(k, a, imm8) #else #define simde_mm512_maskz_rol_epi64(k, a, imm8) simde_mm512_maskz_mov_epi64(k, simde_mm512_rol_epi64(a, imm8)) #endif #if defined(SIMDE_X86_AVX512F_ENABLE_NATIVE_ALIASES) #undef _mm512_maskz_rol_epi64 #define _mm512_maskz_rol_epi64(k, a, imm8) simde_mm512_maskz_rol_epi64(k, a, imm8) #endif SIMDE_END_DECLS_ HEDLEY_DIAGNOSTIC_POP #endif /* !defined(SIMDE_X86_AVX512_ROL_H) */
8,016
370
package org.apache.jasper.compiler;

import org.apache.jasper.JasperException;
import org.apache.jasper.JspC;
import org.apache.jasper.JspCompilationContext;
import org.apache.jasper.Options;
import org.apache.jasper.servlet.JspCServletContext;
import org.apache.jasper.servlet.TldScanner;
import org.xml.sax.SAXException;

import javax.servlet.ServletContext;
import javax.servlet.jsp.tagext.TagLibraryInfo;
import java.io.*;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Map;

/**
 * Parses a JSP page or tag file into Jasper's internal {@link Node.Nodes} tree
 * without running the rest of the Jasper compilation pipeline (no Java
 * generation, no compilation to class files).
 */
public class JtpParser {

    /**
     * Parses the given JSP/tag source bytes.
     *
     * @param relativeFilePath path of the source relative to {@code resourceBase}; used for error reporting
     * @param input            raw bytes of the JSP/tag source, decoded as UTF-8
     * @param resourceBase     base URL of the web application root, used to resolve resources (e.g. TLDs)
     * @param tag              {@code true} if the source is a tag file rather than a JSP page
     * @return the parsed node tree
     * @throws JasperException on parse or taglib-scanning errors
     * @throws IOException     on read errors
     */
    public static Node.Nodes parse(String relativeFilePath, byte[] input, URL resourceBase, boolean tag) throws JasperException, IOException {
        JspC jspc = new JspC();
        PrintWriter log = new PrintWriter(System.out);
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        ServletContext context = new JspCServletContext(log, resourceBase, classLoader, jspc.isValidateXml(), jspc.isBlockExternal());

        // Scan the classpath for tag library descriptors so taglib directives resolve.
        TldScanner scanner = new TldScanner(context, true, jspc.isValidateTld(), jspc.isBlockExternal());
        scanner.setClassLoader(classLoader);
        try {
            scanner.scan();
        } catch (SAXException e) {
            throw new JasperException(e);
        }

        TldCache tldCache = new TldCache(context, scanner.getUriTldResourcePathMap(), scanner.getTldResourcePathTaglibXmlMap());
        context.setAttribute(TldCache.SERVLET_CONTEXT_ATTRIBUTE_NAME, tldCache);

        JspConfig jspConfig = new JspConfig(context);
        JspRuntimeContext runtimeContext = new JspRuntimeContext(context, jspc);

        // Minimal Options implementation: every optional compiler feature is disabled;
        // only what the parser actually consults (TLD cache, JSP config, encoding) is supplied.
        JspCompilationContext compilationContext = new JspCompilationContext("", new Options() {
            @Override public boolean getErrorOnUseBeanInvalidClassAttribute() { return true; }
            @Override public boolean getKeepGenerated() { return false; }
            @Override public boolean isPoolingEnabled() { return false; }
            @Override public boolean getMappedFile() { return false; }
            @Override public boolean getClassDebugInfo() { return false; }
            @Override public int getCheckInterval() { return 0; }
            @Override public boolean getDevelopment() { return false; }
            @Override public boolean getDisplaySourceFragment() { return false; }
            @Override public boolean isSmapSuppressed() { return false; }
            @Override public boolean isSmapDumped() { return false; }
            @Override public boolean getTrimSpaces() { return false; }
            @Override public String getIeClassId() { return null; }
            @Override public File getScratchDir() { return null; }
            @Override public String getClassPath() { return null; }
            @Override public String getCompiler() { return null; }
            @Override public String getCompilerTargetVM() { return null; }
            @Override public String getCompilerSourceVM() { return null; }
            @Override public String getCompilerClassName() { return null; }
            @Override public TldCache getTldCache() { return tldCache; }
            @Override public String getJavaEncoding() { return "UTF-8"; }
            @Override public boolean getFork() { return false; }
            @Override public JspConfig getJspConfig() { return jspConfig; }
            @Override public boolean isXpoweredBy() { return false; }
            @Override public TagPluginManager getTagPluginManager() { return null; }
            @Override public boolean genStringAsCharArray() { return false; }
            @Override public int getModificationTestInterval() { return 0; }
            @Override public boolean getRecompileOnFail() { return false; }
            @Override public boolean isCaching() { return false; }
            @Override public Map<String, TagLibraryInfo> getCache() { return null; }
            @Override public int getMaxLoadedJsps() { return 0; }
            @Override public int getJspIdleTimeout() { return 0; }
            @Override public boolean getStrictQuoteEscaping() { return false; }
            @Override public boolean getQuoteAttributeEL() { return false; }
        }, context, null, runtimeContext);

        Compiler compiler = compilationContext.createCompiler();
        compiler.pageInfo = new PageInfo(null, "", tag);
        ParserController parserController = new ParserController(compilationContext, compiler);

        // BUG FIX: decode explicitly as UTF-8. Parser.parse() below is told the
        // source is UTF-8, so the reader must not fall back to the platform
        // default charset (which the charset-less constructor used).
        InputStreamReader reader = new InputStreamReader(new ByteArrayInputStream(input), StandardCharsets.UTF_8);
        ErrorDispatcher errorDispatcher = new ErrorDispatcher(true);
        JspReader jspReader = new JspReader(compilationContext, relativeFilePath, reader, errorDispatcher);
        return Parser.parse(parserController, jspReader, null, tag, false, null, "UTF-8", "UTF-8", true, false);
    }
}
3,079
557
<reponame>kritarthh/bakeware<gh_stars>100-1000 #include <ftw.h> #include <stdio.h> #include <unistd.h> #include <limits.h> #include "bakeware.h" // OPEN_MAX isn't defined on some systems. Note that many nftw man pages say // that the max open file handles parameter is ignored, but it's definitely not // ignored in the musl libc. On brief inspection, it looks like the number of // handles needed is related to the maximum directory depth, so this value // probably doesn't need to be that large in practice. #ifndef OPEN_MAX #define OPEN_MAX 255 #endif static int rm(const char *path, const struct stat *s, int flag, struct FTW *ftw) { int status; if (flag & FTW_DP) status = rmdir(path); else status = unlink(path); if (status < 0) bw_warn("Could not delete '%s'", path); return status; } int rm_fr(const char *path) { return nftw(path, rm, OPEN_MAX, FTW_DEPTH | FTW_PHYS | FTW_MOUNT); }
351
852
<filename>PhysicsTools/PatAlgos/python/slimming/selectedPatTrigger_cfi.py import FWCore.ParameterSet.Config as cms selectedPatTrigger = cms.EDFilter("PATTriggerObjectStandAloneSelector", src = cms.InputTag("patTrigger"), cut = cms.string("!filterLabels.empty()") )
97
1,104
{ "html": "BlueBeards_Bride.html", "css": "BlueBeards_Bride.css", "authors": "<NAME>", "roll20userid": "2321057", "preview": "BlueBeards_Bride.png", "instructions": "Help Information is included on the \"Help\" tab of the character sheet", "legacy": true }
98
595
/*******************************************************************************
 * Copyright (C) 2017 - 2020 Xilinx, Inc. All rights reserved.
 * SPDX-License-Identifier: MIT
 ******************************************************************************/
/**
 * @file xi2srx_selftest_example.c
 *
 * Example application exercising the self-test path of the I2S receiver
 * hardware through the XI2s_Rx driver: look up the device configuration,
 * initialize the driver instance, and report the outcome on the console.
 *
 * <pre>
 * MODIFICATION HISTORY:
 *
 * Ver	Who	Date		Changes
 * ---- ---	--------	-----------------------------------------------
 * 1.0	kar	01/25/18	First release
 * </pre>
 */

/***************************** Include Files **********************************/
#include "xparameters.h"
#include "xi2srx.h"
#include "xil_printf.h"

/************************** Constant Definitions ******************************/
/* Device ID of the I2S receiver instance, taken from the XPAR parameters in
 * xparameters.h so all needed parameters can be changed in one place. */
#ifndef TESTAPP_GEN
#define I2S_RX_DEVICE_ID XPAR_XI2SRX_0_DEVICE_ID
#endif

/************************** Function Prototypes *******************************/
int I2srx_SelfTest_Example(u16 DeviceId);

/************************** Variable Definitions ******************************/
XI2s_Rx I2s_rx; /* Driver instance for the I2S receiver under test */

/*****************************************************************************/
/**
 * Entry point: runs the self-test example against I2S_RX_DEVICE_ID and
 * prints the result.
 *
 * @return	XST_SUCCESS if the self test passed, XST_FAILURE otherwise.
 */
#ifndef TESTAPP_GEN
int main(void)
{
	xil_printf("I2S Self Test Example \r\n");

	if (I2srx_SelfTest_Example(I2S_RX_DEVICE_ID) != XST_SUCCESS) {
		xil_printf("I2S RX Self Test Failed\r\n");
		return XST_FAILURE;
	}

	xil_printf("Successfully ran I2S RX Self Test Example Test\r\n");
	return XST_SUCCESS;
}
#endif

/*****************************************************************************/
/**
 * Minimal smoke test of the I2S receiver device and driver: look up the
 * configuration for the given device and initialize the driver with it.
 *
 * @param	DeviceId is the XPAR_<I2S_receiver_instance>_DEVICE_ID value
 *		from xparameters.h.
 *
 * @return	XST_SUCCESS if initialization succeeded, XST_FAILURE otherwise.
 */
int I2srx_SelfTest_Example(u16 DeviceId)
{
	XI2srx_Config *Config = XI2s_Rx_LookupConfig(DeviceId);

	if (Config == NULL)
		return XST_FAILURE;

	if (XI2s_Rx_CfgInitialize(&I2s_rx, Config, Config->BaseAddress) != XST_SUCCESS)
		return XST_FAILURE;

	return XST_SUCCESS;
}
1,754
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ratpack.core.file.internal;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.google.common.reflect.TypeToken;
import io.netty.handler.codec.http.HttpHeaderNames;
import ratpack.core.file.MimeTypes;
import ratpack.core.handling.Context;
import ratpack.core.http.Response;
import ratpack.core.http.internal.HttpHeaderConstants;
import ratpack.core.render.Renderer;
import ratpack.core.render.RendererSupport;
import ratpack.exec.Blocking;
import ratpack.func.Action;
import ratpack.func.Factory;
import ratpack.func.Exceptions;
import ratpack.func.Types;

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Date;
import java.util.Optional;

import static io.netty.handler.codec.http.HttpResponseStatus.NOT_MODIFIED;

/**
 * Renders a {@link Path} as an HTTP response body.
 * <p>
 * Responds 404 for missing or non-regular files, honours conditional requests
 * (Last-Modified and a literal {@code If-None-Match: *}), and optionally caches
 * file metadata to avoid repeated blocking attribute reads.
 */
public class FileRenderer extends RendererSupport<Path> {

  /** Whether file attributes are cached ({@link #CACHING}) or re-read per request ({@link #NON_CACHING}). */
  private final boolean cacheMetadata;

  public static final TypeToken<Renderer<Path>> TYPE = Types.intern(new TypeToken<Renderer<Path>>() {});

  /** Shared renderer that caches file metadata. */
  public static final Renderer<Path> CACHING = new FileRenderer(true);

  /** Shared renderer that reads file metadata on every request. */
  public static final Renderer<Path> NON_CACHING = new FileRenderer(false);

  private FileRenderer(boolean cacheMetadata) {
    this.cacheMetadata = cacheMetadata;
  }

  // BUG FIX: previously initialized lazily in getCache() without synchronization,
  // so two threads could race on first access and build two caches (losing entries).
  // Eager final initialization is inherently thread-safe and the empty cache is cheap.
  private static final Cache<Path, Optional<BasicFileAttributes>> cache =
    Caffeine.newBuilder().maximumSize(10000).build();

  /**
   * Renders the file, or responds with a 404 client error if it does not exist
   * or is not a regular file.
   */
  @Override
  public void render(Context ctx, Path targetFile) throws Exception {
    readAttributes(targetFile, cacheMetadata, attributes -> {
      if (attributes == null || !attributes.isRegularFile()) {
        ctx.clientError(404);
      } else {
        sendFile(ctx, targetFile, attributes);
      }
    });
  }

  /**
   * Sends the file as the response body, after conditional-request handling.
   * <p>
   * Replies 304 when the client's Last-Modified precondition holds or when it
   * sent {@code If-None-Match: *}; otherwise sets content type (from the file
   * extension, if not already set) and content length, and streams the file.
   */
  public static void sendFile(Context context, Path file, BasicFileAttributes attributes) {
    Date date = new Date(attributes.lastModifiedTime().toMillis());
    context.lastModified(date, () -> {
      final String ifNoneMatch = context.getRequest().getHeaders().get(HttpHeaderNames.IF_NONE_MATCH);
      Response response = context.getResponse();
      if (ifNoneMatch != null && ifNoneMatch.trim().equals("*")) {
        response.status(NOT_MODIFIED.code()).send();
        return;
      }

      response.contentTypeIfNotSet(() -> context.get(MimeTypes.class).getContentType(file.getFileName().toString()));
      response.getHeaders().set(HttpHeaderConstants.CONTENT_LENGTH, Long.toString(attributes.size()));
      try {
        response.sendFile(file);
      } catch (Exception e) {
        throw Exceptions.uncheck(e);
      }
    });
  }

  /** Factory reading the file's basic attributes, or {@code null} if it does not exist. */
  private static Factory<BasicFileAttributes> getter(Path file) {
    return () -> {
      if (Files.exists(file)) {
        return Files.readAttributes(file, BasicFileAttributes.class);
      } else {
        return null;
      }
    };
  }

  /**
   * Reads the file's attributes (possibly from the metadata cache) and passes
   * them to {@code then}; passes {@code null} when the file does not exist.
   * <p>
   * Concurrent misses on the same path may each read attributes once; the last
   * write wins, which is acceptable for a metadata cache.
   */
  public static void readAttributes(Path file, boolean cacheMetadata, Action<? super BasicFileAttributes> then) throws Exception {
    if (cacheMetadata) {
      Optional<BasicFileAttributes> basicFileAttributes = cache.getIfPresent(file);
      if (basicFileAttributes == null) {
        Blocking.get(getter(file)).then(a -> {
          cache.put(file, Optional.ofNullable(a));
          then.execute(a);
        });
      } else {
        then.execute(basicFileAttributes.orElse(null));
      }
    } else {
      Blocking.get(getter(file)).then(then);
    }
  }
}
347
<gh_stars>100-1000 package org.ovirt.engine.core.vdsbroker.irsbroker; import java.io.Serializable; public class IRSUnicodeArgumentException extends IRSGenericException implements Serializable { // protected IRSUnicodeArgumentException(SerializationInfo info, // StreamingContext context) // { // super(info, context); // } public IRSUnicodeArgumentException(RuntimeException baseException) { super("IRSUnicodeArgumentException: ", baseException); } public IRSUnicodeArgumentException(String errorStr) { super("IRSUnicodeArgumentException: " + errorStr); } }
205
507
# terrascript/dns/d.py # Automatically generated by tools/makecode.py () import warnings warnings.warn( "using the 'legacy layout' is deprecated", DeprecationWarning, stacklevel=2 ) import terrascript class dns_a_record_set(terrascript.Data): pass class dns_aaaa_record_set(terrascript.Data): pass class dns_cname_record_set(terrascript.Data): pass class dns_mx_record_set(terrascript.Data): pass class dns_ns_record_set(terrascript.Data): pass class dns_ptr_record_set(terrascript.Data): pass class dns_srv_record_set(terrascript.Data): pass class dns_txt_record_set(terrascript.Data): pass
251
738
import uuid

try:
    from django_mongoengine import Document
except ImportError:
    from mongoengine import Document
from mongoengine import EmbeddedDocument
from mongoengine import StringField, ListField
from mongoengine import UUIDField
from mongoengine import IntField, BooleanField

from django.conf import settings

from crits.core.crits_mongoengine import CritsBaseAttributes, CritsDocumentFormatter
from crits.core.crits_mongoengine import CritsSourceDocument, CritsDocument, CritsSchemaDocument
from crits.core.crits_mongoengine import CommonAccess, CritsActionsDocument


class SignatureDependency(CritsDocument, CritsSchemaDocument, Document):
    """
    Signature dependency class.

    Lookup document naming another data type this signature depends on;
    `active` toggles its visibility in the UI.
    """

    meta = {
        "collection": settings.COL_SIGNATURE_DEPENDENCY,
        "auto_create_index": False,
        "crits_type": 'SignatureDependency',
        "latest_schema_version": 1,
        "schema_doc": {
            'name': 'The name of this data dependency',
            'active': 'Enabled in the UI (on/off)'
        },
    }

    name = StringField()
    active = StringField(default="on")


class SignatureType(CritsDocument, CritsSchemaDocument, Document):
    """
    Signature type class.

    Lookup document for the data types a signature can be written in;
    `active` toggles its visibility in the UI.
    """

    meta = {
        "collection": settings.COL_SIGNATURE_TYPES,
        "auto_create_index": False,
        "crits_type": 'SignatureType',
        "latest_schema_version": 1,
        "schema_doc": {
            'name': 'The name of this data type',
            'active': 'Enabled in the UI (on/off)'
        },
    }

    name = StringField()
    active = StringField(default="on")


class Signature(CritsBaseAttributes, CritsSourceDocument, CritsActionsDocument, Document):
    """
    Signature class.

    Top-level CRITs object storing the signature body (`data`), its data type
    and supported version range, and the jtable listing configuration used by
    the UI.
    """

    meta = {
        "collection": settings.COL_SIGNATURES,
        "auto_create_index": False,
        "crits_type": 'Signature',
        "latest_schema_version": 1,
        "schema_doc": {
        },
        "jtable_opts": {
            'details_url': 'crits-signatures-views-signature_detail',
            'details_url_key': 'id',
            'default_sort': "modified DESC",
            'searchurl': 'crits-signatures-views-signatures_listing',
            'fields': ["title", "data_type", "data_type_min_version",
                       "data_type_max_version", "data_type_dependency",
                       "version", "modified", "source", "campaign", "id",
                       "status"],
            'jtopts_fields': ["details", "title", "data_type",
                              "data_type_min_version", "data_type_max_version",
                              "data_type_dependency", "version", "modified",
                              "source", "campaign", "status", "favorite",
                              "id"],
            'hidden_fields': [],
            'linked_fields': ["source", "campaign"],
            'details_link': 'details',
            'no_sort': ['details']
        }
    }

    data_type = StringField()
    data_type_min_version = StringField()
    data_type_max_version = StringField()
    data_type_dependency = ListField()
    data = StringField()
    # Stable UUID linking all versions of the same signature together.
    link_id = UUIDField(binary=True, required=True, default=uuid.uuid4)
    md5 = StringField()
    title = StringField()
    version = IntField()


class SignatureAccess(EmbeddedDocument, CritsDocumentFormatter, CommonAccess):
    """
    ACL for Signatures.

    Per-field read/edit flags consulted by the UI and API; all default to
    denied (False).
    """

    upload_new_version = BooleanField(default=False)

    data_type_read = BooleanField(default=False)
    data_type_edit = BooleanField(default=False)

    data_type_min_version_read = BooleanField(default=False)
    data_type_min_version_edit = BooleanField(default=False)

    data_type_max_version_read = BooleanField(default=False)
    data_type_max_version_edit = BooleanField(default=False)

    data_read = BooleanField(default=False)
    data_edit = BooleanField(default=False)

    dependencies_read = BooleanField(default=False)
    dependencies_edit = BooleanField(default=False)
576
package japicmp.test;

import org.junit.Test;

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

/**
 * Integration test asserting that this build configuration produces no japicmp
 * report output.
 */
public class ITNoReport {

	@Test
	public void testThatNoReportWasGenerated() {
		// The plugin writes its reports to target/japicmp; absence of that
		// directory after the build proves report generation was skipped.
		Path path = Paths.get(System.getProperty("user.dir"), "target", "japicmp");
		assertThat(Files.exists(path), is(false));
	}
}
515
package br.com.caelum.stella.feriado.util;

import java.util.Comparator;

import br.com.caelum.stella.feriado.entity.Feriado;

/**
 * Utility holder of reusable {@link Comparator}s for {@link Feriado} (holiday)
 * instances: by date (ascending/descending), by place, and by name
 * (ascending/descending).
 */
public class Ordernador {

	/** Not instantiable: this class only exposes static comparators. */
	private Ordernador() {
	}

	/** Orders holidays by date, earliest first. */
	public static final Comparator<Feriado> DATA_CRESCENTE = new Comparator<Feriado>() {
		@Override
		public int compare(Feriado f1, Feriado f2) {
			return f1.getData().compareTo(f2.getData());
		}
	};

	/** Orders holidays by date, latest first. */
	public static final Comparator<Feriado> DATA_DECRESCENTE = new Comparator<Feriado>() {
		@Override
		public int compare(Feriado f1, Feriado f2) {
			return f2.getData().compareTo(f1.getData());
		}
	};

	/** Orders holidays by place (natural order of the local field). */
	public static final Comparator<Feriado> LUGAR = new Comparator<Feriado>() {
		@Override
		public int compare(Feriado f1, Feriado f2) {
			return f1.getLocal().compareTo(f2.getLocal());
		}
	};

	/** Orders holidays by name, A to Z. */
	public static final Comparator<Feriado> NOME_CRESCENTE = new Comparator<Feriado>() {
		@Override
		public int compare(Feriado f1, Feriado f2) {
			return f1.getNome().compareTo(f2.getNome());
		}
	};

	/** Orders holidays by name, Z to A. */
	public static final Comparator<Feriado> NOME_DECRESCENTE = new Comparator<Feriado>() {
		@Override
		public int compare(Feriado f1, Feriado f2) {
			return f2.getNome().compareTo(f1.getNome());
		}
	};
}
764
{"symbol": "ARDX","address": "0xB8E2e2101eD11e9138803cd3e06e16dd19910647","overview":{"en": ""},"email": "<EMAIL>","website": "http://www.ardcoin.com/","state": "NORMAL","links": {"blog": "https://medium.com/@ardcoin","twitter": "https://twitter.com/ard_coin","telegram": "https://t.me/ardcoin","github": ""}}
118
452
<filename>src/SegfaultHandler.h<gh_stars>100-1000 #ifndef FREEBAYES_SEGFAULTHANDLER_H #define FREEBAYES_SEGFAULTHANDLER_H #include <stdio.h> #ifndef __CYGWIN__ #include <execinfo.h> #endif #include <signal.h> #include <stdlib.h> void segfaultHandler(int sig); #endif
121
851
<filename>modules/schema/schema/session/state_update.schema.json { "id": "https://xviz.org/schema/session/state_update.json", "$schema": "http://json-schema.org/draft-04/schema#", "description": "XVIZ Stream Update", "type": "object", "properties": { "update_type": { "type": "string", "enum": [ "COMPLETE_STATE", "INCREMENTAL", "SNAPSHOT", "PERSISTENT" ] }, "updates": { "type": "array", "items": { "$ref": "https://xviz.org/schema/core/stream_set.json" }, "minItems": 1, "additionalItems": false } }, "required": [ "update_type", "updates" ], "additionalProperties": false }
327
2,666
#include <errno.h> #include <pthread.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include "cache.h" #define MAX_RANDOM_ENTRIES 32 struct key_record { char *key; char *value; }; int generate_random_entry(struct key_record **entry); int generate_random_string(char **dst, const size_t len); void free_random_entry(void *entry); void *producer(void *arg) { struct foo_cache *cache = arg; int i; for (i = 0; i < MAX_RANDOM_ENTRIES; i++) { struct key_record *entry = NULL; if (generate_random_entry(&entry)) { fprintf(stderr, "generate_random_entry() failed\n"); continue; } #if defined(DEBUG) printf("Random Entry:\n"); printf(" key: %s\n", entry->key); printf(" Key: %s\n", entry->value); #else printf("inserted %s (%d)\n", entry->key, (int)strlen(entry->key)); #endif if (foo_cache_insert(cache, entry->key, entry)) { fprintf(stderr, "foo_cache_insert() failed\n"); continue; } } pthread_exit(NULL); } void *consumer(void *arg) { struct foo_cache *cache = arg; struct key_record *result = NULL; char *buffer = malloc(64); char key[33]; int stop = 0; if (!buffer) goto out; /* give producer time to populate the cache */ sleep(2); printf("\n\n"); do { memset(key, 0, 64); result = NULL; printf("Enter key for lookup: "); fgets(buffer, sizeof(key), stdin); sscanf(buffer, "%s\n", key); /* read '\n' from stdin */ getchar(); if (strncmp(key, "exit", 4) == 0) { stop = 1; continue; } printf("Got key %s (%d)\n", key, (int)strlen(key)); if (foo_cache_lookup(cache, key, &result)) { fprintf(stderr, "Could not retrieve key %s\n", key); continue; } if (!result) { printf("MISS\n"); continue; } printf("HIT\n"); printf("key: %s\n", result->key); printf("key : %s\n", result->value); } while (!stop); out: if (buffer) free(buffer); pthread_exit(NULL); } int main() { int rv; struct foo_cache *cache = NULL; pthread_t workers[2]; rv = foo_cache_create(&cache, MAX_RANDOM_ENTRIES / 2, free_random_entry); if (rv) { fprintf(stderr, "Could not create cache\n"); exit(1); } 
(void)pthread_create(&workers[0], NULL, producer, (void *)cache); (void)pthread_create(&workers[1], NULL, consumer, (void *)cache); pthread_join(workers[0], NULL); pthread_join(workers[1], NULL); (void)foo_cache_delete(cache, 0); return 0; } int generate_random_entry(struct key_record **entry) { struct key_record *new = NULL; char *key = NULL; char *value = NULL; int rv; if (!entry) return EINVAL; rv = generate_random_string(&key, 33); if (rv) return rv; rv = generate_random_string(&value, 129); if (rv) return rv; if ((new = malloc(sizeof(*new))) == NULL) { free(key); free(value); return ENOMEM; } new->key = key; new->value = value; *entry = new; return 0; } int generate_random_string(char **dst, const size_t len) { static const char alphanum[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; size_t i; char *s; if (!dst || len == 0) return EINVAL; if ((s = malloc(len)) == NULL) return ENOMEM; for (i = 0; i < len - 1; i++) { s[i] = alphanum[rand() % (sizeof(alphanum) - 1)]; } s[len - 1] = '\0'; *dst = s; return 0; } void free_random_entry(void *entry) { #if defined(DEBUG) fprintf(stderr, "In %s: entry @ %p\n", __func__, entry); #endif struct key_record *record = entry; if (!record) return; if (record->key) free(record->key); if (record->value) free(record->value); free(record); record = NULL; }
1,546
348
{"nom":"Veyrines-de-Domme","circ":"4ème circonscription","dpt":"Dordogne","inscrits":181,"abs":98,"votants":83,"blancs":7,"nuls":7,"exp":69,"res":[{"nuance":"FI","nom":"<NAME>","voix":39},{"nuance":"REM","nom":"Mme <NAME>","voix":30}]}
97
301
/******************************************************************
 *
 * Copyright 2015 Samsung Electronics All Rights Reserved.
 *
 *
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************/

#include "simulator_logger.h"
#include <sstream>
#include <fstream>
#include <time.h>

/// Log target that writes each entry to stdout, one line per entry.
class ConsoleLogger : public ILogger
{
    public:
        void write(std::string time, ILogger::Level level, std::string message)
        {
            std::ostringstream out;
            out << time << " " << ILogger::getString(level) << " " << message;
            std::cout << out.str() << std::endl;
        }
};

/// Log target that appends entries to a file opened via open().
class FileLogger : public ILogger
{
    public:
        FileLogger(std::string filePath) : m_filePath(filePath) {}

        /// Opens the target file (no-op if already open).
        /// @return true if the stream is open afterwards.
        bool open()
        {
            if (!m_out.is_open())
            {
                m_out.open(m_filePath, std::ofstream::out);
            }

            return m_out.is_open();
        }

        void close()
        {
            if (m_out.is_open())
            {
                m_out.close();
            }
        }

        void write(std::string time, ILogger::Level level, std::string message)
        {
            // BUGFIX: entries were written without a line terminator (the
            // console target ends each entry with std::endl), so all file log
            // entries ran together on one line. Also flush so entries survive
            // an abnormal process exit.
            m_out << time << " " << ILogger::getString(level) << " " << message << "\n";
            m_out.flush();
        }

    private:
        std::ofstream m_out;
        std::string m_filePath;
};

// Installs a console target; fails if a target is already set.
bool Logger::setDefaultConsoleTarget()
{
    if (nullptr != m_target)
    {
        return false;
    }

    m_target = std::make_shared<ConsoleLogger>();
    return true;
}

// Installs a file target writing to <path>/Simulator_<timestamp>.log;
// fails if a target is already set or the file cannot be opened.
bool Logger::setDefaultFileTarget(const std::string &path)
{
    if (nullptr != m_target || path.empty())
    {
        return false;
    }

    // NOTE(review): localtime() returns a pointer to shared static storage
    // and is not thread-safe — confirm single-threaded use or switch to
    // localtime_r if loggers are created concurrently.
    time_t timeInfo = time(NULL);
    struct tm *localTime = localtime(&timeInfo);
    if (nullptr == localTime)
    {
        return false;
    }

    std::ostringstream newFileName;
    newFileName << path << "/Simulator_";
    // BUGFIX: tm_year counts years since 1900 and tm_mon is 0-based; without
    // the offsets the generated file name carried a bogus date (e.g. "1150...").
    newFileName << (localTime->tm_year + 1900) << (localTime->tm_mon + 1) << localTime->tm_mday
                << localTime->tm_hour << localTime->tm_min << localTime->tm_sec;
    newFileName << ".log";

    std::shared_ptr<FileLogger> fileLogger = std::make_shared<FileLogger>(newFileName.str());
    if (fileLogger->open())
    {
        m_target = fileLogger;
        return true;
    }

    return false;
}

// Replaces the current target unconditionally (may be nullptr to disable).
void Logger::setCustomTarget(const std::shared_ptr<ILogger> &target)
{
    m_target = target;
}

// Forwards one entry to the current target, prefixed with an H.M.S timestamp.
// Silently drops the entry when no target is installed.
void Logger::write(ILogger::Level level, std::ostringstream &str)
{
    if (nullptr != m_target)
    {
        time_t timeInfo = time(NULL);
        struct tm *localTime = localtime(&timeInfo);
        if (nullptr == localTime)
        {
            return;
        }

        // Unpadded fields ("9.5.3" rather than "09.05.03") — kept as-is to
        // preserve the existing log format.
        std::ostringstream timeStr;
        timeStr << localTime->tm_hour << "." << localTime->tm_min << "." << localTime->tm_sec;
        m_target->write(timeStr.str(), level, str.str());
    }
}

// Process-wide logger singleton.
auto simLogger() -> Logger &
{
    static Logger logger;
    return logger;
}
1,433
634
<reponame>halotroop2288/consulo
/*
 * Copyright 2013-2019 consulo.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package consulo.desktop.container.boot;

import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import consulo.annotation.DeprecationInfo;
import consulo.application.ApplicationProperties;
import consulo.container.boot.ContainerPathManager;
import consulo.util.collection.ArrayUtil;
import org.jetbrains.annotations.Contract;

import javax.annotation.Nonnull;
import java.io.*;
import java.util.*;

import static com.intellij.util.SystemProperties.getUserHome;

/**
 * Desktop implementation of {@link ContainerPathManager}: resolves the IDE's
 * home, config, system, plugins and log directories from system properties,
 * falling back to {@code DefaultPaths} defaults. All resolved paths are cached
 * in static fields, so each lookup is performed at most once per JVM.
 *
 * <p>Not thread-safe: the lazy static caching below assumes single-threaded
 * boot-time access (no synchronization around the {@code our*} fields).
 *
 * @author VISTALL
 * @since 2019-12-07
 */
public class DesktopContainerPathManager extends ContainerPathManager {
  @Deprecated
  public static final String OLD_PROPERTIES_FILE = "idea.properties.file";
  public static final String PROPERTIES_FILE = "consulo.properties.file";
  public static final String PROPERTY_SYSTEM_PATH = "idea.system.path";
  public static final String PROPERTY_SCRATCH_PATH = "idea.scratch.path";
  public static final String PROPERTY_CONFIG_PATH = "idea.config.path";
  @Deprecated
  @DeprecationInfo("See ApplicationProperties#CONSULO_PLUGINS_PATHS")
  public static final String PROPERTY_PLUGINS_PATH = ApplicationProperties.IDEA_PLUGINS_PATH;
  public static final String PROPERTY_HOME_PATH = "consulo.home.path";
  @Deprecated
  public static final String OLD_PROPERTY_HOME_PATH = "idea.home.path";
  public static final String PROPERTY_LOG_PATH = "idea.log.path";

  private static final String PLATFORM_FOLDER = "platform";
  private static final String LIB_FOLDER = "lib";
  private static final String PLUGINS_FOLDER = "plugins";
  private static final String BIN_FOLDER = "bin";
  private static final String OPTIONS_FOLDER = "options";

  // Lazily-resolved caches; null until the corresponding getter runs.
  private static String ourHomePath;
  private static String ourSystemPath;
  private static String ourConfigPath;
  private static String ourScratchPath;
  private static File ourLogPath;
  private static String ourInstallPluginsPath;
  private static String[] ourPluginsPaths;

  /**
   * @return home path of platform (in most cases path is $APP_HOME_PATH$/platform/$HOME_PATH$)
   */
  // Resolution order: PROPERTY_HOME_PATH, then the deprecated
  // OLD_PROPERTY_HOME_PATH; throws if neither is set and nothing was loaded
  // from the properties file (loadProperties may pre-seed ourHomePath).
  @Override
  @Nonnull
  public String getHomePath() {
    if (ourHomePath != null) return ourHomePath;

    if (System.getProperty(PROPERTY_HOME_PATH) != null) {
      ourHomePath = getAbsolutePath(System.getProperty(PROPERTY_HOME_PATH));
      if (!new File(ourHomePath).isDirectory()) {
        throw new RuntimeException("Invalid home path '" + ourHomePath + "'");
      }
    }
    else if (System.getProperty(OLD_PROPERTY_HOME_PATH) != null) {
      ourHomePath = getAbsolutePath(System.getProperty(OLD_PROPERTY_HOME_PATH));
      if (!new File(ourHomePath).isDirectory()) {
        throw new RuntimeException("Invalid home path '" + ourHomePath + "'");
      }
    }
    else {
      if (ourHomePath == null) {
        throw new RuntimeException("Could not find installation home path.");
      }
    }

    // Canonicalize on Windows to normalize case/short names; failures are ignored.
    if (SystemInfo.isWindows) {
      try {
        ourHomePath = new File(ourHomePath).getCanonicalPath();
      }
      catch (IOException ignored) {
      }
    }

    return ourHomePath;
  }

  /**
   * @return external platform directory for mac, or platform directory inside application for other oses
   */
  @Override
  @Nonnull
  public File getExternalPlatformDirectory() {
    File defaultPath = new File(getAppHomeDirectory(), PLATFORM_FOLDER);
    // force platform inside distribution directory
    if (Boolean.getBoolean(ApplicationProperties.CONSULO_NO_EXTERNAL_PLATFORM) || ApplicationProperties.isInSandbox()) {
      return defaultPath;
    }
    return DefaultPaths.getInstance().getExternalPlatformDirectory(defaultPath);
  }

  /**
   * @return app home, equal IDE installation path
   */
  // Derived by walking two levels up from home ($APP_HOME/platform/$HOME);
  // throws if the layout does not match, unless overridden by property.
  @Override
  @Nonnull
  public File getAppHomeDirectory() {
    String appHomePath = System.getProperty(ApplicationProperties.CONSULO_APP_HOME_PATH);
    if (appHomePath != null) {
      return new File(getAbsolutePath(trimPathQuotes(appHomePath)));
    }

    File homeDir = new File(getHomePath());
    // 'platform' directory
    File parentFile = homeDir.getParentFile();
    if (!parentFile.getName().equals(PLATFORM_FOLDER)) {
      throw new IllegalArgumentException("Parent dir is not platform: " + parentFile.getName());
    }
    return parentFile.getParentFile();
  }

  /** @return {@code $HOME/bin}. */
  @Override
  @Nonnull
  public String getBinPath() {
    return getHomePath() + File.separator + BIN_FOLDER;
  }

  /** @return {@code $HOME/lib}. */
  @Override
  @Nonnull
  public String getLibPath() {
    return getHomePath() + File.separator + LIB_FOLDER;
  }

  // config paths

  // Config dir: PROPERTY_CONFIG_PATH override, else the roaming settings default.
  @Override
  @Nonnull
  public String getConfigPath() {
    if (ourConfigPath != null) return ourConfigPath;

    if (System.getProperty(PROPERTY_CONFIG_PATH) != null) {
      ourConfigPath = getAbsolutePath(trimPathQuotes(System.getProperty(PROPERTY_CONFIG_PATH)));
    }
    else {
      ourConfigPath = DefaultPaths.getInstance().getRoamingSettingsDir();
    }
    if (ApplicationProperties.isInSandbox()) {
      //noinspection UseOfSystemOutOrSystemErr
      System.out.println("Config Path: " + ourConfigPath);
    }
    return ourConfigPath;
  }

  // Scratch dir: PROPERTY_SCRATCH_PATH override, else same as the config dir.
  @Override
  @Nonnull
  public String getScratchPath() {
    if (ourScratchPath != null) return ourScratchPath;

    if (System.getProperty(PROPERTY_SCRATCH_PATH) != null) {
      ourScratchPath = getAbsolutePath(trimPathQuotes(System.getProperty(PROPERTY_SCRATCH_PATH)));
    }
    else {
      ourScratchPath = getConfigPath();
    }
    return ourScratchPath;
  }

  // Creates the config directory if missing.
  public void ensureConfigFolderExists() {
    checkAndCreate(getConfigPath(), true);
  }

  /** @return {@code $CONFIG/options}. */
  @Override
  @Nonnull
  public String getOptionsPath() {
    return getConfigPath() + File.separator + OPTIONS_FOLDER;
  }

  /** @return {@code $CONFIG/options/<fileName>.xml}. */
  @Override
  @Nonnull
  public File getOptionsFile(@Nonnull String fileName) {
    return new File(getOptionsPath(), fileName + ".xml");
  }

  // Install target for plugins: property override, else the single configured
  // plugins path (throws if there is more than one).
  @Override
  @Nonnull
  public String getInstallPluginsPath() {
    if (ourInstallPluginsPath != null) {
      return ourInstallPluginsPath;
    }

    String property = System.getProperty(ApplicationProperties.CONSULO_INSTALL_PLUGINS_PATH);
    if (property != null) {
      ourInstallPluginsPath = getAbsolutePath(trimPathQuotes(property));
    }
    else {
      String[] pluginsPaths = getPluginsPaths();
      if (pluginsPaths.length != 1) {
        throw new IllegalArgumentException("Plugins paths size is not equal one. Paths: " + Arrays.asList(pluginsPaths));
      }
      ourInstallPluginsPath = pluginsPaths[0];
    }
    if (ApplicationProperties.isInSandbox()) {
      //noinspection UseOfSystemOutOrSystemErr
      System.out.println("Install Plugins Path: " + ourInstallPluginsPath);
    }
    return ourInstallPluginsPath;
  }

  // Plugins roots, in priority order: obsolete IDEA_PLUGINS_PATH property,
  // CONSULO_PLUGINS_PATHS (path-separator-delimited list), a "plugins" dir
  // under an overridden config path, else the roaming default.
  @Override
  @Nonnull
  public String[] getPluginsPaths() {
    if (ourPluginsPaths != null) return ourPluginsPaths;

    String pathFromProperty = System.getProperty(ApplicationProperties.IDEA_PLUGINS_PATH);
    if (pathFromProperty != null) {
      pathFromProperty = getAbsolutePath(trimPathQuotes(pathFromProperty));
      //noinspection UseOfSystemOutOrSystemErr
      System.out.println("Using obsolete property: " + ApplicationProperties.IDEA_PLUGINS_PATH);
      ourPluginsPaths = new String[]{getAbsolutePath(trimPathQuotes(pathFromProperty))};
    }
    else if (System.getProperty(ApplicationProperties.CONSULO_PLUGINS_PATHS) != null) {
      pathFromProperty = System.getProperty(ApplicationProperties.CONSULO_PLUGINS_PATHS);

      String[] splittedPaths = pathFromProperty.split(File.pathSeparator);
      for (int i = 0; i < splittedPaths.length; i++) {
        String splitValue = splittedPaths[i];
        splittedPaths[i] = getAbsolutePath(trimPathQuotes(splitValue));
      }
      ourPluginsPaths = splittedPaths;
    }
    else if (System.getProperty(PROPERTY_CONFIG_PATH) != null) {
      // if config path overridden, use another logic for plugins
      ourPluginsPaths = new String[]{getConfigPath() + File.separatorChar + "plugins"};
    }
    else {
      ourPluginsPaths = new String[]{DefaultPaths.getInstance().getRoamingPluginsDir()};
    }
    if (ApplicationProperties.isInSandbox()) {
      //noinspection UseOfSystemOutOrSystemErr
      System.out.println("Plugins Paths: " + Arrays.asList(ourPluginsPaths));
    }
    return ourPluginsPaths;
  }

  // runtime paths

  // System (caches) dir: PROPERTY_SYSTEM_PATH override, else local settings
  // default. Created on first access.
  @Override
  @Nonnull
  public String getSystemPath() {
    if (ourSystemPath != null) return ourSystemPath;

    if (System.getProperty(PROPERTY_SYSTEM_PATH) != null) {
      ourSystemPath = getAbsolutePath(trimPathQuotes(System.getProperty(PROPERTY_SYSTEM_PATH)));
    }
    else {
      ourSystemPath = DefaultPaths.getInstance().getLocalSettingsDir();
    }
    if (ApplicationProperties.isInSandbox()) {
      //noinspection UseOfSystemOutOrSystemErr
      System.out.println("System Path: " + ourSystemPath);
    }

    checkAndCreate(ourSystemPath, true);
    return ourSystemPath;
  }

  @Nonnull
  @Override
  public File getDocumentsDir() {
    return DefaultPaths.getInstance().getDocumentsDir();
  }

  /** @return {@code $SYSTEM/tmp}. */
  @Override
  @Nonnull
  public String getTempPath() {
    return getSystemPath() + File.separator + "tmp";
  }

  // Index root: "index_root_path" property override, else $SYSTEM/index.
  // Created on first access.
  @Override
  @Nonnull
  public File getIndexRoot() {
    String indexRoot = System.getProperty("index_root_path", getSystemPath() + "/index");
    checkAndCreate(indexRoot, true);
    return new File(indexRoot);
  }

  // Log dir: PROPERTY_LOG_PATH override; else $SYSTEM/logs when the system
  // path was overridden; else the local logs default.
  @Override
  @Nonnull
  public File getLogPath() {
    if (ourLogPath != null) return ourLogPath;

    if (System.getProperty(PROPERTY_LOG_PATH) != null) {
      ourLogPath = getAbsoluteFile(trimPathQuotes(System.getProperty(PROPERTY_LOG_PATH)));
    }
    else if (System.getProperty(PROPERTY_SYSTEM_PATH) != null) {
      // if system path overridden, use another logic for logs
      ourLogPath = new File(getSystemPath(), "logs");
    }
    else {
      ourLogPath = DefaultPaths.getInstance().getLocalLogsDir();
    }
    return ourLogPath;
  }

  /** @return {@code $SYSTEM/plugins} — staging area for plugin installation. */
  @Override
  @Nonnull
  public String getPluginTempPath() {
    return getSystemPath() + File.separator + PLUGINS_FOLDER;
  }

  // Loads the first existing consulo.properties (property overrides, app home,
  // user home) into System properties without clobbering already-set keys.
  // May also seed ourHomePath from the "idea.home" entry.
  public void loadProperties() {
    List<String> paths = new ArrayList<>();
    paths.add(System.getProperty(PROPERTIES_FILE));
    paths.add(System.getProperty(OLD_PROPERTIES_FILE));
    paths.add(new File(getAppHomeDirectory(), "consulo.properties").getPath());
    paths.add(getUserHome() + "/consulo.properties");

    File propFile = FileUtil.findFirstThatExist(ArrayUtil.toStringArray(paths));
    if (propFile == null) {
      return;
    }

    try (InputStream fis = new BufferedInputStream(new FileInputStream(propFile))) {
      final PropertyResourceBundle bundle = new PropertyResourceBundle(fis);
      final Enumeration keys = bundle.getKeys();
      String home = (String)bundle.handleGetObject("idea.home");
      if (home != null && ourHomePath == null) {
        ourHomePath = getAbsolutePath(substituteVars(home));
      }
      final Properties sysProperties = System.getProperties();
      while (keys.hasMoreElements()) {
        String key = (String)keys.nextElement();
        if (sysProperties.getProperty(key, null) == null) {
          // load the property from the property file only if it is not defined yet
          final String value = substituteVars(bundle.getString(key));
          sysProperties.setProperty(key, value);
        }
      }
    }
    catch (IOException e) {
      //noinspection HardCodedStringLiteral,UseOfSystemOutOrSystemErr
      System.err.println("Problem reading from property file: " + propFile.getPath());
    }
  }

  // Expands ${...} placeholders against the resolved home path.
  @Contract("null -> null")
  public String substituteVars(String s) {
    final String ideaHomePath = getHomePath();
    return substituteVars(s, ideaHomePath);
  }

  // Expands ${idea.home} and every ${system.property} in s; a leading ".."
  // is resolved relative to $HOME/bin.
  @Contract("null, _ -> null")
  public static String substituteVars(String s, String ideaHomePath) {
    if (s == null) return null;
    if (s.startsWith("..")) {
      s = ideaHomePath + File.separatorChar + BIN_FOLDER + File.separatorChar + s;
    }
    s = StringUtil.replace(s, "${idea.home}", ideaHomePath);
    final Properties props = System.getProperties();
    final Set keys = props.keySet();
    for (final Object key1 : keys) {
      String key = (String)key1;
      String value = props.getProperty(key);
      s = StringUtil.replace(s, "${" + key + "}", value);
    }
    return s;
  }

  // helpers

  // Expands a leading "~" to the user home, then absolutizes.
  private static String getAbsolutePath(String path) {
    if (path.startsWith("~/") || path.startsWith("~\\")) {
      path = getUserHome() + path.substring(1);
    }

    return new File(path).getAbsolutePath();
  }

  // Same "~" expansion, but returns the File without absolutizing.
  @Nonnull
  private static File getAbsoluteFile(String path) {
    if (path.startsWith("~/") || path.startsWith("~\\")) {
      path = getUserHome() + path.substring(1);
    }
    return new File(path);
  }

  // Strips one pair of surrounding double quotes from paths of length >= 3.
  private static String trimPathQuotes(String path) {
    if (!(path != null && !(path.length() < 3))) {
      return path;
    }
    if (StringUtil.startsWithChar(path, '\"') && StringUtil.endsWithChar(path, '\"')) {
      return path.substring(1, path.length() - 1);
    }
    return path;
  }

  // mkdirs() the path when createIfNotExists is set; returns true only when
  // the directory was actually created by this call.
  private static boolean checkAndCreate(String path, boolean createIfNotExists) {
    if (createIfNotExists) {
      File file = new File(path);
      if (!file.exists()) {
        return file.mkdirs();
      }
    }

    return false;
  }
}
4,898
302
# --------------------------------------------------------------------------
# Source file provided under Apache License, Version 2.0, January 2004,
# http://www.apache.org/licenses/
# (c) Copyright IBM Corp. 2020
# --------------------------------------------------------------------------

"""
The trim loss problem arises in the paper industry.

The problem is to cut wide paper rolls into sub rolls (orders).
The wide rolls are cut into pieces with a cutting pattern.
A cutting pattern defines the blade positions for cutting the roll.
A maximum number of orders is allowed in a cutting pattern
(here it is 5; see MAX_ORDER_PER_CUT below).
When cutting a wide roll, we can have a loss of paper that is wasted.
This loss is constrained to be not more than a given value (here it is 100).

An order is characterised by a demand, a roll width, and a maximum number of
times it can appear in a cutting pattern.

The goal is to meet the demand while minimizing the rolls used and the number
of different cutting patterns used for production.

In this example we also use:
 - extra constraints to avoid assigning orders to unused patterns,
 - lexicographic constraints to break symmetries between cutting patterns
 - strong constraints to have a better domain reduction by enumerating
   possible patterns configurations

All this makes the proof of optimality rather fast.
"""

from docplex.cp.model import *
from sys import stdout

#-----------------------------------------------------------------------------
# Initialize the problem data
#-----------------------------------------------------------------------------

# Data
ROLL_WIDTH = 2200          # Width of roll to be cut into pieces
MAX_WASTE = 100            # Maximum waste per roll
MAX_ORDER_PER_CUT = 5      # Maximum number of orders per cutting pattern

# Orders demand, width and max occurrence in a cutting pattern
ORDER_DEMAND     = (  8,  16,  12,   7,  14,  16)
ORDER_WIDTH      = (330, 360, 380, 430, 490, 530)
ORDER_MAX_REPEAT = (  2,   3,   3,   5,   3,   4)

# Number of different order types
NUM_ORDER_TYPE = len(ORDER_DEMAND)

# Maximum number of cutting pattern
NUM_PATTERN_TYPE = 6

# Maximum of time a cutting pattern is used
MAX_PATTERN_USAGE = 16

# Cost of using a pattern
PATTERN_COST = 0.1

# Cost of a roll
ROLL_COST = 1

PATTERNS = range(NUM_PATTERN_TYPE)
ORDERS = range(NUM_ORDER_TYPE)

#-----------------------------------------------------------------------------
# Build the model
#-----------------------------------------------------------------------------

model = CpoModel()

# Decision variables : pattern usage (how many rolls are cut with each pattern)
patternUsage = [model.integer_var(0, MAX_PATTERN_USAGE, "PatternUsage_"+str(p)) for p in PATTERNS]

# Decision variables : order quantity per pattern
# (note: the comprehension variable `max` shadows the builtin inside this line only)
x = [[model.integer_var(0, max, "x["+str(o)+","+str(p)+"]") for (o, max) in enumerate(ORDER_MAX_REPEAT)] for p in PATTERNS]

# Maximum number of orders per cutting pattern
for p in PATTERNS :
    model.add(sum(x[p]) <= MAX_ORDER_PER_CUT)

# Roll capacity: a used pattern consumes between ROLL_WIDTH-MAX_WASTE and
# ROLL_WIDTH; 0 is allowed so an unused pattern consumes nothing.
usage = [0] + [v for v in range(ROLL_WIDTH - MAX_WASTE, ROLL_WIDTH+1)]  # usage is [0, 2100..2200]
rollUsage = [model.integer_var(domain = usage, name = "RollUsage_"+str(p)) for p in PATTERNS]
for p in PATTERNS :
    model.add(sum(ORDER_WIDTH[o] * x[p][o] for o in ORDERS) == rollUsage[p])

# Production requirement
for o in ORDERS :
    model.add(model.sum(x[p][o] * patternUsage[p] for p in PATTERNS) >= ORDER_DEMAND[o])

# Objective: roll cost plus a fixed cost per pattern actually used
# ((patternUsage[p] > 0) is a 0/1 expression)
model.add(minimize(model.sum((patternUsage[p] > 0) * PATTERN_COST + patternUsage[p] * ROLL_COST for p in PATTERNS)))

# Extra constraint to avoid assigning orders to an unused pattern
for p in PATTERNS :
    model.add((patternUsage[p] == 0) == (rollUsage[p] == 0))

# Extra lexicographic constraint to break symmetries
for p in range(NUM_PATTERN_TYPE - 1) :
    model.add(model.lexicographic([patternUsage[p]] + x[p], [patternUsage[p+1]] + x[p+1]))

# Strong constraints to improve the time to prove optimality
for p in PATTERNS :
    model.add(model.strong(x[p]))

# KPIs : Number of rolls, of pattern used and total loss of paper
model.add_kpi(model.sum([patternUsage[p] for p in PATTERNS]), "Rolls")
model.add_kpi(model.sum([(patternUsage[p] > 0) for p in PATTERNS]), "Patterns")
model.add_kpi(model.sum([patternUsage[p] * (ROLL_WIDTH - rollUsage[p]) for p in PATTERNS]), "Loss")

#-----------------------------------------------------------------------------
# Solve the model and display the result
#-----------------------------------------------------------------------------

print("Solve the model...")
msol = model.solve(LogPeriod=1000000, TimeLimit=300)

if msol:
    print("patternUsage = ")
    for p in PATTERNS:
        # l is the paper lost per roll cut with pattern p
        l = ROLL_WIDTH - msol[rollUsage[p]]
        stdout.write("Pattern {} , usage = {}, roll usage = {}, loss = {}, orders =".format(p, msol[patternUsage[p]], msol[rollUsage[p]], l))
        for o in ORDERS:
            stdout.write(" {}".format(msol[x[p][o]]))
        stdout.write('\n')
else:
    print("No solution found")
1,592
327
/*
 * Single-translation-unit ("amalgamation") build for the network module:
 * when compiled with NETWORK_C defined, the sub-module guard macros are
 * defined up front so every network_*.c file below compiles into this one
 * translation unit.
 */
#ifdef NETWORK_C
#pragma once
#define SOCKET_C
#define DISPATCH_C
#define PORT_C
#define WEBSERVER_C
#define OSCSEND_C
#define OSCRECV_C
#define OSCPACK_C
#define SENDBUF_C
#endif

/* Order matters: compat first, then transports, then the OSC layers that
 * build on them. */
#include "network_compat.c"
#include "network_socket.c"
#include "network_dispatch.c"
#include "network_port.c"
#include "network_webserver.c"
#include "network_oscsend.c"
#include "network_oscrecv.c"
#include "network_oscpack.c"
#include "network_sendbuf.c"
184
5,250
<gh_stars>1000+
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.cmmn.editor.json.converter;

import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.flowable.cmmn.editor.json.converter.CmmnJsonConverter.CmmnModelIdHelper;
import org.flowable.cmmn.editor.json.converter.util.ListenerConverterUtil;
import org.flowable.cmmn.model.BaseElement;
import org.flowable.cmmn.model.CaseElement;
import org.flowable.cmmn.model.CmmnModel;
import org.flowable.cmmn.model.PlanItem;
import org.flowable.cmmn.model.ScriptServiceTask;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * Converter between the CMMN editor's "script task" stencil JSON and the
 * {@link ScriptServiceTask} model element. Registered in the converter maps
 * under {@code STENCIL_TASK_SCRIPT}.
 *
 * @author <NAME>
 */
public class ScriptTaskJsonConverter extends BaseCmmnJsonConverter {

    // Registers this converter for the script-task stencil (delegates to fillJsonTypes).
    public static void fillTypes(Map<String, Class<? extends BaseCmmnJsonConverter>> convertersToCmmnMap) {
        fillJsonTypes(convertersToCmmnMap);
    }

    public static void fillJsonTypes(Map<String, Class<? extends BaseCmmnJsonConverter>> convertersToBpmnMap) {
        convertersToBpmnMap.put(STENCIL_TASK_SCRIPT, ScriptTaskJsonConverter.class);
    }

    @Override
    protected String getStencilId(BaseElement baseElement) {
        return STENCIL_TASK_SCRIPT;
    }

    // JSON -> model: builds a ScriptServiceTask from the stencil properties
    // (script format, script text, optional result variable, lifecycle listeners).
    @Override
    protected CaseElement convertJsonToElement(JsonNode elementNode, JsonNode modelNode, ActivityProcessor processor,
            BaseElement parentElement, Map<String, JsonNode> shapeMap, CmmnModel cmmnModel, CmmnJsonConverterContext converterContext, CmmnModelIdHelper cmmnModelIdHelper) {

        ScriptServiceTask scriptServiceTask = new ScriptServiceTask();
        // Script format (e.g. the scripting language id) is stored as the implementation type.
        scriptServiceTask.setImplementationType(CmmnJsonConverterUtil.getPropertyValueAsString(PROPERTY_SCRIPT_TASK_SCRIPT_FORMAT, elementNode));
        addField("script", PROPERTY_SCRIPT_TASK_SCRIPT_TEXT, elementNode, scriptServiceTask);

        if (StringUtils.isNotEmpty(CmmnJsonConverterUtil.getPropertyValueAsString(PROPERTY_SERVICETASK_RESULT_VARIABLE, elementNode))) {
            scriptServiceTask.setResultVariableName(CmmnJsonConverterUtil.getPropertyValueAsString(PROPERTY_SERVICETASK_RESULT_VARIABLE, elementNode));
        }

        ListenerConverterUtil.convertJsonToLifeCycleListeners(elementNode, scriptServiceTask);

        return scriptServiceTask;
    }

    // Model -> JSON: property conversion is handled by the service task
    // converter; only lifecycle listeners are emitted here.
    @Override
    protected void convertElementToJson(ObjectNode elementNode, ObjectNode propertiesNode, ActivityProcessor processor,
            BaseElement baseElement, CmmnModel cmmnModel, CmmnJsonConverterContext converterContext) {
        // Done in service task converter
        ListenerConverterUtil.convertLifecycleListenersToJson(objectMapper, propertiesNode, ((PlanItem) baseElement).getPlanItemDefinition());
    }
}
1,163
1,248
<filename>intg/src/main/java/org/apache/atlas/model/impexp/AttributeTransform.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.atlas.model.impexp;

import org.apache.commons.lang.StringUtils;

import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY;

/**
 * Serializable pairing of attribute-name conditions with the action to apply
 * when those conditions match, used by import/export attribute transforms.
 * Serialized via Jackson with null fields omitted ({@code NON_NULL}).
 */
@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown=true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.PROPERTY)
public class AttributeTransform implements Serializable {
    private Map<String, String> conditions;
    private Map<String, String> action;

    public AttributeTransform() {
    }

    public AttributeTransform(Map<String, String> conditions, Map<String, String> action) {
        this.conditions = conditions;
        this.action     = action;
    }

    public Map<String, String> getConditions() {
        return conditions;
    }

    public void setConditions(Map<String, String> conditions) {
        this.conditions = conditions;
    }

    public Map<String, String> getAction() {
        return action;
    }

    public void setAction(Map<String, String> action) {
        this.action = action;
    }

    /**
     * Adds a condition entry; silently ignored when either argument is
     * null/empty. The conditions map is created lazily on first valid add.
     */
    public void addCondition(String attributeName, String conditionValue) {
        // BUGFIX: validate before allocating. Previously the map was created
        // even when the entry was rejected, leaving an empty (non-null) map
        // that would then be serialized despite the NON_NULL intent.
        if (StringUtils.isNotEmpty(attributeName) && StringUtils.isNotEmpty(conditionValue)) {
            if (conditions == null) {
                conditions = new HashMap<>();
            }

            conditions.put(attributeName, conditionValue);
        }
    }

    /**
     * Adds an action entry; silently ignored when either argument is
     * null/empty. The action map is created lazily on first valid add.
     */
    public void addAction(String attributeName, String actionValue) {
        // BUGFIX: same lazy-allocation fix as addCondition.
        if (StringUtils.isNotEmpty(attributeName) && StringUtils.isNotEmpty(actionValue)) {
            if (action == null) {
                action = new HashMap<>();
            }

            action.put(attributeName, actionValue);
        }
    }
}
1,071
523
<gh_stars>100-1000
// Copyright <NAME> 2019.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file ../LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#if !defined(CNL_IMPL_NUM_TRAITS_ROUNDING_H)
#define CNL_IMPL_NUM_TRAITS_ROUNDING_H

#include "../rounding/native_rounding_tag.h"
#include "../rounding/nearest_rounding_tag.h"
#include "../type_traits/is_integral.h"

#include <type_traits>

namespace cnl {
    /// \brief given a numeric type, defines member `type` as its rounding mode
    /// \headerfile cnl/num_traits.h
    /// \note User-specializations of this type are permitted.
    /// \sa set_rounding, rounding_t, native_rounding_tag, nearest_rounding_tag
    template<typename Number>
    struct rounding;

    // Strip rvalue references so rounding<T&&> resolves the same as rounding<T>.
    template<typename Number>
    struct rounding<Number&&> : rounding<Number> {
    };

    // Built-in integral types get native_rounding_tag.
    // NOTE(review): `_impl::integral` is used as a constraint here (C++20
    // concept syntax) — presumably declared in is_integral.h; confirm.
    template<_impl::integral Number>
    struct rounding<Number> : std::type_identity<native_rounding_tag> {
    };

    /// \brief helper alias of \ref rounding
    /// \headerfile cnl/num_traits.h
    /// \sa rounding, set_rounding_t, native_rounding_tag, nearest_rounding_tag
    template<typename Number>
    using rounding_t = typename rounding<Number>::type;
}

#endif  // CNL_IMPL_NUM_TRAITS_ROUNDING_H
509
672
<gh_stars>100-1000
/*
 * Copyright (c) 2000 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
/*
 * @OSF_COPYRIGHT@
 *
 */

#ifndef _MACHINE_ARCH_TYPES_H_
#define _MACHINE_ARCH_TYPES_H_

/*
 * Fixed-width integer typedefs. The widths in the names assume the
 * platform's char/short/int are 8/16/32 bits respectively (true for the
 * architectures this Apple/OSF header targets).
 */
typedef signed char bit8_t;      /* signed 8-bit quantity */
typedef unsigned char u_bit8_t;  /* unsigned 8-bit quantity */

typedef short bit16_t;           /* signed 16-bit quantity */
typedef unsigned short u_bit16_t; /* unsigned 16-bit quantity */

typedef int bit32_t;             /* signed 32-bit quantity */
typedef unsigned int u_bit32_t;  /* unsigned 32-bit quantity */

#endif
607
2,392
// This file is part of Eigen, a lightweight C++ template library // for linear algebra. // // Copyright (C) 2008-2009 <NAME> <<EMAIL>> // // This Source Code Form is subject to the terms of the Mozilla // Public License v. 2.0. If a copy of the MPL was not distributed // with this file, You can obtain one at http://mozilla.org/MPL/2.0/. #include "main.h" template<typename MatrixType> void syrk(const MatrixType& m) { typedef typename MatrixType::Index Index; typedef typename MatrixType::Scalar Scalar; typedef Matrix<Scalar, MatrixType::RowsAtCompileTime, MatrixType::ColsAtCompileTime, RowMajor> RMatrixType; typedef Matrix<Scalar, MatrixType::ColsAtCompileTime, Dynamic> Rhs1; typedef Matrix<Scalar, Dynamic, MatrixType::RowsAtCompileTime> Rhs2; typedef Matrix<Scalar, MatrixType::ColsAtCompileTime, Dynamic,RowMajor> Rhs3; Index rows = m.rows(); Index cols = m.cols(); MatrixType m1 = MatrixType::Random(rows, cols), m2 = MatrixType::Random(rows, cols), m3 = MatrixType::Random(rows, cols); RMatrixType rm2 = MatrixType::Random(rows, cols); Rhs1 rhs1 = Rhs1::Random(internal::random<int>(1,320), cols); Rhs1 rhs11 = Rhs1::Random(rhs1.rows(), cols); Rhs2 rhs2 = Rhs2::Random(rows, internal::random<int>(1,320)); Rhs2 rhs22 = Rhs2::Random(rows, rhs2.cols()); Rhs3 rhs3 = Rhs3::Random(internal::random<int>(1,320), rows); Scalar s1 = internal::random<Scalar>(); Index c = internal::random<Index>(0,cols-1); m2.setZero(); VERIFY_IS_APPROX((m2.template selfadjointView<Lower>().rankUpdate(rhs2,s1)._expression()), ((s1 * rhs2 * rhs2.adjoint()).eval().template triangularView<Lower>().toDenseMatrix())); m2.setZero(); VERIFY_IS_APPROX(((m2.template triangularView<Lower>() += s1 * rhs2 * rhs22.adjoint()).nestedExpression()), ((s1 * rhs2 * rhs22.adjoint()).eval().template triangularView<Lower>().toDenseMatrix())); m2.setZero(); VERIFY_IS_APPROX(m2.template selfadjointView<Upper>().rankUpdate(rhs2,s1)._expression(), (s1 * rhs2 * rhs2.adjoint()).eval().template 
triangularView<Upper>().toDenseMatrix()); m2.setZero(); VERIFY_IS_APPROX((m2.template triangularView<Upper>() += s1 * rhs22 * rhs2.adjoint()).nestedExpression(), (s1 * rhs22 * rhs2.adjoint()).eval().template triangularView<Upper>().toDenseMatrix()); m2.setZero(); VERIFY_IS_APPROX(m2.template selfadjointView<Lower>().rankUpdate(rhs1.adjoint(),s1)._expression(), (s1 * rhs1.adjoint() * rhs1).eval().template triangularView<Lower>().toDenseMatrix()); m2.setZero(); VERIFY_IS_APPROX((m2.template triangularView<Lower>() += s1 * rhs11.adjoint() * rhs1).nestedExpression(), (s1 * rhs11.adjoint() * rhs1).eval().template triangularView<Lower>().toDenseMatrix()); m2.setZero(); VERIFY_IS_APPROX(m2.template selfadjointView<Upper>().rankUpdate(rhs1.adjoint(),s1)._expression(), (s1 * rhs1.adjoint() * rhs1).eval().template triangularView<Upper>().toDenseMatrix()); VERIFY_IS_APPROX((m2.template triangularView<Upper>() = s1 * rhs1.adjoint() * rhs11).nestedExpression(), (s1 * rhs1.adjoint() * rhs11).eval().template triangularView<Upper>().toDenseMatrix()); m2.setZero(); VERIFY_IS_APPROX(m2.template selfadjointView<Lower>().rankUpdate(rhs3.adjoint(),s1)._expression(), (s1 * rhs3.adjoint() * rhs3).eval().template triangularView<Lower>().toDenseMatrix()); m2.setZero(); VERIFY_IS_APPROX(m2.template selfadjointView<Upper>().rankUpdate(rhs3.adjoint(),s1)._expression(), (s1 * rhs3.adjoint() * rhs3).eval().template triangularView<Upper>().toDenseMatrix()); m2.setZero(); VERIFY_IS_APPROX((m2.template selfadjointView<Lower>().rankUpdate(m1.col(c),s1)._expression()), ((s1 * m1.col(c) * m1.col(c).adjoint()).eval().template triangularView<Lower>().toDenseMatrix())); m2.setZero(); VERIFY_IS_APPROX((m2.template selfadjointView<Upper>().rankUpdate(m1.col(c),s1)._expression()), ((s1 * m1.col(c) * m1.col(c).adjoint()).eval().template triangularView<Upper>().toDenseMatrix())); rm2.setZero(); VERIFY_IS_APPROX((rm2.template selfadjointView<Upper>().rankUpdate(m1.col(c),s1)._expression()), ((s1 * m1.col(c) * 
m1.col(c).adjoint()).eval().template triangularView<Upper>().toDenseMatrix())); m2.setZero(); VERIFY_IS_APPROX((m2.template triangularView<Upper>() += s1 * m3.col(c) * m1.col(c).adjoint()).nestedExpression(), ((s1 * m3.col(c) * m1.col(c).adjoint()).eval().template triangularView<Upper>().toDenseMatrix())); rm2.setZero(); VERIFY_IS_APPROX((rm2.template triangularView<Upper>() += s1 * m1.col(c) * m3.col(c).adjoint()).nestedExpression(), ((s1 * m1.col(c) * m3.col(c).adjoint()).eval().template triangularView<Upper>().toDenseMatrix())); m2.setZero(); VERIFY_IS_APPROX((m2.template selfadjointView<Lower>().rankUpdate(m1.col(c).conjugate(),s1)._expression()), ((s1 * m1.col(c).conjugate() * m1.col(c).conjugate().adjoint()).eval().template triangularView<Lower>().toDenseMatrix())); m2.setZero(); VERIFY_IS_APPROX((m2.template selfadjointView<Upper>().rankUpdate(m1.col(c).conjugate(),s1)._expression()), ((s1 * m1.col(c).conjugate() * m1.col(c).conjugate().adjoint()).eval().template triangularView<Upper>().toDenseMatrix())); m2.setZero(); VERIFY_IS_APPROX((m2.template selfadjointView<Lower>().rankUpdate(m1.row(c),s1)._expression()), ((s1 * m1.row(c).transpose() * m1.row(c).transpose().adjoint()).eval().template triangularView<Lower>().toDenseMatrix())); rm2.setZero(); VERIFY_IS_APPROX((rm2.template selfadjointView<Lower>().rankUpdate(m1.row(c),s1)._expression()), ((s1 * m1.row(c).transpose() * m1.row(c).transpose().adjoint()).eval().template triangularView<Lower>().toDenseMatrix())); m2.setZero(); VERIFY_IS_APPROX((m2.template triangularView<Lower>() += s1 * m3.row(c).transpose() * m1.row(c).transpose().adjoint()).nestedExpression(), ((s1 * m3.row(c).transpose() * m1.row(c).transpose().adjoint()).eval().template triangularView<Lower>().toDenseMatrix())); rm2.setZero(); VERIFY_IS_APPROX((rm2.template triangularView<Lower>() += s1 * m3.row(c).transpose() * m1.row(c).transpose().adjoint()).nestedExpression(), ((s1 * m3.row(c).transpose() * 
m1.row(c).transpose().adjoint()).eval().template triangularView<Lower>().toDenseMatrix())); m2.setZero(); VERIFY_IS_APPROX((m2.template selfadjointView<Upper>().rankUpdate(m1.row(c).adjoint(),s1)._expression()), ((s1 * m1.row(c).adjoint() * m1.row(c).adjoint().adjoint()).eval().template triangularView<Upper>().toDenseMatrix())); } void test_product_syrk() { for(int i = 0; i < g_repeat ; i++) { int s; s = internal::random<int>(1,EIGEN_TEST_MAX_SIZE); CALL_SUBTEST_1( syrk(MatrixXf(s, s)) ); s = internal::random<int>(1,EIGEN_TEST_MAX_SIZE); CALL_SUBTEST_2( syrk(MatrixXd(s, s)) ); s = internal::random<int>(1,EIGEN_TEST_MAX_SIZE/2); CALL_SUBTEST_3( syrk(MatrixXcf(s, s)) ); s = internal::random<int>(1,EIGEN_TEST_MAX_SIZE/2); CALL_SUBTEST_4( syrk(MatrixXcd(s, s)) ); } }
3,266
1,318
/* * Copyright 2016 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.linkedin.drelephant.tuning; import com.avaje.ebean.Expr; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import controllers.AutoTuningMetricsController; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import models.JobDefinition; import models.JobExecution; import models.JobSavedState; import models.JobSuggestedParamSet; import models.JobSuggestedParamValue; import models.TuningAlgorithm; import models.TuningJobDefinition; import models.TuningParameter; import org.apache.commons.io.FileUtils; import org.apache.log4j.Logger; import play.libs.Json; /** * This is an abstract class for generating parameter suggestions for jobs */ public abstract class ParamGenerator { private static final String JSON_CURRENT_POPULATION_KEY = "current_population"; private final Logger logger = Logger.getLogger(getClass()); /** * Generates the parameters using tuningJobInfo and returns it in updated JobTuningInfo * @param jobTuningInfo The tuning job information required to create new params * @return The updated job tuning information containing the new params */ public abstract JobTuningInfo generateParamSet(JobTuningInfo jobTuningInfo); /** * Converts a json to list of particles * @param jsonParticleList A 
list of configurations (particles) in json * @return Particle List */ private List<Particle> jsonToParticleList(JsonNode jsonParticleList) { List<Particle> particleList = new ArrayList<Particle>(); if (jsonParticleList == null) { logger.info("Null json, empty particle list returned"); } else { for (JsonNode jsonParticle : jsonParticleList) { Particle particle; particle = Json.fromJson(jsonParticle, Particle.class); if (particle != null) { particleList.add(particle); } } } return particleList; } /** * Fetches the list to job which need new parameter suggestion * @return Job list */ private List<TuningJobDefinition> getJobsForParamSuggestion() { // Todo: [Important] Change the logic. This is very rigid. Ideally you should look at the param set ids in the saved state, // todo: [continuation] if their fitness is computed, pso can generate new params for the job logger.info("Checking which jobs need new parameter suggestion"); List<TuningJobDefinition> jobsForParamSuggestion = new ArrayList<TuningJobDefinition>(); List<JobSuggestedParamSet> pendingParamSetList = JobSuggestedParamSet.find.select("*") .fetch(JobSuggestedParamSet.TABLE.jobDefinition, "*") .where() .or(Expr.or(Expr.eq(JobSuggestedParamSet.TABLE.paramSetState, JobSuggestedParamSet.ParamSetStatus.CREATED), Expr.eq(JobSuggestedParamSet.TABLE.paramSetState, JobSuggestedParamSet.ParamSetStatus.SENT)), Expr.eq(JobSuggestedParamSet.TABLE.paramSetState, JobSuggestedParamSet.ParamSetStatus.EXECUTED)) .eq(JobSuggestedParamSet.TABLE.isParamSetDefault, 0) .eq(JobSuggestedParamSet.TABLE.isParamSetBest, 0).findList(); List<JobDefinition> pendingParamJobList = new ArrayList<JobDefinition>(); for (JobSuggestedParamSet pendingParamSet : pendingParamSetList) { if (!pendingParamJobList.contains(pendingParamSet.jobDefinition)) { pendingParamJobList.add(pendingParamSet.jobDefinition); } } List<TuningJobDefinition> tuningJobDefinitionList = TuningJobDefinition.find.select("*") .fetch(TuningJobDefinition.TABLE.job, "*") .where() 
.eq(TuningJobDefinition.TABLE.tuningEnabled, 1) .findList(); if (tuningJobDefinitionList.size() == 0) { logger.error("No auto-tuning enabled jobs found"); } for (TuningJobDefinition tuningJobDefinition : tuningJobDefinitionList) { if (!pendingParamJobList.contains(tuningJobDefinition.job)) { logger.info("New parameter suggestion needed for job: " + tuningJobDefinition.job.jobName); jobsForParamSuggestion.add(tuningJobDefinition); } } logger.info("Number of job(s) which need new parameter suggestion: " + jobsForParamSuggestion.size()); return jobsForParamSuggestion; } /** * Converts a list of particles to json * @param particleList Particle List * @return JsonNode */ private JsonNode particleListToJson(List<Particle> particleList) { JsonNode jsonNode; if (particleList == null) { jsonNode = JsonNodeFactory.instance.objectNode(); logger.info("Null particleList, returning empty json"); } else { jsonNode = Json.toJson(particleList); } return jsonNode; } /** * Returns the tuning information for the jobs * @param tuningJobs Job List * @return Tuning information list */ private List<JobTuningInfo> getJobsTuningInfo(List<TuningJobDefinition> tuningJobs) { List<JobTuningInfo> jobTuningInfoList = new ArrayList<JobTuningInfo>(); for (TuningJobDefinition tuningJobDefinition : tuningJobs) { JobDefinition job = tuningJobDefinition.job; logger.info("Getting tuning information for job: " + job.jobDefId); List<TuningParameter> tuningParameterList = TuningParameter.find.where() .eq(TuningParameter.TABLE.tuningAlgorithm + "." + TuningAlgorithm.TABLE.id, tuningJobDefinition.tuningAlgorithm.id) .eq(TuningParameter.TABLE.isDerived, 0) .findList(); logger.info("Fetching default parameter values for job " + tuningJobDefinition.job.jobDefId); JobSuggestedParamSet defaultJobParamSet = JobSuggestedParamSet.find.where() .eq(JobSuggestedParamSet.TABLE.jobDefinition + "." 
+ JobDefinition.TABLE.id, tuningJobDefinition.job.id) .eq(JobSuggestedParamSet.TABLE.isParamSetDefault, 1) .order() .desc(JobSuggestedParamSet.TABLE.id) .setMaxRows(1) .findUnique(); if (defaultJobParamSet != null) { List<JobSuggestedParamValue> jobSuggestedParamValueList = JobSuggestedParamValue.find.where() .eq(JobSuggestedParamValue.TABLE.jobSuggestedParamSet + "." + JobExecution.TABLE.id, defaultJobParamSet.id) .findList(); if (jobSuggestedParamValueList.size() > 0) { Map<Integer, Double> defaultExecutionParamMap = new HashMap<Integer, Double>(); for (JobSuggestedParamValue jobSuggestedParamValue : jobSuggestedParamValueList) { defaultExecutionParamMap.put(jobSuggestedParamValue.tuningParameter.id, jobSuggestedParamValue.paramValue); } for (TuningParameter tuningParameter : tuningParameterList) { Integer paramId = tuningParameter.id; if (defaultExecutionParamMap.containsKey(paramId)) { logger.info( "Updating value of param " + tuningParameter.paramName + " to " + defaultExecutionParamMap.get( paramId)); tuningParameter.defaultValue = defaultExecutionParamMap.get(paramId); } } } } JobTuningInfo jobTuningInfo = new JobTuningInfo(); jobTuningInfo.setTuningJob(job); jobTuningInfo.setJobType(tuningJobDefinition.tuningAlgorithm.jobType); jobTuningInfo.setParametersToTune(tuningParameterList); JobSavedState jobSavedState = JobSavedState.find.byId(job.id); boolean validSavedState = true; if (jobSavedState != null && jobSavedState.isValid()) { String savedState = new String(jobSavedState.savedState, Charset.forName("UTF-8")); ObjectNode jsonSavedState = (ObjectNode) Json.parse(savedState); JsonNode jsonCurrentPopulation = jsonSavedState.get(JSON_CURRENT_POPULATION_KEY); List<Particle> currentPopulation = jsonToParticleList(jsonCurrentPopulation); for (Particle particle : currentPopulation) { Long paramSetId = particle.getParamSetId(); logger.info("Param set id: " + paramSetId.toString()); JobSuggestedParamSet jobSuggestedParamSet = 
JobSuggestedParamSet.find.select("*").where().eq(JobSuggestedParamSet.TABLE.id, paramSetId).findUnique(); if (jobSuggestedParamSet.paramSetState.equals(JobSuggestedParamSet.ParamSetStatus.FITNESS_COMPUTED) && jobSuggestedParamSet.fitness != null) { particle.setFitness(jobSuggestedParamSet.fitness); } else { validSavedState = false; logger.error("Invalid saved state: Fitness of previous execution not computed."); break; } } if (validSavedState) { JsonNode updatedJsonCurrentPopulation = particleListToJson(currentPopulation); jsonSavedState.set(JSON_CURRENT_POPULATION_KEY, updatedJsonCurrentPopulation); savedState = Json.stringify(jsonSavedState); jobTuningInfo.setTunerState(savedState); } } else { logger.info("Saved state empty for job: " + job.jobDefId); validSavedState = false; } if (!validSavedState) { jobTuningInfo.setTunerState("{}"); } logger.info("Adding JobTuningInfo " + Json.toJson(jobTuningInfo)); jobTuningInfoList.add(jobTuningInfo); } return jobTuningInfoList; } /** * Returns list of suggested parameters * @param particle Particle (configuration) * @param paramList Parameter List * @return Suggested Param Value List */ private List<JobSuggestedParamValue> getParamValueList(Particle particle, List<TuningParameter> paramList) { logger.debug("Particle is: " + Json.toJson(particle)); List<JobSuggestedParamValue> jobSuggestedParamValueList = new ArrayList<JobSuggestedParamValue>(); if (particle != null) { List<Double> candidate = particle.getCandidate(); if (candidate != null) { logger.debug("Candidate is:" + Json.toJson(candidate)); for (int i = 0; i < candidate.size() && i < paramList.size(); i++) { logger.info("Candidate is " + candidate); JobSuggestedParamValue jobSuggestedParamValue = new JobSuggestedParamValue(); int paramId = paramList.get(i).id; jobSuggestedParamValue.tuningParameter = TuningParameter.find.byId(paramId); jobSuggestedParamValue.paramValue = candidate.get(i); jobSuggestedParamValueList.add(jobSuggestedParamValue); } } else { 
logger.info("Candidate is null"); } } else { logger.info("Particle null"); } return jobSuggestedParamValueList; } /** * For every tuning info: * For every new particle: * From the tuner set extract the list of suggested parameters * Check penalty * Save the param in the job execution table by creating execution instance (Create an entry into param_set table) * Update the execution instance in each of the suggested params (Update the param_set_id in each of the prams) * save th suggested parameters * update the paramsetid in the particle and add particle to a particlelist * Update the tunerstate from the updated particles * save the tuning info in db * * @param jobTuningInfoList JobTuningInfo List */ private void updateDatabase(List<JobTuningInfo> jobTuningInfoList) { logger.info("Updating new parameter suggestion in database"); if (jobTuningInfoList == null) { logger.info("No new parameter suggestion to update"); return; } int paramSetNotGeneratedJobs = jobTuningInfoList.size(); for (JobTuningInfo jobTuningInfo : jobTuningInfoList) { logger.info("Updating new parameter suggestion for job:" + jobTuningInfo.getTuningJob().jobDefId); JobDefinition job = jobTuningInfo.getTuningJob(); List<TuningParameter> paramList = jobTuningInfo.getParametersToTune(); String stringTunerState = jobTuningInfo.getTunerState(); if (stringTunerState == null) { logger.error("Suggested parameter suggestion is empty for job id: " + job.jobDefId); continue; } TuningJobDefinition tuningJobDefinition = TuningJobDefinition.find.select("*") .fetch(TuningJobDefinition.TABLE.job, "*") .where() .eq(TuningJobDefinition.TABLE.job + "." + JobDefinition.TABLE.id, job.id) .eq(TuningJobDefinition.TABLE.tuningEnabled, 1) .findUnique(); List<TuningParameter> derivedParameterList = new ArrayList<TuningParameter>(); derivedParameterList = TuningParameter.find.where() .eq(TuningParameter.TABLE.tuningAlgorithm + "." 
+ TuningAlgorithm.TABLE.id, tuningJobDefinition.tuningAlgorithm.id) .eq(TuningParameter.TABLE.isDerived, 1) .findList(); logger.info("No. of derived tuning params for job " + tuningJobDefinition.job.jobName + ": " + derivedParameterList.size()); JsonNode jsonTunerState = Json.parse(stringTunerState); JsonNode jsonSuggestedPopulation = jsonTunerState.get(JSON_CURRENT_POPULATION_KEY); if (jsonSuggestedPopulation == null) { continue; } paramSetNotGeneratedJobs--; List<Particle> suggestedPopulation = jsonToParticleList(jsonSuggestedPopulation); for (Particle suggestedParticle : suggestedPopulation) { AutoTuningMetricsController.markParamSetGenerated(); List<JobSuggestedParamValue> jobSuggestedParamValueList = getParamValueList(suggestedParticle, paramList); Map<String, Double> jobSuggestedParamValueMap = new HashMap<String, Double>(); for (JobSuggestedParamValue jobSuggestedParamValue : jobSuggestedParamValueList) { jobSuggestedParamValueMap.put(jobSuggestedParamValue.tuningParameter.paramName, jobSuggestedParamValue.paramValue); } for (TuningParameter derivedParameter : derivedParameterList) { logger.info("Computing value of derived param: " + derivedParameter.paramName); Double paramValue = null; if (derivedParameter.paramName.equals("mapreduce.reduce.java.opts")) { String parentParamName = "mapreduce.reduce.memory.mb"; if (jobSuggestedParamValueMap.containsKey(parentParamName)) { paramValue = 0.75 * jobSuggestedParamValueMap.get(parentParamName); } } else if (derivedParameter.paramName.equals("mapreduce.map.java.opts")) { String parentParamName = "mapreduce.map.memory.mb"; if (jobSuggestedParamValueMap.containsKey(parentParamName)) { paramValue = 0.75 * jobSuggestedParamValueMap.get(parentParamName); } } else if (derivedParameter.paramName.equals("mapreduce.input.fileinputformat.split.maxsize")) { String parentParamName = "pig.maxCombinedSplitSize"; if (jobSuggestedParamValueMap.containsKey(parentParamName)) { paramValue = 
jobSuggestedParamValueMap.get(parentParamName); } } if (paramValue != null) { JobSuggestedParamValue jobSuggestedParamValue = new JobSuggestedParamValue(); jobSuggestedParamValue.paramValue = paramValue; jobSuggestedParamValue.tuningParameter = derivedParameter; jobSuggestedParamValueList.add(jobSuggestedParamValue); } } JobSuggestedParamSet jobSuggestedParamSet = new JobSuggestedParamSet(); jobSuggestedParamSet.jobDefinition = job; jobSuggestedParamSet.tuningAlgorithm = tuningJobDefinition.tuningAlgorithm; jobSuggestedParamSet.isParamSetDefault = false; jobSuggestedParamSet.isParamSetBest = false; if (isParamConstraintViolated(jobSuggestedParamValueList, jobSuggestedParamSet.tuningAlgorithm.jobType)) { logger.info("Parameter constraint violated. Applying penalty."); int penaltyConstant = 3; Double averageResourceUsagePerGBInput = tuningJobDefinition.averageResourceUsage * FileUtils.ONE_GB / tuningJobDefinition.averageInputSizeInBytes; Double maxDesiredResourceUsagePerGBInput = averageResourceUsagePerGBInput * tuningJobDefinition.allowedMaxResourceUsagePercent / 100.0; jobSuggestedParamSet.areConstraintsViolated = true; jobSuggestedParamSet.fitness = penaltyConstant * maxDesiredResourceUsagePerGBInput; jobSuggestedParamSet.paramSetState = JobSuggestedParamSet.ParamSetStatus.FITNESS_COMPUTED; } else { jobSuggestedParamSet.areConstraintsViolated = false; jobSuggestedParamSet.paramSetState = JobSuggestedParamSet.ParamSetStatus.CREATED; } Long paramSetId = saveSuggestedParamSet(jobSuggestedParamSet); for (JobSuggestedParamValue jobSuggestedParamValue : jobSuggestedParamValueList) { jobSuggestedParamValue.jobSuggestedParamSet = jobSuggestedParamSet; } suggestedParticle.setPramSetId(paramSetId); saveSuggestedParams(jobSuggestedParamValueList); } JsonNode updatedJsonSuggestedPopulation = particleListToJson(suggestedPopulation); ObjectNode updatedJsonTunerState = (ObjectNode) jsonTunerState; updatedJsonTunerState.put(JSON_CURRENT_POPULATION_KEY, 
updatedJsonSuggestedPopulation); String updatedStringTunerState = Json.stringify(updatedJsonTunerState); jobTuningInfo.setTunerState(updatedStringTunerState); } AutoTuningMetricsController.setParamSetGenerateWaitJobs(paramSetNotGeneratedJobs); saveTunerState(jobTuningInfoList); } /** * Check if the parameters violated constraints * Constraint 1: sort.mb > 60% of map.memory: To avoid heap memory failure * Constraint 2: map.memory - sort.mb < 768: To avoid heap memory failure * Constraint 3: pig.maxCombinedSplitSize > 1.8*mapreduce.map.memory.mb * @param jobSuggestedParamValueList List of suggested param values * @param jobType Job type * @return true if the constraint is violated, false otherwise */ private boolean isParamConstraintViolated(List<JobSuggestedParamValue> jobSuggestedParamValueList, TuningAlgorithm.JobType jobType) { logger.info("Checking whether parameter values are within constraints"); Integer violations = 0; if (jobType.equals(TuningAlgorithm.JobType.PIG)) { Double mrSortMemory = null; Double mrMapMemory = null; Double pigMaxCombinedSplitSize = null; for (JobSuggestedParamValue jobSuggestedParamValue : jobSuggestedParamValueList) { if (jobSuggestedParamValue.tuningParameter.paramName.equals("mapreduce.task.io.sort.mb")) { mrSortMemory = jobSuggestedParamValue.paramValue; } else if (jobSuggestedParamValue.tuningParameter.paramName.equals("mapreduce.map.memory.mb")) { mrMapMemory = jobSuggestedParamValue.paramValue; } else if (jobSuggestedParamValue.tuningParameter.paramName.equals("pig.maxCombinedSplitSize")) { pigMaxCombinedSplitSize = jobSuggestedParamValue.paramValue / FileUtils.ONE_MB; } } if (mrSortMemory != null && mrMapMemory != null) { if (mrSortMemory > 0.6 * mrMapMemory) { logger.info("Constraint violated: Sort memory > 60% of map memory"); violations++; } if (mrMapMemory - mrSortMemory < 768) { logger.info("Constraint violated: Map memory - sort memory < 768 mb"); violations++; } } if (pigMaxCombinedSplitSize != null && mrMapMemory != 
null && (pigMaxCombinedSplitSize > 1.8 * mrMapMemory)) { logger.info("Constraint violated: Pig max combined split size > 1.8 * map memory"); violations++; } } if (violations == 0) { return false; } else { logger.info("Number of constraint(s) violated: " + violations); return true; } } /** * Save the tuning info list to the database * @param jobTuningInfoList Tuning Info List */ private void saveTunerState(List<JobTuningInfo> jobTuningInfoList) { for (JobTuningInfo jobTuningInfo : jobTuningInfoList) { if (jobTuningInfo.getTunerState() == null) { continue; } JobSavedState jobSavedState = JobSavedState.find.byId(jobTuningInfo.getTuningJob().id); if (jobSavedState == null) { jobSavedState = new JobSavedState(); jobSavedState.jobDefinitionId = jobTuningInfo.getTuningJob().id; } jobSavedState.savedState = jobTuningInfo.getTunerState().getBytes(Charset.forName("UTF-8")); jobSavedState.save(); } } /** * Saves the list of suggested parameter values to database * @param jobSuggestedParamValueList Suggested Parameter Values List */ private void saveSuggestedParams(List<JobSuggestedParamValue> jobSuggestedParamValueList) { for (JobSuggestedParamValue jobSuggestedParamValue : jobSuggestedParamValueList) { jobSuggestedParamValue.save(); } } /** * Saves the suggested param set in the database and returns the param set id * @param jobSuggestedParamSet JobExecution * @return Param Set Id */ private Long saveSuggestedParamSet(JobSuggestedParamSet jobSuggestedParamSet) { jobSuggestedParamSet.save(); return jobSuggestedParamSet.id; } /** * Fetches job which need parameters, generates parameters and stores it in the database */ public void getParams() { List<TuningJobDefinition> jobsForSwarmSuggestion = getJobsForParamSuggestion(); List<JobTuningInfo> jobTuningInfoList = getJobsTuningInfo(jobsForSwarmSuggestion); List<JobTuningInfo> updatedJobTuningInfoList = new ArrayList<JobTuningInfo>(); for (JobTuningInfo jobTuningInfo : jobTuningInfoList) { JobTuningInfo newJobTuningInfo = 
generateParamSet(jobTuningInfo); updatedJobTuningInfoList.add(newJobTuningInfo); } updateDatabase(updatedJobTuningInfoList); } }
8,397
664
<gh_stars>100-1000 // Classic Shell (c) 2009-2016, <NAME> // Confidential information of <NAME>. Not for disclosure or distribution without prior written consent from the author #include <windows.h> #include <commctrl.h> #include <shlwapi.h> #include <Psapi.h> #include "StringUtils.h" // Find and activate the Settings window static BOOL CALLBACK FindSettingsEnum( HWND hwnd, LPARAM lParam ) { wchar_t className[256]; if (!GetClassName(hwnd,className,_countof(className)) || _wcsicmp(className,L"#32770")!=0) return TRUE; DWORD process=0; GetWindowThreadProcessId(hwnd,&process); HANDLE hProcess=OpenProcess(PROCESS_QUERY_INFORMATION|PROCESS_VM_READ,FALSE,process); bool bFound=false; if (hProcess!=INVALID_HANDLE_VALUE) { wchar_t path[_MAX_PATH]; if (GetModuleFileNameEx(hProcess,NULL,path,_countof(path))) { if (_wcsicmp(PathFindFileName(path),L"ClassicExplorerSettings.exe")==0) { SetForegroundWindow(hwnd); bFound=true; } } CloseHandle(hProcess); } return !bFound; } HMODULE LoadClassicExplorerDll( void ) { wchar_t path[_MAX_PATH]; GetModuleFileName(NULL,path,_countof(path)); *PathFindFileName(path)=0; PathAppend(path,L"ClassicExplorer32.dll"); return LoadLibrary(path); } // A simple program that loads ClassicExplorer32.dll and calls the ShowExplorerSettings function // Why not use rundll32 instead? 
Because it doesn't include the correct manifest for comctl32.dll int WINAPI wWinMain( HINSTANCE hInstance, HINSTANCE hPrevInstance, LPTSTR lpstrCmdLine, int nCmdShow ) { INITCOMMONCONTROLSEX init={sizeof(init),ICC_STANDARD_CLASSES}; InitCommonControlsEx(&init); { const wchar_t *pXml=wcsstr(lpstrCmdLine,L"-xml "); if (pXml) { wchar_t xml[_MAX_PATH]; GetToken(pXml+5,xml,_countof(xml),L" "); HMODULE dll=LoadClassicExplorerDll(); if (!dll) return 1; typedef bool (*tImportSettingsXml)( const wchar_t *fname ); tImportSettingsXml DllImportSettingsXml=(tImportSettingsXml)GetProcAddress(dll,"DllImportSettingsXml"); if (!DllImportSettingsXml) return 1; CoInitialize(NULL); bool res=DllImportSettingsXml(xml); CoUninitialize(); return res?0:1; } } { const wchar_t *pBackup=wcsstr(lpstrCmdLine,L"-backup "); if (pBackup) { wchar_t xml[_MAX_PATH]; GetToken(pBackup+8,xml,_countof(xml),L" "); HMODULE dll=LoadClassicExplorerDll(); if (!dll) return 1; typedef bool (*tExportSettingsXml)( const wchar_t *fname ); tExportSettingsXml DllExportSettingsXml=(tExportSettingsXml)GetProcAddress(dll,"DllExportSettingsXml"); if (!DllExportSettingsXml) return 1; CoInitialize(NULL); bool res=DllExportSettingsXml(xml); CoUninitialize(); return res?0:1; } } #ifndef _WIN64 const wchar_t *pSaveAdmx=wcsstr(lpstrCmdLine,L"-saveadmx "); if (pSaveAdmx) { wchar_t language[100]; GetToken(pSaveAdmx+10,language,_countof(language),L" "); HMODULE dll=LoadClassicExplorerDll(); if (!dll) return 1; typedef bool (*tSaveAdmx)( const char *admxFile, const char *admlFile, const char *docFile, const wchar_t *language ); tSaveAdmx SaveAdmx=(tSaveAdmx)GetProcAddress(dll,"DllSaveAdmx"); if (!SaveAdmx || !SaveAdmx("ClassicExplorer.admx","ClassicExplorer.adml","ClassicExplorerADMX.txt",language)) return 1; return 0; } #endif // prevent multiple instances from running on the same desktop // the assumption is that multiple desktops for the same user will have different name (but may repeat across users) wchar_t userName[256]; 
DWORD len=_countof(userName); GetUserName(userName,&len); len=0; HANDLE desktop=GetThreadDesktop(GetCurrentThreadId()); GetUserObjectInformation(desktop,UOI_NAME,NULL,0,&len); wchar_t *deskName=(wchar_t*)malloc(len); GetUserObjectInformation(desktop,UOI_NAME,deskName,len,&len); wchar_t mutexName[1024]; Sprintf(mutexName,_countof(mutexName),L"ClassicExplorerSettings.Mutex.%s.%s",userName,deskName); free(deskName); HANDLE hMutex=CreateMutex(NULL,TRUE,mutexName); if (GetLastError()==ERROR_ALREADY_EXISTS || GetLastError()==ERROR_ACCESS_DENIED) { EnumWindows(FindSettingsEnum,0); return 0; } HMODULE dll=LoadClassicExplorerDll(); if (!dll) return 1; FARPROC proc=GetProcAddress(dll,"ShowExplorerSettings"); if (!proc) return 2; proc(); return 0; }
1,693
435
<gh_stars>100-1000 { "copyright_text": null, "description": "Em 2019, depois de muito esfor\u00e7o de v\u00e1rias pessoas volunt\u00e1rias, publicamos no site oficial da documenta\u00e7\u00e3o do Python as tradu\u00e7\u00f5es em Portugu\u00eas. Por\u00e9m, isso n\u00e3o quer dizer que o trabalho com as tradu\u00e7\u00f5es diminuiu, muito pelo contr\u00e1rio. Nessa palestras vou mostrar como \u00e9 o processo de tradu\u00e7\u00f5es da documenta\u00e7\u00e3o, como voc\u00ea pode aprender Python traduzindo, e como contribuir para essa iniciativa mesmo sem saber ingl\u00eas.", "duration": 1736, "language": "por", "recorded": "2019-12-13", "related_urls": [ { "label": "Conference schedule", "url": "https://pyjamas.live/#grade" }, { "label": "python-doc in transifex", "url": "https://www.transifex.com/python-doc/public/" } ], "speakers": [ "<NAME>" ], "tags": [], "thumbnail_url": "https://i.ytimg.com/vi/ohpy_5ppoa0/hqdefault.jpg", "title": "Mantendo a documenta\u00e7\u00e3o do Python em Portugu\u00eas!", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=ohpy_5ppoa0" } ] }
550
61,676
import os
import signal
import subprocess
import sys
from pathlib import Path
from unittest import mock, skipUnless

from django.db import connection
from django.db.backends.postgresql.client import DatabaseClient
from django.test import SimpleTestCase


class PostgreSqlDbshellCommandTestCase(SimpleTestCase):
    # Tests for how the PostgreSQL dbshell client translates Django database
    # settings into a psql argument list plus extra environment variables.
    # Each expected value is a 2-tuple: (argv list, env dict or None).

    def settings_to_cmd_args_env(self, settings_dict, parameters=None):
        # Thin wrapper around DatabaseClient.settings_to_cmd_args_env() that
        # defaults `parameters` (extra psql CLI arguments) to an empty list.
        if parameters is None:
            parameters = []
        return DatabaseClient.settings_to_cmd_args_env(settings_dict, parameters)

    def test_basic(self):
        # USER/HOST/PORT become CLI flags; the password is passed via the
        # PGPASSWORD environment variable, never on the command line.
        self.assertEqual(
            self.settings_to_cmd_args_env({
                'NAME': 'dbname',
                'USER': 'someuser',
                'PASSWORD': '<PASSWORD>',
                'HOST': 'somehost',
                'PORT': '444',
            }), (
                ['psql', '-U', 'someuser', '-h', 'somehost', '-p', '444', 'dbname'],
                {'PGPASSWORD': '<PASSWORD>'},
            )
        )

    def test_nopass(self):
        # No PASSWORD in settings -> no extra environment (env is None).
        self.assertEqual(
            self.settings_to_cmd_args_env({
                'NAME': 'dbname',
                'USER': 'someuser',
                'HOST': 'somehost',
                'PORT': '444',
            }), (
                ['psql', '-U', 'someuser', '-h', 'somehost', '-p', '444', 'dbname'],
                None,
            )
        )

    def test_ssl_certificate(self):
        # SSL-related OPTIONS entries map to the corresponding PGSSL* env vars.
        self.assertEqual(
            self.settings_to_cmd_args_env({
                'NAME': 'dbname',
                'USER': 'someuser',
                'HOST': 'somehost',
                'PORT': '444',
                'OPTIONS': {
                    'sslmode': 'verify-ca',
                    'sslrootcert': 'root.crt',
                    'sslcert': 'client.crt',
                    'sslkey': 'client.key',
                },
            }), (
                ['psql', '-U', 'someuser', '-h', 'somehost', '-p', '444', 'dbname'],
                {
                    'PGSSLCERT': 'client.crt',
                    'PGSSLKEY': 'client.key',
                    'PGSSLMODE': 'verify-ca',
                    'PGSSLROOTCERT': 'root.crt',
                },
            )
        )

    def test_service(self):
        # A pg_service.conf service name is passed via PGSERVICE.
        self.assertEqual(
            self.settings_to_cmd_args_env({'OPTIONS': {'service': 'django_test'}}),
            (['psql'], {'PGSERVICE': 'django_test'}),
        )

    def test_passfile(self):
        # A custom password file is passed via PGPASSFILE, with or without
        # the other connection settings present.
        self.assertEqual(
            self.settings_to_cmd_args_env({
                'NAME': 'dbname',
                'USER': 'someuser',
                'HOST': 'somehost',
                'PORT': '444',
                'OPTIONS': {
                    'passfile': '~/.custompgpass',
                },
            }), (
                ['psql', '-U', 'someuser', '-h', 'somehost', '-p', '444', 'dbname'],
                {'PGPASSFILE': '~/.custompgpass'},
            ),
        )
        # passfile combines with a service name as well.
        self.assertEqual(
            self.settings_to_cmd_args_env({
                'OPTIONS': {
                    'service': 'django_test',
                    'passfile': '~/.custompgpass',
                },
            }), (
                ['psql'],
                {'PGSERVICE': 'django_test', 'PGPASSFILE': '~/.custompgpass'},
            ),
        )

    def test_column(self):
        # Colons in user names and IPv6 host addresses pass through unchanged.
        self.assertEqual(
            self.settings_to_cmd_args_env({
                'NAME': 'dbname',
                'USER': 'some:user',
                'PASSWORD': '<PASSWORD>',
                'HOST': '::1',
                'PORT': '444',
            }), (
                ['psql', '-U', 'some:user', '-h', '::1', '-p', '444', 'dbname'],
                {'PGPASSWORD': '<PASSWORD>'},
            )
        )

    def test_accent(self):
        # Non-ASCII user names/passwords pass through unchanged.
        username = 'rôle'
        password = '<PASSWORD>'
        self.assertEqual(
            self.settings_to_cmd_args_env({
                'NAME': 'dbname',
                'USER': username,
                'PASSWORD': password,
                'HOST': 'somehost',
                'PORT': '444',
            }), (
                ['psql', '-U', username, '-h', 'somehost', '-p', '444', 'dbname'],
                {'PGPASSWORD': password},
            )
        )

    def test_parameters(self):
        # Extra CLI parameters are appended after the database name.
        self.assertEqual(
            self.settings_to_cmd_args_env({'NAME': 'dbname'}, ['--help']),
            (['psql', 'dbname', '--help'], None),
        )

    @skipUnless(connection.vendor == 'postgresql', 'Requires a PostgreSQL connection')
    def test_sigint_handler(self):
        """SIGINT is ignored in Python and passed to psql to abort queries."""
        def _mock_subprocess_run(*args, **kwargs):
            # While the subprocess is "running", Python must ignore SIGINT so
            # that Ctrl-C reaches psql instead of killing dbshell.
            handler = signal.getsignal(signal.SIGINT)
            self.assertEqual(handler, signal.SIG_IGN)
        sigint_handler = signal.getsignal(signal.SIGINT)
        # The default handler isn't SIG_IGN.
        self.assertNotEqual(sigint_handler, signal.SIG_IGN)
        with mock.patch('subprocess.run', new=_mock_subprocess_run):
            connection.client.runshell([])
        # dbshell restores the original handler.
        self.assertEqual(sigint_handler, signal.getsignal(signal.SIGINT))

    def test_crash_password_does_not_leak(self):
        # The password doesn't leak in an exception that results from a client
        # crash.
        args, env = self.settings_to_cmd_args_env({'PASSWORD': '<PASSWORD>'}, [])
        if env:
            env = {**os.environ, **env}
        # Substitute a crashing fake client for the real psql binary.
        fake_client = Path(__file__).with_name('fake_client.py')
        args[0:1] = [sys.executable, str(fake_client)]
        with self.assertRaises(subprocess.CalledProcessError) as ctx:
            subprocess.run(args, check=True, env=env)
        self.assertNotIn('somepassword', str(ctx.exception))
3,189
1,870
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alipay.sofa.runtime.service.helper;

import java.util.Collection;

import com.alipay.sofa.runtime.SofaRuntimeProperties;
import com.alipay.sofa.runtime.service.binding.JvmBinding;
import com.alipay.sofa.runtime.service.component.Reference;
import com.alipay.sofa.runtime.service.component.ReferenceComponent;
import com.alipay.sofa.runtime.spi.binding.Binding;
import com.alipay.sofa.runtime.spi.binding.BindingAdapterFactory;
import com.alipay.sofa.runtime.spi.component.ComponentInfo;
import com.alipay.sofa.runtime.spi.component.ComponentManager;
import com.alipay.sofa.runtime.spi.component.DefaultImplementation;
import com.alipay.sofa.runtime.spi.component.SofaRuntimeContext;

/**
 * Reference register helper: registers a service reference as a
 * {@link ReferenceComponent} in the component manager and computes a hash
 * code over a reference's bindings.
 *
 * @author xuanbei 18/3/1
 */
public class ReferenceRegisterHelper {

    /** Utility class; not meant to be instantiated. */
    private ReferenceRegisterHelper() {
    }

    /**
     * Registers the given reference as a {@link ReferenceComponent} and returns
     * the component's implementation target. If a component with the same name
     * is already registered, the existing component's target is returned
     * instead of registering a second one.
     *
     * @param reference             the reference to register; must have at
     *                              least one binding configured
     * @param bindingAdapterFactory factory used by the reference component to
     *                              resolve binding adapters
     * @param sofaRuntimeContext    runtime context supplying the component
     *                              manager and jvm-first configuration
     * @return the implementation target of the registered (or pre-existing)
     *         reference component
     */
    public static Object registerReference(Reference reference,
                                           BindingAdapterFactory bindingAdapterFactory,
                                           SofaRuntimeContext sofaRuntimeContext) {
        // Inspect the first configured binding; callers must supply at least
        // one binding (an empty binding set would fail on the array access).
        Binding binding = (Binding) reference.getBindings().toArray()[0];

        // When jvm-first is enabled for this reference and its binding is not
        // already a JVM binding, add a JVM binding alongside it. An RPC
        // invocation would be serialized, so the JVM binding is added rather
        // than skipped here.
        if (!binding.getBindingType().equals(JvmBinding.JVM_BINDING_TYPE)
            && !SofaRuntimeProperties.isDisableJvmFirst(sofaRuntimeContext)
            && reference.isJvmFirst()) {
            reference.addBinding(new JvmBinding());
        }

        ComponentManager componentManager = sofaRuntimeContext.getComponentManager();
        ReferenceComponent referenceComponent = new ReferenceComponent(reference,
            new DefaultImplementation(), bindingAdapterFactory, sofaRuntimeContext);

        // Reuse an already-registered component with the same name.
        if (componentManager.isRegistered(referenceComponent.getName())) {
            return componentManager.getComponentInfo(referenceComponent.getName())
                .getImplementation().getTarget();
        }

        ComponentInfo componentInfo = componentManager.registerAndGet(referenceComponent);
        return componentInfo.getImplementation().getTarget();
    }

    /**
     * Computes a hash code over all bindings of the reference, mixing in the
     * class loader of the reference's interface type (when present) so that
     * the same interface loaded by different class loaders hashes differently.
     *
     * @param reference the reference whose bindings are hashed
     * @return the combined binding hash code
     */
    public static int generateBindingHashCode(Reference reference) {
        Collection<Binding> bindings = reference.getBindings();
        int result = 1;
        for (Binding binding : bindings) {
            result = result * 31 + binding.getBindingHashCode();
        }

        // The class loader may be null (e.g. bootstrap-loaded types); reuse
        // the captured local instead of re-querying it for the hash.
        ClassLoader cl = reference.getInterfaceType().getClassLoader();
        if (cl != null) {
            result += cl.hashCode();
        }
        return result;
    }
}
1,153