max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
3,136 | <filename>xhookwrapper/biz/src/main/java/com/qiyi/biz/Biz.java
package com.qiyi.biz;
/**
* Created by caikelun on 18/01/2018.
*/
public class Biz {

    /** Process-wide singleton, created eagerly at class-load time. */
    private static final Biz INSTANCE = new Biz();

    /** Private: instances are only reachable through {@link #getInstance()}. */
    private Biz() {
    }

    /** @return the singleton instance. */
    public static Biz getInstance() {
        return INSTANCE;
    }

    /** Loads the native "biz" library; calls are serialized via {@code synchronized}. */
    public synchronized void init() {
        System.loadLibrary("biz");
    }

    /** Forwards to the native handler singleton; calls are serialized via {@code synchronized}. */
    public synchronized void start() {
        com.qiyi.biz.NativeHandler.getInstance().start();
    }
}
| 195 |
3,269 | <filename>C++/string-matching-in-an-array.cpp<gh_stars>1000+
// Time: O(n + m + z) = O(n), n is the total size of patterns
// , m is the total size of query string
// , z is the number of all matched strings
// , O(n) = O(m) = O(z) in this problem
// Space: O(t), t is the total size of ac automata trie
// One state of the Aho-Corasick automaton over the lowercase alphabet.
struct AhoNode {
    // Outgoing edges for 'a'..'z'; nullptr where no edge exists.
    vector<AhoNode *> children = vector<AhoNode *>(26, nullptr);
    // Indices of the patterns that terminate exactly at this node.
    vector<int> indices;
    // Longest-proper-suffix link (failure link).
    AhoNode *suffix = nullptr;
    // Nearest node reachable via suffix links that has a non-empty |indices|.
    AhoNode *output = nullptr;
};
class AhoTrie {
public:
AhoTrie(const vector<string>& patterns) : root_(createACTrie(patterns)) {
node_ = createACSuffixAndOutputLinks(root_);
}
vector<int> step(char letter) {
while (node_ && !node_->children[letter - 'a']) {
node_ = node_->suffix;
}
node_ = node_ ? node_->children[letter - 'a'] : root_;
return getACNodeOutputs(node_);
}
void reset() {
node_ = root_;
}
private:
AhoNode *createACTrie(const vector<string>& patterns) { // Time: O(n), Space: O(t)
auto root = new AhoNode();
for (int i = 0; i < patterns.size(); ++i) {
auto node = root;
for (const auto& c : patterns[i]) {
if (!node->children[c - 'a']) {
node->children[c - 'a'] = new AhoNode();
}
node = node->children[c - 'a'];
}
node->indices.emplace_back(i);
}
return root;
}
AhoNode *createACSuffixAndOutputLinks(AhoNode *root) { // Time: O(n), Space: O(t)
queue<AhoNode *> q;
for (auto node : root->children) {
if (!node) {
continue;
}
q.emplace(node);
node->suffix = root;
}
while (!q.empty()) {
auto node = q.front(); q.pop();
for (int c = 0; c < node->children.size(); ++c) {
if (!node->children[c]) {
continue;
}
auto child = node->children[c];
q.emplace(child);
auto suffix = node->suffix;
while (suffix && !suffix->children[c]) {
suffix = suffix->suffix;
}
child->suffix = suffix ? suffix->children[c] : root;
child->output = !child->suffix->indices.empty() ?
child->suffix : child->suffix->output;
}
}
return root;
}
vector<int> getACNodeOutputs(AhoNode *node) { // Time: O(z)
vector<int> result;
for (const auto& i : node_->indices) {
result.emplace_back(i);
// return result;
}
auto output = node_->output;
while (output) {
for (const auto& i : output->indices) {
result.emplace_back(i);
// return result;
}
output = output->output;
}
return result;
}
AhoNode * const root_;
AhoNode *node_;
};
// Builds one Aho-Corasick automaton over all words, then streams each word
// through it to discover which words occur as substrings of another word.
class Solution {
public:
    vector<string> stringMatching(vector<string>& words) {
        AhoTrie trie(words);
        unordered_set<int> matchedIdx;
        for (int i = 0; i < words.size(); ++i) {
            trie.reset();  // each word is an independent text
            for (const auto& c : words[i]) {
                for (const auto& j : trie.step(c)) {
                    if (j != i) {  // every word trivially matches itself
                        matchedIdx.emplace(j);
                    }
                }
            }
        }
        vector<string> result;
        result.reserve(matchedIdx.size());
        for (const auto& i : matchedIdx) {
            result.emplace_back(words[i]);
        }
        return result;
    }
};
// Time: O(n^2 * l), n is the number of strings
// Space: O(l) , l is the max length of strings
class Solution2 {
public:
    // For every word, run a KMP search of it against every other word and
    // report it once some other word contains it.
    vector<string> stringMatching(vector<string>& words) {
        vector<string> matched;
        for (int i = 0; i < words.size(); ++i) {
            const auto failure = buildFailure(words[i]);
            for (int j = 0; j < words.size(); ++j) {
                if (j == i) {
                    continue;  // a word always contains itself
                }
                if (search(words[j], words[i], failure) != -1) {
                    matched.emplace_back(words[i]);
                    break;
                }
            }
        }
        return matched;
    }

private:
    // Classic KMP scan: returns the start index of the first occurrence of
    // |pattern| in |text|, or -1 when there is none.
    int search(const string& text, const string& pattern, const vector<int>& failure) {
        if (pattern.empty()) {
            return 0;
        }
        if (text.length() < pattern.length()) {
            return -1;
        }
        int last = -1;  // index of the last matched pattern character
        for (int pos = 0; pos < text.length(); ++pos) {
            while (last != -1 && pattern[last + 1] != text[pos]) {
                last = failure[last];  // fall back on mismatch
            }
            if (pattern[last + 1] == text[pos]) {
                ++last;
            }
            if (last + 1 == pattern.length()) {
                return pos - last;  // full pattern matched
            }
        }
        return -1;
    }

    // Builds the KMP failure table: failure[pos] is the index of the last
    // character of the longest proper prefix of pattern[0..pos] that is also
    // its suffix, or -1 if none.
    vector<int> buildFailure(const string& pattern) {
        vector<int> failure(pattern.length(), -1);
        int last = -1;
        for (int pos = 1; pos < pattern.length(); ++pos) {
            while (last != -1 && pattern[last + 1] != pattern[pos]) {
                last = failure[last];
            }
            if (pattern[last + 1] == pattern[pos]) {
                ++last;
            }
            failure[pos] = last;
        }
        return failure;
    }
};
// Time: O(n^2 * l^2), n is the number of strings
// Space: O(1) , l is the max length of strings
class Solution3 {
public:
    // Brute force: a word qualifies as soon as string::find locates it
    // inside any other word.
    vector<string> stringMatching(vector<string>& words) {
        vector<string> matched;
        for (int i = 0; i < words.size(); ++i) {
            const auto& candidate = words[i];
            for (int j = 0; j < words.size(); ++j) {
                if (j == i) {
                    continue;  // skip the trivial self-match
                }
                if (words[j].find(candidate) != string::npos) {
                    matched.emplace_back(candidate);
                    break;
                }
            }
        }
        return matched;
    }
};
| 3,303 |
344 | <filename>include/battle_ai_script_commands.h
#ifndef GUARD_BATTLE_AI_SCRIPT_COMMANDS_H
#define GUARD_BATTLE_AI_SCRIPT_COMMANDS_H

#include "global.h"

/* Battle-AI entry points. Every prototype carries __attribute__((long_call))
 * so calls are emitted as far calls; presumably the implementations live in
 * a different ROM segment -- confirm against the linker script. */
void __attribute__((long_call)) BattleAI_HandleItemUseBeforeAISetup(u8 defaultScoreMoves);
void __attribute__((long_call)) BattleAI_SetupAIData(u8 defaultScoreMoves);
/* Intentionally disabled declaration, kept for reference. */
//u8 __attribute__((long_call)) BattleAI_ChooseMoveOrAction(void);
/* Per-bank battle history: record observed abilities/item effects and clear
 * them again (e.g. between battles). */
void __attribute__((long_call)) ClearBankMoveHistory(u8 bank);
void __attribute__((long_call)) RecordAbilityBattle(u8 bank, u8 abilityId);
void __attribute__((long_call)) ClearBankAbilityHistory(u8 bank);
void __attribute__((long_call)) RecordItemEffectBattle(u8 bank, u8 itemEffect);
void __attribute__((long_call)) ClearBankItemEffectHistory(u8 bank);

#endif // GUARD_BATTLE_AI_SCRIPT_COMMANDS_H
| 268 |
1,338 | <reponame>Kirishikesan/haiku
/*
* Copyright (c) 1999-2000, <NAME>.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions, and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions, and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// DiagramItem.h (Cortex/DiagramView)
//
// * PURPOSE
// Provides a base class for all items that can be handled
// by the DiagramView implementation. A basic interface with
// some common implementation is defined, with methods related
// to drawing, mouse handling, selecting, dragging, comparison
// for sorting, and access to the drawing context which is the
// DiagramView instance.
//
// * HISTORY
// c.lenz 25sep99 Begun
//
#ifndef __DiagramItem_H__
#define __DiagramItem_H__
#include <OS.h>
#include <InterfaceDefs.h>
#include <Region.h>
class BMessage;
class BView;
#include "cortex_defs.h"
__BEGIN_CORTEX_NAMESPACE
class DiagramItemGroup;
class DiagramView;
class DiagramBox;
int compareSelectionTime(const void *lValue, const void *rValue);
// Abstract base class for everything a DiagramView can display and
// manipulate: boxes, wires and endpoints. Concrete subclasses implement
// Frame() and Draw(); selection and drag bookkeeping is provided here.
class DiagramItem
{
	friend class DiagramItemGroup;
	friend class DiagramView;
	friend class DiagramBox;

public:					// *** types

	// Item-kind bit flags; M_ANY is the union of all three and can be
	// used to filter items by type.
	enum diagram_item_t {
		M_BOX = 0x1,
		M_WIRE = 0x2,
		M_ENDPOINT = 0x4,
		M_ANY = 0x7
	};

public:					// *** ctor/dtor

	DiagramItem(
		uint32 itemType);

	virtual ~DiagramItem();

public:					// *** accessors

	// returns the item type assigned in the ctor
	uint32 type() const
	{ return m_type; }

	// returns pointer to the drawing context of the DiagramView
	// object
	DiagramView *view() const
	{ return m_view; }

	// returns pointer to the DiagramItemGroup the item belongs to
	DiagramItemGroup *group() const
	{ return m_group; }

	// returns true if the item is currently selected
	bool isSelected() const
	{ return m_selected; }

public:					// *** operations

	// changes the selection state of the item, and updates the
	// m_selectionTime member in the process to ensure proper
	// sorting; calls the selected() hook if the state has
	// actually changed
	void select();

	// sets the item to selected without changing m_selectionTime
	// to the time of selection but prior to the last "replacing"
	// selection (i.e. thru select()) use this method for additive
	// selecting; still calls the selected() hook
	void selectAdding();

	// deselects the item; calls the deselected() hook if the
	// state has actually changed
	void deselect();

	// moves the items frame to a given point by calling MoveBy with the
	// absolute coords translated into relative shift amount
	void moveTo(
		BPoint point,
		BRegion *updateRegion = 0)
	{ MoveBy(point.x - Frame().left, point.y - Frame().top, updateRegion); }

	// resizes the items frame to given dimensions; simply calls the ResizeBy
	// implementation
	void resizeTo(
		float width,
		float height)
	{ ResizeBy(width - Frame().Width(), height - Frame().Height()); }

public:					// *** hook functions

	// is called when the item has been attached to the DiagramView
	// and the view() pointer is valid
	virtual void attachedToDiagram()
	{ /* does nothing */ }

	// is called just before the item is being detached from the
	// the DiagramView
	virtual void detachedFromDiagram()
	{ /* does nothing */ }

	// is called from the DiagramViews MouseDown() function after
	// finding out the mouse buttons and clicks quantity.
	virtual void MouseDown(
		BPoint point,
		uint32 buttons,
		uint32 clicks)
	{ /* does nothing */ }

	// is called from the DiagramViews MouseMoved() when *no* message is being
	// dragged, i.e. the mouse is simply floating above the item
	virtual void MouseOver(
		BPoint point,
		uint32 transit)
	{ /* does nothing */ }

	// is called from the DiagramViews MouseMoved() when a message is being
	// dragged; always call the base class version when overriding!
	virtual void MessageDragged(
		BPoint point,
		uint32 transit,
		const BMessage *message)
	{ /* does nothing */ }

	// is called from the DiagramViews MessageReceived() function when an
	// message has been received through Drag&Drop; always call the base
	// class version when overriding!
	virtual void MessageDropped(
		BPoint point,
		BMessage *message)
	{ /* does nothing */ }

	// is called when the item has been selected or deselected in some way
	virtual void selected()
	{ /* does nothing */ }

	virtual void deselected()
	{ /* does nothing */ }

public:					// *** interface definition

	// this function must be implemented by derived classes to return the
	// items frame rectangle in the DiagramViews coordinates
	virtual BRect Frame() const = 0;

	// this function should be implemented for non-rectangular subclasses
	// (like wires) to estimate how close a given point is to the object;
	// the default implementation returns 1.0 when the point lies within
	// the Frame() rect and 0.0 if not
	virtual float howCloseTo(
		BPoint point) const;

	// this is the hook function called by DiagramView when it's time to
	// Draw the object
	virtual void Draw(
		BRect updateRect) = 0;

	// should move the items frame by the specified amount and do the
	// necessary drawing instructions to update the display; if the
	// caller supplied a BRegion pointer in updateRegion, this method
	// should add other areas affected by the move to it (e.g. wire
	// frames)
	virtual void MoveBy(
		float x,
		float y,
		BRegion *updateRegion = 0)
	{ /* does nothing */ }

	// should resize the items frame by the specified amount
	virtual void ResizeBy(
		float horizontal,
		float vertical)
	{ /* does nothing */ }

protected:				// *** selecting/dragging

	// turn on/off the built-in selection handling
	void makeSelectable(
		bool selectable)
	{ m_selectable = selectable; }

	bool isSelectable() const
	{ return m_selectable; }

	// turn on/off the built-in drag & drop handling
	void makeDraggable(
		bool draggable)
	{ m_draggable = draggable; }

	bool isDraggable() const
	{ return m_draggable; }

protected:				// *** compare functions

	// compares the time when each item was last selected and
	// returns -1 for the most recent.
	friend int compareSelectionTime(
		const void *lValue,
		const void *rValue);

protected:				// *** internal methods

	// called only by DiagramItemGroup objects in the method
	// addItem()
	virtual void _SetOwner(
		DiagramView *owner)
	{ m_view = owner; }

private:				// *** data members

	// the items type (M_BOX, M_WIRE or M_ENDPOINT)
	uint32 m_type;

	// a pointer to the drawing context (the DiagramView instance)
	DiagramView *m_view;

	// a pointer to the DiagramItemGroup the item belongs to
	DiagramItemGroup *m_group;

	// can the object be dragged
	bool m_draggable;

	// can the object be selected
	bool m_selectable;

	// is the object currently selected
	bool m_selected;

	// when was the object selected the last time or added (used
	// for drawing order)
	bigtime_t m_selectionTime;

	// stores the most recent time a item was selected thru
	// the select() method
	// NOTE(review): shared by ALL DiagramItem instances; selection-time
	// ordering presumably assumes single-threaded UI access -- confirm.
	static bigtime_t m_lastSelectionTime;

	// counts the number of selections thru selectAdding()
	// since the last call to select()
	static int32 m_countSelected;
};
__END_CORTEX_NAMESPACE
#endif /* __DiagramItem_H__ */
| 2,936 |
390 | <filename>Misc/PythonSQL/DeserializeSavePlots.py
import pyodbc
import pickle
import os

# Fetch the serialized plots produced by the [dbo].[SerializePlots] stored
# procedure and write each one out as <row-index>.png in the current
# directory. The connection string uses {PLACEHOLDER} tokens that must be
# filled in before running.
cnxn = pyodbc.connect('DRIVER={SQL Server};SERVER={SERVER_NAME};DATABASE={DB_NAME};UID={USER_NAME};PWD={PASSWORD}')
try:
    cursor = cnxn.cursor()
    cursor.execute("EXECUTE [dbo].[SerializePlots]")
    # One row per plot; column 0 holds the pickled figure bytes.
    tables = cursor.fetchall()
finally:
    # Fix: the original script never released the connection.
    cnxn.close()

for i, table in enumerate(tables):
    # SECURITY: pickle.loads() can execute arbitrary code from the payload --
    # only run this against a trusted database.
    fig = pickle.loads(table[0])
    fig.savefig(str(i) + '.png')

print("The plots are saved in directory:", os.getcwd())
| 182 |
310 | <filename>src/org/jitsi/impl/neomedia/jmfext/media/protocol/androidcamera/PreviewStream.java<gh_stars>100-1000
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.impl.neomedia.jmfext.media.protocol.androidcamera;
import android.hardware.*;
import android.view.*;
import net.java.sip.communicator.util.*;
import org.jitsi.impl.neomedia.codec.*;
import org.jitsi.impl.neomedia.device.util.*;
import javax.media.*;
import javax.media.control.*;
import java.io.*;
import java.util.*;
/**
* Video stream that captures frames using camera preview callbacks in YUV
* format. As an input Android YV12 format is used which is almost YUV420 planar
* except that for some dimensions padding is added to U,V strides.
* See {@link #YV12toYUV420Planar(byte[], byte[], int, int)}.
*
* @author <NAME>
*/
public class PreviewStream
    extends CameraStreamBase
    implements Camera.PreviewCallback
{
    /**
     * The logger.
     */
    private final static Logger logger = Logger.getLogger(PreviewStream.class);

    /**
     * Buffers queue. Guarded by {@code synchronized (bufferQueue)}: frames
     * are pushed at the head by {@link #onPreviewFrame} and consumed from
     * the tail by {@link #read}, i.e. FIFO order.
     */
    final LinkedList<byte[]> bufferQueue = new LinkedList<byte[]>();

    /**
     * Creates new instance of <tt>PreviewStream</tt>.
     * @param dataSource parent <tt>DataSource</tt>.
     * @param formatControl format control used by this instance.
     */
    public PreviewStream(DataSource dataSource, FormatControl formatControl)
    {
        super(dataSource, formatControl);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void start()
        throws IOException
    {
        super.start();
        startImpl();
    }

    /**
     * {@inheritDoc}
     * Allocates two preview callback buffers sized for the camera's YV12
     * frames and registers this stream as the buffered preview callback.
     */
    @Override
    protected void onInitPreview()
        throws IOException
    {
        // Alloc two buffers
        Camera.Parameters params = camera.getParameters();
        Camera.Size prevSize = params.getPreviewSize();
        int bufferSize = calcYV12Size(prevSize.width, prevSize.height);
        logger.info(prevSize.width + "x" + prevSize.height
                        + " using buffers of size: " + bufferSize
                        + " for image format: 0x" +
                        Integer.toString(params.getPreviewFormat(), 16));
        camera.addCallbackBuffer(new byte[bufferSize]);
        camera.addCallbackBuffer(new byte[bufferSize]);
        SurfaceHolder previewSurface = CameraUtils.obtainPreviewSurface();
        camera.setPreviewDisplay(previewSurface);
        camera.setPreviewCallbackWithBuffer(this);
    }

    /**
     * {@inheritDoc}
     * Pops the oldest queued frame, converts it from YV12 to YUV420 planar
     * into the media {@link Buffer}, and hands the raw buffer back to the
     * camera for reuse.
     * NOTE(review): removeLast() throws NoSuchElementException on an empty
     * queue; presumably read() is only invoked after transferData() has
     * signalled an available frame -- confirm against the caller.
     */
    @Override
    public void read(Buffer buffer)
        throws IOException
    {
        byte[] data;
        synchronized (bufferQueue)
        {
            data = bufferQueue.removeLast();
        }
        int w = format.getSize().width;
        int h = format.getSize().height;
        // YUV420 output size: 12 bits per pixel.
        int outLen = (w*h*12)/8;
        byte[] copy
            = AbstractCodec2.validateByteArraySize(
                    buffer, outLen, false);
        YV12toYUV420Planar(data, copy, w, h);
        //System.arraycopy(data, 0, copy, 0, data.length);
        buffer.setLength(outLen);
        buffer.setFlags(Buffer.FLAG_LIVE_DATA | Buffer.FLAG_RELATIVE_TIME);
        buffer.setTimeStamp(System.currentTimeMillis());
        // Put the buffer for reuse
        camera.addCallbackBuffer(data);
    }

    /**
     * {@inheritDoc}
     * Called by the camera on its own thread with a filled preview buffer;
     * queues the frame and notifies the transfer handler.
     */
    @Override
    public void onPreviewFrame(byte[] data, Camera camera)
    {
        if (data == null)
        {
            logger.error("Null data received on callback, " +
                             " invalid buffer size ?");
            return;
        }
        // Calculate statistics
        calcStats();
        // Queue the frame; format conversion happens later in read()
        synchronized (bufferQueue)
        {
            bufferQueue.addFirst(data);
        }
        transferHandler.transferData(this);
    }

    /**
     * Converts Android YV12 format to YUV420 planar.
     * YV12 stores Y then V then U with 16-byte-aligned strides; YUV420
     * planar stores Y then U then V without the stride padding, so the two
     * chroma planes are swapped and (when strides differ) de-padded.
     * @param input input YV12 image bytes.
     * @param output output buffer.
     * @param width image width.
     * @param height image height.
     */
    static void YV12toYUV420Planar(final byte[] input, final byte[] output,
                                   final int width, final int height)
    {
        // Only 16-pixel-aligned widths are handled below.
        if(width % 16 != 0)
            throw new IllegalArgumentException("Unsupported width: "+width);
        int yStride = (int) Math.ceil( width / 16.0 ) * 16;
        int uvStride = (int) Math.ceil( (yStride / 2) / 16.0) * 16;
        int ySize = yStride * height;
        int uvSize = uvStride * height / 2;
        int I420uvStride = (int)(((yStride / 2) / 16.0) * 16);
        int I420uvSize = width*height/4;
        int uvStridePadding = uvStride - I420uvStride;
        System.arraycopy(input, 0, output, 0, ySize); // Y
        // If padding is 0 then just swap U and V planes
        if(uvStridePadding == 0)
        {
            System.arraycopy(input, ySize,
                             output, ySize + uvSize, uvSize); // Cr (V)
            System.arraycopy(input, ySize + uvSize,
                             output, ySize, uvSize); // Cb (U)
        }
        else
        {
            logger.warn("Not recommended resolution: " + width + "x" + height);
            int src = ySize;
            int dst = ySize;
            //Copy without padding
            for(int y=0; y < height/2; y++)
            {
                System.arraycopy(input, src, output,
                                 I420uvSize + dst, I420uvStride); // Cr (V)
                System.arraycopy(input, uvSize + src,
                                 output, dst, I420uvStride); // Cb (U)
                src += uvStride;
                dst += I420uvStride;
            }
        }
    }

    /**
     * Calculates YV12 image data size in bytes.
     * Strides are rounded up to 16-byte alignment as required by the
     * Android YV12 layout.
     * @param width image width.
     * @param height image height.
     * @return YV12 image data size in bytes.
     */
    public static int calcYV12Size(int width, int height)
    {
        float yStride = (int) Math.ceil( width / 16.0 ) * 16;
        float uvStride = (int) Math.ceil( (yStride / 2) / 16.0) * 16;
        float ySize = yStride * height;
        float uvSize = uvStride * height / 2;
        //float yRowIndex = yStride * y;
        //float uRowIndex = ySize + uvSize + uvStride * c;
        //float vRowIndex = ySize + uvStride * c;
        return (int) (ySize + uvSize * 2);
    }
}
| 3,106 |
1,179 | <gh_stars>1000+
//
// NSError+SQRLVerbosityExtensions.h
// Squirrel
//
// Created by <NAME> on 2013-08-05.
// Copyright (c) 2013 GitHub. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface NSError (SQRLVerbosityExtensions)

// Returns a description of the receiver, without holding back. Includes all
// `userInfo` keys, and verbosely describes underlying errors recursively.
// Presumably intended for logging/debugging rather than user-facing text.
- (NSString *)sqrl_verboseDescription;

@end
| 148 |
373 | <gh_stars>100-1000
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.media;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
* A wrapper of the MediaCodec class to facilitate exception capturing and
* audio rendering.
*/
@JNINamespace("media")
class MediaCodecBridge {
private static final String TAG = "MediaCodecBridge";
// Error code for MediaCodecBridge. Keep this value in sync with
// MediaCodecStatus in media_codec_bridge.h.
private static final int MEDIA_CODEC_OK = 0;
private static final int MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER = 1;
private static final int MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER = 2;
private static final int MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED = 3;
private static final int MEDIA_CODEC_OUTPUT_FORMAT_CHANGED = 4;
private static final int MEDIA_CODEC_INPUT_END_OF_STREAM = 5;
private static final int MEDIA_CODEC_OUTPUT_END_OF_STREAM = 6;
private static final int MEDIA_CODEC_NO_KEY = 7;
private static final int MEDIA_CODEC_STOPPED = 8;
private static final int MEDIA_CODEC_ERROR = 9;
// Codec direction. Keep this in sync with media_codec_bridge.h.
private static final int MEDIA_CODEC_DECODER = 0;
private static final int MEDIA_CODEC_ENCODER = 1;
// After a flush(), dequeueOutputBuffer() can often produce empty presentation timestamps
// for several frames. As a result, the player may find that the time does not increase
// after decoding a frame. To detect this, we check whether the presentation timestamp from
// dequeueOutputBuffer() is larger than input_timestamp - MAX_PRESENTATION_TIMESTAMP_SHIFT_US
// after a flush. And we set the presentation timestamp from dequeueOutputBuffer() to be
// non-decreasing for the remaining frames.
private static final long MAX_PRESENTATION_TIMESTAMP_SHIFT_US = 100000;
private ByteBuffer[] mInputBuffers;
private ByteBuffer[] mOutputBuffers;
private MediaCodec mMediaCodec;
private AudioTrack mAudioTrack;
private boolean mFlushed;
private long mLastPresentationTimeUs;
    /**
     * Immutable (status, buffer index) pair returned by dequeueInputBuffer();
     * the accessors are invoked from native code.
     */
    private static class DequeueInputResult {
        private final int mStatus;  // a MEDIA_CODEC_* status code
        private final int mIndex;   // input buffer index, or -1

        private DequeueInputResult(int status, int index) {
            mStatus = status;
            mIndex = index;
        }

        @CalledByNative("DequeueInputResult")
        private int status() { return mStatus; }

        @CalledByNative("DequeueInputResult")
        private int index() { return mIndex; }
    }
    /**
     * This class represents supported android codec information.
     * Instances are produced by {@link #getCodecsInfo()} and read from
     * native code.
     */
    private static class CodecInfo {
        private final String mCodecType;  // e.g. "video/x-vnd.on2.vp8".
        private final String mCodecName;  // e.g. "OMX.google.vp8.decoder".
        private final int mDirection;     // MEDIA_CODEC_DECODER or MEDIA_CODEC_ENCODER.

        private CodecInfo(String codecType, String codecName,
                          int direction) {
            mCodecType = codecType;
            mCodecName = codecName;
            mDirection = direction;
        }

        @CalledByNative("CodecInfo")
        private String codecType() { return mCodecType; }

        @CalledByNative("CodecInfo")
        private String codecName() { return mCodecName; }

        @CalledByNative("CodecInfo")
        private int direction() { return mDirection; }
    }
    /**
     * Immutable result of dequeueOutputBuffer(): a MEDIA_CODEC_* status code
     * plus the dequeued buffer's index, flags, offset, presentation
     * timestamp and payload size. Accessors are invoked from native code.
     */
    private static class DequeueOutputResult {
        private final int mStatus;
        private final int mIndex;
        private final int mFlags;
        private final int mOffset;
        private final long mPresentationTimeMicroseconds;
        private final int mNumBytes;

        private DequeueOutputResult(int status, int index, int flags, int offset,
                                    long presentationTimeMicroseconds, int numBytes) {
            mStatus = status;
            mIndex = index;
            mFlags = flags;
            mOffset = offset;
            mPresentationTimeMicroseconds = presentationTimeMicroseconds;
            mNumBytes = numBytes;
        }

        @CalledByNative("DequeueOutputResult")
        private int status() { return mStatus; }

        @CalledByNative("DequeueOutputResult")
        private int index() { return mIndex; }

        @CalledByNative("DequeueOutputResult")
        private int flags() { return mFlags; }

        @CalledByNative("DequeueOutputResult")
        private int offset() { return mOffset; }

        @CalledByNative("DequeueOutputResult")
        private long presentationTimeMicroseconds() { return mPresentationTimeMicroseconds; }

        @CalledByNative("DequeueOutputResult")
        private int numBytes() { return mNumBytes; }
    }
    /**
     * Get a list of supported android codec mimes.
     * Keeps only the first (highest-priority) codec MediaCodecList reports
     * for each MIME type; encoders are listed before decoders in the result.
     */
    @CalledByNative
    private static CodecInfo[] getCodecsInfo() {
        // Return the first (highest-priority) codec for each MIME type.
        Map<String, CodecInfo> encoderInfoMap = new HashMap<String, CodecInfo>();
        Map<String, CodecInfo> decoderInfoMap = new HashMap<String, CodecInfo>();
        int count = MediaCodecList.getCodecCount();
        for (int i = 0; i < count; ++i) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            int direction =
                info.isEncoder() ? MEDIA_CODEC_ENCODER : MEDIA_CODEC_DECODER;
            String codecString = info.getName();
            String[] supportedTypes = info.getSupportedTypes();
            for (int j = 0; j < supportedTypes.length; ++j) {
                Map<String, CodecInfo> map = info.isEncoder() ? encoderInfoMap : decoderInfoMap;
                // Only the first codec seen per MIME type is recorded.
                if (!map.containsKey(supportedTypes[j])) {
                    map.put(supportedTypes[j], new CodecInfo(
                        supportedTypes[j], codecString, direction));
                }
            }
        }
        ArrayList<CodecInfo> codecInfos = new ArrayList<CodecInfo>(
            decoderInfoMap.size() + encoderInfoMap.size());
        codecInfos.addAll(encoderInfoMap.values());
        codecInfos.addAll(decoderInfoMap.values());
        return codecInfos.toArray(new CodecInfo[codecInfos.size()]);
    }
    /**
     * Returns the name of the first decoder supporting |mime| with a
     * ".secure" suffix appended, or null when no decoder supports it.
     * NOTE(review): assumes the platform exposes a secure variant under
     * that naming convention -- confirm per API level.
     */
    private static String getSecureDecoderNameForMime(String mime) {
        int count = MediaCodecList.getCodecCount();
        for (int i = 0; i < count; ++i) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (info.isEncoder()) {
                continue;
            }
            String[] supportedTypes = info.getSupportedTypes();
            for (int j = 0; j < supportedTypes.length; ++j) {
                if (supportedTypes[j].equalsIgnoreCase(mime)) {
                    return info.getName() + ".secure";
                }
            }
        }
        return null;
    }
    /** Wraps an already-created codec; the bridge starts in the flushed state. */
    private MediaCodecBridge(MediaCodec mediaCodec) {
        assert mediaCodec != null;
        mMediaCodec = mediaCodec;
        mLastPresentationTimeUs = 0;
        mFlushed = true;
    }
    /**
     * Creates a MediaCodecBridge for |mime|, or returns null on failure.
     * Secure decoders are only attempted on JB-MR2+ because earlier
     * platforms may crash instead of throwing.
     */
    @CalledByNative
    private static MediaCodecBridge create(String mime, boolean isSecure, int direction) {
        // Creation of ".secure" codecs sometimes crash instead of throwing exceptions
        // on pre-JBMR2 devices.
        if (isSecure && Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
            return null;
        }
        MediaCodec mediaCodec = null;
        try {
            // |isSecure| only applies to video decoders.
            if (mime.startsWith("video") && isSecure && direction == MEDIA_CODEC_DECODER) {
                mediaCodec = MediaCodec.createByCodecName(getSecureDecoderNameForMime(mime));
            } else {
                if (direction == MEDIA_CODEC_ENCODER) {
                    mediaCodec = MediaCodec.createEncoderByType(mime);
                } else {
                    mediaCodec = MediaCodec.createDecoderByType(mime);
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "Failed to create MediaCodec: " + mime + ", isSecure: "
                      + isSecure + ", direction: " + direction, e);
        }
        if (mediaCodec == null) {
            return null;
        }
        return new MediaCodecBridge(mediaCodec);
    }
@CalledByNative
private void release() {
mMediaCodec.stop();
mMediaCodec.release();
mMediaCodec = null;
if (mAudioTrack != null) {
mAudioTrack.release();
}
}
    /**
     * Starts the codec and caches its input buffers.
     * @return true on success, false if the codec was in an invalid state.
     */
    @CalledByNative
    private boolean start() {
        try {
            mMediaCodec.start();
            mInputBuffers = mMediaCodec.getInputBuffers();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot start the media codec", e);
            return false;
        }
        return true;
    }
    /**
     * Dequeues an input buffer, mapping MediaCodec's combined index/status
     * return value onto the MEDIA_CODEC_* constants shared with native code.
     */
    @CalledByNative
    private DequeueInputResult dequeueInputBuffer(long timeoutUs) {
        int status = MEDIA_CODEC_ERROR;
        int index = -1;
        try {
            int indexOrStatus = mMediaCodec.dequeueInputBuffer(timeoutUs);
            if (indexOrStatus >= 0) { // index!
                status = MEDIA_CODEC_OK;
                index = indexOrStatus;
            } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // NOTE(review): TRY_AGAIN_LATER is an ordinary timeout, not
                // a failure; logging it at error level is noisy.
                Log.e(TAG, "dequeueInputBuffer: MediaCodec.INFO_TRY_AGAIN_LATER");
                status = MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER;
            } else {
                Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus);
                assert false;
            }
        } catch (Exception e) {
            Log.e(TAG, "Failed to dequeue input buffer", e);
        }
        return new DequeueInputResult(status, index);
    }
    /**
     * Flushes the codec (and the audio track, when present) and marks the
     * stream flushed so timestamp monotonicity can be re-established.
     * @return MEDIA_CODEC_OK on success, MEDIA_CODEC_ERROR otherwise.
     */
    @CalledByNative
    private int flush() {
        try {
            mFlushed = true;
            if (mAudioTrack != null) {
                mAudioTrack.flush();
            }
            mMediaCodec.flush();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to flush MediaCodec", e);
            return MEDIA_CODEC_ERROR;
        }
        return MEDIA_CODEC_OK;
    }
@CalledByNative
private void stop() {
mMediaCodec.stop();
if (mAudioTrack != null) {
mAudioTrack.pause();
}
}
@CalledByNative
private int getOutputHeight() {
return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_HEIGHT);
}
@CalledByNative
private int getOutputWidth() {
return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_WIDTH);
}
@CalledByNative
private ByteBuffer getInputBuffer(int index) {
return mInputBuffers[index];
}
@CalledByNative
private ByteBuffer getOutputBuffer(int index) {
return mOutputBuffers[index];
}
@CalledByNative
private int getInputBuffersCount() {
return mInputBuffers.length;
}
@CalledByNative
private int getOutputBuffersCount() {
return mOutputBuffers != null ? mOutputBuffers.length : -1;
}
@CalledByNative
private int getOutputBuffersCapacity() {
return mOutputBuffers != null ? mOutputBuffers[0].capacity() : -1;
}
@CalledByNative
private boolean getOutputBuffers() {
try {
mOutputBuffers = mMediaCodec.getOutputBuffers();
} catch (IllegalStateException e) {
Log.e(TAG, "Cannot get output buffers", e);
return false;
}
return true;
}
@CalledByNative
private int queueInputBuffer(
int index, int offset, int size, long presentationTimeUs, int flags) {
resetLastPresentationTimeIfNeeded(presentationTimeUs);
try {
mMediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
} catch (Exception e) {
Log.e(TAG, "Failed to queue input buffer", e);
return MEDIA_CODEC_ERROR;
}
return MEDIA_CODEC_OK;
}
@CalledByNative
private void setVideoBitrate(int bps) {
Bundle b = new Bundle();
b.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bps);
mMediaCodec.setParameters(b);
}
@CalledByNative
private void requestKeyFrameSoon() {
Bundle b = new Bundle();
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mMediaCodec.setParameters(b);
}
@CalledByNative
private int queueSecureInputBuffer(
int index, int offset, byte[] iv, byte[] keyId, int[] numBytesOfClearData,
int[] numBytesOfEncryptedData, int numSubSamples, long presentationTimeUs) {
resetLastPresentationTimeIfNeeded(presentationTimeUs);
try {
MediaCodec.CryptoInfo cryptoInfo = new MediaCodec.CryptoInfo();
cryptoInfo.set(numSubSamples, numBytesOfClearData, numBytesOfEncryptedData,
keyId, iv, MediaCodec.CRYPTO_MODE_AES_CTR);
mMediaCodec.queueSecureInputBuffer(index, offset, cryptoInfo, presentationTimeUs, 0);
} catch (MediaCodec.CryptoException e) {
Log.e(TAG, "Failed to queue secure input buffer", e);
if (e.getErrorCode() == MediaCodec.CryptoException.ERROR_NO_KEY) {
Log.e(TAG, "MediaCodec.CryptoException.ERROR_NO_KEY");
return MEDIA_CODEC_NO_KEY;
}
Log.e(TAG, "MediaCodec.CryptoException with error code " + e.getErrorCode());
return MEDIA_CODEC_ERROR;
} catch (IllegalStateException e) {
Log.e(TAG, "Failed to queue secure input buffer", e);
return MEDIA_CODEC_ERROR;
}
return MEDIA_CODEC_OK;
}
@CalledByNative
private void releaseOutputBuffer(int index, boolean render) {
mMediaCodec.releaseOutputBuffer(index, render);
}
    /**
     * Dequeues an output buffer and clamps its presentation timestamp so it
     * never decreases (see the MAX_PRESENTATION_TIMESTAMP_SHIFT_US comment
     * at the top of the class).
     * NOTE(review): the clamp runs before the index/status check, so for
     * non-OK results the zero-initialized BufferInfo timestamp is replaced
     * with mLastPresentationTimeUs -- harmless as long as callers ignore the
     * timestamp unless status == MEDIA_CODEC_OK, but worth confirming.
     */
    @CalledByNative
    private DequeueOutputResult dequeueOutputBuffer(long timeoutUs) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int status = MEDIA_CODEC_ERROR;
        int index = -1;
        try {
            int indexOrStatus = mMediaCodec.dequeueOutputBuffer(info, timeoutUs);
            if (info.presentationTimeUs < mLastPresentationTimeUs) {
                // TODO(qinmin): return a special code through DequeueOutputResult
                // to notify the native code the the frame has a wrong presentation
                // timestamp and should be skipped.
                info.presentationTimeUs = mLastPresentationTimeUs;
            }
            mLastPresentationTimeUs = info.presentationTimeUs;
            if (indexOrStatus >= 0) { // index!
                status = MEDIA_CODEC_OK;
                index = indexOrStatus;
            } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                status = MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED;
            } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                status = MEDIA_CODEC_OUTPUT_FORMAT_CHANGED;
            } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER;
            } else {
                Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus);
                assert false;
            }
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to dequeue output buffer", e);
        }
        return new DequeueOutputResult(
            status, index, info.flags, info.offset, info.presentationTimeUs, info.size);
    }
@CalledByNative
private boolean configureVideo(MediaFormat format, Surface surface, MediaCrypto crypto,
int flags) {
try {
mMediaCodec.configure(format, surface, crypto, flags);
return true;
} catch (IllegalStateException e) {
Log.e(TAG, "Cannot configure the video codec", e);
}
return false;
}
    /**
     * Thin wrapper over MediaFormat.createAudioFormat() so native code can
     * build an audio format without JNI boilerplate.
     */
    @CalledByNative
    private static MediaFormat createAudioFormat(String mime, int sampleRate, int channelCount) {
        return MediaFormat.createAudioFormat(mime, sampleRate, channelCount);
    }
    /**
     * Thin wrapper over MediaFormat.createVideoFormat() for decoder
     * configuration; width/height are in pixels.
     */
    @CalledByNative
    private static MediaFormat createVideoDecoderFormat(String mime, int width, int height) {
        return MediaFormat.createVideoFormat(mime, width, height);
    }
@CalledByNative
private static MediaFormat createVideoEncoderFormat(String mime, int width, int height,
int bitRate, int frameRate, int iFrameInterval, int colorFormat) {
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
return format;
}
@CalledByNative
private static void setCodecSpecificData(MediaFormat format, int index, byte[] bytes) {
String name = null;
if (index == 0) {
name = "csd-0";
} else if (index == 1) {
name = "csd-1";
}
if (name != null) {
format.setByteBuffer(name, ByteBuffer.wrap(bytes));
}
}
    /**
     * Marks the audio format as carrying ADTS headers on every frame so the
     * decoder strips them itself.
     */
    @CalledByNative
    private static void setFrameHasADTSHeader(MediaFormat format) {
        format.setInteger(MediaFormat.KEY_IS_ADTS, 1);
    }
@CalledByNative
private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int flags,
boolean playAudio) {
try {
mMediaCodec.configure(format, null, crypto, flags);
if (playAudio) {
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int channelConfig = getAudioFormat(channelCount);
// Using 16bit PCM for output. Keep this value in sync with
// kBytesPerAudioOutputSample in media_codec_bridge.cc.
int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
AudioFormat.ENCODING_PCM_16BIT);
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) {
mAudioTrack = null;
return false;
}
}
return true;
} catch (IllegalStateException e) {
Log.e(TAG, "Cannot configure the audio codec", e);
}
return false;
}
@CalledByNative
private void playOutputBuffer(byte[] buf) {
if (mAudioTrack != null) {
if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
mAudioTrack.play();
}
int size = mAudioTrack.write(buf, 0, buf.length);
if (buf.length != size) {
Log.i(TAG, "Failed to send all data to audio output, expected size: " +
buf.length + ", actual size: " + size);
}
}
}
@CalledByNative
private void setVolume(double volume) {
if (mAudioTrack != null) {
mAudioTrack.setStereoVolume((float) volume, (float) volume);
}
}
private void resetLastPresentationTimeIfNeeded(long presentationTimeUs) {
if (mFlushed) {
mLastPresentationTimeUs =
Math.max(presentationTimeUs - MAX_PRESENTATION_TIMESTAMP_SHIFT_US, 0);
mFlushed = false;
}
}
private int getAudioFormat(int channelCount) {
switch (channelCount) {
case 1:
return AudioFormat.CHANNEL_OUT_MONO;
case 2:
return AudioFormat.CHANNEL_OUT_STEREO;
case 4:
return AudioFormat.CHANNEL_OUT_QUAD;
case 6:
return AudioFormat.CHANNEL_OUT_5POINT1;
case 8:
return AudioFormat.CHANNEL_OUT_7POINT1;
default:
return AudioFormat.CHANNEL_OUT_DEFAULT;
}
}
}
| 9,149 |
776 | package act.db;
/*-
* #%L
* ACT Framework
* %%
* Copyright (C) 2014 - 2017 ActFramework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import act.data.Timestamped;
import act.inject.param.NoBind;
import org.osgl.$;
@NoBind
public abstract class TimeTrackingModelBase<
        ID_TYPE, MODEL_TYPE extends ModelBase,
        TIMESTAMP_TYPE, TIMESTAMP_TYPE_RESOLVER extends $.Function<TIMESTAMP_TYPE, Long>
        > extends ModelBase<ID_TYPE, MODEL_TYPE>
        implements TimeTrackingModel<TIMESTAMP_TYPE, TIMESTAMP_TYPE_RESOLVER>, Timestamped {

    /**
     * A record is considered new until a creation timestamp has been
     * assigned, i.e. until it has been persisted.
     */
    @Override
    public boolean _isNew() {
        return _created() == null;
    }

    /**
     * Converts the last-modified timestamp into a {@code long} via the
     * model's timestamp type resolver.
     */
    @Override
    public long _timestamp() {
        TIMESTAMP_TYPE lastModified = _lastModified();
        return _timestampTypeResolver().apply(lastModified);
    }
}
| 429 |
2,293 | <filename>CMSIS/DoxyGen/Driver/src/Driver_USART.c
/**
\defgroup usart_interface_gr USART Interface
\brief Driver API for Universal Synchronous Asynchronous Receiver/Transmitter (%Driver_USART.h)
\details
The <b>Universal Synchronous Asynchronous Receiver/Transmitter</b> (USART) implements a synchronous and asynchronous serial bus for exchanging data.
When only asynchronous mode is supported it is called Universal Asynchronous Receiver/Transmitter (UART).
Almost all microcontrollers have a serial interface (UART/USART peripheral). A UART is a simple device to send data to a PC
via a terminal emulation program (Hyperterm, TeraTerm) or to another microcontroller.
A UART takes bytes of data and transmits the individual bits in a sequential mode. At the destination,
a second UART reassembles the bits into complete bytes. Each UART contains a shift register for converting between serial and parallel transmission forms.
Wikipedia offers more information about
the <a href="http://en.wikipedia.org/wiki/Universal_asynchronous_receiver/transmitter" target="_blank"><b>Universal asynchronous receiver/transmitter</b></a>.
<b>USART API</b>
The following header files define the Application Programming Interface (API) for the USART interface:
- \b %Driver_USART.h : Driver API for Universal Synchronous Asynchronous Receiver/Transmitter
The driver implementation is a typical part of the Device Family Pack (DFP) that supports the
peripherals of the microcontroller family.
<b>Driver Functions</b>
The driver functions are published in the access struct as explained in \ref DriverFunctions
- \ref ARM_DRIVER_USART : access struct for USART driver functions
<b>Example Code</b>
The following example code shows the usage of the USART interface for asynchronous communication.
\include USART_Demo.c
@{
*/
/**
\struct ARM_DRIVER_USART
\details
The functions of the USART driver are accessed by function pointers exposed by this structure.
Refer to \ref DriverFunctions for overview information.
Each instance of an USART interface provides such an access structure.
The instance is identified by a postfix number in the symbol name of the access structure, for example:
- \b Driver_USART0 is the name of the access struct of the first instance (no. 0).
- \b Driver_USART1 is the name of the access struct of the second instance (no. 1).
A middleware configuration setting allows connecting the middleware to a specific driver instance \b %Driver_USART<i>n</i>.
The default is \token{0}, which connects a middleware to the first instance of a driver.
*****************************************************************************************************************/
/**
\struct ARM_USART_CAPABILITIES
\details
An USART driver can be implemented with different capabilities.
The data fields of this structure encode the capabilities implemented by this driver.
<b>Returned by:</b>
- \ref ARM_USART_GetCapabilities
*****************************************************************************************************************/
/**
\struct ARM_USART_STATUS
\details
Structure with information about the status of the USART. The data fields encode busy flags and error flags.
<b>Returned by:</b>
- \ref ARM_USART_GetStatus
*****************************************************************************************************************/
/**
\enum ARM_USART_MODEM_CONTROL
\details
Specifies values for controlling the modem control lines.
<b>Parameter for:</b>
- \ref ARM_USART_SetModemControl
*****************************************************************************************************************/
/**
\struct ARM_USART_MODEM_STATUS
\details
Structure with information about the status of modem lines. The data fields encode states of modem status lines.
<b>Returned by:</b>
- \ref ARM_USART_GetModemStatus
*****************************************************************************************************************/
/**
\typedef ARM_USART_SignalEvent_t
\details
Provides the typedef for the callback function \ref ARM_USART_SignalEvent.
<b>Parameter for:</b>
- \ref ARM_USART_Initialize
*******************************************************************************************************************/
/**
\defgroup usart_execution_status Status Error Codes
\ingroup common_drv_gr
\brief Negative values indicate errors (USART has specific codes in addition to common \ref execution_status).
\details
The USART driver has additional status error codes that are listed below.
Note that the USART driver also returns the common \ref execution_status.
@{
\def ARM_USART_ERROR_MODE
The \b mode requested with the function \ref ARM_USART_Control is not supported by this driver.
\def ARM_USART_ERROR_BAUDRATE
The <b>baud rate</b> requested with the function \ref ARM_USART_Control is not supported by this driver.
\def ARM_USART_ERROR_DATA_BITS
The number of <b>data bits</b> requested with the function \ref ARM_USART_Control is not supported by this driver.
\def ARM_USART_ERROR_PARITY
The <b>parity bit</b> requested with the function \ref ARM_USART_Control is not supported by this driver.
\def ARM_USART_ERROR_STOP_BITS
The <b>stop bit</b> requested with the function \ref ARM_USART_Control is not supported by this driver.
\def ARM_USART_ERROR_FLOW_CONTROL
The <b>flow control</b> requested with the function \ref ARM_USART_Control is not supported by this driver.
\def ARM_USART_ERROR_CPOL
The <b>clock polarity</b> requested with the function \ref ARM_USART_Control is not supported by this driver.
\def ARM_USART_ERROR_CPHA
The <b>clock phase</b> requested with the function \ref ARM_USART_Control is not supported by this driver.
@}
*/
/**
\defgroup USART_events USART Events
\ingroup usart_interface_gr
\brief The USART driver generates call back events that are notified via the function \ref ARM_USART_SignalEvent.
\details
This section provides the event values for the \ref ARM_USART_SignalEvent callback function.
The following call back notification events are generated:
@{
\def ARM_USART_EVENT_SEND_COMPLETE
\def ARM_USART_EVENT_RECEIVE_COMPLETE
\def ARM_USART_EVENT_TRANSFER_COMPLETE
\def ARM_USART_EVENT_TX_COMPLETE
\def ARM_USART_EVENT_TX_UNDERFLOW
\def ARM_USART_EVENT_RX_OVERFLOW
\def ARM_USART_EVENT_RX_TIMEOUT
\def ARM_USART_EVENT_RX_BREAK
\def ARM_USART_EVENT_RX_FRAMING_ERROR
\def ARM_USART_EVENT_RX_PARITY_ERROR
\def ARM_USART_EVENT_CTS
\def ARM_USART_EVENT_DSR
\def ARM_USART_EVENT_DCD
\def ARM_USART_EVENT_RI
@}
*/
/**
\defgroup USART_control USART Control Codes
\ingroup usart_interface_gr
\brief Many parameters of the USART driver are configured using the \ref ARM_USART_Control function.
\details
@{
The various USART control codes define:
- \ref usart_mode_control specifies USART mode
- \ref usart_data_bits defines the number of data bits
- \ref usart_parity_bit defines the parity bit
- \ref usart_stop_bits defines the number of stop bits
- \ref usart_flow_control specifies RTS/CTS flow control
- \ref usart_clock_polarity defines the clock polarity for the synchronous mode
- \ref usart_clock_phase defines the clock phase for the synchronous mode
- \ref usart_misc_control specifies additional miscellaneous controls
Refer to the \ref ARM_USART_Control function for further details.
*/
/**
\defgroup usart_mode_control USART Mode Control
\ingroup USART_control
\brief Specify USART mode.
\details
@{
\def ARM_USART_MODE_ASYNCHRONOUS
\sa ARM_USART_Control
\def ARM_USART_MODE_SYNCHRONOUS_MASTER
\sa ARM_USART_Control
\def ARM_USART_MODE_SYNCHRONOUS_SLAVE
\sa ARM_USART_Control
\def ARM_USART_MODE_SINGLE_WIRE
\sa ARM_USART_Control
\def ARM_USART_MODE_IRDA
\sa ARM_USART_Control
\def ARM_USART_MODE_SMART_CARD
\sa ARM_USART_Control
@}
*/
/**
\defgroup usart_misc_control USART Miscellaneous Control
\ingroup USART_control
\brief Specifies additional miscellaneous controls.
\details
@{
\def ARM_USART_SET_DEFAULT_TX_VALUE
\sa ARM_USART_Control; ARM_USART_Receive;
\def ARM_USART_SET_IRDA_PULSE
\sa ARM_USART_Control
\def ARM_USART_SET_SMART_CARD_GUARD_TIME
\sa ARM_USART_Control
\def ARM_USART_SET_SMART_CARD_CLOCK
\sa ARM_USART_Control
\def ARM_USART_CONTROL_SMART_CARD_NACK
\sa ARM_USART_Control
\def ARM_USART_CONTROL_TX
\sa ARM_USART_Control; ARM_USART_Send; ARM_USART_Transfer
\def ARM_USART_CONTROL_RX
\sa ARM_USART_Control; ARM_USART_Receive; ARM_USART_Transfer;
\def ARM_USART_CONTROL_BREAK
\sa ARM_USART_Control
\def ARM_USART_ABORT_SEND
\sa ARM_USART_Control;
\def ARM_USART_ABORT_RECEIVE
\sa ARM_USART_Control;
\def ARM_USART_ABORT_TRANSFER
\sa ARM_USART_Control;
@}
*/
/**
\defgroup usart_data_bits USART Data Bits
\ingroup USART_control
\brief Defines the number of data bits.
\details
@{
\def ARM_USART_DATA_BITS_5
\sa ARM_USART_Control
\def ARM_USART_DATA_BITS_6
\sa ARM_USART_Control
\def ARM_USART_DATA_BITS_7
\sa ARM_USART_Control
\def ARM_USART_DATA_BITS_8
\sa ARM_USART_Control
\def ARM_USART_DATA_BITS_9
\sa ARM_USART_Control
@}
*/
/**
\defgroup usart_parity_bit USART Parity Bit
\ingroup USART_control
\brief Defines the parity bit.
\details
@{
\def ARM_USART_PARITY_NONE
\sa ARM_USART_Control
\def ARM_USART_PARITY_EVEN
\sa ARM_USART_Control
\def ARM_USART_PARITY_ODD
\sa ARM_USART_Control
@}
*/
/**
\defgroup usart_stop_bits USART Stop Bits
\ingroup USART_control
\brief Defines the number of stop bits.
\details
@{
\sa ARM_USART_Control
\def ARM_USART_STOP_BITS_1
\sa ARM_USART_Control
\def ARM_USART_STOP_BITS_2
\sa ARM_USART_Control
\def ARM_USART_STOP_BITS_1_5
\sa ARM_USART_Control
\def ARM_USART_STOP_BITS_0_5
\sa ARM_USART_Control
@}
*/
/**
\defgroup usart_flow_control USART Flow Control
\ingroup USART_control
\brief Specifies RTS/CTS flow control.
\details
@{
\def ARM_USART_FLOW_CONTROL_NONE
\sa ARM_USART_Control
\def ARM_USART_FLOW_CONTROL_RTS
\sa ARM_USART_Control
\def ARM_USART_FLOW_CONTROL_CTS
\sa ARM_USART_Control
\def ARM_USART_FLOW_CONTROL_RTS_CTS
\sa ARM_USART_Control
@}
*/
/**
\defgroup usart_clock_polarity USART Clock Polarity
\ingroup USART_control
\brief Defines the clock polarity for the synchronous mode.
\details
@{
\def ARM_USART_CPOL0
\sa ARM_USART_Control; ARM_USART_Receive; ARM_USART_Send; ARM_USART_Transfer
\def ARM_USART_CPOL1
\sa ARM_USART_Control; ARM_USART_Receive; ARM_USART_Send; ARM_USART_Transfer
@}
*/
/**
\defgroup usart_clock_phase USART Clock Phase
\ingroup USART_control
\brief Defines the clock phase for the synchronous mode.
\details
@{
\def ARM_USART_CPHA0
\sa ARM_USART_Control; ARM_USART_Receive; ARM_USART_Send; ARM_USART_Transfer
\def ARM_USART_CPHA1
\sa ARM_USART_Control; ARM_USART_Receive; ARM_USART_Send; ARM_USART_Transfer
@}
*/
/**
@}
*/
// end group USART_control
//
// Functions
//
/* Documentation stub: a real driver returns its API and implementation versions. */
ARM_DRIVER_VERSION ARM_USART_GetVersion (void) {
  return { 0, 0 };
}
/**
\fn ARM_DRIVER_VERSION ARM_USART_GetVersion (void)
\details
The function \b ARM_USART_GetVersion returns version information of the driver implementation in \ref ARM_DRIVER_VERSION
- API version is the version of the CMSIS-Driver specification used to implement this driver.
- Driver version is source code version of the actual driver implementation.
Example:
\code
extern ARM_DRIVER_USART Driver_USART0;
ARM_DRIVER_USART *drv_info;
void setup_usart (void) {
ARM_DRIVER_VERSION version;
drv_info = &Driver_USART0;
version = drv_info->GetVersion ();
if (version.api < 0x10A) { // requires at minimum API version 1.10 or higher
// error handling
return;
}
}
\endcode
*****************************************************************************************************************/
/* Documentation stub: a real driver reports its implemented capabilities here. */
ARM_USART_CAPABILITIES ARM_USART_GetCapabilities (void) {
  return { 0 } ;
}
/**
\fn ARM_USART_CAPABILITIES ARM_USART_GetCapabilities (void)
\details
The function \b ARM_USART_GetCapabilities returns information about capabilities in this driver implementation.
The data fields of the structure \ref ARM_USART_CAPABILITIES encode various capabilities, for example:
supported modes, if hardware and driver are capable of signaling events using the \ref ARM_USART_SignalEvent
callback function ...
Example:
\code
extern ARM_DRIVER_USART Driver_USART0;
ARM_DRIVER_USART *drv_info;
void read_capabilities (void) {
ARM_USART_CAPABILITIES drv_capabilities;
drv_info = &Driver_USART0;
drv_capabilities = drv_info->GetCapabilities ();
// interrogate capabilities
}
\endcode
*****************************************************************************************************************/
/* Documentation stub: a real driver allocates resources and stores cb_event here. */
int32_t ARM_USART_Initialize (ARM_USART_SignalEvent_t cb_event) {
  return ARM_DRIVER_OK;
}
/**
\fn int32_t ARM_USART_Initialize (ARM_USART_SignalEvent_t cb_event)
\details
The function \b ARM_USART_Initialize initializes the USART interface.
It is called when the middleware component starts operation.
The function performs the following operations:
- Initializes the resources needed for the USART interface.
- Registers the \ref ARM_USART_SignalEvent callback function.
The parameter \em cb_event is a pointer to the \ref ARM_USART_SignalEvent callback function; use a NULL pointer
when no callback signals are required.
\b Example:
- see \ref usart_interface_gr - Driver Functions
*****************************************************************************************************************/
/* Documentation stub: a real driver releases all interface resources here. */
int32_t ARM_USART_Uninitialize (void) {
  return ARM_DRIVER_OK;
}
/**
\fn int32_t ARM_USART_Uninitialize (void)
\details
The function \b ARM_USART_Uninitialize de-initializes the resources of USART interface.
It is called when the middleware component stops operation and releases the software resources used by the interface.
*****************************************************************************************************************/
/* Documentation stub: a real driver switches the peripheral power state here. */
int32_t ARM_USART_PowerControl (ARM_POWER_STATE state) {
  return ARM_DRIVER_OK;
}
/**
\fn int32_t ARM_USART_PowerControl (ARM_POWER_STATE state)
\details
The function \b ARM_USART_PowerControl operates the power modes of the USART interface.
The parameter \em state sets the operation and can have the following values:
- \ref ARM_POWER_FULL : set-up peripheral for data transfers, enable interrupts (NVIC) and optionally DMA.
Can be called multiple times. If the peripheral is already in this mode the function performs
no operation and returns with \ref ARM_DRIVER_OK.
- \ref ARM_POWER_LOW : may use power saving. Returns \ref ARM_DRIVER_ERROR_UNSUPPORTED when not implemented.
- \ref ARM_POWER_OFF : terminates any pending data transfers, disables peripheral, disables related interrupts and DMA.
Refer to \ref CallSequence for more information.
*****************************************************************************************************************/
/* Documentation stub: a real driver starts a non-blocking send of num items here. */
int32_t ARM_USART_Send (const void *data, uint32_t num) {
  return ARM_DRIVER_OK;
}
/**
\fn int32_t ARM_USART_Send (const void *data, uint32_t num)
\details
This function \b ARM_USART_Send is used in asynchronous mode to send data to the USART transmitter.
It can also be used in synchronous mode when sending data only (received data is ignored).
Transmitter needs to be enabled by calling \ref ARM_USART_Control with \ref ARM_USART_CONTROL_TX as the control parameter and \token{1} as argument.
The function parameters specify the buffer with data and the number of items to send.
The item size is defined by the data type which depends on the configured number of data bits.
Data type is:
- \em uint8_t when configured for 5..8 data bits
- \em uint16_t when configured for 9 data bits
Calling the function <b>ARM_USART_Send</b> only starts the send operation.
The function is non-blocking and returns as soon as the driver has started the operation (driver typically configures DMA or the interrupt system for continuous transfer).
When in synchronous slave mode the operation is only registered and started when the master starts the transfer.
During the operation it is not allowed to call this function again or any other data transfer function when in synchronous mode. Also the data buffer must stay allocated and the contents of unsent data must not be modified.
When send operation is completed (requested number of items sent) the \ref ARM_USART_EVENT_SEND_COMPLETE event is generated.
Progress of send operation can also be monitored by reading the number of items already sent by calling \ref ARM_USART_GetTxCount.
After send operation has completed there might still be some data left in the driver's hardware buffer which is still being transmitted.
When all data has been physically transmitted the \ref ARM_USART_EVENT_TX_COMPLETE event is generated (if supported and reported by \em event_tx_complete in \ref ARM_USART_CAPABILITIES).
At that point also the \em tx_busy data field in \ref ARM_USART_STATUS is cleared.
Status of the transmitter can be monitored by calling the \ref ARM_USART_GetStatus and checking the \em tx_busy flag
which indicates if transmission is still in progress.
When in synchronous slave mode and transmitter is enabled but send/receive/transfer operation is not started and data is requested by the master then the \ref ARM_USART_EVENT_TX_UNDERFLOW event is generated.
Send operation can be aborted by calling \ref ARM_USART_Control with \ref ARM_USART_ABORT_SEND as the control parameter.
*****************************************************************************************************************/
/* Documentation stub: a real driver starts a non-blocking receive of num items here. */
int32_t ARM_USART_Receive (void *data, uint32_t num) {
  return ARM_DRIVER_OK;
}
/**
\fn int32_t ARM_USART_Receive (void *data, uint32_t num)
\details
This function \b ARM_USART_Receive is used in asynchronous mode to receive data from the USART receiver.
It can also be used in synchronous mode when receiving data only (transmits the default value as specified by \ref ARM_USART_Control with \ref ARM_USART_SET_DEFAULT_TX_VALUE as control parameter).
Receiver needs to be enabled by calling \ref ARM_USART_Control with \ref ARM_USART_CONTROL_RX as the control parameter and \token{1} as argument.
The function parameters specify the buffer for data and the number of items to receive.
The item size is defined by the data type which depends on the configured number of data bits.
Data type is:
- \em uint8_t when configured for 5..8 data bits
- \em uint16_t when configured for 9 data bits
Calling the function <b>ARM_USART_Receive</b> only starts the receive operation.
The function is non-blocking and returns as soon as the driver has started the operation (driver typically configures DMA or the interrupt system for continuous transfer).
When in synchronous slave mode the operation is only registered and started when the master starts the transfer.
During the operation it is not allowed to call this function again or any other data transfer function when in synchronous mode. Also the data buffer must stay allocated.
When receive operation is completed (requested number of items received) the \ref ARM_USART_EVENT_RECEIVE_COMPLETE event is generated.
Progress of receive operation can also be monitored by reading the number of items already received by calling \ref ARM_USART_GetRxCount.
Status of the receiver can be monitored by calling the \ref ARM_USART_GetStatus and checking the \em rx_busy flag
which indicates if reception is still in progress.
During reception the following events can be generated (in asynchronous mode):
- \ref ARM_USART_EVENT_RX_TIMEOUT : Receive timeout between consecutive characters detected (optional)
- \ref ARM_USART_EVENT_RX_BREAK : Break detected (Framing error is not generated for Break condition)
- \ref ARM_USART_EVENT_RX_FRAMING_ERROR : Framing error detected
- \ref ARM_USART_EVENT_RX_PARITY_ERROR : Parity error detected
- \ref ARM_USART_EVENT_RX_OVERFLOW : Data overflow detected (also in synchronous slave mode)
\ref ARM_USART_EVENT_RX_OVERFLOW event is also generated when receiver is enabled but data is lost because
receive operation in asynchronous mode or receive/send/transfer operation in synchronous slave mode has not been started.
Receive operation can be aborted by calling \ref ARM_USART_Control with \ref ARM_USART_ABORT_RECEIVE as the control parameter.
*****************************************************************************************************************/
/* Documentation stub: a real driver starts a synchronous full-duplex transfer here. */
int32_t ARM_USART_Transfer (const void *data_out, void *data_in, uint32_t num) {
  return ARM_DRIVER_OK;
}
/**
\fn int32_t ARM_USART_Transfer (const void *data_out, void *data_in, uint32_t num)
\details
This function \b ARM_USART_Transfer is used in synchronous mode to transfer data via USART. It synchronously sends data to the USART transmitter and receives data from the USART receiver.
Transmitter needs to be enabled by calling \ref ARM_USART_Control with \ref ARM_USART_CONTROL_TX as the control parameter and \token{1} as argument.
Receiver needs to be enabled by calling \ref ARM_USART_Control with \ref ARM_USART_CONTROL_RX as the control parameter and \token{1} as argument.
The function parameters specify the buffer with data to send, the buffer for data to receive and the number of items to transfer.
The item size is defined by the data type which depends on the configured number of data bits.
Data type is:
- \em uint8_t when configured for 5..8 data bits
- \em uint16_t when configured for 9 data bits
Calling the function <b>ARM_USART_Transfer</b> only starts the transfer operation.
The function is non-blocking and returns as soon as the driver has started the operation (driver typically configures DMA or the interrupt system for continuous transfer).
When in synchronous slave mode the operation is only registered and started when the master starts the transfer.
During the operation it is not allowed to call this function or any other data transfer function again. Also the data buffers must stay allocated and the contents of unsent data must not be modified.
When transfer operation is completed (requested number of items transferred) the \ref ARM_USART_EVENT_TRANSFER_COMPLETE event is generated.
Progress of transfer operation can also be monitored by reading the number of items already transferred by calling \ref ARM_USART_GetTxCount or \ref ARM_USART_GetRxCount.
Status of the transmitter or receiver can be monitored by calling the \ref ARM_USART_GetStatus and checking the \em tx_busy or \em rx_busy flag.
When in synchronous slave mode also the following events can be generated:
- \ref ARM_USART_EVENT_TX_UNDERFLOW : transmitter is enabled but transfer operation is not started and data is requested by the master
- \ref ARM_USART_EVENT_RX_OVERFLOW : data lost during transfer or because receiver is enabled but transfer operation has not been started
Transfer operation can also be aborted by calling \ref ARM_USART_Control with \ref ARM_USART_ABORT_TRANSFER as the control parameter.
*****************************************************************************************************************/
/* Documentation stub: a real driver returns the number of items sent so far. */
uint32_t ARM_USART_GetTxCount (void) {
  return 0;
}
/**
\fn uint32_t ARM_USART_GetTxCount (void)
\details
The function \b ARM_USART_GetTxCount returns the number of the currently transmitted data items during \ref ARM_USART_Send and \ref ARM_USART_Transfer operation.
*****************************************************************************************************************/
/* Documentation stub: a real driver returns the number of items received so far. */
uint32_t ARM_USART_GetRxCount (void) {
  return 0;
}
/**
\fn uint32_t ARM_USART_GetRxCount (void)
\details
The function \b ARM_USART_GetRxCount returns the number of the currently received data items during \ref ARM_USART_Receive and \ref ARM_USART_Transfer operation.
*****************************************************************************************************************/
/* Documentation stub: a real driver decodes control/arg and applies the settings. */
int32_t ARM_USART_Control (uint32_t control, uint32_t arg) {
  return ARM_DRIVER_OK;
}
/**
\fn int32_t ARM_USART_Control (uint32_t control, uint32_t arg)
\details
The function \b ARM_USART_Control controls the USART interface settings and executes various operations.
The parameter \em control sets the operation and is explained in the table below.
Values from different categories can be ORed with the exception of \ref usart_misc_tab "Miscellaneous Operations".
The parameter \em arg provides, depending on the operation, additional information, for example the baudrate.
The table lists the available \em control operations.
<TABLE class="cmtable" summary="">
<TR><TH>Parameter \em control </TH> <TH style="text-align:right"> Bit </TH> <TH> Category </TH> <TH>Description </TH></TR>
<TR><TD>\ref ARM_USART_MODE_ASYNCHRONOUS</TD> <td rowspan="6" style="text-align:right"> 0..7 </td> <td rowspan="6"> Operation Mode </td><TD>Set to asynchronous UART mode. \em arg specifies baudrate.</TD></TR>
<TR><TD>\ref ARM_USART_MODE_SYNCHRONOUS_MASTER</TD> <TD>Set to synchronous master mode with clock signal generation. \em arg specifies baudrate.</TD></TR>
<TR><TD>\ref ARM_USART_MODE_SYNCHRONOUS_SLAVE</TD> <TD>Set to synchronous slave mode with external clock signal.</TD></TR>
<TR><TD>\ref ARM_USART_MODE_SINGLE_WIRE</TD> <TD>Set to single-wire (half-duplex) mode. \em arg specifies baudrate.</TD></TR>
<TR><TD>\ref ARM_USART_MODE_IRDA</TD> <TD>Set to Infra-red data mode. \em arg specifies baudrate.</TD></TR>
<TR><TD>\ref ARM_USART_MODE_SMART_CARD</TD> <TD>Set to Smart Card mode. \em arg specifies baudrate.</TD></TR>
<TR><TD>\ref ARM_USART_DATA_BITS_5</TD> <td rowspan="5" style="text-align:right"> 8..11 </td> <td rowspan="5"> Data Bits </td><TD>Set to \token{5} data bits</TD></TR>
<TR><TD>\ref ARM_USART_DATA_BITS_6</TD> <TD>Set to \token{6} data bits</TD></TR>
<TR><TD>\ref ARM_USART_DATA_BITS_7</TD> <TD>Set to \token{7} data bits</TD></TR>
<TR><TD>\ref ARM_USART_DATA_BITS_8</TD> <TD>Set to \token{8} data bits (default)</TD></TR>
<TR><TD>\ref ARM_USART_DATA_BITS_9</TD> <TD>Set to \token{9} data bits</TD></TR>
<TR><TD>\ref ARM_USART_PARITY_EVEN</TD> <td rowspan="3" style="text-align:right"> 12..13 </td> <td rowspan="3"> Parity Bit </td><TD>Set to Even Parity</TD></TR>
<TR><TD>\ref ARM_USART_PARITY_NONE</TD> <TD>Set to No Parity (default)</TD></TR>
<TR><TD>\ref ARM_USART_PARITY_ODD</TD> <TD>Set to Odd Parity</TD></TR>
<TR><TD>\ref ARM_USART_STOP_BITS_1</TD> <td rowspan="4" style="text-align:right"> 14..15 </td> <td rowspan="4"> Stop Bit </td><TD>Set to \token{1} Stop bit (default)</TD></TR>
<TR><TD>\ref ARM_USART_STOP_BITS_2</TD> <TD>Set to \token{2} Stop bits</TD></TR>
<TR><TD>\ref ARM_USART_STOP_BITS_1_5</TD> <TD>Set to \token{1.5} Stop bits</TD></TR>
<TR><TD>\ref ARM_USART_STOP_BITS_0_5</TD> <TD>Set to \token{0.5} Stop bits</TD></TR>
<TR><TD>\ref ARM_USART_FLOW_CONTROL_NONE</TD> <td rowspan="4" style="text-align:right"> 16..17 </td> <td rowspan="4"> Flow Control </td><TD>No flow control signal (default)</TD></TR>
<TR><TD>\ref ARM_USART_FLOW_CONTROL_CTS</TD> <TD>Set to use the CTS flow control signal</TD></TR>
<TR><TD>\ref ARM_USART_FLOW_CONTROL_RTS</TD> <TD>Set to use the RTS flow control signal</TD></TR>
<TR><TD>\ref ARM_USART_FLOW_CONTROL_RTS_CTS</TD> <TD>Set to use the RTS and CTS flow control signal</TD></TR>
<TR><TD>\ref ARM_USART_CPOL0</TD> <td rowspan="2" style="text-align:right"> 18 </td> <td rowspan="2"> Clock Polarity </td><TD>CPOL=\token{0} (default) : data are captured on rising edge (low->high transition)</TD></TR>
<TR><TD>\ref ARM_USART_CPOL1</TD> <TD>CPOL=\token{1} : data are captured on falling edge (high->low transition)</TD></TR>
<TR><TD>\ref ARM_USART_CPHA0</TD> <td rowspan="2" style="text-align:right"> 19 </td> <td rowspan="2"> Clock Phase </td><TD>CPHA=\token{0} (default) : sample on first (leading) edge</TD></TR>
<TR><TD>\ref ARM_USART_CPHA1</TD> <TD>CPHA=\token{1} : sample on second (trailing) edge</TD></TR>
<TR><TD>\ref ARM_USART_ABORT_RECEIVE</TD> <td rowspan="11" style="text-align:right"> 0..19 </td> <td rowspan="11"> \anchor usart_misc_tab Miscellaneous Operations <br>(cannot be ORed) </td><TD>Abort receive operation (see also: \ref ARM_USART_Receive)</TD></TR>
<TR> <TD>\ref ARM_USART_ABORT_SEND</TD> <TD>Abort send operation (see also: \ref ARM_USART_Send)</TD></TR>
<TR> <TD>\ref ARM_USART_ABORT_TRANSFER</TD> <TD>Abort transfer operation (see also: \ref ARM_USART_Transfer)</TD></TR>
<TR> <TD>\ref ARM_USART_CONTROL_BREAK</TD> <TD>Enable or disable continuous Break transmission; \em arg : \token{0=disabled; 1=enabled}</TD></TR>
<TR> <TD>\ref ARM_USART_CONTROL_RX</TD> <TD>Enable or disable receiver; \em arg : \token{0=disabled; 1=enabled} (see also: \ref ARM_USART_Receive; \ref ARM_USART_Transfer)</TD></TR>
<TR> <TD>\ref ARM_USART_CONTROL_SMART_CARD_NACK</TD> <TD>Enable or disable Smart Card NACK generation; \em arg : \token{0=disabled; 1=enabled}</TD></TR>
<TR> <TD>\ref ARM_USART_CONTROL_TX</TD> <TD>Enable or disable transmitter; \em arg : \token{0=disabled; 1=enabled} (see also: \ref ARM_USART_Send; \ref ARM_USART_Transfer)</TD></TR>
<TR> <TD>\ref ARM_USART_SET_DEFAULT_TX_VALUE</TD> <TD>Set the default transmit value (synchronous receive only); \em arg specifies the value. (see also: \ref ARM_USART_Receive)</TD></TR>
<TR> <TD>\ref ARM_USART_SET_IRDA_PULSE</TD> <TD>Set the IrDA pulse value in \token{ns}; \em arg : \token{0=3/16 of bit period}</TD></TR>
<TR> <TD>\ref ARM_USART_SET_SMART_CARD_CLOCK</TD> <TD>Set the Smart Card Clock in \token{Hz}; \em arg : \token{0=Clock not set}</TD></TR>
<TR> <TD>\ref ARM_USART_SET_SMART_CARD_GUARD_TIME</TD> <TD>Set the Smart Card guard time; \em arg = number of bit periods</TD></TR>
</TABLE>
\b Example
\code
extern ARM_DRIVER_USART Driver_USART0;
// configure to UART mode: 8 bits, no parity, 1 stop bit, no flow control, 9600 bps
status = Driver_USART0.Control(ARM_USART_MODE_ASYNCHRONOUS |
ARM_USART_DATA_BITS_8 |
ARM_USART_PARITY_NONE |
ARM_USART_STOP_BITS_1 |
ARM_USART_FLOW_CONTROL_NONE, 9600);
// identical with above settings (default settings removed)
// configure to UART mode: 8 bits, no parity, 1 stop bit, no flow control, 9600 bps
status = Driver_USART0.Control(ARM_USART_MODE_ASYNCHRONOUS, 9600);
// enable TX output
status = Driver_USART0.Control(ARM_USART_CONTROL_TX, 1);
// disable RX input
status = Driver_USART0.Control(ARM_USART_CONTROL_RX, 0);
\endcode
*****************************************************************************************************************/
// Documentation-template stub: a real driver fills ARM_USART_STATUS from hardware state.
ARM_USART_STATUS ARM_USART_GetStatus (void) {
  return { 0 };  // zero-initialized status: no TX/RX activity, no error flags set
}
/**
\fn ARM_USART_STATUS ARM_USART_GetStatus (void)
\details
The function \b ARM_USART_GetStatus retrieves the current USART interface status.
*****************************************************************************************************************/
// Documentation-template stub: a real driver activates/deactivates the selected modem line here.
int32_t ARM_USART_SetModemControl (ARM_USART_MODEM_CONTROL control) {
  return ARM_DRIVER_OK;  // stub always reports success
}
/**
\fn int32_t ARM_USART_SetModemControl (ARM_USART_MODEM_CONTROL control)
\details
The function \b ARM_USART_SetModemControl activates or deactivates the selected USART modem control line.
The function \ref ARM_USART_GetModemStatus returns information about status of the modem lines.
*****************************************************************************************************************/
// Documentation-template stub: a real driver reads the modem status lines (CTS/DSR/DCD/RI) here.
ARM_USART_MODEM_STATUS ARM_USART_GetModemStatus (void) {
  return { 0 };  // zero-initialized: all modem lines reported inactive
}
/**
\fn ARM_USART_MODEM_STATUS ARM_USART_GetModemStatus (void)
\details
The function \b ARM_USART_GetModemStatus returns the current USART Modem Status lines state.
The function \ref ARM_USART_SetModemControl sets the modem control lines of the USART.
*****************************************************************************************************************/
// Documentation-template stub: this callback is implemented by the application and
// registered via ARM_USART_Initialize; the driver invokes it with an event bit mask.
void ARM_USART_SignalEvent (uint32_t event) {
  // function body
}
/**
\fn void ARM_USART_SignalEvent (uint32_t event)
\details
The function \b ARM_USART_SignalEvent is a callback function registered by the function \ref ARM_USART_Initialize.
The parameter \em event indicates one or more events that occurred during driver operation.
Each event is encoded in a separate bit and therefore it is possible to signal multiple events within the same call.
Not every event is necessarily generated by the driver. This depends on the implemented capabilities stored in the
data fields of the structure \ref ARM_USART_CAPABILITIES, which can be retrieved with the function \ref ARM_USART_GetCapabilities.
The following events can be generated:
<table class="cmtable" summary="">
<tr>
<th> Parameter \em event </th><th> Bit </th><th> Description </th>
<th> supported when ARM_USART_CAPABILITIES </th>
</tr>
<tr>
<td> \ref ARM_USART_EVENT_SEND_COMPLETE </td><td> 0 </td><td> Occurs after call to \ref ARM_USART_Send to indicate that all the data to be sent
was processed by the driver. All the data might have been already transmitted
or parts of it are still queued in transmit buffers. The driver is ready for the next
call to \ref ARM_USART_Send; however USART may still transmit data. </td>
<td> <i>always supported</i> </td>
</tr>
<tr>
<td> \ref ARM_USART_EVENT_RECEIVE_COMPLETE </td><td> 1 </td><td> Occurs after call to \ref ARM_USART_Receive to indicate that all the data has been
received. The driver is ready for the next call to \ref ARM_USART_Receive. </td>
<td> <i>always supported</i> </td>
</tr>
<tr>
<td> \ref ARM_USART_EVENT_TRANSFER_COMPLETE </td><td> 2 </td><td> Occurs after call to \ref ARM_USART_Transfer to indicate that all the data has been
transferred. The driver is ready for the next call to \ref ARM_USART_Transfer. </td>
<td> <i>always supported</i> </td>
</tr>
<tr>
<td> \ref ARM_USART_EVENT_TX_COMPLETE </td><td> 3 </td><td> Occurs after call to \ref ARM_USART_Send to indicate that all the data has been
physically transmitted on the wires. </td>
<td> data field \em event_tx_complete = \token{1} </td>
</tr>
<tr>
<td> \ref ARM_USART_EVENT_TX_UNDERFLOW </td><td> 4 </td><td> Occurs in synchronous slave mode when data is requested by the master but
send/receive/transfer operation has not been started.
                                                        Data field \em tx_underflow = \token{1} of \ref ARM_USART_STATUS. </td>
<td> <i>always supported</i> </td>
</tr>
<tr>
<td> \ref ARM_USART_EVENT_RX_OVERFLOW </td><td> 5 </td><td> Occurs when data is lost during receive/transfer operation or when data is lost
because receive operation in asynchronous mode or receive/send/transfer operation in
synchronous slave mode has not been started.
Data field \em rx_overflow = \token{1} of \ref ARM_USART_STATUS. </td>
<td> <i>always supported</i> </td>
</tr>
<tr>
<td> ARM_USART_EVENT_RX_TIMEOUT </td><td> 6 </td><td> Occurs during receive when idle time is detected between consecutive characters
(idle time is hardware dependent).</td>
<td> data field \em event_rx_timeout = \token{1} </td>
</tr>
<tr>
<td> \ref ARM_USART_EVENT_RX_BREAK </td><td> 7 </td><td> Occurs when break is detected during receive.
Data field \em rx_break = \token{1} of \ref ARM_USART_STATUS. </td>
<td> <i>always supported</i> </td>
</tr>
<tr>
<td> \ref ARM_USART_EVENT_RX_FRAMING_ERROR </td><td> 8 </td><td> Occurs when framing error is detected during receive.
Data field \em rx_framing_error = \token{1} of \ref ARM_USART_STATUS. </td>
<td> <i>always supported</i> </td>
</tr>
<tr>
<td> \ref ARM_USART_EVENT_RX_PARITY_ERROR </td><td> 9 </td><td> Occurs when parity error is detected during receive.
Data field \em rx_parity_error = \token{1} of \ref ARM_USART_STATUS. </td>
<td> <i>always supported</i> </td>
</tr>
<tr>
<td> ARM_USART_EVENT_CTS </td><td> 10 </td><td> Indicates that CTS modem line state has changed.
Data field \em cts of \ref ARM_USART_MODEM_STATUS has changed. </td>
<td> data field \em event_cts = \token{1} and <br>
data field \em cts = \token{1} </td>
</tr>
<tr>
<td> ARM_USART_EVENT_DSR </td><td> 11 </td><td> Indicates that DSR modem line state has changed.
Data field \em dsr of \ref ARM_USART_MODEM_STATUS has changed. </td>
<td> data field \em event_dsr = \token{1} and <br>
data field \em dsr = \token{1} </td>
</tr>
<tr>
<td> ARM_USART_EVENT_DCD </td><td> 12 </td><td> Indicates that DCD modem line state has changed.
Data field \em dcd of \ref ARM_USART_MODEM_STATUS has changed. </td>
<td> data field \em event_dcd = \token{1} and <br>
data field \em dcd = \token{1} </td>
</tr>
<tr>
<td> ARM_USART_EVENT_RI </td><td> 13 </td><td> Indicates that RI modem line state has changed from active to inactive
(trailing edge on RI).
Data field \em ri of \ref ARM_USART_MODEM_STATUS has changed from \token{1} to \token{0}. </td>
<td> data field \em event_ri = \token{1} and <br>
data field \em ri = \token{1} </td>
</tr>
</table>
*****************************************************************************************************************/
/**
@}
*/
// End USART Interface
| 13,799 |
1,444 | <reponame>GabrielSturtevant/mage
package mage.cards.s;
import java.util.UUID;
import mage.abilities.Ability;
import mage.abilities.effects.OneShotEffect;
import mage.abilities.effects.common.ExileTargetEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.Outcome;
import mage.counters.CounterType;
import mage.filter.common.FilterControlledPermanent;
import mage.game.Game;
import mage.game.permanent.Permanent;
import mage.players.Player;
import mage.target.TargetPermanent;
import mage.target.common.TargetCreaturePermanent;
/**
*
* @author TheElk801
*/
/**
 * Settle the Score -- {2}{B}{B} sorcery.
 * "Exile target creature. Put two loyalty counters on a planeswalker you control."
 * The exile and the loyalty-counter steps are modeled as two separate effects on
 * the same spell ability; the counter step is implemented by SettleTheScoreEffect.
 */
public final class SettleTheScore extends CardImpl {

    public SettleTheScore(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.SORCERY}, "{2}{B}{B}");

        // Exile target creature. Put two loyalty counters on a planeswalker you control.
        this.getSpellAbility().addEffect(new ExileTargetEffect());
        this.getSpellAbility().addTarget(new TargetCreaturePermanent());
        this.getSpellAbility().addEffect(new SettleTheScoreEffect());
    }

    // Copy constructor used by copy(); private per framework convention.
    private SettleTheScore(final SettleTheScore card) {
        super(card);
    }

    @Override
    public SettleTheScore copy() {
        return new SettleTheScore(this);
    }
}
/**
 * Second half of Settle the Score: after the targeted creature has been exiled,
 * the controller picks one of their planeswalkers and puts two loyalty counters
 * on it. Resolves successfully even when no planeswalker is available.
 */
class SettleTheScoreEffect extends OneShotEffect {

    private static final FilterControlledPermanent filter = new FilterControlledPermanent("planeswalker you control");

    static {
        filter.add(CardType.PLANESWALKER.getPredicate());
    }

    public SettleTheScoreEffect() {
        super(Outcome.Benefit);
        this.staticText = "Put two loyalty counters on a planeswalker you control";
    }

    public SettleTheScoreEffect(final SettleTheScoreEffect effect) {
        super(effect);
    }

    @Override
    public SettleTheScoreEffect copy() {
        return new SettleTheScoreEffect(this);
    }

    @Override
    public boolean apply(Game game, Ability source) {
        Player controller = game.getPlayer(source.getControllerId());
        if (controller == null) {
            return false;
        }
        // The controller may simply have no planeswalker; the effect still
        // counts as applied in that case.
        TargetPermanent target = new TargetPermanent(filter);
        if (!target.choose(Outcome.Benefit, controller.getId(), source.getSourceId(), game)) {
            return true;
        }
        Permanent planeswalker = game.getPermanent(target.getFirstTarget());
        if (planeswalker == null) {
            return true;
        }
        planeswalker.addCounters(CounterType.LOYALTY.createInstance(2), source.getControllerId(), source, game);
        return true;
    }
}
| 927 |
2,072 | <reponame>defendercrypt/amundsen
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
"""
This is an example script for extracting Feast feature tables
Usage:
python3 sample_feast_loader.py [feast_core] [feast_serving] [neo4j_endpoint] [es_url]
For example:
python sample_feast_loader.py feast-feast-core:6565 feast-feast-serving:6566 bolt://neo4j http://elasticsearch:9200
"""
import sys
import uuid
from elasticsearch.client import Elasticsearch
from pyhocon import ConfigFactory
from databuilder.extractor.feast_extractor import FeastExtractor
from databuilder.extractor.neo4j_extractor import Neo4jExtractor
from databuilder.extractor.neo4j_search_data_extractor import Neo4jSearchDataExtractor
from databuilder.job.job import DefaultJob
from databuilder.loader.file_system_elasticsearch_json_loader import FSElasticsearchJSONLoader
from databuilder.loader.file_system_neo4j_csv_loader import FsNeo4jCSVLoader
from databuilder.publisher import neo4j_csv_publisher
from databuilder.publisher.elasticsearch_publisher import ElasticsearchPublisher
from databuilder.task.task import DefaultTask
# Positional command-line arguments: Feast core endpoint, Feast serving
# endpoint, Neo4j bolt endpoint, and the Elasticsearch URL (see module docstring).
feast_endpoint = sys.argv[1]
feast_serving_endpoint = sys.argv[2]
neo4j_endpoint = sys.argv[3]
es_url = sys.argv[4]
# Shared Elasticsearch client, reused by the publish job below.
es = Elasticsearch([es_url])
# Neo4j credentials.  NOTE(review): hard-coded for this sample; the password
# placeholder must be replaced before running against a real instance.
neo4j_user = "neo4j"
neo4j_password = "<PASSWORD>"
def create_feast_job_config(publish_tag="some_unique_tag"):
    """Build the pyhocon config for the Feast -> Neo4j ingestion job.

    Extracted feature-table metadata is staged as CSV node/relationship files
    under a temp folder, then published to the Neo4j endpoint supplied on the
    command line.

    :param publish_tag: tag stamped onto published nodes/relations so records
        from earlier runs can be distinguished.  Callers should pass a value
        that is unique per run (e.g. ``str(uuid.uuid4())``); the default keeps
        the previous hard-coded placeholder for backward compatibility.
    :return: pyhocon config tree suitable for ``DefaultJob``.
    """
    # Staging area for the CSV loader; the Neo4j publisher reads the same folders.
    tmp_folder = "/var/tmp/amundsen/table_metadata"
    node_files_folder = f"{tmp_folder}/nodes/"
    relationship_files_folder = f"{tmp_folder}/relationships/"

    job_config = ConfigFactory.from_dict(
        {
            f"extractor.feast.{FeastExtractor.FEAST_ENDPOINT_CONFIG_KEY}": feast_endpoint,
            f"extractor.feast.{FeastExtractor.FEAST_SERVING_ENDPOINT_CONFIG_KEY}": feast_serving_endpoint,
            f"loader.filesystem_csv_neo4j.{FsNeo4jCSVLoader.NODE_DIR_PATH}": node_files_folder,
            f"loader.filesystem_csv_neo4j.{FsNeo4jCSVLoader.RELATION_DIR_PATH}": relationship_files_folder,
            f"publisher.neo4j.{neo4j_csv_publisher.NODE_FILES_DIR}": node_files_folder,
            f"publisher.neo4j.{neo4j_csv_publisher.RELATION_FILES_DIR}": relationship_files_folder,
            f"publisher.neo4j.{neo4j_csv_publisher.NEO4J_END_POINT_KEY}": neo4j_endpoint,
            f"publisher.neo4j.{neo4j_csv_publisher.NEO4J_USER}": neo4j_user,
            f"publisher.neo4j.{neo4j_csv_publisher.NEO4J_PASSWORD}": neo4j_password,
            "publisher.neo4j.job_publish_tag": publish_tag,
        }
    )
    return job_config
def create_es_publish_job_config(
    elasticsearch_index_alias="table_search_index",
    elasticsearch_doc_type_key="table",
    model_name="databuilder.models.table_elasticsearch_document.TableESDocument",
    cypher_query=None,
    elasticsearch_mapping=None,
):
    """Build the pyhocon config for the Neo4j -> Elasticsearch publish job.

    :param elasticsearch_index_alias: alias the search service queries against
        (see amundsensearchlibrary/search_service/config.py)
    :param elasticsearch_doc_type_key: prefix for the index name; the default
        ``table`` yields ``table_search_index``
    :param model_name: Databuilder model class shuttled between extractor and loader
    :param cypher_query: optional override for the Neo4j extraction query;
        when None the extractor's built-in ``Table`` query is used
    :param elasticsearch_mapping: optional Elasticsearch field-mapping override;
        when None the publisher's built-in ``Table`` mapping is used
    """
    # The loader writes the extracted documents here; the publisher reads them back.
    search_data_path = "/var/tmp/amundsen/search_data.json"
    # Each run publishes into a fresh, uniquely named index behind the alias.
    new_index_name = "tables" + str(uuid.uuid4())

    extractor_conf = {
        f"extractor.search_data.extractor.neo4j.{Neo4jExtractor.GRAPH_URL_CONFIG_KEY}": neo4j_endpoint,
        f"extractor.search_data.extractor.neo4j.{Neo4jExtractor.MODEL_CLASS_CONFIG_KEY}": model_name,
        f"extractor.search_data.extractor.neo4j.{Neo4jExtractor.NEO4J_AUTH_USER}": neo4j_user,
        f"extractor.search_data.extractor.neo4j.{Neo4jExtractor.NEO4J_AUTH_PW}": neo4j_password,
    }
    loader_conf = {
        f"loader.filesystem.elasticsearch.{FSElasticsearchJSONLoader.FILE_PATH_CONFIG_KEY}": search_data_path,
        f"loader.filesystem.elasticsearch.{FSElasticsearchJSONLoader.FILE_MODE_CONFIG_KEY}": "w",
    }
    publisher_conf = {
        f"publisher.elasticsearch.{ElasticsearchPublisher.FILE_PATH_CONFIG_KEY}": search_data_path,
        f"publisher.elasticsearch.{ElasticsearchPublisher.FILE_MODE_CONFIG_KEY}": "r",
        f"publisher.elasticsearch.{ElasticsearchPublisher.ELASTICSEARCH_CLIENT_CONFIG_KEY}": es,
        f"publisher.elasticsearch.{ElasticsearchPublisher.ELASTICSEARCH_NEW_INDEX_CONFIG_KEY}": new_index_name,
        f"publisher.elasticsearch.{ElasticsearchPublisher.ELASTICSEARCH_DOC_TYPE_CONFIG_KEY}": elasticsearch_doc_type_key,
        f"publisher.elasticsearch.{ElasticsearchPublisher.ELASTICSEARCH_ALIAS_CONFIG_KEY}": elasticsearch_index_alias,
    }

    job_config = ConfigFactory.from_dict({**extractor_conf, **loader_conf, **publisher_conf})

    # These keys must be absent (not None) when unset, so add them conditionally.
    if cypher_query:
        job_config.put(
            f"extractor.search_data.{Neo4jSearchDataExtractor.CYPHER_QUERY_CONFIG_KEY}",
            cypher_query,
        )
    if elasticsearch_mapping:
        job_config.put(
            f"publisher.elasticsearch.{ElasticsearchPublisher.ELASTICSEARCH_MAPPING_CONFIG_KEY}",
            elasticsearch_mapping,
        )
    return job_config
if __name__ == "__main__":
    # Job 1: pull feature tables from Feast and publish them into Neo4j.
    feast_job = DefaultJob(
        conf=create_feast_job_config(),
        task=DefaultTask(extractor=FeastExtractor(), loader=FsNeo4jCSVLoader()),
        publisher=neo4j_csv_publisher.Neo4jCsvPublisher(),
    )
    feast_job.launch()

    # Job 2: re-extract the freshly published metadata from Neo4j and index it
    # into Elasticsearch so it becomes searchable.
    es_publish_job = DefaultJob(
        conf=create_es_publish_job_config(),
        task=DefaultTask(
            loader=FSElasticsearchJSONLoader(), extractor=Neo4jSearchDataExtractor()
        ),
        publisher=ElasticsearchPublisher(),
    )
    es_publish_job.launch()
| 2,853 |
2,360 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RRngtools(RPackage):
    """Utility Functions for Working with Random Number Generators

    Provides a set of functions for working with Random Number Generators
    (RNGs). In particular, a generic S4 framework is defined for
    getting/setting the current RNG, or RNG data that are embedded into objects
    for reproducibility. Notably, convenient default methods greatly facilitate
    the way current RNG settings can be changed."""

    homepage = "https://renozao.github.io/rngtools"
    # CRAN release tarball; list_url lets Spack locate archived older versions.
    url = "https://cloud.r-project.org/src/contrib/rngtools_1.4.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/rngtools"

    # Pinned checksums per released version, newest first.
    version('1.5', sha256='8274873b73f7acbe0ce007e62893bf4d369d2aab8768754a60da46b3f078f575')
    version('1.4', sha256='3aa92366e5d0500537964302f5754a750aff6b169a27611725e7d84552913bce')
    version('1.3.1.1', sha256='99e1a8fde6b81128d0946746c1ef84ec5b6c2973ad843a080098baf73aa3364c')
    version('1.3.1', sha256='763fc493cb821a4d3e514c0dc876d602a692c528e1d67f295dde70c77009e224')

    # Dependency constraints; when= clauses track requirements that changed
    # between rngtools releases.
    # NOTE(review): the specs below appear garbled by e-mail anonymization
    # ('[email protected]') -- verify the real package@version specs against upstream.
    depends_on('[email protected]:', type=('build', 'run'))
    depends_on('[email protected]:', when='@1.4:', type=('build', 'run'))
    depends_on('r-digest', type=('build', 'run'))
    depends_on('[email protected]:', when='@:1.4', type=('build', 'run'))
    depends_on('r-stringr', when='@:1.4', type=('build', 'run'))
| 632 |
481 | <filename>libweston/linux-explicit-synchronization.c<gh_stars>100-1000
/*
* Copyright © 2018 Collabora, Ltd.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#include "config.h"
#include <assert.h>
#include <inttypes.h>
#include <libweston/libweston.h>
#include "linux-explicit-synchronization.h"
#include "linux-explicit-synchronization-unstable-v1-server-protocol.h"
#include "linux-sync-file.h"
#include "shared/fd-util.h"
#include "libweston-internal.h"
static void
destroy_linux_buffer_release(struct wl_resource *resource)
{
struct weston_buffer_release *buffer_release =
wl_resource_get_user_data(resource);
fd_clear(&buffer_release->fence_fd);
free(buffer_release);
}
static void
destroy_linux_surface_synchronization(struct wl_resource *resource)
{
struct weston_surface *surface =
wl_resource_get_user_data(resource);
if (surface) {
fd_clear(&surface->pending.acquire_fence_fd);
surface->synchronization_resource = NULL;
}
}
static void
linux_surface_synchronization_destroy(struct wl_client *client,
struct wl_resource *resource)
{
wl_resource_destroy(resource);
}
static void
linux_surface_synchronization_set_acquire_fence(struct wl_client *client,
struct wl_resource *resource,
int32_t fd)
{
struct weston_surface *surface = wl_resource_get_user_data(resource);
if (!surface) {
wl_resource_post_error(
resource,
ZWP_LINUX_SURFACE_SYNCHRONIZATION_V1_ERROR_NO_SURFACE,
"surface no longer exists");
goto err;
}
if (!linux_sync_file_is_valid(fd)) {
wl_resource_post_error(
resource,
ZWP_LINUX_SURFACE_SYNCHRONIZATION_V1_ERROR_INVALID_FENCE,
"invalid fence fd");
goto err;
}
if (surface->pending.acquire_fence_fd != -1) {
wl_resource_post_error(
resource,
ZWP_LINUX_SURFACE_SYNCHRONIZATION_V1_ERROR_DUPLICATE_FENCE,
"already have a fence fd");
goto err;
}
fd_update(&surface->pending.acquire_fence_fd, fd);
return;
err:
close(fd);
}
static void
linux_surface_synchronization_get_release(struct wl_client *client,
struct wl_resource *resource,
uint32_t id)
{
struct weston_surface *surface =
wl_resource_get_user_data(resource);
struct weston_buffer_release *buffer_release;
if (!surface) {
wl_resource_post_error(
resource,
ZWP_LINUX_SURFACE_SYNCHRONIZATION_V1_ERROR_NO_SURFACE,
"surface no longer exists");
return;
}
if (surface->pending.buffer_release_ref.buffer_release) {
wl_resource_post_error(
resource,
ZWP_LINUX_SURFACE_SYNCHRONIZATION_V1_ERROR_DUPLICATE_RELEASE,
"already has a buffer release");
return;
}
buffer_release = zalloc(sizeof *buffer_release);
if (buffer_release == NULL)
goto err_alloc;
buffer_release->fence_fd = -1;
buffer_release->resource =
wl_resource_create(client,
&zwp_linux_buffer_release_v1_interface,
wl_resource_get_version(resource), id);
if (!buffer_release->resource)
goto err_create;
wl_resource_set_implementation(buffer_release->resource, NULL,
buffer_release,
destroy_linux_buffer_release);
weston_buffer_release_reference(&surface->pending.buffer_release_ref,
buffer_release);
return;
err_create:
free(buffer_release);
err_alloc:
wl_client_post_no_memory(client);
}
/* Request dispatch table for zwp_linux_surface_synchronization_v1. */
const struct zwp_linux_surface_synchronization_v1_interface
linux_surface_synchronization_implementation = {
	linux_surface_synchronization_destroy,
	linux_surface_synchronization_set_acquire_fence,
	linux_surface_synchronization_get_release,
};
static void
linux_explicit_synchronization_destroy(struct wl_client *client,
struct wl_resource *resource)
{
wl_resource_destroy(resource);
}
static void
linux_explicit_synchronization_get_synchronization(struct wl_client *client,
struct wl_resource *resource,
uint32_t id,
struct wl_resource *surface_resource)
{
struct weston_surface *surface =
wl_resource_get_user_data(surface_resource);
if (surface->synchronization_resource) {
wl_resource_post_error(
resource,
ZWP_LINUX_EXPLICIT_SYNCHRONIZATION_V1_ERROR_SYNCHRONIZATION_EXISTS,
"wl_surface@%"PRIu32" already has a synchronization object",
wl_resource_get_id(surface_resource));
return;
}
surface->synchronization_resource =
wl_resource_create(client,
&zwp_linux_surface_synchronization_v1_interface,
wl_resource_get_version(resource), id);
if (!surface->synchronization_resource) {
wl_client_post_no_memory(client);
return;
}
wl_resource_set_implementation(surface->synchronization_resource,
&linux_surface_synchronization_implementation,
surface,
destroy_linux_surface_synchronization);
}
/* Request dispatch table for zwp_linux_explicit_synchronization_v1. */
static const struct zwp_linux_explicit_synchronization_v1_interface
linux_explicit_synchronization_implementation = {
	linux_explicit_synchronization_destroy,
	linux_explicit_synchronization_get_synchronization
};
/* Global bind handler: hands a client its zwp_linux_explicit_synchronization_v1
 * manager resource, backed by the compositor passed at wl_global_create(). */
static void
bind_linux_explicit_synchronization(struct wl_client *client,
				    void *data, uint32_t version,
				    uint32_t id)
{
	struct weston_compositor *compositor = data;
	struct wl_resource *res =
		wl_resource_create(client,
				   &zwp_linux_explicit_synchronization_v1_interface,
				   version, id);

	if (res == NULL) {
		wl_client_post_no_memory(client);
		return;
	}

	wl_resource_set_implementation(res,
				       &linux_explicit_synchronization_implementation,
				       compositor, NULL);
}
/** Advertise linux_explicit_synchronization support
*
* Calling this initializes the zwp_linux_explicit_synchronization_v1
* protocol support, so that the interface will be advertised to clients.
* Essentially it creates a global. Do not call this function multiple times
* in the compositor's lifetime. There is no way to deinit explicitly, globals
* will be reaped when the wl_display gets destroyed.
*
* \param compositor The compositor to init for.
* \return Zero on success, -1 on failure.
*/
WL_EXPORT int
linux_explicit_synchronization_setup(struct weston_compositor *compositor)
{
	/* Advertise the global at version 2; libwayland reaps it together
	 * with the wl_display, so no explicit teardown is needed. */
	struct wl_global *global =
		wl_global_create(compositor->wl_display,
				 &zwp_linux_explicit_synchronization_v1_interface,
				 2, compositor,
				 bind_linux_explicit_synchronization);

	return global ? 0 : -1;
}
/** Resolve an internal compositor error by disconnecting the client.
*
* This function is used in cases when explicit synchronization
* turns out to be unusable and there is no fallback path.
*
* It is possible the fault is caused by a compositor bug, the underlying
* graphics stack bug or normal behaviour, or perhaps a client mistake.
* In any case, the options are to either composite garbage or nothing,
* or disconnect the client. This is a helper function for the latter.
*
* The error is sent as an INVALID_OBJECT error on the client's wl_display.
*
* \param resource The explicit synchronization related resource that is unusable.
* \param msg A custom error message attached to the protocol error.
*/
WL_EXPORT void
linux_explicit_synchronization_send_server_error(struct wl_resource *resource,
						 const char *msg)
{
	uint32_t id = wl_resource_get_id(resource);
	const char *class = wl_resource_get_class(resource);
	struct wl_client *client = wl_resource_get_client(resource);
	/* Object id 1 is always the client's wl_display. */
	struct wl_resource *display_resource = wl_client_get_object(client, 1);

	assert(display_resource);
	/* Posting a protocol error on wl_display is fatal for the client:
	 * it is disconnected after the error event is delivered. */
	wl_resource_post_error(display_resource,
			       WL_DISPLAY_ERROR_INVALID_OBJECT,
			       "linux_explicit_synchronization server error "
			       "with %s@%"PRIu32": %s",
			       class, id, msg);
}
| 3,042 |
323 | /*
* Copyright (c) 2014 <NAME> on behalf of AirPair.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.ultimate.camera.fragments;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ListAdapter;
import android.widget.RadioGroup;
import android.widget.TextView;
import android.widget.Toast;
import com.ultimate.camera.R;
import com.ultimate.camera.activities.CameraActivity;
import com.ultimate.camera.utilities.ImageUtil;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* This is an example of a fragment which can use the external Android camera to take
* a picture. It is important to remember to save the file URI where we want to save
* our picture into the bundle because this data will be cleared when the camera is loaded.
* The appropriate place to do this is in the Fragment's parent activity because there isn't a
* good "entry" point when the fragment returns to the foreground to retrieve the bundle info.
*
* Reference: http://developer.android.com/training/camera/photobasics.html
*
* Created by <NAME> (on behalf of AirPair.com) on 3/4/14.
*/
/**
 * Example fragment that captures a photo with the external Android camera app
 * via an ACTION_IMAGE_CAPTURE intent.
 *
 * <p>The target file URI must be persisted by the parent activity (see
 * {@link CameraActivity}) because the fragment may be destroyed while the
 * camera app is in the foreground; there is no reliable fragment-local hook
 * to restore that state on return.</p>
 *
 * Reference: http://developer.android.com/training/camera/photobasics.html
 *
 * Created by <NAME> (on behalf of AirPair.com) on 3/4/14.
 */
public class SimpleCameraIntentFragment extends BaseFragment implements Button.OnClickListener {

    /** Activity result key identifying our camera request in {@link #onActivityResult}. */
    static final int REQUEST_TAKE_PHOTO = 11111;

    // Image views showing the captured photo at full size and as a thumbnail.
    private ImageView mImageView;
    private ImageView mThumbnailImageView;

    /**
     * Default empty constructor, required so the framework can re-instantiate
     * the fragment (e.g. after configuration changes).
     */
    public SimpleCameraIntentFragment(){
        super();
    }

    /**
     * Static factory method.
     *
     * @param sectionNumber navigation section index, stored in the arguments bundle
     * @return a new, configured fragment instance
     */
    public static SimpleCameraIntentFragment newInstance(int sectionNumber) {
        SimpleCameraIntentFragment fragment = new SimpleCameraIntentFragment();
        Bundle args = new Bundle();
        args.putInt(ARG_SECTION_NUMBER, sectionNumber);
        fragment.setArguments(args);
        return fragment;
    }

    /**
     * Inflates the layout and wires up the "take picture" button.
     *
     * @param inflater layout inflater
     * @param container parent view group
     * @param savedInstanceState previously saved state, if any
     * @return the inflated fragment view
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_simple_camera_intent, container, false);

        // Cache the views the captured photo will be rendered into.
        mImageView = (ImageView) view.findViewById(R.id.imageViewFullSized);
        mThumbnailImageView = (ImageView) view.findViewById(R.id.imageViewThumbnail);

        Button takePictureButton = (Button) view.findViewById(R.id.button);
        takePictureButton.setOnClickListener(this);
        return view;
    }

    /**
     * Starts the camera by dispatching a camera intent.
     * Shows a toast and returns if the device has no camera.
     */
    protected void dispatchTakePictureIntent() {
        // Bail out early on devices without camera hardware.
        Context context = getActivity();
        PackageManager packageManager = context.getPackageManager();
        if (!packageManager.hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
            Toast.makeText(getActivity(), "This device does not have a camera.", Toast.LENGTH_SHORT)
                    .show();
            return;
        }

        // Camera exists? Then proceed...
        Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);

        // Ensure that there's a camera activity to handle the intent.
        CameraActivity activity = (CameraActivity) getActivity();
        if (takePictureIntent.resolveActivity(activity.getPackageManager()) != null) {
            // Create the File where the photo should go.
            // If you don't do this, you may get a crash in some devices.
            File photoFile = null;
            try {
                photoFile = createImageFile();
            } catch (IOException ex) {
                // Error occurred while creating the File.
                Toast toast = Toast.makeText(activity, "There was a problem saving the photo...", Toast.LENGTH_SHORT);
                toast.show();
            }
            // Continue only if the File was successfully created.
            if (photoFile != null) {
                Uri fileUri = Uri.fromFile(photoFile);
                // Persist the target location in the activity: the fragment
                // may be destroyed while the camera app is in the foreground.
                activity.setCapturedImageURI(fileUri);
                activity.setCurrentPhotoPath(fileUri.getPath());
                takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT,
                        activity.getCapturedImageURI());
                startActivityForResult(takePictureIntent, REQUEST_TAKE_PHOTO);
            }
        }
    }

    /**
     * The camera activity returns with the photo: register it with the media
     * gallery and display it, or report failure.
     *
     * @param requestCode the request the result belongs to
     * @param resultCode RESULT_OK on success
     * @param data intent payload (unused; the photo is written to our file)
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);

        // Ignore results not triggered by our camera request (previously any
        // foreign result produced a spurious "Image Capture Failed" toast).
        if (requestCode != REQUEST_TAKE_PHOTO) {
            return;
        }
        if (resultCode == Activity.RESULT_OK) {
            addPhotoToGallery();
            CameraActivity activity = (CameraActivity) getActivity();
            // Show the full sized image.
            setFullImageFromFilePath(activity.getCurrentPhotoPath(), mImageView);
            setFullImageFromFilePath(activity.getCurrentPhotoPath(), mThumbnailImageView);
        } else {
            Toast.makeText(getActivity(), "Image Capture Failed", Toast.LENGTH_SHORT)
                    .show();
        }
    }

    /**
     * Creates the (empty, uniquely named) image file the camera will write to.
     *
     * @return the created file in the public pictures directory
     * @throws IOException if the file could not be created
     */
    protected File createImageFile() throws IOException {
        // Timestamped name keeps successive captures from colliding.
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
        String imageFileName = "JPEG_" + timeStamp + "_";
        File storageDir = Environment.getExternalStoragePublicDirectory(
                Environment.DIRECTORY_PICTURES);
        File image = File.createTempFile(
                imageFileName, /* prefix */
                ".jpg",        /* suffix */
                storageDir     /* directory */
        );

        // Store the plain absolute path (no "file:" scheme): both
        // BitmapFactory.decodeFile() and new File(path) expect a raw
        // filesystem path, so the previous URI-prefixed value was unusable.
        CameraActivity activity = (CameraActivity) getActivity();
        activity.setCurrentPhotoPath(image.getAbsolutePath());
        return image;
    }

    /**
     * Add the picture to the photo gallery.
     * Must be called on all camera images or they will
     * disappear once taken.
     */
    protected void addPhotoToGallery() {
        Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
        CameraActivity activity = (CameraActivity) getActivity();
        File f = new File(activity.getCurrentPhotoPath());
        Uri contentUri = Uri.fromFile(f);
        mediaScanIntent.setData(contentUri);
        this.getActivity().sendBroadcast(mediaScanIntent);
    }

    /**
     * Deal with button clicks.
     *
     * @param v the clicked view
     */
    @Override
    public void onClick(View v) {
        dispatchTakePictureIntent();
    }

    /**
     * Scale the photo down and fit it to our image views.
     *
     * "Drastically increases performance" to set images using this technique.
     * Read more: http://developer.android.com/training/camera/photobasics.html
     *
     * @param imagePath absolute filesystem path of the photo
     * @param imageView destination view
     */
    private void setFullImageFromFilePath(String imagePath, ImageView imageView) {
        // Get the dimensions of the View.
        int targetW = imageView.getWidth();
        int targetH = imageView.getHeight();

        // Read only the bitmap's bounds first (cheap, no pixel allocation).
        BitmapFactory.Options bmOptions = new BitmapFactory.Options();
        bmOptions.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(imagePath, bmOptions);
        int photoW = bmOptions.outWidth;
        int photoH = bmOptions.outHeight;

        // Determine how much to scale down the image.  Guard against the view
        // not being laid out yet (width/height of 0 previously caused a
        // divide-by-zero crash) and clamp to at least 1 ("no scaling").
        int scaleFactor = 1;
        if (targetW > 0 && targetH > 0) {
            scaleFactor = Math.max(1, Math.min(photoW / targetW, photoH / targetH));
        }

        // Decode the image file into a Bitmap sized to fill the View.
        bmOptions.inJustDecodeBounds = false;
        bmOptions.inSampleSize = scaleFactor;
        bmOptions.inPurgeable = true; // deprecated and ignored on API 21+; kept for older devices

        Bitmap bitmap = BitmapFactory.decodeFile(imagePath, bmOptions);
        imageView.setImageBitmap(bitmap);
    }
}
| 3,558 |
342 | <filename>kite-data/kite-data-hbase/src/main/java/org/kitesdk/data/hbase/avro/io/ColumnEncoder.java
/**
* Copyright 2013 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kitesdk.data.hbase.avro.io;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.Utf8;
import org.apache.hadoop.hbase.util.Bytes;
/* An Avro Encoder implementation used for encoding Avro
 * instances to HBase columns. This is basically an
 * Avro BinaryEncoder with custom encoding of int,
 * long, and String types.
 *
 * int and long are serialized in standard 4 and 8 byte
 * format (instead of Avro's ZigZag encoding) so that
 * we can use HBase's atomic increment functionality on
 * columns.
 *
 * Strings are encoded as UTF-8 bytes. This is consistent
 * with HBase, and will allow appends in the future.
 */
public class ColumnEncoder extends Encoder {

  // Delegate handling every type that keeps the standard Avro binary format.
  private final BinaryEncoder wrappedEncoder;
  // Raw column stream, written directly for the custom int/long/String formats.
  private final OutputStream out;

  /** Creates an encoder writing to {@code out}. */
  public ColumnEncoder(OutputStream out) {
    this.out = out;
    wrappedEncoder = new EncoderFactory().binaryEncoder(out, null);
  }

  /** Creates an encoder writing to {@code out}, reusing the internal buffers of {@code reuse}. */
  public ColumnEncoder(OutputStream out, ColumnEncoder reuse) {
    this.out = out;
    wrappedEncoder = new EncoderFactory().binaryEncoder(out, reuse.wrappedEncoder);
  }

  @Override
  public void flush() throws IOException {
    wrappedEncoder.flush();
  }

  @Override
  public void writeNull() throws IOException {
    wrappedEncoder.writeNull();
  }

  @Override
  public void writeBoolean(boolean b) throws IOException {
    wrappedEncoder.writeBoolean(b);
  }

  /** Custom format: fixed 4-byte encoding via HBase Bytes, not Avro ZigZag. */
  @Override
  public void writeInt(int n) throws IOException {
    out.write(Bytes.toBytes(n));
  }

  /** Custom format: fixed 8-byte encoding via HBase Bytes, not Avro ZigZag. */
  @Override
  public void writeLong(long n) throws IOException {
    out.write(Bytes.toBytes(n));
  }

  @Override
  public void writeFloat(float f) throws IOException {
    wrappedEncoder.writeFloat(f);
  }

  @Override
  public void writeDouble(double d) throws IOException {
    wrappedEncoder.writeDouble(d);
  }

  /** Custom format: raw UTF-8 bytes with no length prefix, matching HBase. */
  @Override
  public void writeString(Utf8 utf8) throws IOException {
    out.write(utf8.getBytes(), 0, utf8.getByteLength());
  }

  // All remaining types delegate to the standard Avro binary encoding.

  @Override
  public void writeBytes(ByteBuffer bytes) throws IOException {
    wrappedEncoder.writeBytes(bytes);
  }

  @Override
  public void writeBytes(byte[] bytes, int start, int len) throws IOException {
    wrappedEncoder.writeBytes(bytes, start, len);
  }

  @Override
  public void writeFixed(byte[] bytes, int start, int len) throws IOException {
    wrappedEncoder.writeFixed(bytes, start, len);
  }

  @Override
  public void writeEnum(int e) throws IOException {
    wrappedEncoder.writeEnum(e);
  }

  @Override
  public void writeArrayStart() throws IOException {
    wrappedEncoder.writeArrayStart();
  }

  @Override
  public void setItemCount(long itemCount) throws IOException {
    wrappedEncoder.setItemCount(itemCount);
  }

  @Override
  public void startItem() throws IOException {
    wrappedEncoder.startItem();
  }

  @Override
  public void writeArrayEnd() throws IOException {
    wrappedEncoder.writeArrayEnd();
  }

  @Override
  public void writeMapStart() throws IOException {
    wrappedEncoder.writeMapStart();
  }

  @Override
  public void writeMapEnd() throws IOException {
    wrappedEncoder.writeMapEnd();
  }

  @Override
  public void writeIndex(int unionIndex) throws IOException {
    wrappedEncoder.writeIndex(unionIndex);
  }
}
| 1,259 |
1,093 | <reponame>akmeraki/deep-learning-
"""Scripts to run models implemented in yadlt from the command line.""" | 31 |
14,668 | <reponame>chromium/chromium
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_SHARED_HIGHLIGHTING_CORE_COMMON_SHARED_HIGHLIGHTING_METRICS_H_
#define COMPONENTS_SHARED_HIGHLIGHTING_CORE_COMMON_SHARED_HIGHLIGHTING_METRICS_H_
#include "base/time/time.h"
#include "services/metrics/public/cpp/ukm_recorder.h"
#include "services/metrics/public/cpp/ukm_source_id.h"
#include "url/gurl.h"
namespace shared_highlighting {
// Used to indicate whether link generation completed successfully.
// Java counterpart will be auto-generated for this enum.
// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.chrome.browser.share.link_to_text
enum class LinkGenerationStatus { kSuccess = 0, kFailure = 1 };
// Used to indicate whether generated link was ready at the time of the request.
// Java counterpart will be auto-generated for this enum.
// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.chrome.browser.share.link_to_text
enum class LinkGenerationReadyStatus {
kRequestedBeforeReady = 0,
kRequestedAfterReady = 1
};
// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
// The type of copied Shared Highlighting Link on Desktop.
// Update corresponding |LinkGenerationCopiedLinkType| in enums.xml.
enum class LinkGenerationCopiedLinkType {
kCopiedFromNewGeneration = 0,
kCopiedFromExistingHighlight = 1,
kMaxValue = kCopiedFromExistingHighlight
};
// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
// The type of errors that can happen during link generation.
// Update corresponding |LinkGenerationError| in enums.xml.
// Java counterpart will be auto-generated for this enum.
// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.chrome.browser.share.link_to_text
enum class LinkGenerationError {
kNone = -1,
kIncorrectSelector = 0,
kNoRange = 1,
kNoContext = 2,
kContextExhausted = 3,
kContextLimitReached = 4,
kEmptySelection = 5,
// Android specific.
kTabHidden = 6,
kOmniboxNavigation = 7,
kTabCrash = 8,
// Catch-all bucket.
kUnknown = 9,
// Selection happened on iframe.
kIFrame = 10,
// Timed-out waiting for a link to be generated.
kTimeout = 11,
// Link generation is not triggered because current page is not supported.
// Recorded on Android/Desktop.
kBlockList = 12,
// Link to text cannot be requested because connection with the renderer side
// cannot be established. Android only.
kNoRemoteConnection = 13,
kMaxValue = kNoRemoteConnection
};
// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
// The different sources from which a text fragment URL can come from.
enum class TextFragmentLinkOpenSource {
kUnknown = 0,
kSearchEngine = 1,
kMaxValue = kSearchEngine,
};
// Records the type of link generation that was copied on desktop.
void LogDesktopLinkGenerationCopiedLinkType(LinkGenerationCopiedLinkType type);
// Records the reason why the link generation failed.
void LogLinkGenerationErrorReason(LinkGenerationError reason);
// Records the reason why the link to text was not available for the user.
void LogLinkRequestedErrorReason(LinkGenerationError reason);
// Records whether the link generation attempt was successful or not.
void LogLinkGenerationStatus(LinkGenerationStatus status);
// Records whether the generated link to text was available for the user.
void LogLinkRequestedStatus(LinkGenerationStatus status);
// Records metrics when successfully generated link to text was available for
// the user.
void LogRequestedSuccessMetrics();
// Records metrics when link to text was not available for the user.
void LogRequestedFailureMetrics(LinkGenerationError error);
// Records whether an individual text fragment could not be scrolled to because
// there was an |ambiguous_match| (generally because more than one matching
// passage was found).
void LogTextFragmentAmbiguousMatch(bool ambiguous_match);
// Records the source of the text fragment based on its |referrer|. E.g. a
// search engine.
void LogTextFragmentLinkOpenSource(const GURL& referrer);
// Records the success rate, which is the number of |matches| over number of
// |text_fragments| in the url param.
void LogTextFragmentMatchRate(int matches, int text_fragments);
// Records the total |count| of text fragment selectors in the URL param.
void LogTextFragmentSelectorCount(int count);
// Records the latency for successfully generating a link.
void LogGenerateSuccessLatency(base::TimeDelta latency);
// Records the latency for failing to generate a link.
void LogGenerateErrorLatency(base::TimeDelta latency);
// Records a UKM event for opening a link with text fragments. |source_id|
// refers to the navigation action's ID, |referrer| will be used to record the
// source and |success| should be true only if fragments highlighting was a
// complete success. This event can only be recorded once per navigation, and
// this function will record using the static Recorder instance. This API can
// only be used when calling from the browser process, otherwise no event will
// be recorded.
void LogLinkOpenedUkmEvent(ukm::SourceId source_id,
const GURL& referrer,
bool success);
// Records a UKM event for opening a link with text fragments. |source_id|
// refers to the navigation action's ID, |referrer| will be used to record the
// source and |success| should be true only if fragments highlighting was a
// complete success. This event can only be recorded once per navigation, and
// will record using the given custom |recorder|. Prefer this API when calling
// from a process other than the browser process.
void LogLinkOpenedUkmEvent(ukm::UkmRecorder* recorder,
ukm::SourceId source_id,
const GURL& referrer,
bool success);
// Records a UKM event for successfully generating a link with text fragments.
// |source_id| refers to the current frame, and this function will record using
// the static Recorder. This API can only be used when calling from the browser
// process, otherwise no event will be recorded.
void LogLinkGeneratedSuccessUkmEvent(ukm::SourceId source_id);
// Records a UKM event for successfully generating a link with text fragments.
// |source_id| refers to the current frame. This function will record using the
// given custom |recorder|. Prefer this API when calling from a process other
// than the browser process.
void LogLinkGeneratedSuccessUkmEvent(ukm::UkmRecorder* recorder,
ukm::SourceId source_id);
// Records a UKM event for failing to generate a link with text fragments.
// |source_id| refers to the current frame and |reason| highlights the cause of
// the failure. This function will record using the static Recorder. This API
// can only be used when calling from the browser process, otherwise no event
// will be recorded.
void LogLinkGeneratedErrorUkmEvent(ukm::SourceId source_id,
LinkGenerationError reason);
// Records a UKM event for failing to generate a link with text fragments.
// |source_id| refers to the current frame and |reason| highlights the cause of
// the failure. This function will record using the given custom |recorder|.
// Prefer this API when calling from a process other than the browser process.
void LogLinkGeneratedErrorUkmEvent(ukm::UkmRecorder* recorder,
ukm::SourceId source_id,
LinkGenerationError reason);
// Records whether link to text was requested before or after link generation
// was complete with corresponding success status.
void LogLinkRequestedBeforeStatus(LinkGenerationStatus status,
LinkGenerationReadyStatus ready_status);
} // namespace shared_highlighting
#endif // COMPONENTS_SHARED_HIGHLIGHTING_CORE_COMMON_SHARED_HIGHLIGHTING_METRICS_H_
| 2,448 |
357 | /*
* Copyright (c) 2012-2015 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.vmware.identity.interop.ldap;
import java.util.Arrays;
import java.util.List;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
/**
 * JNA mapping of the LDAP {@code berval} structure: a byte count plus a
 * pointer to the raw data.
 *
 * Do not use this class externally. It is intended for internal use by the
 * Platform package, but must stay public for interop.
 */
public final class BerValNative extends Structure
{
    public int length;
    public Pointer value;

    /** Creates an empty value and syncs the zeroed fields to native memory. */
    public BerValNative()
    {
        length = 0;
        value = null;
        write();
    }

    /** Maps this instance over existing native memory and reads the fields. */
    public BerValNative(Pointer p)
    {
        useMemory(p);
        read();
    }

    /** Creates a value from explicit fields and syncs them to native memory. */
    public BerValNative(int length, Pointer value)
    {
        this.length = length;
        this.value = value;
        write();
    }

    /**
     * Reads {@code size} native pointers starting at {@code p} and wraps each
     * pointed-to berval structure.
     */
    public static BerValNative[] fromPointerArray(Pointer p, int size)
    {
        final Pointer[] pointers = p.getPointerArray(0, size);
        final BerValNative[] values = new BerValNative[size];
        for (int i = 0; i < size; i++)
        {
            values[i] = new BerValNative(pointers[i]);
        }
        return values;
    }

    @Override
    protected List<String> getFieldOrder()
    {
        return Arrays.asList("length", "value");
    }
}
| 676 |
521 | /*
* Copyright (C) 2007 <NAME> <<EMAIL>>.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of the
* License, or any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
FILE_LICENCE ( GPL2_OR_LATER );
#include <stdint.h>
#include <string.h>
#include <errno.h>
#include <realmode.h>
#include <pnpbios.h>
/** @file
*
* PnP BIOS
*
*/
/** PnP BIOS structure */
struct pnp_bios {
	/** Signature
	 *
	 * Must be equal to @c PNP_BIOS_SIGNATURE
	 */
	uint32_t signature;
	/** Version as BCD (e.g. 1.0 is 0x10) */
	uint8_t version;
	/** Length of this structure */
	uint8_t length;
	/** System capabilities */
	uint16_t control;
	/** Checksum
	 *
	 * Chosen so that all bytes of the structure sum to zero (mod 256);
	 * see is_pnp_bios() for the verification.
	 */
	uint8_t checksum;
} __attribute__ (( packed ));

/** Signature for a PnP BIOS structure
 *
 * Spells "$PnP" when the 32-bit value is stored little-endian.
 */
#define PNP_BIOS_SIGNATURE \
	( ( '$' << 0 ) + ( 'P' << 8 ) + ( 'n' << 16 ) + ( 'P' << 24 ) )
/**
 * Test address for PnP BIOS structure
 *
 * Checks both the "$PnP" signature and the structure checksum (all
 * bytes of the structure must sum to zero mod 256).
 *
 * @v offset		Offset within BIOS segment to test
 * @ret rc		Return status code
 */
static int is_pnp_bios ( unsigned int offset ) {
	union {
		struct pnp_bios pnp_bios;
		uint8_t bytes[256]; /* 256 is maximum length possible */
	} u;
	size_t len;
	unsigned int i;
	uint8_t sum = 0;

	/* Read start of header and verify signature */
	copy_from_real ( &u.pnp_bios, BIOS_SEG, offset, sizeof ( u.pnp_bios ));
	if ( u.pnp_bios.signature != PNP_BIOS_SIGNATURE )
		return -EINVAL;

	/* Read whole header and verify checksum.  The self-reported length
	 * field bounds the read; it can never exceed the 256-byte union. */
	len = u.pnp_bios.length;
	copy_from_real ( &u.bytes, BIOS_SEG, offset, len );
	for ( i = 0 ; i < len ; i++ ) {
		sum += u.bytes[i];
	}
	if ( sum != 0 )
		return -EINVAL;

	DBG ( "Found PnP BIOS at %04x:%04x\n", BIOS_SEG, offset );

	return 0;
}
/**
 * Locate Plug-and-Play BIOS
 *
 * @ret pnp_offset	Offset of PnP BIOS structure within BIOS segment
 *
 * Scans the BIOS segment at every 16-byte boundary.  The PnP BIOS
 * structure will be at BIOS_SEG:pnp_offset.  The outcome is cached
 * across calls; if no PnP BIOS is found, -1 is returned.
 */
int find_pnp_bios ( void ) {
	static int pnp_offset = 0;
	unsigned int offset;

	/* Reuse the result of a previous scan, if any */
	if ( pnp_offset )
		return pnp_offset;

	/* Probe each paragraph boundary within the BIOS segment */
	for ( offset = 0 ; offset < 0x10000 ; offset += 0x10 ) {
		if ( is_pnp_bios ( offset ) == 0 ) {
			pnp_offset = offset;
			return pnp_offset;
		}
	}

	/* Remember the failure so subsequent calls do not rescan */
	pnp_offset = -1;
	return pnp_offset;
}
| 989 |
643 | <filename>ReactSkia/views/common/RSkImageUtils.cpp
/*
* Copyright (C) 1994-2021 OpenTV, Inc. and Nagravision S.A.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
#include <math.h>
#include "include/core/SkRect.h"
#include "ReactSkia/views/common/RSkImageUtils.h"
namespace facebook {
namespace react {
namespace RSkImageUtils {
/**
 * Computes the rectangle the source image should be drawn into so that it
 * honours the requested resize mode within targetRect.
 *
 * @param srcSize    intrinsic size of the source image
 * @param targetRect rectangle of the destination view
 * @param resizeMode Cover / Contain / Center / Repeat / Stretch semantics
 * @return the destination rectangle, positioned inside targetRect
 *
 * NOTE(review): assumes srcSize.height and targetRect.height() are non-zero;
 * a zero height would divide by zero below - confirm callers guarantee this.
 */
SkRect computeTargetRect (Size srcSize,SkRect targetRect,ImageResizeMode resizeMode) {

   float startX{0}, startY{0}, width{0}, height{0};
   float srcAR = srcSize.width / srcSize.height;              /* source aspect ratio */
   float targetAR = targetRect.width() / targetRect.height(); /* target aspect ratio */

   switch (resizeMode) {

      case ImageResizeMode::Cover:
         /* Scale uniformly so the image fully covers the target; the
          * overflowing dimension is centred (and clipped by the caller). */
         if (targetAR <= srcAR) { /* target is taller than the source image */
            height = targetRect.height();
            width = height * srcAR;
            startX = targetRect.x() + ((targetRect.width() - width) / 2);
            startY = targetRect.y();
         } else { /* target is wider than the source image */
            width = targetRect.width();
            height = width / srcAR;
            startX = targetRect.x();
            startY = targetRect.y() + ((targetRect.height() - height) / 2);
         }
         return SkRect::MakeXYWH(startX, startY, width, height);

      case ImageResizeMode::Contain:
         /* Scale uniformly so the whole image fits inside the target,
          * letterboxing the shorter dimension. */
         if (targetAR <= srcAR) { /* target is taller than the source image */
            width = targetRect.width();
            height = width / srcAR;
         } else { /* target is wider than the source image */
            height = targetRect.height();
            width = height * srcAR;
         }
         startX = targetRect.x() + ((targetRect.width() - width) / 2);
         startY = targetRect.y() + ((targetRect.height() - height) / 2);
         return SkRect::MakeXYWH(startX, startY, width, height);

      case ImageResizeMode::Center:
         /* Keep the natural size, shrinking (aspect-preserving) only if the
          * image exceeds the target in either dimension, then centre it. */
         width = srcSize.width;
         height = srcSize.height;
         if (srcSize.height > targetRect.height()) {
            width = targetRect.width();
            height = width / srcAR;
         }
         if (srcSize.width > targetRect.width()) {
            height = targetRect.height();
            width = height * srcAR;
         }
         startX = targetRect.x() + ((targetRect.width() - width) / 2);
         startY = targetRect.y() + ((targetRect.height() - height) / 2);
         return SkRect::MakeXYWH(startX, startY, width, height);

      case ImageResizeMode::Repeat:
         /* Tile from the top-left corner at natural size when it fits ... */
         if ((srcSize.width <= targetRect.width()) && (srcSize.height <= targetRect.height())) {
            return SkRect::MakeXYWH(targetRect.x(), targetRect.y(), srcSize.width, srcSize.height);
         }
         /* ... otherwise apply the Contain sizing logic first. */
         if (targetAR <= srcAR) { /* target taller than the source image */
            width = targetRect.width();
            height = width / srcAR;
         } else { /* target wider than the source image */
            height = targetRect.height();
            width = height * srcAR;
         }
         return SkRect::MakeXYWH(targetRect.x(), targetRect.y(), width, height);

      case ImageResizeMode::Stretch:
         /* Fill the target exactly, ignoring the aspect ratio. */
         return targetRect;
   }

   /* Defensive fallback: previously control could fall off the end of this
    * non-void function for an unhandled mode value, which is undefined
    * behaviour.  Treat unknown modes like Stretch. */
   return targetRect;
}
} //RSkImageUtils
} // namespace react
} // namespace facebook
| 1,336 |
1,131 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.backup.dao;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.apache.cloudstack.api.response.BackupOfferingResponse;
import org.apache.cloudstack.backup.BackupOffering;
import org.apache.cloudstack.backup.BackupOfferingVO;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
public class BackupOfferingDaoImpl extends GenericDaoBase<BackupOfferingVO, Long> implements BackupOfferingDao {

    @Inject
    DataCenterDao dataCenterDao;

    // Reusable search template; concrete criteria are instantiated per query
    // via create() in the find* methods below.
    private SearchBuilder<BackupOfferingVO> backupPoliciesSearch;

    public BackupOfferingDaoImpl() {
    }

    /** Builds the shared search template for name / zone / external-id lookups. */
    @PostConstruct
    protected void init() {
        backupPoliciesSearch = createSearchBuilder();
        backupPoliciesSearch.and("name", backupPoliciesSearch.entity().getName(), SearchCriteria.Op.EQ);
        backupPoliciesSearch.and("zone_id", backupPoliciesSearch.entity().getZoneId(), SearchCriteria.Op.EQ);
        backupPoliciesSearch.and("external_id", backupPoliciesSearch.entity().getExternalId(), SearchCriteria.Op.EQ);
        backupPoliciesSearch.done();
    }

    /** Converts a BackupOffering entity into its API response representation. */
    @Override
    public BackupOfferingResponse newBackupOfferingResponse(BackupOffering offering) {
        DataCenterVO zone = dataCenterDao.findById(offering.getZoneId());
        BackupOfferingResponse response = new BackupOfferingResponse();
        response.setId(offering.getUuid());
        response.setName(offering.getName());
        response.setDescription(offering.getDescription());
        response.setExternalId(offering.getExternalId());
        response.setUserDrivenBackups(offering.isUserDrivenBackupAllowed());
        // Zone fields are only populated when the referenced zone still exists.
        if (zone != null) {
            response.setZoneId(zone.getUuid());
            response.setZoneName(zone.getName());
        }
        response.setCreated(offering.getCreated());
        response.setObjectName("backupoffering");
        return response;
    }

    /** Looks up an offering by provider-side id, optionally scoped to a zone. */
    @Override
    public BackupOffering findByExternalId(String externalId, Long zoneId) {
        SearchCriteria<BackupOfferingVO> sc = backupPoliciesSearch.create();
        sc.setParameters("external_id", externalId);
        // zoneId is optional here; omit the condition when it is null.
        if (zoneId != null) {
            sc.setParameters("zone_id", zoneId);
        }
        return findOneBy(sc);
    }

    /** Looks up an offering by display name within a zone. */
    @Override
    public BackupOffering findByName(String name, Long zoneId) {
        SearchCriteria<BackupOfferingVO> sc = backupPoliciesSearch.create();
        sc.setParameters("name", name);
        // NOTE(review): unlike findByExternalId, zoneId is set unconditionally
        // here; a null zoneId would be passed straight into the criteria -
        // confirm callers never pass null.
        sc.setParameters("zone_id", zoneId);
        return findOneBy(sc);
    }
}
| 1,191 |
2,663 | <reponame>lobap/Superalgos<gh_stars>1000+
{
"topic": "Data Mining",
"pageNumber": 1,
"type": "Data Mining - Introduction",
"definition": {
"text": "Data Mining in Superalgos means extracting data from external sources and processing that data in order to have it ready for consumption by trading strategies.",
"updated": 1622200589433
},
"paragraphs": [
{
"style": "Text",
"text": "Superalgos offers an extensive set of Data Mining tools. These tools allow users to fetch data from external sources, process that data into more elaborate datasets, plot the datasets within the Charting Space, feed ML Bot's models, and ultimately consume the data within their trading strategies.",
"updated": 1622200769538
},
{
"style": "Title",
"text": "The Big Picture",
"updated": 1622188702833
},
{
"style": "Text",
"text": "All of the data inside the Superalgos system has an origin somewhere outside the system, usually somewhere on the Internet. There are currently 3 ways to bring data into the system:",
"updated": 1622200785987
},
{
"style": "List",
"text": "By coding an Algorithm: with this method, users who have developer skills can extract data from any source imaginable. The way to do it is by implementing a new process within the already existing Sensor Bot. In other words, the sensor bot allows users to write code that extracts data from different places outside of the system. This method is ideal for when it is not possible to use the second method listed below. An example of such algorithms is the Historic-OHLCVs which uses the CCXT open source library to extract one minute candles from any exchange supported by that library.",
"updated": 1622201355726
},
{
"style": "List",
"text": "Without coding an Algorithm: (available since Beta 10) via the API Data Fetcher Bot. This method involves designing an API Map which is later feed to an API Data Fetcher Bot at runtime. From there it can fetch data from APIs without needing the user to write a single line of code.",
"updated": 1622201515378
},
{
"style": "List",
"text": "External processing: it is also possible to gather data and process it outside the system. Once data has been processed in this manner, it can be put in the right folder, with the right format, and the system will pick it up from there. This use case is not the most popular, but it is a valid possibility.",
"updated": 1622201656015
},
{
"style": "Text",
"text": "Once data is within the system, it is then processed for different reasons by Indicator Bots. Some of the processed data might be plotted, some might feed machine learning bots, and some might be consumed by a Trading Strategy.",
"updated": 1622201764822
}
]
} | 1,033 |
3,402 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.util.Shell;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.engine.spark.LocalWithSparkSessionTest;
import org.apache.kylin.job.exception.SchedulerException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Verifies that the Sparder Spark application name defaults to the
 * server-derived value and can be overridden via
 * {@code kylin.query.sparder-context.app-name} after a Spark restart.
 */
public class SparderAppNameTest extends LocalWithSparkSessionTest {

    private static final Logger logger = LoggerFactory.getLogger(SparderAppNameTest.class);

    @BeforeClass
    public static void beforeClass() {
    }

    @Override
    @Before
    public void setup() throws SchedulerException {
        super.setup();
        if (Shell.MAC)
            System.setProperty("org.xerial.snappy.lib.name", "libsnappyjava.jnilib");//for snappy
        KylinConfig conf = KylinConfig.getInstanceFromEnv();
        // Run Spark locally so the test needs no cluster.
        conf.setProperty("kylin.query.spark-conf.spark.master", "local");
        SparderContext.getOriginalSparkSession();
    }

    @After
    public void after() {
        SparderContext.stopSpark();
        super.after();
    }

    @Test
    public void testThreadSparkSession() {
        // Default app name is derived from the server address.
        String appName = SparderContext.getOriginalSparkSession().sparkContext().getConf()
                .get("spark.app.name");
        Assert.assertTrue(StringUtils.isNotBlank(appName));
        // assertEquals (instead of assertTrue(equals)) reports both values on failure.
        Assert.assertEquals("sparder_on_localhost-7070", appName);

        // Overriding the config and restarting Spark must pick up the new name.
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        config.setProperty("kylin.query.sparder-context.app-name", "test-sparder-app-name");
        SparderContext.restartSpark();
        appName = SparderContext.getOriginalSparkSession().sparkContext().getConf()
                .get("spark.app.name");
        Assert.assertTrue(StringUtils.isNotBlank(appName));
        Assert.assertEquals("test-sparder-app-name", appName);

        // Reset the override so later tests are unaffected.
        config.setProperty("kylin.query.sparder-context.app-name", "");
    }
}
| 1,121 |
2,692 | <gh_stars>1000+
#!/usr/bin/env python
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from typing import Set, Tuple
import pytest
from onefuzztypes.primitives import Region
from __app__.onefuzzlib.azure.nsg import ok_to_delete
# Each case: (active regions, NSG region, NSG name, expected result)
NsgOkToDeleteTestCase = Tuple[Set[Region], str, str, bool]

NSG_OK_TO_DELETE_TEST_CASES = [
    # OK to delete
    (set([Region("def"), Region("ghk")]), "abc", "abc", True),
    # Not OK to delete
    # region set has same region as NSG
    (set([Region("abc"), Region("def"), Region("ghk")]), "abc", "abc", False),
    # NSG region does not match its name
    (set([Region("abc"), Region("def"), Region("ghk")]), "abc", "cba", False),
    # NSG name matches neither its region nor any active region
    (set([Region("def"), Region("ghk")]), "abc", "cba", False),
]
@pytest.mark.parametrize("nsg_ok_to_delete_test_case", NSG_OK_TO_DELETE_TEST_CASES)
def test_is_ok_to_delete_nsg(nsg_ok_to_delete_test_case: NsgOkToDeleteTestCase) -> None:
    """Checks whether an NSG may be deleted given the set of active regions."""
    active_regions, nsg_region, nsg_name, expected = nsg_ok_to_delete_test_case
    assert ok_to_delete(active_regions, nsg_region, nsg_name) == expected
| 445 |
872 | package cn.zealon.readingcloud.book.controller;
import cn.zealon.readingcloud.book.service.BookService;
import cn.zealon.readingcloud.book.vo.BookVO;
import cn.zealon.readingcloud.common.pojo.book.Book;
import cn.zealon.readingcloud.common.result.Result;
import io.swagger.annotations.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Book query REST endpoints.
 *
 * @author: zealon
 * @since: 2019/4/3
 */
@Api(description = "图书查询接口")
@RestController
@RequestMapping("book")
public class BookController {

    @Autowired
    private BookService bookService;

    /** Looks up a book's basic record by its id. */
    @ApiOperation(value = "查询图书基本信息" , httpMethod = "GET")
    @ApiImplicitParams({
            @ApiImplicitParam(paramType = "query", name = "bookId", value = "图书ID", dataType = "String")
    })
    @ApiResponses({@ApiResponse(code = 200, message = "", response = Book.class)})
    @GetMapping("/getBookById")
    public Result<Book> getBookById(String bookId){
        return bookService.getBookById(bookId);
    }

    /** Returns the full detail view (Book plus derived fields) for a book id. */
    @ApiOperation(value = "获取图书详情" , httpMethod = "GET")
    @ApiImplicitParams({
            @ApiImplicitParam(paramType = "query", name = "bookId", value = "图书ID", dataType = "String")
    })
    @ApiResponses({@ApiResponse(code = 200, message = "", response = Book.class)})
    @GetMapping("/details")
    public Result<BookVO> getBookDetails(String bookId){
        return bookService.getBookDetails(bookId);
    }
}
854 | __________________________________________________________________________________________________
sample 52 ms submission
class Solution
{
public:
    /**
     * Kadane's algorithm: maximum subarray sum of arr, floored at 0 (an
     * empty subarray is allowed, so a fully negative array yields 0).
     */
    long long maxSum(vector<int>& arr)
    {
        long long best = INT_MIN, curr = 0LL;
        for (int i = 0; i < (int)arr.size(); i++)
        {
            curr += arr[i];
            best = max(best, curr);
            if (curr < 0)
                curr = 0;   // restart: a negative running sum never helps
        }
        return best < 0 ? 0 : best;
    }

    int MOD = 1e9 + 7;

    /**
     * Maximum subarray sum of arr concatenated k times, modulo 1e9+7.
     *
     * Candidates: (a) the best subarray inside a single copy, and for
     * k >= 2, (b) the best suffix of one copy joined to the best prefix of
     * a later copy, plus all middle copies when the array total is positive.
     */
    int kConcatenationMaxSum(vector<int>& arr, int k)
    {
        // (a) best subarray fully inside one copy.
        long long ans = maxSum(arr);

        // Best prefix sum (pp) and best suffix sum (ss) of one copy,
        // both floored at 0 (the empty prefix/suffix).
        long long pp = 0, ss = 0, pSum = 0, sSum = 0;
        for (int i = 0; i < (int)arr.size(); i++)
        {
            sSum += arr[arr.size() - 1 - i];
            pSum += arr[i];
            ss = max(ss, sSum);
            pp = max(pp, pSum);
        }
        long long sum = max(0LL, pSum);  // pSum now holds the full array total

        // (b) is only valid when at least two copies exist: for k == 1 the
        // chosen prefix and suffix overlap inside the single copy, which
        // previously over-counted (e.g. [5,-10,5], k=1 gave 10 instead of 5).
        if (k >= 2)
            ans = max(ans, pp + ss + (k - 2) * sum);

        ans %= MOD;
        return ans;
    }
};
__________________________________________________________________________________________________
sample 56 ms submission
class Solution {
public:
    /**
     * Maximum subarray sum of arr concatenated k times, modulo 1e9+7.
     * Uses prefix sums: the single-copy best (Kadane via running minimum
     * prefix), and for k >= 2 the best prefix + best suffix bridge with
     * all-positive middle copies.
     */
    int kConcatenationMaxSum(vector<int>& arr, int k) {
        const int n = arr.size();
        vector<long long> prefix(n + 1, 0);

        long long best_prefix = 0;   // max prefix sum seen so far
        long long min_prefix = 0;    // min prefix sum seen so far
        long long kadane = 0;        // best subarray within one copy
        long long running_min = 0;   // min prefix up to current position

        for (int i = 0; i < n; ++i) {
            prefix[i + 1] = prefix[i] + arr[i];
            best_prefix = max(best_prefix, prefix[i + 1]);
            min_prefix = min(min_prefix, prefix[i + 1]);
            running_min = min(running_min, min_prefix);
            kadane = max(kadane, prefix[i + 1] - running_min);
        }

        kadane = kadane % 1000000007;
        if (k == 1) {
            return kadane;
        }

        // Suffix of one copy (total minus the minimum prefix) joined to the
        // best prefix of a later copy.
        long long ans = best_prefix + (prefix[n] - min_prefix);
        if (prefix[n] > 0) {
            // Positive copies in the middle always help.
            ans += prefix[n] * (k - 2);
            ans = ans % 1000000007;
        }
        return max(ans, kadane);
    }
};
__________________________________________________________________________________________________
| 1,133 |
398 | <reponame>bwcxyk/openjdk-docker
# ------------------------------------------------------------------------------
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
import requests
import json
import copy
import argparse
import logging
from logging import config
from datetime import datetime, timedelta
from pathlib import Path
# Module-level logger; handlers/formatters are attached by load_logging_config().
LOGGER = logging.getLogger(__name__)
def load_logging_config(debug, file_path):
    """
    Loads and configures a logging config
    :param debug: True or False if debugging for console should be turned on
    :param file_path: File path to store the log file (directory prefix)
    :return: None
    """
    # Two handlers: a DEBUG-level file handler (root logger) and an
    # INFO-level console handler (attached to __main__ only).
    logging_config = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "debugFormater": {
                "format": "%(asctime)s.%(msecs)03d %(levelname)s:%(message)s",
                "datefmt": "%Y-%m-%d %H:%M:%S"
            },
            "simpleFormater": {
                "format": "%(message)s"
            }
        },
        "handlers": {
            "file": {
                "class": "logging.FileHandler",
                "formatter": "debugFormater",
                "level": "DEBUG",
                "filename": "adoptopenjdk_scanner.log"
            },
            "console": {
                "class": "logging.StreamHandler",
                "formatter": "simpleFormater",
                "level": "INFO",
                "stream": "ext://sys.stdout"
            }
        },
        "loggers": {
            "": {
                "level": "DEBUG",
                "handlers": ["file"]
            },
            "__main__": {
                "level": "DEBUG",
                "handlers": ["console"],
                "propagate": True
            }
        }
    }
    # If debugging, then switch the console format to be verbose
    if debug:
        logging_config["handlers"]["console"]["formatter"] = "debugFormater"
    # If a file path is passed in then handle the prefix and append the file name
    if file_path:
        log_path = Path(file_path)
        log_path = log_path.joinpath(logging_config["handlers"]["file"]["filename"])
        logging_config["handlers"]["file"]["filename"] = str(log_path)
    # Apply logging config
    logging.config.dictConfig(logging_config)
    LOGGER.debug("Logging Config: " + str(logging_config))
    LOGGER.debug("Logging is configured")
def sanitize_build(build):
    """
    Takes a build name and processes it for tagging.
    :param build: String - Name of build - (full/slim)
    :return: String - Tag suffix - ("" for full, "-slim" for slim)
    :raises ValueError: if the build name is not recognized
    """
    if build == "full":
        return ""
    elif build == "slim":
        return "-" + build
    # BUG FIX: an unknown build previously fell through and returned None,
    # which silently produced tags like "jdk8u-...-nightlyNone". Fail fast.
    raise ValueError("{build} is an unsupported build!".format(build=build))
def sanitize_jvm(jvm):
    """
    Convert a JVM name into the tag suffix used in image names.
    :param jvm: String - Name of JVM - (hotspot/openj9)
    :return: String - "" for hotspot, "-openj9" for openj9
    """
    # HotSpot is the default JVM, so it contributes no suffix.
    suffixes = {"hotspot": "", "openj9": "-openj9"}
    return suffixes.get(jvm)
def docker_arch_names(arch):
    """
    Map a uname-style architecture name to its Docker platform name.
    :param arch: String of the arch
    :return: String of the "friendly" arch name
    :raises ValueError: for unsupported architectures
    """
    friendly_names = {
        "armv7l": "arm",
        "aarch64": "arm64",
        "x86_64": "amd64",
        "ppc64le": "ppc64le",
        "s390x": "s390x",
    }
    if arch not in friendly_names:
        # Log and raise so bad input is visible both in logs and to callers.
        message = "{arch} is an unsupport architecture!".format(arch=arch)
        LOGGER.error(message)
        raise ValueError(message)
    return friendly_names[arch]
def convert_timedelta(duration):
    """
    Break a timedelta down into days, hours, minutes and seconds,
    rounding any negative component up to zero.
    :param duration: timedelta to convert
    :return: Tuple of (days, hours, minutes, seconds)
    """
    days = duration.days
    # timedelta.seconds is always 0..86399, so divmod splits it cleanly.
    hours, remainder = divmod(duration.seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    # Clamp so callers never report negative ages.
    return (max(0, days), max(0, hours), max(0, minutes), max(0, seconds))
def enrich_list_with_image_json(image_list, docker_org="adoptopenjdk"):
    """
    Enriches an image list with the image json data from docker api
    :param image_list: List of images (mutated in place)
    :param docker_org: Name of the docker organization
    :return: Enriched image list
    """
    # Get a list that has only one copy each possible image to save on image checks
    # (one HTTP request per manifest instead of one per architecture)
    manifest_list = get_manifest_list(image_list=image_list)
    # Enrich the manifest list with image json
    for image in manifest_list:
        image_json = get_image_information(docker_org=docker_org, docker_repo="openjdk{version}{jvm}".format(version=image["version"], jvm=sanitize_jvm(image["jvm"])), tag_name=image["tag"])
        image["image_json"] = image_json
    # Enrich the full image list with image json to avoid calling the same manifest 4 or 5 times(for each arch)
    # Matching is by (tag, jvm) pair between the full list and the manifest list.
    for image in image_list:
        for enrich_image in manifest_list:
            if image["tag"] == enrich_image["tag"] and image["jvm"] == enrich_image["jvm"]:
                image["image_json"] = enrich_image["image_json"]
    return image_list
def deenrich_list_with_image_json(enriched_image_list):
    """
    Strip the "image_json" payload back off of every image in the list.
    :param enriched_image_list: List of enriched images (mutated in place)
    :return: The same list, de-enriched
    """
    for image in enriched_image_list:
        # pop with a default is a no-op when the key is absent.
        image.pop("image_json", None)
    return enriched_image_list
def get_image_information(docker_org, docker_repo, tag_name):
    """
    Fetch image json from DockerHub for an image
    :param docker_org: Name of docker organization
    :param docker_repo: Name of docker repo
    :param tag_name: Name of tag
    :return: JSON of the image, or None when the request fails
    """
    LOGGER.debug("Getting image information for: {org}/{repo}:{tag}".format(org=docker_org, repo=docker_repo, tag=tag_name))
    response = requests.get("https://hub.docker.com/v2/repositories/{org}/{repo}/tags/{tag}".format(org=docker_org, repo=docker_repo, tag=tag_name))
    # Checks if the response is not a 5XX or 4XX status code
    if response.ok:
        return response.json()
    else:
        # If "bad" status code, log the error and return None to the caller
        LOGGER.error("ERROR: Something went wrong grabbing image, {org}/{repo}:{tag}. HTTP Status Code: {code}".format(org=docker_org, repo=docker_repo, tag=tag_name, code=response.status_code))
        return None
def get_last_updated_for_image(image_json):
    """
    Parse the "last_updated" timestamp out of a DockerHub image payload.
    :param image_json: Image JSON dict from the DockerHub v2 API
    :return: datetime parsed from the ISO-8601 "last_updated" string
    :raises ValueError: if the field is missing (API contract change)
    """
    last_updated = image_json.get("last_updated")
    if last_updated is None:
        # Should not happen unless the DockerHub API changes its response format.
        LOGGER.error("last_updated value in the image json is not there. Has the DockerHub API changed?")
        raise ValueError("last_updated value in the image json is not there. Has the DockerHub API changed?")
    return datetime.strptime(last_updated, "%Y-%m-%dT%H:%M:%S.%fZ")
def get_manifest_list(image_list):
    """
    Collapse a per-architecture image list down to unique "manifest" entries.
    :param image_list: Full image list (one entry per arch)
    :return: List of unique image dicts with the "arch" key removed
    """
    # Work on a deep copy so the caller's per-arch entries stay untouched.
    arch_free = copy.deepcopy(image_list)
    for entry in arch_free:
        del entry["arch"]
    # Dicts are not hashable, so dedupe via tuples of their items.
    unique_entries = {tuple(entry.items()) for entry in arch_free}
    return [dict(items) for items in unique_entries]
def get_unique_image_name_and_last_updated(enriched_image_list):
    """
    Generate a list with "manifest" images only and last_update timestamp
    :param enriched_image_list: Image list with image JSON
    :return: List of tuples(image name and timestamp)
    """
    # Use a set to avoid adding the same image twice
    # (per-arch entries share the same manifest name and timestamp)
    unique_list = set()
    for image in enriched_image_list:
        image_name = "adoptopenjdk/openjdk{version}{jvm}:{tag}".format(version=image["version"],
                                                                       jvm=sanitize_jvm(image["jvm"]), tag=image["tag"])
        last_updated = get_last_updated_for_image(image_json=image["image_json"])
        # Need to use tuples not dicts to take advantage of a set
        unique_list.add((image_name, last_updated))
    return list(unique_list)
def generate_all_image(supported_versions, supported_jvms, supported_os, supported_packages, supported_builds, supported_archs, dict_image_template):
    """
    Generate every combination of image attributes as a list of image dicts.
    :param supported_versions: String - List of Versions
    :param supported_jvms: String - List of JVMs
    :param supported_os: String - List of OSs
    :param supported_packages: String - List of Packages
    :param supported_builds: String - List of Builds
    :param supported_archs: String - List of Architectures
    :param dict_image_template: Dict - Template copied for every generated image
    :return: List - All generated image/tag possibilities
    """
    master_list = []
    # Cartesian product over every attribute dimension.
    for version in supported_versions:
        for jvm in supported_jvms:
            for os in supported_os:
                for package in supported_packages:
                    for build in supported_builds:
                        for arch in supported_archs:
                            # Deep copy so each image gets an independent dict.
                            image = copy.deepcopy(dict_image_template)
                            image.update({
                                "version": version,
                                "jvm": jvm,
                                "arch": arch,
                                "os": os,
                                "package": package,
                                "build": build,
                                "tag": "{package}{version}u-{os}-nightly{build}".format(package=package, version=version, os=os, build=sanitize_build(build)),
                            })
                            master_list.append(image)
    return master_list
def is_valid_package_and_build(package, build):
    """
    Whether this package/build pair is one we actually publish.
    :param package: String - Package name - (jdk/jre)
    :param build: String - Build name - (slim/full)
    :return: Boolean - (True/False)
    """
    # JRE slim images are currently not produced.
    valid = not (package == "jre" and build == "slim")
    if not valid:
        LOGGER.debug("Package & Build Check Failed with {package} and {build}".format(package=package, build=build))
    return valid
def filter_valid_package_and_build(image_list):
    """
    Split an image list into (valid, removed) by package/build validity.
    :param image_list: List - Collection of possible images/tags
    :return: Tuple - First list is valid images, second list is rejected images
    """
    kept, rejected = [], []
    for image in image_list:
        # Route each image into the matching bucket.
        bucket = kept if is_valid_package_and_build(package=image["package"], build=image["build"]) else rejected
        bucket.append(image)
    return (kept, rejected)
def is_valid_os_and_arch(os, arch):
    """
    Whether the OS image exists for the given architecture.
    :param os: String - Name of OS - See supported OSs
    :param arch: String - Name of Arch - See supported Archs
    :return: Boolean - (True/False)
    """
    invalid = (
        # ClefOS only runs on s390x
        (os == "clefos" and arch != "s390x")
        # CentOS does not support s390x
        or (os == "centos" and arch == "s390x")
        # Ubi and Ubi-minimal based images do not support armv7l
        or (os == "ubi" and arch == "armv7l")
        or (os == "ubi-minimal" and arch == "armv7l")
    )
    if invalid:
        LOGGER.debug("OS Check Failed with {os} and {arch}".format(os=os, arch=arch))
        return False
    return True
def filter_valid_os_and_arch(image_list):
    """
    Split an image list into (valid, removed) by OS/architecture validity.
    :param image_list: List - Collection of possible images/tags
    :return: Tuple - First list is valid images, second list is rejected images
    """
    kept, rejected = [], []
    for image in image_list:
        # Route each image into the matching bucket.
        bucket = kept if is_valid_os_and_arch(os=image["os"], arch=image["arch"]) else rejected
        bucket.append(image)
    return (kept, rejected)
def is_valid_jvm_and_arch(jvm, arch):
    """
    Whether the JVM is built for the given architecture.
    :param jvm: Name of JVM
    :param arch: Name of arch
    :return: Boolean
    """
    # OpenJ9 has no armv7l/aarch64 builds; HotSpot covers every supported arch.
    if jvm == "openj9" and arch in ("armv7l", "aarch64"):
        LOGGER.debug("JVM Check Failed with {jvm} and {arch}".format(jvm=jvm, arch=arch))
        return False
    return True
def filter_valid_jvm_and_arch(image_list):
    """
    Split an image list into (valid, removed) by JVM/architecture validity.
    :param image_list: List of images
    :return: Tuple - First list is valid images, second list is rejected images
    """
    kept, rejected = [], []
    for image in image_list:
        # Route each image into the matching bucket.
        bucket = kept if is_valid_jvm_and_arch(jvm=image["jvm"], arch=image["arch"]) else rejected
        bucket.append(image)
    return (kept, rejected)
def is_image_exist(docker_org, docker_repo, tag_name):
    """
    Checks if image exists on DockerHub
    :param docker_org: Name of docker organization
    :param docker_repo: Name of docker repo
    :param tag_name: Name of tag
    :return: Boolean
    :raises ValueError: on any status other than success (2XX/3XX) or 404
    """
    # Issue GET request to get a HTTP Status code to check if it is a valid image
    # Using GET instead of HEAD because HEAD is not being treated right, thus enable stream to just get headers
    response = requests.get("https://hub.docker.com/v2/repositories/{org}/{repo}/tags/{tag}".format(org=docker_org, repo=docker_repo, tag=tag_name), stream=True)
    LOGGER.debug("HTTP Status Code: {code}".format(code=response.status_code))
    # Checks if the response is not a 5XX or 4XX status code
    if response.ok:
        return True
    elif response.status_code == 404:
        # 404 is the expected "does not exist" answer from DockerHub
        LOGGER.debug("ERROR: Image, {org}/{repo}:{tag}, does not exist!".format(org=docker_org, repo=docker_repo, tag=tag_name))
        return False
    else:
        # Should never get another type of status code from Dockerhub then 200 or 404
        LOGGER.error("ERROR: When requesting the image, {org}/{repo}:{tag}, we got the HTTP status code, {code}. Network issues?".format(org=docker_org, repo=docker_repo, tag=tag_name, code=response.status_code))
        raise ValueError("ERROR: When requesting the image, {org}/{repo}:{tag}, we got the HTTP status code, {code}. Network issues?".format(org=docker_org, repo=docker_repo, tag=tag_name, code=response.status_code))
def filter_image_exist(docker_org, image_list):
    """
    Filter images based on if they exist or not
    :param docker_org: Name of docker organization
    :param image_list: List of images
    :return: Tuple of (existing images, nonexistent images)
    """
    filtered_list = []
    removed_list = []
    # Get a list that has only one copy each possible image to save on image checks
    # (one HTTP request per manifest instead of one per architecture)
    manifest_list = get_manifest_list(image_list=image_list)
    removed_manifest_list = []
    # Loop over all possible images
    for image in manifest_list:
        if is_image_exist(docker_org=docker_org, docker_repo="openjdk{version}{jvm}".format(version=image["version"], jvm=sanitize_jvm(image["jvm"])), tag_name=image["tag"]) is not True:
            removed_manifest_list.append(image["tag"])
    # Filter the image list based on if the image did not exist
    for image in image_list:
        # Add image to the removed list if in removed_manifest_list
        # NOTE(review): matching here is by tag only, so two manifests sharing
        # a tag but differing in jvm would be removed together — confirm intended.
        if image["tag"] in removed_manifest_list:
            removed_list.append(image)
        else:
            filtered_list.append(image)
    return (filtered_list, removed_list)
def is_arch_in_manifest(arch, image_json):
    """
    Check whether a manifest's image json lists the given architecture.
    :param arch: Name of architecture (uname style, e.g. "x86_64")
    :param image_json: JSON of image
    :return: Boolean
    :raises ValueError: if the payload has no "images" list (API change)
    """
    manifest_images = image_json.get("images")
    if manifest_images is None:
        LOGGER.error("images value in the image json is not there. Has the DockerHub API changed?")
        raise ValueError("images value in the image json is not there. Has the DockerHub API changed?")
    # Compare using Docker's "friendly" architecture naming.
    wanted = docker_arch_names(arch=arch)
    return any(entry.get("architecture") == wanted for entry in manifest_images)
def filter_arch_in_manifest(enriched_image_list, filter_images=True):
    """
    Split enriched images by whether their arch appears in the manifest.
    :param enriched_image_list: List of images with image JSON
    :param filter_images: If set to False, images that are not in the
        manifest remain in the filtered list (reporting-only mode)
    :return: Tuple - (filtered images, images missing from the manifest)
    """
    kept, rejected = [], []
    for image in enriched_image_list:
        if is_arch_in_manifest(arch=image["arch"], image_json=image["image_json"]):
            kept.append(image)
        else:
            rejected.append(image)
    if filter_images is False:
        # Reporting-only: keep every image, but still return the bad ones.
        kept = copy.deepcopy(enriched_image_list)
    return (kept, rejected)
def is_timedelta(timestamp, current_time=None, delta_hours=2):
    """
    Check if the given timestamp is within a time delta of current_time.
    :param timestamp: Timestamp of image (UTC)
    :param current_time: Current UTC timestamp; defaults to utcnow() at call time
    :param delta_hours: An integer of hours
    :return: Boolean - True when the timestamp is "new"
    """
    # BUG FIX: the default used to be current_time=datetime.utcnow(), which
    # Python evaluates ONCE at import time, so a long-running process would
    # compare against a stale "now". Resolve the default at call time instead.
    if current_time is None:
        current_time = datetime.utcnow()
    # "New" means timestamp + delta is still ahead of the current time;
    # if timestamp + delta has already passed, the image is "old".
    return (timestamp + timedelta(hours=delta_hours)) > current_time
def filter_timedelta(enriched_image_list, delta_hours=2):
    """
    Split enriched images into (new, old) by their last-updated age.
    :param enriched_image_list: List of images with image JSON
    :param delta_hours: An integer of hours
    :return: Tuple - (images newer than the delta, older images)
    """
    fresh, stale = [], []
    for image in enriched_image_list:
        last_updated = get_last_updated_for_image(image_json=image["image_json"])
        if is_timedelta(timestamp=last_updated, current_time=datetime.utcnow(), delta_hours=delta_hours):
            fresh.append(image)
        else:
            stale.append(image)
    return (fresh, stale)
def general_filters(image_list, dict_images_template):
    """
    Filters out images that should not be valid exist
    Each filter stores its rejects under a dedicated key of the template.
    :param image_list: List of images
    :param dict_images_template: Images template
    :return: Dict of images
    """
    # Filter by package and build
    image_list, dict_images_template["package_and_build"] = filter_valid_package_and_build(image_list)
    # Filter by os and arch
    image_list, dict_images_template["os_and_arch"] = filter_valid_os_and_arch(image_list)
    # Filter by jvm and arch
    image_list, dict_images_template["jvm_and_arch"] = filter_valid_jvm_and_arch(image_list)
    # Remaining images must be valid after going through the above filters
    dict_images_template["filtered_images"] = image_list
    return dict_images_template
def verify_images(image_list, dict_images_template, docker_org="adoptopenjdk"):
    """
    Verify a list of images exists
    :param image_list: List of images
    :param dict_images_template: Images template
    :param docker_org: Name of docker organization
    :return: Dict of images
    """
    # Apply general filter for the image list. This makes sure all images are valid
    dict_images_template = general_filters(image_list, dict_images_template)
    # Check if the images exist by using the filter; nonexistent ones land in "bad_requests"
    dict_images_template["filtered_images"], dict_images_template["bad_requests"] = filter_image_exist(docker_org=docker_org, image_list=dict_images_template["filtered_images"])
    return dict_images_template
def verify_manifests(image_list, dict_images_template, docker_org="adoptopenjdk", filter_bad_manifests=True):
    """
    Verify a list of images have valid manifests
    :param image_list: List of images
    :param dict_images_template: Images template
    :param docker_org: Name of docker organization
    :param filter_bad_manifests: Filter out bad manifests from list if set to true
    :return: Dict of images
    """
    # Call verify images to make sure they all exist before further processing
    dict_images_template = verify_images(image_list=image_list, dict_images_template=dict_images_template, docker_org=docker_org)
    # Enrich the images with image JSON (one DockerHub request per manifest)
    enriched_image_list = enrich_list_with_image_json(image_list=dict_images_template["filtered_images"], docker_org=docker_org)
    # Check if the manifests are "bad" by using the filter
    dict_images_template["filtered_images"], dict_images_template["bad_manifests"] = filter_arch_in_manifest(enriched_image_list=enriched_image_list, filter_images=filter_bad_manifests)
    # De-enrich the images before storing them into the dict
    # NOTE: "filtered_images" remains enriched here; verify() de-enriches it later
    dict_images_template["bad_manifests"] = deenrich_list_with_image_json(enriched_image_list=dict_images_template["bad_manifests"])
    return dict_images_template
def verify_timedelta(image_list, dict_images_template, docker_org="adoptopenjdk", filter_bad_manifests=True, delta_hours=2, force_old_images=False):
    """
    Verify a list of images meet a given time delta
    :param image_list: List of images
    :param dict_images_template: Images template
    :param docker_org: Name of docker organization
    :param filter_bad_manifests: Filter out bad manifests from list if set to true
    :param delta_hours: An integer of hours to deem an image "old"
    :param force_old_images: Forces old images to not be filtered out
    :return: Dict of images
    """
    # Call verify manifests to make use all manifests are okay. Calling manifest also verifies if the images exist too
    dict_images_template = verify_manifests(image_list=image_list, dict_images_template=dict_images_template, docker_org=docker_org, filter_bad_manifests=filter_bad_manifests)
    # Force Old Images set to true will skip the delta time check
    if force_old_images is not True:
        dict_images_template["filtered_images"], dict_images_template["old_images"] = filter_timedelta(enriched_image_list=dict_images_template["filtered_images"], delta_hours=delta_hours)
    return dict_images_template
def verify(image_list, dict_images_template, docker_org="adoptopenjdk", filter_bad_manifests=False, delta_hours=2, force_old_images=False):
    """
    Verify a list of images meet all filters. Used to generate a list of images that need to be tested
    :param image_list: List of images
    :param dict_images_template: Images template
    :param docker_org: Name of docker organization
    :param filter_bad_manifests: Filter out bad manifests from list if set to true
    :param delta_hours: An integer of hours to deem an image "old"
    :param force_old_images: Forces old images to not be filtered out
    :return: Dict of images
    """
    # Call verify time delta to make sure all images are not "old". This calls verifies manifests and if the images exist
    dict_images_template = verify_timedelta(image_list=image_list, dict_images_template=dict_images_template, docker_org=docker_org, filter_bad_manifests=filter_bad_manifests, delta_hours=delta_hours, force_old_images=force_old_images)
    # De-enrich images before storing them in image dict
    dict_images_template["filtered_images"] = deenrich_list_with_image_json(enriched_image_list=dict_images_template["filtered_images"])
    return dict_images_template
def output_package_and_build(image_dict, json_output):
    """
    Outputs a list of images that failed the package and build filter
    :param image_dict: Dictionary of images
    :param json_output: Boolean for Json output verse printed
    :return: None
    """
    LOGGER.info("\nPackage and Build Image Issues({number}):".format(number=str(len(image_dict["package_and_build"]))))
    for image in image_dict["package_and_build"]:
        if json_output is False:
            # Human-readable line naming the failing package/build pair
            image_name = "adoptopenjdk/openjdk{version}{jvm}:{tag}".format(version=image["version"], jvm=sanitize_jvm(image["jvm"]), tag=image["tag"])
            LOGGER.info("Package & Build Check Failed with {package} and {build} for image: {image_name}".format(package=image["package"], build=image["build"], image_name=image_name))
        else:
            # Raw JSON, one image per line
            LOGGER.info(json.dumps(image))
def output_os_and_arch(image_dict, json_output):
    """
    Outputs a list of images that failed the os and arch filter
    :param image_dict: Dictionary of images
    :param json_output: Boolean for Json output verse printed
    :return: None
    """
    LOGGER.info("\nOS and Image Image Issues({number}):".format(number=str(len(image_dict["os_and_arch"]))))
    for image in image_dict["os_and_arch"]:
        if json_output is False:
            # Human-readable line naming the failing OS/arch pair
            image_name = "adoptopenjdk/openjdk{version}{jvm}:{tag}".format(version=image["version"], jvm=sanitize_jvm(image["jvm"]), tag=image["tag"])
            LOGGER.info("OS Check Failed with {os} and {arch} for image: {image_name}".format(os=image["os"], arch=image["arch"], image_name=image_name))
        else:
            # Raw JSON, one image per line
            LOGGER.info(json.dumps(image))
def output_jvm_and_arch(image_dict, json_output):
    """
    Outputs a list of images that failed the jvm and arch filter
    :param image_dict: Dictionary of images
    :param json_output: Boolean for Json output verse printed
    :return: None
    """
    LOGGER.info("\nJVM and Architecture Image Issues({number}):".format(number=str(len(image_dict["jvm_and_arch"]))))
    for image in image_dict["jvm_and_arch"]:
        if json_output is False:
            # Human-readable line naming the failing JVM/arch pair
            image_name = "adoptopenjdk/openjdk{version}{jvm}:{tag}".format(version=image["version"], jvm=sanitize_jvm(image["jvm"]), tag=image["tag"])
            LOGGER.info("JVM Check Failed with {jvm} and {arch} for image: {image_name}".format(jvm=image["jvm"], arch=image["arch"], image_name=image_name))
        else:
            # Raw JSON, one image per line
            LOGGER.info(json.dumps(image))
def output_bad_requests(image_dict, json_output, valid_images):
    """
    Outputs a list of images that do not exist in DockerHub/generated a bad request. Also can out print "valid" images
    :param image_dict: Dictionary of images
    :param json_output: Boolean for Json output verse printed
    :param valid_images: Boolean for if Valid images should be shown
    :return: None
    """
    # Collapse per-arch entries so each manifest is reported once
    manifest_list = get_manifest_list(image_list=image_dict["bad_requests"])
    LOGGER.info("\nNonexistent(Bad Requests) Image Issues({number}):".format(number=str(len(manifest_list))))
    for image in manifest_list:
        if json_output is False:
            image_name = "adoptopenjdk/openjdk{version}{jvm}:{tag}".format(version=image["version"], jvm=sanitize_jvm(image["jvm"]), tag=image["tag"])
            LOGGER.info("Got a bad request for image: {image_name}".format(image_name=image_name))
        else:
            LOGGER.info(json.dumps(image))
    if valid_images is True:
        # Optionally report images that did resolve on DockerHub
        valid_manifest_list = get_manifest_list(image_list=image_dict["filtered_images"])
        LOGGER.info("\nExistent(Good Requests) Images({number}):".format(number=str(len(valid_manifest_list))))
        for image in valid_manifest_list:
            if json_output is False:
                image_name = "adoptopenjdk/openjdk{version}{jvm}:{tag}".format(version=image["version"], jvm=sanitize_jvm(image["jvm"]), tag=image["tag"])
                LOGGER.info("Got a good request for image: {image_name}".format(image_name=image_name))
            else:
                LOGGER.info(json.dumps(image))
def output_old_images(image_dict, json_output, valid_images, delta_hours):
    """
    Outputs a list of images that are deemed "old" by a given time delta. Also can out print "valid" images
    :param image_dict: Dictionary of images
    :param json_output: Boolean for Json output verse printed
    :param valid_images: Boolean for if Valid images should be shown
    :param delta_hours: An integer of hours to deem an image "old"
    :return: None
    """
    if json_output is False:
        image_name_and_last_updated = get_unique_image_name_and_last_updated(enriched_image_list=image_dict["old_images"])
        LOGGER.info("\nDelta Time(Old) Image Issues({number}):".format(number=str(len(image_name_and_last_updated))))
        for image_name, timestamp in image_name_and_last_updated:
            # Age is "now minus last-updated"
            age_of_image = datetime.utcnow() - timestamp
            days, hours, minutes, seconds = convert_timedelta(age_of_image)
            LOGGER.info("Failed delta time check of {delta_hours} hours with the age of {days} days, {hours:02d}:{minutes:02d}.{seconds:02d} for image: {image_name}".format(delta_hours=delta_hours, days=days, hours=hours, minutes=minutes, seconds=seconds, image_name=image_name))
        if valid_images is True:
            image_name_and_last_updated = get_unique_image_name_and_last_updated(enriched_image_list=image_dict["filtered_images"])
            LOGGER.info("\nDelta Time(NEW) Images({number}):".format(number=str(len(image_name_and_last_updated))))
            for image_name, timestamp in image_name_and_last_updated:
                # BUG FIX: this branch previously computed timestamp - utcnow(),
                # which is negative for any past timestamp and was clamped to
                # zero by convert_timedelta, so every passing image reported an
                # age of 0. Age is always "now minus last-updated", as above.
                age_of_image = datetime.utcnow() - timestamp
                days, hours, minutes, seconds = convert_timedelta(age_of_image)
                LOGGER.info("Passed delta time check of {delta_hours} hours with the age of {days} days, {hours:02d}:{minutes:02d}.{seconds:02d} for image: {image_name}".format(delta_hours=delta_hours, days=days, hours=hours, minutes=minutes, seconds=seconds, image_name=image_name))
    else:
        LOGGER.info("\nDelta Time(Old) RAW Image Issues({number}):".format(number=str(len(image_dict["old_images"]))))
        for image in image_dict["old_images"]:
            LOGGER.info(json.dumps(image))
        if valid_images is True:
            LOGGER.info("\nDelta Time(NEW) RAW Images({number}):".format(number=str(len(image_dict["filtered_images"]))))
            for image in image_dict["filtered_images"]:
                LOGGER.info(json.dumps(image))
def output_bad_manifests(image_dict, json_output):
    """
    Outputs of a list of images that have manifest issues
    :param image_dict: Dictionary of images
    :param json_output: Boolean for Json output verse printed
    :return: None
    """
    if json_output is False:
        # Group missing architectures per image name for a compact report
        manifest_dict = {}
        for image in image_dict["bad_manifests"]:
            image_name = "adoptopenjdk/openjdk{version}{jvm}:{tag}".format(version=image["version"], jvm=sanitize_jvm(image["jvm"]), tag=image["tag"])
            if image_name in manifest_dict:
                manifest_dict[image_name] = manifest_dict[image_name] + ", " + image["arch"]
            else:
                manifest_dict[image_name] = image["arch"]
        LOGGER.info("\nManifest Image Issues({number}):".format(number=str(len(image_dict["bad_manifests"]))))
        for key, value in manifest_dict.items():
            LOGGER.info(key + " : " + value)
    else:
        # Raw JSON, one image per line
        LOGGER.info("\nManifest RAW Image Issues({number}):".format(number=str(len(image_dict["bad_manifests"]))))
        for image in image_dict["bad_manifests"]:
            LOGGER.info(json.dumps(image))
def output_filtered_images(image_dict, json_output):
    """
    Outputs a list of images that need to be tested
    :param image_dict: Dictionary of images
    :param json_output: Boolean for Json output verse printed
    :return: None
    """
    if json_output is False:
        # Collapse per-arch entries so each manifest is reported once
        manifest_list = get_manifest_list(image_list=image_dict["filtered_images"])
        LOGGER.info("Valid(Filtered) Images({number}):".format(number=str(len(manifest_list))))
        for image in manifest_list:
            image_name = "adoptopenjdk/openjdk{version}{jvm}:{tag}".format(version=image["version"], jvm=sanitize_jvm(image["jvm"]), tag=image["tag"])
            LOGGER.info("All attributes have been verified for image: {image_name}".format(image_name=image_name))
    else:
        # Raw JSON, one image per line
        LOGGER.info("Valid(Filtered) RAW Images({number}):".format(number=str(len(image_dict["filtered_images"]))))
        for image in image_dict["filtered_images"]:
            LOGGER.info(json.dumps(image))
def get_args():
    """
    Processes and handles command line arguments
    :return: Dict of command line arguments
    """
    parser = argparse.ArgumentParser(description="AdoptOpenJDK Scanner allows a user to verify attributes about images")
    parser.add_argument("--verify",
                        help="Name of the attribute you want to verify",
                        type=str,
                        choices=["all", "timedelta", "manifests", "images"],
                        default=None,
                        required=True)
    # Multi-valued selectors: each accepts one or more values and defaults to
    # every available choice.
    list_options = (
        ("--versions", "Java Versions", ["8", "11", "14"]),
        ("--jvms", "Name of the JVMs", ["hotspot", "openj9"]),
        ("--oss", "Names of the OSs",
         ["alpine", "debian", "debianslim", "ubi", "ubi-minimal", "centos", "clefos", "ubuntu"]),
        ("--packages", "Names of the Packages", ["jdk", "jre"]),
        ("--archs", "Architectures", ["armv7l", "aarch64", "ppc64le", "s390x", "x86_64"]),
        ("--builds", "Name of the Builds", ["slim", "full"]),
    )
    for flag, help_text, values in list_options:
        parser.add_argument(flag,
                            help=help_text,
                            nargs='+',
                            type=str,
                            choices=values,
                            default=list(values))
    parser.add_argument("--filter-bad-manifests",
                        help="Filter out bad manifest images",
                        action="store_true",
                        default=False)
    parser.add_argument("--delta-hours",
                        help="Number of hours to deem an image 'old'",
                        type=int,
                        default=2)
    parser.add_argument("--force-old-images",
                        help="Force old images not to be filtered out",
                        action="store_true",
                        default=False)
    parser.add_argument("--debug",
                        help="Enable Debug output",
                        action="store_true",
                        default=False)
    parser.add_argument("--log-path",
                        help="Path to where the log file will be generated",
                        type=str,
                        default=None)
    parser.add_argument("--json",
                        help="Prints JSON output for results instead of formatted strings",
                        action="store_true",
                        default=False)
    parser.add_argument("--show-valid",
                        help="Prints valid objects in addition to the problematic objects. Only works for certain verify values",
                        action="store_true",
                        default=False)
    return vars(parser.parse_args())
def run(parsed_args):
    """
    Drives a scan: generates every candidate image combination and dispatches
    to the verification routine selected by ``--verify``.

    :param parsed_args: Dict of command line arguments
    :return: None
    """
    docker_organization = "adoptopenjdk"
    # Blank per-image record, filled in for each generated combination.
    image_template = {key: "" for key in ("version", "jvm", "arch", "os", "package", "build", "tag")}
    # Buckets that the verification passes sort images into.
    images_template = {key: [] for key in (
        "filtered_images",
        "package_and_build",
        "os_and_arch",
        "jvm_and_arch",
        "bad_requests",
        "bad_manifests",
        "old_images",
    )}
    LOGGER.info("Generating All Possible Images.......")
    all_images = generate_all_image(supported_versions=parsed_args["versions"], supported_jvms=parsed_args["jvms"], supported_os=parsed_args["oss"], supported_packages=parsed_args["packages"], supported_builds=parsed_args["builds"], supported_archs=parsed_args["archs"], dict_image_template=image_template)
    LOGGER.info("Processing images.......")
    target = parsed_args["verify"]
    if target == "all":
        processed_dict = verify(image_list=all_images, dict_images_template=images_template, docker_org=docker_organization, filter_bad_manifests=parsed_args["filter_bad_manifests"], delta_hours=parsed_args["delta_hours"], force_old_images=parsed_args["force_old_images"])
        # The per-category breakdowns are only interesting when debugging.
        if parsed_args["debug"]:
            output_package_and_build(image_dict=processed_dict, json_output=parsed_args["json"])
            output_os_and_arch(image_dict=processed_dict, json_output=parsed_args["json"])
            output_jvm_and_arch(image_dict=processed_dict, json_output=parsed_args["json"])
        output_bad_requests(image_dict=processed_dict, json_output=parsed_args["json"], valid_images=parsed_args["show_valid"])
        output_bad_manifests(image_dict=processed_dict, json_output=parsed_args["json"])
        output_old_images(image_dict=processed_dict, json_output=parsed_args["json"], valid_images=parsed_args["show_valid"], delta_hours=parsed_args["delta_hours"])
        output_filtered_images(image_dict=processed_dict, json_output=parsed_args["json"])
    elif target == "timedelta":
        processed_dict = verify_timedelta(image_list=all_images, dict_images_template=images_template, docker_org=docker_organization, filter_bad_manifests=parsed_args["filter_bad_manifests"], delta_hours=parsed_args["delta_hours"], force_old_images=parsed_args["force_old_images"])
        output_old_images(image_dict=processed_dict, json_output=parsed_args["json"], valid_images=parsed_args["show_valid"], delta_hours=parsed_args["delta_hours"])
    elif target == "manifests":
        processed_dict = verify_manifests(image_list=all_images, dict_images_template=images_template, docker_org=docker_organization, filter_bad_manifests=parsed_args["filter_bad_manifests"])
        output_bad_manifests(image_dict=processed_dict, json_output=parsed_args["json"])
    elif target == "images":
        processed_dict = verify_images(image_list=all_images, dict_images_template=images_template, docker_org=docker_organization)
        output_bad_requests(image_dict=processed_dict, json_output=parsed_args["json"], valid_images=parsed_args["show_valid"])
if __name__ == "__main__":
    # Parse the arguments passed in
    args = get_args()
    # Configure logging before anything else logs (debug level + optional file)
    load_logging_config(args["debug"], args["log_path"])
    LOGGER.debug("Parsed arguments: " + str(args))
    # Kick off the requested verification run
    run(parsed_args=args)
| 17,017 |
412 | package indirect;
public abstract class ThirdInstance implements ThirdInterface {

    /** Backing state, set to {@link TestEnum#Four} at construction time. */
    private TestEnum val;

    public ThirdInstance() {
        this.val = TestEnum.Four;
    }

    /** Returns the state assigned in the constructor. */
    public TestEnum getState() {
        return val;
    }
}
| 85 |
14,668 | <gh_stars>1000+
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <memory>
#include "base/callback_helpers.h"
#include "base/test/task_environment.h"
#include "chrome/browser/nearby_sharing/scheduling/nearby_share_on_demand_scheduler.h"
#include "components/prefs/pref_registry_simple.h"
#include "components/prefs/testing_pref_service.h"
#include "services/network/test/test_network_connection_tracker.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace {
const char kTestPrefName[] = "test_pref_name";
} // namespace
// Fixture for NearbyShareOnDemandScheduler tests: backs the scheduler with a
// testing pref service (state persisted under |kTestPrefName|) and mock time.
class NearbyShareOnDemandSchedulerTest : public ::testing::Test {
 protected:
  NearbyShareOnDemandSchedulerTest() = default;
  ~NearbyShareOnDemandSchedulerTest() override = default;
  void SetUp() override {
    pref_service_.registry()->RegisterDictionaryPref(kTestPrefName);
    // Report a WIFI connection so the scheduler's connectivity requirement
    // (require_connectivity=true below) is satisfied.
    network::TestNetworkConnectionTracker::GetInstance()->SetConnectionType(
        network::mojom::ConnectionType::CONNECTION_WIFI);
    scheduler_ = std::make_unique<NearbyShareOnDemandScheduler>(
        /*retry_failures=*/true, /*require_connectivity=*/true, kTestPrefName,
        &pref_service_, base::DoNothing(), task_environment_.GetMockClock());
  }
  NearbyShareScheduler* scheduler() { return scheduler_.get(); }
 private:
  base::test::SingleThreadTaskEnvironment task_environment_{
      base::test::TaskEnvironment::TimeSource::MOCK_TIME};
  TestingPrefServiceSimple pref_service_;
  std::unique_ptr<NearbyShareScheduler> scheduler_;
};
// An on-demand scheduler has no recurring interval: after Start() there is no
// pending "time until next request".
TEST_F(NearbyShareOnDemandSchedulerTest, NoRecurringRequest) {
  scheduler()->Start();
  EXPECT_FALSE(scheduler()->GetTimeUntilNextRequest());
}
| 581 |
1,582 | <reponame>carlin-q-scott/AspNet.Security.OAuth.Providers
{
"$schema": "https://raw.githubusercontent.com/justeat/httpclient-interception/master/src/HttpClientInterception/Bundles/http-request-bundle-schema.json",
"items": [
{
"comment": "https://trakt.docs.apiary.io/#reference/authentication-oauth/get-token/retrieve-settings?console=1",
"uri": "https://api.trakt.tv/oauth/token",
"method": "POST",
"contentFormat": "json",
"contentJson": {
"access_token": "<PASSWORD>",
"token_type": "bearer",
"expires_in": 7200,
"refresh_token": "<PASSWORD>",
"scope": "public",
"created_at": 1487889741
}
},
{
"comment": "See https://trakt.docs.apiary.io/#reference/users/profile/get-user-profile?console=1",
"uri": "https://api.trakt.tv/users/me?extended=full",
"contentFormat": "json",
"contentJson": {
"username": "sean",
"private": false,
"name": "<NAME>",
"vip": true,
"vip_ep": true,
"ids": {
"slug": "sean"
},
"joined_at": "2011-11-01T00:31:51.000Z",
"location": "",
"about": "",
"gender": "",
"age": null,
"images": {
"avatar": {
"full": "https://secure.gravatar.com/avatar/5asdasdasdasdf5e6asdasd1?d=https%3A%2F%2Fwalter.trakt.tv%2Fhotlink-ok%2Fplaceholders%2Fmedium%2Fzoidberg.png&r=pg&s=256"
}
}
}
}
]
}
| 762 |
604 | <gh_stars>100-1000
#include "pch.h"
#include "config.hpp"
#include <fstream>
#include <Shlwapi.h>
#include <yaml-cpp/yaml.h>
#pragma comment(lib, "Shlwapi.lib")
Config config{};
// Loads IbEverythingExt.yaml (located next to the current module) into the
// global `config`. On a missing or malformed file a message box is shown,
// `config.enable` stays false and false is returned; on success the pinyin
// engine is initialized from the configured flags and true is returned.
bool config_init() {
    using namespace std::literals;
    config.enable = false;
    // Resolve the YAML path relative to the module's own directory.
    wchar_t path[MAX_PATH];
    GetModuleFileNameW(nullptr, path, std::size(path));
    PathRemoveFileSpecW(path);
    PathAppendW(path, L"IbEverythingExt.yaml");
    std::ifstream in(path);
    if (!in) {
        MessageBoxW(nullptr, L"配置文件 IbEverythingExt.yaml 不存在!", L"IbEverythingExt", MB_ICONERROR);
        return false;
    }
    try {
        YAML::Node root = YAML::Load(in);
        {
            // pinyin_search: search mode plus the set of pinyin schemes to match.
            YAML::Node node = root["pinyin_search"];
            config.pinyin_search = {
                .enable = node["enable"].as<bool>(),
                .mode = [&node] {
                    auto mode = node["mode"].as<std::string>();
                    if (mode == "Auto")
                        return PinyinSearchMode::Auto;
                    else if (mode == "Pcre")
                        return PinyinSearchMode::Pcre;
                    else if (mode == "Edit")
                        return PinyinSearchMode::Edit;
                    throw YAML::Exception(YAML::Mark::null_mark(), "Invalid pinyin_search.mode");
                }(),
                .flags = [&node] {
                    std::vector<pinyin::PinyinFlagValue> flags;
                    // mind the order
                    if (node["pinyin_ascii_digit"].as<bool>())
                        flags.push_back(pinyin::PinyinFlag::PinyinAsciiDigit);
                    if (node["pinyin_ascii"].as<bool>())
                        flags.push_back(pinyin::PinyinFlag::PinyinAscii);
                    if (node["double_pinyin_abc"].as<bool>())
                        flags.push_back(pinyin::PinyinFlag::DoublePinyinAbc);
                    if (node["double_pinyin_jiajia"].as<bool>())
                        flags.push_back(pinyin::PinyinFlag::DoublePinyinJiajia);
                    if (node["double_pinyin_microsoft"].as<bool>())
                        flags.push_back(pinyin::PinyinFlag::DoublePinyinMicrosoft);
                    if (node["double_pinyin_thunisoft"].as<bool>())
                        flags.push_back(pinyin::PinyinFlag::DoublePinyinThunisoft);
                    if (node["double_pinyin_xiaohe"].as<bool>())
                        flags.push_back(pinyin::PinyinFlag::DoublePinyinXiaohe);
                    if (node["double_pinyin_zrm"].as<bool>())
                        flags.push_back(pinyin::PinyinFlag::DoublePinyinZrm);
                    if (node["initial_letter"].as<bool>())
                        flags.push_back(pinyin::PinyinFlag::InitialLetter);
                    return flags;
                }()
            };
        }
        {
            // quick_select: hotkey behavior for result selection.
            YAML::Node node = root["quick_select"];
            config.quick_select = {
                .enable = node["enable"].as<bool>(),
                .hotkey_mode = node["hotkey_mode"].as<int>(),
                .input_mode = [&node] {
                    auto mode = node["input_mode"].as<std::string>();
                    if (mode == "Auto")
                        return quick::InputMode::Auto;
                    else if (mode == "WmKey")
                        return quick::InputMode::WmKey;
                    else if (mode == "SendInput")
                        return quick::InputMode::SendInput;
                    throw YAML::Exception(YAML::Mark::null_mark(), "Invalid quick_select.input_mode");
                }(),
                .close_everything = node["close_everything"].as<bool>()
            };
        }
    }
    catch (YAML::Exception& e) {
        MessageBoxA(nullptr, ("配置文件读取错误:\n"s + e.what()).c_str(), "IbEverythingExt", MB_ICONERROR);
        return false;
    }
    // Fold the individual scheme flags into one bitmask for the engine.
    pinyin::PinyinFlagValue flags{};
    for (pinyin::PinyinFlagValue flag : config.pinyin_search.flags)
        flags |= flag;
    pinyin::init(flags);
    config.enable = true;
    return true;
}
// Tears down the pinyin engine; counterpart to config_init().
void config_destroy() {
    pinyin::destroy();
}
414 | <gh_stars>100-1000
//
// SEInputAccessoryView.h
// RichTextEditor
//
// Created by <NAME> on 13/09/26.
// Copyright (c) 2013 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
// Input accessory toolbar for the rich text editor. All bar button items are
// IBOutlets wired in Interface Builder.
// NOTE(review): button actions are connected elsewhere; names suggest input
// mode switches (keyboard/stamp/photo) and text styling controls (size,
// color, normal/bold/italic) — confirm against the xib/controller.
@interface SEInputAccessoryView : UIToolbar
@property (nonatomic, weak) IBOutlet UIBarButtonItem *keyboardButton;
@property (nonatomic, weak) IBOutlet UIBarButtonItem *stampButton;
@property (nonatomic, weak) IBOutlet UIBarButtonItem *photoButton;
@property (nonatomic, weak) IBOutlet UIBarButtonItem *smallerButton;
@property (nonatomic, weak) IBOutlet UIBarButtonItem *largerButton;
@property (nonatomic, weak) IBOutlet UIBarButtonItem *colorButton;
@property (nonatomic, weak) IBOutlet UIBarButtonItem *nomalButton;
@property (nonatomic, weak) IBOutlet UIBarButtonItem *boldButton;
@property (nonatomic, weak) IBOutlet UIBarButtonItem *italicButton;
@end
| 286 |
2,151 | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef UI_WM_PUBLIC_ANIMATION_HOST_H_
#define UI_WM_PUBLIC_ANIMATION_HOST_H_
#include "base/compiler_specific.h"
#include "ui/wm/public/wm_public_export.h"
namespace aura {
class Window;
}
namespace gfx {
class Vector2d;
}
namespace wm {
// Interface for top level window host of animation. Communicates additional
// bounds required for animation as well as animation completion for deferring
// window closes on hide.
class WM_PUBLIC_EXPORT AnimationHost {
 public:
  // Ensure the host window is at least this large so that transitions have
  // sufficient space.
  // The |top_left_delta| parameter contains the offset to be subtracted from
  // the window bounds for the top left corner.
  // The |bottom_right_delta| parameter contains the offset to be added to the
  // window bounds for the bottom right.
  virtual void SetHostTransitionOffsets(
      const gfx::Vector2d& top_left_delta,
      const gfx::Vector2d& bottom_right_delta) = 0;
  // Called after the window has faded out on a hide.
  virtual void OnWindowHidingAnimationCompleted() = 0;
 protected:
  // Protected non-virtual destructor: clients never delete through this
  // interface.
  virtual ~AnimationHost() {}
};
// Associates |animation_host| with |window|.
WM_PUBLIC_EXPORT void SetAnimationHost(aura::Window* window,
                                       AnimationHost* animation_host);
// Looks up the AnimationHost previously associated with |window|.
WM_PUBLIC_EXPORT AnimationHost* GetAnimationHost(aura::Window* window);
} // namespace wm
#endif // UI_WM_PUBLIC_ANIMATION_HOST_H_
| 499 |
492 | """
Generic SNMPv2c TRAP
++++++++++++++++++++
Send SNMPv2c TRAP using the following options:
* SNMPv2c
* with community name 'public'
* over IPv4/UDP
* send TRAP notification
* with Uptime 12345
* with Generic Trap #1 (warmStart) and Specific Trap 0
* include managed object information '1.3.6.1.2.1.1.1.0' = 'my system'
Functionally similar to:
| $ snmptrap -v2c -c public demo.snmplabs.com 12345 1.3.6.1.6.3.1.1.5.2 1.3.6.1.2.1.1.1.0 s "my system"
"""#
from pysnmp.hlapi.v1arch.asyncore import *
def cbFun(errorIndication, errorStatus, errorIndex, varBinds, **context):
    """Completion callback for sendNotification.

    :param errorIndication: transport/engine-level error (truthy on failure)
    :param errorStatus: SNMP protocol-level error status, if any
    :param errorIndex: 1-based index of the var-bind that caused errorStatus
    :param varBinds: the variable bindings that were sent
    """
    if errorIndication:
        print(errorIndication)
    elif errorStatus:
        # Previously protocol-level errors were silently dropped; report them
        # together with the offending var-bind (if errorIndex identifies one).
        print('%s at %s' % (errorStatus.prettyPrint(),
                            errorIndex and varBinds[int(errorIndex) - 1][0] or '?'))
# Queue the TRAP PDU; it is actually transmitted when the dispatcher runs.
snmpDispatcher = SnmpDispatcher()
sendNotification(
    snmpDispatcher,
    CommunityData('public'),
    UdpTransportTarget(('demo.snmplabs.com', 162)),
    'trap',
    # SNMPv2-MIB::sysUpTime.0 = 12345
    ('1.3.6.1.2.1.1.3.0', TimeTicks(12345)),
    # SNMPv2-SMI::snmpTrapOID.0 = SNMPv2-MIB::warmStart
    ('1.3.6.1.6.3.1.1.4.1.0', ObjectIdentifier('1.3.6.1.6.3.1.1.5.2')),
    # SNMPv2-MIB::sysName.0
    ('1.3.6.1.2.1.1.1.0', OctetString('my system')),
    cbFun=cbFun
)
# Run the asyncore-based I/O loop until the notification has been handled.
snmpDispatcher.transportDispatcher.runDispatcher()
| 537 |
11,010 | <reponame>da196/System-BQ
package com.google.inject.internal;
/**
* Class used for restricting APIs in other packages to only be used by this package.
*
* <p>Other packages can reference this class but only this package can reference an instance of it,
* so adding this class as a method param ensures that only this package can call it (provided null
* is disallowed).
*/
public final class GuiceInternal {
static final GuiceInternal GUICE_INTERNAL = new GuiceInternal();
private GuiceInternal() {}
}
| 135 |
2,504 | <gh_stars>1000+
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "LoggingScenarioEventArgs.g.h"
namespace winrt::SDKTemplate::implementation
{
    // Event payload for logging-scenario notifications. Which of the optional
    // fields is meaningful depends on the LoggingScenarioEventType, as shown
    // by the per-constructor comments below.
    struct LoggingScenarioEventArgs : LoggingScenarioEventArgsT<LoggingScenarioEventArgs>
    {
        // e.g. for BusyStatusChanged
        LoggingScenarioEventArgs(LoggingScenarioEventType type)
            : _type(type) {}
        // e.g. for LogFileGenerated
        LoggingScenarioEventArgs(LoggingScenarioEventType type, hstring const& logFileFullPath)
            : _type(type), _logFileFullPath(logFileFullPath) {}
        // e.g. for LoggingEnabledDisabled
        LoggingScenarioEventArgs(LoggingScenarioEventType type, bool enabled)
            : _type(type), _enabled(enabled) {}
        // Read-only accessors for the projected properties.
        LoggingScenarioEventType Type() const noexcept { return _type; }
        bool Enabled() const noexcept { return _enabled; }
        hstring LogFileFullPath() const noexcept { return _logFileFullPath; }
    private:
        LoggingScenarioEventType _type;
        bool _enabled = false;
        hstring _logFileFullPath;
    };
}
| 570 |
14,668 | <filename>chrome/test/data/webui/certificate_viewer_ui_test-inl.h
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_TEST_DATA_WEBUI_CERTIFICATE_VIEWER_UI_TEST_INL_H_
#define CHROME_TEST_DATA_WEBUI_CERTIFICATE_VIEWER_UI_TEST_INL_H_
#include "chrome/browser/certificate_viewer.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_window.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/browser/ui/webui/certificate_viewer_webui.h"
#include "chrome/common/url_constants.h"
#include "chrome/test/base/ui_test_utils.h"
#include "chrome/test/base/web_ui_browser_test.h"
#include "content/public/browser/render_view_host.h"
#include "content/public/browser/web_contents.h"
#include "content/public/browser/web_ui.h"
#include "content/public/test/browser_test.h"
#include "content/public/test/browser_test_utils.h"
#include "net/cert/x509_util_nss.h"
#include "net/test/test_certificate_data.h"
// Test framework for
// chrome/test/data/webui/certificate_viewer_dialog_browsertest.js.
class CertificateViewerUITest : public WebUIBrowserTest {
 public:
  CertificateViewerUITest();
  ~CertificateViewerUITest() override;
 protected:
  // Opens the certificate viewer dialog for a test certificate and points the
  // WebUI test framework at the dialog's WebUI instance.
  void ShowCertificateViewer();
};
void CertificateViewerUITest::ShowCertificateViewer() {
  // Build a one-element certificate chain from the bundled test certificate.
  net::ScopedCERTCertificate google_cert(
      net::x509_util::CreateCERTCertificateFromBytes(google_der,
                                                     sizeof(google_der)));
  ASSERT_TRUE(google_cert);
  net::ScopedCERTCertificateList certs;
  certs.push_back(net::x509_util::DupCERTCertificate(google_cert.get()));
  ASSERT_TRUE(browser());
  ASSERT_TRUE(browser()->window());
  // Show the viewer constrained to the active tab and wait for it to load.
  CertificateViewerDialog* dialog = CertificateViewerDialog::ShowConstrained(
      std::move(certs), browser()->tab_strip_model()->GetActiveWebContents(),
      browser()->window()->GetNativeWindow());
  content::WebContents* webui_webcontents = dialog->webui_->GetWebContents();
  EXPECT_TRUE(content::WaitForLoadStop(webui_webcontents));
  content::WebUI* webui = webui_webcontents->GetWebUI();
  // Expose the expected chrome:// URL so the JS test can validate it.
  webui_webcontents->GetMainFrame()->SetWebUIProperty(
      "expectedUrl", chrome::kChromeUICertificateViewerURL);
  SetWebUIInstance(webui);
}
#endif // CHROME_TEST_DATA_WEBUI_CERTIFICATE_VIEWER_UI_TEST_INL_H_
| 911 |
388 | <filename>AccordionPanel/src/java/example/MainPanel.java
// -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;
import javax.swing.*;
import javax.swing.border.Border;
// Demo panel: an accordion of vertically stacked expansion panels inside a
// scroll pane, shown on the left side of a split pane.
public final class MainPanel extends JPanel {
  // private final Box box = Box.createVerticalBox();
  // private final Component glue = Box.createVerticalGlue();
  private MainPanel() {
    super(new BorderLayout());
    // The accordion itself: a vertical Box holding the expansion panels.
    Box accordion = Box.createVerticalBox();
    accordion.setOpaque(true);
    accordion.setBackground(new Color(0xB4_B4_FF));
    accordion.setBorder(BorderFactory.createEmptyBorder(10, 5, 5, 5));
    makeExpansionPanelList().forEach(p -> {
      accordion.add(p);
      accordion.add(Box.createVerticalStrut(5));
    });
    // Trailing glue keeps the panels pinned to the top when collapsed.
    accordion.add(Box.createVerticalGlue());
    JScrollPane scroll = new JScrollPane(accordion);
    scroll.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
    scroll.getVerticalScrollBar().setUnitIncrement(25);
    JSplitPane split = new JSplitPane();
    split.setResizeWeight(.5);
    split.setDividerSize(2);
    split.setLeftComponent(scroll);
    split.setRightComponent(new JLabel("Dummy"));
    add(split);
    setPreferredSize(new Dimension(320, 240));
  }
  // public void addComp(Component comp) {
  //   box.remove(glue);
  //   box.add(Box.createVerticalStrut(5));
  //   box.add(comp);
  //   box.add(glue);
  //   box.revalidate();
  // }
  // Builds the three demo sections, each supplying its own content panel.
  private List<AbstractExpansionPanel> makeExpansionPanelList() {
    return Arrays.asList(
        new AbstractExpansionPanel("System Tasks") {
          @Override public JPanel makePanel() {
            JPanel p = new JPanel(new GridLayout(0, 1));
            Stream.of("1111", "222222")
                .map(JCheckBox::new)
                .forEach(b -> {
                  b.setOpaque(false);
                  p.add(b);
                });
            return p;
          }
        },
        new AbstractExpansionPanel("Other Places") {
          @Override public JPanel makePanel() {
            JPanel p = new JPanel(new GridLayout(0, 1));
            Stream.of("Desktop", "My Network Places", "My Documents", "Shared Documents")
                .map(JLabel::new)
                .forEach(p::add);
            return p;
          }
        },
        new AbstractExpansionPanel("Details") {
          @Override public JPanel makePanel() {
            JPanel p = new JPanel(new GridLayout(0, 1));
            ButtonGroup bg = new ButtonGroup();
            Stream.of("aaa", "bbb", "ccc", "ddd")
                .map(JRadioButton::new)
                .forEach(b -> {
                  // Select the first radio button added to the group.
                  b.setSelected(p.getComponentCount() == 0);
                  b.setOpaque(false);
                  p.add(b);
                  bg.add(b);
                });
            return p;
          }
        }
    );
  }
  public static void main(String[] args) {
    EventQueue.invokeLater(MainPanel::createAndShowGui);
  }
  private static void createAndShowGui() {
    try {
      UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
      ex.printStackTrace();
      Toolkit.getDefaultToolkit().beep();
    }
    JFrame frame = new JFrame("@title@");
    frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
    frame.getContentPane().add(new MainPanel());
    frame.pack();
    frame.setLocationRelativeTo(null);
    frame.setVisible(true);
  }
}
// One collapsible section of the accordion: a clickable header label above a
// content panel supplied by the makePanel() factory method.
abstract class AbstractExpansionPanel extends JPanel {
  private final String title;
  private final JLabel label;
  private final JPanel panel;
  // Subclasses provide the section's content.
  public abstract JPanel makePanel();
  protected AbstractExpansionPanel(String title) {
    super(new BorderLayout());
    this.title = title;
    label = new JLabel("▼ " + title) {
      private final Color bgc = new Color(0xC8_C8_FF);
      @Override protected void paintComponent(Graphics g) {
        // Paint a white-to-light-blue gradient behind the header text.
        Graphics2D g2 = (Graphics2D) g.create();
        // Insets ins = getInsets();
        g2.setPaint(new GradientPaint(50f, 0f, Color.WHITE, getWidth(), getHeight(), bgc));
        g2.fillRect(0, 0, getWidth(), getHeight());
        g2.dispose();
        super.paintComponent(g);
      }
    };
    // Clicking the header toggles the section open/closed.
    label.addMouseListener(new MouseAdapter() {
      @Override public void mousePressed(MouseEvent e) {
        initPanel();
      }
    });
    label.setForeground(Color.BLUE);
    label.setBorder(BorderFactory.createEmptyBorder(2, 5, 2, 2));
    add(label, BorderLayout.NORTH);
    panel = makePanel();
    panel.setVisible(false);
    panel.setOpaque(true);
    panel.setBackground(new Color(0xF0_F0_FF));
    Border outBorder = BorderFactory.createMatteBorder(0, 2, 2, 2, Color.WHITE);
    Border inBorder = BorderFactory.createEmptyBorder(10, 10, 10, 10);
    Border border = BorderFactory.createCompoundBorder(outBorder, inBorder);
    panel.setBorder(border);
    add(panel);
  }
  // Preferred height covers the header plus the content when expanded.
  @Override public Dimension getPreferredSize() {
    Dimension d = label.getPreferredSize();
    if (panel.isVisible()) {
      d.height += panel.getPreferredSize().height;
    }
    return d;
  }
  // Allow the panel to stretch horizontally but not vertically.
  @Override public Dimension getMaximumSize() {
    Dimension d = getPreferredSize();
    d.width = Short.MAX_VALUE;
    return d;
  }
  // Toggles the content's visibility and updates the header arrow glyph.
  protected void initPanel() {
    panel.setVisible(!panel.isVisible());
    label.setText(String.format("%s %s", panel.isVisible() ? "△" : "▼", title));
    revalidate();
    // fireExpansionEvent();
    EventQueue.invokeLater(() -> panel.scrollRectToVisible(panel.getBounds()));
  }
  // protected Vector<ExpansionListener> expansionListenerList = new Vector<>();
  // public void addExpansionListener(ExpansionListener listener) {
  //   if (!expansionListenerList.contains(listener)) {
  //     expansionListenerList.add(listener);
  //   }
  // }
  // public void removeExpansionListener(ExpansionListener listener) {
  //   expansionListenerList.remove(listener);
  // }
  // public void fireExpansionEvent() {
  //   Vector list = (Vector) expansionListenerList.clone();
  //   Enumeration enm = list.elements();
  //   ExpansionEvent e = new ExpansionEvent(this);
  //   while (enm.hasMoreElements()) {
  //     ExpansionListener listener = (ExpansionListener) enm.nextElement();
  //     listener.expansionStateChanged(e);
  //   }
  // }
}
// class ExpansionEvent extends EventObject {
// protected ExpansionEvent(Object source) {
// super(source);
// }
// }
//
// interface ExpansionListener {
// void expansionStateChanged(ExpansionEvent e);
// }
| 2,627 |
3,285 | """
Copyright 2013 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import math
import pylab
from cvxpy import Minimize, Problem, Variable, geo_mean, vstack
# Based on http://cvxopt.org/examples/book/floorplan.html
class Box:
    """ A box in a floor packing problem. """
    # Maximum aspect ratio (width/height or height/width) allowed for a box.
    ASPECT_RATIO = 5.0
    def __init__(self, min_area) -> None:
        # Minimum area the solved box must cover.
        self.min_area = min_area
        # CVXPY decision variables: size and lower-left position of the box.
        self.height = Variable()
        self.width = Variable()
        self.x = Variable()
        self.y = Variable()
    @property
    def position(self):
        """Lower-left corner (x, y) after solving, rounded to 2 decimals."""
        return (round(self.x.value,2), round(self.y.value,2))
    @property
    def size(self):
        """(width, height) after solving, rounded to 2 decimals."""
        return (round(self.width.value,2), round(self.height.value,2))
    @property
    def left(self):
        return self.x
    @property
    def right(self):
        return self.x + self.width
    @property
    def bottom(self):
        return self.y
    @property
    def top(self):
        return self.y + self.height
class FloorPlan:
    """ A minimum perimeter floor plan.

    Holds the boxes to lay out plus optional horizontal/vertical ordering
    constraints, and solves for the placement that minimizes the perimeter
    of the bounding rectangle.
    """
    # Minimum spacing between boxes and between a box and the bounding frame.
    MARGIN = 1.0
    # NOTE(review): appears unused here — the aspect-ratio constraints in
    # layout() read box.ASPECT_RATIO; confirm before removing.
    ASPECT_RATIO = 5.0
    def __init__(self, boxes) -> None:
        self.boxes = boxes
        # Decision variables for the bounding rectangle.
        self.height = Variable()
        self.width = Variable()
        # Sequences of boxes that must be ordered left-to-right /
        # bottom-to-top respectively.
        self.horizontal_orderings = []
        self.vertical_orderings = []
    @property
    def size(self):
        """(width, height) of the solved floor plan, rounded to 2 decimals."""
        return (round(self.width.value,2), round(self.height.value,2))
    # Return constraints for the ordering.
    @staticmethod
    def _order(boxes, horizontal):
        """Return spacing constraints between consecutive boxes.

        Always returns a list. (The previous version returned None for an
        empty sequence, which made ``constraints += self._order(...)`` in
        ``layout`` raise TypeError.)
        """
        constraints = []
        if len(boxes) == 0:
            return constraints
        curr = boxes[0]
        for box in boxes[1:]:
            if horizontal:
                constraints.append(curr.right + FloorPlan.MARGIN <= box.left)
            else:
                constraints.append(curr.top + FloorPlan.MARGIN <= box.bottom)
            curr = box
        return constraints
    # Compute minimum perimeter layout.
    def layout(self):
        """Solve for the minimum-perimeter layout; returns the optimal value."""
        constraints = []
        for box in self.boxes:
            # Enforce that boxes lie in bounding box.
            constraints += [box.bottom >= FloorPlan.MARGIN,
                            box.top + FloorPlan.MARGIN <= self.height]
            constraints += [box.left >= FloorPlan.MARGIN,
                            box.right + FloorPlan.MARGIN <= self.width]
            # Enforce aspect ratios.
            constraints += [(1/box.ASPECT_RATIO)*box.height <= box.width,
                            box.width <= box.ASPECT_RATIO*box.height]
            # Enforce minimum area
            constraints += [
                geo_mean(vstack([box.width, box.height])) >= math.sqrt(box.min_area)
            ]
        # Enforce the relative ordering of the boxes.
        for ordering in self.horizontal_orderings:
            constraints += self._order(ordering, True)
        for ordering in self.vertical_orderings:
            constraints += self._order(ordering, False)
        p = Problem(Minimize(2*(self.height + self.width)), constraints)
        return p.solve()
    # Show the layout with matplotlib
    def show(self):
        """Render the solved layout; call only after layout()."""
        pylab.figure(facecolor='w')
        for k in range(len(self.boxes)):
            box = self.boxes[k]
            x,y = box.position
            w,h = box.size
            pylab.fill([x, x, x + w, x + w],
                       [y, y+h, y+h, y],
                       facecolor = '#D0D0D0')
            pylab.text(x+.5*w, y+.5*h, "%d" %(k+1))
        x,y = self.size
        pylab.axis([0, x, 0, y])
        pylab.xticks([])
        pylab.yticks([])
        pylab.show()
# Five rooms: one large (min area 180) and four smaller ones (min area 80).
boxes = [Box(180), Box(80), Box(80), Box(80), Box(80)]
fp = FloorPlan(boxes)
# Left-to-right ordering constraints between rooms.
fp.horizontal_orderings.append( [boxes[0], boxes[2], boxes[4]] )
fp.horizontal_orderings.append( [boxes[1], boxes[2]] )
fp.horizontal_orderings.append( [boxes[3], boxes[4]] )
# Bottom-to-top ordering constraints between rooms.
fp.vertical_orderings.append( [boxes[1], boxes[0], boxes[3]] )
fp.vertical_orderings.append( [boxes[2], boxes[3]] )
# Solve, then display the resulting layout.
fp.layout()
fp.show()
| 1,949 |
325 | package com.box.l10n.mojito.service.repository.statistics;
import com.box.l10n.mojito.entity.Repository;
import com.box.l10n.mojito.quartz.QuartzJobInfo;
import com.box.l10n.mojito.quartz.QuartzPollableTaskScheduler;
import com.box.l10n.mojito.service.repository.statistics.RepositoryStatisticsJob;
import com.box.l10n.mojito.service.repository.statistics.RepositoryStatisticsJobInput;
import com.google.common.base.Preconditions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class RepositoryStatisticsJobScheduler {

    @Autowired
    QuartzPollableTaskScheduler quartzPollableTaskScheduler;

    /**
     * Enqueues a repository-statistics recomputation job for the given
     * repository. The repository id doubles as the job's unique id, so at
     * most one job per repository is scheduled at a time.
     *
     * @param repositoryId id of the repository to recompute statistics for (must not be null)
     */
    public void schedule(Long repositoryId) {
        Preconditions.checkNotNull(repositoryId);

        RepositoryStatisticsJobInput jobInput = new RepositoryStatisticsJobInput();
        jobInput.setRepositoryId(repositoryId);

        quartzPollableTaskScheduler.scheduleJob(
                QuartzJobInfo.newBuilder(RepositoryStatisticsJob.class)
                        .withUniqueId(String.valueOf(repositoryId))
                        .withInput(jobInput)
                        .build());
    }
}
| 439 |
666 | <gh_stars>100-1000
/**
*
* Copyright 2018 iQIYI.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
//
// QIYINativeViewCombine.h
// QIYIMiniProgram
//
// Created by <NAME> on 08/03/2018.
// Copyright © 2018 www.iqiyi.com. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "QIYINativeViewProtocal.h"
// Bundles a native view with its layout metadata and callback.
@interface QIYINativeViewCombine : NSObject
// The wrapped native view; must conform to QIYINativeViewProtocal.
@property(nonatomic, strong, readwrite) UIView <QIYINativeViewProtocal> * view;
// NOTE(review): presumably marks the view as floating above scrolling
// content — confirm against the runtime that reads this flag.
@property(nonatomic, assign, readwrite) BOOL isHover;
// Frame rectangle for the native view.
@property(nonatomic, assign, readwrite) CGRect rect;
// Callback block invoked by the native view (type bNativeCallkback).
@property (nonatomic, copy) bNativeCallkback nativeViewCallback;
// Initializer; `type` selects the kind of native view — confirm valid values
// against the implementation.
-(instancetype) init:(NSString *)type;
@end
| 368 |
1,020 | <filename>javadoc-taglet/src/main/java/org/robobinding/doctaglet/PropTaglet.java
package org.robobinding.doctaglet;
import java.text.MessageFormat;
import java.util.Map;
import com.sun.javadoc.Doc;
import com.sun.javadoc.Tag;
import com.sun.tools.doclets.formats.html.TagletOutputImpl;
import com.sun.tools.doclets.formats.html.TagletWriterImpl;
import com.sun.tools.doclets.internal.toolkit.taglets.Taglet;
import com.sun.tools.doclets.internal.toolkit.taglets.TagletOutput;
import com.sun.tools.doclets.internal.toolkit.taglets.TagletWriter;
/**
*
* @since 1.0
* @version $Revision: 1.0 $
* @author <NAME>
*/
public class PropTaglet implements Taglet {
    private static final String NAME = "prop";
    private PropParser parser;
    private final InlineLinkTagTranslator inlineLinkTagTranslator;

    public PropTaglet(PropParser parser, InlineLinkTagTranslator inlineLinkTagTranslator) {
        this.parser = parser;
        this.inlineLinkTagTranslator = inlineLinkTagTranslator;
    }

    public String getName() {
        return NAME;
    }

    /**
     * Renders all {@code @prop} tags of {@code doc} as a single HTML table
     * (name, supported types, two-way support), or returns {@code null} when
     * the doc element carries no such tags.
     */
    public TagletOutput getTagletOutput(Doc doc, TagletWriter tagletWriter) throws IllegalArgumentException {
        Tag[] tags = doc.tags(NAME);
        if (tags.length == 0) {
            return null;
        }
        TagletWriterImpl tagletWriterImpl = (TagletWriterImpl) tagletWriter;
        StringBuilder sb = new StringBuilder();
        sb.append("<table style=\"border: 2px solid gray\"><caption style=\"text-align:left\"><strong>Supported Properties</strong></caption><tr style=\"background-color:#555555; color: #ffffff\"><th>Name</th><th>Supported types</th><th>Two-ways?</th></tr>");
        for (int i = 0; i < tags.length; i++) {
            Tag tag = tags[i];
            Prop prop = parser.parse(tag);
            // Alternate row background colors for readability.
            sb.append(MessageFormat.format("<tr style=\"background-color:{0}\"><td><strong>{1}</strong></td><td>{2}</td><td>{3}</td></tr>",
                    (i % 2 == 0) ? "#eeeeef" : "#ffffff",
                    prop.getName(),
                    inlineLinkTagTranslator.translate(prop.getSupportedTypes(), tag, doc, tagletWriterImpl),
                    prop.getSupportedBindingTypes()));
        }
        // "</br>" is not a valid HTML tag; emit a proper line break instead.
        sb.append("</table><br>");
        return new TagletOutputImpl(sb.toString());
    }

    /**
     * Registers this taglet with the doclet, replacing any previously
     * registered taglet with the same name.
     */
    public static void register(Map<String, Taglet> tagletMap) {
        if (tagletMap.containsKey(NAME)) {
            tagletMap.remove(NAME);
        }
        PropTaglet target = new PropTaglet(new PropParser(), new InlineLinkTagTranslator());
        tagletMap.put(target.getName(), target);
    }

    public boolean inPackage() {
        return true;
    }

    public boolean inConstructor() {
        return false;
    }

    public boolean inField() {
        return false;
    }

    public boolean inMethod() {
        return false;
    }

    public boolean inOverview() {
        return false;
    }

    public boolean inType() {
        return false;
    }

    public boolean isInlineTag() {
        return false;
    }

    /** Inline form is unsupported; {@code @prop} is a block tag only. */
    public TagletOutput getTagletOutput(Tag arg0, TagletWriter arg1) throws IllegalArgumentException {
        throw new UnsupportedOperationException();
    }
}
| 1,204 |
458 | /*
* #%L
* GwtMaterial
* %%
* Copyright (C) 2015 - 2017 GwtMaterialDesign
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package gwt.material.design.client.ui;
import com.google.gwt.event.shared.HasHandlers;
import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.ValueBoxBase;
import gwt.material.design.client.constants.Autocomplete;
import gwt.material.design.client.constants.StatusDisplayType;
import gwt.material.design.client.ui.base.AbstractValueWidgetTest;
/**
* Test case for ValueBox.
*
* @author kevzlou7979
*/
public abstract class MaterialValueBoxTest<T extends MaterialValueBox> extends AbstractValueWidgetTest<T> {
public abstract void testValue();
public void testFieldErrorSuccess() {
// given
MaterialValueBox widget = getWidget();
// when / then
checkFieldErrorSuccess(widget, widget.getErrorLabel(), widget.getValueBoxBase(), widget.getLabelWidget());
}
public void testPlaceholder() {
// given
T widget = getWidget();
checkPlaceholder(widget);
}
public void testIcon() {
// given
T widget = getWidget();
checkFieldIcon(widget);
}
public void testReadOnly() {
// given
T widget = getWidget();
checkReadOnly(widget);
}
public void testValueReturnAsNull() {
T widget = getWidget(false);
if (widget.getValue() instanceof String) {
checkValueReturnAsNull(widget);
attachWidget();
checkValueReturnAsNull(widget);
}
}
public void testAutocomplete() {
T widget = getWidget();
checkAutocomplete(widget);
}
public void testMandatoryField() {
T widget = getWidget(false);
checkMandatoryField(widget);
attachWidget();
checkMandatoryField(widget);
}
public void testStatusDisplayType() {
T widget = getWidget(false);
assertEquals(null, widget.getStatusDisplayType());
widget.setStatusDisplayType(StatusDisplayType.DEFAULT);
assertEquals(StatusDisplayType.DEFAULT, widget.getStatusDisplayType());
assertTrue(widget.getElement().getClassName().isEmpty());
widget.setStatusDisplayType(StatusDisplayType.HOVERABLE);
assertEquals(StatusDisplayType.HOVERABLE, widget.getStatusDisplayType());
assertTrue(widget.getElement().hasClassName(StatusDisplayType.HOVERABLE.getCssName()));
}
protected <W extends MaterialValueBox> void checkMandatoryField(W widget) {
String REQUIRED = "required";
assertFalse(widget.isRequired());
widget.setRequired(true);
assertTrue(widget.isRequired());
assertTrue(widget.getStatusTextMixin().getPlaceholder().getElement().hasClassName(REQUIRED));
assertTrue(widget.isValidateOnBlur());
assertFalse(widget.isAllowBlank());
widget.setRequired(false);
assertFalse(widget.isRequired());
assertFalse(widget.getStatusTextMixin().getPlaceholder().getElement().hasClassName(REQUIRED));
assertFalse(widget.isValidateOnBlur());
assertTrue(widget.isAllowBlank());
}
protected <W extends MaterialValueBox> void checkAutocomplete(W widget) {
ValueBoxBase valueBoxBase = widget.getValueBoxBase();
widget.setAutocomplete(Autocomplete.ON);
assertEquals(Autocomplete.ON, widget.getAutocomplete());
assertEquals(valueBoxBase.getElement().getAttribute("autocomplete"), "on");
widget.setAutocomplete(Autocomplete.OFF);
assertEquals(Autocomplete.OFF, widget.getAutocomplete());
assertEquals(valueBoxBase.getElement().getAttribute("autocomplete"), "off");
widget.setAutocomplete(Autocomplete.NEW_PASSWORD);
assertEquals(Autocomplete.NEW_PASSWORD, widget.getAutocomplete());
assertEquals(valueBoxBase.getElement().getAttribute("autocomplete"), "new-password");
valueBoxBase.getElement().removeAttribute("autocomplete");
}
protected void checkValueReturnAsNull(T widget) {
assertTrue(widget.isBlank());
assertFalse(widget.isReturnBlankAsNull());
widget.setReturnBlankAsNull(true);
assertTrue(widget.isReturnBlankAsNull());
widget.setValue("");
assertEquals(null, widget.getValue());
widget.setReturnBlankAsNull(false);
widget.setValue("");
assertEquals("", widget.getValue());
assertNotNull(widget.getValue());
}
@Override
public void testTabIndex() {
ValueBoxBase widget = getWidget().getValueBoxBase();
final int INITIAL_TAB_INDEX = 0;
final int FINAL_TAB_INDEX = 1;
// when / then
widget.setTabIndex(INITIAL_TAB_INDEX);
assertEquals(INITIAL_TAB_INDEX, widget.getTabIndex());
assertEquals(String.valueOf(INITIAL_TAB_INDEX), widget.getElement().getPropertyString("tabIndex"));
// when / then
widget.setTabIndex(FINAL_TAB_INDEX);
assertEquals(FINAL_TAB_INDEX, widget.getTabIndex());
assertEquals(String.valueOf(FINAL_TAB_INDEX), widget.getElement().getPropertyString("tabIndex"));
}
@Override
public void fireBlurEvent(HasHandlers widget) {
super.fireBlurEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireFocusEvent(HasHandlers widget) {
super.fireFocusEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireClickEvent(HasHandlers widget) {
super.fireClickEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireDoubleClickEvent(HasHandlers widget) {
super.fireDoubleClickEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireKeyDownEvent(HasHandlers widget) {
super.fireKeyDownEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireKeyUpEvent(HasHandlers widget) {
super.fireKeyUpEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireKeyPressEvent(HasHandlers widget) {
super.fireKeyPressEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireGestureStartEvent(HasHandlers widget) {
super.fireGestureStartEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireGestureChangeEvent(HasHandlers widget) {
super.fireGestureChangeEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireGestureEndEvent(HasHandlers widget) {
super.fireGestureEndEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireTouchStartEvent(HasHandlers widget) {
super.fireTouchStartEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireTouchMoveEvent(HasHandlers widget) {
super.fireTouchMoveEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireTouchEndEvent(HasHandlers widget) {
super.fireTouchEndEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireTouchCancelEvent(HasHandlers widget) {
super.fireTouchCancelEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireMouseUpEvent(HasHandlers widget) {
super.fireMouseUpEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireMouseDownEvent(HasHandlers widget) {
super.fireMouseDownEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireMouseMoveEvent(HasHandlers widget) {
super.fireMouseMoveEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public void fireMouseWheelEvent(HasHandlers widget) {
super.fireMouseWheelEvent(((MaterialValueBox) widget).getValueBoxBase());
}
@Override
public <H extends HasHandlers & IsWidget> void fireMouseOverEvent(H widget) {
super.fireMouseOverEvent(((MaterialValueBox) widget).getValueBoxBase());
}
}
| 3,185 |
1,570 | <reponame>pavanv/django-tastypie
from django.conf.urls import include, url

from core.tests.api import Api, NoteResource, UserResource

# Register the test resources on a single tastypie Api instance.
api = Api()
api.register(NoteResource())
api.register(UserResource())

# Expose every registered resource under the /api/ URL prefix.
urlpatterns = [
    url(r'^api/', include(api.urls)),
]
| 98 |
823 | //
// BaseViewController.h
// MierMilitaryNews
//
// Created by 李响 on 15/9/10.
// Copyright (c) 2015年 miercn. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "MierNavigationBar.h"
#import "AppDelegate.h"
#import "UINavigationController+FDFullscreenPopGesture.h"
@interface BaseViewController : UIViewController

/// Shared application delegate.
@property (nonatomic , strong ) AppDelegate *appdelegate;
/// The application's main window.
@property (nonatomic , strong ) UIWindow *mainWindow;

/**
 * @brief Called by each page to configure its navigation bar.
 */
- (void)configNavigationBar;

/**
 * @brief Applies the current theme.
 */
- (void)configTheme;

@end
| 243 |
/* For use with the objc_property.m PCH test */
@interface TestProperties
{
    int value;        /* backing ivar for the `value` property */
    float percentage; /* backing ivar for the `percentage` property */
}
/* Bare declaration: implicit `id` return type. */
+ alloc;
@property int value;
@property float percentage;
@end
| 57 |
845 | <reponame>bitbrain-za/Open-Home-Automation
///////////////////////////////////////////////////////////////////////////
// CONFIGURATION - HARDWARE
///////////////////////////////////////////////////////////////////////////

// Door sensor
// NOTE(review): D3 is also assigned to SHT_SDA_PIN below while both features
// are enabled — confirm the intended wiring before enabling both.
#define DOOR_SENSOR D3
#if defined(DOOR_SENSOR)
#define DOOR_SENSOR_NAME "door" // used for the MQTT topic
#endif

// Motion sensor
// - AM312
// - RCWL 0516
#define MOTION_SENSOR
#if defined(MOTION_SENSOR)
#define MOTION_SENSOR_NAME "motion" // used for the MQTT topic
#define MOTION_SENSOR_PIN D7
#endif

// Ambient light sensor
// - Photoresistor
// - TEMT6000 sensor
// Do not add a 10 kOhms resistor if you're using the TEMT6000
// sensor (already present on the PCB)
//#define LDR_SENSOR
#define LDR_SENSOR
#if defined(LDR_SENSOR)
#define LDR_SENSOR_NAME "lux" // used for the MQTT topic
#define LDR_OFFSET_VALUE 25
#define LDR_MEASURE_INTERVAL 15000 // [ms]
#define REFERENCE_VOLTAGE 3.3 // [v]
#define ADC_PRECISION 1024.0 // 10 bits
#define LDR_RESISTOR_VALUE 10000.0 // [Ohms]
#define LDR_PIN A0
#endif

// Temperature and humidity sensor (DHT22)
// NOTE(review): if re-enabled, DHT_PIN (D2) collides with SHT_SCL_PIN (D2).
//#define DHT_SENSOR
#if defined(DHT_SENSOR)
#define DHT_TEMPERATURE_SENSOR_NAME "temperature"
#define DHT_HUMIDITY_SENSOR_NAME "humidity"
#define DHT_TEMPERATURE_OFFSET_VALUE 0.2 // [°C]
#define DHT_HUMIDITY_OFFSET_VALUE 0.5 // [%]
#define DHT_MEASURE_INTERVAL 30000 // [ms]
#define DHT_PIN D2
#endif

// Temperature and humidity sensor (Sensirion SHT3X)
#define SHT_SENSOR
#if defined(SHT_SENSOR)
#define SHT_TEMPERATURE_SENSOR_NAME "temperature"
#define SHT_HUMIDITY_SENSOR_NAME "humidity"
#define SHT_TEMPERATURE_OFFSET_VALUE 0.2 // [°C]
#define SHT_HUMIDITY_OFFSET_VALUE 0.5 // [%]
#define SHT_MEASURE_INTERVAL 30000 // [ms]
#define SHT_SDA_PIN D3
#define SHT_SCL_PIN D2
#endif

// Button
#define BUTTON_SENSOR D1
#if defined(BUTTON_SENSOR)
#define BUTTON_SENSOR_NAME "button" // used for the MQTT topic
#endif

///////////////////////////////////////////////////////////////////////////
// CONFIGURATION - SOFTWARE
///////////////////////////////////////////////////////////////////////////

// Debug output
#define DEBUG_SERIAL

// Wi-Fi credentials
#define WIFI_SSID ""
#define WIFI_PASSWORD ""

// Over-the-Air update
#define OTA
#define OTA_HOSTNAME "MultiSensor" // hostname esp8266-[ChipID] by default
//#define OTA_PASSWORD "password" // no password by default
//#define OTA_PORT 8266 // port 8266 by default

// MQTT
#define MQTT_USERNAME ""
#define MQTT_PASSWORD ""
#define MQTT_SERVER ""
#define MQTT_SERVER_PORT 1883
#define MQTT_CONNECTION_TIMEOUT 5000 // [ms]
#define MQTT_AVAILABILITY_TOPIC_TEMPLATE "%s/status" // MQTT availability: online/offline
#define MQTT_SENSOR_TOPIC_TEMPLATE "%s/sensor/%s"
#define MQTT_PAYLOAD_ON "ON"
#define MQTT_PAYLOAD_OFF "OFF"
| 1,394 |
5,836 | //
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import <UIKit/UIKit.h>
#if !defined(__IPHONE_12_0) || __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_12_0
// Compiled out when targeting iOS 12+ — NOTE(review): presumably because
// UIWebView is deprecated there; confirm against the project's SDK settings.

#import <EarlGrey/GREYIdlingResource.h>

NS_ASSUME_NONNULL_BEGIN

/**
 *  An idling resource used for detecting when a UIWebView has loaded all the required resources,
 *  rendered, and generated its accessibility element tree.
 */
@interface GREYUIWebViewIdlingResource : NSObject<GREYIdlingResource>

/**
 *  Creates and registers an idling resource before returning the created instance.
 *
 *  @param webView The UIWebView being tracked.
 *  @param name    The name used to identify this idling resource.

 *  @return instance of GREYUIWebViewIdlingResource for the provided @c webView.
 */
+ (instancetype)idlingResourceForWebView:(UIWebView *)webView name:(NSString *)name;

/**
 *  @remark init is not an available initializer. Use the other initializers.
 */
- (instancetype)init NS_UNAVAILABLE;

@end

NS_ASSUME_NONNULL_END

#endif  // !defined(__IPHONE_12_0) || __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_12_0
| 502 |
371 | import os
import sys
import traceback
from configparser import ConfigParser
from contextlib import contextmanager
from pathlib import Path
from typing import (
Any,
Dict,
Iterator,
List,
Mapping,
NamedTuple,
Optional,
Set,
Tuple,
Union,
)
import tomli
from packaging.specifiers import SpecifierSet
from .architecture import Architecture
from .environment import EnvironmentParseError, ParsedEnvironment, parse_environment
from .projectfiles import get_requires_python_str
from .typing import PLATFORMS, Literal, PlatformName, TypedDict
from .util import (
MANYLINUX_ARCHS,
MUSLLINUX_ARCHS,
BuildFrontend,
BuildSelector,
DependencyConstraints,
TestSelector,
resources_dir,
selector_matches,
strtobool,
unwrap,
)
class CommandLineArguments:
    """Typed attribute declarations for the parsed command-line arguments.

    Instances are populated elsewhere (e.g. by the argument parser); this
    class only declares the attribute names and their types.
    """

    platform: Literal["auto", "linux", "macos", "windows"]
    archs: Optional[str]
    output_dir: Optional[str]  # falls back to CIBW_OUTPUT_DIR / "wheelhouse"
    config_file: str
    package_dir: str
    print_build_identifiers: bool
    allow_empty: bool
    prerelease_pythons: bool
class GlobalOptions(NamedTuple):
    """Options that apply to the whole run rather than one build identifier."""

    package_dir: Path
    output_dir: Path
    build_selector: BuildSelector
    test_selector: TestSelector
    architectures: Set[Architecture]
class BuildOptions(NamedTuple):
    """Per-build-identifier configuration, bundled with the shared globals."""

    globals: GlobalOptions
    environment: ParsedEnvironment
    before_all: str
    before_build: Optional[str]
    repair_command: str
    manylinux_images: Optional[Dict[str, str]]  # only set on linux
    musllinux_images: Optional[Dict[str, str]]  # only set on linux
    dependency_constraints: Optional[DependencyConstraints]
    test_command: Optional[str]
    before_test: Optional[str]
    test_requires: List[str]
    test_extras: str
    build_verbosity: int
    build_frontend: BuildFrontend

    # Convenience pass-throughs to the shared GlobalOptions.

    @property
    def package_dir(self) -> Path:
        return self.globals.package_dir

    @property
    def output_dir(self) -> Path:
        return self.globals.output_dir

    @property
    def build_selector(self) -> BuildSelector:
        return self.globals.build_selector

    @property
    def test_selector(self) -> TestSelector:
        return self.globals.test_selector

    @property
    def architectures(self) -> Set[Architecture]:
        return self.globals.architectures
# A raw option value as read from configuration: TOML table, array or string.
Setting = Union[Dict[str, str], List[str], str]


class Override(NamedTuple):
    # One [[tool.cibuildwheel.overrides]] entry: `options` are applied to the
    # build identifiers matching `select_pattern`.
    select_pattern: str
    options: Dict[str, Setting]
# Per-architecture image option names, e.g. "manylinux-x86_64-image".
MANYLINUX_OPTIONS = {f"manylinux-{build_platform}-image" for build_platform in MANYLINUX_ARCHS}
MUSLLINUX_OPTIONS = {f"musllinux-{build_platform}-image" for build_platform in MUSLLINUX_ARCHS}
# Options that are rejected in a platform's config section because they make
# no sense there (e.g. Linux container images on macOS/Windows).
DISALLOWED_OPTIONS = {
    "linux": {"dependency-versions"},
    "macos": MANYLINUX_OPTIONS | MUSLLINUX_OPTIONS,
    "windows": MANYLINUX_OPTIONS | MUSLLINUX_OPTIONS,
}
class TableFmt(TypedDict):
    # Formatting spec for rendering a TOML table option as one string:
    # `item` is a template using {k}/{v}, `sep` joins the rendered items.
    item: str
    sep: str
class ConfigOptionError(KeyError):
    # Raised for unknown or misplaced options in a config file.
    pass
def _dig_first(*pairs: Tuple[Mapping[str, Setting], str], ignore_empty: bool = False) -> Setting:
    """
    Return the first dict item that matches from pairs of dicts and keys.
    Will throw a KeyError if missing.

    _dig_first((dict1, "key1"), (dict2, "key2"), ...)

    When ``ignore_empty`` is true, present-but-empty string values are
    skipped as though they were absent.
    """
    if not pairs:
        raise ValueError("pairs cannot be empty")

    absent = object()  # sentinel distinguishing "missing" from any real value
    for mapping, key in pairs:
        candidate = mapping.get(key, absent)
        if candidate is absent:
            continue
        if ignore_empty and candidate == "":
            continue
        return candidate

    # `key` is the last key tried; mirror dict behaviour by raising KeyError.
    raise KeyError(key)
class OptionsReader:
    """
    Gets options from the environment, config or defaults, optionally scoped
    by the platform.

    Example:
      >>> options_reader = OptionsReader(config_file, platform='macos')
      >>> options_reader.get('cool-color')

      This will return the value of CIBW_COOL_COLOR_MACOS if it exists,
      otherwise the value of CIBW_COOL_COLOR, otherwise
      'tool.cibuildwheel.macos.cool-color' or 'tool.cibuildwheel.cool-color'
      from `config_file`, or from cibuildwheel/resources/defaults.toml. An
      error is thrown if there are any unexpected keys or sections in
      tool.cibuildwheel.
    """

    def __init__(
        self,
        config_file_path: Optional[Path] = None,
        *,
        platform: PlatformName,
        disallow: Optional[Dict[str, Set[str]]] = None,
    ) -> None:
        self.platform = platform
        self.disallow = disallow or {}

        # Open defaults.toml, loading both global and platform sections
        defaults_path = resources_dir / "defaults.toml"
        self.default_options, self.default_platform_options = self._load_file(defaults_path)

        # Load the project config file
        config_options: Dict[str, Any] = {}
        config_platform_options: Dict[str, Any] = {}

        if config_file_path is not None:
            config_options, config_platform_options = self._load_file(config_file_path)

        # Validate project config
        for option_name in config_options:
            if not self._is_valid_global_option(option_name):
                raise ConfigOptionError(f'Option "{option_name}" not supported in a config file')

        for option_name in config_platform_options:
            if not self._is_valid_platform_option(option_name):
                raise ConfigOptionError(
                    f'Option "{option_name}" not supported in the "{self.platform}" section'
                )

        self.config_options = config_options
        self.config_platform_options = config_platform_options

        # Overrides are applied on top of the config when the current build
        # identifier matches their "select" pattern; see active_config_overrides.
        self.overrides: List[Override] = []
        self.current_identifier: Optional[str] = None

        config_overrides = self.config_options.get("overrides")
        if config_overrides is not None:
            if not isinstance(config_overrides, list):
                raise ConfigOptionError('"tool.cibuildwheel.overrides" must be a list')

            for config_override in config_overrides:
                select = config_override.pop("select", None)
                if not select:
                    raise ConfigOptionError('"select" must be set in an override')

                if isinstance(select, list):
                    select = " ".join(select)

                self.overrides.append(Override(select, config_override))

    def _is_valid_global_option(self, name: str) -> bool:
        """
        Returns True if an option with this name is allowed in the
        [tool.cibuildwheel] section of a config file.
        """
        allowed_option_names = self.default_options.keys() | PLATFORMS | {"overrides"}

        return name in allowed_option_names

    def _is_valid_platform_option(self, name: str) -> bool:
        """
        Returns True if an option with this name is allowed in the
        [tool.cibuildwheel.<current-platform>] section of a config file.
        """
        disallowed_platform_options = self.disallow.get(self.platform, set())
        if name in disallowed_platform_options:
            return False

        allowed_option_names = self.default_options.keys() | self.default_platform_options.keys()

        return name in allowed_option_names

    def _load_file(self, filename: Path) -> Tuple[Dict[str, Any], Dict[str, Any]]:
        """
        Load a toml file, returns global and platform as separate dicts.
        """
        with filename.open("rb") as f:
            config = tomli.load(f)

        global_options = config.get("tool", {}).get("cibuildwheel", {})
        platform_options = global_options.get(self.platform, {})

        return global_options, platform_options

    @property
    def active_config_overrides(self) -> List[Override]:
        # Only overrides whose "select" pattern matches the current identifier.
        if self.current_identifier is None:
            return []
        return [
            o for o in self.overrides if selector_matches(o.select_pattern, self.current_identifier)
        ]

    @contextmanager
    def identifier(self, identifier: Optional[str]) -> Iterator[None]:
        # Scope subsequent get() calls to a specific build identifier.
        self.current_identifier = identifier
        try:
            yield
        finally:
            self.current_identifier = None

    def get(
        self,
        name: str,
        *,
        env_plat: bool = True,
        sep: Optional[str] = None,
        table: Optional[TableFmt] = None,
        ignore_empty: bool = False,
    ) -> str:
        """
        Get and return the value for the named option from environment,
        configuration file, or the default. If env_plat is False, then don't
        accept platform versions of the environment variable. If this is an
        array it will be merged with "sep" before returning. If it is a table,
        it will be formatted with "table['item']" using {k} and {v} and merged
        with "table['sep']". Empty variables will not override if ignore_empty
        is True.
        """

        if name not in self.default_options and name not in self.default_platform_options:
            raise ConfigOptionError(f"{name} must be in cibuildwheel/resources/defaults.toml file")

        # Environment variable form
        envvar = f"CIBW_{name.upper().replace('-', '_')}"
        plat_envvar = f"{envvar}_{self.platform.upper()}"

        # later overrides take precedence over earlier ones, so reverse the list
        active_config_overrides = reversed(self.active_config_overrides)

        # get the option from the environment, then the config file, then finally the default.
        # platform-specific options are preferred, if they're allowed.
        result = _dig_first(
            (os.environ if env_plat else {}, plat_envvar),  # type: ignore[arg-type]
            (os.environ, envvar),
            *[(o.options, name) for o in active_config_overrides],
            (self.config_platform_options, name),
            (self.config_options, name),
            (self.default_platform_options, name),
            (self.default_options, name),
            ignore_empty=ignore_empty,
        )

        if isinstance(result, dict):
            if table is None:
                raise ConfigOptionError(f"{name} does not accept a table")
            return table["sep"].join(table["item"].format(k=k, v=v) for k, v in result.items())
        elif isinstance(result, list):
            if sep is None:
                raise ConfigOptionError(f"{name} does not accept a list")
            return sep.join(result)
        elif isinstance(result, int):
            return str(result)
        else:
            return result
class Options:
    """Computes global and per-identifier build options from the command line,
    environment variables, the config file and the bundled defaults."""

    def __init__(self, platform: PlatformName, command_line_arguments: CommandLineArguments):
        self.platform = platform
        self.command_line_arguments = command_line_arguments

        self.reader = OptionsReader(
            self.config_file_path,
            platform=platform,
            disallow=DISALLOWED_OPTIONS,
        )

    @property
    def config_file_path(self) -> Optional[Path]:
        """Path of the config file to read, or None when there is none."""
        args = self.command_line_arguments

        if args.config_file:
            return Path(args.config_file.format(package=args.package_dir))

        # return pyproject.toml, if it's available
        pyproject_toml_path = Path(args.package_dir) / "pyproject.toml"
        if pyproject_toml_path.exists():
            return pyproject_toml_path

        return None

    @property
    def package_requires_python_str(self) -> Optional[str]:
        # Cached lazily: reading the project files only happens once.
        if not hasattr(self, "_package_requires_python_str"):
            args = self.command_line_arguments
            self._package_requires_python_str = get_requires_python_str(Path(args.package_dir))
        return self._package_requires_python_str

    @property
    def globals(self) -> GlobalOptions:
        """Options shared by every build in this run."""
        args = self.command_line_arguments
        package_dir = Path(args.package_dir)
        output_dir = Path(
            args.output_dir
            if args.output_dir is not None
            else os.environ.get("CIBW_OUTPUT_DIR", "wheelhouse")
        )

        build_config = self.reader.get("build", env_plat=False, sep=" ") or "*"
        skip_config = self.reader.get("skip", env_plat=False, sep=" ")
        test_skip = self.reader.get("test-skip", env_plat=False, sep=" ")

        prerelease_pythons = args.prerelease_pythons or strtobool(
            os.environ.get("CIBW_PRERELEASE_PYTHONS", "0")
        )

        # This is not supported in tool.cibuildwheel, as it comes from a standard location.
        # Passing this in as an environment variable will override pyproject.toml, setup.cfg, or setup.py
        requires_python_str: Optional[str] = (
            os.environ.get("CIBW_PROJECT_REQUIRES_PYTHON") or self.package_requires_python_str
        )
        requires_python = None if requires_python_str is None else SpecifierSet(requires_python_str)

        build_selector = BuildSelector(
            build_config=build_config,
            skip_config=skip_config,
            requires_python=requires_python,
            prerelease_pythons=prerelease_pythons,
        )
        test_selector = TestSelector(skip_config=test_skip)

        archs_config_str = args.archs or self.reader.get("archs", sep=" ")
        architectures = Architecture.parse_config(archs_config_str, platform=self.platform)

        return GlobalOptions(
            package_dir=package_dir,
            output_dir=output_dir,
            build_selector=build_selector,
            test_selector=test_selector,
            architectures=architectures,
        )

    def build_options(self, identifier: Optional[str]) -> BuildOptions:
        """
        Compute BuildOptions for a single run configuration.
        """

        with self.reader.identifier(identifier):
            before_all = self.reader.get("before-all", sep=" && ")

            build_frontend_str = self.reader.get("build-frontend", env_plat=False)
            environment_config = self.reader.get(
                "environment", table={"item": '{k}="{v}"', "sep": " "}
            )
            environment_pass = self.reader.get("environment-pass", sep=" ").split()
            before_build = self.reader.get("before-build", sep=" && ")
            repair_command = self.reader.get("repair-wheel-command", sep=" && ")

            dependency_versions = self.reader.get("dependency-versions")
            test_command = self.reader.get("test-command", sep=" && ")
            before_test = self.reader.get("before-test", sep=" && ")
            test_requires = self.reader.get("test-requires", sep=" ").split()
            test_extras = self.reader.get("test-extras", sep=",")
            build_verbosity_str = self.reader.get("build-verbosity")

            build_frontend: BuildFrontend
            if build_frontend_str == "build":
                build_frontend = "build"
            elif build_frontend_str == "pip":
                build_frontend = "pip"
            else:
                msg = f"cibuildwheel: Unrecognised build frontend '{build_frontend_str}', only 'pip' and 'build' are supported"
                print(msg, file=sys.stderr)
                sys.exit(2)

            try:
                environment = parse_environment(environment_config)
            except (EnvironmentParseError, ValueError):
                print(
                    f'cibuildwheel: Malformed environment option "{environment_config}"',
                    file=sys.stderr,
                )
                traceback.print_exc(None, sys.stderr)
                sys.exit(2)

            # Pass through environment variables
            if self.platform == "linux":
                for env_var_name in environment_pass:
                    try:
                        environment.add(env_var_name, os.environ[env_var_name])
                    except KeyError:
                        pass

            if dependency_versions == "pinned":
                dependency_constraints: Optional[
                    DependencyConstraints
                ] = DependencyConstraints.with_defaults()
            elif dependency_versions == "latest":
                dependency_constraints = None
            else:
                dependency_versions_path = Path(dependency_versions)
                dependency_constraints = DependencyConstraints(dependency_versions_path)

            if test_extras:
                test_extras = f"[{test_extras}]"

            try:
                build_verbosity = min(3, max(-3, int(build_verbosity_str)))
            except ValueError:
                build_verbosity = 0

            manylinux_images: Dict[str, str] = {}
            musllinux_images: Dict[str, str] = {}
            if self.platform == "linux":
                all_pinned_docker_images = _get_pinned_docker_images()

                for build_platform in MANYLINUX_ARCHS:
                    pinned_images = all_pinned_docker_images[build_platform]

                    config_value = self.reader.get(
                        f"manylinux-{build_platform}-image", ignore_empty=True
                    )

                    if not config_value:
                        # default to manylinux2014
                        image = pinned_images.get("manylinux2014")
                    elif config_value in pinned_images:
                        image = pinned_images[config_value]
                    else:
                        image = config_value

                    assert image is not None
                    manylinux_images[build_platform] = image

                for build_platform in MUSLLINUX_ARCHS:
                    pinned_images = all_pinned_docker_images[build_platform]

                    # OptionsReader.get() is annotated `-> str` and never
                    # returns None, so the previous `if config_value is None`
                    # check could never fire and an empty value fell through
                    # to `image = ""`. Handle empty values the same way as the
                    # manylinux branch above.
                    config_value = self.reader.get(
                        f"musllinux-{build_platform}-image", ignore_empty=True
                    )

                    if not config_value:
                        # default to musllinux_1_1
                        image = pinned_images["musllinux_1_1"]
                    elif config_value in pinned_images:
                        image = pinned_images[config_value]
                    else:
                        image = config_value

                    musllinux_images[build_platform] = image

            return BuildOptions(
                globals=self.globals,
                test_command=test_command,
                test_requires=test_requires,
                test_extras=test_extras,
                before_test=before_test,
                before_build=before_build,
                before_all=before_all,
                build_verbosity=build_verbosity,
                repair_command=repair_command,
                environment=environment,
                dependency_constraints=dependency_constraints,
                manylinux_images=manylinux_images or None,
                musllinux_images=musllinux_images or None,
                build_frontend=build_frontend,
            )

    def check_for_invalid_configuration(self, identifiers: List[str]) -> None:
        """Reject option combinations that cannot work on this platform."""
        if self.platform in ["macos", "windows"]:
            before_all_values = {self.build_options(i).before_all for i in identifiers}

            if len(before_all_values) > 1:
                raise ValueError(
                    unwrap(
                        f"""
                        before_all cannot be set to multiple values. On macOS and Windows,
                        before_all is only run once, at the start of the build. before_all values
                        are: {before_all_values!r}
                        """
                    )
                )

    def check_for_deprecated_options(self) -> None:
        """Warn about (or reject) selector syntax from the 1.x series."""
        build_selector = self.globals.build_selector
        test_selector = self.globals.test_selector

        deprecated_selectors("CIBW_BUILD", build_selector.build_config, error=True)
        deprecated_selectors("CIBW_SKIP", build_selector.skip_config)
        deprecated_selectors("CIBW_TEST_SKIP", test_selector.skip_config)

    def summary(self, identifiers: List[str]) -> str:
        """Human-readable dump of all options, including per-identifier overrides."""
        lines = [
            f"{option_name}: {option_value!r}"
            for option_name, option_value in sorted(self.globals._asdict().items())
        ]

        build_option_defaults = self.build_options(identifier=None)

        for option_name, default_value in sorted(build_option_defaults._asdict().items()):
            if option_name == "globals":
                continue

            lines.append(f"{option_name}: {default_value!r}")

            # if any identifiers have an overridden value, print that too
            for identifier in identifiers:
                option_value = self.build_options(identifier=identifier)._asdict()[option_name]
                if option_value != default_value:
                    lines.append(f"  {identifier}: {option_value!r}")

        return "\n".join(lines)
def compute_options(
    platform: PlatformName,
    command_line_arguments: CommandLineArguments,
) -> Options:
    """Build an Options object and flag deprecated selector usage up front."""
    options = Options(platform=platform, command_line_arguments=command_line_arguments)
    options.check_for_deprecated_options()
    return options
# Module-level cache for the parsed pinned-images config; read at most once.
_all_pinned_docker_images: Optional[ConfigParser] = None


def _get_pinned_docker_images() -> Mapping[str, Mapping[str, str]]:
    """
    This looks like a dict of dicts, e.g.
    { 'x86_64': {'manylinux1': '...', 'manylinux2010': '...', 'manylinux2014': '...'},
      'i686': {'manylinux1': '...', 'manylinux2010': '...', 'manylinux2014': '...'},
      'pypy_x86_64': {'manylinux2010': '...' }
      ... }
    """
    global _all_pinned_docker_images

    if _all_pinned_docker_images is None:
        pinned_docker_images_file = resources_dir / "pinned_docker_images.cfg"
        _all_pinned_docker_images = ConfigParser()
        _all_pinned_docker_images.read(pinned_docker_images_file)

    return _all_pinned_docker_images
def deprecated_selectors(name: str, selector: str, *, error: bool = False) -> None:
    """Warn on stderr (or exit with status 4 when ``error`` is set) if
    ``selector`` still uses the legacy "p2"/"p35" Python identifiers."""
    uses_legacy_python = any(legacy in selector for legacy in ("p2", "p35"))
    if not uses_legacy_python:
        return

    msg = f"cibuildwheel 2.x no longer supports Python < 3.6. Please use the 1.x series or update {name}"
    print(msg, file=sys.stderr)
    if error:
        sys.exit(4)
| 9,643 |
2,268 | <filename>sp/src/game/client/fx_interpvalue.h
//========= Copyright Valve Corporation, All rights reserved. ============//
//
// Purpose:
//
//=============================================================================
#ifndef FX_INTERPVALUE_H
#define FX_INTERPVALUE_H
#ifdef _WIN32
#pragma once
#endif
// Types of supported interpolation
enum InterpType_t
{
	INTERP_LINEAR = 0,	// straight-line interpolation between the start and end values
	INTERP_SPLINE,		// spline interpolation between the start and end values
};
// Interpolates a scalar float value from a start value to an end value
// over a [start, end] time window, using the selected InterpType_t curve.
class CInterpolatedValue
{
public:
	CInterpolatedValue( void );
	CInterpolatedValue( float startTime, float endTime, float startValue, float endValue, InterpType_t type );

	// Set the start/end of the time window
	void SetTime( float start, float end );
	// Set the start/end values to interpolate between
	void SetRange( float start, float end );
	// Select the interpolation curve
	void SetType( InterpType_t type );

	// Set the value with no range
	void SetAbsolute( float value );

	// Set the value with range and time supplied
	void Init( float startValue, float endValue, float dt, InterpType_t type = INTERP_LINEAR );

	// Start from the current value and move towards the end value
	void InitFromCurrent( float endValue, float dt, InterpType_t type = INTERP_LINEAR );

	// Find our interpolated value at the given point in time
	float Interp( float curTime );

private:
	float m_flStartTime;	// time at which interpolation starts
	float m_flEndTime;	// time at which interpolation ends

	float m_flStartValue;	// value at m_flStartTime
	float m_flEndValue;	// value at m_flEndTime

	int m_nInterpType;	// InterpType_t stored as an int
};
#endif // FX_INTERPVALUE_H | 421 |
725 | <reponame>vexofp/hyperion<filename>libsrc/utils/jsonschema/JsonSchemaChecker.cpp
// stdlib includes
#include <cassert>
#include <iterator>
#include <sstream>
#include <algorithm>
// Utils-Jsonschema includes
#include <utils/jsonschema/JsonSchemaChecker.h>
JsonSchemaChecker::JsonSchemaChecker()
{
	// empty
}

JsonSchemaChecker::~JsonSchemaChecker()
{
	// empty
}

// Stores the schema used by subsequent validate() calls.
// Always returns true: the schema itself is not checked (see TODO).
bool JsonSchemaChecker::setSchema(const Json::Value & schema)
{
	_schema = schema;

	// TODO: check the schema
	return true;
}
// Validates 'value' against the schema given to setSchema().
// Returns true when no errors were recorded; diagnostic messages
// (errors and warnings) are available through getMessages().
bool JsonSchemaChecker::validate(const Json::Value & value)
{
	// initialize state
	_error = false;
	_messages.clear();
	_currentPath.clear();
	_currentPath.push_back("[root]");
	_references.clear();

	// collect dependencies (fills _references with "$(id)" targets)
	collectDependencies(value, _schema);

	// validate
	validate(value, _schema);

	return !_error;
}
// Walks 'value' alongside 'schema' and records a pointer to every value
// whose schema carries an "id" attribute, keyed as "$(<id>)". These
// entries are resolved later by checkDependencies().
void JsonSchemaChecker::collectDependencies(const Json::Value & value, const Json::Value &schema)
{
	assert (schema.isObject());

	// check if id is present
	if (schema.isMember("id"))
	{
		// store the reference under the key "$(<id>)"
		assert (schema["id"].isString());
		std::ostringstream ref;
		ref << "$(" << schema["id"].asString() << ")";
		_references[ref.str()] = &value;
	}

	// recurse into every schema property that is present in the value
	if (schema.isMember("properties"))
	{
		const Json::Value & properties = schema["properties"];
		assert(properties.isObject());

		for (Json::Value::const_iterator j = properties.begin(); j != properties.end(); ++j)
		{
			std::string property = j.memberName();
			if (value.isMember(property))
			{
				collectDependencies(value[property], properties[property]);
			}
		}
	}
}
// Dispatches each attribute of 'schema' to the matching check* routine
// for 'value'. Unrecognized attributes are reported via setMessage() but
// deliberately do NOT set _error, so they do not fail validation.
void JsonSchemaChecker::validate(const Json::Value & value, const Json::Value &schema)
{
	assert (schema.isObject());

	// check the current json value
	for (Json::Value::const_iterator i = schema.begin(); i != schema.end(); ++i)
	{
		std::string attribute = i.memberName();
		const Json::Value & attributeValue = *i;

		if (attribute == "type")
			checkType(value, attributeValue)
// Records 'message' prefixed with the current location in the JSON tree,
// e.g. "[root].foo[2]: <message>".
void JsonSchemaChecker::setMessage(const std::string & message)
{
	std::ostringstream oss;
	std::copy(_currentPath.begin(), _currentPath.end(), std::ostream_iterator<std::string>(oss, ""));
	oss << ": " << message;
	_messages.push_back(oss.str());
}

// Returns all messages recorded during the most recent validate() run.
const std::list<std::string> & JsonSchemaChecker::getMessages() const
{
	return _messages;
}
// Verifies 'value' against the schema "type" attribute. Note the
// non-standard extras: "double" requires an actual double, "enum"
// requires a string, "any" always passes, and an unrecognized type
// string also passes silently (the assert below is commented out).
void JsonSchemaChecker::checkType(const Json::Value & value, const Json::Value & schema)
{
	assert(schema.isString());

	std::string type = schema.asString();
	bool wrongType = false;
	if (type == "string")
		wrongType = !value.isString();
	else if (type == "number")
		wrongType = !value.isNumeric();
	else if (type == "integer")
		wrongType = !value.isIntegral();
	else if (type == "double")
		wrongType = !value.isDouble();
	else if (type == "boolean")
		wrongType = !value.isBool();
	else if (type == "object")
		wrongType = !value.isObject();
	else if (type == "array")
		wrongType = !value.isArray();
	else if (type == "null")
		wrongType = !value.isNull();
	else if (type == "enum")
		wrongType = !value.isString();
	else if (type == "any")
		wrongType = false;
//	else
//		assert(false);

	if (wrongType)
	{
		_error = true;
		setMessage(type + " expected");
	}
}
// Validates each member of 'value' against its entry in the schema's
// "properties" object, extending the reported path with ".<name>".
// A missing member is an error only when its schema marks it "required".
void JsonSchemaChecker::checkProperties(const Json::Value & value, const Json::Value & schema)
{
	assert(schema.isObject());

	if (!value.isObject())
	{
		_error = true;
		// typo fix: message previously read "properies"
		setMessage("properties attribute is only valid for objects");
		return;
	}

	for (Json::Value::const_iterator i = schema.begin(); i != schema.end(); ++i)
	{
		std::string property = i.memberName();
		const Json::Value & propertyValue = *i;
		assert(propertyValue.isObject());

		_currentPath.push_back(std::string(".") + property);
		if (value.isMember(property))
		{
			validate(value[property], propertyValue);
		}
		else if (propertyValue.get("required", false).asBool())
		{
			_error = true;
			setMessage("missing member");
		}
		_currentPath.pop_back();
	}
}
// Validates members of 'value' that are NOT covered by the "properties"
// attribute (those names arrive in 'ignoredProperties'). 'schema' is
// either the boolean false (extra members are forbidden) or a schema
// object every extra member must satisfy.
void JsonSchemaChecker::checkAdditionalProperties(const Json::Value & value, const Json::Value & schema, const Json::Value::Members & ignoredProperties)
{
	if (!value.isObject())
	{
		_error = true;
		// typo fix: message previously read "properies"
		setMessage("additional properties attribute is only valid for objects");
		return;
	}

	for (Json::Value::const_iterator i = value.begin(); i != value.end(); ++i)
	{
		std::string property = i.memberName();
		if (std::find(ignoredProperties.begin(), ignoredProperties.end(), property) == ignoredProperties.end())
		{
			// property has no property definition. check against the definition for additional properties
			_currentPath.push_back(std::string(".") + property);
			if (schema.isBool())
			{
				if (schema.asBool() == false)
				{
					_error = true;
					setMessage("no schema definition");
				}
			}
			else
			{
				validate(value[property], schema);
			}
			_currentPath.pop_back();
		}
	}
}
// Resolves 'schemaLink' (a "$(id)" reference collected earlier by
// collectDependencies) to a string or array of property names, then
// verifies that 'value' contains each of those members.
void JsonSchemaChecker::checkDependencies(const Json::Value & value, const Json::Value & schemaLink)
{
	if (!value.isObject())
	{
		_error = true;
		setMessage("dependencies attribute is only valid for objects");
		return;
	}

	assert(schemaLink.isString());
	std::map<std::string, const Json::Value *>::iterator iter = _references.find(schemaLink.asString());
	if (iter == _references.end())
	{
		_error = true;
		std::ostringstream oss;
		oss << "reference " << schemaLink.asString() << " could not be resolved";
		setMessage(oss.str());
		return;
	}
	const Json::Value & schema = *(iter->second);

	std::list<std::string> requiredProperties;
	if (schema.isString())
	{
		requiredProperties.push_back(schema.asString());
	}
	else if (schema.isArray())
	{
		for (Json::UInt i = 0; i < schema.size(); ++i)
		{
			assert(schema[i].isString());
			requiredProperties.push_back(schema[i].asString());
		}
	}
	else
	{
		_error = true;
		std::ostringstream oss;
		// typo fix: message previously read "Exepected"
		oss << "Expected reference " << schemaLink.asString() << " to resolve to a string or array";
		setMessage(oss.str());
		return;
	}

	for (std::list<std::string>::const_iterator i = requiredProperties.begin(); i != requiredProperties.end(); ++i)
	{
		if (!value.isMember(*i))
		{
			_error = true;
			std::ostringstream oss;
			oss << "missing member " << *i;
			setMessage(oss.str());
		}
	}
}
// Fails when a numeric value lies below the schema "minimum" bound.
void JsonSchemaChecker::checkMinimum(const Json::Value & value, const Json::Value & schema)
{
	assert(schema.isNumeric());

	// the "minimum" constraint applies to numeric values only
	if (!value.isNumeric())
	{
		_error = true;
		setMessage("minimum check only for numeric fields");
		return;
	}

	const double lowerBound = schema.asDouble();
	if (value.asDouble() < lowerBound)
	{
		_error = true;
		std::ostringstream oss;
		oss << "value is too small (minimum=" << lowerBound << ")";
		setMessage(oss.str());
	}
}
// Fails when a numeric value lies above the schema "maximum" bound.
void JsonSchemaChecker::checkMaximum(const Json::Value & value, const Json::Value & schema)
{
	assert(schema.isNumeric());

	// the "maximum" constraint applies to numeric values only
	if (!value.isNumeric())
	{
		_error = true;
		setMessage("maximum check only for numeric fields");
		return;
	}

	const double upperBound = schema.asDouble();
	if (value.asDouble() > upperBound)
	{
		_error = true;
		std::ostringstream oss;
		oss << "value is too large (maximum=" << upperBound << ")";
		setMessage(oss.str());
	}
}
// Validates every element of an array value against the single "items"
// schema, extending the reported path with the element index "[i]".
void JsonSchemaChecker::checkItems(const Json::Value & value, const Json::Value & schema)
{
	assert(schema.isObject());

	if (!value.isArray())
	{
		// only for arrays
		_error = true;
		setMessage("items only valid for arrays");
		return;
	}

	for(Json::ArrayIndex i = 0; i < value.size(); ++i)
	{
		// validate each item
		std::ostringstream oss;
		oss << "[" << i << "]";
		_currentPath.push_back(oss.str());
		validate(value[i], schema);
		_currentPath.pop_back();
	}
}
// Fails when an array value holds fewer than "minItems" elements.
void JsonSchemaChecker::checkMinItems(const Json::Value & value, const Json::Value & schema)
{
	assert(schema.isIntegral());

	if (!value.isArray())
	{
		// only for arrays
		_error = true;
		setMessage("minItems only valid for arrays");
		return;
	}

	int minimum = schema.asInt();

	if (static_cast<int>(value.size()) < minimum)
	{
		_error = true;
		std::ostringstream oss;
		oss << "array is too small (minimum=" << minimum << ")";
		setMessage(oss.str());
	}
}
// Fails when an array value holds more than "maxItems" elements.
void JsonSchemaChecker::checkMaxItems(const Json::Value & value, const Json::Value & schema)
{
	assert(schema.isIntegral());

	// the "maxItems" constraint applies to arrays only
	if (!value.isArray())
	{
		_error = true;
		setMessage("maxItems only valid for arrays");
		return;
	}

	const int itemLimit = schema.asInt();
	if (static_cast<int>(value.size()) > itemLimit)
	{
		_error = true;
		std::ostringstream oss;
		oss << "array is too large (maximum=" << itemLimit << ")";
		setMessage(oss.str());
	}
}
// When the schema "uniqueItems" flag is true, fails if any two elements
// of the array compare equal. Pairwise O(n^2) comparison; the message is
// recorded once per duplicate pair found.
void JsonSchemaChecker::checkUniqueItems(const Json::Value & value, const Json::Value & schema)
{
	assert(schema.isBool());

	if (!value.isArray())
	{
		// only for arrays
		_error = true;
		setMessage("uniqueItems only valid for arrays");
		return;
	}

	if (schema.asBool() == true)
	{
		// make sure no two items are identical
		for(Json::UInt i = 0; i < value.size(); ++i)
		{
			for (Json::UInt j = i+1; j < value.size(); ++j)
			{
				if (value[i] == value[j])
				{
					// found a value twice
					_error = true;
					setMessage("array must have unique values");
				}
			}
		}
	}
}
// Fails unless 'value' equals one of the entries in the schema "enum"
// array. The error message lists the allowed values.
void JsonSchemaChecker::checkEnum(const Json::Value & value, const Json::Value & schema)
{
	assert(schema.isArray());

	for(Json::ArrayIndex i = 0; i < schema.size(); ++i)
	{
		if (schema[i] == value)
		{
			// found enum value. done.
			return;
		}
	}

	// nothing found
	_error = true;
	std::ostringstream oss;
	oss << "Unknown enum value (allowed values are: ";
	std::string values = Json::FastWriter().write(schema);
	oss << values.substr(0, values.size()-1); // The writer appends a new line which we don't want
	oss << ")";
	setMessage(oss.str());
}
| 4,082 |
335 | {
"word": "Discrimination",
"definitions": [
"The unjust or prejudicial treatment of different categories of people, especially on the grounds of race, age, or sex.",
"Recognition and understanding of the difference between one thing and another.",
"The ability to judge what is of high quality; good judgement or taste.",
"The ability to distinguish between different stimuli.",
"The selection of a signal having a required characteristic, such as frequency or amplitude, by means of a discriminator."
],
"parts-of-speech": "Noun"
} | 172 |
310 | {
"name": "Sonos (Mac)",
"description": "A Mac client for the music streaming devices.",
"url": "http://www.sonos.com/support?r=1"
} | 50 |
from django.apps import AppConfig


class InvitesConfig(AppConfig):
    """Django app configuration for the ``organisations.invites`` app."""

    name = "organisations.invites"
| 32 |
4,493 | <filename>audio_logger.cpp
/*! \file audio_logger.cpp
* \brief Enter description here.
* \author <NAME>
*/
#include "audio_logger.h"
#include <SDL.h>
#include <SDL_audio.h>
#include <mutex>
#include <atomic>
#include <algorithm>
#ifndef pi
#define pi 3.1415926535897932384626433832795
#endif
#ifndef sqrt2
#define sqrt2 (2.0 * 0.707106781186547524401)
#endif
#ifndef sqrt2over2
#define sqrt2over2 0.707106781186547524401
#endif
namespace {
    // DSP filter
    // ref : https://github.com/dimtass/DSP-Cpp-filters

    // Biquad coefficients plus the filter's delay-line state.
    // The state members are zero-initialized here because the calculate*
    // factories below only fill in a0..b2; previously xnz*/ynz* (and c0/d0)
    // were left indeterminate, so the first samples through the filter were
    // combined with garbage.
    struct TFilterCoefficients {
        float a0 = 0.0f;
        float a1 = 0.0f;
        float a2 = 0.0f;
        float b1 = 0.0f;
        float b2 = 0.0f;

        float c0 = 0.0f;
        float d0 = 0.0f;

        float xnz1 = 0.0f;  // x[n-1]
        float xnz2 = 0.0f;  // x[n-2]
        float ynz1 = 0.0f;  // y[n-1]
        float ynz2 = 0.0f;  // y[n-2]
    };

    // First-order high-pass coefficients for cutoff 'fc' (Hz) at rate 'fs' (Hz).
    TFilterCoefficients calculateCoefficientsFirstOrderHighPass(int fc, int fs) {
        TFilterCoefficients res;

        float th = 2.0 * pi * fc / fs;
        float g = cos(th) / (1.0 + sin(th));
        res.a0 = (1.0 + g) / 2.0;
        res.a1 = -((1.0 + g) / 2.0);
        res.a2 = 0.0;
        res.b1 = -g;
        res.b2 = 0.0;

        return res;
    }

    // Second-order Butterworth high-pass coefficients for cutoff 'fc' at rate 'fs'.
    TFilterCoefficients calculateCoefficientsSecondOrderButterworthHighPass(int fc, int fs) {
        TFilterCoefficients res;

        float c = tan(pi*fc / fs);
        res.a0 = 1.0 / (1.0 + sqrt2*c + pow(c, 2.0));
        res.a1 = -2.0 * res.a0;
        res.a2 = res.a0;
        res.b1 = 2.0 * res.a0*(pow(c, 2.0) - 1.0);
        res.b2 = res.a0 * (1.0 - sqrt2*c + pow(c, 2.0));

        return res;
    }

    // Runs one sample through the biquad difference equation
    //   y[n] = a0*x[n] + a1*x[n-1] + a2*x[n-2] - b1*y[n-1] - b2*y[n-2]
    // and shifts the delay line.
    // Bug fixes vs. the original: the b2 term multiplied xnz2 (x[n-2])
    // instead of ynz2 (y[n-2]), and the state update assigned y[n-1] into
    // xnz2 (clobbering the input history) while never updating ynz2.
    // For this first-order filter (a2 = b2 = 0) the effect was latent, but
    // the state it produced was wrong.
    AudioLogger::Sample filterFirstOrderHighPass(TFilterCoefficients & coefficients, AudioLogger::Sample sample) {
        AudioLogger::Sample xn = sample;
        AudioLogger::Sample yn =
            coefficients.a0*xn + coefficients.a1*coefficients.xnz1 + coefficients.a2*coefficients.xnz2 -
            coefficients.b1*coefficients.ynz1 - coefficients.b2*coefficients.ynz2;

        coefficients.xnz2 = coefficients.xnz1;
        coefficients.xnz1 = xn;
        coefficients.ynz2 = coefficients.ynz1;
        coefficients.ynz1 = yn;

        return yn;
    }

    // Same difference equation and the same state-update fixes as above.
    // Here b2 != 0, so the original xnz2/ynz2 mix-up audibly corrupted the
    // filter output.
    AudioLogger::Sample filterSecondOrderButterworthHighPass(TFilterCoefficients & coefficients, AudioLogger::Sample sample) {
        AudioLogger::Sample xn = sample;
        AudioLogger::Sample yn =
            coefficients.a0*xn + coefficients.a1*coefficients.xnz1 + coefficients.a2*coefficients.xnz2 -
            coefficients.b1*coefficients.ynz1 - coefficients.b2*coefficients.ynz2;

        coefficients.xnz2 = coefficients.xnz1;
        coefficients.xnz1 = xn;
        coefficients.ynz2 = coefficients.ynz1;
        coefficients.ynz1 = yn;

        return yn;
    }

    // SDL capture callback: forwards each captured buffer to the logger.
    void cbAudioReady(void * userData, uint8_t * stream, int32_t /*nbytes*/) {
        AudioLogger * logger = (AudioLogger *)(userData);
        logger->addFrame((AudioLogger::Sample *)(stream));
    }
}
// Internal state shared between the public API and the SDL audio callback.
struct AudioLogger::Data {
    Data() : isReady(false) {
        for (auto & frame : buffer) {
            frame.fill(0);
        }
        for (auto & record : records) {
            record.clear();
        }
        nFramesToRecord.fill(0);
    }

    SDL_AudioDeviceID deviceIdIn = 0;
    //SDL_AudioDeviceID deviceIdOut = 0;

    int32_t sampleSize_bytes = -1;

    // write cursor into the circular frame buffer below
    int32_t bufferId = 0;
    // circular buffer of captured (downmixed, filtered) frames
    std::array<Frame, getBufferSize_frames(kMaxSampleRate, kMaxBufferSize_s)> buffer;

    // number of record requests currently in flight
    int32_t nRecords = 0;
    // per-request count of frames still to be captured before completion
    std::array<int32_t, kMaxRecords> nFramesToRecord;
    std::array<Record, kMaxRecords> records;

    Parameters parameters;

    TFilterCoefficients filterCoefficients;

    // guards records/nFramesToRecord/nRecords, which are touched both by
    // the SDL audio thread (addFrame) and the API thread (record)
    std::mutex mutex;
    std::atomic_bool isReady;
};
// Allocate the pimpl state; capture does not start until install() is called.
AudioLogger::AudioLogger() : data_(new AudioLogger::Data()) {}
AudioLogger::~AudioLogger() {}
// Validates 'parameters', initializes SDL audio, opens the requested
// capture device, prepares the configured high-pass filter, and starts
// capture. Returns false (with a message on stderr) on any failure.
// String fix: the success message previously read "succesfully".
bool AudioLogger::install(Parameters && parameters) {
    auto & data = getData();

    if (parameters.captureId < 0) {
        fprintf(stderr, "error : invalid captureId = %d\n", parameters.captureId);
        return false;
    }

    if (parameters.nChannels < 0) {
        fprintf(stderr, "error : invalid nChannels = %d\n", parameters.nChannels);
        return false;
    }

    if (parameters.sampleRate <= 0) {
        fprintf(stderr, "error : invalid sampleRate = %d\n", (int) parameters.sampleRate);
        return false;
    }

    if (parameters.freqCutoff_Hz <= 0) {
        fprintf(stderr, "error : invalid freqCutoff_Hz = %g\n", parameters.freqCutoff_Hz);
        return false;
    }

    if (SDL_Init(SDL_INIT_AUDIO) < 0) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't initialize SDL: %s\n", SDL_GetError());
        return false;
    }

    int nDevices = SDL_GetNumAudioDevices(SDL_TRUE);
    printf("Found %d capture devices:\n", nDevices);
    for (int i = 0; i < nDevices; i++) {
        printf(" - Capture device #%d: '%s'\n", i, SDL_GetAudioDeviceName(i, SDL_TRUE));
    }

    if (parameters.captureId < 0 || parameters.captureId >= nDevices) {
        fprintf(stderr, "error : invalid capture device id selected - %d\n", parameters.captureId);
        return false;
    }

    SDL_AudioSpec captureSpec;
    SDL_zero(captureSpec);

    captureSpec.freq = parameters.sampleRate;
    captureSpec.format = AUDIO_F32SYS;
    captureSpec.channels = parameters.nChannels;
    captureSpec.samples = kSamplesPerFrame;
    captureSpec.callback = ::cbAudioReady;
    captureSpec.userdata = this;

    SDL_AudioSpec obtainedSpec;
    SDL_zero(obtainedSpec);

    printf("Attempt to open capture device %d : '%s' ...\n", parameters.captureId, SDL_GetAudioDeviceName(parameters.captureId, SDL_TRUE));
    data.deviceIdIn = SDL_OpenAudioDevice(SDL_GetAudioDeviceName(parameters.captureId, SDL_TRUE), SDL_TRUE, &captureSpec, &obtainedSpec, SDL_AUDIO_ALLOW_CHANNELS_CHANGE);
    if (!data.deviceIdIn) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't open an audio device for capture: %s!\n", SDL_GetError());
        SDL_Quit();
        return false;
    }

    // NOTE(review): formats other than the cases below leave
    // sampleSize_bytes at -1 without an error — confirm intended.
    switch (obtainedSpec.format) {
        case AUDIO_U8:
        case AUDIO_S8:
        case AUDIO_U16SYS:
        case AUDIO_S16SYS:
        case AUDIO_S32SYS:
            {
                fprintf(stderr, "error : unsupported sample format %d\n", obtainedSpec.format);
                return false;
            }
            break;
        case AUDIO_F32SYS:
            {
                data.sampleSize_bytes = 4;
            }
            break;
    }

    printf("Opened capture device successfully!\n");
    printf("    Frequency:  %d\n", obtainedSpec.freq);
    printf("    Format:     %d (%d bytes)\n", obtainedSpec.format, data.sampleSize_bytes);
    printf("    Channels:   %d\n", obtainedSpec.channels);
    printf("    Samples:    %d\n", obtainedSpec.samples);

    if (obtainedSpec.channels != parameters.nChannels && parameters.nChannels != 0) {
        printf("Warning: obtained number of channels (%d) does not match requested (%d)\n",
               obtainedSpec.channels, parameters.nChannels);
    }

    SDL_PauseAudioDevice(data.deviceIdIn, 0);

    // record the channel count the device actually delivers
    parameters.nChannels = obtainedSpec.channels;

    switch (parameters.filter) {
        case AudioLogger::Parameters::EFilter::None:
            {
            }
            break;
        case AudioLogger::Parameters::EFilter::FirstOrderHighPass:
            {
                data.filterCoefficients = ::calculateCoefficientsFirstOrderHighPass(parameters.freqCutoff_Hz, parameters.sampleRate);
            }
            break;
        case AudioLogger::Parameters::EFilter::SecondOrderButterworthHighPass:
            {
                data.filterCoefficients = ::calculateCoefficientsSecondOrderButterworthHighPass(parameters.freqCutoff_Hz, parameters.sampleRate);
            }
            break;
    };

    data.parameters = parameters;

    data.isReady = true;

    return true;
}
// Pauses and closes the capture device. Always returns true.
bool AudioLogger::terminate() {
    auto & data = getData();

    SDL_PauseAudioDevice(data.deviceIdIn, 1);
    SDL_CloseAudioDevice(data.deviceIdIn);

    return true;
}
// SDL audio-thread entry point. Downmixes the incoming interleaved buffer
// to one mono frame (averaging the channels), applies the configured
// high-pass filter, stores the frame in the circular buffer, and feeds all
// record requests in flight. Completed requests invoke the user callback
// (on this audio thread) and are compacted out of the request arrays.
bool AudioLogger::addFrame(const Sample * stream) {
    auto & data = getData();
    if (data.isReady == false) return false;

    // drop queued audio if processing is falling behind
    if (SDL_GetQueuedAudioSize(data.deviceIdIn) > 32*sizeof(float)*kSamplesPerFrame) {
        printf("Queue size: %d\n", SDL_GetQueuedAudioSize(data.deviceIdIn));
        SDL_ClearQueuedAudio(data.deviceIdIn);
    }

    // average the channels of each sample into a single mono value
    const float norm = 1.0/data.parameters.nChannels;
    auto & curFrame = data.buffer[data.bufferId];
    for (int i = 0; i < kSamplesPerFrame; ++i) {
        Sample x = 0;
        for (int j = 0; j < data.parameters.nChannels; ++j) {
            x += stream[i*data.parameters.nChannels + j];
        }
        curFrame[i] = x*norm;
    }

    // in-place filtering of the mono frame, per the installed parameters
    switch (data.parameters.filter) {
        case AudioLogger::Parameters::EFilter::None:
            {
            }
            break;
        case AudioLogger::Parameters::EFilter::FirstOrderHighPass:
            {
                for (auto & s : curFrame) {
                    s = ::filterFirstOrderHighPass(data.filterCoefficients, s);
                }
            }
            break;
        case AudioLogger::Parameters::EFilter::SecondOrderButterworthHighPass:
            {
                for (auto & s : curFrame) {
                    s = ::filterSecondOrderButterworthHighPass(data.filterCoefficients, s);
                }
            }
            break;
    }

    std::lock_guard<std::mutex> lock(data.mutex);

    for (int r = 0; r < data.nRecords; ++r) {
        auto & record = data.records[r];
        auto & nFramesToRecord = data.nFramesToRecord[r];
        if (nFramesToRecord > 0) {
            record.push_back(curFrame);
            if (--nFramesToRecord == 0) {
                // request complete: deliver the frames, then shift the
                // remaining requests down one slot and re-examine index r
                if (data.parameters.callback) data.parameters.callback(record);
                record.clear();
                for (int k = r + 1; k < data.nRecords; ++k) {
                    data.records[k - 1] = std::move(data.records[k]);
                    data.nFramesToRecord[k - 1] = data.nFramesToRecord[k];
                }
                --data.nRecords;
                --r;
            }
        }
    }

    // advance the circular write cursor
    if (++data.bufferId >= (int) data.buffer.size()) {
        data.bufferId = 0;
    }

    return true;
}
// Queues a record request of 'bufferSize_s' seconds, optionally seeded
// with the last 'nPrevFrames' frames already captured in the circular
// buffer. The request is completed asynchronously by addFrame(), which
// invokes the installed callback with the collected frames.
bool AudioLogger::record(float bufferSize_s, int32_t nPrevFrames) {
    auto & data = getData();

    if (isValidBufferSize(bufferSize_s) == false) {
        return false;
    }

    auto bufferSize_frames = getBufferSize_frames(data.parameters.sampleRate, bufferSize_s);
    if (nPrevFrames >= bufferSize_frames) {
        fprintf(stderr, "warning : invalid previous frames in record requested - %d. max allowed is %d s\n", nPrevFrames, bufferSize_frames - 1);
        return false;
    }

    std::lock_guard<std::mutex> lock(data.mutex);

    if (data.nRecords == kMaxRecords) {
        fprintf(stderr, "warning : max number of simultaneous records %d reached\n", kMaxRecords);
        return false;
    }

    auto & record = data.records[data.nRecords];

    if (record.size() == 0) {
        // pre-fill the record with the requested amount of history,
        // reading backwards from the current write cursor (with wrap)
        int fStart = data.bufferId - nPrevFrames;
        if (fStart < 0) fStart += data.buffer.size();
        for (int i = 0; i < nPrevFrames; ++i) {
            record.push_back(data.buffer[(fStart + i)%data.buffer.size()]);
        }
    } else {
        fprintf(stderr, "warning : new record requested before last has been processed. should never happen\n");
    }

    // remaining frames are captured live by addFrame()
    data.nFramesToRecord[data.nRecords] = bufferSize_frames - nPrevFrames;

    ++data.nRecords;

    return true;
}
// Pauses capture and cancels all in-flight record requests.
// NOTE(review): nFramesToRecord is cleared without taking data.mutex and
// partially-filled records/nRecords are left in place — confirm intended.
bool AudioLogger::pause() {
    auto & data = getData();

    SDL_PauseAudioDevice(data.deviceIdIn, 1);
    data.nFramesToRecord.fill(0);

    return true;
}

// Resumes capture after a pause().
bool AudioLogger::resume() {
    auto & data = getData();

    SDL_PauseAudioDevice(data.deviceIdIn, 0);

    return true;
}
// Checks that a requested record length lies in (0, kMaxBufferSize_s].
// Prints a diagnostic to stderr and returns false otherwise.
bool AudioLogger::isValidBufferSize(float bufferSize_s) const {
    const bool tooSmall = (bufferSize_s <= 0);
    const bool tooLarge = (bufferSize_s > kMaxBufferSize_s);

    if (tooSmall) {
        fprintf(stderr, "error : invalid bufferSize_s = %g\n", bufferSize_s);
    } else if (tooLarge) {
        fprintf(stderr, "error : invalid record size requested - %g s. max allowed is %g s\n", bufferSize_s, kMaxBufferSize_s);
    }

    return !(tooSmall || tooLarge);
}
| 5,509 |
1,133 | //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// United States Government Sponsorship acknowledged. This software is subject to
// U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
// (No [Export] License Required except when exporting to an embargoed country,
// end user, or in support of a prohibited end use). By downloading this software,
// the user agrees to comply with all applicable U.S. export laws and regulations.
// The user has the responsibility to obtain export licenses, or other export
// authority as may be required before exporting this software to any 'EAR99'
// embargoed foreign country or citizen of those countries.
//
// Author: <NAME>
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#ifndef denseoffsetsmodule_h
#define denseoffsetsmodule_h

#include <Python.h>
#include <stdint.h>
#include "denseoffsetsmoduleFortTrans.h"

// C/Python bridge for the Fortran denseoffsets routine: each *_C wrapper
// unpacks its Python arguments and forwards them to the matching *_f
// Fortran entry point declared below.
extern "C"
{
    void denseoffsets_f(uint64_t *, uint64_t *, uint64_t *, uint64_t *);
    PyObject * denseoffsets_C(PyObject *, PyObject *);
    void setLineLength1_f(int *);
    PyObject * setLineLength1_C(PyObject *, PyObject *);
    void setLineLength2_f(int *);
    PyObject * setLineLength2_C(PyObject *, PyObject *);
    void setFileLength1_f(int *);
    PyObject * setFileLength1_C(PyObject *, PyObject *);
    void setFileLength2_f(int *);
    PyObject * setFileLength2_C(PyObject *, PyObject *);
    void setFirstSampleAcross_f(int *);
    PyObject * setFirstSampleAcross_C(PyObject *, PyObject *);
    void setLastSampleAcross_f(int *);
    PyObject * setLastSampleAcross_C(PyObject *, PyObject *);
    void setSkipSampleAcross_f(int *);
    PyObject * setSkipSampleAcross_C(PyObject *, PyObject *);
    void setFirstSampleDown_f(int *);
    PyObject * setFirstSampleDown_C(PyObject *, PyObject *);
    void setLastSampleDown_f(int *);
    PyObject * setLastSampleDown_C(PyObject *, PyObject *);
    void setSkipSampleDown_f(int *);
    PyObject * setSkipSampleDown_C(PyObject *, PyObject *);
    void setAcrossGrossOffset_f(int *);
    PyObject * setAcrossGrossOffset_C(PyObject *, PyObject *);
    void setDownGrossOffset_f(int *);
    PyObject * setDownGrossOffset_C(PyObject *, PyObject *);
    void setScaleFactorX_f(float *);
    PyObject * setScaleFactorX_C(PyObject *, PyObject *);
    void setScaleFactorY_f(float *);
    PyObject * setScaleFactorY_C(PyObject *, PyObject *);
    void setDebugFlag_f(char *, int *);
    PyObject * setDebugFlag_C(PyObject *, PyObject *);
    void setWindowSizeWidth_f(int *);
    PyObject * setWindowSizeWidth_C(PyObject *, PyObject *);
    void setWindowSizeHeight_f(int *);
    PyObject * setWindowSizeHeight_C(PyObject *, PyObject *);
    void setSearchWindowSizeWidth_f(int *);
    PyObject * setSearchWindowSizeWidth_C(PyObject *, PyObject *);
    void setSearchWindowSizeHeight_f(int *);
    PyObject *setSearchWindowSizeHeight_C(PyObject *, PyObject *);
    void setZoomWindowSize_f(int *);
    PyObject * setZoomWindowSize_C(PyObject *, PyObject *);
    void setOversamplingFactor_f(int *);
    PyObject * setOversamplingFactor_C(PyObject *, PyObject *);
    void setIsComplex1_f(int *);
    PyObject * setIsComplex1_C(PyObject *, PyObject *);
    void setIsComplex2_f(int *);
    PyObject * setIsComplex2_C(PyObject *, PyObject *);
    void setBand1_f(int *);
    PyObject * setBand1_C(PyObject *, PyObject *);
    void setBand2_f(int *);
    PyObject * setBand2_C(PyObject *, PyObject *);
    void setNormalizeFlag_f(int *);
    PyObject *setNormalizeFlag_C(PyObject*, PyObject*);
}

// Method table exposed to Python by the denseoffsets extension module.
// NOTE(review): a 'static' table in a header gives each including TU its
// own copy — fine if this header is included by exactly one source file.
static PyMethodDef denseoffsets_methods[] =
{
    {"denseoffsets_Py", denseoffsets_C, METH_VARARGS, " "},
    {"setLineLength1_Py", setLineLength1_C, METH_VARARGS, " "},
    {"setLineLength2_Py", setLineLength2_C, METH_VARARGS, " "},
    {"setFileLength1_Py", setFileLength1_C, METH_VARARGS, " "},
    {"setFileLength2_Py", setFileLength2_C, METH_VARARGS, " "},
    {"setFirstSampleAcross_Py", setFirstSampleAcross_C, METH_VARARGS, " "},
    {"setLastSampleAcross_Py", setLastSampleAcross_C, METH_VARARGS, " "},
    {"setSkipSampleAcross_Py", setSkipSampleAcross_C, METH_VARARGS, " "},
    {"setFirstSampleDown_Py", setFirstSampleDown_C, METH_VARARGS, " "},
    {"setLastSampleDown_Py", setLastSampleDown_C, METH_VARARGS, " "},
    {"setSkipSampleDown_Py", setSkipSampleDown_C, METH_VARARGS, " "},
    {"setAcrossGrossOffset_Py", setAcrossGrossOffset_C, METH_VARARGS, " "},
    {"setDownGrossOffset_Py", setDownGrossOffset_C, METH_VARARGS, " "},
    {"setScaleFactorX_Py", setScaleFactorX_C, METH_VARARGS, " "},
    {"setScaleFactorY_Py", setScaleFactorY_C, METH_VARARGS, " "},
    {"setDebugFlag_Py", setDebugFlag_C, METH_VARARGS, " "},
    {"setWindowSizeWidth_Py", setWindowSizeWidth_C, METH_VARARGS, " "},
    {"setWindowSizeHeight_Py", setWindowSizeHeight_C, METH_VARARGS, " "},
    {"setSearchWindowSizeWidth_Py", setSearchWindowSizeWidth_C, METH_VARARGS, " "},
    {"setSearchWindowSizeHeight_Py", setSearchWindowSizeHeight_C, METH_VARARGS, " "},
    {"setZoomWindowSize_Py", setZoomWindowSize_C, METH_VARARGS, " "},
    {"setOversamplingFactor_Py", setOversamplingFactor_C, METH_VARARGS, " "},
    {"setIsComplex1_Py", setIsComplex1_C, METH_VARARGS, " "},
    {"setIsComplex2_Py", setIsComplex2_C, METH_VARARGS, " "},
    {"setBand1_Py", setBand1_C, METH_VARARGS, " "},
    {"setBand2_Py", setBand2_C, METH_VARARGS, " "},
    {"setNormalizeFlag_Py", setNormalizeFlag_C, METH_VARARGS, " "},
    {NULL, NULL, 0, NULL}
};
#endif //denseoffsetsmodule_h
| 2,615 |
1,396 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jdbi.v3.sqlobject.customizer;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.time.OffsetDateTime;
import org.jdbi.v3.sqlobject.customizer.internal.TimestampedFactory;
/**
* Binds the named parameter <code>:now</code> or a custom named parameter with
* the current DateTime as an {@link OffsetDateTime}.
* Common use cases:
* <pre>
* <code>
* public interface PersonDAO {
* @SqlUpdate("INSERT INTO people(id, firstName, lastName, email, created, modified) VALUES (:p.id, :p.firstName, :p.lastName, :p.email, :now, :now)")
* @Timestamped
* @GetGeneratedKeys
* int insert(@BindBean("p") Person person);
*
* @SqlUpdate("UPDATE people SET modified = :now, firstName = :p.firstName, lastName = :p.lastName, email = :p.email WHERE id = :p.id")
* @Timestamped
* void update(@BindBean("p") Person person);
* }
* </code>
* </pre>
*
* @see TimestampedConfig
*/
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
// The actual statement customization (binding the timestamp) is produced
// by TimestampedFactory, referenced below.
@SqlStatementCustomizingAnnotation(TimestampedFactory.class)
@Documented
public @interface Timestamped {
    /**
     * The parameter to bind in the SQL query. If omitted, defaults to <code>now</code>
     * and can be changed to customize the parameter bound to the current DateTime.
     *
     * @return the parameter name
     */
    String value() default "now";
}
| 688 |
435 | <gh_stars>100-1000
{
"copyright_text": null,
"description": "",
"duration": 2251,
"language": "kor",
"recorded": "2018-08-18",
"related_urls": [
{
"label": "Conference schedule",
"url": "https://www.pycon.kr/2018/en/program/schedule/"
}
],
"speakers": [],
"tags": [],
"thumbnail_url": "https://i.ytimg.com/vi/cNWKQcekSy4/maxresdefault.jpg",
"title": "\ub3d9\ubb3c \ud64d\ucc44\uc778\uc2dd\ubd80\ud130 \uc11c\ubc84\uae4c\uc9c0 python\uc73c\ub85c \ub9cc\ub4e4\uae30 - \uc815\uc9c4\uc131",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=cNWKQcekSy4"
}
]
}
| 323 |
1,006 | /****************************************************************************
* sched/paging/pg_miss.c
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The
* ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
/****************************************************************************
* Included Files
****************************************************************************/
#include <nuttx/config.h>
#include <assert.h>
#include <errno.h>
#include <debug.h>
#include <nuttx/arch.h>
#include <nuttx/sched.h>
#include <nuttx/page.h>
#include <nuttx/signal.h>
#ifdef CONFIG_PAGING
#include "sched/sched.h"
#include "paging/paging.h"
/****************************************************************************
* Public Functions
****************************************************************************/
/****************************************************************************
* Name: pg_miss
*
* Description:
* This function is called from architecture-specific memory segmentation
* fault handling logic. This function will perform the following
* operations:
*
* 1) Sanity checking.
* - ASSERT if the currently executing task is the page fill worker
* thread. The page fill worker thread is how the page fault
* is resolved and all logic associated with the page fill worker
* must be "locked" and always present in memory.
* - ASSERT if an interrupt was executing at the time of the exception.
* 2) Block the currently executing task.
* - Call up_block_task() to block the task at the head of the ready-
* to-run list. This should cause an interrupt level context switch
* to the next highest priority task.
* - The blocked task will be marked with state TSTATE_WAIT_PAGEFILL
* and will be retained in the g_waitingforfill prioritized task
* list.
* 3) Boost the page fill worker thread priority.
* - Check the priority of the task at the head of the g_waitingforfill
* list. If the priority of that task is higher than the current
* priority of the page fill worker thread, then boost the priority
* of the page fill worker thread to that priority.
* 4) Signal the page fill worker thread.
* - Is there a page fill pending? If not then signal the worker
* thread to start working on the queued page fill requests.
*
* Input Parameters:
* None - The head of the ready-to-run list is assumed to be task that
* caused the exception.
*
* Returned Value:
 * None - Either this function succeeds or an assertion occurs.
*
* Assumptions:
* - It is assumed that this function is called from the level of an
* exception handler and that all interrupts are disabled.
* - It is assumed that currently executing task (the one at the head of
* the ready-to-run list) is the one that cause the fault. This will
* always be true unless the page fault occurred in an interrupt handler.
* Interrupt handling logic must always be present and "locked" into
* memory.
* - As mentioned above, the task causing the page fault must not be the
* page fill worker thread because that is the only way to complete the
* page fill.
*
* NOTES:
* 1. One way to accomplish this would be a two pass link phase:
* - In the first phase, create a partially linked objected containing
* all interrupt/exception handling logic, the page fill worker thread
* plus all parts of the IDLE thread (which must always be available
* for execution).
* - All of the .text and .rodata sections of this partial link should
* be collected into a single section.
* - The second link would link the partially linked object along with
* the remaining object to produce the final binary. The linker
* script should position the "special" section so that it lies
* in a reserved, "non-swappable" region.
*
****************************************************************************/
void pg_miss(void)
{
FAR struct tcb_s *ftcb = this_task();
FAR struct tcb_s *wtcb;
/* Sanity checking
*
* ASSERT if the currently executing task is the page fill worker thread.
* The page fill worker thread is how the page fault is resolved and
* all logic associated with the page fill worker must be "locked" and
* always present in memory.
*/
pginfo("Blocking TCB: %p PID: %d\n", ftcb, ftcb->pid);
DEBUGASSERT(g_pgworker != ftcb->pid);
/* Block the currently executing task
* - Call up_block_task() to block the task at the head of the ready-
* to-run list. This should cause an interrupt level context switch
* to the next highest priority task.
* - The blocked task will be marked with state TSTATE_WAIT_PAGEFILL
* and will be retained in the g_waitingforfill prioritized task list.
*
* Need to firstly check that this is not the idle task,descheduling
* that isn't going to end well.
*/
DEBUGASSERT(NULL != ftcb->flink);
up_block_task(ftcb, TSTATE_WAIT_PAGEFILL);
/* Boost the page fill worker thread priority.
* - Check the priority of the task at the head of the g_waitingforfill
* list. If the priority of that task is higher than the current
* priority of the page fill worker thread, then boost the priority
* of the page fill worker thread to that priority.
*/
wtcb = nxsched_get_tcb(g_pgworker);
DEBUGASSERT(wtcb != NULL);
if (wtcb->sched_priority < ftcb->sched_priority)
{
/* Reprioritize the page fill worker thread */
pginfo("New worker priority. %d->%d\n",
wtcb->sched_priority, ftcb->sched_priority);
nxsched_set_priority(wtcb, ftcb->sched_priority);
}
/* Signal the page fill worker thread.
* - Is there a page fill pending? If not then signal the worker
* thread to start working on the queued page fill requests.
*/
if (!g_pftcb)
{
pginfo("Signaling worker. PID: %d\n", g_pgworker);
nxsig_kill(g_pgworker, SIGWORK);
}
}
#endif /* CONFIG_PAGING */
| 2,025 |
608 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.battery.serializer.healthstats;
import android.os.Build;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.collection.ArrayMap;
import com.facebook.battery.metrics.healthstats.HealthStatsMetrics;
import com.facebook.battery.metrics.healthstats.HealthStatsMetrics.TimerMetrics;
import com.facebook.battery.serializer.core.SystemMetricsSerializer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
@RequiresApi(api = Build.VERSION_CODES.N)
public class HealthStatsMetricsSerializer extends SystemMetricsSerializer<HealthStatsMetrics> {

  private static final long serialVersionUID = -874523681867511420L;

  @Override
  public long getTag() {
    return serialVersionUID;
  }

  /**
   * Writes {@code metrics} to {@code output}.
   *
   * <p>Layout (mirrored exactly by {@link #deserializeContents}): the data-type string followed by
   * the five maps, each written as a count and then (key, value) pairs. Entries of the nested
   * {@code stats} map are serialized recursively with this same method.
   */
  @Override
  public void serializeContents(HealthStatsMetrics metrics, DataOutput output) throws IOException {
    writeString(metrics.dataType, output);

    int measurementLength = metrics.measurement.size();
    output.writeInt(measurementLength);
    for (int i = 0; i < measurementLength; i++) {
      output.writeInt(metrics.measurement.keyAt(i));
      output.writeLong(metrics.measurement.valueAt(i));
    }

    int timerLength = metrics.timer.size();
    output.writeInt(timerLength);
    for (int i = 0; i < timerLength; i++) {
      output.writeInt(metrics.timer.keyAt(i));
      writeTimer(metrics.timer.valueAt(i), output);
    }

    int measurementsLength = metrics.measurements.size();
    output.writeInt(measurementsLength);
    for (int i = 0; i < measurementsLength; i++) {
      output.writeInt(metrics.measurements.keyAt(i));
      ArrayMap<String, Long> currentMeasurement = metrics.measurements.valueAt(i);
      int currentMeasurementLength = currentMeasurement.size();
      output.writeInt(currentMeasurementLength);
      for (int j = 0; j < currentMeasurementLength; j++) {
        writeString(currentMeasurement.keyAt(j), output);
        output.writeLong(currentMeasurement.valueAt(j));
      }
    }

    int timersLength = metrics.timers.size();
    output.writeInt(timersLength);
    for (int i = 0; i < timersLength; i++) {
      output.writeInt(metrics.timers.keyAt(i));
      ArrayMap<String, TimerMetrics> currentTimer = metrics.timers.valueAt(i);
      int currentTimerLength = currentTimer.size();
      output.writeInt(currentTimerLength);
      for (int j = 0; j < currentTimerLength; j++) {
        writeString(currentTimer.keyAt(j), output);
        writeTimer(currentTimer.valueAt(j), output);
      }
    }

    int statsLength = metrics.stats.size();
    output.writeInt(statsLength);
    for (int i = 0; i < statsLength; i++) {
      output.writeInt(metrics.stats.keyAt(i));
      ArrayMap<String, HealthStatsMetrics> currentStats = metrics.stats.valueAt(i);
      int currentStatsLength = currentStats.size();
      output.writeInt(currentStatsLength);
      for (int j = 0; j < currentStatsLength; j++) {
        writeString(currentStats.keyAt(j), output);
        serializeContents(currentStats.valueAt(j), output);
      }
    }
  }

  /** Writes a timer as its count followed by its accumulated time in ms. */
  private static void writeTimer(TimerMetrics timer, DataOutput output) throws IOException {
    output.writeInt(timer.count);
    output.writeLong(timer.timeMs);
  }

  /**
   * Writes a length-prefixed string; {@code null} is encoded as length 0.
   *
   * <p>NOTE(review): {@link DataOutput#writeBytes} keeps only the low byte of each char, so only
   * ASCII/Latin-1 strings round-trip correctly — confirm keys are always ASCII.
   */
  private static void writeString(@Nullable String str, DataOutput output) throws IOException {
    if (str == null) {
      output.writeInt(0);
    } else {
      output.writeInt(str.length());
      output.writeBytes(str);
    }
  }

  /** Reads back the exact layout produced by {@link #serializeContents}. */
  @Override
  public boolean deserializeContents(HealthStatsMetrics metrics, DataInput input)
      throws IOException {
    metrics.dataType = readString(input);

    int measurementLength = input.readInt();
    for (int i = 0; i < measurementLength; i++) {
      metrics.measurement.put(input.readInt(), input.readLong());
    }

    int timerLength = input.readInt();
    for (int i = 0; i < timerLength; i++) {
      metrics.timer.put(input.readInt(), readTimer(input));
    }

    int measurementsLength = input.readInt();
    for (int i = 0; i < measurementsLength; i++) {
      int currentMeasurementKey = input.readInt();
      int currentMeasurementLength = input.readInt();
      ArrayMap<String, Long> currentMeasurement = new ArrayMap<>(currentMeasurementLength);
      for (int j = 0; j < currentMeasurementLength; j++) {
        currentMeasurement.put(readString(input), input.readLong());
      }
      metrics.measurements.put(currentMeasurementKey, currentMeasurement);
    }

    int timersLength = input.readInt();
    for (int i = 0; i < timersLength; i++) {
      int currentTimerKey = input.readInt();
      int currentTimerLength = input.readInt();
      ArrayMap<String, TimerMetrics> currentTimer = new ArrayMap<>(currentTimerLength);
      for (int j = 0; j < currentTimerLength; j++) {
        // Consistency fix: use readString() like the measurements/stats
        // branches instead of an inlined copy of its logic.  The inlined
        // version also decoded a zero-length key as "" rather than null,
        // diverging from writeString's encoding of null.
        currentTimer.put(readString(input), readTimer(input));
      }
      metrics.timers.put(currentTimerKey, currentTimer);
    }

    int statsLength = input.readInt();
    for (int i = 0; i < statsLength; i++) {
      int currentStatsKey = input.readInt();
      int currentStatsLength = input.readInt();
      ArrayMap<String, HealthStatsMetrics> currentStats = new ArrayMap<>(currentStatsLength);
      for (int j = 0; j < currentStatsLength; j++) {
        String key = readString(input);
        HealthStatsMetrics healthStatsMetrics = new HealthStatsMetrics();
        deserializeContents(healthStatsMetrics, input);
        currentStats.put(key, healthStatsMetrics);
      }
      metrics.stats.put(currentStatsKey, currentStats);
    }
    return true;
  }

  /** Reads a timer written by {@link #writeTimer}. */
  private static TimerMetrics readTimer(DataInput input) throws IOException {
    return new TimerMetrics(input.readInt(), input.readLong());
  }

  /** Reads a length-prefixed string written by {@link #writeString}; length 0 yields null. */
  private static @Nullable String readString(DataInput input) throws IOException {
    int length = input.readInt();
    if (length == 0) {
      return null;
    } else {
      byte[] bytes = new byte[length];
      input.readFully(bytes, 0, length);
      return new String(bytes);
    }
  }
}
| 2,246 |
506 | /*
* Copyright 2020. Huawei Technologies Co., Ltd. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.huawei.push.webpush;
import com.alibaba.fastjson.annotation.JSONField;
public class WebActions {
    // Action identifier — presumably echoed back to the web app when the
    // user activates this action button; verify against the HMS push
    // web-notification documentation.
    @JSONField(name = "action")
    private String action;
    // Icon URL displayed for this action button.
    @JSONField(name = "icon")
    private String icon;
    // Text label of this action button.
    @JSONField(name = "title")
    private String title;
    public String getAction() {
        return action;
    }
    public String getIcon() {
        return icon;
    }
    public String getTitle() {
        return title;
    }
    /**
     * Validation hook; currently empty — no constraints are enforced on the
     * fields.  NOTE(review): confirm whether validation is intended here.
     */
    public void check(){
    }
    /** Copies all fields from the builder. */
    public WebActions(Builder builder) {
        this.action = builder.action;
        this.icon = builder.icon;
        this.title = builder.title;
    }
    /**
     * builder
     */
    public static Builder builder() {
        return new Builder();
    }
    /** Fluent builder for {@link WebActions}. */
    public static class Builder {
        private String action;
        private String icon;
        private String title;
        public Builder setAction(String action) {
            this.action = action;
            return this;
        }
        public Builder setIcon(String icon) {
            this.icon = icon;
            return this;
        }
        public Builder setTitle(String title) {
            this.title = title;
            return this;
        }
        public WebActions build() {
            return new WebActions(this);
        }
    }
}
| 738 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-f933-7c3x-46v4",
"modified": "2022-04-14T00:00:42Z",
"published": "2022-04-05T00:00:22Z",
"aliases": [
"CVE-2021-32994"
],
"details": "Softing OPC UA C++ SDK (Software Development Kit) versions from 5.59 to 5.64 exported library functions don't properly validate received extension objects, which may allow an attacker to crash the software by sending a variety of specially crafted packets to access several unexpected memory locations.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H"
}
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-32994"
},
{
"type": "WEB",
"url": "https://www.cisa.gov/uscert/ics/advisories/icsa-21-168-02"
}
],
"database_specific": {
"cwe_ids": [
"CWE-119"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 449 |
1,209 | <filename>u8g2/csrc/u8x8_d_ssd1306_128x64_noname.c
#include "u8x8.h"
static const uint8_t u8x8_d_ssd1306_128x64_noname_init_seq[] = {
U8X8_START_TRANSFER(), /* enable chip, delay is part of the transfer start */
U8X8_C(0x0ae), /* display off */
U8X8_CA(0x0d5, 0x080), /* clock divide ratio (0x00=1) and oscillator frequency (0x8) */
U8X8_CA(0x0a8, 0x03f), /* multiplex ratio */
U8X8_CA(0x0d3, 0x000), /* display offset */
U8X8_C(0x040), /* set display start line to 0 */
U8X8_CA(0x08d, 0x014), /* [2] charge pump setting (p62): 0x014 enable, 0x010 disable */
U8X8_CA(0x020, 0x000), /* page addressing mode */
#if U8X8_DEFAULT_FLIP_MODE == 0
U8X8_C(0x0a1), /* segment remap a0/a1*/
U8X8_C(0x0c8), /* c0: scan dir normal, c8: reverse */
#else
U8X8_C(0x0a0), /* segment remap a0/a1*/
U8X8_C(0x0c0), /* c0: scan dir normal, c8: reverse */
#endif
U8X8_CA(0x0da, 0x012), /* com pin HW config, sequential com pin config (bit 4), disable left/right remap (bit 5) */
U8X8_CA(0x081, 0x0cf), /* [2] set contrast control */
U8X8_CA(0x0d9, 0x0f1), /* [2] pre-charge period 0x022/f1*/
U8X8_CA(0x0db, 0x040), /* vcomh deselect level */
U8X8_C(0x02e), /* Deactivate scroll */
U8X8_C(0x0a4), /* output ram to display */
U8X8_C(0x0a6), /* none inverted normal display mode */
U8X8_END_TRANSFER(), /* disable chip */
U8X8_END() /* end of sequence */
};
static const uint8_t u8x8_d_ssd1306_128x64_noname_powersave0_seq[] = {
U8X8_START_TRANSFER(), /* enable chip, delay is part of the transfer start */
U8X8_C(0x0af), /* display on */
U8X8_END_TRANSFER(), /* disable chip */
U8X8_END() /* end of sequence */
};
static const uint8_t u8x8_d_ssd1306_128x64_noname_powersave1_seq[] = {
U8X8_START_TRANSFER(), /* enable chip, delay is part of the transfer start */
U8X8_C(0x0ae), /* display off */
U8X8_END_TRANSFER(), /* disable chip */
U8X8_END() /* end of sequence */
};
#ifdef U8X8_WITH_SET_FLIP_MODE
static const uint8_t u8x8_d_ssd1306_128x64_noname_flip0_seq[] = {
U8X8_START_TRANSFER(), /* enable chip, delay is part of the transfer start */
U8X8_C(0x0a1), /* segment remap a0/a1*/
U8X8_C(0x0c8), /* c0: scan dir normal, c8: reverse */
U8X8_END_TRANSFER(), /* disable chip */
U8X8_END() /* end of sequence */
};
static const uint8_t u8x8_d_ssd1306_128x64_noname_flip1_seq[] = {
U8X8_START_TRANSFER(), /* enable chip, delay is part of the transfer start */
U8X8_C(0x0a0), /* segment remap a0/a1*/
U8X8_C(0x0c0), /* c0: scan dir normal, c8: reverse */
U8X8_END_TRANSFER(), /* disable chip */
U8X8_END() /* end of sequence */
};
#endif
static const u8x8_display_info_t u8x8_ssd1306_128x64_noname_display_info =
{
/* chip_enable_level = */ 0,
/* chip_disable_level = */ 1,
/* post_chip_enable_wait_ns = */ 20,
/* pre_chip_disable_wait_ns = */ 10,
/* reset_pulse_width_ms = */ 100, /* SSD1306: 3 us */
/* post_reset_wait_ms = */ 100, /* far east OLEDs need much longer setup time */
/* sda_setup_time_ns = */ 50, /* SSD1306: 15ns, but cycle time is 100ns, so use 100/2 */
/* sck_pulse_width_ns = */ 50, /* SSD1306: 20ns, but cycle time is 100ns, so use 100/2 */
/* sck_takeover_edge = */ 1, /* rising edge */
/* i2c_bus_clock_100kHz = */ 4,
/* data_setup_time_ns = */ 40,
/* write_pulse_width_ns = */ 150, /* SSD1306: cycle time is 300ns, so use 300/2 = 150 */
/* tile_width = */ 16,
/* tile_hight = */ 8,
/* default_x_offset = */ 0
};
/*
 * Display driver callback for the 128x64 "noname" SSD1306 OLED panel.
 * Dispatches u8x8 display messages; the command bytes sent below follow the
 * SSD1306 datasheet command set (0x0b0 = page address, 0x010/0x000 = column
 * address high/low nibble, 0x081 = contrast).
 */
uint8_t u8x8_d_ssd1306_128x64_noname(u8x8_t *u8x8, uint8_t msg, uint8_t arg_int, void *arg_ptr)
{
  uint8_t x, c;
  uint8_t *ptr;
  switch(msg)
  {
    case U8X8_MSG_DISPLAY_SETUP:
      /* publish the panel geometry/timing table to the u8x8 core */
      u8x8_d_helper_display_setup(u8x8, &u8x8_ssd1306_128x64_noname_display_info);
      break;
    case U8X8_MSG_DISPLAY_INIT:
      /* send the full power-up command sequence defined above */
      u8x8_d_helper_display_init(u8x8);
      u8x8_cad_SendSequence(u8x8, u8x8_d_ssd1306_128x64_noname_init_seq);
      break;
    case U8X8_MSG_DISPLAY_SET_POWER_SAVE:
      /* arg_int == 0: display on, otherwise display off */
      if ( arg_int == 0 )
        u8x8_cad_SendSequence(u8x8, u8x8_d_ssd1306_128x64_noname_powersave0_seq);
      else
        u8x8_cad_SendSequence(u8x8, u8x8_d_ssd1306_128x64_noname_powersave1_seq);
      break;
#ifdef U8X8_WITH_SET_FLIP_MODE
    case U8X8_MSG_DISPLAY_SET_FLIP_MODE:
      /* choose segment-remap / scan-direction pair for 0 or 180 degrees */
      if ( arg_int == 0 )
        u8x8_cad_SendSequence(u8x8, u8x8_d_ssd1306_128x64_noname_flip0_seq);
      else
        u8x8_cad_SendSequence(u8x8, u8x8_d_ssd1306_128x64_noname_flip1_seq);
      break;
#endif
#ifdef U8X8_WITH_SET_CONTRAST
    case U8X8_MSG_DISPLAY_SET_CONTRAST:
      u8x8_cad_StartTransfer(u8x8);
      u8x8_cad_SendCmd(u8x8, 0x081 );
      u8x8_cad_SendArg(u8x8, arg_int );	/* ssd1306 has range from 0 to 255 */
      u8x8_cad_EndTransfer(u8x8);
      break;
#endif
    case U8X8_MSG_DISPLAY_DRAW_TILE:
      u8x8_cad_StartTransfer(u8x8);
      /* convert tile x position to a pixel column, add panel column offset */
      x = ((u8x8_tile_t *)arg_ptr)->x_pos;
      x *= 8;
      x += u8x8->x_offset;
      u8x8_cad_SendCmd(u8x8, 0x010 | (x>>4) );	/* column address, high nibble */
      u8x8_cad_SendCmd(u8x8, 0x000 | ((x&15)));	/* column address, low nibble */
      u8x8_cad_SendCmd(u8x8, 0x0b0 | (((u8x8_tile_t *)arg_ptr)->y_pos));	/* page address = tile row */
      /* arg_int tells how many times the tile block must be repeated */
      do
      {
        c = ((u8x8_tile_t *)arg_ptr)->cnt;
        ptr = ((u8x8_tile_t *)arg_ptr)->tile_ptr;
        u8x8_cad_SendData(u8x8, c*8, ptr); 	/* note: SendData can not handle more than 255 bytes */
        /*
        do
        {
          u8x8_cad_SendData(u8x8, 8, ptr);
          ptr += 8;
          c--;
        } while( c > 0 );
        */
        arg_int--;
      } while( arg_int > 0 );
      u8x8_cad_EndTransfer(u8x8);
      break;
    default:
      return 0;
  }
  return 1;
}
| 3,093 |
794 | <filename>coap/src/main/java/org/apache/mina/coap/CoapCode.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.coap;
/**
* Extract of the CoAP RFC :
*
* <pre>
* | 1 | GET | [RFCXXXX] |
* | 2 | POST | [RFCXXXX] |
* | 3 | PUT | [RFCXXXX] |
* | 4 | DELETE | [RFCXXXX] |
*
* | 65 | 2.01 Created | [RFCXXXX] |
* | 66 | 2.02 Deleted | [RFCXXXX] |
* | 67 | 2.03 Valid | [RFCXXXX] |
* | 68 | 2.04 Changed | [RFCXXXX] |
* | 69 | 2.05 Content | [RFCXXXX] |
* | 128 | 4.00 Bad Request | [RFCXXXX] |
* | 129 | 4.01 Unauthorized | [RFCXXXX] |
* | 130 | 4.02 Bad Option | [RFCXXXX] |
* | 131 | 4.03 Forbidden | [RFCXXXX] |
* | 132 | 4.04 Not Found | [RFCXXXX] |
* | 133 | 4.05 Method Not Allowed | [RFCXXXX] |
* | 134 | 4.06 Not Acceptable | [RFCXXXX] |
* | 140 | 4.12 Precondition Failed | [RFCXXXX] |
* | 141 | 4.13 Request Entity Too Large | [RFCXXXX] |
* | 143 | 4.15 Unsupported Content-Format | [RFCXXXX] |
* | 160 | 5.00 Internal Server Error | [RFCXXXX] |
* | 161 | 5.01 Not Implemented | [RFCXXXX] |
* | 162 | 5.02 Bad Gateway | [RFCXXXX] |
* | 163 | 5.03 Service Unavailable | [RFCXXXX] |
* | 164 | 5.04 Gateway Timeout | [RFCXXXX] |
* | 165 | 5.05 Proxying Not Supported | [RFCXXXX] |
* </pre>
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
public enum CoapCode {
    // Requests
    GET("GET", 1),
    POST("POST", 2),
    PUT("PUT", 3),
    DELETE("DELETE", 4),
    // Success responses (2.xx)
    CREATED("2.01", 65),
    DELETED("2.02", 66),
    VALID("2.03", 67),
    CHANGED("2.04", 68),
    CONTENT("2.05", 69),
    // Client error responses (4.xx)
    BAD_REQUEST("4.00", 128),
    UNAUTHORIZED("4.01", 129),
    BAD_OPTION("4.02", 130),
    FORBIDDEN("4.03", 131),
    NOT_FOUND("4.04", 132),
    METHOD_NOT_ALLOWED("4.05", 133),
    NOT_ACCEPTABLE("4.06", 134),
    PRECONDITION_FAILED("4.12", 140),
    REQUEST_ENTITY_TOO_LARGE("4.13", 141),
    UNSUPPORTED_CONTENT_FORMAT("4.15", 143),
    // Server error responses (5.xx)
    INTERNAL_SERVER_ERROR("5.00", 160),
    NOT_IMPLEMENTED("5.01", 161),
    BAD_GATEWAY("5.02", 162),
    SERVICE_UNAVAILABLE("5.03", 163),
    GATEWAY_TIMEOUT("5.04", 164),
    PROXYING_NOT_SUPPORTED("5.05", 165);

    /** Human-readable form: the method name ("GET") or dotted code ("2.05"). */
    private final String text;

    /** Numeric code value as carried in the CoAP header. */
    private final int code;

    /** Reverse lookup table for {@link #fromCode}, built once at class load. */
    private static final java.util.Map<Integer, CoapCode> BY_CODE;

    static {
        java.util.Map<Integer, CoapCode> byCode = new java.util.HashMap<>();
        for (CoapCode t : values()) {
            byCode.put(t.code, t);
        }
        BY_CODE = java.util.Collections.unmodifiableMap(byCode);
    }

    private CoapCode(String text, int code) {
        this.text = text;
        this.code = code;
    }

    public String getText() {
        return text;
    }

    public int getCode() {
        return code;
    }

    /**
     * Find the {@link CoapCode} for the given value code (<code>null</code> if not found).
     * Constant-time map lookup instead of scanning every constant on each call.
     */
    public static CoapCode fromCode(int code) {
        return BY_CODE.get(code);
    }
}
| 1,738 |
10,876 | {
"name": "xtensor-io",
"version": "0.13.0",
"port-version": 1,
"description": "xtensor plugin to read and write images, audio files, numpy (compressed) npz and HDF5",
"homepage": "https://github.com/xtensor-stack/xtensor-io",
"dependencies": [
{
"name": "vcpkg-cmake",
"host": true
},
{
"name": "vcpkg-cmake-config",
"host": true
},
"xtensor",
"xtl"
]
}
| 192 |
2,059 | #include <stdio.h>
#include <stdlib.h>
#include <string.h>
#define _GNU_SOURCE /* See feature_test_macros(7) */
#include <dlfcn.h>
#include "wrappedlibs.h"
#include "debug.h"
#include "wrapper.h"
#include "bridge.h"
#include "librarian/library_private.h"
#include "x86emu.h"
#include "emu/x86emu_private.h"
#include "box86context.h"
#include "librarian.h"
#include "callback.h"
extern char* libGL;
const char* libglName = "libGL.so.1";
#define LIBNAME libgl
void fillGLProcWrapper(box86context_t*);
void freeProcWrapper(kh_symbolmap_t** symbolmap);
/*
 * box86 replacement for glXGetProcAddress: resolves a GL entry point for the
 * x86 guest.  The native symbol is located either through a box86-side "my_"
 * shim (when the call needs emulation-side handling) or the real driver's
 * glXGetProcAddress, and is then exposed to the guest through a bridge stub
 * built from the wrapper signature registered in context->glwrappers.
 * Returns NULL if the symbol or its wrapper is unknown.
 */
EXPORT void* my_glXGetProcAddress(x86emu_t* emu, void* name)
{
    khint_t k;
    const char* rname = (const char*)name;
    if(dlsym_error && box86_log<LOG_DEBUG) printf_log(LOG_NONE, "Calling glXGetProcAddress(\"%s\") => ", rname);
    if(!emu->context->glwrappers)
        fillGLProcWrapper(emu->context);    // lazy one-time build of the wrapper maps
    // check if glxprocaddress is filled, and search for lib and fill it if needed
    // get proc address using actual glXGetProcAddress
    k = kh_get(symbolmap, emu->context->glmymap, rname);
    int is_my = (k==kh_end(emu->context->glmymap))?0:1;
    void* symbol;
    if(is_my) {
        // symbol has a box86-side replacement: resolve "my_<name>" from box86lib
        char tmp[200];
        strcpy(tmp, "my_");
        strcat(tmp, rname);
        symbol = dlsym(emu->context->box86lib, tmp);
    } else
        symbol = emu->context->glxprocaddress(rname);
    if(!symbol) {
        if(dlsym_error && box86_log<LOG_DEBUG) printf_log(LOG_NONE, "%p\n", NULL);
        return NULL; // easy
    }
    // check if already bridged: reuse the existing guest-callable stub
    uintptr_t ret = CheckBridged(emu->context->system, symbol);
    if(ret) {
        if(dlsym_error && box86_log<LOG_DEBUG) printf_log(LOG_NONE, "%p\n", (void*)ret);
        return (void*)ret; // already bridged
    }
    // get wrapper signature for the symbol
    k = kh_get(symbolmap, emu->context->glwrappers, rname);
    if(k==kh_end(emu->context->glwrappers) && strstr(rname, "ARB")==NULL) {
        // try again, adding ARB at the end if not present
        char tmp[200];
        strcpy(tmp, rname);
        strcat(tmp, "ARB");
        k = kh_get(symbolmap, emu->context->glwrappers, tmp);
    }
    if(k==kh_end(emu->context->glwrappers) && strstr(rname, "EXT")==NULL) {
        // try again, adding EXT at the end if not present
        char tmp[200];
        strcpy(tmp, rname);
        strcat(tmp, "EXT");
        k = kh_get(symbolmap, emu->context->glwrappers, tmp);
    }
    if(k==kh_end(emu->context->glwrappers)) {
        // no wrapper signature known: cannot bridge this function safely
        if(dlsym_error && box86_log<LOG_DEBUG) printf_log(LOG_NONE, "%p\n", NULL);
        if(dlsym_error && box86_log<LOG_INFO) printf_log(LOG_NONE, "Warning, no wrapper for %s\n", rname);
        return NULL;
    }
    // register the symbol name for diagnostics, then build the bridge stub
    AddOffsetSymbol(emu->context->maplib, symbol, rname);
    ret = AddBridge(emu->context->system, kh_value(emu->context->glwrappers, k), symbol, 0);
    if(dlsym_error && box86_log<LOG_DEBUG) printf_log(LOG_NONE, "%p\n", (void*)ret);
    return (void*)ret;
}
EXPORT void* my_glXGetProcAddressARB(x86emu_t* emu, void* name) __attribute__((alias("my_glXGetProcAddress")));
typedef int (*iFi_t)(int);
typedef void (*vFpp_t)(void*, void*);
typedef void (*debugProc_t)(int32_t, int32_t, uint32_t, int32_t, int32_t, void*, void*);
#define SUPER() \
GO(0) \
GO(1) \
GO(2) \
GO(3) \
GO(4)
// debug_callback ...
#define GO(A) \
static uintptr_t my_debug_callback_fct_##A = 0; \
static void my_debug_callback_##A(int32_t a, int32_t b, uint32_t c, int32_t d, int32_t e, const char* f, const void* g) \
{ \
RunFunction(my_context, my_debug_callback_fct_##A, 7, a, b, c, d, e, f, g); \
}
SUPER()
#undef GO
static void* find_debug_callback_Fct(void* fct)
{
if(!fct) return fct;
if(GetNativeFnc((uintptr_t)fct)) return GetNativeFnc((uintptr_t)fct);
#define GO(A) if(my_debug_callback_fct_##A == (uintptr_t)fct) return my_debug_callback_##A;
SUPER()
#undef GO
#define GO(A) if(my_debug_callback_fct_##A == 0) {my_debug_callback_fct_##A = (uintptr_t)fct; return my_debug_callback_##A; }
SUPER()
#undef GO
printf_log(LOG_NONE, "Warning, no more slot for libGL debug_callback callback\n");
return NULL;
}
// program_callback ...
#define GO(A) \
static uintptr_t my_program_callback_fct_##A = 0; \
static void my_program_callback_##A(int32_t a, void* b) \
{ \
RunFunction(my_context, my_program_callback_fct_##A, 2, a, b); \
}
SUPER()
#undef GO
static void* find_program_callback_Fct(void* fct)
{
if(!fct) return fct;
if(GetNativeFnc((uintptr_t)fct)) return GetNativeFnc((uintptr_t)fct);
#define GO(A) if(my_program_callback_fct_##A == (uintptr_t)fct) return my_program_callback_##A;
SUPER()
#undef GO
#define GO(A) if(my_program_callback_fct_##A == 0) {my_program_callback_fct_##A = (uintptr_t)fct; return my_program_callback_##A; }
SUPER()
#undef GO
printf_log(LOG_NONE, "Warning, no more slot for libGL program_callback callback\n");
return NULL;
}
#undef SUPER
/*
 * glDebugMessageCallback wrapper: the guest (x86) callback cannot be invoked
 * directly by the native driver, so it is replaced by a native trampoline
 * from find_debug_callback_Fct() that re-enters the emulator via RunFunction.
 * The real driver entry point is resolved lazily on first use.
 */
EXPORT void my_glDebugMessageCallback(x86emu_t* emu, void* prod, void* param)
{
    static vFpp_t DebugMessageCallback = NULL;
    static int init = 1;    /* lazy one-time lookup; NOTE(review): not thread-safe — confirm GL calls are single-threaded */
    if(init) {
        DebugMessageCallback = my_context->glxprocaddress("glDebugMessageCallback");
        init = 0;
    }
    if(!DebugMessageCallback)   /* extension not available: silently ignore */
        return;
    void* cb = find_debug_callback_Fct(prod);
    DebugMessageCallback(cb, param);
}
/* ARB and AMD variants share the exact same implementation. */
EXPORT void my_glDebugMessageCallbackARB(x86emu_t* emu, void* prod, void* param) __attribute__((alias("my_glDebugMessageCallback")));
EXPORT void my_glDebugMessageCallbackAMD(x86emu_t* emu, void* prod, void* param) __attribute__((alias("my_glDebugMessageCallback")));
/*
 * glXSwapIntervalMESA wrapper: lazily resolves the MESA swap-interval entry
 * point; returns 0 when the extension is unavailable.
 */
EXPORT int my_glXSwapIntervalMESA(int interval)
{
    static iFi_t SwapIntervalMESA = NULL;
    static int init = 1;
    if(init) {
        SwapIntervalMESA = my_context->glxprocaddress("glXSwapIntervalMESA");
        init = 0;
    }
    if(!SwapIntervalMESA)
        return 0;
    return SwapIntervalMESA(interval);
}
/*
 * glProgramCallbackMESA wrapper: translates the guest callback through
 * find_program_callback_Fct() before registering it with the driver.
 */
EXPORT void my_glProgramCallbackMESA(x86emu_t* emu, void* f, void* data)
{
    static vFpp_t ProgramCallbackMESA = NULL;
    static int init = 1;
    if(init) {
        ProgramCallbackMESA = my_context->glxprocaddress("glProgramCallbackMESA");
        init = 0;
    }
    if(!ProgramCallbackMESA)
        return;
    ProgramCallbackMESA(find_program_callback_Fct(f), data);
}
#define PRE_INIT if(libGL) {lib->priv.w.lib = dlopen(libGL, RTLD_LAZY | RTLD_GLOBAL); lib->path = strdup(libGL);} else
#define CUSTOM_INIT \
lib->priv.w.priv = dlsym(lib->priv.w.lib, "glXGetProcAddress"); \
box86->glxprocaddress = lib->priv.w.priv; \
lib->priv.w.needed = 1; \
lib->priv.w.neededlibs = (char**)calloc(lib->priv.w.needed, sizeof(char*)); \
lib->priv.w.neededlibs[0] = strdup("libdl.so.2");
#include "wrappedlib_init.h"
/*
 * Builds the two symbol-name -> wrapper maps consumed by my_glXGetProcAddress:
 *  - context->glwrappers: every known GL symbol (plain wrapped and my_* ones)
 *  - context->glmymap:    only the my_* symbols that need a box86-side shim
 */
void fillGLProcWrapper(box86context_t* context)
{
    int cnt, ret;
    khint_t k;
    kh_symbolmap_t * symbolmap = kh_init(symbolmap);
    // populates maps: first the plain wrapped GL symbols...
    cnt = sizeof(libglsymbolmap)/sizeof(map_onesymbol_t);
    for (int i=0; i<cnt; ++i) {
        k = kh_put(symbolmap, symbolmap, libglsymbolmap[i].name, &ret);
        kh_value(symbolmap, k) = libglsymbolmap[i].w;
    }
    // ...and the my_ symbols map merged into the same table
    cnt = sizeof(MAPNAME(mysymbolmap))/sizeof(map_onesymbol_t);
    for (int i=0; i<cnt; ++i) {
        k = kh_put(symbolmap, symbolmap, libglmysymbolmap[i].name, &ret);
        kh_value(symbolmap, k) = libglmysymbolmap[i].w;
    }
    context->glwrappers = symbolmap;
    // my_* map alone, used to decide whether a symbol needs the box86 shim
    symbolmap = kh_init(symbolmap);
    cnt = sizeof(MAPNAME(mysymbolmap))/sizeof(map_onesymbol_t);
    for (int i=0; i<cnt; ++i) {
        k = kh_put(symbolmap, symbolmap, libglmysymbolmap[i].name, &ret);
        kh_value(symbolmap, k) = libglmysymbolmap[i].w;
    }
    context->glmymap = symbolmap;
}
/*
 * Releases the GL wrapper/symbol hash maps attached to the context.
 * Safe to call with a NULL context or when the maps were never built.
 */
void freeGLProcWrapper(box86context_t* context)
{
    if (!context)
        return;

    if (context->glwrappers) {
        kh_destroy(symbolmap, context->glwrappers);
    }
    if (context->glmymap) {
        kh_destroy(symbolmap, context->glmymap);
    }

    context->glwrappers = NULL;
    context->glmymap = NULL;
}
| 4,098 |
302 | #include <strings.h>
int ffs(int i)
{
    /* Find-first-set: returns the 1-based index of the least significant set
     * bit of i, or 0 if i == 0 (POSIX ffs() contract).
     *
     * Implementation: isolate the lowest set bit with v & -v, then locate it
     * with an 8-bit lookup table after shifting it down to the low byte.
     */
    static const unsigned char table[] =
    {
        0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
        6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
        7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
        7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
        8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
        8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
        8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
        8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8};
    unsigned int a;
    /* Bug fix: the previous expression `i & -i` invoked signed-integer
     * overflow (undefined behavior) when i == INT_MIN, because -INT_MIN is
     * not representable.  Unsigned negation is well defined, so compute the
     * lowest-set-bit mask on an unsigned copy instead.
     */
    unsigned int v = (unsigned int)i;
    unsigned int x = v & (0u - v);  /* isolates the lowest set bit (0 if v == 0) */
    /* Select which byte of x contains the isolated bit. */
    a = x <= 0xffff ? (x <= 0xff ? 0 : 8) : (x <= 0xffffff ? 16 : 24);
    return table[x >> a] + a;
}
4,874 | <reponame>RapotOR/gocv
#include "../core.h"
#include "cuda.h"
#include "objdetect.h"
// CascadeClassifier_GPU
// Creates a CUDA cascade classifier from a trained cascade file and wraps it
// in a heap-allocated cv::Ptr so the Go side can hold an opaque handle.
CascadeClassifier_GPU CascadeClassifier_GPU_Create(const char* cascade_name) {
    return new cv::Ptr<cv::cuda::CascadeClassifier>(cv::cuda::CascadeClassifier::create(cascade_name));
}

// Runs multi-scale detection on a GPU image and converts the GPU result
// buffer into a C array of Rects.
struct Rects CascadeClassifier_GPU_DetectMultiScale(CascadeClassifier_GPU cs, GpuMat img) {
    std::vector<cv::Rect> detected;
    cv::cuda::GpuMat objbuf;
    (*cs)->detectMultiScale(*img, objbuf); // uses all default parameters
    (*cs)->convert(objbuf, detected);
    // NOTE(review): array ownership transfers to the caller (presumably the
    // Go wrapper frees it) — confirm against the Go side.
    Rect* rects = new Rect[detected.size()];
    for (size_t i = 0; i < detected.size(); ++i) {
        Rect r = {detected[i].x, detected[i].y, detected[i].width, detected[i].height};
        rects[i] = r;
    }
    Rects ret = {rects, (int)detected.size()};
    return ret;
}
// HOG
// Creates a CUDA HOG descriptor/detector with OpenCV's default parameters.
HOG HOG_Create() {
    return new cv::Ptr<cv::cuda::HOG>(cv::cuda::HOG::create());
}

// Creates a CUDA HOG with explicit window/block/stride/cell geometry and
// histogram bin count, converting the C Size structs to cv::Size.
HOG HOG_CreateWithParams(Size winSize, Size blockSize, Size blockStride, Size cellSize, int nbins) {
    cv::Size winSz(winSize.width, winSize.height);
    cv::Size blockSz(blockSize.width, blockSize.height);
    cv::Size blockSt(blockStride.width, blockStride.height);
    cv::Size cellSz(cellSize.width, cellSize.height);
    return new cv::Ptr<cv::cuda::HOG>(cv::cuda::HOG::create(winSz, blockSz, blockSt, cellSz, nbins));
}

// Detects objects at multiple scales and returns matches as a C array of
// Rects.
struct Rects HOG_DetectMultiScale(HOG hog, GpuMat img) {
    std::vector<cv::Rect> detected;
    (*hog)->detectMultiScale(*img, detected);
    // NOTE(review): array ownership transfers to the caller — confirm the Go
    // wrapper frees it.
    Rect* rects = new Rect[detected.size()];
    for (size_t i = 0; i < detected.size(); ++i) {
        Rect r = {detected[i].x, detected[i].y, detected[i].width, detected[i].height};
        rects[i] = r;
    }
    Rects ret = {rects, (int)detected.size()};
    return ret;
}

// Computes the HOG feature descriptor of an image into a newly allocated
// GpuMat owned by the caller.
GpuMat HOG_Compute(HOG hog, GpuMat img) {
    GpuMat dst = new cv::cuda::GpuMat();
    (*hog)->compute(*img, *dst);
    return dst;
}
Mat HOG_GetPeopleDetector(HOG hog) {
return new cv::Mat((*hog)->getDefaultPeopleDetector());
}
void HOG_SetSVMDetector(HOG hog, Mat det) {
(*hog)->setSVMDetector(*det);
}
// ---------------------------------------------------------------------------
// Thin getter wrappers exposing cv::cuda::HOG configuration to C callers.
// Each simply forwards to the underlying detector and converts the result to
// a plain C type.
// ---------------------------------------------------------------------------

int HOG_GetDescriptorFormat(HOG hog) {
    return int((*hog)->getDescriptorFormat());
}

size_t HOG_GetBlockHistogramSize(HOG hog) {
    return size_t((*hog)->getBlockHistogramSize());
}

size_t HOG_GetDescriptorSize(HOG hog) {
    return size_t((*hog)->getDescriptorSize());
}

bool HOG_GetGammaCorrection(HOG hog) {
    return bool((*hog)->getGammaCorrection());
}

int HOG_GetGroupThreshold(HOG hog) {
    return int((*hog)->getGroupThreshold());
}

double HOG_GetHitThreshold(HOG hog) {
    return double((*hog)->getHitThreshold());
}

double HOG_GetL2HysThreshold(HOG hog) {
    return double((*hog)->getL2HysThreshold());
}

int HOG_GetNumLevels(HOG hog) {
    return int((*hog)->getNumLevels());
}

double HOG_GetScaleFactor(HOG hog) {
    return double((*hog)->getScaleFactor());
}

double HOG_GetWinSigma(HOG hog) {
    return double((*hog)->getWinSigma());
}

// Converts the cv::Size window stride into the plain C Size struct.
struct Size HOG_GetWinStride(HOG hog) {
    cv::Size sz = (*hog)->getWinStride();
    Size size = {sz.width, sz.height};
    return size;
}
// ---------------------------------------------------------------------------
// Thin setter wrappers forwarding configuration values to cv::cuda::HOG.
// ---------------------------------------------------------------------------

// Casts the int flag coming from C into the enum expected by OpenCV.
void HOG_SetDescriptorFormat(HOG hog, int descrFormat) {
    auto df = static_cast<cv::HOGDescriptor::DescriptorStorageFormat>(descrFormat);
    (*hog)->setDescriptorFormat(df);
}

void HOG_SetGammaCorrection(HOG hog, bool gammaCorrection) {
    (*hog)->setGammaCorrection(gammaCorrection);
}

void HOG_SetGroupThreshold(HOG hog, int groupThreshold) {
    (*hog)->setGroupThreshold(groupThreshold);
}

void HOG_SetHitThreshold(HOG hog, double hitThreshold) {
    (*hog)->setHitThreshold(hitThreshold);
}

void HOG_SetL2HysThreshold(HOG hog, double thresholdL2hys) {
    (*hog)->setL2HysThreshold(thresholdL2hys);
}

void HOG_SetNumLevels(HOG hog, int nlevels) {
    (*hog)->setNumLevels(nlevels);
}

void HOG_SetScaleFactor(HOG hog, double scale0) {
    (*hog)->setScaleFactor(scale0);
}

void HOG_SetWinSigma(HOG hog, double winSigma) {
    (*hog)->setWinSigma(winSigma);
}

// Converts the plain C Size into cv::Size before forwarding.
void HOG_SetWinStride(HOG hog, Size dsize) {
    cv::Size sz(dsize.width, dsize.height);
    (*hog)->setWinStride(sz);
}
| 1,752 |
777 | <reponame>google-ar/chromium<filename>gpu/command_buffer/tests/gl_texture_mailbox_unittest.cc
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <GLES2/gl2extchromium.h>
#include <stddef.h>
#include <stdint.h>
#include "gpu/command_buffer/client/gles2_lib.h"
#include "gpu/command_buffer/common/mailbox.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/command_buffer/service/mailbox_manager.h"
#include "gpu/command_buffer/tests/gl_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gl/gl_share_group.h"
namespace gpu {
namespace {
// Reads back the RGBA value of texel (x, y) of 2D texture |id| by attaching it
// to a temporary framebuffer and calling glReadPixels. The previously bound
// framebuffer is restored before returning.
// NOTE(review): the glTexParameter* calls below apply to whatever texture is
// currently bound to GL_TEXTURE_2D — callers appear to bind |id| before
// calling this helper; confirm at call sites.
uint32_t ReadTexel(GLuint id, GLint x, GLint y) {
  // Remember the caller's framebuffer so it can be restored afterwards.
  GLint old_fbo = 0;
  glGetIntegerv(GL_FRAMEBUFFER_BINDING, &old_fbo);
  GLuint fbo;
  glGenFramebuffers(1, &fbo);
  glBindFramebuffer(GL_FRAMEBUFFER, fbo);
  glFramebufferTexture2D(GL_FRAMEBUFFER,
                         GL_COLOR_ATTACHMENT0,
                         GL_TEXTURE_2D,
                         id,
                         0);
  // Some drivers (NVidia/SGX) require texture settings to be a certain way or
  // they won't report FRAMEBUFFER_COMPLETE.
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  EXPECT_EQ(static_cast<GLenum>(GL_FRAMEBUFFER_COMPLETE),
            glCheckFramebufferStatus(GL_FRAMEBUFFER));
  uint32_t texel = 0;
  glReadPixels(x, y, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, &texel);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  // Restore state and release the temporary framebuffer.
  glBindFramebuffer(GL_FRAMEBUFFER, old_fbo);
  glDeleteFramebuffers(1, &fbo);
  return texel;
}
}
// Test fixture providing two GL contexts (gl1_, gl2_) that share a mailbox
// manager, so textures produced in one context can be consumed in the other.
class GLTextureMailboxTest : public testing::Test {
 protected:
  // Initializes gl1_ and then gl2_ with gl1_'s mailbox manager shared in.
  // Not done in SetUp() so individual tests can use custom options instead.
  void SetUpContexts() {
    gl1_.Initialize(GLManager::Options());
    GLManager::Options options;
    options.share_mailbox_manager = &gl1_;
    gl2_.Initialize(options);
  }

  void TearDown() override {
    gl1_.Destroy();
    gl2_.Destroy();
  }

  // The second GL context takes and consumes a mailbox from the first GL
  // context. Assumes that |gl1_| is current.
  Mailbox TakeAndConsumeMailbox() {
    // Render a solid color into gl1_'s back buffer and publish the front
    // buffer into a fresh mailbox.
    glResizeCHROMIUM(10, 10, 1, true);
    glClearColor(0, 1, 1, 1);
    glClear(GL_COLOR_BUFFER_BIT);
    ::gles2::GetGLContext()->SwapBuffers();
    Mailbox mailbox;
    glGenMailboxCHROMIUM(mailbox.name);
    gl1_.decoder()->TakeFrontBuffer(mailbox);
    // Consume it from gl2_, then immediately drop the texture; the mailbox
    // itself stays alive.
    gl2_.MakeCurrent();
    GLuint tex;
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox.name);
    glDeleteTextures(1, &tex);
    glFlush();
    gl1_.MakeCurrent();
    return mailbox;
  }

  GLManager gl1_;
  GLManager gl2_;
};
// Round-trips a 1x1 RGBA texture between the two contexts through two
// mailboxes and verifies the texel value survives both directions.
TEST_F(GLTextureMailboxTest, ProduceAndConsumeTexture) {
  SetUpContexts();
  gl1_.MakeCurrent();
  GLbyte mailbox1[GL_MAILBOX_SIZE_CHROMIUM];
  glGenMailboxCHROMIUM(mailbox1);
  GLbyte mailbox2[GL_MAILBOX_SIZE_CHROMIUM];
  glGenMailboxCHROMIUM(mailbox2);
  GLuint tex1;
  glGenTextures(1, &tex1);
  glBindTexture(GL_TEXTURE_2D, tex1);
  uint32_t source_pixel = 0xFF0000FF;
  glTexImage2D(GL_TEXTURE_2D,
               0,
               GL_RGBA,
               1, 1,
               0,
               GL_RGBA,
               GL_UNSIGNED_BYTE,
               &source_pixel);
  glProduceTextureCHROMIUM(GL_TEXTURE_2D, mailbox1);
  glFlush();
  // Consume in context 2 and verify, then publish back through mailbox2.
  gl2_.MakeCurrent();
  GLuint tex2;
  glGenTextures(1, &tex2);
  glBindTexture(GL_TEXTURE_2D, tex2);
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox1);
  EXPECT_EQ(source_pixel, ReadTexel(tex2, 0, 0));
  glProduceTextureCHROMIUM(GL_TEXTURE_2D, mailbox2);
  glFlush();
  gl1_.MakeCurrent();
  glBindTexture(GL_TEXTURE_2D, tex1);
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox2);
  EXPECT_EQ(source_pixel, ReadTexel(tex1, 0, 0));
}
// Same round-trip as ProduceAndConsumeTexture but with a GL_RGB (no alpha)
// texture format.
TEST_F(GLTextureMailboxTest, ProduceAndConsumeTextureRGB) {
  SetUpContexts();
  gl1_.MakeCurrent();
  GLbyte mailbox1[GL_MAILBOX_SIZE_CHROMIUM];
  glGenMailboxCHROMIUM(mailbox1);
  GLbyte mailbox2[GL_MAILBOX_SIZE_CHROMIUM];
  glGenMailboxCHROMIUM(mailbox2);
  GLuint tex1;
  glGenTextures(1, &tex1);
  glBindTexture(GL_TEXTURE_2D, tex1);
  uint32_t source_pixel = 0xFF000000;
  glTexImage2D(GL_TEXTURE_2D,
               0,
               GL_RGB,
               1, 1,
               0,
               GL_RGB,
               GL_UNSIGNED_BYTE,
               &source_pixel);
  glProduceTextureCHROMIUM(GL_TEXTURE_2D, mailbox1);
  glFlush();
  gl2_.MakeCurrent();
  GLuint tex2;
  glGenTextures(1, &tex2);
  glBindTexture(GL_TEXTURE_2D, tex2);
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox1);
  EXPECT_EQ(source_pixel, ReadTexel(tex2, 0, 0));
  glProduceTextureCHROMIUM(GL_TEXTURE_2D, mailbox2);
  glFlush();
  gl1_.MakeCurrent();
  glBindTexture(GL_TEXTURE_2D, tex1);
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox2);
  EXPECT_EQ(source_pixel, ReadTexel(tex1, 0, 0));
}
// Exercises the "direct" produce/consume variants that take an explicit
// texture id and create the consuming texture, instead of operating on the
// currently bound texture.
TEST_F(GLTextureMailboxTest, ProduceAndConsumeTextureDirect) {
  SetUpContexts();
  gl1_.MakeCurrent();
  GLbyte mailbox1[GL_MAILBOX_SIZE_CHROMIUM];
  glGenMailboxCHROMIUM(mailbox1);
  GLbyte mailbox2[GL_MAILBOX_SIZE_CHROMIUM];
  glGenMailboxCHROMIUM(mailbox2);
  GLuint tex1;
  glGenTextures(1, &tex1);
  glBindTexture(GL_TEXTURE_2D, tex1);
  uint32_t source_pixel = 0xFF0000FF;
  glTexImage2D(GL_TEXTURE_2D,
               0,
               GL_RGBA,
               1, 1,
               0,
               GL_RGBA,
               GL_UNSIGNED_BYTE,
               &source_pixel);
  glProduceTextureDirectCHROMIUM(tex1, GL_TEXTURE_2D, mailbox1);
  glFlush();
  gl2_.MakeCurrent();
  GLuint tex2 = glCreateAndConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox1);
  glBindTexture(GL_TEXTURE_2D, tex2);
  EXPECT_EQ(source_pixel, ReadTexel(tex2, 0, 0));
  glProduceTextureDirectCHROMIUM(tex2, GL_TEXTURE_2D, mailbox2);
  glFlush();
  gl1_.MakeCurrent();
  GLuint tex3 = glCreateAndConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox2);
  glBindTexture(GL_TEXTURE_2D, tex3);
  EXPECT_EQ(source_pixel, ReadTexel(tex3, 0, 0));
}
// Consuming a mailbox that nothing was produced into must fail with
// GL_INVALID_OPERATION and leave the currently bound texture untouched.
TEST_F(GLTextureMailboxTest, ConsumeTextureValidatesKey) {
  SetUpContexts();
  GLuint tex;
  glGenTextures(1, &tex);
  glBindTexture(GL_TEXTURE_2D, tex);
  uint32_t source_pixel = 0xFF0000FF;
  glTexImage2D(GL_TEXTURE_2D,
               0,
               GL_RGBA,
               1, 1,
               0,
               GL_RGBA,
               GL_UNSIGNED_BYTE,
               &source_pixel);
  // A generated-but-never-produced mailbox name is an invalid key.
  GLbyte invalid_mailbox[GL_MAILBOX_SIZE_CHROMIUM];
  glGenMailboxCHROMIUM(invalid_mailbox);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, invalid_mailbox);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_OPERATION), glGetError());
  // Ensure level 0 is still intact after glConsumeTextureCHROMIUM fails.
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  EXPECT_EQ(source_pixel, ReadTexel(tex, 0, 0));
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
}
// Verifies that a texture shared through a mailbox behaves as one object:
// pixel and parameter changes are visible in both contexts, the texture
// survives deletion of one alias, and the mailbox dies with the last alias.
TEST_F(GLTextureMailboxTest, SharedTextures) {
  SetUpContexts();
  gl1_.MakeCurrent();
  GLuint tex1;
  glGenTextures(1, &tex1);
  glBindTexture(GL_TEXTURE_2D, tex1);
  uint32_t source_pixel = 0xFF0000FF;
  glTexImage2D(GL_TEXTURE_2D,
               0,
               GL_RGBA,
               1, 1,
               0,
               GL_RGBA,
               GL_UNSIGNED_BYTE,
               &source_pixel);
  GLbyte mailbox[GL_MAILBOX_SIZE_CHROMIUM];
  glGenMailboxCHROMIUM(mailbox);
  glProduceTextureCHROMIUM(GL_TEXTURE_2D, mailbox);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  glFlush();
  gl2_.MakeCurrent();
  GLuint tex2;
  glGenTextures(1, &tex2);
  glBindTexture(GL_TEXTURE_2D, tex2);
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  // Change texture in context 2.
  source_pixel = 0xFF00FF00;
  glTexSubImage2D(GL_TEXTURE_2D,
                  0,
                  0, 0,
                  1, 1,
                  GL_RGBA,
                  GL_UNSIGNED_BYTE,
                  &source_pixel);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  glFlush();
  // Check it in context 1.
  gl1_.MakeCurrent();
  EXPECT_EQ(source_pixel, ReadTexel(tex1, 0, 0));
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  // Change parameters (note: ReadTexel will reset those).
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
                  GL_LINEAR_MIPMAP_NEAREST);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  glFlush();
  // Check in context 2.
  gl2_.MakeCurrent();
  GLint parameter = 0;
  glGetTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, &parameter);
  EXPECT_EQ(GL_REPEAT, parameter);
  parameter = 0;
  glGetTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, &parameter);
  EXPECT_EQ(GL_LINEAR, parameter);
  parameter = 0;
  glGetTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, &parameter);
  EXPECT_EQ(GL_LINEAR_MIPMAP_NEAREST, parameter);
  // Delete texture in context 1.
  gl1_.MakeCurrent();
  glDeleteTextures(1, &tex1);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  // Check texture still exists in context 2.
  gl2_.MakeCurrent();
  EXPECT_EQ(source_pixel, ReadTexel(tex2, 0, 0));
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  // The mailbox should still exist too.
  GLuint tex3;
  glGenTextures(1, &tex3);
  glBindTexture(GL_TEXTURE_2D, tex3);
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  // Delete both textures.
  glDeleteTextures(1, &tex2);
  glDeleteTextures(1, &tex3);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  // Mailbox should be gone now.
  glGenTextures(1, &tex2);
  glBindTexture(GL_TEXTURE_2D, tex2);
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_OPERATION), glGetError());
  glDeleteTextures(1, &tex2);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
}
// Takes gl2_'s front buffer into a mailbox, consumes it from gl1_, and checks
// that the consumed copy is unaffected by later swaps, that a returned front
// buffer can be re-taken with new contents, and that the consumed texture
// outlives destruction of the producing context.
TEST_F(GLTextureMailboxTest, TakeFrontBuffer) {
  SetUpContexts();
  gl1_.MakeCurrent();
  Mailbox mailbox;
  glGenMailboxCHROMIUM(mailbox.name);
  gl2_.MakeCurrent();
  glResizeCHROMIUM(10, 10, 1, true);
  glClearColor(0, 1, 1, 1);
  glClear(GL_COLOR_BUFFER_BIT);
  ::gles2::GetGLContext()->SwapBuffers();
  gl2_.decoder()->TakeFrontBuffer(mailbox);
  gl1_.MakeCurrent();
  GLuint tex1;
  glGenTextures(1, &tex1);
  glBindTexture(GL_TEXTURE_2D, tex1);
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox.name);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  EXPECT_EQ(0xFFFFFF00u, ReadTexel(tex1, 0, 0));
  // A new swap in gl2_ must not change the already-taken front buffer.
  gl2_.MakeCurrent();
  glClearColor(1, 0, 0, 1);
  glClear(GL_COLOR_BUFFER_BIT);
  ::gles2::GetGLContext()->SwapBuffers();
  gl1_.MakeCurrent();
  EXPECT_EQ(0xFFFFFF00u, ReadTexel(tex1, 0, 0));
  glDeleteTextures(1, &tex1);
  Mailbox mailbox2;
  glGenMailboxCHROMIUM(mailbox2.name);
  gl2_.MakeCurrent();
  gl2_.decoder()->ReturnFrontBuffer(mailbox, false);
  // Flushing doesn't matter, only SwapBuffers().
  glClearColor(0, 1, 0, 1);
  glClear(GL_COLOR_BUFFER_BIT);
  glFlush();
  gl2_.decoder()->TakeFrontBuffer(mailbox2);
  gl1_.MakeCurrent();
  glGenTextures(1, &tex1);
  glBindTexture(GL_TEXTURE_2D, tex1);
  glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox2.name);
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  EXPECT_EQ(0xFF0000FFu, ReadTexel(tex1, 0, 0));
  // The consumed texture must survive destruction of the producing context.
  gl2_.MakeCurrent();
  gl2_.Destroy();
  gl1_.MakeCurrent();
  EXPECT_EQ(0xFF0000FFu, ReadTexel(tex1, 0, 0));
  EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  glDeleteTextures(1, &tex1);
}
// The client, represented by |gl2_|, will request 5 frontbuffers, and then
// start returning them.
// Checks the decoder's back-texture cache: returned (non-lost) front buffers
// are reused without new allocations, while lost ones force fresh textures.
TEST_F(GLTextureMailboxTest, FrontBufferCache) {
  SetUpContexts();
  gl1_.MakeCurrent();
  std::vector<Mailbox> mailboxes;
  for (int i = 0; i < 5; ++i) {
    Mailbox mailbox = TakeAndConsumeMailbox();
    mailboxes.push_back(mailbox);
  }
  EXPECT_EQ(5u, gl1_.decoder()->GetSavedBackTextureCountForTest());
  EXPECT_EQ(5u, gl1_.decoder()->GetCreatedBackTextureCountForTest());
  // If the textures aren't lost, they're reused.
  for (int i = 0; i < 100; ++i) {
    gl1_.decoder()->ReturnFrontBuffer(mailboxes[0], false);
    mailboxes.erase(mailboxes.begin());
    Mailbox mailbox = TakeAndConsumeMailbox();
    mailboxes.push_back(mailbox);
  }
  EXPECT_EQ(5u, gl1_.decoder()->GetSavedBackTextureCountForTest());
  EXPECT_EQ(5u, gl1_.decoder()->GetCreatedBackTextureCountForTest());
  // If the textures are lost, they're not reused.
  for (int i = 0; i < 100; ++i) {
    gl1_.decoder()->ReturnFrontBuffer(mailboxes[0], true);
    mailboxes.erase(mailboxes.begin());
    Mailbox mailbox = TakeAndConsumeMailbox();
    mailboxes.push_back(mailbox);
  }
  EXPECT_EQ(5u, gl1_.decoder()->GetSavedBackTextureCountForTest());
  EXPECT_EQ(105u, gl1_.decoder()->GetCreatedBackTextureCountForTest());
}
// The client, represented by |gl2_|, will request and return 5 frontbuffers.
// Then the size of the buffer will be changed. All cached frontbuffers should
// be discarded.
// After a resize + swap, all cached (returned) front buffers must be
// discarded since their dimensions no longer match.
TEST_F(GLTextureMailboxTest, FrontBufferChangeSize) {
  SetUpContexts();
  gl1_.MakeCurrent();
  std::vector<Mailbox> mailboxes;
  for (int i = 0; i < 5; ++i) {
    Mailbox mailbox = TakeAndConsumeMailbox();
    mailboxes.push_back(mailbox);
  }
  EXPECT_EQ(5u, gl1_.decoder()->GetSavedBackTextureCountForTest());
  for (int i = 0; i < 5; ++i) {
    gl1_.decoder()->ReturnFrontBuffer(mailboxes[i], false);
  }
  mailboxes.clear();
  EXPECT_EQ(5u, gl1_.decoder()->GetSavedBackTextureCountForTest());
  glResizeCHROMIUM(21, 31, 1, true);
  ::gles2::GetGLContext()->SwapBuffers();
  EXPECT_EQ(0u, gl1_.decoder()->GetSavedBackTextureCountForTest());
}
// The client, represented by |gl2_|, will request and return 5 frontbuffers.
// Then |gl1_| will start drawing with a different color. The returned
// frontbuffers should pick up the new color.
// Uses a multisampled gl1_; after returning cached front buffers, newly taken
// ones must reflect the producer's latest clear color rather than stale data.
TEST_F(GLTextureMailboxTest, FrontBufferChangeColor) {
  GLManager::Options options1;
  options1.multisampled = true;
  gl1_.Initialize(options1);
  GLManager::Options options2;
  options2.share_mailbox_manager = &gl1_;
  gl2_.Initialize(options2);
  gl1_.MakeCurrent();
  std::vector<Mailbox> mailboxes;
  for (int i = 0; i < 5; ++i) {
    Mailbox mailbox = TakeAndConsumeMailbox();
    mailboxes.push_back(mailbox);
  }
  for (int i = 0; i < 5; ++i) {
    gl1_.decoder()->ReturnFrontBuffer(mailboxes[i], false);
  }
  mailboxes.clear();
  for (int i = 0; i < 5; ++i) {
    glClearColor(1, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);
    ::gles2::GetGLContext()->SwapBuffers();
    Mailbox mailbox;
    glGenMailboxCHROMIUM(mailbox.name);
    gl1_.decoder()->TakeFrontBuffer(mailbox);
    // Normally, consumers of TakeFrontBuffer() must supply their own
    // synchronization mechanism. For this test, just use a glFinish().
    glFinish();
    gl2_.MakeCurrent();
    GLuint tex;
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox.name);
    EXPECT_EQ(0xFF0000FFu, ReadTexel(tex, 0, 0));
    glDeleteTextures(1, &tex);
    glFlush();
    gl1_.MakeCurrent();
  }
}
// Producing a texture under a target that doesn't match its actual type
// (cube map produced as GL_TEXTURE_2D) must fail with GL_INVALID_OPERATION.
TEST_F(GLTextureMailboxTest, ProduceTextureDirectInvalidTarget) {
  SetUpContexts();
  gl1_.MakeCurrent();
  GLbyte mailbox1[GL_MAILBOX_SIZE_CHROMIUM];
  glGenMailboxCHROMIUM(mailbox1);
  GLuint tex1;
  glGenTextures(1, &tex1);
  glBindTexture(GL_TEXTURE_CUBE_MAP, tex1);
  uint32_t source_pixel = 0xFF0000FF;
  glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X,
               0,
               GL_RGBA,
               1, 1,
               0,
               GL_RGBA,
               GL_UNSIGNED_BYTE,
               &source_pixel);
  glProduceTextureDirectCHROMIUM(tex1, GL_TEXTURE_2D, mailbox1);
  EXPECT_EQ(static_cast<GLenum>(GL_INVALID_OPERATION), glGetError());
}
// http://crbug.com/281565
#if !defined(OS_ANDROID)
// Takes front buffers from two extra contexts (in one share group) into two
// mailboxes and verifies gl1_ can consume both, each with its own contents,
// even after the producing contexts are destroyed.
TEST_F(GLTextureMailboxTest, TakeFrontBufferMultipleContexts) {
  SetUpContexts();
  gl1_.MakeCurrent();
  Mailbox mailbox[2];
  glGenMailboxCHROMIUM(mailbox[0].name);
  glGenMailboxCHROMIUM(mailbox[1].name);
  GLuint tex[2];
  glGenTextures(2, tex);
  GLManager::Options options;
  options.share_mailbox_manager = &gl1_;
  GLManager other_gl[2];
  for (size_t i = 0; i < 2; ++i) {
    other_gl[i].Initialize(options);
    other_gl[i].MakeCurrent();
    glResizeCHROMIUM(10, 10, 1, true);
    glClearColor(1 - i % 2, i % 2, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);
    ::gles2::GetGLContext()->SwapBuffers();
    other_gl[i].decoder()->TakeFrontBuffer(mailbox[i]);
    // Make sure both "other gl" are in the same share group.
    if (!options.share_group_manager)
      options.share_group_manager = other_gl+i;
  }
  gl1_.MakeCurrent();
  for (size_t i = 0; i < 2; ++i) {
    glBindTexture(GL_TEXTURE_2D, tex[i]);
    glConsumeTextureCHROMIUM(GL_TEXTURE_2D, mailbox[i].name);
    EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
  }
  gl1_.MakeCurrent();
  EXPECT_EQ(0xFF0000FFu, ReadTexel(tex[0], 0, 0));
  EXPECT_EQ(0xFF00FF00u, ReadTexel(tex[1], 9, 9));
  for (size_t i = 0; i < 2; ++i) {
    other_gl[i].MakeCurrent();
    other_gl[i].Destroy();
  }
  gl1_.MakeCurrent();
  glDeleteTextures(2, tex);
}
#endif
} // namespace gpu
| 7,558 |
3,151 | <filename>src/main/java/com/rarchives/ripme/ripper/rippers/FitnakedgirlsRipper.java
package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;
public class FitnakedgirlsRipper extends AbstractHTMLRipper {
public FitnakedgirlsRipper(URL url) throws IOException {
super(url);
}
@Override
public String getHost() {
return "fitnakedgirls";
}
@Override
public String getDomain() {
return "fitnakedgirls.com";
}
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p;
Matcher m;
p = Pattern.compile("^.*fitnakedgirls\\.com/gallery/(.+)$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
}
throw new MalformedURLException(
"Expected fitnakedgirls.com gallery format: " + "fitnakedgirls.com/gallery/####" + " Got: " + url);
}
@Override
public Document getFirstPage() throws IOException {
return Http.url(url).get();
}
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<>();
Elements imgs = doc.select("div[class*=wp-tiles-tile-bg] > img");
for (Element img : imgs) {
String imgSrc = img.attr("src");
imageURLs.add(imgSrc);
}
return imageURLs;
}
@Override
public void downloadURL(URL url, int index) {
// Send referrer when downloading images
addURLToDownload(url, getPrefix(index), "", this.url.toExternalForm(), null);
}
} | 903 |
// RUN: %clang_cc1 -triple x86_64-linux-gnu -fsyntax-only -verify %s
// RUN: %clang_cc1 -triple x86_64-linux-gnu -ffp-exception-behavior=strict -DSTRICT -fsyntax-only -verify %s
// RUN: %clang_cc1 -triple x86_64-linux-gnu -x c++ -DCPP -DSTRICT -ffp-exception-behavior=strict -fsyntax-only -verify %s

// Exercises `#pragma STDC FENV_ACCESS` placement diagnostics and the effect
// of FENV_ACCESS ON on floating-point constant folding, in both C and C++
// modes (the C++ run defines CPP).

// In C++ mode the initializers below must be constexpr to participate in
// constant expressions; C mode only gets `const`.
#ifdef CPP
#define CONST constexpr
#else
#define CONST const
#endif

// Only ON, OFF or DEFAULT are valid pragma arguments.
#pragma STDC FENV_ACCESS IN_BETWEEN // expected-warning {{expected 'ON' or 'OFF' or 'DEFAULT' in pragma}}

#pragma STDC FENV_ACCESS OFF

float func_04(int x, float y) {
  if (x)
    return y + 2;
  // The pragma is rejected after a statement inside a block.
  #pragma STDC FENV_ACCESS ON // expected-error{{'#pragma STDC FENV_ACCESS' can only appear at file scope or at the start of a compound statement}}
  return x + y;
}

#pragma STDC FENV_ACCESS ON

int main() {
  // With FENV_ACCESS ON, inexact floating-point operations are no longer
  // usable in constant expressions; exact ones (like one/four) still are.
  CONST float one = 1.0F ;
  CONST float three = 3.0F ;
  CONST float four = 4.0F ;
  CONST float frac_ok = one/four;
#if !defined(CPP)
//expected-note@+2 {{declared here}}
#endif
  CONST float frac = one/three;
  CONST double d = one;
  CONST int not_too_big = 255;
  CONST float fnot_too_big = not_too_big;
  CONST int too_big = 0x7ffffff0;
#if defined(CPP)
//expected-warning@+2{{implicit conversion}}
#endif
  CONST float fbig = too_big; // inexact
#if !defined(CPP)
#define static_assert _Static_assert
#endif
  // All of these fold exactly, so they remain valid enumerator initializers.
  enum {
    e1 = (int)one, e3 = (int)three, e4 = (int)four, e_four_quarters = (int)(frac_ok * 4)
  };
  static_assert(e1 == 1 && e3 == 3 && e4 == 4 && e_four_quarters == 1, "");
  enum {
#if !defined(CPP)
  // expected-error@+2 {{not an integer constant expression}} expected-note@+2 {{is not a constant expression}}
#endif
  e_three_thirds = (int)(frac * 3)
  };
  if (one <= four) return 0;
  return -1;
}
504 | <reponame>ekoATgithub/dddlib<filename>dddlib-spring-test/src/test/java/org/dayatang/springtest/test/CustomBean.java
package org.dayatang.springtest.test;
import org.springframework.beans.factory.InitializingBean;
//@Service("customBean")  // annotation commented out: the bean is not picked up by component scanning
/**
 * Minimal Spring bean that prints on lifecycle and manual invocation;
 * presumably used to exercise container wiring in tests — confirm usage.
 */
public class CustomBean implements InitializingBean {

    // InitializingBean hook: invoked by Spring after property injection.
    @Override
    public void afterPropertiesSet() throws Exception {
        System.out.println("aaaaaaaaaaa");
    }

    // Plain method with no lifecycle role; must be called explicitly.
    public void bb() {
        System.out.println("oooooooooooooo");
    }
}
| 162 |
1,635 | """Support for Synology DSM buttons."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
import logging
from typing import Any, Final
from homeassistant.components.button import (
ButtonDeviceClass,
ButtonEntity,
ButtonEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import DeviceInfo, EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import SynoApi
from .const import DOMAIN, SYNO_API
LOGGER = logging.getLogger(__name__)
@dataclass
class SynologyDSMbuttonDescriptionMixin:
    """Mixin to describe a Synology DSM button entity."""

    # Coroutine factory invoked on button press; receives the hub's SynoApi.
    press_action: Callable[[SynoApi], Any]
@dataclass
class SynologyDSMbuttonDescription(
    ButtonEntityDescription, SynologyDSMbuttonDescriptionMixin
):
    """Class to describe a Synology DSM button entity.

    Combines Home Assistant's standard button description fields with the
    press_action callback from the mixin.
    """
# Static catalog of buttons created for every configured Synology NAS; the
# press_action lambdas map each button to the SynoApi coroutine it triggers.
BUTTONS: Final = [
    SynologyDSMbuttonDescription(
        key="reboot",
        name="Reboot",
        device_class=ButtonDeviceClass.RESTART,
        entity_category=EntityCategory.CONFIG,
        press_action=lambda syno_api: syno_api.async_reboot(),
    ),
    SynologyDSMbuttonDescription(
        key="shutdown",
        name="Shutdown",
        icon="mdi:power",
        entity_category=EntityCategory.CONFIG,
        press_action=lambda syno_api: syno_api.async_shutdown(),
    ),
]
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set buttons for device."""
    entry_data = hass.data[DOMAIN][entry.unique_id]
    api: SynoApi = entry_data[SYNO_API]
    # One entity per entry in the static BUTTONS catalog.
    async_add_entities(
        SynologyDSMButton(api, description) for description in BUTTONS
    )
class SynologyDSMButton(ButtonEntity):
    """Defines a Synology DSM button."""

    entity_description: SynologyDSMbuttonDescription

    def __init__(
        self,
        api: SynoApi,
        description: SynologyDSMbuttonDescription,
    ) -> None:
        """Initialize the Synology DSM button entity."""
        self.entity_description = description
        self.syno_api = api
        # Entity name combines the NAS hostname with the button label.
        self._attr_name = f"{api.network.hostname} {description.name}"
        # Unique id is scoped to the NAS serial so multiple units can coexist.
        self._attr_unique_id = f"{api.information.serial}_{description.key}"
        # Link the entity to the NAS device registry entry via its serial.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, api.information.serial)}
        )

    async def async_press(self) -> None:
        """Triggers the Synology DSM button press service."""
        LOGGER.debug(
            "Trigger %s for %s",
            self.entity_description.key,
            self.syno_api.network.hostname,
        )
        # Delegate to the coroutine factory declared on the description.
        await self.entity_description.press_action(self.syno_api)
| 1,045 |
584 | <reponame>FelixPetriconi/libraries<gh_stars>100-1000
/*
    Copyright 2017 Adobe
    Distributed under the Boost Software License, Version 1.0.
    (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
*/

#ifndef STLAB_FUNCTIONAL_HPP
#define STLAB_FUNCTIONAL_HPP

#include <functional>
#include <type_traits>

namespace stlab {
inline namespace v1 {

/// Trait mapping std::reference_wrapper<T> to T; every other type maps to
/// itself.
template <typename T>
struct unwrap_reference {
    using type = T;
};

template <typename T>
struct unwrap_reference<std::reference_wrapper<T>> {
    using type = T;
};

/// Convenience alias for unwrap_reference<T>::type.
template <typename T>
using unwrap_reference_t = typename unwrap_reference<T>::type;

/// Trait detecting whether a type is a std::reference_wrapper specialization.
template <typename T>
struct is_reference_wrapper : std::false_type {};

template <typename T>
struct is_reference_wrapper<std::reference_wrapper<T>> : std::true_type {};

/// Variable-template shorthand for is_reference_wrapper<T>::value.
template <typename T>
constexpr bool is_reference_wrapper_v = is_reference_wrapper<T>::value;

/// unwrap() returns its argument unchanged for ordinary values, and the
/// referred-to object for std::reference_wrapper arguments. Const-ness of
/// the argument is preserved in the result.
template <typename T>
T& unwrap(T& ref) {
    return ref;
}

template <typename T>
const T& unwrap(const T& ref) {
    return ref;
}

template <typename T>
T& unwrap(std::reference_wrapper<T>& ref) {
    return ref.get();
}

template <typename T>
const T& unwrap(const std::reference_wrapper<T>& ref) {
    return ref.get();
}

} // namespace v1
} // namespace stlab

#endif
| 554 |
4,140 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.llap.metrics;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheAllocatedArena;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheCapacityRemaining;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheCapacityRemainingPercentage;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheCapacityTotal;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheCapacityUsed;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheHitBytes;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheHitRatio;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheMetrics;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheNumLockedBuffers;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheReadRequests;
import static org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheInfo.CacheRequestedBytes;
import static org.apache.hadoop.metrics2.impl.MsInfo.ProcessName;
import static org.apache.hadoop.metrics2.impl.MsInfo.SessionId;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
import com.google.common.annotations.VisibleForTesting;
/**
 * Llap daemon cache metrics source. Counters are updated by the cache
 * implementation through the incr/decr/set methods and snapshotted into
 * Hadoop metrics records via {@link #getMetrics}.
 */
@Metrics(about = "LlapDaemon Cache Metrics", context = "cache")
public class LlapDaemonCacheMetrics implements MetricsSource {
  final String name;
  private String sessionId;
  private final MetricsRegistry registry;

  // Number of read requests issued against the cache.
  @Metric
  MutableCounterLong cacheReadRequests;
  // Total configured cache capacity, in bytes.
  @Metric
  MutableGaugeLong cacheCapacityTotal;
  @Metric
  MutableCounterLong cacheCapacityUsed; // Not using the gauge to avoid races.
  // Total bytes requested from the cache.
  @Metric
  MutableCounterLong cacheRequestedBytes;
  // Bytes of requested data that were served from the cache.
  @Metric
  MutableCounterLong cacheHitBytes;
  // Number of allocator arenas created so far.
  @Metric
  MutableCounterLong cacheAllocatedArena;
  // Buffers currently locked in the cache (incremented/decremented in pairs).
  @Metric
  MutableCounterLong cacheNumLockedBuffers;

  private LlapDaemonCacheMetrics(String name, String sessionId) {
    this.name = name;
    this.sessionId = sessionId;
    this.registry = new MetricsRegistry("LlapDaemonCacheRegistry");
    this.registry.tag(ProcessName, MetricsUtils.METRICS_PROCESS_NAME).tag(SessionId, sessionId);
  }

  /** Creates an instance and registers it with the LLAP metrics system. */
  public static LlapDaemonCacheMetrics create(String displayName, String sessionId) {
    MetricsSystem ms = LlapMetricsSystem.instance();
    return ms.register(displayName, null, new LlapDaemonCacheMetrics(displayName, sessionId));
  }

  public void setCacheCapacityTotal(long value) {
    cacheCapacityTotal.set(value);
  }

  public void incrCacheCapacityUsed(long delta) {
    cacheCapacityUsed.incr(delta);
  }

  public void incrCacheRequestedBytes(long delta) {
    cacheRequestedBytes.incr(delta);
  }

  public void incrCacheHitBytes(long delta) {
    cacheHitBytes.incr(delta);
  }

  public void incrCacheReadRequests() {
    cacheReadRequests.incr();
  }

  public void incrAllocatedArena() {
    cacheAllocatedArena.incr();
  }

  public void incrCacheNumLockedBuffers() {
    cacheNumLockedBuffers.incr();
  }

  public void decrCacheNumLockedBuffers() {
    cacheNumLockedBuffers.incr(-1);
  }

  public String getName() {
    return name;
  }

  @VisibleForTesting
  public long getCacheRequestedBytes() {
    return cacheRequestedBytes.value();
  }

  @VisibleForTesting
  public long getCacheHitBytes() {
    return cacheHitBytes.value();
  }

  /** Emits one "cache" metrics record with all current counter values. */
  @Override
  public void getMetrics(MetricsCollector collector, boolean b) {
    MetricsRecordBuilder rb = collector.addRecord(CacheMetrics)
        .setContext("cache")
        .tag(ProcessName, MetricsUtils.METRICS_PROCESS_NAME)
        .tag(SessionId, sessionId);
    getCacheStats(rb);
  }

  // Computes derived values (hit ratio, remaining capacity) and appends all
  // cache metrics to the record. Both ratios guard against division by zero.
  private void getCacheStats(MetricsRecordBuilder rb) {
    float cacheHitRatio = cacheRequestedBytes.value() == 0 ? 0.0f :
        (float) cacheHitBytes.value() / (float) cacheRequestedBytes.value();
    long cacheCapacityRemaining = cacheCapacityTotal.value() - cacheCapacityUsed.value();
    float cacheRemainingPercent = cacheCapacityTotal.value() == 0 ? 0.0f :
        (float) cacheCapacityRemaining / (float) cacheCapacityTotal.value();
    rb.addCounter(CacheCapacityRemaining, cacheCapacityRemaining)
        .addGauge(CacheCapacityRemainingPercentage, cacheRemainingPercent)
        .addCounter(CacheCapacityTotal, cacheCapacityTotal.value())
        .addCounter(CacheCapacityUsed, cacheCapacityUsed.value())
        .addCounter(CacheReadRequests, cacheReadRequests.value())
        .addCounter(CacheRequestedBytes, cacheRequestedBytes.value())
        .addCounter(CacheHitBytes, cacheHitBytes.value())
        .addCounter(CacheAllocatedArena, cacheAllocatedArena.value())
        .addCounter(CacheNumLockedBuffers, cacheNumLockedBuffers.value())
        .addGauge(CacheHitRatio, cacheHitRatio);
  }
}
| 2,037 |
4,569 | package com.brianway.learning.java.multithread.lock.example1;
/**
 * Worker thread that runs {@link ServiceC#await()} on a background thread.
 *
 * Created by brian on 2016/4/15.
 */
public class ThreadC extends Thread {

    // The service this thread blocks on; assigned once in the constructor,
    // so it is declared final to guarantee safe publication to run().
    private final ServiceC service;

    public ThreadC(ServiceC service) {
        this.service = service;
    }

    @Override
    public void run() {
        service.await();
    }
}
| 123 |
348 | {"nom":"Pluzunet","circ":"4ème circonscription","dpt":"Côtes-d'Armor","inscrits":767,"abs":374,"votants":393,"blancs":31,"nuls":14,"exp":348,"res":[{"nuance":"SOC","nom":"<NAME>","voix":187},{"nuance":"REM","nom":"<NAME>","voix":161}]} | 96 |
892 | <filename>advisories/unreviewed/2022/04/GHSA-h9jq-pm5v-hr69/GHSA-h9jq-pm5v-hr69.json
{
"schema_version": "1.2.0",
"id": "GHSA-h9jq-pm5v-hr69",
"modified": "2022-04-29T02:59:47Z",
"published": "2022-04-29T02:59:47Z",
"aliases": [
"CVE-2004-1555"
],
"details": "Multiple SQL injection vulnerabilities in BroadBoard Instant ASP Message Board allow remote attackers to run arbitrary SQL commands via the (1) keywords parameter to search.asp, (2) handle parameter to profile.asp, (3) txtUserHandle parameter to reg2.asp or (4) txtUserEmail parameter to forgot.asp.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2004-1555"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/17498"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/17500"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/17501"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/17502"
},
{
"type": "WEB",
"url": "http://marc.info/?l=bugtraq&m=109630777608244&w=2"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/12658"
},
{
"type": "WEB",
"url": "http://securitytracker.com/id?1011419"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/11250"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 773 |
903 | <filename>vole-modules/vole-casclient/src/main/java/com/github/vole/casclient/filter/HttpParamsFilter.java<gh_stars>100-1000
package com.github.vole.casclient.filter;
import javax.servlet.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.IOException;
/**
 * Servlet filter that records the URL of every incoming request in the HTTP
 * session, so that a later CAS redirect can send the user back to it.
 */
public class HttpParamsFilter implements Filter {

    /** Session attribute key under which the requested URL is stored. */
    public final static String REQUESTED_URL = "CasRequestedUrl";

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // No initialization needed.
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        final HttpServletRequest httpRequest = (HttpServletRequest) request;
        final HttpServletResponse httpResponse = (HttpServletResponse) response;

        // Remember the URL the client asked for (query string not included,
        // per HttpServletRequest.getRequestURL()).
        final String requestedUrl = httpRequest.getRequestURL().toString();
        httpRequest.getSession().setAttribute(REQUESTED_URL, requestedUrl);

        chain.doFilter(httpRequest, httpResponse);
    }

    @Override
    public void destroy() {
        // Nothing to release.
    }
}
| 385 |
1,909 | package org.knowm.xchange.lykke;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import org.junit.Test;
import org.knowm.xchange.lykke.dto.account.LykkeWallet;
/** Unit test for deserializing Lykke wallet JSON into {@link LykkeWallet}. */
public class LykkeAccountTest {

  private ObjectMapper mapper = new ObjectMapper();

  @Test
  public void testUnmarshal() throws IOException {
    // Read in the JSON from the example resources.
    // Resolve the resource against this test's own class (it previously used
    // LykkeAssetsTest.class, which worked only by accident of the shared
    // package) and close the stream with try-with-resources to avoid leaking
    // the file handle.
    try (InputStream is =
        LykkeAccountTest.class.getResourceAsStream(
            "/org/knowm/xchange/lykke/example-lykkeWallet.json")) {

      LykkeWallet[] wallets = mapper.readValue(is, LykkeWallet[].class);

      assertThat(wallets[0].getAssetId()).isEqualTo("string");
      assertThat(wallets[0].getBalance()).isEqualTo(0);
      assertThat(wallets[0].getReserved()).isEqualTo(0);
      assertThat(wallets[1].getAssetId()).isEqualTo("string1");
      assertThat(wallets[1].getBalance()).isEqualTo(1);
      assertThat(wallets[1].getReserved()).isEqualTo(1);
    }
  }
}
| 402 |
190,993 | /* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_UTILS_SPARSITY_FORMAT_CONVERTER_H_
#define TENSORFLOW_LITE_KERNELS_INTERNAL_UTILS_SPARSITY_FORMAT_CONVERTER_H_
#include <vector>
#include "third_party/eigen3/Eigen/Core"
#include "tensorflow/lite/c/common.h"
namespace tflite {
namespace internal {
namespace sparsity {
// A converter that keeps an internal representation of sparse tensor parameters
// and converts tensors between dense and sparse formats.
template <typename T>
class FormatConverter {
 public:
  /*
   * Creates a dense to sparse converter.
   * @param shape             Shape of the dense tensor.
   * @param traversal_order   In what order to traverse all dimensions,
   *                          including block dimensions.
   * @param format            Whether each dimension in the dense tensor is
   *                          dense or sparse (not in the traversal order).
   * @param block_size        Size of each block dimension.
   * @param block_map         Map from block dimension to original tensor
   *                          dimension.
   */
  FormatConverter(const std::vector<int>& shape,
                  const std::vector<int>& traversal_order,
                  const std::vector<TfLiteDimensionType>& format,
                  const std::vector<int>& block_size = {},
                  const std::vector<int>& block_map = {});

  /*
   * Creates a sparse to dense converter.
   * @param shape             Shape of the target dense tensor.
   * @param traversal_order   In what order to traverse all dimensions,
   *                          including block dimensions.
   * @param format            Whether each dimension in the dense tensor is
   *                          dense or sparse (not in the traversal order).
   * @param dense_size        Size of each dense dimension in the sparse tensor.
   *                          Should be 0 for sparse dimensions.
   * @param segments          Segments of each dimension in the sparse tensor.
   *                          Should be empty for dense dimensions.
   * @param indices           Indices in the dense tensor for each dimension.
   *                          Should be empty for dense dimensions.
   * @param block_map         Map from block dimension to original tensor
   *                          dimension.
   */
  FormatConverter(const std::vector<int>& shape,
                  const std::vector<int>& traversal_order,
                  const std::vector<TfLiteDimensionType>& format,
                  const std::vector<int>& dense_size,
                  const std::vector<std::vector<int>>& segments,
                  const std::vector<std::vector<int>>& indices,
                  const std::vector<int>& block_map = {});

  /* Creates a sparse to dense converter.
   * @param shape      Shape of the target dense tensor.
   * @param sparsity   Sparsity parameter of the sparse TfLiteTensor.
   */
  FormatConverter(const std::vector<int>& shape,
                  const TfLiteSparsity& sparsity);

  // Buffer produced by the most recent conversion (sparse data after
  // DenseToSparse, dense data after SparseToDense).
  const std::vector<T>& GetData() { return data_; }
  // Per-dimension metadata produced by DenseToSparse; see dim_metadata_ below
  // for the layout.
  const std::vector<std::vector<int>>& GetDimMetadata() {
    return dim_metadata_;
  }

  // Method for dense to sparse conversion. Need to call GetData() method to get
  // the compressed data.
  TfLiteStatus DenseToSparse(const T* src_data);

  // Method for sparse to dense conversion. Need to call GetData() method to get
  // the decompressed data.
  TfLiteStatus SparseToDense(const T* src_data);
  // Method for sparse to dense conversion with caller provided buffer. No need
  // to call GetData() with this method.
  TfLiteStatus SparseToDense(const T* src_data, const size_t dest_size,
                             T* dest_data, TfLiteContext* context = nullptr);

 private:
  // Helper function for initializing this converter for sparse to dense
  // conversion.
  void InitSparseToDenseConverter(std::vector<int> shape,
                                  std::vector<int> traversal_order,
                                  std::vector<TfLiteDimensionType> format,
                                  std::vector<int> dense_size,
                                  std::vector<std::vector<int>> segments,
                                  std::vector<std::vector<int>> indices,
                                  std::vector<int> block_map);

  // A recursive function to fetch data from the compressed src_data buffer and
  // populate the dense buffer.
  void Populate(const T* src_data, std::vector<int> indices, int level,
                int prev_idx, int* src_data_ptr, T* dest_data);

  // Check if val is equal to zero.
  bool IsZero(const T val);

  // Shape of the conceptual dense tensor.
  std::vector<int> dense_shape_;
  // Shape of the dense tensor with inner blocks reduced. For example, a (4, 4)
  // tensor with (2, 2) block has blocked_shape (2, 2).
  std::vector<int> blocked_shape_;
  // Total number of elements in the dense tensor.
  size_t dense_size_;
  // Has n(original dimension)+k(block_dimension) elements.
  std::vector<int> traversal_order_;
  // Format of each dimension in the traversal order.
  std::vector<TfLiteDimensionType> format_;
  // Size of each block dimension, in the same order as block map.
  std::vector<int> block_size_;
  // Map from block dimension to the original tensor dimension.
  std::vector<int> block_map_;
  // Metadata of each dimension in the traversal order.
  // Each dimension needs two vectors. For dense dimensions, the first vector
  // stores the size of that dimension, and the second vector is empty. For
  // sparse dimensions, the first vector stores the segments and the second one
  // stores the indices.
  std::vector<std::vector<int>> dim_metadata_;
  // Actual buffer holding data after conversion. Could be sparse buffer or
  // dense buffer.
  std::vector<T> data_;
};
extern template class FormatConverter<int32_t>;
extern template class FormatConverter<int8_t>;
extern template class FormatConverter<float>;
extern template class FormatConverter<Eigen::half>;
} // namespace sparsity
} // namespace internal
} // namespace tflite
#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_UTILS_SPARSITY_FORMAT_CONVERTER_H_
| 2,580 |
7,892 | <reponame>CHRYSICS/audacity
/*!********************************************************************
Audacity: A Digital Audio Editor
@file DBConnection.cpp
@brief Implements DBConnection
<NAME> -- split from ProjectFileIO.cpp
**********************************************************************/
#include "DBConnection.h"
#include "sqlite3.h"
#include <wx/string.h>
#include "AudacityLogger.h"
#include "BasicUI.h"
#include "FileNames.h"
#include "Internat.h"
#include "Project.h"
#include "FileException.h"
#include "wxFileNameWrapper.h"
#include "SentryHelper.h"
#define AUDACITY_PROJECT_PAGE_SIZE 65536
#define xstr(a) str(a)
#define str(a) #a
static const char* PageSizeConfig =
"PRAGMA <schema>.page_size = " xstr(AUDACITY_PROJECT_PAGE_SIZE) ";"
"VACUUM;";
// Configuration to provide "safe" connections
static const char* SafeConfig =
"PRAGMA <schema>.busy_timeout = 5000;"
"PRAGMA <schema>.locking_mode = SHARED;"
"PRAGMA <schema>.synchronous = NORMAL;"
"PRAGMA <schema>.journal_mode = WAL;"
"PRAGMA <schema>.wal_autocheckpoint = 0;";
// Configuration to provide "Fast" connections
static const char *FastConfig =
"PRAGMA <schema>.busy_timeout = 5000;"
"PRAGMA <schema>.locking_mode = SHARED;"
"PRAGMA <schema>.synchronous = OFF;"
"PRAGMA <schema>.journal_mode = OFF;";
// Constructs a connection object with no databases open; Open() must be
// called before DB() is usable.
DBConnection::DBConnection(
   const std::weak_ptr<AudacityProject> &pProject,
   const std::shared_ptr<DBConnectionErrors> &pErrors,
   CheckpointFailureCallback callback)
: mpProject{ pProject }
, mpErrors{ pErrors }
, mCallback{ std::move(callback) }
{
   // Handles are created lazily in Open().
   mDB = nullptr;
   mCheckpointDB = nullptr;
   mBypass = false;
}
// Close() is expected to have run before destruction; if the handle is still
// open we only log it here rather than attempt a close.
DBConnection::~DBConnection()
{
   wxASSERT(mDB == nullptr);
   if (mDB)
   {
      wxLogMessage("Database left open at connection destruction %s\n",
                   sqlite3_db_filename(mDB, nullptr));
   }
}
// Sets the flag consulted by ShouldBypass(); semantics of "bypass" are
// defined by callers elsewhere in the project.
void DBConnection::SetBypass( bool bypass )
{
   mBypass = bypass;
}
// Returns the flag last set with SetBypass().
bool DBConnection::ShouldBypass()
{
   return mBypass;
}
// Records an error into the shared error-state object, deriving the library
// message from the SQLite error code when none was supplied, and captures the
// last few log lines for diagnostics.
void DBConnection::SetError(
   const TranslatableString &msg, const TranslatableString &libraryError, int errorCode)
{
   mpErrors->mErrorCode = errorCode;
   mpErrors->mLastError = msg;
   // If a nonzero code was given without a library message, synthesize one
   // from sqlite3_errstr().
   mpErrors->mLibraryError = errorCode && libraryError.empty()
      ? XO("(%d): %s").Format(errorCode, sqlite3_errstr(errorCode))
      : libraryError;
   wxLogMessage("DBConnection SetError\n"
                "\tErrorCode: %d\n"
                "\tLastError: %s\n"
                "\tLibraryError: %s",
                mpErrors->mErrorCode,
                mpErrors->mLastError.Debug(),
                mpErrors->mLibraryError.Debug());

   // Keep the last 10 log lines alongside the error for later reporting.
   auto logger = AudacityLogger::Get();
   if (logger)
   {
      mpErrors->mLog = logger->GetLog(10);
   }
}
// Like SetError(), but fills any omitted pieces (code, message, library
// message) from the current state of the SQLite connection itself.
void DBConnection::SetDBError(
   const TranslatableString &msg, const TranslatableString &libraryError, int errorCode)
{
   auto db = DB();

   // Negative errorCode means "ask the connection".
   mpErrors->mErrorCode = errorCode < 0 && db
      ? sqlite3_errcode(db)
      : errorCode;

   mpErrors->mLastError = msg.empty()
      ? XO("(%d): %s").Format(mpErrors->mErrorCode, sqlite3_errstr(mpErrors->mErrorCode))
      : msg;

   mpErrors->mLibraryError = libraryError.empty() && db
      ? Verbatim(sqlite3_errmsg(db))
      : libraryError;

   wxLogMessage("DBConnection SetDBError\n"
                "\tErrorCode: %d\n"
                "\tLastError: %s\n"
                "\tLibraryError: %s",
                mpErrors->mErrorCode,
                mpErrors->mLastError.Debug(),
                mpErrors->mLibraryError.Debug());

   // Keep the last 10 log lines alongside the error for later reporting.
   auto logger = AudacityLogger::Get();
   if (logger)
   {
      mpErrors->mLog = logger->GetLog(10);
   }
}
// Opens both connections (primary and checkpoint) to the given file.
// On any failure, closes whatever was opened so the object is left with no
// database attached.  Returns a SQLite result code.
int DBConnection::Open(const FilePath fileName)
{
   wxASSERT(mDB == nullptr);
   int rc;

   // Initialize checkpoint controls
   mCheckpointStop = false;
   mCheckpointPending = false;
   mCheckpointActive = false;
   rc = OpenStepByStep( fileName );
   if ( rc != SQLITE_OK)
   {
      // Roll back the partially-completed open.
      if (mCheckpointDB)
      {
         sqlite3_close(mCheckpointDB);
         mCheckpointDB = nullptr;
      }
      if (mDB)
      {
         sqlite3_close(mDB);
         mDB = nullptr;
      }
   }
   return rc;
}
// Performs the individual steps of Open(): opens the primary connection,
// configures its page size and safe mode, opens the checkpoint connection,
// starts the checkpoint worker thread, and installs the WAL hook.
// On failure, handles may be left open; Open() cleans them up.
int DBConnection::OpenStepByStep(const FilePath fileName)
{
   // Keep the UTF-8 conversion alive in a named buffer for the whole
   // function.  Binding the temporary's char pointer directly to
   // `const char *` can leave it dangling in builds where ToUTF8()
   // returns an owning buffer.
   const auto utf8Name = fileName.ToUTF8();
   const char *name = utf8Name;

   int rc = sqlite3_open(name, &mDB);
   if (rc != SQLITE_OK)
   {
      ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
      ADD_EXCEPTION_CONTEXT("sqlite3.context", "DBConnection::OpenStepByStep::open");

      wxLogMessage("Failed to open primary connection to %s: %d, %s\n",
                   fileName,
                   rc,
                   sqlite3_errstr(rc));
      return rc;
   }

   rc = SetPageSize();
   if (rc != SQLITE_OK)
   {
      SetDBError(XO("Failed to set page size for database %s")
         .Format(fileName));
      return rc;
   }

   // Set default mode
   // (See comments in ProjectFileIO::SaveProject() about threading
   rc = SafeMode();
   if (rc != SQLITE_OK)
   {
      SetDBError(XO("Failed to set safe mode on primary connection to %s").Format(fileName));
      return rc;
   }

   rc = sqlite3_open(name, &mCheckpointDB);
   if (rc != SQLITE_OK)
   {
      ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
      ADD_EXCEPTION_CONTEXT("sqlite3.context", "DBConnection::OpenStepByStep::open_checkpoint");

      wxLogMessage("Failed to open checkpoint connection to %s: %d, %s\n",
                   fileName,
                   rc,
                   sqlite3_errstr(rc));
      return rc;
   }

   rc = ModeConfig(mCheckpointDB, "main", SafeConfig);
   if (rc != SQLITE_OK) {
      SetDBError(XO("Failed to set safe mode on checkpoint connection to %s").Format(fileName));
      return rc;
   }

   // Hand the checkpoint connection to the worker thread.
   auto db = mCheckpointDB;
   mCheckpointThread = std::thread(
      [this, db, fileName]{ CheckpointThread(db, fileName); });

   // Install our checkpoint hook
   sqlite3_wal_hook(mDB, CheckpointHook, this);

   return rc;
}
bool DBConnection::Close()
{
wxASSERT(mDB != nullptr);
int rc;
// Protect...
if (mDB == nullptr)
{
return true;
}
// Uninstall our checkpoint hook so that no additional checkpoints
// are sent our way. (Though this shouldn't really happen.)
sqlite3_wal_hook(mDB, nullptr, nullptr);
// Display a progress dialog if there's active or pending checkpoints
if (mCheckpointPending || mCheckpointActive)
{
TranslatableString title = XO("Checkpointing project");
// Get access to the active project
auto project = mpProject.lock();
if (project)
{
title = XO("Checkpointing %s").Format(project->GetProjectName());
}
// Provides a progress dialog with indeterminate mode
using namespace BasicUI;
auto pd = MakeGenericProgress({},
title, XO("This may take several seconds"));
wxASSERT(pd);
// Wait for the checkpoints to end
while (mCheckpointPending || mCheckpointActive)
{
wxMilliSleep(50);
pd->Pulse();
}
}
// Tell the checkpoint thread to shutdown
{
std::lock_guard<std::mutex> guard(mCheckpointMutex);
mCheckpointStop = true;
mCheckpointCondition.notify_one();
}
// And wait for it to do so
if (mCheckpointThread.joinable())
{
mCheckpointThread.join();
}
// We're done with the prepared statements
{
std::lock_guard<std::mutex> guard(mStatementMutex);
for (auto stmt : mStatements)
{
// No need to process return code, but log it for diagnosis
rc = sqlite3_finalize(stmt.second);
if (rc != SQLITE_OK)
{
wxLogMessage("Failed to finalize statement on %s\n"
"\tErrMsg: %s\n"
"\tSQL: %s",
sqlite3_db_filename(mDB, nullptr),
sqlite3_errmsg(mDB),
stmt.second);
}
}
mStatements.clear();
}
// Not much we can do if the closes fail, so just report the error
// Close the checkpoint connection
rc = sqlite3_close(mCheckpointDB);
if (rc != SQLITE_OK)
{
ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
ADD_EXCEPTION_CONTEXT("sqlite3.context", "DBConnection::Close::close_checkpoint");
wxLogMessage("Failed to close checkpoint connection for %s\n"
"\tError: %s\n",
sqlite3_db_filename(mCheckpointDB, nullptr),
sqlite3_errmsg(mCheckpointDB));
}
mCheckpointDB = nullptr;
// Close the primary connection
rc = sqlite3_close(mDB);
if (rc != SQLITE_OK)
{
ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
ADD_EXCEPTION_CONTEXT("sqlite3.context", "DBConnection::OpenStepByStep::close");
wxLogMessage("Failed to close %s\n"
"\tError: %s\n",
sqlite3_db_filename(mDB, nullptr),
sqlite3_errmsg(mDB));
}
mDB = nullptr;
return true;
}
// Raises a FileException naming the database file; `write` selects whether
// the failure is reported as a read or a write error.
[[noreturn]] void DBConnection::ThrowException( bool write ) const
{
   // Sqlite3 documentation says returned character string
   // does NOT require freeing by us.
   wxString dbName{ sqlite3_db_filename(mDB, "main") };

   // Now we have an absolute path.  Throw a message box exception that
   // formats a helpful message just as used to be done before sqlite3
   // was used for projects.
   throw FileException{
      write ? FileException::Cause::Write : FileException::Cause::Read,
      dbName
   };
}
// Applies the "safe" pragma set (WAL journal, NORMAL sync) to the schema.
int DBConnection::SafeMode(const char *schema /* = "main" */)
{
   return ModeConfig(mDB, schema, SafeConfig);
}
// Applies the "fast" pragma set (journal and sync off) to the schema.
int DBConnection::FastMode(const char *schema /* = "main" */)
{
   return ModeConfig(mDB, schema, FastConfig);
}
// Sets the database page size (via PRAGMA + VACUUM), but only when the
// database is still empty; otherwise it is a no-op returning SQLITE_OK.
int DBConnection::SetPageSize(const char* schema)
{
   // First of all - let's check if the database is empty.
   // Otherwise, VACUUM can take a significant amount of time.
   // VACUUM is required to force SQLite3 to change the page size.

   // This function will be the first called on the connection,
   // so if DB is empty we can assume that journal was not
   // set to WAL yet.
   int rc = sqlite3_exec(
      mDB, "SELECT 1 FROM project LIMIT 1;", nullptr, nullptr, nullptr);

   if (rc == SQLITE_OK)
      return SQLITE_OK; // Project table exists, too late to VACUUM now

   return ModeConfig(mDB, schema, PageSizeConfig);
}
// Expands the "<schema>" placeholders in the given pragma script and runs it
// against the supplied connection.  Errors are logged (not stored) and the
// SQLite result code is returned.
int DBConnection::ModeConfig(sqlite3 *db, const char *schema, const char *config)
{
   // Ensure attached DB connection gets configured
   int rc;

   // Replace all schema "keywords" with the schema name
   wxString sql = config;
   sql.Replace(wxT("<schema>"), schema);

   // Set the configuration
   rc = sqlite3_exec(db, sql, nullptr, nullptr, nullptr);
   if (rc != SQLITE_OK)
   {
      ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
      ADD_EXCEPTION_CONTEXT("sqlite3.context", "DBConnection::ModeConfig");
      ADD_EXCEPTION_CONTEXT("sqlite3.mode", config);

      // Don't store in connection, just report it.
      // Report against the connection actually being configured (`db`), not
      // mDB: the two differ when configuring the checkpoint connection, and
      // the previous code logged the wrong handle's error message.
      wxLogMessage("Failed to set mode on %s\n"
                   "\tError: %s\n"
                   "\tSQL: %s",
                   sqlite3_db_filename(db, nullptr),
                   sqlite3_errmsg(db),
                   sql);
   }

   return rc;
}
// Returns the primary connection handle; Open() must have succeeded first.
sqlite3 *DBConnection::DB()
{
   wxASSERT(mDB != nullptr);

   return mDB;
}
// Returns the most recent SQLite error code on the primary connection.
int DBConnection::GetLastRC() const
{
   return sqlite3_errcode(mDB);
}
// Returns the most recent SQLite error message on the primary connection.
const wxString DBConnection::GetLastMessage() const
{
   return sqlite3_errmsg(mDB);
}
// Returns a prepared statement for the given (id, SQL) pair, caching one
// statement per (id, calling thread).  Throws on preparation failure.
sqlite3_stmt *DBConnection::Prepare(enum StatementID id, const char *sql)
{
   std::lock_guard<std::mutex> guard(mStatementMutex);

   int rc;
   // See bug 2673
   // We must not use the same prepared statement from two different threads.
   // Therefore, in the cache, use the thread id too.
   StatementIndex ndx(id, std::this_thread::get_id());

   // Return an existing statement if it's already been prepared
   auto iter = mStatements.find(ndx);
   if (iter != mStatements.end())
   {
      return iter->second;
   }

   // Prepare the statement
   sqlite3_stmt *stmt = nullptr;
   rc = sqlite3_prepare_v3(mDB, sql, -1, SQLITE_PREPARE_PERSISTENT, &stmt, 0);
   if (rc != SQLITE_OK)
   {
      ADD_EXCEPTION_CONTEXT("sqlite3.query", sql);
      ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
      ADD_EXCEPTION_CONTEXT("sqlite3.context", "DBConnection::Prepare");

      wxLogMessage("Failed to prepare statement for %s\n"
                   "\tError: %s\n"
                   "\tSQL: %s",
                   sqlite3_db_filename(mDB, nullptr),
                   sqlite3_errmsg(mDB),
                   sql);

      // TODO: Look into why this causes an access violation
      THROW_INCONSISTENCY_EXCEPTION;
   }

   // There are a small number (10 or so) of different id's corresponding
   // to different SQL statements, see enum StatementID
   // We have relatively few threads running at any one time,
   // e.g. main gui thread, a playback thread, a thread for compacting.
   // However the cache might keep growing, as we start/stop audio,
   // perhaps, if we chose to use a new thread each time.
   // For 3.0.0 I think that's OK.  If it's a data leak it's a slow
   // enough one.  wxLogDebugs seem to show that the audio play thread
   // is being reused, not recreated with a new ID, i.e. no leak at all.
   // ANSWER-ME Just how serious is the data leak?  How best to fix?

   // Remember the cached statement.
   mStatements.insert({ndx, stmt});
   return stmt;
}
// Body of the checkpoint worker thread: sleeps until CheckpointHook flags
// pending work (or Close() signals stop), then runs passive WAL checkpoints.
// On an unrecoverable error (e.g. disk full) it reports once, invokes the
// failure callback on the main thread, and stops attempting checkpoints
// ("giveUp") while continuing to drain requests until shutdown.
void DBConnection::CheckpointThread(sqlite3 *db, const FilePath &fileName)
{
   int rc = SQLITE_OK;
   bool giveUp = false;

   while (true)
   {
      {
         // Wait for work or the stop signal
         std::unique_lock<std::mutex> lock(mCheckpointMutex);
         mCheckpointCondition.wait(lock,
                                   [&]
                                   {
                                      return mCheckpointPending || mCheckpointStop;
                                   });

         // Requested to stop, so bail
         if (mCheckpointStop)
         {
            break;
         }

         // Capture the number of pages that need checkpointing and reset
         mCheckpointActive = true;
         mCheckpointPending = false;
      }

      // And kick off the checkpoint. This may not checkpoint ALL frames
      // in the WAL.  They'll be gotten the next time around.
      using namespace std::chrono;
      do {
         rc = giveUp ? SQLITE_OK :
            sqlite3_wal_checkpoint_v2(
               db, nullptr, SQLITE_CHECKPOINT_PASSIVE, nullptr, nullptr);
      }
      // Contentions for an exclusive lock on the database are possible,
      // even while the main thread is merely drawing the tracks, which
      // may perform reads
      while (rc == SQLITE_BUSY && (std::this_thread::sleep_for(1ms), true));

      // Reset
      mCheckpointActive = false;

      if (rc != SQLITE_OK)
      {
         ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
         ADD_EXCEPTION_CONTEXT("sqlite3.context", "DBConnection::CheckpointThread");

         wxLogMessage("Failed to perform checkpoint on %s\n"
                      "\tErrCode: %d\n"
                      "\tErrMsg: %s",
                      fileName,
                      sqlite3_errcode(db),
                      sqlite3_errmsg(db));

         // Can't checkpoint -- maybe the device has too little space
         wxFileNameWrapper fName{ fileName };
         auto path = FileNames::AbbreviatePath(fName);
         auto name = fName.GetFullName();
         auto longname = name + "-wal";

         // TODO: Should we return the actual error message if it's not a
         // disk full condition?
         auto message1 = rc == SQLITE_FULL
            ? XO("Could not write to %s.\n").Format(path)
            : TranslatableString{};
         auto message = XO(
            "Disk is full.\n"
            "%s\n"
            "For tips on freeing up space, click the help button."
         ).Format(message1);

         // Stop trying to checkpoint
         giveUp = true;

         // Stop the audio.
         GuardedCall(
            [&message, rc] {
               throw SimpleMessageBoxException{ rc != SQLITE_FULL ? ExceptionType::Internal : ExceptionType::BadEnvironment,
                  message, XO("Warning"), "Error:_Disk_full_or_not_writable" }; },
            SimpleGuard<void>{},
            [this](AudacityException * e) {
               // This executes in the main thread.
               if (mCallback)
                  mCallback();
               if (e)
                  e->DelayedHandlerAction();
            }
         );
      }
   }

   return;
}
// WAL hook installed on the primary connection: forwards the notification to
// the owning DBConnection and wakes its checkpoint thread.
int DBConnection::CheckpointHook(void *data, sqlite3 *db, const char *schema, int pages)
{
   // Recover the DBConnection instance this hook was registered with.
   auto pConnection = static_cast<DBConnection *>(data);

   // Flag the pending work under the lock and wake the worker.
   std::lock_guard<std::mutex> guard(pConnection->mCheckpointMutex);
   pConnection->mCheckpointPending = true;
   pConnection->mCheckpointCondition.notify_one();

   return SQLITE_OK;
}
// Creates a named SAVEPOINT on the connection.  Returns true on success;
// on failure records the error in the connection's error state.
bool TransactionScope::TransactionStart(const wxString &name)
{
   char *errmsg = nullptr;

   int rc = sqlite3_exec(mConnection.DB(),
                         wxT("SAVEPOINT ") + name + wxT(";"),
                         nullptr,
                         nullptr,
                         &errmsg);

   if (errmsg)
   {
      ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
      ADD_EXCEPTION_CONTEXT("sqlite3.context", "TransactionScope::TransactionStart");

      mConnection.SetDBError(
         XO("Failed to create savepoint:\n\n%s").Format(name)
      );
      sqlite3_free(errmsg);
   }

   return rc == SQLITE_OK;
}
// RELEASEs (commits and removes) the named savepoint.  Returns true on
// success; on failure records the error in the connection's error state.
bool TransactionScope::TransactionCommit(const wxString &name)
{
   char *errmsg = nullptr;

   int rc = sqlite3_exec(mConnection.DB(),
                         wxT("RELEASE ") + name + wxT(";"),
                         nullptr,
                         nullptr,
                         &errmsg);

   if (errmsg)
   {
      ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
      ADD_EXCEPTION_CONTEXT("sqlite3.context", "TransactionScope::TransactionCommit");

      mConnection.SetDBError(
         XO("Failed to release savepoint:\n\n%s").Format(name)
      );
      sqlite3_free(errmsg);
   }

   return rc == SQLITE_OK;
}
// ROLLBACKs TO the named savepoint (rewinds it without removing it).
// Returns true on success; on failure records the error in the connection's
// error state.
bool TransactionScope::TransactionRollback(const wxString &name)
{
   char *errmsg = nullptr;

   int rc = sqlite3_exec(mConnection.DB(),
                         wxT("ROLLBACK TO ") + name + wxT(";"),
                         nullptr,
                         nullptr,
                         &errmsg);

   if (errmsg)
   {
      ADD_EXCEPTION_CONTEXT("sqlite3.rc", std::to_string(rc));
      ADD_EXCEPTION_CONTEXT("sqlite3.context", "TransactionScope::TransactionRollback");

      // Fixed copy-paste from TransactionCommit: this path rolls back,
      // it does not release.
      mConnection.SetDBError(
         XO("Failed to roll back to savepoint:\n\n%s").Format(name)
      );
      sqlite3_free(errmsg);
   }

   return rc == SQLITE_OK;
}
// Opens a named savepoint on construction; throws if the savepoint cannot
// be created so callers never hold a scope without a transaction.
TransactionScope::TransactionScope(
   DBConnection &connection, const char *name)
:  mConnection(connection),
   mName(name)
{
   mInTrans = TransactionStart(mName);
   if ( !mInTrans )
      // To do, improve the message
      throw SimpleMessageBoxException( ExceptionType::Internal,
         XO("Database error.  Sorry, but we don't have more details."),
         XO("Warning"),
         "Error:_Disk_full_or_not_writable"
      );
}
// If Commit() was never (successfully) called, roll the savepoint back and
// remove it.  Never throws.
TransactionScope::~TransactionScope()
{
   if (mInTrans)
   {
      // Rollback AND REMOVE the transaction
      // -- must do both; rolling back a savepoint only rewinds it
      // without removing it, unlike the ROLLBACK command
      if (!(TransactionRollback(mName) &&
            TransactionCommit(mName) ) )
      {
         // Do not throw from a destructor!
         // This has to be a no-fail cleanup that does the best that it can.
         wxLogMessage("Transaction active at scope destruction");
      }
   }
}
// Commits (RELEASEs) the savepoint.  Throws if called with no active
// transaction.
// NOTE(review): the return value is mInTrans AFTER the commit, i.e. false on
// success and true on failure -- the inverse of the usual convention.
// Looks suspicious but callers may depend on it; confirm before changing.
bool TransactionScope::Commit()
{
   if ( !mInTrans )
   {
      wxLogMessage("No active transaction to commit");

      // Misuse of this class
      THROW_INCONSISTENCY_EXCEPTION;
   }

   mInTrans = !TransactionCommit(mName);

   return mInTrans;
}
// The held DBConnection should have been reset before shutdown; if not,
// assert in debug builds and log in release builds.
ConnectionPtr::~ConnectionPtr()
{
   wxASSERT_MSG(!mpConnection, wxT("Project file was not closed at shutdown"));
   if (mpConnection)
   {
      wxLogMessage("Project file was not closed at connection destruction");
   }
}
// Registers a per-project attached object that holds the DBConnection.
static const AudacityProject::AttachedObjects::RegisteredFactory
sConnectionPtrKey{
   []( AudacityProject & ){
      // Ignore the argument; this is just a holder of a
      // unique_ptr to DBConnection, which must be filled in later
      // (when we can get a weak_ptr to the project)
      auto result = std::make_shared< ConnectionPtr >();
      return result;
   }
};
// Fetches the project's attached ConnectionPtr holder.
ConnectionPtr &ConnectionPtr::Get( AudacityProject &project )
{
   // Look up the attached object registered under sConnectionPtrKey.
   return project.AttachedObjects::Get< ConnectionPtr >( sConnectionPtrKey );
}
// Const overload; delegates to the non-const accessor.
const ConnectionPtr &ConnectionPtr::Get( const AudacityProject &project )
{
   return Get( const_cast< AudacityProject & >( project ) );
}
| 9,137 |
1,107 | /**
* @file pause/pause.c
*
* Yori shell wait for the user to press a key
*
* Copyright (c) 2017-2018 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <yoripch.h>
#include <yorilib.h>
/**
 Help text to display to the user when /? or an equivalent option is given.
 */
const
CHAR strPauseHelpText[] =
        "\n"
        "Prompt the user to press any key before continuing.\n"
        "\n"
        "PAUSE [-license]\n";
/**
 Display usage text to the user, including version, optional build id, and
 the help text above.  Always returns TRUE.
 */
BOOL
PauseHelp(VOID)
{
    YoriLibOutput(YORI_LIB_OUTPUT_STDOUT, _T("Pause %i.%02i\n"), YORI_VER_MAJOR, YORI_VER_MINOR);
#if YORI_BUILD_ID
    YoriLibOutput(YORI_LIB_OUTPUT_STDOUT, _T("  Build %i\n"), YORI_BUILD_ID);
#endif
    YoriLibOutput(YORI_LIB_OUTPUT_STDOUT, _T("%hs"), strPauseHelpText);
    return TRUE;
}
#ifdef YORI_BUILTIN
/**
 The main entrypoint for the pause builtin command (linked into the shell).
 */
#define ENTRYPOINT YoriCmd_YPAUSE
#else
/**
 The main entrypoint for the pause standalone application.
 */
#define ENTRYPOINT ymain
#endif
/**
 The main entrypoint for the pause cmdlet.  Parses -? and -license, prints a
 prompt, then blocks until input arrives: a single byte read for redirected
 (non-console) input, or the first key-down event for console input.

 @param ArgC The number of arguments.

 @param ArgV An array of arguments.

 @return Exit code of the process indicating success or failure.
 */
DWORD
ENTRYPOINT(
    __in DWORD ArgC,
    __in YORI_STRING ArgV[]
    )
{
    BOOL ArgumentUnderstood;
    TCHAR Char;
    DWORD BytesRead;
    DWORD i;
    YORI_STRING Arg;

    for (i = 1; i < ArgC; i++) {

        ArgumentUnderstood = FALSE;
        ASSERT(YoriLibIsStringNullTerminated(&ArgV[i]));

        if (YoriLibIsCommandLineOption(&ArgV[i], &Arg)) {

            if (YoriLibCompareStringWithLiteralInsensitive(&Arg, _T("?")) == 0) {
                PauseHelp();
                return EXIT_SUCCESS;
            } else if (YoriLibCompareStringWithLiteralInsensitive(&Arg, _T("license")) == 0) {
                YoriLibDisplayMitLicense(_T("2017-2018"));
                return EXIT_SUCCESS;
            }
        }

        if (!ArgumentUnderstood) {
            YoriLibOutput(YORI_LIB_OUTPUT_STDERR, _T("Argument not understood, ignored: %y\n"), &ArgV[i]);
        }
    }

    YoriLibOutput(YORI_LIB_OUTPUT_STDOUT, _T("Press any key to continue...\n"));

    /* SetConsoleMode fails when stdin is not a console (e.g. redirected);
       fall back to a plain one-byte read in that case. */
    if (!SetConsoleMode(GetStdHandle(STD_INPUT_HANDLE), 0)) {
        if (!ReadFile(GetStdHandle(STD_INPUT_HANDLE), &Char, 1, &BytesRead, NULL)) {
            return EXIT_FAILURE;
        }
    } else {
        INPUT_RECORD InputRecord;
        /* Consume console events until the first key press (key-down). */
        while(TRUE) {
            if (!ReadConsoleInput(GetStdHandle(STD_INPUT_HANDLE), &InputRecord, 1, &BytesRead)) {
                break;
            }

            if (InputRecord.EventType == KEY_EVENT &&
                InputRecord.Event.KeyEvent.bKeyDown) {

                break;
            }
        }
    }

    return EXIT_SUCCESS;
}
// vim:sw=4:ts=4:et:
| 1,681 |
4,845 | <gh_stars>1000+
package cn.dev33.satoken.reactor.context;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Mono;
/**
 * Reactor context helpers (asynchronous).
 * @author kong
 *
 */
public class SaReactorHolder {

	/**
	 * Key under which the current ServerWebExchange is stored in the Reactor context.
	 */
	public static final Class<ServerWebExchange> CONTEXT_KEY = ServerWebExchange.class;

	/**
	 * Key for the web filter chain.
	 */
	public static final String CHAIN_KEY = "WEB_FILTER_CHAIN_KEY";

	/**
	 * Gets the current exchange from the subscriber context.
	 * @return the ServerWebExchange stored under CONTEXT_KEY
	 */
	public static Mono<ServerWebExchange> getContext() {
		// Read from the global Mono<Context>
		return Mono.subscriberContext().map(ctx -> ctx.get(CONTEXT_KEY));
	}

	/**
	 * Gets the current exchange and also publishes it to the synchronous
	 * (thread-local) holder for the duration of the subscription.
	 * @return the ServerWebExchange stored under CONTEXT_KEY
	 */
	public static Mono<ServerWebExchange> getContextAndSetSync() {
		// Read from the global Mono<Context>
		return Mono.subscriberContext().map(ctx -> {
			// Publish to the synchronous holder
			SaReactorSyncHolder.setContext(ctx.get(CONTEXT_KEY));
			return ctx.get(CONTEXT_KEY);
		}).doFinally(r->{
			// Clear from the synchronous holder
			SaReactorSyncHolder.clearContext();
		});
	}

}
| 488 |
348 | {"nom":"Ablon","circ":"4ème circonscription","dpt":"Calvados","inscrits":927,"abs":408,"votants":519,"blancs":5,"nuls":1,"exp":513,"res":[{"nuance":"REM","nom":"<NAME>","voix":196},{"nuance":"LR","nom":"Mme <NAME>","voix":137},{"nuance":"FN","nom":"M. <NAME>","voix":77},{"nuance":"FI","nom":"M. <NAME>","voix":34},{"nuance":"DLF","nom":"M. <NAME>","voix":18},{"nuance":"ECO","nom":"Mme <NAME>","voix":16},{"nuance":"SOC","nom":"M. <NAME>","voix":15},{"nuance":"COM","nom":"M. <NAME>","voix":7},{"nuance":"EXG","nom":"Mme <NAME>","voix":6},{"nuance":"DVG","nom":"Mme <NAME>","voix":3},{"nuance":"DIV","nom":"M. <NAME>","voix":3},{"nuance":"DIV","nom":"Mme <NAME>","voix":1},{"nuance":"DVG","nom":"Mme <NAME>","voix":0},{"nuance":"EXD","nom":"Mme <NAME>","voix":0}]} | 315 |
482 | /*
* Copyright 2012, The Android Open Source Project
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef ANDROID_LINEARALLOCATOR_H
#define ANDROID_LINEARALLOCATOR_H
#include <stddef.h>
namespace android {
/**
* A memory manager that internally allocates multi-kbyte buffers for placing objects in. It avoids
* the overhead of malloc when many objects are allocated. It is most useful when creating many
* small objects with a similar lifetime, and doesn't add significant overhead for large
* allocations.
*/
class LinearAllocator {
public:
    LinearAllocator();
    ~LinearAllocator();

    /**
     * Reserves and returns a region of memory of at least size 'size', aligning as needed.
     * Typically this is used in an object's overridden new() method or as a replacement for malloc.
     *
     * The lifetime of the returned buffers is tied to that of the LinearAllocator. If calling
     * delete() on an object stored in a buffer is needed, it should be overridden to use
     * rewindIfLastAlloc()
     */
    void* alloc(size_t size);

    /**
     * Attempt to deallocate the given buffer, with the LinearAllocator attempting to rewind its
     * state if possible. No destructors are called.
     */
    void rewindIfLastAlloc(void* ptr, size_t allocSize);

    /**
     * Dump memory usage statistics to the log (allocated and wasted space)
     */
    void dumpMemoryStats(const char* prefix = "");

    /**
     * The number of bytes used for buffers allocated in the LinearAllocator (does not count space
     * wasted)
     */
    size_t usedSize() const { return mTotalAllocated - mWastedSpace; }

private:
    // Non-copyable: this class owns raw Page* buffers, so compiler-generated
    // copy operations would shallow-copy the page list and double-free it.
    // Both copy members are declared private and intentionally left undefined
    // (pre-C++11 idiom, consistent with this header). The original code only
    // suppressed the copy constructor, leaving an implicitly generated
    // copy-assignment operator — that gap is closed here.
    LinearAllocator(const LinearAllocator& other);
    LinearAllocator& operator=(const LinearAllocator& other);

    class Page;

    Page* newPage(size_t pageSize);
    bool fitsInCurrentPage(size_t size);
    void ensureNext(size_t size);
    void* start(Page *p);
    void* end(Page* p);

    size_t mPageSize;
    size_t mMaxAllocSize;
    void* mNext;
    Page* mCurrentPage;
    Page* mPages;

    // Memory usage tracking
    size_t mTotalAllocated;
    size_t mWastedSpace;
    size_t mPageCount;
    size_t mDedicatedPageCount;
};
}; // namespace android
#endif // ANDROID_LINEARALLOCATOR_H
| 1,088 |
6,717 | <filename>include/GameKit/GKPlayer.h<gh_stars>1000+
//******************************************************************************
//
// Copyright (c) 2016 Microsoft Corporation. All rights reserved.
//
// This code is licensed under the MIT License (MIT).
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//******************************************************************************
#pragma once
#import <GameKit/GameKitExport.h>
#import <Foundation/NSObjCRuntime.h>
#import <Foundation/NSObject.h>
@class NSArray;
@class NSError;
@class NSString;
@class UIImage;
// Size variants accepted by -loadPhotoForSize:withCompletionHandler:.
typedef NSInteger GKPhotoSize;
enum {
    GKPhotoSizeSmall = 0,
    GKPhotoSizeNormal,
};

// Stub projection of GameKit's GKPlayer: every member is declared but marked
// STUB_METHOD / STUB_PROPERTY (unimplemented in this bridge).
GAMEKIT_EXPORT_CLASS
@interface GKPlayer : NSObject <NSObject>
// Asynchronously resolves GKPlayer objects for the given player identifiers.
+ (void)loadPlayersForIdentifiers:(NSArray*)identifiers withCompletionHandler:(void (^)(NSArray*, NSError*))completionHandler STUB_METHOD;
// Asynchronously loads the player's photo at the requested size.
- (void)loadPhotoForSize:(GKPhotoSize)size withCompletionHandler:(void (^)(UIImage*, NSError*))completionHandler STUB_METHOD;
// Creates a guest player keyed by a caller-supplied identifier.
+ (instancetype)anonymousGuestPlayerWithIdentifier:(NSString*)guestIdentifier STUB_METHOD;
@property (readonly, retain, nonatomic) NSString* playerID STUB_PROPERTY;
@property (readonly, copy, nonatomic) NSString* alias STUB_PROPERTY;
@property (readonly, nonatomic) NSString* displayName STUB_PROPERTY;
@property (readonly, nonatomic) BOOL isFriend STUB_PROPERTY;
@property (readonly, nonatomic) NSString* guestIdentifier STUB_PROPERTY;
@end
2,645 | <reponame>Stexxen/spring-data-elasticsearch
/**
 * Classes and interfaces used by code based on the Elasticsearch 7 client libraries.
 */
@org.springframework.lang.NonNullApi
@org.springframework.lang.NonNullFields
package org.springframework.data.elasticsearch.core.clients.elasticsearch7;
| 86 |
625 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.datasketches.sampling;
import static org.apache.datasketches.Util.LS;
import static org.apache.datasketches.Util.zeroPad;
import java.nio.ByteOrder;
import org.apache.datasketches.Family;
import org.apache.datasketches.ResizeFactor;
import org.apache.datasketches.SketchesArgumentException;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
//@formatter:off
/**
* This class defines the preamble items structure and provides basic utilities for some of the key
* fields.
*
* <p>
* MAP: Low significance bytes of this <i>long</i> items structure are on the right. However, the
* multi-byte integers (<i>int</i> and <i>long</i>) are stored in native byte order. The
* <i>byte</i> values are treated as unsigned.</p>
*
* <p><strong>Sketch:</strong> The count of items seen is limited to 48 bits (~256 trillion) even
* though there are adjacent unused preamble bits. The acceptance probability for an item is a
* double in the range [0,1), limiting us to 53 bits of randomness due to details of the IEEE
* floating point format. To ensure meaningful probabilities as the items seen count approaches
* capacity, we intentionally use slightly fewer bits.</p>
*
* <p>An empty reservoir sampling sketch only requires 8 bytes. A non-empty sampling sketch
* requires 16 bytes of preamble.</p>
*
* <pre>
* Long || Start Byte Adr:
* Adr:
* || 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
* 0 ||--------Reservoir Size (K)---------| Flags | FamID | SerVer | Preamble_Longs |
*
* || 15 | 14 | 13 | 12 | 11 | 10 | 9 | 8 |
* 1 ||------------------------------Items Seen Count (N)---------------------------------|
* </pre>
*
* <p><strong>Union:</strong> The reservoir union has fewer internal parameters to track and uses
* a slightly different preamble structure. The maximum reservoir size intentionally occupies the
* same byte range as the reservoir size in the sketch preamble, allowing the same methods to be
* used for reading and writing the values. The varopt union takes advantage of the same format.
* The items in the union are stored in a reservoir sketch-compatible format after the union
* preamble.
* </p>
*
* <p>An empty union only requires 8 bytes. A non-empty union requires 8 bytes of preamble.</p>
*
* <pre>
* Long || Start Byte Adr:
* Adr:
* || 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
* 0 ||---------Max Res. Size (K)---------| Flags | FamID | SerVer | Preamble_Longs |
* </pre>
*
* <p><strong>VarOpt:</strong> A VarOpt sketch has a more complex internal items structure and
* requires a larger preamble. Values serving a similar purpose in both reservoir and varopt sampling
* share the same byte ranges, allowing method re-use where practical.</p>
*
* <p>An empty varopt sample requires 8 bytes. A non-empty sketch requires 16 bytes of preamble
* for an under-full sample and otherwise 32 bytes of preamble.</p>
*
* <pre>
* Long || Start Byte Adr:
* Adr:
* || 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
* 0 ||--------Reservoir Size (K)---------| Flags | FamID | SerVer | Preamble_Longs |
*
* || 15 | 14 | 13 | 12 | 11 | 10 | 9 | 8 |
* 1 ||------------------------------Items Seen Count (N)---------------------------------|
*
* || 23 | 22 | 21 | 20 | 19 | 18 | 17 | 16 |
* 2 ||---------Item Count in R-----------|-----------Item Count in H---------------------|
*
* || 31 | 30 | 29 | 28 | 27 | 26 | 25 | 24 |
* 3 ||--------------------------------Total Weight in R----------------------------------|
* </pre>
*
 * <p><strong>VarOpt Union:</strong> VarOpt unions also store more information than a reservoir
 * sketch. As before, we keep values with similar or the same meaning in corresponding locations
 * across sketch and union formats. The items in the union are stored in a varopt sketch-compatible
 * format after the union preamble.</p>
*
* <p>An empty union only requires 8 bytes. A non-empty union requires 32 bytes of preamble.</p>
*
* <pre>
* Long || Start Byte Adr:
* Adr:
* || 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
* 0 ||---------Max Res. Size (K)---------| Flags | FamID | SerVer | Preamble_Longs |
*
* || 15 | 14 | 13 | 12 | 11 | 10 | 9 | 8 |
* 1 ||------------------------------Items Seen Count (N)---------------------------------|
*
* || 23 | 22 | 21 | 20 | 19 | 18 | 17 | 16 |
* 2 ||---------------------------Outer Tau Numerator (double)----------------------------|
*
* || 31 | 30 | 29 | 28 | 27 | 26 | 25 | 24 |
* 3 ||---------------------------Outer Tau Denominator (long)----------------------------|
* </pre>
*
* @author <NAME>
* @author <NAME>
*/
final class PreambleUtil {

  private PreambleUtil() {} // static utility class; never instantiated

  // ###### DO NOT MESS WITH THIS FROM HERE ...
  // Preamble byte Addresses
  static final int PREAMBLE_LONGS_BYTE = 0; // Only low 6 bits used
  static final int LG_RESIZE_FACTOR_BIT = 6; // upper 2 bits. Not used by compact or direct.
  static final int SER_VER_BYTE = 1;
  static final int FAMILY_BYTE = 2;
  static final int FLAGS_BYTE = 3;
  static final int RESERVOIR_SIZE_SHORT = 4; // used in ser_ver 1
  static final int RESERVOIR_SIZE_INT = 4;
  static final int SERDE_ID_SHORT = 6; // used in ser_ver 1
  static final int ITEMS_SEEN_LONG = 8;

  static final int MAX_K_SIZE_INT = 4; // used in Union only
  static final int OUTER_TAU_NUM_DOUBLE = 16; // used in Varopt Union only
  static final int OUTER_TAU_DENOM_LONG = 24; // used in Varopt Union only

  // constants and addresses used in varopt
  static final int ITEM_COUNT_H_INT = 16;
  static final int ITEM_COUNT_R_INT = 20;
  static final int TOTAL_WEIGHT_R_DOUBLE = 24;
  static final int VO_PRELONGS_EMPTY = Family.VAROPT.getMinPreLongs();
  static final int VO_PRELONGS_WARMUP = 3; // Doesn't match min or max prelongs in Family
  static final int VO_PRELONGS_FULL = Family.VAROPT.getMaxPreLongs();

  // flag bit masks
  //static final int BIG_ENDIAN_FLAG_MASK = 1;
  //static final int READ_ONLY_FLAG_MASK = 2;
  static final int EMPTY_FLAG_MASK = 4;
  static final int GADGET_FLAG_MASK = 128;

  //Other constants
  static final int SER_VER = 2;

  static final boolean NATIVE_ORDER_IS_BIG_ENDIAN =
      (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN);

  // STRINGS

  /**
   * Returns a human readable string summary of the preamble state of the given byte array.
   * Used primarily in testing.
   *
   * @param byteArr the given byte array.
   * @return the summary preamble string.
   */
  static String preambleToString(final byte[] byteArr) {
    final Memory mem = Memory.wrap(byteArr);
    return preambleToString(mem);
  }

  /**
   * Returns a human readable string summary of the preamble state of the given Memory.
   * Note: other than making sure that the given Memory size is large
   * enough for just the preamble, this does not do much value checking of the contents of the
   * preamble as this is primarily a tool for debugging the preamble visually.
   *
   * @param mem the given Memory.
   * @return the summary preamble string.
   */
  static String preambleToString(final Memory mem) {
    final int preLongs = getAndCheckPreLongs(mem); // make sure we can get the assumed preamble
    final Family family = Family.idToFamily(mem.getByte(FAMILY_BYTE));

    // Dispatch on family: sketches and unions have different preamble layouts.
    switch (family) {
      case RESERVOIR:
      case VAROPT:
        return sketchPreambleToString(mem, family, preLongs);
      case RESERVOIR_UNION:
      case VAROPT_UNION:
        return unionPreambleToString(mem, family, preLongs);
      default:
        throw new SketchesArgumentException("Inspecting preamble with Sampling family's "
            + "PreambleUtil with object of family " + family.getFamilyName());
    }
  }

  // Renders the preamble of a RESERVOIR or VAROPT sketch.
  private static String sketchPreambleToString(final Memory mem,
                                               final Family family,
                                               final int preLongs) {
    final ResizeFactor rf = ResizeFactor.getRF(extractResizeFactor(mem));
    final int serVer = extractSerVer(mem);

    // Flags
    final int flags = extractFlags(mem);
    final String flagsStr = zeroPad(Integer.toBinaryString(flags), 8) + ", " + (flags);
    //final boolean bigEndian = (flags & BIG_ENDIAN_FLAG_MASK) > 0;
    //final String nativeOrder = ByteOrder.nativeOrder().toString();
    //final boolean readOnly = (flags & READ_ONLY_FLAG_MASK) > 0;
    final boolean isEmpty = (flags & EMPTY_FLAG_MASK) > 0;
    final boolean isGadget = (flags & GADGET_FLAG_MASK) > 0;

    // Serialization version 1 stored k in an encoded 16-bit form.
    final int k;
    if (serVer == 1) {
      final short encK = extractEncodedReservoirSize(mem);
      k = ReservoirSize.decodeValue(encK);
    } else {
      k = extractK(mem);
    }

    long n = 0;
    if (!isEmpty) {
      n = extractN(mem);
    }
    final long dataBytes = mem.getCapacity() - (preLongs << 3);

    final StringBuilder sb = new StringBuilder();
    sb.append(LS)
      .append("### END ")
      .append(family.getFamilyName().toUpperCase())
      .append(" PREAMBLE SUMMARY").append(LS)
      .append("Byte 0: Preamble Longs : ").append(preLongs).append(LS)
      .append("Byte 0: ResizeFactor : ").append(rf.toString()).append(LS)
      .append("Byte 1: Serialization Version: ").append(serVer).append(LS)
      .append("Byte 2: Family : ").append(family.toString()).append(LS)
      .append("Byte 3: Flags Field : ").append(flagsStr).append(LS)
      //.append(" BIG_ENDIAN_STORAGE : ").append(bigEndian).append(LS)
      //.append(" (Native Byte Order) : ").append(nativeOrder).append(LS)
      //.append(" READ_ONLY : ").append(readOnly).append(LS)
      .append(" EMPTY : ").append(isEmpty).append(LS);
    if (family == Family.VAROPT) {
      sb.append(" GADGET : ").append(isGadget).append(LS);
    }
    sb.append("Bytes 4-7: Sketch Size (k) : ").append(k).append(LS);
    if (!isEmpty) {
      sb.append("Bytes 8-15: Items Seen (n) : ").append(n).append(LS);
    }

    // VAROPT-only fields: H/R region counts and, if present, the R weight.
    if ((family == Family.VAROPT) && !isEmpty) {
      final int hCount = extractHRegionItemCount(mem);
      final int rCount = extractRRegionItemCount(mem);
      final double totalRWeight = extractTotalRWeight(mem);
      sb.append("Bytes 16-19: H region count : ").append(hCount).append(LS)
        .append("Bytes 20-23: R region count : ").append(rCount).append(LS);
      if (rCount > 0) {
        sb.append("Bytes 24-31: R region weight : ").append(totalRWeight).append(LS);
      }
    }

    sb.append("TOTAL Sketch Bytes : ").append(mem.getCapacity()).append(LS)
      .append(" Preamble Bytes : ").append(preLongs << 3).append(LS)
      .append(" Data Bytes : ").append(dataBytes).append(LS)
      .append("### END ")
      .append(family.getFamilyName().toUpperCase())
      .append(" PREAMBLE SUMMARY").append(LS);
    return sb.toString();
  }

  // Renders the preamble of a RESERVOIR_UNION or VAROPT_UNION.
  private static String unionPreambleToString(final Memory mem,
                                              final Family family,
                                              final int preLongs) {
    final ResizeFactor rf = ResizeFactor.getRF(extractResizeFactor(mem));
    final int serVer = extractSerVer(mem);

    // Flags
    final int flags = extractFlags(mem);
    final String flagsStr = zeroPad(Integer.toBinaryString(flags), 8) + ", " + (flags);
    //final boolean bigEndian = (flags & BIG_ENDIAN_FLAG_MASK) > 0;
    //final String nativeOrder = ByteOrder.nativeOrder().toString();
    //final boolean readOnly = (flags & READ_ONLY_FLAG_MASK) > 0;
    final boolean isEmpty = (flags & EMPTY_FLAG_MASK) > 0;

    // Serialization version 1 stored k in an encoded 16-bit form.
    final int k;
    if (serVer == 1) {
      final short encK = extractEncodedReservoirSize(mem);
      k = ReservoirSize.decodeValue(encK);
    } else {
      k = extractK(mem);
    }
    final long dataBytes = mem.getCapacity() - (preLongs << 3);

    return LS
        + "### END " + family.getFamilyName().toUpperCase() + " PREAMBLE SUMMARY" + LS
        + "Byte 0: Preamble Longs : " + preLongs + LS
        + "Byte 0: ResizeFactor : " + rf.toString() + LS
        + "Byte 1: Serialization Version : " + serVer + LS
        + "Byte 2: Family : " + family.toString() + LS
        + "Byte 3: Flags Field : " + flagsStr + LS
        //+ " BIG_ENDIAN_STORAGE : " + bigEndian + LS
        //+ " (Native Byte Order) : " + nativeOrder + LS
        //+ " READ_ONLY : " + readOnly + LS
        + " EMPTY : " + isEmpty + LS
        + "Bytes 4-7: Max Sketch Size (maxK): " + k + LS
        + "TOTAL Sketch Bytes : " + mem.getCapacity() + LS
        + " Preamble Bytes : " + (preLongs << 3) + LS
        + " Sketch Bytes : " + dataBytes + LS
        + "### END " + family.getFamilyName().toUpperCase() + " PREAMBLE SUMMARY" + LS;
  }

  // Extraction methods
  // Each extractor reads one preamble field at the byte offsets defined above.

  static int extractPreLongs(final Memory mem) {
    return mem.getByte(PREAMBLE_LONGS_BYTE) & 0x3F;
  }

  static int extractResizeFactor(final Memory mem) {
    return (mem.getByte(PREAMBLE_LONGS_BYTE) >>> LG_RESIZE_FACTOR_BIT) & 0x3;
  }

  static int extractSerVer(final Memory mem) {
    return mem.getByte(SER_VER_BYTE) & 0xFF;
  }

  static int extractFamilyID(final Memory mem) {
    return mem.getByte(FAMILY_BYTE) & 0xFF;
  }

  static int extractFlags(final Memory mem) {
    return mem.getByte(FLAGS_BYTE) & 0xFF;
  }

  static short extractEncodedReservoirSize(final Memory mem) {
    return mem.getShort(RESERVOIR_SIZE_SHORT);
  }

  static int extractK(final Memory mem) {
    return mem.getInt(RESERVOIR_SIZE_INT);
  }

  static int extractMaxK(final Memory mem) {
    return extractK(mem); // same offset is shared by sketch k and union maxK
  }

  static long extractN(final Memory mem) {
    return mem.getLong(ITEMS_SEEN_LONG);
  }

  static int extractHRegionItemCount(final Memory mem) {
    return mem.getInt(ITEM_COUNT_H_INT);
  }

  static int extractRRegionItemCount(final Memory mem) {
    return mem.getInt(ITEM_COUNT_R_INT);
  }

  static double extractTotalRWeight(final Memory mem) {
    return mem.getDouble(TOTAL_WEIGHT_R_DOUBLE);
  }

  static double extractOuterTauNumerator(final Memory mem) {
    return mem.getDouble(OUTER_TAU_NUM_DOUBLE);
  }

  static long extractOuterTauDenominator(final Memory mem) {
    return mem.getLong(OUTER_TAU_DENOM_LONG);
  }

  // Insertion methods
  // Each inserter writes one preamble field at the byte offsets defined above.

  static void insertPreLongs(final WritableMemory wmem, final int preLongs) {
    // Merge preLongs into the low 6 bits, preserving the resize-factor bits.
    final int curByte = wmem.getByte(PREAMBLE_LONGS_BYTE);
    final int mask = 0x3F;
    final byte newByte = (byte) ((preLongs & mask) | (~mask & curByte));
    wmem.putByte(PREAMBLE_LONGS_BYTE, newByte);
  }

  static void insertLgResizeFactor(final WritableMemory wmem, final int rf) {
    // Merge rf into the top 2 bits, preserving the preLongs bits.
    final int curByte = wmem.getByte(PREAMBLE_LONGS_BYTE);
    final int shift = LG_RESIZE_FACTOR_BIT; // shift in bits
    final int mask = 3;
    final byte newByte = (byte) (((rf & mask) << shift) | (~(mask << shift) & curByte));
    wmem.putByte(PREAMBLE_LONGS_BYTE, newByte);
  }

  static void insertSerVer(final WritableMemory wmem, final int serVer) {
    wmem.putByte(SER_VER_BYTE, (byte) serVer);
  }

  static void insertFamilyID(final WritableMemory wmem, final int famId) {
    wmem.putByte(FAMILY_BYTE, (byte) famId);
  }

  static void insertFlags(final WritableMemory wmem, final int flags) {
    wmem.putByte(FLAGS_BYTE, (byte) flags);
  }

  static void insertK(final WritableMemory wmem, final int k) {
    wmem.putInt(RESERVOIR_SIZE_INT, k);
  }

  static void insertMaxK(final WritableMemory wmem, final int maxK) {
    insertK(wmem, maxK); // same offset is shared by sketch k and union maxK
  }

  static void insertN(final WritableMemory wmem, final long totalSeen) {
    wmem.putLong(ITEMS_SEEN_LONG, totalSeen);
  }

  static void insertHRegionItemCount(final WritableMemory wmem, final int hCount) {
    wmem.putInt(ITEM_COUNT_H_INT, hCount);
  }

  static void insertRRegionItemCount(final WritableMemory wmem, final int rCount) {
    wmem.putInt(ITEM_COUNT_R_INT, rCount);
  }

  static void insertTotalRWeight(final WritableMemory wmem, final double weight) {
    wmem.putDouble(TOTAL_WEIGHT_R_DOUBLE, weight);
  }

  static void insertOuterTauNumerator(final WritableMemory wmem, final double numer) {
    wmem.putDouble(OUTER_TAU_NUM_DOUBLE, numer);
  }

  static void insertOuterTauDenominator(final WritableMemory wmem, final long denom) {
    wmem.putLong(OUTER_TAU_DENOM_LONG, denom);
  }

  /**
   * Checks Memory for capacity to hold the preamble and returns the extracted preLongs.
   * @param mem the given Memory
   * @return the extracted prelongs value.
   */
  static int getAndCheckPreLongs(final Memory mem) {
    final long cap = mem.getCapacity();
    if (cap < 8) { throwNotBigEnough(cap, 8); }
    final int preLongs = mem.getByte(0) & 0x3F;
    final int required = Math.max(preLongs << 3, 8);
    if (cap < required) { throwNotBigEnough(cap, required); }
    return preLongs;
  }

  // Shared failure path for capacity checks.
  private static void throwNotBigEnough(final long cap, final int required) {
    throw new SketchesArgumentException(
        "Possible Corruption: Size of byte array or Memory not large enough: Size: " + cap
            + ", Required: " + required);
  }
}
| 7,454 |
436 | import sys
class App:
    """Minimal ASGI HTTP application that replies with a plain-text greeting."""

    def __init__(self, scope):
        # This demo app only speaks the HTTP protocol.
        assert scope["type"] == "http"
        self.scope = scope

    async def __call__(self, receive, send):
        # First event: response status line and headers.
        start_event = {
            "type": "http.response.start",
            "status": 200,
            "headers": [[b"content-type", b"text/plain"]],
        }
        await send(start_event)

        # Second event: the body, which embeds the running Python version.
        py_version = "%d.%d" % (sys.version_info.major, sys.version_info.minor)
        body = (
            "Hello world! From Uvicorn with Gunicorn. Using Python " + py_version
        ).encode("utf-8")
        await send({"type": "http.response.body", "body": body})


# Gunicorn/Uvicorn entry point: the class itself is the ASGI application factory.
app = App
| 314 |
5,766 | <gh_stars>1000+
//
// ODBCException.cpp
//
// Library: Data/ODBC
// Package: ODBC
// Module: ODBCException
//
// Copyright (c) 2006, Applied Informatics Software Engineering GmbH.
// and Contributors.
//
// SPDX-License-Identifier: BSL-1.0
//
#include "Poco/Data/ODBC/ODBCException.h"
#include <typeinfo>
namespace Poco {
namespace Data {
namespace ODBC {


// Concrete exception types for the ODBC connector. Each macro expands to a
// complete exception class definition; these pair with the corresponding
// POCO_DECLARE_EXCEPTION entries in ODBCException.h.
POCO_IMPLEMENT_EXCEPTION(ODBCException, Poco::Data::DataException, "Generic ODBC error")
POCO_IMPLEMENT_EXCEPTION(InsufficientStorageException, ODBCException, "Insufficient storage error")
POCO_IMPLEMENT_EXCEPTION(UnknownDataLengthException, ODBCException, "Unknown length of remaining data")
POCO_IMPLEMENT_EXCEPTION(DataTruncatedException, ODBCException, "Variable length character or binary data truncated")


} } } // namespace Poco::Data::ODBC
| 268 |
543 | /* NiuTrans.Tensor - an open-source tensor library
* Copyright (C) 2020, Natural Language Processing Lab, Northeastern University.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Created by: <NAME> (<EMAIL>) 2019-04-08
* Start of a new week - I just finished several documents.
* Writing document is harder than writing code :)
* $Modified by: <NAME> (<EMAIL>) 2020-04
*/
#ifndef __T2TLENGTHPENALTY_H__
#define __T2TLENGTHPENALTY_H__
#include "../module/T2TUtility.h"
#include "../../../tensor/XTensor.h"
using namespace nts;
namespace transformer
{
/* We intend to penalize short sequences because they have higher score
   in product of a sequence of probability-like terms and have more chances
   to beat others in search. */
class T2TLengthPenalizer
{
public:
    /* GNMT-like length penalty: pl = ((5 + n)/(5 + 1))^\alpha
       where n = length of the sequence
       >> length - length n of the generated sequence
       >> alpha - exponent controlling the penalty strength
       << return - the length-penalty factor pl used to normalize scores */
    static float GNMT(float length, float alpha);
};
}
#endif
| 445 |
879 | <reponame>qianfei11/zstack
package org.zstack.sdk;
/**
 * Result payload of the DeleteMulticastRouter API call.
 * Currently carries no fields; its presence signals successful completion.
 */
public class DeleteMulticastRouterResult {
}
| 40 |
683 | package cn.byhieg.annotationstutorial;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.tools.Diagnostic;
import javax.tools.JavaFileObject;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.Set;
/**
 * Annotation processor demo: for each element annotated with APTAnnotation it
 * generates a companion class whose static method prints the annotation's
 * metadata (version, author, date) and details of the annotated element.
 *
 * Created by byhieg on 17/2/14.
 * Mail to <EMAIL>
 */
public class APTProcessor extends AbstractProcessor{

    // Prefix and suffix used to build the generated class name
    public static final String SUFFIX = "AutoGenerate";
    public static final String PREFIX = "byhieg_";

    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        for (TypeElement typeElement : annotations) {
            for (Element e : roundEnv.getElementsAnnotatedWith(typeElement)) {
                // Print diagnostic messages about the annotated element
                Messager messager = processingEnv.getMessager();
                messager.printMessage(Diagnostic.Kind.NOTE, "Printing:" + e.toString());
                messager.printMessage(Diagnostic.Kind.NOTE, "Printing:" + e.getSimpleName());
                messager.printMessage(Diagnostic.Kind.NOTE, "Printing:" + e.getEnclosedElements().toString());

                // Fetch the APTAnnotation instance present on this element
                APTAnnotation aptAnnotation = e.getAnnotation(APTAnnotation.class);

                // Get the element name and capitalize its first letter
                String name = e.getSimpleName().toString();
                char c = Character.toUpperCase(name.charAt(0));
                name = String.valueOf(c + name.substring(1));

                // The element enclosing the annotated one, i.e. its parent; for an
                // annotated field or method this is the declaring class
                Element enclosingElement = e.getEnclosingElement();
                String enclosingQualifiedname;
                if (enclosingElement instanceof PackageElement) {
                    enclosingQualifiedname = ((PackageElement) enclosingElement).getQualifiedName().toString();
                } else {
                    enclosingQualifiedname = ((TypeElement) enclosingElement).getQualifiedName().toString();
                }

                try {
                    // Package name for the generated class
                    String generatePackageName = enclosingQualifiedname.substring(0, enclosingQualifiedname.lastIndexOf("."));
                    // Name of the generated class
                    String genarateClassName = PREFIX + enclosingElement.getSimpleName() + SUFFIX;
                    // Create the Java source file
                    JavaFileObject f = processingEnv.getFiler().createSourceFile(genarateClassName);
                    // Print the generated file path to the console
                    messager.printMessage(Diagnostic.Kind.NOTE, "Printing: " + f.toUri());
                    Writer w = f.openWriter();
                    try {
                        PrintWriter pw = new PrintWriter(w);
                        pw.println("package " + generatePackageName + ";");
                        pw.println("\npublic class " + genarateClassName + " { ");
                        pw.println("\n /** 打印值 */");
                        pw.println(" public static void print" + name + "() {");
                        pw.println(" // 注解的父元素: " + enclosingElement.toString());
                        pw.println(" System.out.println(\"代码生成的路径: " + f.toUri() + "\");");
                        pw.println(" System.out.println(\"注解的元素: " + e.toString() + "\");");
                        pw.println(" System.out.println(\"注解的版本: " + aptAnnotation.version() + "\");");
                        pw.println(" System.out.println(\"注解的作者: " + aptAnnotation.author() + "\");");
                        pw.println(" System.out.println(\"注解的日期: " + aptAnnotation.date() + "\");");
                        pw.println(" }");
                        pw.println("}");
                        pw.flush();
                    } finally {
                        w.close();
                    }
                } catch (IOException e1) {
                    processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, e1.toString());
                }
            }
        }
        return true;
    }

    @Override
    public Set<String> getSupportedAnnotationTypes() {
        return super.getSupportedAnnotationTypes();
    }

    @Override
    public SourceVersion getSupportedSourceVersion() {
        return SourceVersion.latestSupported();
    }

    @Override
    public synchronized void init(ProcessingEnvironment processingEnv) {
        super.init(processingEnv);
    }
}
| 2,457 |
666 | <gh_stars>100-1000
package com.didispace.scca.plugin.git;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * Git-related configuration bound from the config-server properties.
 *
 * Created by 程序猿DD/翟永超 on 2018/4/24.
 * <p>
 * Blog: http://blog.didispace.com/
 * Github: https://github.com/dyc87112/
 */
@Data
@ConfigurationProperties
public class SccaGitProperties {

    /**
     * Username used to log in to Git
     */
    @Value("${spring.cloud.config.server.git.username:}")
    private String username;

    /**
     * Password used to log in to Git
     */
    @Value("${spring.cloud.config.server.git.password:}")
    private String password;

    /**
     * URI of the Git configuration repository, matching the Git repository
     * configured in config-server.
     * <p>
     * For example: https://github.com/dyc87112/{application}.git
     */
    @Value("${spring.cloud.config.server.git.uri:}")
    private String repoUri;

    /**
     * Relative search paths under the Git repository, matching the relative
     * paths configured for the repository in config-server
     */
    @Value("${spring.cloud.config.server.git.search-paths:}")
    private String searchPaths;
}
| 514 |
#include <algorithm>
#include <iostream>
#include <vector>
using namespace std;
// Removes adjacent duplicate values from arr in place (for sorted input this
// leaves only the distinct values in arr[0..result-1]).
// arr : array whose first n elements are scanned and compacted
// n   : number of valid elements in arr
// returns the new logical size (count of kept elements); 0 when n <= 0
int remDups(int arr[], int n)
{
    if (n <= 0)
    {
        // Empty input: the original code read arr[0] unconditionally.
        return 0;
    }
    // Collect kept elements. std::vector replaces the non-standard VLA
    // `int temp[n]` and guarantees no uninitialized storage is ever read.
    std::vector<int> kept;
    kept.reserve(n);
    kept.push_back(arr[0]);
    for (int i = 1; i < n; i++)
    {
        // Keep arr[i] only when it differs from the last kept element.
        if (kept.back() != arr[i])
        {
            kept.push_back(arr[i]);
        }
    }
    // Copy back only the kept elements. The original copied all n slots,
    // reading uninitialized entries of temp beyond the result size (UB).
    std::copy(kept.begin(), kept.end(), arr);
    return static_cast<int>(kept.size());
}
// Demo driver: prints the array and its size before and after removing
// adjacent duplicates with remDups().
int main()
{
    int arr[] = {10, 20, 20, 30, 30, 30, 30};
    const int n = 7;

    // Show the array before duplicate removal.
    cout << "Before Removal Duplicates: ";
    for (int idx = 0; idx < n; idx++)
    {
        cout << arr[idx] << " ";
    }
    cout << endl << "Size = " << n << endl;

    // Compact the array and show the surviving prefix.
    const int newSize = remDups(arr, n);
    cout << "After Removal Duplicates: ";
    for (int idx = 0; idx < newSize; idx++)
    {
        cout << arr[idx] << " ";
    }
    cout << endl << "Size = " << newSize << endl;
}
407 | package com.alibaba.tesla.appmanager.domain.req;
import com.alibaba.fastjson.JSONObject;
import lombok.Data;
/**
 * Request for updating application metadata.
 *
 * @author <EMAIL>
 */
@Data
public class AppMetaUpdateReq {

    /**
     * Unique application identifier
     */
    private String appId;

    /**
     * Application options
     */
    private JSONObject options;

    /**
     * Update mode: "append" (merge into the existing options) or
     * "overwrite" (replace them); defaults to "append"
     */
    private String mode = "append";
}
| 223 |
575 | <reponame>sarang-apps/darshan_browser
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_ANDROID_WEBAPK_WEBAPK_HANDLER_DELEGATE_H_
#define CHROME_BROWSER_ANDROID_WEBAPK_WEBAPK_HANDLER_DELEGATE_H_
#include <jni.h>
#include "base/android/scoped_java_ref.h"
#include "base/callback.h"
#include "base/macros.h"
#include "chrome/browser/android/webapk/webapk_info.h"
// Delegate for retrieving installed WebAPKs for display in WebUI.
class WebApkHandlerDelegate {
 public:
  // Callback invoked once per installed WebAPK with its parsed metadata.
  using WebApkInfoCallback = base::RepeatingCallback<void(const WebApkInfo&)>;

  explicit WebApkHandlerDelegate(const WebApkInfoCallback& callback);
  ~WebApkHandlerDelegate();

  // Fetches information about each WebAPK.
  void RetrieveWebApks();

  // Called once for each installed WebAPK when RetrieveWebApks() is called.
  // Invoked from Java over JNI; the j-prefixed parameters carry the
  // Java-side manifest, version, and update-status values for one WebAPK.
  void OnWebApkInfoRetrieved(
      JNIEnv* env,
      const base::android::JavaParamRef<jstring>& jname,
      const base::android::JavaParamRef<jstring>& jshort_name,
      const base::android::JavaParamRef<jstring>& jpackage_name,
      const base::android::JavaParamRef<jstring>& jid,
      const jint jshell_apk_version,
      const jint jversion_code,
      const base::android::JavaParamRef<jstring>& juri,
      const base::android::JavaParamRef<jstring>& jscope,
      const base::android::JavaParamRef<jstring>& jmanifest_url,
      const base::android::JavaParamRef<jstring>& jmanifest_start_url,
      const jint jdisplay_mode,
      const jint jorientation,
      const jlong jtheme_color,
      const jlong jbackground_color,
      const jlong jlast_update_check_time_ms,
      const jlong jlast_update_completion_time_ms,
      const jboolean jrelax_updates,
      const base::android::JavaParamRef<jstring>& jbacking_browser_package_name,
      const jboolean jis_backing_browser,
      const base::android::JavaParamRef<jstring>& jupdate_status);

 private:
  // Receives one WebApkInfo per installed WebAPK.
  WebApkInfoCallback callback_;

  // Java-side counterpart of this delegate.
  base::android::ScopedJavaGlobalRef<jobject> j_obj_;

  DISALLOW_COPY_AND_ASSIGN(WebApkHandlerDelegate);
};
#endif // CHROME_BROWSER_ANDROID_WEBAPK_WEBAPK_HANDLER_DELEGATE_H_
| 851 |
834 | <gh_stars>100-1000
// System::Windows::Forms::TreeNode.ImageIndex
// System::Windows::Forms::TreeNode.SelectedImageIndex
// System::Windows::Forms::TreeView.ImageIndex
// System::Windows::Forms::TreeView::SelectedImageIndex
// System::Windows::Forms::TreeView::ImageList
// System::Windows::Forms::TreeNode::TreeNode(String*, int, int)
// System::Windows::Forms::TreeNode::TreeNode(String*, int, int, TreeNode[])
/*
The following example demonstrates the constructors
'TreeNode(String*, int, int)' and 'TreeNode(String*, int, int, TreeNode[])' of
the 'TreeNode' class. This example displays customerinformation in a
'TreeView' control. The root tree nodes display customer names, and the
child tree nodes display the order numbers assigned to each customer.
*/
#using <System.dll>
#using <System.Windows.Forms.dll>
#using <System.Drawing.dll>
using namespace System;
using namespace System::Drawing;
using namespace System::Collections;
using namespace System::Windows::Forms;
// Form demonstrating the TreeNode(String*, int, int) and
// TreeNode(String*, int, int, TreeNode[]) constructors: customer names are
// shown as root-level nodes and each customer's order numbers as child
// nodes, with distinct selected/unselected images per node kind.
public ref class TreeNode_Checked: public Form
{
private:
   // Indexes into the TreeView's ImageList for each node kind and
   // selection state (assigned in FillTreeView).
   int rootImageIndex;
   int selectedCustomerImageIndex;
   int unselectedCustomerImageIndex;
   int selectedOrderImageIndex;
   int unselectedOrderImageIndex;
   TreeView^ myTreeView;

   // ArrayList object to hold the 'Customer' objects.
   ArrayList^ customerArray;

public:
   TreeNode_Checked()
   {
      customerArray = gcnew ArrayList;
      InitializeComponent();
      FillMyTreeView();
   }

private:
   // Builds the sample data (5 customers x 5 orders each) and rebuilds
   // the TreeView from it.
   void FillMyTreeView()
   {
      // Add customers to the ArrayList of 'Customer' objects.
      for ( int xIndex = 1; xIndex <= 5; xIndex++ )
      {
         customerArray->Add( gcnew Customer( String::Concat( "Customer ", xIndex ) ) );
      }

      // Add orders to each 'Customer' object in the ArrayList.
      IEnumerator^ myEnum = customerArray->GetEnumerator();
      while ( myEnum->MoveNext() )
      {
         Customer^ customer1 = safe_cast<Customer^>(myEnum->Current);
         for ( int yIndex = 1; yIndex <= 5; yIndex++ )
         {
            customer1->CustomerOrders->Add( gcnew Order( String::Concat( "Order ", yIndex ) ) );
         }
      }

      // Suspend painting while the tree is rebuilt.
      myTreeView->BeginUpdate();

      // Clear the TreeView each time the method is called.
      myTreeView->Nodes->Clear();
      FillTreeView();

      // Begin repainting the TreeView.
      myTreeView->EndUpdate();
   }

   // <Snippet1>
   // Simple data holder: a customer name plus the list of its orders.
   ref class Customer
   {
   public:
      ArrayList^ CustomerOrders;
      String^ CustomerName;
      Customer( String^ myName )
      {
         CustomerName = myName;
         CustomerOrders = gcnew ArrayList;
      }
   };

   // Simple data holder: a single order identifier.
   ref class Order
   {
   public:
      String^ OrderID;
      Order( String^ myOrderID )
      {
         this->OrderID = myOrderID;
      }
   };

   // Loads node images, assigns them to the TreeView, then builds the
   // root -> customer -> order node hierarchy from customerArray.
   void FillTreeView()
   {
      // Load the images in an ImageList.
      // NOTE(review): Image::FromFile assumes these .gif files exist in the
      // process working directory -- confirm before running.
      ImageList^ myImageList = gcnew ImageList;
      myImageList->Images->Add( Image::FromFile( "Default.gif" ) );
      myImageList->Images->Add( Image::FromFile( "SelectedDefault.gif" ) );
      myImageList->Images->Add( Image::FromFile( "Root.gif" ) );
      myImageList->Images->Add( Image::FromFile( "UnselectedCustomer.gif" ) );
      myImageList->Images->Add( Image::FromFile( "SelectedCustomer.gif" ) );
      myImageList->Images->Add( Image::FromFile( "UnselectedOrder.gif" ) );
      myImageList->Images->Add( Image::FromFile( "SelectedOrder.gif" ) );

      // Assign the ImageList to the TreeView.
      myTreeView->ImageList = myImageList;

      // Set the TreeView control's default image and selected image indexes.
      myTreeView->ImageIndex = 0;
      myTreeView->SelectedImageIndex = 1;

      /* Set the index of image from the
         ImageList for selected and unselected tree nodes.*/
      this->rootImageIndex = 2;
      this->selectedCustomerImageIndex = 3;
      this->unselectedCustomerImageIndex = 4;
      this->selectedOrderImageIndex = 5;
      this->unselectedOrderImageIndex = 6;

      // Create the root tree node.
      TreeNode^ rootNode = gcnew TreeNode( "CustomerList" );
      rootNode->ImageIndex = rootImageIndex;
      rootNode->SelectedImageIndex = rootImageIndex;

      // Add a main root tree node.
      myTreeView->Nodes->Add( rootNode );

      // Add a root tree node for each Customer object in the ArrayList.
      IEnumerator^ myEnum = customerArray->GetEnumerator();
      while ( myEnum->MoveNext() )
      {
         Customer^ myCustomer = safe_cast<Customer^>(myEnum->Current);

         // Add a child tree node for each Order object.
         int countIndex = 0;
         array<TreeNode^>^myTreeNodeArray = gcnew array<TreeNode^>(myCustomer->CustomerOrders->Count);

         // NOTE(review): this inner 'myEnum' shadows the outer customer
         // enumerator; it is scoped to this loop body, but renaming it
         // would aid readability.
         IEnumerator^ myEnum = myCustomer->CustomerOrders->GetEnumerator();
         while ( myEnum->MoveNext() )
         {
            Order^ myOrder = safe_cast<Order^>(myEnum->Current);

            // Add the Order tree node to the array.
            myTreeNodeArray[ countIndex ] = gcnew TreeNode( myOrder->OrderID,unselectedOrderImageIndex,selectedOrderImageIndex );
            countIndex++;
         }

         // Create the customer node with its order nodes attached.
         TreeNode^ customerNode = gcnew TreeNode( myCustomer->CustomerName,unselectedCustomerImageIndex,selectedCustomerImageIndex,myTreeNodeArray );
         myTreeView->Nodes[ 0 ]->Nodes->Add( customerNode );
      }
   }
   // </Snippet1>

   // Standard WinForms designer-style setup: creates the TreeView and
   // configures the form's layout.
   void InitializeComponent()
   {
      this->myTreeView = gcnew TreeView;
      this->SuspendLayout();
      this->myTreeView->ImageIndex = -1;
      this->myTreeView->Location = Point(8,0);
      this->myTreeView->Name = "myTreeView";
      this->myTreeView->SelectedImageIndex = -1;
      this->myTreeView->Size = System::Drawing::Size( 280, 152 );
      this->myTreeView->TabIndex = 0;
      this->ClientSize = System::Drawing::Size( 292, 273 );
      this->Controls->Add( this->myTreeView );
      this->Name = "Form1";
      this->Text = "TreeNode Example";
      this->ResumeLayout( true );
   }
};
// Application entry point: starts a message loop running the demo form.
int main()
{
   Application::Run( gcnew TreeNode_Checked );
}
| 2,304 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.