code
stringlengths 3
1.05M
| repo_name
stringlengths 4
116
| path
stringlengths 3
942
| language
stringclasses 30
values | license
stringclasses 15
values | size
int32 3
1.05M
|
---|---|---|---|---|---|
package com.carrotsearch.examples.randomizedrunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
/**
 * This is a test-based tutorial introducing randomized JUnit testing with
 * {@link RandomizedRunner}. Follow the test cases in alphabetic order.
 *
 * <p>One way to start using {@link RandomizedRunner} is to declare your suite
 * class as being executed by {@link RandomizedRunner} (using the {@link RunWith}
 * annotation). The {@link #success()} method doesn't do anything useful, but it
 * runs under {@link RandomizedRunner}. We know this for sure because the hook
 * method (<code>before()</code>) is declared <em>private</em>, which the default
 * JUnit runner does not allow.
 */
@RunWith(RandomizedRunner.class)
public class Test001SimpleUseCase {
// A private @Before hook: accepted by RandomizedRunner, rejected by the
// default JUnit runner — this proves which runner is executing the suite.
@Before
private void before() {
// This won't work under the default JUnit runner.
}
// Intentionally empty; exists only to give the runner a test to execute.
@Test
public void success() {
// Do nothing.
}
}
|
randomizedtesting/randomizedtesting
|
examples/maven/src/main/java/com/carrotsearch/examples/randomizedrunner/Test001SimpleUseCase.java
|
Java
|
apache-2.0
| 997 |
# --- !Ups
-- Active login sessions, keyed by an opaque session token.
-- laststatustoken / laststatuschange record the most recent state transition.
CREATE TABLE sessions (
token varchar(255) NOT NULL,
useragent varchar(255) NOT NULL,
email varchar(255) NOT NULL,
laststatustoken varchar(255) NOT NULL,
laststatuschange TIMESTAMP NOT NULL,
PRIMARY KEY (token)
);
-- Audit log of user consent events. Request-metadata columns are nullable
-- because not every client supplies them.
CREATE TABLE consentlogging (
id bigint auto_increment NOT NULL ,
timestamp TIMESTAMP NOT NULL,
ipaddress varchar(255),
useragent varchar(255),
email varchar(255),
link TEXT,
referer TEXT,
PRIMARY KEY (id)
);
-- Files uploaded by users. users_accountsubject references the owning account
-- (assumes users(accountsubject) exists from an earlier evolution -- confirm).
CREATE TABLE userfiles (
uuid varchar(255) NOT NULL,
users_accountsubject varchar(255) REFERENCES users(accountsubject),
originalfilename TEXT NOT NULL,
linkreference TEXT NOT NULL,
laststatustoken varchar(255) NOT NULL,
laststatuschange TIMESTAMP NOT NULL,
PRIMARY KEY (uuid)
);
-- User-owned metadata records, mirroring userfiles' ownership/status columns.
-- cswreference presumably points at a CSW catalogue entry -- TODO confirm.
CREATE TABLE usermetarecords (
uuid varchar(255) NOT NULL,
users_accountsubject varchar(255) REFERENCES users(accountsubject),
originaluuid TEXT NOT NULL,
cswreference TEXT NOT NULL,
laststatustoken varchar(255) NOT NULL,
laststatuschange TIMESTAMP NOT NULL,
PRIMARY KEY (uuid)
);
# --- !Downs
-- Drop in reverse creation order.
DROP TABLE usermetarecords;
DROP TABLE userfiles;
DROP TABLE consentlogging;
DROP TABLE sessions;
|
ZGIS/smart-portal-backend
|
test/resources/testh2db/evolutions/default/3.sql
|
SQL
|
apache-2.0
| 1,167 |
/**
* Copyright (c), Data Geekery GmbH, [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jooq.lambda;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.function.Predicate;
import org.jooq.lambda.function.Functions;
import org.junit.Test;
/**
 * Unit tests for the predicate combinators in {@link Functions}: logical
 * negation, conjunction and disjunction of {@link Predicate} instances over
 * divisibility by 2 and 3.
 *
 * @author Lukas Eder
 */
public class PredicateTests {

    @Test
    public void testPredicates() {
        Predicate<Integer> even = i -> i % 2 == 0;
        Predicate<Integer> threes = i -> i % 3 == 0;

        // Plain predicate and its negation.
        assertTrue(even.test(0));
        assertFalse(even.test(1));
        assertFalse(Functions.not(even).test(0));
        assertTrue(Functions.not(even).test(1));

        // Conjunction: holds exactly for multiples of 6 in [0, 6].
        Predicate<Integer> divisibleBySix = Functions.and(even, threes);
        for (int i = 0; i <= 6; i++) {
            if (i == 0 || i == 6)
                assertTrue(divisibleBySix.test(i));
            else
                assertFalse(divisibleBySix.test(i));
        }

        // Disjunction: fails only for 1 and 5 in [0, 6].
        Predicate<Integer> divisibleByTwoOrThree = Functions.or(even, threes);
        for (int i = 0; i <= 6; i++) {
            if (i == 1 || i == 5)
                assertFalse(divisibleByTwoOrThree.test(i));
            else
                assertTrue(divisibleByTwoOrThree.test(i));
        }
    }
}
|
jOOQ/jOOL
|
jOOL/src/test/java/org/jooq/lambda/PredicateTests.java
|
Java
|
apache-2.0
| 2,035 |
/* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#ifndef __ETCHDEFAULTSERVERFACTORY_H__
#define __ETCHDEFAULTSERVERFACTORY_H__
#include "support/EtchServerFactory.h"
/**
 * Default implementation of ServerFactory. Used by Etch generated Helper files
 * to provide listeners with backstop implementations of Session methods which
 * forward to user's implementation factory.
 *
 * Holds a listener transport and the user's session implementation factory
 * (mListener / mSession). Ownership/lifetime of both pointers is not visible
 * in this header -- NOTE(review): confirm in the .cpp implementation.
 */
class EtchDefaultServerFactory
: public EtchServerFactory {
public:
/**
 * Constructs the DefaultServerFactory.
 * @param listener transport stored in mListener
 * @param implFactory user session implementation stored in mSession
 */
EtchDefaultServerFactory(EtchTransport<EtchServerFactory>* listener, EtchSession* implFactory);
/**
 * Destructor
 */
virtual ~EtchDefaultServerFactory();
/**
 * Session query backstop.
 * @see EtchSession
 */
virtual status_t sessionQuery(capu::SmartPointer<EtchObject> query, capu::SmartPointer<EtchObject> &result);
/**
 * Session control backstop.
 * @see EtchSession
 */
virtual status_t sessionControl(capu::SmartPointer<EtchObject> control, capu::SmartPointer<EtchObject> value);
/**
 * Session event notification backstop.
 * @see EtchSession
 */
virtual status_t sessionNotify(capu::SmartPointer<EtchObject> event );
/**
 * Returns the current session (mSession).
 * @see EtchTransport<EtchSession>
 */
virtual EtchSession* getSession();
/**
 * Replaces the current session (mSession).
 * @see EtchTransport<EtchSession>
 */
virtual void setSession( EtchSession* session );
/**
 * Transport control backstop.
 * @see EtchTransport<EtchSession>
 */
virtual status_t transportControl(capu::SmartPointer<EtchObject> control, capu::SmartPointer<EtchObject> value);
/**
 * Transport event notification backstop.
 * @see EtchTransport<EtchSession>
 */
virtual status_t transportNotify(capu::SmartPointer<EtchObject> event);
/**
 * Transport query backstop.
 * NOTE(review): takes the result as a pointer while sessionQuery takes a
 * reference -- inconsistent, but part of the published interface.
 * @see EtchTransport<EtchSession>
 */
virtual status_t transportQuery(capu::SmartPointer<EtchObject> query, capu::SmartPointer<EtchObject> *result);
private:
// Listener transport supplied at construction.
EtchTransport<EtchServerFactory>* mListener;
// User's session implementation; mutable via setSession().
EtchSession* mSession;
};
#endif /* __ETCHDEFAULTSERVERFACTORY_H__*/
|
OBIGOGIT/etch
|
binding-cpp/runtime/include/support/EtchDefaultServerFactory.h
|
C
|
apache-2.0
| 2,651 |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.thoughtworks.go.config.validation.FilePathTypeValidator;
import com.thoughtworks.go.domain.ArtifactType;
import com.thoughtworks.go.plugin.access.artifact.ArtifactMetadataStore;
import com.thoughtworks.go.plugin.api.info.PluginDescriptor;
import com.thoughtworks.go.plugin.domain.artifact.ArtifactPluginInfo;
import com.thoughtworks.go.plugin.domain.common.Metadata;
import com.thoughtworks.go.plugin.domain.common.PluggableInstanceSettings;
import com.thoughtworks.go.plugin.domain.common.PluginConfiguration;
import com.thoughtworks.go.security.CryptoException;
import com.thoughtworks.go.security.GoCipher;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.thoughtworks.go.config.BuildArtifactConfig.DEST;
import static com.thoughtworks.go.config.BuildArtifactConfig.SRC;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code ArtifactTypeConfigs}: adding built-in and pluggable
 * artifact configurations, populating them from request attribute maps via
 * {@code setConfigAttributes}, validation of duplicates/paths, and the
 * type-filtered accessors.
 */
public class ArtifactTypeConfigsTest {
// add() accepts duplicates on purpose; validate()/validateTree() report them
// (exercised further below).
@Test
public void shouldAddDuplicatedArtifactSoThatValidationKicksIn() throws Exception {
final ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
assertThat(artifactTypeConfigs.size(), is(0));
artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
assertThat(artifactTypeConfigs.size(), is(2));
}
// setConfigAttributes builds one config per attribute map, picking the
// concrete type from the "artifactTypeValue" entry.
@Test
public void shouldLoadArtifactPlans() {
HashMap<String, String> artifactPlan1 = new HashMap<>();
artifactPlan1.put(SRC, "blah");
artifactPlan1.put(DEST, "something");
artifactPlan1.put("artifactTypeValue", TestArtifactConfig.TEST_PLAN_DISPLAY_NAME);
HashMap<String, String> artifactPlan2 = new HashMap<>();
artifactPlan2.put(SRC, "blah2");
artifactPlan2.put(DEST, "something2");
artifactPlan2.put("artifactTypeValue", BuildArtifactConfig.ARTIFACT_PLAN_DISPLAY_NAME);
// NOTE(review): raw List<HashMap> — presumably what setConfigAttributes
// accepts; confirm before parameterizing.
List<HashMap> artifactPlansList = new ArrayList<>();
artifactPlansList.add(artifactPlan1);
artifactPlansList.add(artifactPlan2);
ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
artifactTypeConfigs.setConfigAttributes(artifactPlansList);
assertThat(artifactTypeConfigs.size(), is(2));
TestArtifactConfig plan = new TestArtifactConfig();
plan.setSource("blah");
plan.setDestination("something");
assertThat(artifactTypeConfigs.get(0), is(plan));
assertThat(artifactTypeConfigs.get(1), is(new BuildArtifactConfig("blah2", "something2")));
}
// An entry whose SRC and DEST are both empty (artifactPlan3) is dropped,
// and relative order of the remaining entries is preserved.
@Test
public void setConfigAttributes_shouldIgnoreEmptySourceAndDest() {
HashMap<String, String> artifactPlan1 = new HashMap<>();
artifactPlan1.put(SRC, "blah");
artifactPlan1.put(DEST, "something");
artifactPlan1.put("artifactTypeValue", TestArtifactConfig.TEST_PLAN_DISPLAY_NAME);
HashMap<String, String> artifactPlan2 = new HashMap<>();
artifactPlan2.put(SRC, "blah2");
artifactPlan2.put(DEST, "something2");
artifactPlan2.put("artifactTypeValue", BuildArtifactConfig.ARTIFACT_PLAN_DISPLAY_NAME);
HashMap<String, String> artifactPlan3 = new HashMap<>();
artifactPlan3.put(SRC, "");
artifactPlan3.put(DEST, "");
artifactPlan3.put("artifactTypeValue", BuildArtifactConfig.ARTIFACT_PLAN_DISPLAY_NAME);
List<HashMap> artifactPlansList = new ArrayList<>();
artifactPlansList.add(artifactPlan1);
artifactPlansList.add(artifactPlan3);
artifactPlansList.add(artifactPlan2);
ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
artifactTypeConfigs.setConfigAttributes(artifactPlansList);
assertThat(artifactTypeConfigs.size(), is(2));
TestArtifactConfig plan = new TestArtifactConfig();
plan.setSource("blah");
plan.setDestination("something");
assertThat(artifactTypeConfigs.get(0), is(plan));
assertThat(artifactTypeConfigs.get(1), is(new BuildArtifactConfig("blah2", "something2")));
}
// With a known pluginId, configuration values arrive as plain text and the
// plugin's metadata decides secureness; "Image" is declared secure in the
// plugin info but the plain-text attribute path leaves it non-secure here.
@Test
public void setConfigAttributes_shouldSetExternalArtifactWithPlainTextValuesIfPluginIdIsProvided() {
ArtifactPluginInfo artifactPluginInfo = mock(ArtifactPluginInfo.class);
PluginDescriptor pluginDescriptor = mock(PluginDescriptor.class);
when(artifactPluginInfo.getDescriptor()).thenReturn(pluginDescriptor);
when(pluginDescriptor.id()).thenReturn("cd.go.artifact.foo");
PluginConfiguration image = new PluginConfiguration("Image", new Metadata(true, true));
PluginConfiguration tag = new PluginConfiguration("Tag", new Metadata(true, false));
ArrayList<PluginConfiguration> pluginMetadata = new ArrayList<>();
pluginMetadata.add(image);
pluginMetadata.add(tag);
when(artifactPluginInfo.getArtifactConfigSettings()).thenReturn(new PluggableInstanceSettings(pluginMetadata));
// NOTE(review): mutates the ArtifactMetadataStore singleton; no cleanup
// visible in this block — confirm no cross-test leakage.
ArtifactMetadataStore.instance().setPluginInfo(artifactPluginInfo);
HashMap<Object, Object> configurationMap1 = new HashMap<>();
configurationMap1.put("Image", "gocd/gocd-server");
configurationMap1.put("Tag", "v18.6.0");
HashMap<String, Object> artifactPlan1 = new HashMap<>();
artifactPlan1.put("artifactTypeValue", "Pluggable Artifact");
artifactPlan1.put("id", "artifactId");
artifactPlan1.put("storeId", "storeId");
artifactPlan1.put("pluginId", "cd.go.artifact.foo");
artifactPlan1.put("configuration", configurationMap1);
List<Map> artifactPlansList = new ArrayList<>();
artifactPlansList.add(artifactPlan1);
ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
artifactTypeConfigs.setConfigAttributes(artifactPlansList);
assertThat(artifactTypeConfigs.size(), is(1));
PluggableArtifactConfig artifactConfig = (PluggableArtifactConfig) artifactTypeConfigs.get(0);
assertThat(artifactConfig.getArtifactType(), is(ArtifactType.external));
assertThat(artifactConfig.getId(), is("artifactId"));
assertThat(artifactConfig.getStoreId(), is("storeId"));
assertThat(artifactConfig.getConfiguration().getProperty("Image").isSecure(), is(false));
}
// With a blank pluginId the nested {value, isSecure} maps are taken as-is:
// the already-encrypted "Image" value is preserved, not re-processed.
@Test
public void setConfigAttributes_shouldSetConfigurationAsIsIfPluginIdIsBlank() throws CryptoException {
HashMap<Object, Object> imageMap = new HashMap<>();
imageMap.put("value", new GoCipher().encrypt("some-encrypted-value"));
imageMap.put("isSecure", "true");
HashMap<Object, Object> tagMap = new HashMap<>();
tagMap.put("value", "18.6.0");
tagMap.put("isSecure", "false");
HashMap<Object, Object> configurationMap1 = new HashMap<>();
configurationMap1.put("Image", imageMap);
configurationMap1.put("Tag", tagMap);
HashMap<String, Object> artifactPlan1 = new HashMap<>();
artifactPlan1.put("artifactTypeValue", "Pluggable Artifact");
artifactPlan1.put("id", "artifactId");
artifactPlan1.put("storeId", "storeId");
artifactPlan1.put("pluginId", "");
artifactPlan1.put("configuration", configurationMap1);
List<Map> artifactPlansList = new ArrayList<>();
artifactPlansList.add(artifactPlan1);
ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
artifactTypeConfigs.setConfigAttributes(artifactPlansList);
assertThat(artifactTypeConfigs.size(), is(1));
PluggableArtifactConfig artifactConfig = (PluggableArtifactConfig) artifactTypeConfigs.get(0);
assertThat(artifactConfig.getArtifactType(), is(ArtifactType.external));
assertThat(artifactConfig.getId(), is("artifactId"));
assertThat(artifactConfig.getStoreId(), is("storeId"));
assertThat(artifactConfig.getConfiguration().getProperty("Image").getValue(), is("some-encrypted-value"));
assertThat(artifactConfig.getConfiguration().getProperty("Tag").getValue(), is("18.6.0"));
}
// Passing null to setConfigAttributes clears all existing configs.
@Test
public void shouldClearAllArtifactsWhenTheMapIsNull() {
ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
artifactTypeConfigs.setConfigAttributes(null);
assertThat(artifactTypeConfigs.size(), is(0));
}
// validateTree flags both members of a duplicate pair (on SRC and DEST) and
// rejects destinations escaping the working directory ("../a").
@Test
public void shouldValidateTree() {
ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
artifactTypeConfigs.add(new BuildArtifactConfig("src", "../a"));
artifactTypeConfigs.validateTree(null);
assertThat(artifactTypeConfigs.get(0).errors().on(BuiltinArtifactConfig.DEST), is("Duplicate artifacts defined."));
assertThat(artifactTypeConfigs.get(0).errors().on(BuiltinArtifactConfig.SRC), is("Duplicate artifacts defined."));
assertThat(artifactTypeConfigs.get(1).errors().on(BuiltinArtifactConfig.DEST), is("Duplicate artifacts defined."));
assertThat(artifactTypeConfigs.get(1).errors().on(BuiltinArtifactConfig.SRC), is("Duplicate artifacts defined."));
assertThat(artifactTypeConfigs.get(2).errors().on(BuiltinArtifactConfig.DEST), is("Invalid destination path. Destination path should match the pattern " + FilePathTypeValidator.PATH_PATTERN));
}
// validate() marks every one of the three identical configs as a duplicate.
@Test
public void shouldErrorOutWhenDuplicateArtifactConfigExists() {
final ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs(new BuildArtifactConfig("src", "dest"));
artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
artifactTypeConfigs.validate(null);
assertFalse(artifactTypeConfigs.get(0).errors().isEmpty());
assertThat(artifactTypeConfigs.get(0).errors().on(BuiltinArtifactConfig.SRC), Matchers.is("Duplicate artifacts defined."));
assertThat(artifactTypeConfigs.get(0).errors().on(BuiltinArtifactConfig.DEST), Matchers.is("Duplicate artifacts defined."));
assertFalse(artifactTypeConfigs.get(1).errors().isEmpty());
assertThat(artifactTypeConfigs.get(1).errors().on(BuiltinArtifactConfig.SRC), Matchers.is("Duplicate artifacts defined."));
assertThat(artifactTypeConfigs.get(1).errors().on(BuiltinArtifactConfig.DEST), Matchers.is("Duplicate artifacts defined."));
assertFalse(artifactTypeConfigs.get(2).errors().isEmpty());
assertThat(artifactTypeConfigs.get(2).errors().on(BuiltinArtifactConfig.SRC), Matchers.is("Duplicate artifacts defined."));
assertThat(artifactTypeConfigs.get(2).errors().on(BuiltinArtifactConfig.DEST), Matchers.is("Duplicate artifacts defined."));
}
// getBuiltInArtifactConfigs returns only the build artifacts, excluding
// pluggable ones.
@Test
public void getArtifactConfigs_shouldReturnBuiltinArtifactConfigs() {
ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs();
allConfigs.add(new BuildArtifactConfig("src", "dest"));
allConfigs.add(new BuildArtifactConfig("java", null));
allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
final List<BuiltinArtifactConfig> artifactConfigs = allConfigs.getBuiltInArtifactConfigs();
assertThat(artifactConfigs, hasSize(2));
assertThat(artifactConfigs, containsInAnyOrder(
new BuildArtifactConfig("src", "dest"),
new BuildArtifactConfig("java", null)
));
}
// getPluggableArtifactConfigs is the complementary filter.
@Test
public void getPluggableArtifactConfigs_shouldReturnPluggableArtifactConfigs() {
ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs();
allConfigs.add(new BuildArtifactConfig("src", "dest"));
allConfigs.add(new BuildArtifactConfig("java", null));
allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
final List<PluggableArtifactConfig> artifactConfigs = allConfigs.getPluggableArtifactConfigs();
assertThat(artifactConfigs, hasSize(2));
assertThat(artifactConfigs, containsInAnyOrder(
new PluggableArtifactConfig("s3", "cd.go.s3"),
new PluggableArtifactConfig("docker", "cd.go.docker")
));
}
// Lookup by artifact id: hit...
@Test
public void findByArtifactId_shouldReturnPluggableArtifactConfigs() {
ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs();
allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
final PluggableArtifactConfig s3 = allConfigs.findByArtifactId("s3");
assertThat(s3, is(new PluggableArtifactConfig("s3", "cd.go.s3")));
}
// ...and miss, which yields null rather than throwing.
@Test
public void findByArtifactId_shouldReturnNullWhenPluggableArtifactConfigNotExistWithGivenId() {
ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs();
allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
final PluggableArtifactConfig s3 = allConfigs.findByArtifactId("foo");
assertNull(s3);
}
}
|
gocd/gocd
|
config/config-api/src/test/java/com/thoughtworks/go/config/ArtifactTypeConfigsTest.java
|
Java
|
apache-2.0
| 14,363 |
<?php
/*
* This file is part of the Doctrine\OrientDB package.
*
* (c) Alessandro Nadalin <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
 * Class Updates
 *
 * Formats the field/value assignments of an UPDATE statement's SET clause.
 *
 * @package Doctrine\OrientDB
 * @subpackage Formatter
 * @author Alessandro Nadalin <[email protected]>
 */
namespace Doctrine\OrientDB\Query\Formatter\Query;
use Doctrine\OrientDB\Query\Formatter\Query;
class Updates extends Query implements TokenInterface
{
    /**
     * Turns an associative array of field => value pairs into the
     * comma-separated assignment list of a SET clause. Keys that strip down
     * to nothing are skipped; values are rendered per their PHP type
     * (NULL, numbers as-is, booleans as TRUE/FALSE, arrays as [..],
     * everything else as a slash-escaped double-quoted string).
     */
    public static function format(array $values)
    {
        $assignments = array();

        foreach ($values as $field => $rawValue) {
            $field = self::stripNonSQLCharacters($field);

            // Skip entries whose key contains no usable SQL identifier.
            if (!$field) {
                continue;
            }

            if ($rawValue === null) {
                $formatted = 'NULL';
            } elseif (is_int($rawValue) || is_float($rawValue)) {
                // Numeric values are emitted verbatim.
                $formatted = $rawValue;
            } elseif (is_bool($rawValue)) {
                $formatted = $rawValue ? 'TRUE' : 'FALSE';
            } elseif (is_array($rawValue)) {
                $formatted = '[' . implode(',', $rawValue) . ']';
            } else {
                $formatted = '"' . addslashes($rawValue) . '"';
            }

            $assignments[] = " $field = $formatted";
        }

        return implode(',', $assignments);
    }
}
|
spartaksun/orientdb-query
|
src/Formatter/Query/Updates.php
|
PHP
|
apache-2.0
| 1,400 |
/*
* Copyright 2015-2021 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/* eslint-disable no-shadow */
import { faPlus } from '@fortawesome/free-solid-svg-icons';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import {
Box,
Button,
Theme,
createStyles,
makeStyles,
} from '@material-ui/core';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import { connect } from 'react-redux';
import { ThunkDispatch } from 'redux-thunk';
import Criterion, { newCriterion } from '../Criterion';
import CriterionBox from './CriterionBox';
import { loadAutocompleteValues } from '../../../slices/autocompleteValuesSlice';
import { loadRemoteServices } from '../../../slices/remoteServicesSlice';
import { loadSpans } from '../../../slices/spansSlice';
import { RootState } from '../../../store';
// Styles for the square "add criterion" button; fixed 40px square so it
// aligns with the criterion boxes in the bar.
const useStyles = makeStyles((theme: Theme) =>
  createStyles({
    addButton: {
      height: 40,
      width: 40,
      minWidth: 40,
      color: theme.palette.common.white,
    },
  }),
);
// Props for SearchBarImpl. The name lists and loading flags are supplied from
// the Redux store via connect() below; the load* callbacks dispatch the
// corresponding thunks.
type SearchBarProps = {
  // Triggers the trace search with the current criteria.
  searchTraces: () => void;
  // Current search criteria (controlled by the parent via onChange).
  criteria: Criterion[];
  onChange: (criteria: Criterion[]) => void;
  serviceNames: string[];
  isLoadingServiceNames: boolean;
  spanNames: string[];
  isLoadingSpanNames: boolean;
  remoteServiceNames: string[];
  isLoadingRemoteServiceNames: boolean;
  autocompleteKeys: string[];
  autocompleteValues: string[];
  isLoadingAutocompleteValues: boolean;
  loadRemoteServices: (serviceName: string) => void;
  loadSpans: (serviceName: string) => void;
  loadAutocompleteValues: (autocompleteKey: string) => void;
};
// Search bar composed of editable criterion boxes plus an "add" button.
// Exported unconnected for unit tests; the default export below wires it to
// the Redux store.
export const SearchBarImpl: React.FC<SearchBarProps> = ({
  searchTraces,
  criteria,
  onChange,
  serviceNames,
  isLoadingServiceNames,
  spanNames,
  isLoadingSpanNames,
  remoteServiceNames,
  isLoadingRemoteServiceNames,
  autocompleteKeys,
  autocompleteValues,
  isLoadingAutocompleteValues,
  loadRemoteServices,
  loadSpans,
  loadAutocompleteValues,
}) => {
  const classes = useStyles();
  // criterionIndex is the index of the criterion currently being edited.
  // If the value is -1, there is no criterion being edited.
  const [criterionIndex, setCriterionIndex] = useState(-1);
  const handleCriterionFocus = (index: number) => {
    setCriterionIndex(index);
  };
  // Replaces the criterion at `index`; criteria is treated as immutable, so a
  // fresh array is handed to onChange.
  const handleCriterionChange = (index: number, criterion: Criterion) => {
    const newCriteria = [...criteria];
    newCriteria[index] = criterion;
    onChange(newCriteria);
  };
  const handleCriterionBlur = () => {
    setCriterionIndex(-1);
  };
  const handleCriterionDelete = (index: number) => {
    const newCriteria = criteria.filter((_, i) => i !== index);
    onChange(newCriteria);
    setCriterionIndex(-1);
  };
  // Called when a criterion's value is confirmed: confirming the last one
  // appends a fresh empty criterion and moves focus to it; otherwise editing
  // simply ends.
  const handleCriterionDecide = (index: number) => {
    if (index === criteria.length - 1) {
      const newCriteria = [...criteria];
      newCriteria.push(newCriterion('', ''));
      onChange(newCriteria);
      const nextCriterionIndex = criteria.length;
      setCriterionIndex(nextCriterionIndex);
    } else {
      setCriterionIndex(-1);
    }
  };
  // Appends an empty criterion and focuses it.
  const handleAddButtonClick = useCallback(() => {
    const newCriteria = [...criteria];
    newCriteria.push(newCriterion('', ''));
    onChange(newCriteria);
    const nextCriterionIndex = criteria.length;
    setCriterionIndex(nextCriterionIndex);
  }, [criteria, onChange]);
  // When the selected serviceName criterion changes, reload the span and
  // remote-service name lists for that service. The ref avoids re-fetching
  // when unrelated criteria change.
  const prevServiceName = useRef('');
  useEffect(() => {
    const criterion = criteria.find(
      // eslint-disable-next-line no-shadow
      (criterion) => criterion.key === 'serviceName',
    );
    const serviceName = criterion ? criterion.value : '';
    if (serviceName !== prevServiceName.current) {
      prevServiceName.current = serviceName;
      loadSpans(serviceName);
      loadRemoteServices(serviceName);
    }
  }, [criteria, loadSpans, loadRemoteServices]);
  // Search for traces if not all criterions are in focus
  // and the Enter key is pressed.
  // Use ref to use the latest criterionIndex state in the callback.
  const isFocusedRef = useRef(false);
  isFocusedRef.current = criterionIndex !== -1;
  const handleKeyDown = useCallback(
    (event: KeyboardEvent) => {
      // Use setTimeout to ensure that the callback is called
      // after the criterionIndex has been updated.
      setTimeout(() => {
        if (!document.activeElement) {
          return;
        }
        // Only trigger when no input has focus (activeElement falls back to
        // BODY) so typing Enter inside a criterion box doesn't search.
        if (
          !isFocusedRef.current &&
          document.activeElement.tagName === 'BODY' &&
          event.key === 'Enter'
        ) {
          searchTraces();
        }
      }, 0); // Maybe 0 is enough.
    },
    [searchTraces],
  );
  // Global listener; removed on unmount or when the handler identity changes.
  useEffect(() => {
    window.addEventListener('keydown', handleKeyDown);
    return () => {
      window.removeEventListener('keydown', handleKeyDown);
    };
  }, [handleKeyDown]);
  return (
    <Box
      minHeight={60}
      display="flex"
      alignItems="center"
      pr={2}
      pl={2}
      pt={1}
      pb={1}
      borderRadius={3}
      bgcolor="background.paper"
      flexWrap="wrap"
      borderColor="grey.400"
      border={1}
    >
      {criteria.map((criterion, index) => (
        <CriterionBox
          key={criterion.id}
          criteria={criteria}
          criterion={criterion}
          criterionIndex={index}
          serviceNames={serviceNames}
          remoteServiceNames={remoteServiceNames}
          spanNames={spanNames}
          autocompleteKeys={autocompleteKeys}
          autocompleteValues={autocompleteValues}
          isLoadingServiceNames={isLoadingServiceNames}
          isLoadingRemoteServiceNames={isLoadingRemoteServiceNames}
          isLoadingSpanNames={isLoadingSpanNames}
          isLoadingAutocompleteValues={isLoadingAutocompleteValues}
          isFocused={index === criterionIndex}
          onFocus={handleCriterionFocus}
          onBlur={handleCriterionBlur}
          onDecide={handleCriterionDecide}
          onChange={handleCriterionChange}
          onDelete={handleCriterionDelete}
          loadAutocompleteValues={loadAutocompleteValues}
        />
      ))}
      <Button
        color="secondary"
        variant="contained"
        onClick={handleAddButtonClick}
        className={classes.addButton}
        data-testid="add-button"
      >
        <FontAwesomeIcon icon={faPlus} size="lg" />
      </Button>
    </Box>
  );
};
// For unit testing, `connect` is easier to use than
// useSelector or useDispatch hooks.
// Projects the store slices onto SearchBarProps' data/loading props.
const mapStateToProps = (state: RootState) => ({
  serviceNames: state.services.services,
  isLoadingServiceNames: state.services.isLoading,
  spanNames: state.spans.spans,
  isLoadingSpanNames: state.spans.isLoading,
  remoteServiceNames: state.remoteServices.remoteServices,
  isLoadingRemoteServiceNames: state.remoteServices.isLoading,
  autocompleteKeys: state.autocompleteKeys.autocompleteKeys,
  autocompleteValues: state.autocompleteValues.autocompleteValues,
  isLoadingAutocompleteValues: state.autocompleteValues.isLoading,
});
// TODO: Give the appropriate type to ThunkDispatch after TypeScriptizing all action creators.
// Wraps the loader thunks so the component can call them as plain functions.
const mapDispatchToProps = (
  dispatch: ThunkDispatch<RootState, undefined, any>,
) => ({
  loadRemoteServices: (serviceName: string) => {
    dispatch(loadRemoteServices(serviceName));
  },
  loadSpans: (serviceName: string) => {
    dispatch(loadSpans(serviceName));
  },
  loadAutocompleteValues: (autocompleteKey: string) => {
    dispatch(loadAutocompleteValues(autocompleteKey));
  },
});
export default connect(mapStateToProps, mapDispatchToProps)(SearchBarImpl);
|
openzipkin/zipkin
|
zipkin-lens/src/components/DiscoverPage/SearchBar/SearchBar.tsx
|
TypeScript
|
apache-2.0
| 8,051 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction;
import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Request;
import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Response;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
 * Transport action backing the get-trained-models API: expands the requested
 * resource id expression into concrete model ids, then fetches the matching
 * model configurations from the {@link TrainedModelProvider}.
 */
public class TransportGetTrainedModelsAction extends HandledTransportAction<Request, Response> {
private final TrainedModelProvider provider;
@Inject
public TransportGetTrainedModelsAction(TransportService transportService,
                                       ActionFilters actionFilters,
                                       TrainedModelProvider trainedModelProvider) {
super(GetTrainedModelsAction.NAME, transportService, actionFilters, GetTrainedModelsAction.Request::new);
this.provider = trainedModelProvider;
}
@Override
protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
Response.Builder responseBuilder = Response.builder();
// Invoked once the id expression is expanded into
// (total match count, set of concrete model ids).
ActionListener<Tuple<Long, Set<String>>> idExpansionListener = ActionListener.wrap(
    totalAndIds -> {
        responseBuilder.setTotalCount(totalAndIds.v1());
        // Nothing matched: respond with just the total count.
        if (totalAndIds.v2().isEmpty()) {
            listener.onResponse(responseBuilder.build());
            return;
        }
        // Including the full model definition is only permitted for a
        // single model per request.
        if (request.getIncludes().isIncludeModelDefinition() && totalAndIds.v2().size() > 1) {
            listener.onFailure(
                ExceptionsHelper.badRequestException(Messages.INFERENCE_TOO_MANY_DEFINITIONS_REQUESTED)
            );
            return;
        }
        if (request.getIncludes().isIncludeModelDefinition()) {
            // Exactly one id remains here (guarded above).
            provider.getTrainedModel(
                totalAndIds.v2().iterator().next(),
                request.getIncludes(),
                ActionListener.wrap(
                    config -> listener.onResponse(responseBuilder.setModels(Collections.singletonList(config)).build()),
                    listener::onFailure
                )
            );
        } else {
            // Bulk fetch without definitions.
            provider.getTrainedModels(
                totalAndIds.v2(),
                request.getIncludes(),
                request.isAllowNoResources(),
                ActionListener.wrap(
                    configs -> listener.onResponse(responseBuilder.setModels(configs).build()),
                    listener::onFailure
                )
            );
        }
    },
    listener::onFailure
);
// Kick off the async pipeline: expand ids first, then fetch via the
// listener above.
provider.expandIds(request.getResourceId(),
    request.isAllowNoResources(),
    request.getPageParams(),
    new HashSet<>(request.getTags()),
    idExpansionListener);
}
}
|
nknize/elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java
|
Java
|
apache-2.0
| 3,814 |
// Copyright 2004, 2005, 2006 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.ioc.test.internal.services;
/**
 * Filter interface used by the IoC pipeline tests; implementations are
 * given the next {@link MiddleService} in the chain and (presumably —
 * based on the matching test fixtures) delegate to it while recording
 * their activity into the supplied buffer.
 */
public interface MiddleFilter
{
    /**
     * Executes one step of the filter pipeline.
     *
     * @param count   counter value passed through the pipeline
     * @param ch      character value passed through the pipeline
     * @param service the next service in the chain; implementations may delegate to it
     * @param buffer  shared buffer the pipeline writes output into
     */
    void execute(int count, char ch, MiddleService service, StringBuilder buffer);
}
|
apache/tapestry-5
|
tapestry-ioc/src/test/java/org/apache/tapestry5/ioc/test/internal/services/MiddleFilter.java
|
Java
|
apache-2.0
| 802 |
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Shell commands module
module Shell
  # Registry mapping command name (and any aliases) to its command class.
  @@commands = {}
  def self.commands
    @@commands
  end
  # Registry mapping group name to metadata about the group and its commands.
  @@command_groups = {}
  def self.command_groups
    @@command_groups
  end
  # Loads a single command class from shell/commands/<name> and registers it
  # (plus any aliases) under an already-registered group. A command that is
  # already registered is silently skipped.
  def self.load_command(name, group, aliases = [])
    return if commands[name]
    # Register command in the group
    raise ArgumentError, "Unknown group: #{group}" unless command_groups[group]
    command_groups[group][:commands] << name
    # Load command
    begin
      require "shell/commands/#{name}"
      klass_name = name.to_s.gsub(/(?:^|_)(.)/) { Regexp.last_match(1).upcase } # camelize
      commands[name] = eval("Commands::#{klass_name}")
      aliases.each do |an_alias|
        commands[an_alias] = commands[name]
      end
    rescue => e
      raise "Can't load hbase shell command: #{name}. Error: #{e}\n#{e.backtrace.join("\n")}"
    end
  end
  # Registers a command group described by opts (:commands is required;
  # :full_name, :comment and :aliases are optional) and loads every command
  # the group declares.
  def self.load_command_group(group, opts)
    raise ArgumentError, "No :commands for group #{group}" unless opts[:commands]
    command_groups[group] = {
      commands: [],
      command_names: opts[:commands],
      full_name: opts[:full_name] || group,
      comment: opts[:comment]
    }
    all_aliases = opts[:aliases] || {}
    opts[:commands].each do |command|
      aliases = all_aliases[command] || []
      load_command(command, group, aliases)
    end
  end
  #----------------------------------------------------------------------
  # The interactive shell itself: wraps an hbase connection object and
  # dispatches named commands to their registered command classes.
  class Shell
    attr_accessor :hbase
    attr_accessor :interactive
    alias interactive? interactive
    # NOTE(review): this assigns a class-level instance variable, not the
    # per-instance @debug exposed by the attr_accessor below; a new Shell
    # instance therefore starts with debug == nil until explicitly set.
    @debug = false
    attr_accessor :debug
    def initialize(hbase, interactive = true)
      self.hbase = hbase
      self.interactive = interactive
    end
    # Returns Admin class from admin.rb
    def admin
      @admin ||= hbase.admin
    end
    # Lazily-created accessors for the various admin facades on the hbase
    # connection object; each is cached after first use.
    def hbase_taskmonitor
      @hbase_taskmonitor ||= hbase.taskmonitor
    end
    def hbase_table(name)
      hbase.table(name, self)
    end
    def hbase_replication_admin
      @hbase_replication_admin ||= hbase.replication_admin
    end
    def hbase_security_admin
      @hbase_security_admin ||= hbase.security_admin
    end
    def hbase_visibility_labels_admin
      @hbase_visibility_labels_admin ||= hbase.visibility_labels_admin
    end
    def hbase_quotas_admin
      @hbase_quotas_admin ||= hbase.quotas_admin
    end
    def hbase_rsgroup_admin
      @rsgroup_admin ||= hbase.rsgroup_admin
    end
    # Defines one method per registered command on the given object (the IRB
    # workspace), so commands can be invoked as bare words in the shell.
    def export_commands(where)
      ::Shell.commands.keys.each do |cmd|
        # here where is the IRB namespace
        # this method just adds the call to the specified command
        # which just references back to 'this' shell object
        # a decently extensible way to add commands
        where.send :instance_eval, <<-EOF
          def #{cmd}(*args)
            ret = @shell.command('#{cmd}', *args)
            puts
            return ret
          end
        EOF
      end
    end
    # Instantiates the command class registered under the given name.
    def command_instance(command)
      ::Shell.commands[command.to_s].new(self)
    end
    # call the method 'command' on the specified command
    # If interactive is enabled, then we suppress the return value. The command should have
    # printed relevant output.
    # Return value is only useful in non-interactive mode, for e.g. tests.
    def command(command, *args)
      ret = internal_command(command, :command, *args)
      if interactive
        return nil
      else
        return ret
      end
    end
    # call a specific internal method in the command instance
    # command - name of the command to call
    # method_name - name of the method on the command to call. Defaults to just 'command'
    # args - to be passed to the named method
    def internal_command(command, method_name = :command, *args)
      command_instance(command).command_safe(debug, method_name, *args)
    end
    # Prints the interactive-shell greeting and version banner.
    def print_banner
      puts 'HBase Shell'
      puts 'Use "help" to get list of supported commands.'
      puts 'Use "exit" to quit this interactive shell.'
      print 'Version '
      command('version')
      puts
    end
    # Prints help for one command, prefixed with its name (used when
    # listing every command of a group).
    def help_multi_command(command)
      puts "Command: #{command}"
      puts command_instance(command).help
      puts
      nil
    end
    # Prints help for a single command.
    def help_command(command)
      puts command_instance(command).help
      nil
    end
    # Prints help for every command in a group, followed by the group's
    # comment, if any.
    def help_group(group_name)
      group = ::Shell.command_groups[group_name.to_s]
      group[:commands].sort.each { |cmd| help_multi_command(cmd) }
      if group[:comment]
        puts '-' * 80
        puts
        puts group[:comment]
        puts
      end
      nil
    end
    # Top-level help: with an argument, shows help for that command or
    # group; without one, shows the general overview and usage footer.
    def help(command = nil)
      if command
        return help_command(command) if ::Shell.commands[command.to_s]
        return help_group(command) if ::Shell.command_groups[command.to_s]
        puts "ERROR: Invalid command or command group name: #{command}"
        puts
      end
      puts help_header
      puts
      puts 'COMMAND GROUPS:'
      ::Shell.command_groups.each do |name, group|
        puts ' Group name: ' + name
        puts ' Commands: ' + group[:command_names].sort.join(', ')
        puts
      end
      unless command
        puts 'SHELL USAGE:'
        help_footer
      end
      nil
    end
    # Banner line for the top of the general 'help' output.
    def help_header
      "HBase Shell, version #{org.apache.hadoop.hbase.util.VersionInfo.getVersion}, " \
      "r#{org.apache.hadoop.hbase.util.VersionInfo.getRevision}, " \
      "#{org.apache.hadoop.hbase.util.VersionInfo.getDate}" + "\n" \
      "Type 'help \"COMMAND\"', (e.g. 'help \"get\"' -- the quotes are necessary) for help on a specific command.\n" \
      "Commands are grouped. Type 'help \"COMMAND_GROUP\"', (e.g. 'help \"general\"') for help on a command group."
    end
    # Usage text printed at the end of the general help output.
    def help_footer
      puts <<-HERE
Quote all names in HBase Shell such as table and column names. Commas delimit
command parameters. Type <RETURN> after entering a command to run it.
Dictionaries of configuration used in the creation and alteration of tables are
Ruby Hashes. They look like this:
  {'key1' => 'value1', 'key2' => 'value2', ...}
and are opened and closed with curley-braces. Key/values are delimited by the
'=>' character combination. Usually keys are predefined constants such as
NAME, VERSIONS, COMPRESSION, etc. Constants do not need to be quoted. Type
'Object.constants' to see a (messy) list of all constants in the environment.
If you are using binary keys or values and need to enter them in the shell, use
double-quote'd hexadecimal representation. For example:
  hbase> get 't1', "key\\x03\\x3f\\xcd"
  hbase> get 't1', "key\\003\\023\\011"
  hbase> put 't1', "test\\xef\\xff", 'f1:', "\\x01\\x33\\x40"
The HBase shell is the (J)Ruby IRB with the above HBase-specific commands added.
For more on the HBase Shell, see http://hbase.apache.org/book.html
      HERE
    end
  end
end
# Load commands base class
require 'shell/commands'
# Load all commands
# Each call below registers one command group; the %w[] lists name the
# command files under shell/commands/ that will be required and loaded.
Shell.load_command_group(
  'general',
  full_name: 'GENERAL HBASE SHELL COMMANDS',
  commands: %w[
    status
    version
    table_help
    whoami
    processlist
  ]
)
Shell.load_command_group(
  'ddl',
  full_name: 'TABLES MANAGEMENT COMMANDS',
  commands: %w[
    alter
    create
    describe
    disable
    disable_all
    is_disabled
    drop
    drop_all
    enable
    enable_all
    is_enabled
    exists
    list
    show_filters
    alter_status
    alter_async
    get_table
    locate_region
    list_regions
  ],
  aliases: {
    'describe' => ['desc']
  }
)
Shell.load_command_group(
  'namespace',
  full_name: 'NAMESPACE MANAGEMENT COMMANDS',
  commands: %w[
    create_namespace
    drop_namespace
    alter_namespace
    describe_namespace
    list_namespace
    list_namespace_tables
  ]
)
Shell.load_command_group(
  'dml',
  full_name: 'DATA MANIPULATION COMMANDS',
  commands: %w[
    count
    delete
    deleteall
    get
    get_counter
    incr
    put
    scan
    truncate
    truncate_preserve
    append
    get_splits
  ]
)
# The :comment here is shown after the group's command list in 'help'.
Shell.load_command_group(
  'tools',
  full_name: 'HBASE SURGERY TOOLS',
  comment: "WARNING: Above commands are for 'experts'-only as misuse can damage an install",
  commands: %w[
    assign
    balancer
    balance_switch
    balancer_enabled
    normalize
    normalizer_switch
    normalizer_enabled
    close_region
    compact
    flush
    major_compact
    move
    split
    merge_region
    unassign
    zk_dump
    wal_roll
    catalogjanitor_run
    catalogjanitor_switch
    catalogjanitor_enabled
    cleaner_chore_run
    cleaner_chore_switch
    cleaner_chore_enabled
    compact_rs
    compaction_state
    trace
    splitormerge_switch
    splitormerge_enabled
    clear_compaction_queues
    list_deadservers
    clear_deadservers
  ],
  # TODO: remove older hlog_roll command
  aliases: {
    'wal_roll' => ['hlog_roll']
  }
)
Shell.load_command_group(
  'replication',
  full_name: 'CLUSTER REPLICATION TOOLS',
  commands: %w[
    add_peer
    remove_peer
    list_peers
    enable_peer
    disable_peer
    set_peer_namespaces
    append_peer_namespaces
    remove_peer_namespaces
    show_peer_tableCFs
    set_peer_tableCFs
    set_peer_bandwidth
    list_replicated_tables
    append_peer_tableCFs
    remove_peer_tableCFs
    enable_table_replication
    disable_table_replication
    get_peer_config
    list_peer_configs
    update_peer_config
  ]
)
Shell.load_command_group(
  'snapshots',
  full_name: 'CLUSTER SNAPSHOT TOOLS',
  commands: %w[
    snapshot
    clone_snapshot
    restore_snapshot
    delete_snapshot
    delete_all_snapshot
    delete_table_snapshots
    list_snapshots
    list_table_snapshots
  ]
)
Shell.load_command_group(
  'configuration',
  full_name: 'ONLINE CONFIGURATION TOOLS',
  commands: %w[
    update_config
    update_all_config
  ]
)
Shell.load_command_group(
  'quotas',
  full_name: 'CLUSTER QUOTAS TOOLS',
  commands: %w[
    set_quota
    list_quotas
    list_quota_table_sizes
    list_quota_snapshots
    list_snapshot_sizes
  ]
)
Shell.load_command_group(
  'security',
  full_name: 'SECURITY TOOLS',
  comment: 'NOTE: Above commands are only applicable if running with the AccessController coprocessor',
  commands: %w[
    list_security_capabilities
    grant
    revoke
    user_permission
  ]
)
Shell.load_command_group(
  'procedures',
  full_name: 'PROCEDURES & LOCKS MANAGEMENT',
  commands: %w[
    abort_procedure
    list_procedures
    list_locks
  ]
)
Shell.load_command_group(
  'visibility labels',
  full_name: 'VISIBILITY LABEL TOOLS',
  comment: 'NOTE: Above commands are only applicable if running with the VisibilityController coprocessor',
  commands: %w[
    add_labels
    list_labels
    set_auths
    get_auths
    clear_auths
    set_visibility
  ]
)
Shell.load_command_group(
  'rsgroup',
  full_name: 'RSGroups',
  comment: "NOTE: The rsgroup Coprocessor Endpoint must be enabled on the Master else commands fail with:
  UnknownProtocolException: No registered Master Coprocessor Endpoint found for RSGroupAdminService",
  commands: %w[
    list_rsgroups
    get_rsgroup
    add_rsgroup
    remove_rsgroup
    balance_rsgroup
    move_servers_rsgroup
    move_tables_rsgroup
    move_servers_tables_rsgroup
    get_server_rsgroup
    get_table_rsgroup
  ]
)
|
JingchengDu/hbase
|
hbase-shell/src/main/ruby/shell.rb
|
Ruby
|
apache-2.0
| 11,986 |
package org.scalatest.events
import org.scalatest._
class LocationMethodSuiteProp extends MethodSuiteProp {
  // Runs each example suite under an EventRecordingReporter, checks every
  // recorded event via the suite's checkFun, then asserts that all of the
  // suite's expected locations were seen (allChecked).
  test("Method suites should have correct TopOfMethod location in test events.") {
    forAll(examples) { suite =>
      val reporter = new EventRecordingReporter
      suite.run(None, Args(reporter, Stopper.default, Filter(), Map(), None, new Tracker(new Ordinal(99)), Set.empty))
      val eventList = reporter.eventsReceived
      eventList.foreach { event => suite.checkFun(event) }
      suite.allChecked
    }
  }
  type FixtureServices = TestLocationMethodServices
  def suite = new TestLocationSuite
  // Plain Suite whose test methods succeed / pend / cancel / are ignored.
  // The expected* lists encode the (test name, class name, method signature)
  // triples that the fired events must carry as their location.
  class TestLocationSuite extends Suite with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationMethodSuiteProp$TestLocationSuite"
    val expectedStartingList = List(TestStartingPair("testSucceed", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSuite", "testSucceed()"),
                                TestStartingPair("testPending", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSuite", "testPending()"),
                                TestStartingPair("testCancel", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSuite", "testCancel()"))
    val expectedResultList = List(TestResultPair(classOf[TestSucceeded], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSuite", "testSucceed()"),
                              TestResultPair(classOf[TestPending], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSuite", "testPending()"),
                              TestResultPair(classOf[TestCanceled], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSuite", "testCancel()"),
                              TestResultPair(classOf[TestIgnored], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSuite", "testIgnore()"))
    val expectedScopeOpenedList = Nil
    val expectedScopeClosedList = Nil
    def testSucceed() {
    }
    def testPending() {
      pending
    }
    def testCancel() {
      cancel
    }
    @Ignore
    def testIgnore() {
    }
  }
  def fixtureSuite = new TestLocationFixtureSuite
  // Same as TestLocationSuite but for fixture.Suite (String fixture).
  class TestLocationFixtureSuite extends fixture.Suite with FixtureServices with StringFixture {
    val suiteTypeName = "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSuite"
    val expectedStartingList = List(TestStartingPair("testSucceed", "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSuite", "testSucceed()"),
                                TestStartingPair("testPending", "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSuite", "testPending()"),
                                TestStartingPair("testCancel", "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSuite", "testCancel()"))
    val expectedResultList = List(TestResultPair(classOf[TestSucceeded], "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSuite", "testSucceed()"),
                              TestResultPair(classOf[TestPending], "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSuite", "testPending()"),
                              TestResultPair(classOf[TestCanceled], "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSuite", "testCancel()"),
                              TestResultPair(classOf[TestIgnored], "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSuite", "testIgnore()"))
    val expectedScopeOpenedList = Nil
    val expectedScopeClosedList = Nil
    def testSucceed() {
    }
    def testPending() {
      pending
    }
    def testCancel() {
      cancel
    }
    @Ignore
    def testIgnore() {
    }
  }
  def spec = new TestLocationSpec
  // Spec variant: test names live inside an `A Spec` scope object, so the
  // expected class names use Scala's encoded ($u0020 = space) object names
  // and scope opened/closed events are expected as well.
  class TestLocationSpec extends Spec with FixtureServices {
    val suiteTypeName = "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec"
    val expectedStartingList = List(TestStartingPair("A Spec test succeed", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020succeed()"),
                                TestStartingPair("A Spec test pending", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020pending()"),
                                TestStartingPair("A Spec test cancel", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020cancel()"))
    val expectedResultList = List(TestResultPair(classOf[TestSucceeded], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020succeed()"),
                              TestResultPair(classOf[TestPending], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020pending()"),
                              TestResultPair(classOf[TestCanceled], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020cancel()"),
                              TestResultPair(classOf[TestIgnored], "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$", "test$u0020ignore()"))
    val expectedScopeOpenedList = List(ScopeOpenedPair("A Spec", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$"))
    val expectedScopeClosedList = List(ScopeClosedPair("A Spec", "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec$A$u0020Spec$"))
    object `A Spec` {
      def `test succeed` {
      }
      def `test pending` {
        pending
      }
      def `test cancel` {
        cancel
      }
      @Ignore
      def `test ignore` {
      }
    }
  }
  def fixtureSpec = new TestLocationFixtureSpec
  // fixture.Spec variant: method signatures additionally carry the fixture
  // parameter type (java.lang.String).
  class TestLocationFixtureSpec extends fixture.Spec with FixtureServices with StringFixture {
    val suiteTypeName = "org.scalatest.events.LocationMethodSuiteProp$TestLocationSpec"
    val expectedStartingList = List(TestStartingPair("A Spec test succeed", "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSpec$A$u0020Spec$", "test$u0020succeed(java.lang.String)"),
                                TestStartingPair("A Spec test pending", "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSpec$A$u0020Spec$", "test$u0020pending(java.lang.String)"),
                                TestStartingPair("A Spec test cancel", "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSpec$A$u0020Spec$", "test$u0020cancel(java.lang.String)"))
    val expectedResultList = List(TestResultPair(classOf[TestSucceeded], "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSpec$A$u0020Spec$", "test$u0020succeed(java.lang.String)"),
                              TestResultPair(classOf[TestPending], "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSpec$A$u0020Spec$", "test$u0020pending(java.lang.String)"),
                              TestResultPair(classOf[TestCanceled], "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSpec$A$u0020Spec$", "test$u0020cancel(java.lang.String)"),
                              TestResultPair(classOf[TestIgnored], "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSpec$A$u0020Spec$", "test$u0020ignore(java.lang.String)"))
    val expectedScopeOpenedList = List(ScopeOpenedPair("A Spec", "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSpec$A$u0020Spec$"))
    val expectedScopeClosedList = List(ScopeClosedPair("A Spec", "org.scalatest.events.LocationMethodSuiteProp$TestLocationFixtureSpec$A$u0020Spec$"))
    object `A Spec` {
      def `test succeed`(fixture: String) {
      }
      def `test pending`(fixture: String) {
        pending
      }
      def `test cancel`(fixture: String) {
        cancel
      }
      @Ignore
      def `test ignore`(fixture: String) {
      }
    }
  }
  def junit3Suite = new TestLocationMethodJUnit3Suite
  def junitSuite = new TestLocationMethodJUnitSuite
  def testngSuite = new TestLocationMethodTestNGSuite
}
|
hubertp/scalatest
|
src/test/scala/org/scalatest/events/LocationMethodSuiteProp.scala
|
Scala
|
apache-2.0
| 8,004 |
# sche.scheduler
```python
import sched
import time
def print_time():
print time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
def scheduler():
s = sched.scheduler(time.time, time.sleep) # timefunc, delayfunc
# 延迟5秒,优先级1,action, arguments(传给action的)
s.enter(5, 1, print_time, ())
s.enter(10, 1, print_time, ())
# 阻塞直到队列为空,阻塞是调用上面指定的delayfunc
s.run()
if __name__ == '__main__':
scheduler()
```
In multi-threaded programs, tasks cannot be added while the scheduler is running.
The scheduler blocks the main process (the calling thread) until the task queue is empty.
# threading.Timer
```python
from threading import Timer
Timer(5, print_time, ()).start()
Timer(10, print_time, ()).start()
time.sleep(11) # sleep while time-delay events execute
```
|
zhangyuchen0411/python_notes
|
scheduler.md
|
Markdown
|
apache-2.0
| 808 |
package io.cattle.platform.process.agent;
import io.cattle.platform.agent.util.AgentUtils;
import io.cattle.platform.core.constants.StoragePoolConstants;
import io.cattle.platform.core.model.Account;
import io.cattle.platform.core.model.Agent;
import io.cattle.platform.core.model.StoragePool;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.process.common.handler.AbstractObjectProcessHandler;
import io.github.ibuildthecloud.gdapi.factory.SchemaFactory;
import javax.inject.Inject;
import javax.inject.Named;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Named
public class AgentRemove extends AbstractObjectProcessHandler {

    private static final Logger log = LoggerFactory.getLogger(AgentRemove.class);

    @Inject
    @Named("CoreSchemaFactory")
    SchemaFactory schemaFactory;

    @Override
    public String[] getProcessNames() {
        return new String[] {"agent.remove"};
    }

    /**
     * Handles agent removal: deactivates and schedules removal of every
     * resource owned by the agent (except shared storage pools), then does
     * the same for the agent's account.
     */
    @Override
    public HandlerResult handle(ProcessState state, ProcessInstance process) {
        Agent agent = (Agent)state.getResource();
        for (String resourceType : AgentUtils.AGENT_RESOURCES.get()) {
            Class<?> resourceClass = schemaFactory.getSchemaClass(resourceType);
            if (resourceClass == null) {
                log.error("Failed to find class for [{}]", resourceType);
                continue;
            }
            removeChildren(agent, resourceClass, state);
        }
        deactivateThenScheduleRemove(objectManager.loadResource(Account.class, agent.getAccountId()), state.getData());
        return null;
    }

    /** Deactivates and schedules removal of the agent's children of one type. */
    private void removeChildren(Agent agent, Class<?> resourceClass, ProcessState state) {
        for (Object child : objectManager.children(agent, resourceClass)) {
            if (isSharedStoragePool(child)) {
                // Don't automatically delete shared storage pools
                continue;
            }
            deactivateThenScheduleRemove(child, state.getData());
        }
    }

    /** True when the object is a storage pool of the shared kind. */
    private boolean isSharedStoragePool(Object obj) {
        return obj instanceof StoragePool
                && StoragePoolConstants.TYPE.equals(((StoragePool)obj).getKind());
    }
}
|
wlan0/cattle
|
code/iaas/logic/src/main/java/io/cattle/platform/process/agent/AgentRemove.java
|
Java
|
apache-2.0
| 2,106 |
/**
*
*/
package org.hamster.weixinmp.model.menu;
import java.util.List;
import org.hamster.weixinmp.dao.entity.menu.WxMenuBtnEntity;
/**
* @author [email protected]
* @version Aug 4, 2013
*
*/
/**
 * Request payload for the WeChat menu-creation API: a single {@code button}
 * list holding the top-level menu buttons.
 */
public class WxMenuCreateJson {

    /** Top-level menu buttons to create; serialized as the "button" field. */
    private List<WxMenuBtnEntity> button;

    /** Creates an empty payload; populate via {@link #setButton(List)}. */
    public WxMenuCreateJson() {
    }

    /**
     * Creates a payload with the given top-level buttons.
     *
     * @param button the top-level menu buttons
     */
    public WxMenuCreateJson(List<WxMenuBtnEntity> button) {
        this.button = button;
    }

    public List<WxMenuBtnEntity> getButton() {
        return button;
    }

    public void setButton(List<WxMenuBtnEntity> button) {
        this.button = button;
    }
}
|
Wingo7239/WeixinMultiPlatform
|
src/main/java/org/hamster/weixinmp/model/menu/WxMenuCreateJson.java
|
Java
|
apache-2.0
| 617 |
/*
petpvcSTCPVCImageFilter.h
Author: Benjamin A. Thomas
Copyright 2017 Institute of Nuclear Medicine, University College London.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef __PETPVCSTCIMAGEFILTER_H
#define __PETPVCSTCIMAGEFILTER_H
#include "itkImage.h"
#include "itkImageToImageFilter.h"
#include <itkMultiplyImageFilter.h>
#include <itkDivideImageFilter.h>
#include <itkAddImageFilter.h>
#include <itkSubtractImageFilter.h>
#include <itkDiscreteGaussianImageFilter.h>
#include <itkStatisticsImageFilter.h>
#include <itkLabelStatisticsImageFilter.h>
#include <itkBinaryThresholdImageFilter.h>
//#include <itkImageFileWriter.h>
#include <itkImageDuplicator.h>
#include <itkImageRegionIterator.h>
#include <algorithm>
using namespace itk;
namespace petpvc
{
/*! Single Target Correction (STC)
An Implementation of the STC Method, see<br>
Sari H, Erlandsson K, Law I, Larsson HB, Ourselin S, Arridge S, Atkinson D, Hutton BF.
Estimation of an image derived input function with MR-defined carotid arteries in FDG-PET human studies using a novel partial volume correction method.
J Cereb Blood Flow Metab. 2017;37(4): 1398--409
<br>
Erlanddsson K and Hutton BF.
A novel voxel-based partial volume correction method for single regions of interest.
J Nucl Med Meeting Abstr 2014; 55: 2023.
*/
template< class TInputImage, typename TMaskImage>
class STCPVCImageFilter:public ImageToImageFilter< TInputImage, TInputImage >
{
public:
    /** Standard class typedefs. */
    typedef STCPVCImageFilter Self;
    typedef ImageToImageFilter< TInputImage, TInputImage > Superclass;
    typedef SmartPointer< Self > Pointer;
    /** Method for creation through the object factory. */
    itkNewMacro(Self);
    /** Run-time type information (and related methods). */
    itkTypeMacro(STCPVCImageFilter, ImageToImageFilter);
    /** Image related typedefs. */
    typedef TInputImage InputImageType;
    typedef typename TInputImage::ConstPointer InputImagePointer;
    typedef typename TInputImage::RegionType RegionType;
    typedef typename TInputImage::SizeType SizeType;
    typedef typename TInputImage::IndexType IndexType;
    typedef typename TInputImage::PixelType PixelType;
    /** Mask image related typedefs. */
    typedef TMaskImage MaskImageType;
    typedef typename TMaskImage::ConstPointer MaskImagePointer;
    typedef typename TMaskImage::RegionType MaskRegionType;
    typedef typename TMaskImage::SizeType MaskSizeType;
    typedef typename TMaskImage::IndexType MaskIndexType;
    typedef typename TMaskImage::PixelType MaskPixelType;
    //For calculating mean values from image
    typedef itk::StatisticsImageFilter<TInputImage> StatisticsFilterType;
    //For getting information about the mask labels
    typedef itk::LabelStatisticsImageFilter<TInputImage, TMaskImage> LabelStatisticsFilterType;
    typedef typename LabelStatisticsFilterType::ValidLabelValuesContainerType ValidLabelValuesType;
    typedef typename LabelStatisticsFilterType::LabelPixelType LabelPixelType;
    typedef itk::BinaryThresholdImageFilter<TMaskImage, TInputImage> BinaryThresholdImageFilterType;
    typedef itk::MultiplyImageFilter<TInputImage, TInputImage> MultiplyFilterType;
    typedef itk::DivideImageFilter<TInputImage,TInputImage, TInputImage> DivideFilterType;
    typedef itk::AddImageFilter<TInputImage, TInputImage> AddFilterType;
    typedef itk::SubtractImageFilter<TInputImage, TInputImage> SubtractFilterType;
    typedef itk::DiscreteGaussianImageFilter<TInputImage, TInputImage> BlurringFilterType;
    typedef itk::ImageDuplicator<TInputImage> DuplicatorType;
    typedef itk::ImageRegionIterator<TInputImage> ImageIteratorType;
    //typedef itk::ImageFileWriter<TInputImage> WriterType;
    typedef itk::Vector<float, 3> ITKVectorType;
    /** Image related typedefs. */
    itkStaticConstMacro(InputImageDimension, unsigned int,
                        3);
    itkStaticConstMacro(MaskImageDimension, unsigned int,
                        3);
    typedef vnl_vector<float> VectorType;
    typedef vnl_matrix<float> MatrixType;
    /** Set the mask image */
    void SetMaskInput(const TMaskImage *input) {
        // Process object is not const-correct so the const casting is required.
        this->SetNthInput( 1, const_cast< TMaskImage * >( input ) );
    }
    /** Get the label image */
    // NOTE(review): the mask is stored at input index 1 by SetMaskInput, but
    // this getter reads input index 0 (the primary image) — confirm against
    // the .txx implementation whether index 0 is intended here.
    const MaskImageType * GetMaskInput() const {
        return itkDynamicCastInDebugMode< MaskImageType * >( const_cast< DataObject * >( this->ProcessObject::GetInput(0) ) );
    }
    /** Region means after partial volume correction (filled by GenerateData). */
    VectorType GetCorrectedMeans() const {
        return this->m_vecRegMeansPVCorr;
    }
    /** The GTM matrix computed during correction (filled by GenerateData). */
    MatrixType GetMatrix() const {
        return this->m_matGTM;
    }
    /** PSF as a 3-component vector; named m_vecVariance, so presumably the
        per-axis Gaussian variance used by the blurring filter — see .txx. */
    void SetPSF(ITKVectorType vec) {
        this->m_vecVariance = vec;
    }
    ITKVectorType GetPSF() {
        return this->m_vecVariance;
    }
    /** Number of correction iterations to perform. */
    void SetIterations( unsigned int nIters ) {
        this->m_nIterations = nIters;
    }
    /** Enables verbose progress output. */
    void SetVerbose( bool bVerbose ) {
        this->m_bVerbose = bVerbose;
    }
protected:
    STCPVCImageFilter();
    ~STCPVCImageFilter() {};
    /** Does the real work. */
    virtual void GenerateData() ITK_OVERRIDE;
    VectorType m_vecRegMeansPVCorr;
    MatrixType m_matGTM;
    ITKVectorType m_vecVariance;
    unsigned int m_nIterations;
    bool m_bVerbose;
private:
    STCPVCImageFilter(const Self &); //purposely not implemented
    void operator=(const Self &); //purposely not implemented
};
} //namespace petpvc
#ifndef ITK_MANUAL_INSTANTIATION
#include "petpvcSTCPVCImageFilter.txx"
#endif
#endif // __PETPVCSTCIMAGEFILTER_H
|
UCL/PETPVC
|
lib/petpvcSTCPVCImageFilter.h
|
C
|
apache-2.0
| 6,237 |
#
# actions.py: routines that actually run the svn client.
#
# Subversion is a tool for revision control.
# See http://subversion.tigris.org for more information.
#
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
######################################################################
import os, shutil, re, sys, errno
import difflib, pprint
import xml.parsers.expat
from xml.dom.minidom import parseString
import svntest
from svntest import main, verify, tree, wc
from svntest import Failure
def no_sleep_for_timestamps():
  """Set the env var that tells the svn client not to sleep for timestamp
  resolution (a test-suite speedup; consumed by the svn binaries)."""
  os.environ.update(
    {'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS': 'yes'})
def do_sleep_for_timestamps():
  """Re-enable the svn client's sleep for timestamp resolution (undoes
  no_sleep_for_timestamps)."""
  os.environ.update(
    {'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS': 'no'})
def no_relocate_validation():
  """Set the env var that disables the svn client's relocate validation."""
  os.environ.update(
    {'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION': 'yes'})
def do_relocate_validation():
  """Re-enable the svn client's relocate validation (undoes
  no_relocate_validation)."""
  os.environ.update(
    {'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION': 'no'})
def setup_pristine_greek_repository():
  """Create the pristine repository and 'svn import' the greek tree"""
  # these directories don't exist out of the box, so we may have to create them
  if not os.path.exists(main.general_wc_dir):
    os.makedirs(main.general_wc_dir)
  if not os.path.exists(main.general_repo_dir):
    os.makedirs(main.general_repo_dir) # this also creates all the intermediate dirs
  # If there's no pristine repos, create one.
  if not os.path.exists(main.pristine_greek_repos_dir):
    main.create_repos(main.pristine_greek_repos_dir)
    # if this is dav, gives us access rights to import the greek tree.
    if main.is_ra_type_dav():
      authz_file = os.path.join(main.work_dir, "authz")
      main.file_write(authz_file, "[/]\n* = rw\n")
    # dump the greek tree to disk.
    main.greek_state.write_to_disk(main.greek_dump_dir)
    # import the greek tree, using l:foo/p:bar
    ### todo: svn should not be prompting for auth info when using
    ### repositories with no auth/auth requirements
    exit_code, output, errput = main.run_svn(None, 'import', '-m',
                                             'Log message for revision 1.',
                                             main.greek_dump_dir,
                                             main.pristine_greek_repos_url)
    # check for any errors from the import
    if len(errput):
      display_lines("Errors during initial 'svn import':",
                    'STDERR', None, errput)
      sys.exit(1)
    # verify the printed output of 'svn import'.
    # The last line should read "Committed revision 1." (or "Imported ...").
    lastline = output.pop().strip()
    match = re.search("(Committed|Imported) revision [0-9]+.", lastline)
    if not match:
      print("ERROR: import did not succeed, while creating greek repos.")
      print("The final line from 'svn import' was:")
      print(lastline)
      sys.exit(1)
    output_tree = wc.State.from_commit(output)
    expected_output_tree = main.greek_state.copy(main.greek_dump_dir)
    expected_output_tree.tweak(verb='Adding',
                               contents=None)
    try:
      expected_output_tree.compare_and_display('output', output_tree)
    except tree.SVNTreeUnequal:
      verify.display_trees("ERROR: output of import command is unexpected.",
                           "OUTPUT TREE",
                           expected_output_tree.old_tree(),
                           output_tree.old_tree())
      sys.exit(1)
    # Finally, disallow any changes to the "pristine" repos.
    # Installing always-failing hooks means any accidental commit,
    # lock, or revprop change against the pristine repos errors out.
    error_msg = "Don't modify the pristine repository"
    create_failing_hook(main.pristine_greek_repos_dir, 'start-commit', error_msg)
    create_failing_hook(main.pristine_greek_repos_dir, 'pre-lock', error_msg)
    create_failing_hook(main.pristine_greek_repos_dir, 'pre-revprop-change', error_msg)
######################################################################
def guarantee_empty_repository(path):
  """Guarantee that a local svn repository exists at PATH, containing
  nothing."""
  # Refuse to clobber the shared pristine repository.
  if path == main.pristine_greek_repos_dir:
    print("ERROR: attempt to overwrite the pristine repos! Aborting.")
    sys.exit(1)
  # Wipe whatever is already there, then create a fresh empty repository.
  main.safe_rmtree(path)
  main.create_repos(path)
# Used by every test, so that they can run independently of one
# another. Every time this routine is called, it recursively copies
# the `pristine repos' to a new location.
# Note: make sure setup_pristine_greek_repository was called once before
# using this function.
def guarantee_greek_repository(path):
"""Guarantee that a local svn repository exists at PATH, containing
nothing but the greek-tree at revision 1."""
if path == main.pristine_greek_repos_dir:
print("ERROR: attempt to overwrite the pristine repos! Aborting.")
sys.exit(1)
# copy the pristine repository to PATH.
main.safe_rmtree(path)
if main.copy_repos(main.pristine_greek_repos_dir, path, 1):
print("ERROR: copying repository failed.")
sys.exit(1)
# make the repos world-writeable, for mod_dav_svn's sake.
main.chmod_tree(path, 0666, 0666)
def run_and_verify_atomic_ra_revprop_change(message,
                                            expected_stdout,
                                            expected_stderr,
                                            expected_exit,
                                            url, revision, propname,
                                            old_propval, propval,
                                            want_error):
  """Run atomic-ra-revprop-change helper and check its output and exit code.
  Transforms OLD_PROPVAL and PROPVAL into a skel.
  For HTTP, the default HTTP library is used."""
  # Skel property-list keys understood by the helper binary.
  KEY_OLD_PROPVAL = "old_value_p"
  KEY_NEW_PROPVAL = "value"
  # A skel "atom" is the word's length followed by the word itself.
  def skel_make_atom(word):
    return "%d %s" % (len(word), word)
  # One NAME/VALUE pair of the proplist skel; a None value contributes
  # nothing (the key is omitted entirely, not given an empty value).
  def make_proplist_skel_part(nick, val):
    if val is None:
      return ""
    else:
      return "%s %s" % (skel_make_atom(nick), skel_make_atom(val))
  skel = "( %s %s )" % (make_proplist_skel_part(KEY_OLD_PROPVAL, old_propval),
                        make_proplist_skel_part(KEY_NEW_PROPVAL, propval))
  exit_code, out, err = main.run_atomic_ra_revprop_change(url, revision,
                                                          propname, skel,
                                                          want_error)
  # Check stdout/stderr first, then the exit code.
  verify.verify_outputs("Unexpected output", out, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def run_and_verify_svnlook(message, expected_stdout,
                           expected_stderr, *varargs):
  """Wrapper for run_and_verify_svnlook2 that derives the expected exit
  code: 1 when stderr output is expected, 0 otherwise."""
  if expected_stderr is not None and expected_stderr != []:
    expected_exit = 1
  else:
    expected_exit = 0
  return run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
                                 expected_exit, *varargs)
def run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
                            expected_exit, *varargs):
  """Run svnlook with VARARGS; verify its stdout/stderr against the
  expected values and its exit code against EXPECTED_EXIT."""
  code, stdout_lines, stderr_lines = main.run_svnlook(*varargs)
  verify.verify_outputs("Unexpected output", stdout_lines, stderr_lines,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, code, expected_exit)
  return code, stdout_lines, stderr_lines
def run_and_verify_svnadmin(message, expected_stdout,
                            expected_stderr, *varargs):
  """Wrapper for run_and_verify_svnadmin2 that derives the expected exit
  code: 1 when stderr output is expected, 0 otherwise."""
  if expected_stderr is not None and expected_stderr != []:
    expected_exit = 1
  else:
    expected_exit = 0
  return run_and_verify_svnadmin2(message, expected_stdout, expected_stderr,
                                  expected_exit, *varargs)
def run_and_verify_svnadmin2(message, expected_stdout, expected_stderr,
                             expected_exit, *varargs):
  """Run svnadmin with VARARGS; verify its stdout/stderr against the
  expected values and its exit code against EXPECTED_EXIT."""
  code, stdout_lines, stderr_lines = main.run_svnadmin(*varargs)
  verify.verify_outputs("Unexpected output", stdout_lines, stderr_lines,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, code, expected_exit)
  return code, stdout_lines, stderr_lines
def run_and_verify_svnversion(message, wc_dir, trail_url,
                              expected_stdout, expected_stderr, *varargs):
  """Wrapper for run_and_verify_svnversion2 that derives the expected
  exit code: 1 when stderr output is expected, 0 otherwise."""
  if expected_stderr is not None and expected_stderr != []:
    expected_exit = 1
  else:
    expected_exit = 0
  return run_and_verify_svnversion2(message, wc_dir, trail_url,
                                    expected_stdout, expected_stderr,
                                    expected_exit, *varargs)
def run_and_verify_svnversion2(message, wc_dir, trail_url,
                               expected_stdout, expected_stderr,
                               expected_exit, *varargs):
  """Run svnversion on WC_DIR (with TRAIL_URL appended when given) and
  check its stdout/stderr and exit code against the expected values."""
  # Only pass the trailing URL argument when the caller supplied one.
  if trail_url is None:
    cmd_args = (wc_dir,)
  else:
    cmd_args = (wc_dir, trail_url)
  code, stdout_lines, stderr_lines = main.run_svnversion(*(cmd_args + varargs))
  verify.verify_outputs("Unexpected output", stdout_lines, stderr_lines,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, code, expected_exit)
  return code, stdout_lines, stderr_lines
def run_and_verify_svn(message, expected_stdout, expected_stderr, *varargs):
  """Wrapper for run_and_verify_svn2 that derives the expected exit
  code: 1 when stderr output is expected, 0 otherwise.  EXPECTED_STDERR
  may be a list of lines or a verify.ExpectedOutput instance."""
  expected_exit = 0
  if expected_stderr is not None:
    if isinstance(expected_stderr, verify.ExpectedOutput):
      stderr_expected = not expected_stderr.matches([])
    else:
      stderr_expected = (expected_stderr != [])
    if stderr_expected:
      expected_exit = 1
  return run_and_verify_svn2(message, expected_stdout, expected_stderr,
                             expected_exit, *varargs)
def run_and_verify_svn2(message, expected_stdout, expected_stderr,
                        expected_exit, *varargs):
  """Invoke main.run_svn() with *VARARGS and verify the results.

  Returns (exit_code, stdout_lines, stderr_lines), the latter two as
  lists of lines including terminators.  EXPECTED_STDOUT and
  EXPECTED_STDERR are converted to verify.ExpectedOutput instances as
  needed (a list of strings becomes a vanilla ExpectedOutput; a single
  string becomes a RegexOutput matching every stdout line or any stderr
  line; an ExpectedOutput instance is used unchanged) and compared via
  compare_and_display_lines() under MESSAGE and a per-stream label.

  EXPECTED_STDOUT may be None to skip the stdout check; EXPECTED_STDERR
  must not be None.  After the output checks the exit code is compared
  against EXPECTED_EXIT.  Raises Failure on any mismatch."""
  if expected_stderr is None:
    raise verify.SVNIncorrectDatatype("expected_stderr must not be None")
  # Decide whether svn is expected to produce stderr output; run_svn()
  # wants None when no error is anticipated.
  if isinstance(expected_stderr, verify.ExpectedOutput):
    expecting_stderr = not expected_stderr.matches([])
  else:
    expecting_stderr = (expected_stderr != [])
  want_err = None
  if expecting_stderr:
    want_err = True
  exit_code, out, err = main.run_svn(want_err, *varargs)
  verify.verify_outputs(message, out, err, expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def run_and_verify_load(repo_dir, dump_file_content,
                        bypass_prop_validation = False):
  """Feed DUMP_FILE_CONTENT (a list of lines) to 'svnadmin load' for
  REPO_DIR and report any errors.  When BYPASS_PROP_VALIDATION is set,
  pass --bypass-prop-validation to svnadmin."""
  if not isinstance(dump_file_content, list):
    raise TypeError("dump_file_content argument should have list type")
  expected_stderr = []
  # Assemble the svnadmin argument list once instead of duplicating the
  # run_command_stdin() call for each flag combination.
  load_args = ['load', '--force-uuid', '--quiet']
  if bypass_prop_validation:
    load_args.append('--bypass-prop-validation')
  load_args.append(repo_dir)
  exit_code, output, errput = main.run_command_stdin(
    main.svnadmin_binary, expected_stderr, 0, 1, dump_file_content,
    *load_args)
  verify.verify_outputs("Unexpected stderr output", None, errput,
                        None, expected_stderr)
def run_and_verify_dump(repo_dir, deltas=False):
  """Run 'svnadmin dump' on REPO_DIR (with --deltas when DELTAS is set),
  report any errors, and return the dump content as a list of lines."""
  dump_args = ['dump']
  if deltas:
    dump_args.append('--deltas')
  dump_args.append(repo_dir)
  exit_code, output, errput = main.run_svnadmin(*dump_args)
  # A dump always produces both stdout (the dump) and stderr (progress).
  verify.verify_outputs("Missing expected output(s)", output, errput,
                        verify.AnyOutput, verify.AnyOutput)
  return output
def run_and_verify_svnrdump(dumpfile_content, expected_stdout,
                            expected_stderr, expected_exit, *varargs):
  """Runs 'svnrdump dump|load' depending on dumpfile_content and
  reports any errors.  Returns the stdout lines."""
  exit_code, output, err = main.run_svnrdump(dumpfile_content, *varargs)
  # Since main.run_svnrdump() uses binary mode, normalize the stderr
  # line endings on Windows ourselves.
  if sys.platform == 'win32':
    err = [x.replace('\r\n', '\n') for x in err]
  # Ignore "W200007" warning lines on stderr.  Filter instead of
  # deleting by index: the old `del err[index]` inside a loop over a
  # copy used stale indexes once a line had been removed, so a second
  # matching warning caused the wrong line to be deleted.
  err = [line for line in err if not re.search("warning: W200007", line)]
  verify.verify_outputs("Unexpected output", output, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
  return output
def run_and_verify_svnmucc(message, expected_stdout, expected_stderr,
                           *varargs):
  """Wrapper for run_and_verify_svnmucc2 that derives the expected exit
  code: 1 when stderr output is expected, 0 otherwise."""
  if expected_stderr is not None and expected_stderr != []:
    expected_exit = 1
  else:
    expected_exit = 0
  return run_and_verify_svnmucc2(message, expected_stdout, expected_stderr,
                                 expected_exit, *varargs)
def run_and_verify_svnmucc2(message, expected_stdout, expected_stderr,
                            expected_exit, *varargs):
  """Run svnmucc with VARARGS; verify its stdout/stderr against the
  expected values and its exit code against EXPECTED_EXIT."""
  code, stdout_lines, stderr_lines = main.run_svnmucc(*varargs)
  verify.verify_outputs("Unexpected output", stdout_lines, stderr_lines,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, code, expected_exit)
  return code, stdout_lines, stderr_lines
def load_repo(sbox, dumpfile_path = None, dump_str = None,
              bypass_prop_validation = False):
  """Load a dump into SBOX's repository and check out a working copy.

  The dump is DUMP_STR if given, otherwise the contents of the file at
  DUMPFILE_PATH.  Returns the dump text that was loaded."""
  if not dump_str:
    # Read the dump from disk, closing the handle instead of leaking it
    # (the old code called open(...).read() and never closed the file).
    dump_fp = open(dumpfile_path, "rb")
    try:
      dump_str = dump_fp.read()
    finally:
      dump_fp.close()
  # Create a virgin repos and working copy
  main.safe_rmtree(sbox.repo_dir, 1)
  main.safe_rmtree(sbox.wc_dir, 1)
  main.create_repos(sbox.repo_dir)
  # Load the mergetracking dumpfile into the repos, and check it out the repo
  run_and_verify_load(sbox.repo_dir, dump_str.splitlines(True),
                      bypass_prop_validation)
  run_and_verify_svn(None, None, [], "co", sbox.repo_url, sbox.wc_dir)
  return dump_str
def expected_noop_update_output(rev):
  """Return an ExpectedOutput object describing what we'd expect to
  see from an update to revision REV that was effectively a no-op (no
  server changes transmitted)."""
  # A no-op update prints only the "Updating" header and the final
  # "At revision N." line.
  pattern = "Updating '.*':|At revision %d." % (rev)
  return verify.createExpectedOutput(pattern, "no-op update")
######################################################################
# Subversion Actions
#
# These are all routines that invoke 'svn' in particular ways, and
# then verify the results by comparing expected trees with actual
# trees.
#
def run_and_verify_checkout2(do_remove,
                             URL, wc_dir_name, output_tree, disk_tree,
                             singleton_handler_a = None,
                             a_baton = None,
                             singleton_handler_b = None,
                             b_baton = None,
                             *args):
  """Checkout the URL into a new directory WC_DIR_NAME. *ARGS are any
  extra optional args to the checkout subcommand.
  The subcommand output will be verified against OUTPUT_TREE,
  and the working copy itself will be verified against DISK_TREE.
  For the latter comparison, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details. Return if successful, raise
  on failure.
  WC_DIR_NAME is deleted if DO_REMOVE is True.
  """
  # Accept either wc.State objects or old-style trees; normalize to the
  # old-style tree form used by tree.compare_trees().
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  if isinstance(disk_tree, wc.State):
    disk_tree = disk_tree.old_tree()
  # Remove dir if it's already there, unless this is a forced checkout.
  # In that case assume we want to test a forced checkout's toleration
  # of obstructing paths.
  if do_remove:
    main.safe_rmtree(wc_dir_name)
  # Checkout and make a tree of the output, using l:foo/p:bar
  ### todo: svn should not be prompting for auth info when using
  ### repositories with no auth/auth requirements
  exit_code, output, errput = main.run_svn(None, 'co',
                                           URL, wc_dir_name, *args)
  actual = tree.build_tree_from_checkout(output)
  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeUnequal:
    # Dump the actual tree as a script so failures can be reproduced.
    print("ACTUAL OUTPUT TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
  # Create a tree by scanning the working copy
  actual = tree.build_tree_from_wc(wc_dir_name)
  # Verify expected disk against actual disk.
  try:
    tree.compare_trees("disk", actual, disk_tree,
                       singleton_handler_a, a_baton,
                       singleton_handler_b, b_baton)
  except tree.SVNTreeUnequal:
    print("ACTUAL DISK TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
def run_and_verify_checkout(URL, wc_dir_name, output_tree, disk_tree,
                            singleton_handler_a = None,
                            a_baton = None,
                            singleton_handler_b = None,
                            b_baton = None,
                            *args):
  """Same as run_and_verify_checkout2(), but without the DO_REMOVE arg.
  WC_DIR_NAME is deleted if present unless the '--force' option is passed
  in *ARGS."""
  # A forced checkout is expected to tolerate obstructing paths, so the
  # target directory is only cleared for non-forced checkouts.
  do_remove = '--force' not in args
  return run_and_verify_checkout2(do_remove,
                                  URL, wc_dir_name, output_tree, disk_tree,
                                  singleton_handler_a, a_baton,
                                  singleton_handler_b, b_baton,
                                  *args)
def run_and_verify_export(URL, export_dir_name, output_tree, disk_tree,
                          *args):
  """Export the URL into a new directory WC_DIR_NAME.
  The subcommand output will be verified against OUTPUT_TREE,
  and the exported copy itself will be verified against DISK_TREE.
  Return if successful, raise on failure.
  """
  # Unlike run_and_verify_checkout2, this insists on wc.State inputs.
  assert isinstance(output_tree, wc.State)
  assert isinstance(disk_tree, wc.State)
  disk_tree = disk_tree.old_tree()
  output_tree = output_tree.old_tree()
  # Export and make a tree of the output, using l:foo/p:bar
  ### todo: svn should not be prompting for auth info when using
  ### repositories with no auth/auth requirements
  exit_code, output, errput = main.run_svn(None, 'export',
                                           URL, export_dir_name, *args)
  actual = tree.build_tree_from_checkout(output)
  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeUnequal:
    print("ACTUAL OUTPUT TREE:")
    tree.dump_tree_script(actual, export_dir_name + os.sep)
    raise
  # Create a tree by scanning the working copy. Don't ignore
  # the .svn directories so that we generate an error if they
  # happen to show up.
  actual = tree.build_tree_from_wc(export_dir_name, ignore_svn=False)
  # Verify expected disk against actual disk.
  try:
    tree.compare_trees("disk", actual, disk_tree)
  except tree.SVNTreeUnequal:
    print("ACTUAL DISK TREE:")
    tree.dump_tree_script(actual, export_dir_name + os.sep)
    raise
# run_and_verify_log_xml
class LogEntry:
  """One log entry parsed from 'svn log --xml' output.

  REVISION is the revision number; CHANGED_PATHS maps changed paths to
  their change data; REVPROPS maps revision property names (including
  the svn: pseudo-props author/log/date) to values.  Omitted dict
  arguments default to fresh empty dicts."""
  def __init__(self, revision, changed_paths=None, revprops=None):
    self.revision = revision
    # Use identity comparison with None (PEP 8) rather than '==', which
    # would invoke __eq__ on arbitrary argument objects.
    if changed_paths is None:
      self.changed_paths = {}
    else:
      self.changed_paths = changed_paths
    if revprops is None:
      self.revprops = {}
    else:
      self.revprops = revprops
  def assert_changed_paths(self, changed_paths):
    """Assert that changed_paths is the same as this entry's changed_paths
    Raises svntest.Failure if not.
    """
    ### Checking of changed paths is not yet implemented.
    raise Failure('NOT IMPLEMENTED')
  def assert_revprops(self, revprops):
    """Assert that the dict revprops is the same as this entry's revprops.
    Raises svntest.Failure if not.
    """
    if self.revprops != revprops:
      # Show a unified diff of the pretty-printed dicts for debugging.
      raise Failure('\n' + '\n'.join(difflib.ndiff(
            pprint.pformat(revprops).splitlines(),
            pprint.pformat(self.revprops).splitlines())))
class LogParser:
  # Expat-based parser turning 'svn log --xml' output into LogEntry
  # objects.  Element handlers are dispatched dynamically by name
  # (handle_start_element looks up '<name>_start' on self), so unknown
  # elements raise AttributeError unless registered via ignore_*().
  # NOTE: Python 2 'except ..., e' syntax below.
  def parse(self, data):
    """Return a list of LogEntrys parsed from the sequence of strings data.
    This is the only method of interest to callers.
    """
    try:
      for i in data:
        self.parser.Parse(i)
      # Signal end-of-input to expat.
      self.parser.Parse('', True)
    except xml.parsers.expat.ExpatError, e:
      raise verify.SVNUnexpectedStdout('%s\n%s\n' % (e, ''.join(data),))
    return self.entries
  def __init__(self):
    # for expat
    self.parser = xml.parsers.expat.ParserCreate()
    self.parser.StartElementHandler = self.handle_start_element
    self.parser.EndElementHandler = self.handle_end_element
    self.parser.CharacterDataHandler = self.handle_character_data
    # Ignore some things.
    self.ignore_elements('log', 'paths', 'path', 'revprops')
    self.ignore_tags('logentry_end', 'author_start', 'date_start', 'msg_start')
    # internal state
    self.cdata = []       # character data accumulated for current element
    self.property = None  # name of the revprop currently being parsed
    # the result
    self.entries = []
  def ignore(self, *args, **kwargs):
    # Shared no-op handler: discard any accumulated character data.
    del self.cdata[:]
  def ignore_tags(self, *args):
    # Bind each named handler slot to the no-op ignore() handler.
    for tag in args:
      setattr(self, tag, self.ignore)
  def ignore_elements(self, *args):
    # Ignore both the start and end events of each named element.
    for element in args:
      self.ignore_tags(element + '_start', element + '_end')
  # expat handlers
  def handle_start_element(self, name, attrs):
    # Dispatch to the per-element '<name>_start' method.
    getattr(self, name + '_start')(attrs)
  def handle_end_element(self, name):
    getattr(self, name + '_end')()
  def handle_character_data(self, data):
    self.cdata.append(data)
  # element handler utilities
  def use_cdata(self):
    # Consume and return the accumulated character data, stripped.
    result = ''.join(self.cdata).strip()
    del self.cdata[:]
    return result
  def svn_prop(self, name):
    # Record an svn: pseudo revprop on the entry being built.
    self.entries[-1].revprops['svn:' + name] = self.use_cdata()
  # element handlers
  def logentry_start(self, attrs):
    self.entries.append(LogEntry(int(attrs['revision'])))
  def author_end(self):
    self.svn_prop('author')
  def msg_end(self):
    self.svn_prop('log')
  def date_end(self):
    # svn:date could be anything, so just note its presence.
    self.cdata[:] = ['']
    self.svn_prop('date')
  def property_start(self, attrs):
    self.property = attrs['name']
  def property_end(self):
    self.entries[-1].revprops[self.property] = self.use_cdata()
def run_and_verify_log_xml(message=None, expected_paths=None,
                           expected_revprops=None, expected_stdout=None,
                           expected_stderr=None, args=[]):
  """Call run_and_verify_svn with log --xml and args (optional) as command
  arguments, and pass along message, expected_stdout, and expected_stderr.
  If message is None, pass the svn log command as message.
  expected_paths checking is not yet implemented.
  expected_revprops is an optional list of dicts, compared to each
  revision's revprops. The list must be in the same order the log entries
  come in. Any svn:date revprops in the dicts must be '' in order to
  match, as the actual dates could be anything.
  expected_paths and expected_revprops are ignored if expected_stdout or
  expected_stderr is specified.

  NOTE: ARGS has a mutable default; it is only read, never mutated here.
  """
  # Use identity comparison with None (PEP 8) throughout.
  if message is None:
    message = ' '.join(args)
  # We'll parse the output unless the caller specifies expected_stderr or
  # expected_stdout for run_and_verify_svn.
  parse = True
  if expected_stderr is None:
    expected_stderr = []
  else:
    parse = False
  if expected_stdout is not None:
    parse = False
  log_args = list(args)
  if expected_paths is not None:
    # Verbose output is needed to get changed-paths information.
    log_args.append('-v')
  (exit_code, stdout, stderr) = run_and_verify_svn(
    message, expected_stdout, expected_stderr,
    'log', '--xml', *log_args)
  if not parse:
    return
  entries = LogParser().parse(stdout)
  # Expected lists are positional: entry i is checked against element i.
  for index, entry in enumerate(entries):
    if expected_revprops is not None:
      entry.assert_revprops(expected_revprops[index])
    if expected_paths is not None:
      entry.assert_changed_paths(expected_paths[index])
def verify_update(actual_output,
                  actual_mergeinfo_output,
                  actual_elision_output,
                  wc_dir_name,
                  output_tree,
                  mergeinfo_output_tree,
                  elision_output_tree,
                  disk_tree,
                  status_tree,
                  singleton_handler_a=None,
                  a_baton=None,
                  singleton_handler_b=None,
                  b_baton=None,
                  check_props=False):
  """Verify update of WC_DIR_NAME.
  The subcommand output (found in ACTUAL_OUTPUT, ACTUAL_MERGEINFO_OUTPUT,
  and ACTUAL_ELISION_OUTPUT) will be verified against OUTPUT_TREE,
  MERGEINFO_OUTPUT_TREE, and ELISION_OUTPUT_TREE respectively (if any of
  these is provided, they may be None in which case a comparison is not
  done). The working copy itself will be verified against DISK_TREE (if
  provided), and the working copy's 'svn status' output will be verified
  against STATUS_TREE (if provided). (This is a good way to check that
  revision numbers were bumped.)
  Return if successful, raise on failure.
  For the comparison with DISK_TREE, pass SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B to tree.compare_trees -- see that function's doc
  string for more details. If CHECK_PROPS is set, then disk
  comparison will examine props."""
  # Accept either wc.State objects or old-style trees for every tree
  # argument; normalize everything to the old-style tree form.
  if isinstance(actual_output, wc.State):
    actual_output = actual_output.old_tree()
  if isinstance(actual_mergeinfo_output, wc.State):
    actual_mergeinfo_output = actual_mergeinfo_output.old_tree()
  if isinstance(actual_elision_output, wc.State):
    actual_elision_output = actual_elision_output.old_tree()
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  if isinstance(mergeinfo_output_tree, wc.State):
    mergeinfo_output_tree = mergeinfo_output_tree.old_tree()
  if isinstance(elision_output_tree, wc.State):
    elision_output_tree = elision_output_tree.old_tree()
  if isinstance(disk_tree, wc.State):
    disk_tree = disk_tree.old_tree()
  if isinstance(status_tree, wc.State):
    status_tree = status_tree.old_tree()
  # Verify actual output against expected output.
  if output_tree:
    try:
      tree.compare_trees("output", actual_output, output_tree)
    except tree.SVNTreeUnequal:
      print("ACTUAL OUTPUT TREE:")
      tree.dump_tree_script(actual_output, wc_dir_name + os.sep)
      raise
  # Verify actual mergeinfo recording output against expected output.
  if mergeinfo_output_tree:
    try:
      tree.compare_trees("mergeinfo_output", actual_mergeinfo_output,
                         mergeinfo_output_tree)
    except tree.SVNTreeUnequal:
      print("ACTUAL MERGEINFO OUTPUT TREE:")
      tree.dump_tree_script(actual_mergeinfo_output,
                            wc_dir_name + os.sep)
      raise
  # Verify actual mergeinfo elision output against expected output.
  if elision_output_tree:
    try:
      tree.compare_trees("elision_output", actual_elision_output,
                         elision_output_tree)
    except tree.SVNTreeUnequal:
      print("ACTUAL ELISION OUTPUT TREE:")
      tree.dump_tree_script(actual_elision_output,
                            wc_dir_name + os.sep)
      raise
  # Create a tree by scanning the working copy, and verify it
  if disk_tree:
    actual_disk = tree.build_tree_from_wc(wc_dir_name, check_props)
    try:
      tree.compare_trees("disk", actual_disk, disk_tree,
                         singleton_handler_a, a_baton,
                         singleton_handler_b, b_baton)
    except tree.SVNTreeUnequal:
      print("EXPECTED DISK TREE:")
      tree.dump_tree_script(disk_tree)
      print("ACTUAL DISK TREE:")
      tree.dump_tree_script(actual_disk)
      raise
  # Verify via 'status' command too, if possible.
  if status_tree:
    run_and_verify_status(wc_dir_name, status_tree)
def verify_disk(wc_dir_name, disk_tree, check_props=False):
  """Verify WC_DIR_NAME against DISK_TREE. If CHECK_PROPS is set,
  the comparison will examine props. Returns if successful, raises on
  failure."""
  # Delegate to verify_update with only the disk-tree comparison enabled.
  verify_update(None, None, None, wc_dir_name, None, None, None, disk_tree,
                None, check_props=check_props)
def run_and_verify_update(wc_dir_name,
                          output_tree, disk_tree, status_tree,
                          error_re_string = None,
                          singleton_handler_a = None,
                          a_baton = None,
                          singleton_handler_b = None,
                          b_baton = None,
                          check_props = False,
                          *args):
  """Update WC_DIR_NAME. *ARGS are any extra optional args to the
  update subcommand. NOTE: If *ARGS is specified at all, explicit
  target paths must be passed in *ARGS as well (or a default `.' will
  be chosen by the 'svn' binary). This allows the caller to update
  many items in a single working copy dir, but still verify the entire
  working copy dir.
  If ERROR_RE_STRING, the update must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.
  Else if ERROR_RE_STRING is None, then:
  If OUTPUT_TREE is not None, the subcommand output will be verified
  against OUTPUT_TREE. If DISK_TREE is not None, the working copy
  itself will be verified against DISK_TREE. If STATUS_TREE is not
  None, the 'svn status' output will be verified against STATUS_TREE.
  (This is a good way to check that revision numbers were bumped.)
  For the DISK_TREE verification, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.
  If CHECK_PROPS is set, then disk comparison will examine props.
  Return if successful, raise on failure."""
  # Update and make a tree of the output.
  if len(args):
    # Caller supplied explicit targets; WC_DIR_NAME is only verified.
    exit_code, output, errput = main.run_svn(error_re_string, 'up', *args)
  else:
    exit_code, output, errput = main.run_svn(error_re_string,
                                             'up', wc_dir_name,
                                             *args)
  if error_re_string:
    # An error was expected: succeed as soon as any stderr line matches.
    rm = re.compile(error_re_string)
    for line in errput:
      match = rm.search(line)
      if match:
        return
    raise main.SVNUnmatchedError
  actual = wc.State.from_checkout(output)
  verify_update(actual, None, None, wc_dir_name,
                output_tree, None, None, disk_tree, status_tree,
                singleton_handler_a, a_baton,
                singleton_handler_b, b_baton,
                check_props)
def run_and_parse_info(*args):
  """Run 'svn info ARGS' and parse its output into a list of dicts,
  one dict per reported node."""
  # the returned array
  all_infos = []
  # per-target variables
  iter_info = {}            # dict being built for the current node
  prev_key = None           # last key seen, for continuation lines
  lock_comment_lines = 0    # remaining lines of a multi-line lock comment
  lock_comments = []
  exit_code, output, errput = main.run_svn(None, 'info', *args)
  for line in output:
    line = line[:-1] # trim '\n'
    if lock_comment_lines > 0:
      # mop up any lock comment lines
      lock_comments.append(line)
      lock_comment_lines = lock_comment_lines - 1
      if lock_comment_lines == 0:
        # Comment complete: store it under the "(N lines)" key seen earlier.
        iter_info[prev_key] = lock_comments
    elif len(line) == 0:
      # separator line between items
      all_infos.append(iter_info)
      iter_info = {}
      prev_key = None
      lock_comment_lines = 0
      lock_comments = []
    elif line[0].isspace():
      # continuation line (for tree conflicts)
      iter_info[prev_key] += line[1:]
    else:
      # normal line
      key, value = line.split(':', 1)
      if re.search(' \(\d+ lines?\)$', key):
        # numbered continuation lines
        match = re.match('^(.*) \((\d+) lines?\)$', key)
        key = match.group(1)
        lock_comment_lines = int(match.group(2))
      elif len(value) > 1:
        # normal normal line
        iter_info[key] = value[1:]
      else:
        ### originally added for "Tree conflict:\n" lines;
        ### tree-conflicts output format has changed since then
        # continuation lines are implicit (prefixed by whitespace)
        iter_info[key] = ''
      prev_key = key
  return all_infos
def run_and_verify_info(expected_infos, *args):
  """Run 'svn info' with the arguments in *ARGS and verify the results
  against expected_infos. The latter should be a list of dicts, one dict
  per reported node, in the order in which the 'Path' fields of the output
  will appear after sorting them as Python strings. (The dicts in
  EXPECTED_INFOS, however, need not have a 'Path' key.)
  In the dicts, each key is the before-the-colon part of the 'svn info' output,
  and each value is either None (meaning that the key should *not* appear in
  the 'svn info' output) or a regex matching the output value. Output lines
  not matching a key in the dict are ignored.
  Return if successful, raise on failure."""
  actual_infos = run_and_parse_info(*args)
  # Sort by path so the order matches the documented EXPECTED_INFOS order.
  actual_infos.sort(key=lambda info: info['Path'])
  try:
    # zip() won't complain, so check this manually
    if len(actual_infos) != len(expected_infos):
      raise verify.SVNUnexpectedStdout(
        "Expected %d infos, found %d infos"
        % (len(expected_infos), len(actual_infos)))
    for actual, expected in zip(actual_infos, expected_infos):
      # compare dicts
      for key, value in expected.items():
        assert ':' not in key # caller passed impossible expectations?
        if value is None and key in actual:
          raise main.SVNLineUnequal("Found unexpected key '%s' with value '%s'"
                                    % (key, actual[key]))
        if value is not None and key not in actual:
          raise main.SVNLineUnequal("Expected key '%s' (with value '%s') "
                                    "not found" % (key, value))
        if value is not None and not re.match(value, actual[key]):
          raise verify.SVNUnexpectedStdout("Values of key '%s' don't match:\n"
                                           "  Expected: '%s' (regex)\n"
                                           "  Found:    '%s' (string)\n"
                                           % (key, value, actual[key]))
  except:
    # Bare except is deliberate: dump both data sets for diagnosis on any
    # failure above, then re-raise the original exception unchanged.
    sys.stderr.write("Bad 'svn info' output:\n"
                     "  Received: %s\n"
                     "  Expected: %s\n"
                     % (actual_infos, expected_infos))
    raise
def run_and_verify_merge(dir, rev1, rev2, url1, url2,
                         output_tree,
                         mergeinfo_output_tree,
                         elision_output_tree,
                         disk_tree, status_tree, skip_tree,
                         error_re_string = None,
                         singleton_handler_a = None,
                         a_baton = None,
                         singleton_handler_b = None,
                         b_baton = None,
                         check_props = False,
                         dry_run = True,
                         *args):
  """Run 'svn merge URL1@REV1 URL2@REV2 DIR' if URL2 is not None
  (for a three-way merge between URLs and WC).
  If URL2 is None, run 'svn merge -rREV1:REV2 URL1 DIR'. If both REV1
  and REV2 are None, leave off the '-r' argument.
  If ERROR_RE_STRING, the merge must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.
  Else if ERROR_RE_STRING is None, then:
  The subcommand output will be verified against OUTPUT_TREE. Output
  related to mergeinfo notifications will be verified against
  MERGEINFO_OUTPUT_TREE if that is not None. Output related to mergeinfo
  elision will be verified against ELISION_OUTPUT_TREE if that is not None.
  The working copy itself will be verified against DISK_TREE. If optional
  STATUS_TREE is given, then 'svn status' output will be compared. The
  'skipped' merge output will be compared to SKIP_TREE.
  For the DISK_TREE verification, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.
  If CHECK_PROPS is set, then disk comparison will examine props.
  If DRY_RUN is set then a --dry-run merge will be carried out first and
  the output compared with that of the full merge.
  Return if successful, raise on failure.
  *ARGS are any extra optional args to the merge subcommand.
  NOTE: If *ARGS is specified at all, an explicit target path must be passed
  in *ARGS as well. This allows the caller to merge into single items inside
  the working copy, but still verify the entire working copy dir. """
  # Build the 'svn merge' command line described in the docstring.
  merge_command = [ "merge" ]
  if url2:
    merge_command.extend((url1 + "@" + str(rev1), url2 + "@" + str(rev2)))
  else:
    if not (rev1 is None and rev2 is None):
      merge_command.append("-r" + str(rev1) + ":" + str(rev2))
    merge_command.append(url1)
  if len(args) == 0:
    # No explicit targets from the caller: merge into DIR itself.
    merge_command.append(dir)
  merge_command = tuple(merge_command)
  if dry_run:
    # Run a --dry-run merge first and verify that it leaves the working
    # copy on disk completely untouched.
    pre_disk = tree.build_tree_from_wc(dir)
    dry_run_command = merge_command + ('--dry-run',)
    dry_run_command = dry_run_command + args
    exit_code, out_dry, err_dry = main.run_svn(error_re_string,
                                               *dry_run_command)
    post_disk = tree.build_tree_from_wc(dir)
    try:
      tree.compare_trees("disk", post_disk, pre_disk)
    except tree.SVNTreeError:
      print("=============================================================")
      print("Dry-run merge altered working copy")
      print("=============================================================")
      raise
  # Update and make a tree of the output.
  merge_command = merge_command + args
  exit_code, out, err = main.run_svn(error_re_string, *merge_command)
  if error_re_string:
    # Anchor the caller's regex loosely so it may match anywhere in a line.
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
    return
  elif err:
    raise verify.SVNUnexpectedStderr(err)
  # Split the output into that related to application of the actual diff
  # and that related to the recording of mergeinfo describing the merge.
  merge_diff_out = []
  mergeinfo_notification_out = []
  mergeinfo_elision_out = []
  mergeinfo_notifications = False
  elision_notifications = False
  # State machine over the output lines: '--- Recording'/'--- Eliding'
  # headers switch buckets; any merge header switches back to diff output.
  for line in out:
    if line.startswith('--- Recording'):
      mergeinfo_notifications = True
      elision_notifications = False
    elif line.startswith('--- Eliding'):
      mergeinfo_notifications = False
      elision_notifications = True
    elif line.startswith('--- Merging') or \
         line.startswith('--- Reverse-merging') or \
         line.startswith('Summary of conflicts') or \
         line.startswith('Skipped missing target'):
      mergeinfo_notifications = False
      elision_notifications = False
    if mergeinfo_notifications:
      mergeinfo_notification_out.append(line)
    elif elision_notifications:
      mergeinfo_elision_out.append(line)
    else:
      merge_diff_out.append(line)
  if dry_run and merge_diff_out != out_dry:
    # Due to the way ra_serf works, it's possible that the dry-run and
    # real merge operations did the same thing, but the output came in
    # a different order.  Let's see if maybe that's the case by comparing
    # the outputs as unordered sets rather than as lists.
    #
    # This now happens for other RA layers with modern APR because the
    # hash order now varies.
    #
    # The different orders of the real and dry-run merges may cause
    # the "Merging rX through rY into" lines to be duplicated a
    # different number of times in the two outputs.  The list-set
    # conversion removes duplicates so these differences are ignored.
    # It also removes "U some/path" duplicate lines.  Perhaps we
    # should avoid that?
    out_copy = set(merge_diff_out[:])
    out_dry_copy = set(out_dry[:])
    if out_copy != out_dry_copy:
      print("=============================================================")
      print("Merge outputs differ")
      print("The dry-run merge output:")
      for x in out_dry:
        sys.stdout.write(x)
      print("The full merge output:")
      for x in out:
        sys.stdout.write(x)
      print("=============================================================")
      raise main.SVNUnmatchedError
  # Handlers passed to tree.compare_trees for the skip-tree comparison:
  # a path present in SKIP_TREE but not actually skipped, or vice versa,
  # is a test failure.
  def missing_skip(a, b):
    print("=============================================================")
    print("Merge failed to skip: " + a.path)
    print("=============================================================")
    raise Failure
  def extra_skip(a, b):
    print("=============================================================")
    print("Merge unexpectedly skipped: " + a.path)
    print("=============================================================")
    raise Failure
  myskiptree = tree.build_tree_from_skipped(out)
  if isinstance(skip_tree, wc.State):
    skip_tree = skip_tree.old_tree()
  try:
    tree.compare_trees("skip", myskiptree, skip_tree,
                       extra_skip, None, missing_skip, None)
  except tree.SVNTreeUnequal:
    print("ACTUAL SKIP TREE:")
    tree.dump_tree_script(myskiptree, dir + os.sep)
    raise
  # Build states from the three output buckets and hand the rest of the
  # verification (output/mergeinfo/elision/disk/status) to verify_update.
  actual_diff = svntest.wc.State.from_checkout(merge_diff_out, False)
  actual_mergeinfo = svntest.wc.State.from_checkout(mergeinfo_notification_out,
                                                    False)
  actual_elision = svntest.wc.State.from_checkout(mergeinfo_elision_out,
                                                  False)
  verify_update(actual_diff, actual_mergeinfo, actual_elision, dir,
                output_tree, mergeinfo_output_tree, elision_output_tree,
                disk_tree, status_tree,
                singleton_handler_a, a_baton,
                singleton_handler_b, b_baton,
                check_props)
def run_and_verify_patch(dir, patch_path,
                         output_tree, disk_tree, status_tree, skip_tree,
                         error_re_string=None,
                         check_props=False,
                         dry_run=True,
                         *args):
  """Run 'svn patch patch_path DIR'.

  If ERROR_RE_STRING, 'svn patch' must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:
  The subcommand output will be verified against OUTPUT_TREE, and the
  working copy itself will be verified against DISK_TREE.  If optional
  STATUS_TREE is given, then 'svn status' output will be compared.
  The 'skipped' merge output will be compared to SKIP_TREE.

  If CHECK_PROPS is set, then disk comparison will examine props.

  If DRY_RUN is set then a --dry-run patch will be carried out first and
  the output compared with that of the full patch application.

  Returns if successful, raises on failure."""
  # Build the base ('patch', PATCH_PATH, DIR) command tuple; *args are
  # appended separately below so the dry run and the real run share it.
  patch_command = [ "patch" ]
  patch_command.append(patch_path)
  patch_command.append(dir)
  patch_command = tuple(patch_command)
  if dry_run:
    # Snapshot the working copy, apply with --dry-run, then verify the
    # on-disk tree was left untouched by the dry run.
    pre_disk = tree.build_tree_from_wc(dir)
    dry_run_command = patch_command + ('--dry-run',)
    dry_run_command = dry_run_command + args
    exit_code, out_dry, err_dry = main.run_svn(error_re_string,
                                               *dry_run_command)
    post_disk = tree.build_tree_from_wc(dir)
    try:
      tree.compare_trees("disk", post_disk, pre_disk)
    except tree.SVNTreeError:
      print("=============================================================")
      print("'svn patch --dry-run' altered working copy")
      print("=============================================================")
      raise
  # Update and make a tree of the output.
  patch_command = patch_command + args
  exit_code, out, err = main.run_svn(True, *patch_command)
  if error_re_string:
    # An error was expected: at least one stderr line must match.
    rm = re.compile(error_re_string)
    match = None
    for line in err:
      match = rm.search(line)
      if match:
        break
    if not match:
      raise main.SVNUnmatchedError
  elif err:
    # No error expected, yet stderr was produced: fail loudly.
    print("UNEXPECTED STDERR:")
    for x in err:
      sys.stdout.write(x)
    raise verify.SVNUnexpectedStderr
  if dry_run and out != out_dry:
    # APR hash order means the output order can vary, assume everything is OK
    # if only the order changes.
    out_dry_expected = svntest.verify.UnorderedOutput(out)
    verify.compare_and_display_lines('dry-run patch output not as expected',
                                     '', out_dry_expected, out_dry)
  def missing_skip(a, b):
    # Callback for compare_trees: a path that should have been skipped wasn't.
    print("=============================================================")
    print("'svn patch' failed to skip: " + a.path)
    print("=============================================================")
    raise Failure
  def extra_skip(a, b):
    # Callback for compare_trees: a path was skipped unexpectedly.
    print("=============================================================")
    print("'svn patch' unexpectedly skipped: " + a.path)
    print("=============================================================")
    raise Failure
  myskiptree = tree.build_tree_from_skipped(out)
  if isinstance(skip_tree, wc.State):
    skip_tree = skip_tree.old_tree()
  tree.compare_trees("skip", myskiptree, skip_tree,
                     extra_skip, None, missing_skip, None)
  mytree = tree.build_tree_from_checkout(out, 0)
  # when the expected output is a list, we want a line-by-line
  # comparison to happen instead of a tree comparison
  if (isinstance(output_tree, list)
      or isinstance(output_tree, verify.UnorderedOutput)):
    verify.verify_outputs(None, out, err, output_tree, error_re_string)
    output_tree = None
  verify_update(mytree, None, None, dir,
                output_tree, None, None, disk_tree, status_tree,
                check_props=check_props)
def run_and_verify_mergeinfo(error_re_string = None,
                             expected_output = [],
                             *args):
  """Run 'svn mergeinfo ARGS', and compare the result against
  EXPECTED_OUTPUT, a list of string representations of revisions
  expected in the output.  Raise an exception if an unexpected
  output is encountered.

  If ERROR_RE_STRING is given, the command must instead fail with a
  stderr line matching that regular expression."""
  mergeinfo_command = ["mergeinfo"]
  mergeinfo_command.extend(args)
  exit_code, out, err = main.run_svn(error_re_string, *mergeinfo_command)
  if error_re_string:
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
    return
  # Each output line looks like "r<rev>": drop the leading character and
  # trailing whitespace, discard empty lines, and sort for comparison.
  out = sorted([_f for _f in [x.rstrip()[1:] for x in out] if _f])
  # Sort a *copy* of the expected list.  The previous code called
  # expected_output.sort(), which mutated the caller's list and -- worse --
  # the shared mutable default argument [] across invocations.
  expected_output = sorted(expected_output)
  extra_out = []
  if out != expected_output:
    # Compute the symmetric difference for a helpful failure message.
    exp_hash = dict.fromkeys(expected_output)
    for rev in out:
      if rev in exp_hash:
        del(exp_hash[rev])
      else:
        extra_out.append(rev)
    extra_exp = list(exp_hash.keys())
    raise Exception("Unexpected 'svn mergeinfo' output:\n"
                    "  expected but not found: %s\n"
                    "  found but not expected: %s"
                    % (', '.join([str(x) for x in extra_exp]),
                       ', '.join([str(x) for x in extra_out])))
def run_and_verify_switch(wc_dir_name,
                          wc_target,
                          switch_url,
                          output_tree, disk_tree, status_tree,
                          error_re_string = None,
                          singleton_handler_a = None,
                          a_baton = None,
                          singleton_handler_b = None,
                          b_baton = None,
                          check_props = False,
                          *args):
  """Switch WC_TARGET (in working copy dir WC_DIR_NAME) to SWITCH_URL.

  If ERROR_RE_STRING, the switch must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:
  The subcommand output will be verified against OUTPUT_TREE, and the
  working copy itself will be verified against DISK_TREE.  If optional
  STATUS_TREE is given, then 'svn status' output will be
  compared.  (This is a good way to check that revision numbers were
  bumped.)

  For the DISK_TREE verification, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.

  If CHECK_PROPS is set, then disk comparison will examine props.

  Return if successful, raise on failure."""
  # Update and make a tree of the output.
  exit_code, output, errput = main.run_svn(error_re_string, 'switch',
                                           switch_url, wc_target, *args)
  if error_re_string:
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, errput, None, expected_err)
    return
  elif errput:
    # Bug fix: this previously raised with the undefined name 'err',
    # so an unexpected-stderr failure surfaced as a NameError instead
    # of reporting the actual stderr content.
    raise verify.SVNUnexpectedStderr(errput)
  actual = wc.State.from_checkout(output)
  verify_update(actual, None, None, wc_dir_name,
                output_tree, None, None, disk_tree, status_tree,
                singleton_handler_a, a_baton,
                singleton_handler_b, b_baton,
                check_props)
def process_output_for_commit(output):
  """Helper for run_and_verify_commit(), also used in the factory.

  Strip the trailing '(Committed|Imported) revision N.' line from OUTPUT
  (a list of lines), verifying that the commit succeeded, and drop the
  'Transmitting file data ...' progress line if present.  Any trailing
  'Removing/Removed external' lines are preserved (stripped of
  whitespace) at the end of the returned list.  OUTPUT is modified in
  place and returned."""
  def is_external_removal(line):
    return (line.startswith('Removing external')
            or line.startswith('Removed external'))
  final_line = ""
  removed_externals = []
  if output:
    # Walk backwards past any external-removal notifications to find the
    # line that should announce the committed revision.
    final_line = output.pop().strip()
    while output and is_external_removal(final_line):
      removed_externals.append(final_line)
      final_line = output.pop().strip()
  if not re.search("(Committed|Imported) revision [0-9]+.", final_line):
    print("ERROR: commit did not succeed.")
    print("The final line from 'svn ci' was:")
    print(final_line)
    raise main.SVNCommitFailure
  # The (new) last line may be the "Transmitting file data ..." progress
  # line, which must be ignored when building a tree.  Anything else is
  # real output and is kept.
  if output:
    candidate = output.pop()
    if not re.search("Transmitting file data.+", candidate):
      # Important output after all; restore it.
      output.append(candidate)
  if removed_externals:
    output.extend(removed_externals)
  return output
def run_and_verify_commit(wc_dir_name, output_tree, status_tree,
                          error_re_string = None,
                          *args):
  """Commit and verify results within working copy WC_DIR_NAME,
  sending ARGS to the commit subcommand.

  The subcommand output will be verified against OUTPUT_TREE.  If
  optional STATUS_TREE is given, then 'svn status' output will
  be compared.  (This is a good way to check that revision numbers
  were bumped.)

  If ERROR_RE_STRING is None, the commit must not exit with error.  If
  ERROR_RE_STRING is a string, the commit must exit with error, and
  the error message must match regular expression ERROR_RE_STRING.

  Return if successful, raise on failure."""
  # Accept wc.State objects as well as pre-built trees.
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  if isinstance(status_tree, wc.State):
    status_tree = status_tree.old_tree()
  # Commit.
  if '-m' not in args and '-F' not in args:
    # Caller supplied no log message; provide a dummy one.
    args = list(args) + ['-m', 'log msg']
  exit_code, output, errput = main.run_svn(error_re_string, 'ci',
                                           *args)
  if error_re_string:
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, errput, None, expected_err)
    return
  # Else not expecting error:
  # Convert the output into a tree.
  output = process_output_for_commit(output)
  actual = tree.build_tree_from_commit(output)
  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeError:
    verify.display_trees("Output of commit is unexpected",
                         "OUTPUT TREE", output_tree, actual)
    print("ACTUAL OUTPUT TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
  # Verify via 'status' command too, if possible.
  if status_tree:
    run_and_verify_status(wc_dir_name, status_tree)
# This function always passes '-q' to the status command, which
# suppresses the printing of any unversioned or nonexistent items.
def run_and_verify_status(wc_dir_name, output_tree,
                          singleton_handler_a = None,
                          a_baton = None,
                          singleton_handler_b = None,
                          b_baton = None):
  """Run 'status' on WC_DIR_NAME and compare it with the
  expected OUTPUT_TREE.  SINGLETON_HANDLER_A and SINGLETON_HANDLER_B will
  be passed to tree.compare_trees - see that function's doc string for
  more details.

  Returns on success, raises on failure."""
  if isinstance(output_tree, wc.State):
    # Remember the State object: it can additionally be compared against
    # an entries-based State at the end.
    output_state = output_tree
    output_tree = output_tree.old_tree()
  else:
    output_state = None
  exit_code, output, errput = main.run_svn(None, 'status', '-v', '-u', '-q',
                                           wc_dir_name)
  actual = tree.build_tree_from_status(output)
  # Verify actual output against expected output.
  try:
    tree.compare_trees("status", actual, output_tree,
                       singleton_handler_a, a_baton,
                       singleton_handler_b, b_baton)
  except tree.SVNTreeError:
    verify.display_trees(None, 'STATUS OUTPUT TREE', output_tree, actual)
    print("ACTUAL STATUS TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
  # if we have an output State, and we can/are-allowed to create an
  # entries-based State, then compare the two.
  if output_state:
    entries_state = wc.State.from_entries(wc_dir_name)
    if entries_state:
      tweaked = output_state.copy()
      tweaked.tweak_for_entries_compare()
      try:
        tweaked.compare_and_display('entries', entries_state)
      except tree.SVNTreeUnequal:
        ### do something more
        raise
# A variant of previous func, but doesn't pass '-q'. This allows us
# to verify unversioned or nonexistent items in the list.
def run_and_verify_unquiet_status(wc_dir_name, status_tree):
  """Run 'status' on WC_DIR_NAME and compare it with the
  expected STATUS_TREE.

  Returns on success, raises on failure."""
  if isinstance(status_tree, wc.State):
    status_tree = status_tree.old_tree()
  # Note: no '-q' here, so unversioned/nonexistent items appear too.
  exit_code, output, errput = main.run_svn(None, 'status', '-v',
                                           '-u', wc_dir_name)
  actual = tree.build_tree_from_status(output)
  # Verify actual output against expected output.
  try:
    tree.compare_trees("UNQUIET STATUS", actual, status_tree)
  except tree.SVNTreeError:
    print("ACTUAL UNQUIET STATUS TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
def run_and_verify_status_xml(expected_entries = [],
                              *args):
  """ Run 'status --xml' with arguments *ARGS.  If successful the output
  is parsed into an XML document and will be verified by comparing against
  EXPECTED_ENTRIES, a dict mapping entry paths to dicts of expected
  attribute values.  Raises Failure on stderr or mismatch.
  """
  exit_code, output, errput = run_and_verify_svn(None, None, [],
                                                 'status', '--xml', *args)
  if len(errput) > 0:
    raise Failure
  doc = parseString(''.join(output))
  entries = doc.getElementsByTagName('entry')
  def getText(nodelist):
    # Concatenate the text nodes of an element into a single string.
    rc = []
    for node in nodelist:
      if node.nodeType == node.TEXT_NODE:
        rc.append(node.data)
    return ''.join(rc)
  actual_entries = {}
  for entry in entries:
    wcstatus = entry.getElementsByTagName('wc-status')[0]
    commit = entry.getElementsByTagName('commit')
    author = entry.getElementsByTagName('author')
    rstatus = entry.getElementsByTagName('repos-status')
    # Mandatory fields first; optional ones are only added when present
    # in the XML so the dict comparison below stays exact.
    actual_entry = {'wcprops' : wcstatus.getAttribute('props'),
                    'wcitem' : wcstatus.getAttribute('item'),
                    }
    if wcstatus.hasAttribute('revision'):
      actual_entry['wcrev'] = wcstatus.getAttribute('revision')
    if (commit):
      actual_entry['crev'] = commit[0].getAttribute('revision')
    if (author):
      actual_entry['author'] = getText(author[0].childNodes)
    if (rstatus):
      actual_entry['rprops'] = rstatus[0].getAttribute('props')
      actual_entry['ritem'] = rstatus[0].getAttribute('item')
    actual_entries[entry.getAttribute('path')] = actual_entry
  if expected_entries != actual_entries:
    # Raise with a readable line diff of the two entry dictionaries.
    raise Failure('\n' + '\n'.join(difflib.ndiff(
          pprint.pformat(expected_entries).splitlines(),
          pprint.pformat(actual_entries).splitlines())))
def run_and_verify_diff_summarize_xml(error_re_string = [],
                                      expected_prefix = None,
                                      expected_paths = [],
                                      expected_items = [],
                                      expected_props = [],
                                      expected_kinds = [],
                                      *args):
  """Run 'diff --summarize --xml' with the arguments *ARGS, which should
  contain all arguments beyond for your 'diff --summarize --xml' omitting
  said arguments.  EXPECTED_PREFIX will store a "common" path prefix
  expected to be at the beginning of each summarized path.  If
  EXPECTED_PREFIX is None, then EXPECTED_PATHS will need to be exactly
  as 'svn diff --summarize --xml' will output.  If ERROR_RE_STRING, the
  command must exit with error, and the error message must match regular
  expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, the subcommand output will be parsed
  into an XML document and will then be verified by comparing the parsed
  output to the contents in the EXPECTED_PATHS, EXPECTED_ITEMS,
  EXPECTED_PROPS and EXPECTED_KINDS.  The three EXPECTED_* lists are
  indexed in parallel with EXPECTED_PATHS.  Returns on success, raises
  on failure."""
  exit_code, output, errput = run_and_verify_svn(None, None, error_re_string,
                                                 'diff', '--summarize',
                                                 '--xml', *args)
  # Return if errors are present since they were expected
  if len(errput) > 0:
    return
  doc = parseString(''.join(output))
  paths = doc.getElementsByTagName("path")
  items = expected_items
  kinds = expected_kinds
  for path in paths:
    modified_path = path.childNodes[0].data
    if (expected_prefix is not None
        and modified_path.find(expected_prefix) == 0):
      # Strip the common prefix (and the following separator) from the path.
      modified_path = modified_path.replace(expected_prefix, '')[1:].strip()
      # Workaround single-object diff
      if len(modified_path) == 0:
        modified_path = path.childNodes[0].data.split(os.sep)[-1]
    # From here on, we use '/' as path separator.
    if os.sep != "/":
      modified_path = modified_path.replace(os.sep, "/")
    if modified_path not in expected_paths:
      print("ERROR: %s not expected in the changed paths." % modified_path)
      raise Failure
    # Look up the expected item/kind/props for this path by its index.
    index = expected_paths.index(modified_path)
    expected_item = items[index]
    expected_kind = kinds[index]
    expected_prop = expected_props[index]
    actual_item = path.getAttribute('item')
    actual_kind = path.getAttribute('kind')
    actual_prop = path.getAttribute('props')
    if expected_item != actual_item:
      print("ERROR: expected: %s actual: %s" % (expected_item, actual_item))
      raise Failure
    if expected_kind != actual_kind:
      print("ERROR: expected: %s actual: %s" % (expected_kind, actual_kind))
      raise Failure
    if expected_prop != actual_prop:
      print("ERROR: expected: %s actual: %s" % (expected_prop, actual_prop))
      raise Failure
def run_and_verify_diff_summarize(output_tree, *args):
  """Run 'diff --summarize' with the arguments *ARGS.

  The subcommand output will be verified against OUTPUT_TREE.  Returns
  on success, raises on failure.
  """
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  exit_code, output, errput = main.run_svn(None, 'diff', '--summarize',
                                           *args)
  actual = tree.build_tree_from_diff_summarize(output)
  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeError:
    # Dump both trees and a script reproducing the actual one.
    verify.display_trees(None, 'DIFF OUTPUT TREE', output_tree, actual)
    print("ACTUAL DIFF OUTPUT TREE:")
    tree.dump_tree_script(actual)
    raise
def run_and_validate_lock(path, username):
  """`svn lock' the given path and validate the contents of the lock.
  Use the given username.  This is important because locks are
  user specific.  Raises Failure if any expected lock field is missing
  from the 'svn info' output."""
  comment = "Locking path:%s." % path
  # lock the path
  run_and_verify_svn(None, ".*locked by user", [], 'lock',
                     '--username', username,
                     '-m', comment, path)
  # Run info and check that we get the lock fields.
  exit_code, output, err = run_and_verify_svn(None, None, [],
                                              'info','-R',
                                              path)
  ### TODO: Leverage RegexOuput([...], match_all=True) here.
  # prepare the regexs to compare against
  token_re = re.compile(".*?Lock Token: opaquelocktoken:.*?", re.DOTALL)
  author_re = re.compile(".*?Lock Owner: %s\n.*?" % username, re.DOTALL)
  created_re = re.compile(".*?Lock Created:.*?", re.DOTALL)
  comment_re = re.compile(".*?%s\n.*?" % re.escape(comment), re.DOTALL)
  # join all output lines into one
  output = "".join(output)
  # Fail even if one regex does not match
  if ( not (token_re.match(output) and
            author_re.match(output) and
            created_re.match(output) and
            comment_re.match(output))):
    raise Failure
def _run_and_verify_resolve(cmd, expected_paths, *args):
  """Run "svn CMD" (where CMD is 'resolve' or 'resolved') with arguments
  ARGS, and verify that it resolves the paths in EXPECTED_PATHS and no
  others.  When no ARGS are given, the elements of EXPECTED_PATHS are
  used as the arguments themselves."""
  # TODO: verify that the status of PATHS changes accordingly.
  if not args:
    args = expected_paths
  # One "Resolved conflicted state" notification per path, any order.
  expected_output = verify.UnorderedOutput(
    ["Resolved conflicted state of '%s'\n" % path
     for path in expected_paths])
  run_and_verify_svn(None, expected_output, [], cmd, *args)
def run_and_verify_resolve(expected_paths, *args):
  """Run "svn resolve" with arguments ARGS, and verify that it resolves the
  paths in EXPECTED_PATHS and no others.  If no ARGS are specified, use the
  elements of EXPECTED_PATHS as the arguments."""
  # Thin wrapper; the real work is in _run_and_verify_resolve().
  _run_and_verify_resolve('resolve', expected_paths, *args)
def run_and_verify_resolved(expected_paths, *args):
  """Run "svn resolved" with arguments ARGS, and verify that it resolves the
  paths in EXPECTED_PATHS and no others.  If no ARGS are specified, use the
  elements of EXPECTED_PATHS as the arguments."""
  # Thin wrapper over the deprecated 'resolved' subcommand.
  _run_and_verify_resolve('resolved', expected_paths, *args)
def run_and_verify_revert(expected_paths, *args):
  """Run "svn revert" with arguments ARGS, and verify that it reverts
  the paths in EXPECTED_PATHS and no others.  When no ARGS are given,
  the elements of EXPECTED_PATHS are used as the arguments."""
  if not args:
    args = expected_paths
  # One "Reverted" notification per path, in any order.
  expected_output = verify.UnorderedOutput(
    ["Reverted '%s'\n" % path for path in expected_paths])
  run_and_verify_svn(None, expected_output, [], "revert", *args)
######################################################################
# Other general utilities
# This allows a test to *quickly* bootstrap itself.
def make_repo_and_wc(sbox, create_wc = True, read_only = False):
  """Create a fresh 'Greek Tree' repository and check out a WC from it.

  If READ_ONLY is False, a dedicated repository will be created, at the path
  SBOX.repo_dir.  If READ_ONLY is True, the pristine repository will be used.
  In either case, SBOX.repo_url is assumed to point to the repository that
  will be used.

  If create_wc is True, a dedicated working copy will be checked out from
  the repository, at the path SBOX.wc_dir.

  Returns on success, raises on failure."""
  # Create (or copy afresh) a new repos with a greek tree in it.
  if not read_only:
    guarantee_greek_repository(sbox.repo_dir)
  if create_wc:
    # Generate the expected output tree.
    expected_output = main.greek_state.copy()
    expected_output.wc_dir = sbox.wc_dir
    expected_output.tweak(status='A ', contents=None)
    # Generate an expected wc tree.
    expected_wc = main.greek_state
    # Do a checkout, and verify the resulting output and disk contents.
    run_and_verify_checkout(sbox.repo_url,
                            sbox.wc_dir,
                            expected_output,
                            expected_wc)
  else:
    # just make sure the parent folder of our working copy is created
    try:
      os.mkdir(main.general_wc_dir)
    except OSError, err:
      # Already existing is fine; anything else is a real error.
      if err.errno != errno.EEXIST:
        raise
# Duplicate a working copy or other dir.
def duplicate_dir(wc_name, wc_copy_name):
  """Copy the working copy WC_NAME to WC_COPY_NAME.  Overwrite any
  existing tree at that location."""
  # Remove any previous copy first; shutil.copytree requires a
  # nonexistent destination.
  main.safe_rmtree(wc_copy_name)
  shutil.copytree(wc_name, wc_copy_name)
def get_virginal_state(wc_dir, rev):
  "Return a virginal greek tree state for a WC and repos at revision REV."
  rev = str(rev) ### maybe switch rev to an integer?
  # copy the greek tree, shift it to the new wc_dir, insert a root elem,
  # then tweak all values
  state = main.greek_state.copy()
  state.wc_dir = wc_dir
  # The WC root itself also has a status entry.
  state.desc[''] = wc.StateItem()
  # Every node is unmodified and at revision REV.
  state.tweak(contents=None, status=' ', wc_rev=rev)
  return state
# Cheap administrative directory locking
def lock_admin_dir(wc_dir, recursive=False):
  "Lock a SVN administrative directory"
  # NOTE(review): the values returned by wc.open_wc_db() are unused here;
  # presumably the call is kept for its side effect of validating that
  # WC_DIR has a readable wc.db -- confirm before removing it.
  db, root_path, relpath = wc.open_wc_db(wc_dir)
  svntest.main.run_wc_lock_tester(recursive, wc_dir)
def set_incomplete(wc_dir, revision):
  "Make wc_dir incomplete at revision"
  # Delegates to the external wc-incomplete helper tool.
  svntest.main.run_wc_incomplete_tester(wc_dir, revision)
def get_wc_uuid(wc_dir):
  "Return the UUID of the working copy at WC_DIR."
  # Parse 'svn info' and pull the field for the WC root (first entry).
  return run_and_parse_info(wc_dir)[0]['Repository UUID']
def get_wc_base_rev(wc_dir):
  "Return the BASE revision of the working copy at WC_DIR."
  # Parse 'svn info' and pull the field for the WC root (first entry).
  return run_and_parse_info(wc_dir)[0]['Revision']
def hook_failure_message(hook_name):
  """Return the error message that the client prints for failure of the
  specified hook HOOK_NAME.  The wording changed with Subversion 1.5."""
  if svntest.main.options.server_minor_version < 5:
    return "'%s' hook failed with error output:\n" % hook_name
  # 1.5+ wording: known hooks get an action-specific message, any other
  # hook name falls back to a generic one.
  action_for_hook = {
    "start-commit"       : "Commit",
    "pre-commit"         : "Commit",
    "pre-revprop-change" : "Revprop change",
    "pre-lock"           : "Lock",
    "pre-unlock"         : "Unlock",
  }
  action = action_for_hook.get(hook_name)
  if action is None:
    message = "%s hook failed (exit code 1)" % (hook_name,)
  else:
    message = "%s blocked by %s hook (exit code 1)" % (action, hook_name)
  return message + " with output:\n"
def create_failing_hook(repo_dir, hook_name, text):
  """Install a HOOK_NAME hook in the repository at REPO_DIR that writes
  TEXT to stderr and exits with an error."""
  hook_path = os.path.join(repo_dir, 'hooks', hook_name)
  # repr() embeds the text safely even when it contains "%" or "'".
  script = ('import sys\n'
            'sys.stderr.write(' + repr(text) + ')\n'
            'sys.exit(1)\n')
  main.create_python_hook_script(hook_path, script)
def enable_revprop_changes(repo_dir):
  """Enable revprop changes in the repository at REPO_DIR by creating a
  pre-revprop-change hook script and (if appropriate) making it executable."""
  hook_path = main.get_pre_revprop_change_hook_path(repo_dir)
  # A hook that always exits 0 permits every revprop change.
  main.create_python_hook_script(hook_path, 'import sys; sys.exit(0)')
def disable_revprop_changes(repo_dir):
  """Disable revprop changes in the repository at REPO_DIR by creating a
  pre-revprop-change hook script that prints "pre-revprop-change" followed
  by its arguments, and returns an error."""
  hook_path = main.get_pre_revprop_change_hook_path(repo_dir)
  # Echoing the first five hook arguments to stderr lets tests assert
  # exactly which revprop change was rejected.
  main.create_python_hook_script(hook_path,
                                 'import sys\n'
                                 'sys.stderr.write("pre-revprop-change %s" % " ".join(sys.argv[1:6]))\n'
                                 'sys.exit(1)\n')
def create_failing_post_commit_hook(repo_dir):
  """Create a post-commit hook script in the repository at REPO_DIR that always
  reports an error."""
  hook_path = main.get_post_commit_hook_path(repo_dir)
  # The commit itself still succeeds; only the post-commit stage fails.
  main.create_python_hook_script(hook_path, 'import sys\n'
    'sys.stderr.write("Post-commit hook failed")\n'
    'sys.exit(1)')
# set_prop can be used for properties with NULL characters which are not
# handled correctly when passed to subprocess.Popen() and values like "*"
# which are not handled correctly on Windows.
def set_prop(name, value, path, expected_re_string=None):
  """Set property NAME to VALUE on PATH.  If EXPECTED_RE_STRING is given,
  'svn propset' is expected to fail with stderr matching that regex."""
  if value and (value[0] == '-' or '\x00' in value or sys.platform == 'win32'):
    # Pass the value through a temp file ('-F') to avoid argv quoting
    # and NUL-byte problems.
    from tempfile import mkstemp
    (fd, value_file_path) = mkstemp()
    value_file = open(value_file_path, 'wb')
    value_file.write(value)
    value_file.flush()
    value_file.close()
    exit_code, out, err = main.run_svn(expected_re_string, 'propset',
                                       '-F', value_file_path, name, path)
    os.close(fd)
    os.remove(value_file_path)
  else:
    exit_code, out, err = main.run_svn(expected_re_string, 'propset',
                                       name, value, path)
  if expected_re_string:
    if not expected_re_string.startswith(".*"):
      expected_re_string = ".*(" + expected_re_string + ")"
    expected_err = verify.RegexOutput(expected_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
def check_prop(name, path, exp_out, revprop=None):
  """Verify that property NAME on PATH has a value of EXP_OUT.
  If REVPROP is not None, then it is a revision number and
  a revision property is sought."""
  if revprop is not None:
    revprop_options = ['--revprop', '-r', revprop]
  else:
    revprop_options = []
  # Not using run_svn because binary_mode must be set
  exit_code, out, err = main.run_command(main.svn_binary, None, 1, 'pg',
                                         '--strict', name, path,
                                         '--config-dir',
                                         main.default_config_dir,
                                         '--username', main.wc_author,
                                         '--password', main.wc_passwd,
                                         *revprop_options)
  # '--strict' prints the raw value, so an exact comparison is possible.
  if out != exp_out:
    print("svn pg --strict %s output does not match expected." % name)
    print("Expected standard output:  %s\n" % exp_out)
    print("Actual standard output:  %s\n" % out)
    raise Failure
def fill_file_with_lines(wc_path, line_nbr, line_descrip=None,
                         append=True):
  """Change the file at WC_PATH (adding some lines), and return its
  new contents.  LINE_NBR indicates the line number at which the new
  contents should assume that it's being appended.  LINE_DESCRIP is
  something like 'This is line' (the default) or 'Conflicting line'.
  When APPEND is False the file is overwritten instead of extended."""
  if line_descrip is None:
    line_descrip = "This is line"
  # Build three numbered lines, each naming the file's basename.
  basename = os.path.basename(wc_path)
  new_lines = []
  for n in range(line_nbr, line_nbr + 3):
    new_lines.append("%s %s in '%s'.\n" % (line_descrip, repr(n), basename))
  contents = "".join(new_lines)
  # Write the new contents to the file.
  if append:
    main.file_append(wc_path, contents)
  else:
    main.file_write(wc_path, contents)
  return contents
def inject_conflict_into_wc(sbox, state_path, file_path,
                            expected_disk, expected_status, merged_rev):
  """Create a conflict at FILE_PATH by replacing its contents,
  committing the change, backdating it to its previous revision,
  changing its contents again, then updating it to merge in the
  previous change.

  NOTE(review): EXPECTED_STATUS is effectively required -- it is
  dereferenced unconditionally just below to obtain the previous
  revision, even though later uses are guarded by 'if expected_status:'.
  Confirm before passing None."""
  wc_dir = sbox.wc_dir
  # Make a change to the file.
  contents = fill_file_with_lines(file_path, 1, "This is line", append=False)
  # Commit the changed file, first taking note of the current revision.
  prev_rev = expected_status.desc[state_path].wc_rev
  expected_output = wc.State(wc_dir, {
    state_path : wc.StateItem(verb='Sending'),
    })
  if expected_status:
    expected_status.tweak(state_path, wc_rev=merged_rev)
  run_and_verify_commit(wc_dir, expected_output, expected_status,
                        None, file_path)
  # Backdate the file.
  exit_code, output, errput = main.run_svn(None, "up", "-r", str(prev_rev),
                                           file_path)
  if expected_status:
    expected_status.tweak(state_path, wc_rev=prev_rev)
  # Make a conflicting change to the file, and backdate the file.
  conflicting_contents = fill_file_with_lines(file_path, 1, "Conflicting line",
                                              append=False)
  # Merge the previous change into the file to produce a conflict.
  if expected_disk:
    expected_disk.tweak(state_path, contents="")
  expected_output = wc.State(wc_dir, {
    state_path : wc.StateItem(status='C '),
    })
  inject_conflict_into_expected_state(state_path,
                                      expected_disk, expected_status,
                                      conflicting_contents, contents,
                                      merged_rev)
  exit_code, output, errput = main.run_svn(None, "up", "-r", str(merged_rev),
                                           file_path)
  if expected_status:
    expected_status.tweak(state_path, wc_rev=merged_rev)
def inject_conflict_into_expected_state(state_path,
                                        expected_disk, expected_status,
                                        wc_text, merged_text, merged_rev):
  """Update the EXPECTED_DISK and EXPECTED_STATUS trees for the
  conflict at STATE_PATH (ignored if None).  WC_TEXT, MERGED_TEXT, and
  MERGED_REV are used to determine the contents of the conflict (the
  text parameters should be newline-terminated)."""
  if expected_disk:
    # Append the conflict-marker block to whatever contents the disk
    # tree already expects at this path.
    conflict_marker = make_conflict_marker_text(wc_text, merged_text,
                                                merged_rev)
    existing_text = expected_disk.desc[state_path].contents or ""
    expected_disk.tweak(state_path, contents=existing_text + conflict_marker)
  if expected_status:
    expected_status.tweak(state_path, status='C ')
def make_conflict_marker_text(wc_text, merged_text, merged_rev):
  """Build the conflict-marker block for a text conflict.

  WC_TEXT is the current text in the working copy, MERGED_TEXT the
  conflicting text merged in, and MERGED_REV the revision the
  conflicting text came from.  Both text arguments are expected to be
  newline-terminated."""
  parts = ["<<<<<<< .working\n",
           wc_text,
           "=======\n",
           merged_text,
           ">>>>>>> .merge-right.r%s\n" % merged_rev]
  return "".join(parts)
def build_greek_tree_conflicts(sbox):
  """Create a working copy that has tree-conflict markings.
  After this function has been called, sbox.wc_dir is a working
  copy that has specific tree-conflict markings.

  In particular, this does two conflicting sets of edits and performs an
  update so that tree conflicts appear.

  Note that this function calls sbox.build() because it needs a clean sbox.
  So, there is no need to call sbox.build() before this.

  The conflicts are the result of an 'update' on the following changes:

                Incoming    Local

    A/D/G/pi    text-mod    del
    A/D/G/rho   del         text-mod
    A/D/G/tau   del         del

  This function is useful for testing that tree-conflicts are handled
  properly once they have appeared, e.g. that commits are blocked, that the
  info output is correct, etc.

  See also the tree-conflicts tests using deep_trees in various other
  .py files, and tree_conflict_tests.py.
  """
  sbox.build()
  wc_dir = sbox.wc_dir
  j = os.path.join
  G = j(wc_dir, 'A', 'D', 'G')
  pi = j(G, 'pi')
  rho = j(G, 'rho')
  tau = j(G, 'tau')
  # Make incoming changes and "store them away" with a commit.
  main.file_append(pi, "Incoming edit.\n")
  main.run_svn(None, 'del', rho)
  main.run_svn(None, 'del', tau)
  expected_output = wc.State(wc_dir, {
    'A/D/G/pi'          : Item(verb='Sending'),
    'A/D/G/rho'         : Item(verb='Deleting'),
    'A/D/G/tau'         : Item(verb='Deleting'),
    })
  expected_status = get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/G/pi', wc_rev='2')
  expected_status.remove('A/D/G/rho', 'A/D/G/tau')
  run_and_verify_commit(wc_dir, expected_output, expected_status, None,
                        '-m', 'Incoming changes.', wc_dir )
  # Update back to the pristine state ("time-warp").
  expected_output = wc.State(wc_dir, {
    'A/D/G/pi'          : Item(status='U '),
    'A/D/G/rho'         : Item(status='A '),
    'A/D/G/tau'         : Item(status='A '),
    })
  expected_disk = main.greek_state
  expected_status = get_virginal_state(wc_dir, 1)
  run_and_verify_update(wc_dir, expected_output, expected_disk,
                        expected_status, None, None, None, None, None, False,
                        '-r', '1', wc_dir)
  # Make local changes
  main.run_svn(None, 'del', pi)
  main.file_append(rho, "Local edit.\n")
  main.run_svn(None, 'del', tau)
  # Update, receiving the incoming changes on top of the local changes,
  # causing tree conflicts.  Don't check for any particular result: that is
  # the job of other tests.
  run_and_verify_svn(None, verify.AnyOutput, [], 'update', wc_dir)
def make_deep_trees(base):
  """Helper function for deep trees conflicts.  Create a set of trees,
  each in its own "container" dir.  Any conflicts can be tested separately
  in each container.
  """
  j = os.path.join
  # Create one container dir per tree shape being exercised:
  # F (file only), D (dir), DF (dir+file), DD (nested dirs),
  # DDF (nested dirs + file), DDD (deeply nested dirs).
  os.makedirs(j(base, 'F'))
  os.makedirs(j(base, 'D', 'D1'))
  os.makedirs(j(base, 'DF', 'D1'))
  os.makedirs(j(base, 'DD', 'D1', 'D2'))
  os.makedirs(j(base, 'DDF', 'D1', 'D2'))
  os.makedirs(j(base, 'DDD', 'D1', 'D2', 'D3'))
  # Drop one file into each file-bearing container.
  for parts, name in (((  'F',),            'alpha'),
                      (('DF', 'D1'),        'beta'),
                      (('DDF', 'D1', 'D2'), 'gamma')):
    file_path = j(base, *(parts + (name,)))
    main.file_append(file_path, "This is the file '%s'.\n" % name)
def add_deep_trees(sbox, base_dir_name):
  """Prepare a "deep_trees" set inside a given directory.

  Creates <sbox.wc_dir>/<base_dir_name> and builds a deep-trees set
  within it (see make_deep_trees()), then schedules everything for
  addition with 'svn add'.  The caller must commit separately, if needed.
  """
  base = os.path.join(sbox.wc_dir, base_dir_name)
  make_deep_trees(base)
  main.run_svn(None, 'add', base)
# Shorthand used by all of the deep-trees state tables below.
Item = wc.StateItem

# Initial deep-trees state, exactly as produced by make_deep_trees().
deep_trees_virginal_state = wc.State('', {
  'F'               : Item(),
  'F/alpha'         : Item("This is the file 'alpha'.\n"),
  'D'               : Item(),
  'D/D1'            : Item(),
  'DF'              : Item(),
  'DF/D1'           : Item(),
  'DF/D1/beta'      : Item("This is the file 'beta'.\n"),
  'DD'              : Item(),
  'DD/D1'           : Item(),
  'DD/D1/D2'        : Item(),
  'DDF'             : Item(),
  'DDF/D1'          : Item(),
  'DDF/D1/D2'       : Item(),
  'DDF/D1/D2/gamma' : Item("This is the file 'gamma'.\n"),
  'DDD'             : Item(),
  'DDD/D1'          : Item(),
  'DDD/D1/D2'       : Item(),
  'DDD/D1/D2/D3'    : Item(),
  })
# Many actions on deep trees and their resulting states...
def deep_trees_leaf_edit(base):
  """Helper for deep-trees test cases: edit the leaves of every container.

  Appends text to the existing files, sets a property on each leaf file
  and each leaf directory, and adds one brand-new file inside every leaf
  directory.
  """
  join = os.path.join

  # Append text to the leaf files, then give each of them a property.
  alpha = join(base, 'F', 'alpha')
  beta = join(base, 'DF', 'D1', 'beta')
  gamma = join(base, 'DDF', 'D1', 'D2', 'gamma')
  main.file_append(alpha, "More text for file alpha.\n")
  main.file_append(beta, "More text for file beta.\n")
  main.file_append(gamma, "More text for file gamma.\n")
  run_and_verify_svn(None, verify.AnyOutput, [],
                     'propset', 'prop1', '1', alpha, beta, gamma)

  # Set the same property on the leaf directories.
  d1 = join(base, 'D', 'D1')
  d2 = join(base, 'DD', 'D1', 'D2')
  d3 = join(base, 'DDD', 'D1', 'D2', 'D3')
  run_and_verify_svn(None, verify.AnyOutput, [],
                     'propset', 'prop1', '1', d1, d2, d3)

  # Create and schedule one new file in each leaf directory.
  delta = join(d1, 'delta')
  epsilon = join(d2, 'epsilon')
  zeta = join(d3, 'zeta')
  main.file_append(delta, "This is the file 'delta'.\n")
  main.file_append(epsilon, "This is the file 'epsilon'.\n")
  main.file_append(zeta, "This is the file 'zeta'.\n")
  run_and_verify_svn(None, verify.AnyOutput, [],
                     'add', delta, epsilon, zeta)
# Deep-trees state after a call to deep_trees_leaf_edit():
# files have extra text appended and new files exist in the leaf dirs.
deep_trees_after_leaf_edit = wc.State('', {
  'F'                 : Item(),
  'F/alpha'           : Item("This is the file 'alpha'.\nMore text for file alpha.\n"),
  'D'                 : Item(),
  'D/D1'              : Item(),
  'D/D1/delta'        : Item("This is the file 'delta'.\n"),
  'DF'                : Item(),
  'DF/D1'             : Item(),
  'DF/D1/beta'        : Item("This is the file 'beta'.\nMore text for file beta.\n"),
  'DD'                : Item(),
  'DD/D1'             : Item(),
  'DD/D1/D2'          : Item(),
  'DD/D1/D2/epsilon'  : Item("This is the file 'epsilon'.\n"),
  'DDF'               : Item(),
  'DDF/D1'            : Item(),
  'DDF/D1/D2'         : Item(),
  'DDF/D1/D2/gamma'   : Item("This is the file 'gamma'.\nMore text for file gamma.\n"),
  'DDD'               : Item(),
  'DDD/D1'            : Item(),
  'DDD/D1/D2'         : Item(),
  'DDD/D1/D2/D3'      : Item(),
  'DDD/D1/D2/D3/zeta' : Item("This is the file 'zeta'.\n"),
  })
def deep_trees_leaf_del(base):
  """Helper for deep-trees test cases: schedule every leaf for deletion.

  Runs a single 'svn rm' over the leaf file or empty leaf directory of
  each container.
  """
  join = os.path.join
  # One victim per container: files for F/DF/DDF, empty dirs for D/DD/DDD.
  victims = [join(base, 'F', 'alpha'),
             join(base, 'D', 'D1'),
             join(base, 'DF', 'D1', 'beta'),
             join(base, 'DD', 'D1', 'D2'),
             join(base, 'DDF', 'D1', 'D2', 'gamma'),
             join(base, 'DDD', 'D1', 'D2', 'D3')]
  main.run_svn(None, 'rm', *victims)
# Deep-trees state after a call to deep_trees_leaf_del():
# leaf files and leaf directories are gone.
deep_trees_after_leaf_del = wc.State('', {
  'F'         : Item(),
  'D'         : Item(),
  'DF'        : Item(),
  'DF/D1'     : Item(),
  'DD'        : Item(),
  'DD/D1'     : Item(),
  'DDF'       : Item(),
  'DDF/D1'    : Item(),
  'DDF/D1/D2' : Item(),
  'DDD'       : Item(),
  'DDD/D1'    : Item(),
  'DDD/D1/D2' : Item(),
  })
# Deep-trees state after a call to deep_trees_leaf_del() with no commit.
def deep_trees_after_leaf_del_no_ci(wc_dir):
  """Return the expected state after deep_trees_leaf_del() without a
  commit: deep_trees_after_leaf_del for single-db working copies,
  deep_trees_empty_dirs otherwise."""
  if svntest.main.wc_is_singledb(wc_dir):
    return deep_trees_after_leaf_del
  return deep_trees_empty_dirs
def deep_trees_tree_del(base):
  """Helper for deep-trees test cases: delete each container's top-most
  victim ('F/alpha' plus every container's 'D1' dir) with 'svn rm'."""
  join = os.path.join
  victims = [join(base, 'F', 'alpha')]
  for container in ('D', 'DF', 'DD', 'DDF', 'DDD'):
    victims.append(join(base, container, 'D1'))
  main.run_svn(None, 'rm', *victims)
def deep_trees_rmtree(base):
  """Helper for deep-trees test cases: remove each container's top-most
  victim directly on disk (os.unlink / safe_rmtree) rather than via
  'svn del'."""
  join = os.path.join
  # The F container's victim is a file; the others are directory trees.
  os.unlink(join(base, 'F', 'alpha'))
  for container in ('D', 'DF', 'DD', 'DDF', 'DDD'):
    main.safe_rmtree(join(base, container, 'D1'))
# Deep-trees state after a call to deep_trees_tree_del():
# only the (now empty) container dirs remain.
deep_trees_after_tree_del = wc.State('', {
  'F'   : Item(),
  'D'   : Item(),
  'DF'  : Item(),
  'DD'  : Item(),
  'DDF' : Item(),
  'DDD' : Item(),
  })

# Deep-trees state without any files: the full directory skeleton only.
deep_trees_empty_dirs = wc.State('', {
  'F'            : Item(),
  'D'            : Item(),
  'D/D1'         : Item(),
  'DF'           : Item(),
  'DF/D1'        : Item(),
  'DD'           : Item(),
  'DD/D1'        : Item(),
  'DD/D1/D2'     : Item(),
  'DDF'          : Item(),
  'DDF/D1'       : Item(),
  'DDF/D1/D2'    : Item(),
  'DDD'          : Item(),
  'DDD/D1'       : Item(),
  'DDD/D1/D2'    : Item(),
  'DDD/D1/D2/D3' : Item(),
  })
# Deep-trees state after a call to deep_trees_tree_del() with no commit.
def deep_trees_after_tree_del_no_ci(wc_dir):
  """Return the expected state after deep_trees_tree_del() without a
  commit: deep_trees_after_tree_del for single-db working copies,
  deep_trees_empty_dirs otherwise."""
  if svntest.main.wc_is_singledb(wc_dir):
    return deep_trees_after_tree_del
  return deep_trees_empty_dirs
def deep_trees_tree_del_repos(base):
  """Helper function for deep trees test cases. Delete top-level dirs,
  directly in the repository."""
  # NOTE(review): despite the docstring, the command below is 'mkdir',
  # not a delete -- it creates the F/alpha and */D1 paths as directories
  # directly in the repository (used e.g. by deep_trees_skipping_on_update()
  # as a "further action" producing a new revision).  Confirm whether the
  # docstring or the command reflects the intent.
  # BASE is a URL, so paths are joined with '/' rather than os.path.join.
  j = '/'.join
  F   = j([base, 'F', 'alpha'])
  D   = j([base, 'D', 'D1'])
  DF  = j([base, 'DF', 'D1'])
  DD  = j([base, 'DD', 'D1'])
  DDF = j([base, 'DDF', 'D1'])
  DDD = j([base, 'DDD', 'D1'])
  main.run_svn(None, 'mkdir', '-m', '', F, D, DF, DD, DDF, DDD)
# Expected merge/update/switch output: one tree conflict reported per
# container's top-most victim.
deep_trees_conflict_output = wc.State('', {
  'F/alpha' : Item(status=' ', treeconflict='C'),
  'D/D1'    : Item(status=' ', treeconflict='C'),
  'DF/D1'   : Item(status=' ', treeconflict='C'),
  'DD/D1'   : Item(status=' ', treeconflict='C'),
  'DDF/D1'  : Item(status=' ', treeconflict='C'),
  'DDD/D1'  : Item(status=' ', treeconflict='C'),
  })

# Expected output when the already-conflicted victims are skipped instead.
deep_trees_conflict_output_skipped = wc.State('', {
  'D/D1'    : Item(verb='Skipped'),
  'F/alpha' : Item(verb='Skipped'),
  'DD/D1'   : Item(verb='Skipped'),
  'DF/D1'   : Item(verb='Skipped'),
  'DDD/D1'  : Item(verb='Skipped'),
  'DDF/D1'  : Item(verb='Skipped'),
  })

# Expected status output after merge/update/switch, with a local tree
# deletion conflicting against the incoming change.
deep_trees_status_local_tree_del = wc.State('', {
  ''                  : Item(status=' ', wc_rev=3),
  'D'                 : Item(status=' ', wc_rev=3),
  'D/D1'              : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DD'                : Item(status=' ', wc_rev=3),
  'DD/D1'             : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DD/D1/D2'          : Item(status='D ', wc_rev=2),
  'DDD'               : Item(status=' ', wc_rev=3),
  'DDD/D1'            : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DDD/D1/D2'         : Item(status='D ', wc_rev=2),
  'DDD/D1/D2/D3'      : Item(status='D ', wc_rev=2),
  'DDF'               : Item(status=' ', wc_rev=3),
  'DDF/D1'            : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DDF/D1/D2'         : Item(status='D ', wc_rev=2),
  'DDF/D1/D2/gamma'   : Item(status='D ', wc_rev=2),
  'DF'                : Item(status=' ', wc_rev=3),
  'DF/D1'             : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DF/D1/beta'        : Item(status='D ', wc_rev=2),
  'F'                 : Item(status=' ', wc_rev=3),
  'F/alpha'           : Item(status='D ', wc_rev=2, treeconflict='C'),
  })

# Expected status output after merge/update/switch, with local leaf edits
# (text/prop mods plus added files) conflicting against the incoming change.
deep_trees_status_local_leaf_edit = wc.State('', {
  ''                  : Item(status=' ', wc_rev=3),
  'D'                 : Item(status=' ', wc_rev=3),
  'D/D1'              : Item(status=' M', wc_rev=2, treeconflict='C'),
  'D/D1/delta'        : Item(status='A ', wc_rev=0),
  'DD'                : Item(status=' ', wc_rev=3),
  'DD/D1'             : Item(status=' ', wc_rev=2, treeconflict='C'),
  'DD/D1/D2'          : Item(status=' M', wc_rev=2),
  'DD/D1/D2/epsilon'  : Item(status='A ', wc_rev=0),
  'DDD'               : Item(status=' ', wc_rev=3),
  'DDD/D1'            : Item(status=' ', wc_rev=2, treeconflict='C'),
  'DDD/D1/D2'         : Item(status=' ', wc_rev=2),
  'DDD/D1/D2/D3'      : Item(status=' M', wc_rev=2),
  'DDD/D1/D2/D3/zeta' : Item(status='A ', wc_rev=0),
  'DDF'               : Item(status=' ', wc_rev=3),
  'DDF/D1'            : Item(status=' ', wc_rev=2, treeconflict='C'),
  'DDF/D1/D2'         : Item(status=' ', wc_rev=2),
  'DDF/D1/D2/gamma'   : Item(status='MM', wc_rev=2),
  'DF'                : Item(status=' ', wc_rev=3),
  'DF/D1'             : Item(status=' ', wc_rev=2, treeconflict='C'),
  'DF/D1/beta'        : Item(status='MM', wc_rev=2),
  'F'                 : Item(status=' ', wc_rev=3),
  'F/alpha'           : Item(status='MM', wc_rev=2, treeconflict='C'),
  })
class DeepTreesTestCase:
  """One tree-conflicts test case description.

  Used by deep_trees_run_tests_scheme_for_update(), ..._switch() and
  ..._merge().

  NAME is the subdirectory in which the test runs.  LOCAL_ACTION and
  INCOMING_ACTION are the functions that construct the local and the
  incoming changes, respectively (see deep_trees_leaf_edit,
  deep_trees_tree_del, etc.).

  The EXPECTED_* and ERROR_RE_STRING arguments are as described for
  run_and_verify_[update|switch|merge], except EXPECTED_INFO, which is a
  dict mapping paths to dicts as passed to run_and_verify_info(), e.g.:

      expected_info = {
        'F/alpha' : {
          'Revision'      : '3',
          'Tree conflict' :
            '^local delete, incoming edit upon update'
            + ' Source  left: .file.*/F/alpha@2'
            + ' Source right: .file.*/F/alpha@3$',
        },
        ...
      }

  Note: EXPECTED_SKIP is only used by merge, i.e. by
  deep_trees_run_tests_scheme_for_merge.
  """

  def __init__(self, name, local_action, incoming_action,
               expected_output = None, expected_disk = None,
               expected_status = None, expected_skip = None,
               error_re_string = None,
               commit_block_string = ".*remains in conflict.*",
               expected_info = None):
    # Identity and the two change-constructing actions.
    self.name = name
    self.local_action = local_action
    self.incoming_action = incoming_action
    # Expected results; all optional.
    self.expected_output = expected_output
    self.expected_disk = expected_disk
    self.expected_status = expected_status
    self.expected_skip = expected_skip
    self.error_re_string = error_re_string
    self.commit_block_string = commit_block_string
    self.expected_info = expected_info
def deep_trees_run_tests_scheme_for_update(sbox, greater_scheme):
  """
  Runs a given list of tests for conflicts occurring at an update operation.

  This function wants to save time and perform a number of different
  test cases using just a single repository and performing just one commit
  for all test cases instead of one for each test case.

  1) Each test case is initialized in a separate subdir. Each subdir
     again contains one set of "deep_trees", being separate container
     dirs for different depths of trees (F, D, DF, DD, DDF, DDD).

  2) A commit is performed across all test cases and depths.
     (our initial state, -r2)

  3) In each test case subdir (e.g. "local_tree_del_incoming_leaf_edit"),
     its *incoming* action is performed (e.g. "deep_trees_leaf_edit"), in
     each of the different depth trees (F, D, DF, ... DDD).

  4) A commit is performed across all test cases and depths:
     our "incoming" state is "stored away in the repository for now",
     -r3.

  5) All test case dirs and contained deep_trees are time-warped
     (updated) back to -r2, the initial state containing deep_trees.

  6) In each test case subdir (e.g. "local_tree_del_incoming_leaf_edit"),
     its *local* action is performed (e.g. "deep_trees_leaf_del"), in
     each of the different depth trees (F, D, DF, ... DDD).

  7) An update to -r3 is performed across all test cases and depths.
     This causes tree-conflicts between the "local" state in the working
     copy and the "incoming" state from the repository, -r3.

  8) A commit is performed in each separate container, to verify
     that each tree-conflict indeed blocks a commit.

  The sbox parameter is just the sbox passed to a test function. No need
  to call sbox.build(), since it is called (once) within this function.

  The "table" greater_scheme models all of the different test cases
  that should be run using a single repository.

  greater_scheme is a list of DeepTreesTestCase items, which define complete
  test setups, so that they can be performed as described above.
  """
  j = os.path.join

  if not sbox.is_built():
    sbox.build()
  wc_dir = sbox.wc_dir

  # 1) create directories
  for test_case in greater_scheme:
    try:
      add_deep_trees(sbox, test_case.name)
    except:
      print("ERROR IN: Tests scheme for update: "
            + "while setting up deep trees in '%s'" % test_case.name)
      raise

  # 2) commit initial state
  main.run_svn(None, 'commit', '-m', 'initial state', wc_dir)

  # 3) apply incoming changes
  for test_case in greater_scheme:
    try:
      test_case.incoming_action(j(sbox.wc_dir, test_case.name))
    except:
      print("ERROR IN: Tests scheme for update: "
            + "while performing incoming action in '%s'" % test_case.name)
      raise

  # 4) commit incoming changes
  main.run_svn(None, 'commit', '-m', 'incoming changes', wc_dir)

  # 5) time-warp back to -r2
  main.run_svn(None, 'update', '-r2', wc_dir)

  # 6) apply local changes
  for test_case in greater_scheme:
    try:
      test_case.local_action(j(wc_dir, test_case.name))
    except:
      print("ERROR IN: Tests scheme for update: "
            + "while performing local action in '%s'" % test_case.name)
      raise

  # 7) update to -r3, conflicting with incoming changes.
  #    A lot of different things are expected.
  #    Do separate update operations for each test case.
  for test_case in greater_scheme:
    try:
      base = j(wc_dir, test_case.name)

      x_out = test_case.expected_output
      if x_out is not None:
        x_out = x_out.copy()
        x_out.wc_dir = base

      x_disk = test_case.expected_disk

      x_status = test_case.expected_status
      if x_status is not None:
        # Fix: keep the copy.  Previously the result of copy() was
        # discarded and wc_dir was set on the test case's shared
        # expected_status object (compare the x_out handling above).
        x_status = x_status.copy()
        x_status.wc_dir = base

      run_and_verify_update(base, x_out, x_disk, None,
                            error_re_string = test_case.error_re_string)
      if x_status:
        run_and_verify_unquiet_status(base, x_status)

      x_info = test_case.expected_info or {}
      for path in x_info:
        run_and_verify_info([x_info[path]], j(base, path))
    except:
      print("ERROR IN: Tests scheme for update: "
            + "while verifying in '%s'" % test_case.name)
      raise

  # 8) Verify that commit fails.
  for test_case in greater_scheme:
    try:
      base = j(wc_dir, test_case.name)

      x_status = test_case.expected_status
      if x_status is not None:
        # Same fix as in step 7: operate on a copy, not the original.
        x_status = x_status.copy()
        x_status.wc_dir = base

      run_and_verify_commit(base, None, x_status,
                            test_case.commit_block_string,
                            base)
    except:
      print("ERROR IN: Tests scheme for update: "
            + "while checking commit-blocking in '%s'" % test_case.name)
      raise
def deep_trees_skipping_on_update(sbox, test_case, skip_paths,
                                  chdir_skip_paths):
  """
  Create tree conflicts, then update again, expecting the existing tree
  conflicts to be skipped.
  SKIP_PATHS is a list of paths, relative to the "base dir", for which
  "update" on the "base dir" should report as skipped.
  CHDIR_SKIP_PATHS is a list of (target-path, skipped-path) pairs for which
  an update of "target-path" (relative to the "base dir") should result in
  "skipped-path" (relative to "target-path") being reported as skipped.
  """

  """FURTHER_ACTION is a function that will make a further modification to
  each target, this being the modification that we expect to be skipped. The
  function takes the "base dir" (the WC path to the test case directory) as
  its only argument."""
  further_action = deep_trees_tree_del_repos

  j = os.path.join
  wc_dir = sbox.wc_dir
  base = j(wc_dir, test_case.name)

  # Initialize: generate conflicts. (We do not check anything here.)
  # A stripped-down copy of TEST_CASE is used so that no expectations are
  # verified during setup.
  setup_case = DeepTreesTestCase(test_case.name,
                                 test_case.local_action,
                                 test_case.incoming_action,
                                 None,
                                 None,
                                 None)
  deep_trees_run_tests_scheme_for_update(sbox, [setup_case])

  # Make a further change to each target in the repository so there is a new
  # revision to update to. (This is r4.)
  further_action(sbox.repo_url + '/' + test_case.name)

  # Update whole working copy, expecting the nodes still in conflict to be
  # skipped.
  x_out = test_case.expected_output
  if x_out != None:
    x_out = x_out.copy()
    x_out.wc_dir = base

  x_disk = test_case.expected_disk

  x_status = test_case.expected_status
  if x_status != None:
    # Copy so the test case's shared status object is not mutated.
    x_status = x_status.copy()
    x_status.wc_dir = base
    # Account for nodes that were updated by further_action
    x_status.tweak('', 'D', 'F', 'DD', 'DF', 'DDD', 'DDF', wc_rev=4)

  run_and_verify_update(base, x_out, x_disk, None,
                        error_re_string = test_case.error_re_string)

  run_and_verify_unquiet_status(base, x_status)

  # Try to update each in-conflict subtree. Expect a 'Skipped' output for
  # each, and the WC status to be unchanged.
  for path in skip_paths:
    run_and_verify_update(j(base, path),
                          wc.State(base, {path : Item(verb='Skipped')}),
                          None, None)

  run_and_verify_unquiet_status(base, x_status)

  # Try to update each in-conflict subtree. Expect a 'Skipped' output for
  # each, and the WC status to be unchanged.
  # This time, cd to the subdir before updating it.
  was_cwd = os.getcwd()
  for path, skipped in chdir_skip_paths:
    # SKIPPED may name either a single skipped path or a list of them.
    if isinstance(skipped, list):
      expected_skip = {}
      for p in skipped:
        expected_skip[p] = Item(verb='Skipped')
    else:
      expected_skip = {skipped : Item(verb='Skipped')}
    p = j(base, path)
    run_and_verify_update(p,
                          wc.State(p, expected_skip),
                          None, None)
  os.chdir(was_cwd)

  run_and_verify_unquiet_status(base, x_status)

  # Verify that commit still fails.
  for path, skipped in chdir_skip_paths:
    run_and_verify_commit(j(base, path), None, None,
                          test_case.commit_block_string,
                          base)

  run_and_verify_unquiet_status(base, x_status)
def deep_trees_run_tests_scheme_for_switch(sbox, greater_scheme):
  """
  Runs a given list of tests for conflicts occurring at a switch operation.

  This function wants to save time and perform a number of different
  test cases using just a single repository and performing just one commit
  for all test cases instead of one for each test case.

  1) Each test case is initialized in a separate subdir. Each subdir
     again contains two subdirs: one "local" and one "incoming" for
     the switch operation. These contain a set of deep_trees each.

  2) A commit is performed across all test cases and depths.
     (our initial state, -r2)

  3) In each test case subdir's incoming subdir, the
     incoming actions are performed.

  4) A commit is performed across all test cases and depths. (-r3)

  5) In each test case subdir's local subdir, the local actions are
     performed. They remain uncommitted in the working copy.

  6) In each test case subdir's local dir, a switch is performed to its
     corresponding incoming dir.
     This causes conflicts between the "local" state in the working
     copy and the "incoming" state from the incoming subdir (still -r3).

  7) A commit is performed in each separate container, to verify
     that each tree-conflict indeed blocks a commit.

  The sbox parameter is just the sbox passed to a test function. No need
  to call sbox.build(), since it is called (once) within this function.

  The "table" greater_scheme models all of the different test cases
  that should be run using a single repository.

  greater_scheme is a list of DeepTreesTestCase items, which define complete
  test setups, so that they can be performed as described above.
  """
  j = os.path.join

  if not sbox.is_built():
    sbox.build()
  wc_dir = sbox.wc_dir

  # 1) Create directories.
  for test_case in greater_scheme:
    try:
      base = j(sbox.wc_dir, test_case.name)
      os.makedirs(base)
      make_deep_trees(j(base, "local"))
      make_deep_trees(j(base, "incoming"))
      main.run_svn(None, 'add', base)
    except:
      print("ERROR IN: Tests scheme for switch: "
            + "while setting up deep trees in '%s'" % test_case.name)
      raise

  # 2) Commit initial state (-r2).
  main.run_svn(None, 'commit', '-m', 'initial state', wc_dir)

  # 3) Apply incoming changes
  for test_case in greater_scheme:
    try:
      test_case.incoming_action(j(sbox.wc_dir, test_case.name, "incoming"))
    except:
      print("ERROR IN: Tests scheme for switch: "
            + "while performing incoming action in '%s'" % test_case.name)
      raise

  # 4) Commit all changes (-r3).
  main.run_svn(None, 'commit', '-m', 'incoming changes', wc_dir)

  # 5) Apply local changes in their according subdirs.
  for test_case in greater_scheme:
    try:
      test_case.local_action(j(sbox.wc_dir, test_case.name, "local"))
    except:
      print("ERROR IN: Tests scheme for switch: "
            + "while performing local action in '%s'" % test_case.name)
      raise

  # 6) switch the local dir to the incoming url, conflicting with incoming
  #    changes. A lot of different things are expected.
  #    Do separate switch operations for each test case.
  for test_case in greater_scheme:
    try:
      local = j(wc_dir, test_case.name, "local")
      incoming = sbox.repo_url + "/" + test_case.name + "/incoming"

      x_out = test_case.expected_output
      if x_out is not None:
        x_out = x_out.copy()
        x_out.wc_dir = local

      x_disk = test_case.expected_disk

      x_status = test_case.expected_status
      if x_status is not None:
        # Fix: keep the copy.  Previously the result of copy() was
        # discarded and wc_dir was set on the test case's shared
        # expected_status object (compare the x_out handling above).
        x_status = x_status.copy()
        x_status.wc_dir = local

      run_and_verify_switch(local, local, incoming, x_out, x_disk, None,
                            test_case.error_re_string, None, None, None,
                            None, False, '--ignore-ancestry')
      run_and_verify_unquiet_status(local, x_status)

      x_info = test_case.expected_info or {}
      for path in x_info:
        run_and_verify_info([x_info[path]], j(local, path))
    except:
      print("ERROR IN: Tests scheme for switch: "
            + "while verifying in '%s'" % test_case.name)
      raise

  # 7) Verify that commit fails.
  for test_case in greater_scheme:
    try:
      local = j(wc_dir, test_case.name, 'local')

      x_status = test_case.expected_status
      if x_status is not None:
        # Same fix as in step 6: operate on a copy, not the original.
        x_status = x_status.copy()
        x_status.wc_dir = local

      run_and_verify_commit(local, None, x_status,
                            test_case.commit_block_string,
                            local)
    except:
      print("ERROR IN: Tests scheme for switch: "
            + "while checking commit-blocking in '%s'" % test_case.name)
      raise
def deep_trees_run_tests_scheme_for_merge(sbox, greater_scheme,
                                          do_commit_local_changes,
                                          do_commit_conflicts=True,
                                          ignore_ancestry=False):
  """
  Runs a given list of tests for conflicts occurring at a merge operation.

  This function wants to save time and perform a number of different
  test cases using just a single repository and performing just one commit
  for all test cases instead of one for each test case.

  1) Each test case is initialized in a separate subdir. Each subdir
     initially contains another subdir, called "incoming", which
     contains a set of deep_trees.

  2) A commit is performed across all test cases and depths.
     (a pre-initial state)

  3) In each test case subdir, the "incoming" subdir is copied to "local",
     via the `svn copy' command. Each test case's subdir now has two sub-
     dirs: "local" and "incoming", initial states for the merge operation.

  4) An update is performed across all test cases and depths, so that the
     copies made in 3) are pulled into the wc.

  5) In each test case's "incoming" subdir, the incoming action is
     performed.

  6) A commit is performed across all test cases and depths, to commit
     the incoming changes.
     If do_commit_local_changes is True, this becomes step 7 (swap steps).

  7) In each test case's "local" subdir, the local_action is performed.
     If do_commit_local_changes is True, this becomes step 6 (swap steps).
     Then, in effect, the local changes are committed as well.

  8) In each test case subdir, the "incoming" subdir is merged into the
     "local" subdir. If ignore_ancestry is True, then the merge is done
     with the --ignore-ancestry option, so mergeinfo is neither considered
     nor recorded. This causes conflicts between the "local" state in the
     working copy and the "incoming" state from the incoming subdir.

  9) If do_commit_conflicts is True, then a commit is performed in each
     separate container, to verify that each tree-conflict indeed blocks
     a commit.

  The sbox parameter is just the sbox passed to a test function. No need
  to call sbox.build(), since it is called (once) within this function.

  The "table" greater_scheme models all of the different test cases
  that should be run using a single repository.

  greater_scheme is a list of DeepTreesTestCase items, which define complete
  test setups, so that they can be performed as described above.
  """
  j = os.path.join

  if not sbox.is_built():
    sbox.build()
  wc_dir = sbox.wc_dir

  # 1) Create directories.
  for test_case in greater_scheme:
    try:
      base = j(sbox.wc_dir, test_case.name)
      os.makedirs(base)
      make_deep_trees(j(base, "incoming"))
      main.run_svn(None, 'add', base)
    except:
      print("ERROR IN: Tests scheme for merge: "
            + "while setting up deep trees in '%s'" % test_case.name)
      raise

  # 2) Commit pre-initial state (-r2).
  main.run_svn(None, 'commit', '-m', 'pre-initial state', wc_dir)

  # 3) Copy "incoming" to "local".
  for test_case in greater_scheme:
    try:
      base_url = sbox.repo_url + "/" + test_case.name
      incoming_url = base_url + "/incoming"
      local_url = base_url + "/local"
      main.run_svn(None, 'cp', incoming_url, local_url, '-m',
                   'copy incoming to local')
    except:
      print("ERROR IN: Tests scheme for merge: "
            + "while copying deep trees in '%s'" % test_case.name)
      raise

  # 4) Update to load all of the "/local" subdirs into the working copies.
  try:
    main.run_svn(None, 'up', sbox.wc_dir)
  except:
    print("ERROR IN: Tests scheme for merge: "
          + "while updating local subdirs")
    raise

  # 5) Perform incoming actions
  for test_case in greater_scheme:
    try:
      test_case.incoming_action(j(sbox.wc_dir, test_case.name, "incoming"))
    except:
      print("ERROR IN: Tests scheme for merge: "
            + "while performing incoming action in '%s'" % test_case.name)
      raise

  # 6) or 7) Commit all incoming actions
  if not do_commit_local_changes:
    try:
      main.run_svn(None, 'ci', '-m', 'Committing incoming actions',
                   sbox.wc_dir)
    except:
      print("ERROR IN: Tests scheme for merge: "
            + "while committing incoming actions")
      raise

  # 7) or 6) Perform all local actions.
  for test_case in greater_scheme:
    try:
      test_case.local_action(j(sbox.wc_dir, test_case.name, "local"))
    except:
      print("ERROR IN: Tests scheme for merge: "
            + "while performing local action in '%s'" % test_case.name)
      raise

  # 6) or 7) Commit all incoming actions
  if do_commit_local_changes:
    try:
      main.run_svn(None, 'ci', '-m', 'Committing incoming and local actions',
                   sbox.wc_dir)
    except:
      print("ERROR IN: Tests scheme for merge: "
            + "while committing incoming and local actions")
      raise

  # 8) Merge all "incoming" subdirs to their respective "local" subdirs.
  #    This creates conflicts between the local changes in the "local" wc
  #    subdirs and the incoming states committed in the "incoming" subdirs.
  for test_case in greater_scheme:
    try:
      local = j(sbox.wc_dir, test_case.name, "local")
      incoming = sbox.repo_url + "/" + test_case.name + "/incoming"

      x_out = test_case.expected_output
      if x_out is not None:
        x_out = x_out.copy()
        x_out.wc_dir = local

      x_disk = test_case.expected_disk

      x_status = test_case.expected_status
      if x_status is not None:
        # Fix: keep the copy.  Previously the result of copy() was
        # discarded and wc_dir was set on the test case's shared
        # expected_status object (compare the x_out handling above).
        x_status = x_status.copy()
        x_status.wc_dir = local

      x_skip = test_case.expected_skip
      if x_skip is not None:
        # Same fix for the expected-skip state.
        x_skip = x_skip.copy()
        x_skip.wc_dir = local

      varargs = (local,)
      if ignore_ancestry:
        varargs = varargs + ('--ignore-ancestry',)

      run_and_verify_merge(local, None, None, incoming, None,
                           x_out, None, None, x_disk, None, x_skip,
                           test_case.error_re_string,
                           None, None, None, None,
                           False, False, *varargs)
      run_and_verify_unquiet_status(local, x_status)
    except:
      print("ERROR IN: Tests scheme for merge: "
            + "while verifying in '%s'" % test_case.name)
      raise

  # 9) Verify that commit fails.
  if do_commit_conflicts:
    for test_case in greater_scheme:
      try:
        local = j(wc_dir, test_case.name, 'local')

        x_status = test_case.expected_status
        if x_status is not None:
          # Same fix as in step 8: operate on a copy, not the original.
          x_status = x_status.copy()
          x_status.wc_dir = local

        run_and_verify_commit(local, None, x_status,
                              test_case.commit_block_string,
                              local)
      except:
        print("ERROR IN: Tests scheme for merge: "
              + "while checking commit-blocking in '%s'" % test_case.name)
        raise
|
wbond/subversion
|
subversion/tests/cmdline/svntest/actions.py
|
Python
|
apache-2.0
| 113,603 |
# Filtering Issues
An essential part of navigating GitHub with HubTurbo is having multiple panels, all filled with exactly the issues you're interested in. Controlling what shows up in panels is done by writing a filter which precisely expresses what it should contain.
<img src="images/gettingStarted/panelExplanation.png" width="600">
Don't worry, there isn't much syntax to learn!
## Contents
- [Basics](#basics)
- [Examples](#examples)
- [Key Differences](#key-differences)
- [Operators](#operators)
- [Application](#application)
- [Qualifiers](#qualifiers)
- [Incompatibilities](#incompatibilities)
## Basics
Filters are compatible with a subset of GitHub's search syntax. Their documentation is a great reference for the details:
- [Searching Issues](https://help.github.com/articles/searching-issues/)
- [Search Syntax](https://help.github.com/articles/search-syntax/)
- [Examples](http://zachholman.com/posts/searching-github-issues/)
To very quickly summarize the key points in our own words:
- Filters contain **keywords**, **qualifiers**, and **meta-qualifiers**.
+ A **keyword** is a search term which matches issue text. The filter `cats dogs` will pick issues containing BOTH the words `cats` and `dogs` in any order.
+ A **qualifier** is a search term which matches metadata instead of text. `label:red` will match issues with the label containing `red`.
+ A **meta-qualifier** changes the semantics of keywords or qualifiers. `in:title meow` will pick issues with the text `meow` in their titles only. `repo:hubturbo/hubturbo` will search in HubTurbo's repository, loading it if it's not already loaded.
- All of the above can be freely intermixed and combined with operators.
## Examples
The filter "all issues assigned to John that aren't closed and are due in milestones v0.1 or v0.2" may be expressed as:
```
assignee:john -state:closed (milestone:v0.1 OR milestone:v0.2)
```
"Open issues updated in the last day" may be expressed as:
```
is:open updated:<24
```
Viewing recently-updated, closed issues across multiple repositories is also easy:
```
(repo:hubturbo/hubturbo | repo:teammates/repo) updated:2 ~is:open is:issue
```
The repository qualifiers need to be parenthesised because leaving out an operator implicitly inserts an AND there, and AND has a higher precedence than OR.
All this syntax is explained below!
## Key Differences
HubTurbo extends GitHub's search syntax in a number of ways.
- Boolean operators are supported for all qualifiers. `assignee:alice | assignee:bob` will show issues which match either qualifier in a panel.
- Parentheses can be used for grouping. `(assignee:alice || assignee:bob) && state:open` will show open issues assigned to either Alice or Bob.
- Quotation marks can additionally be used to specify search keywords containing spaces. For example, `"test case"` will match issues containing the string `test case`, space included.
- Additional qualifiers are available, for example `has` and `updated`.
A number of GitHub's qualifiers are not yet supported or [inapplicable](#incompatibilities).
## Operators
Logical operators (AND, OR, NOT) may be used to combine qualifiers. This can be used to compose complex expressions. If an operator is left out between two qualifiers, it's taken to be AND.
Operators may be written in any of the following forms:
- AND: `AND` `&&` `&`
- OR: `OR` `||` `|`
- NOT: `NOT` `!` `~` `-`
As in C-like languages, NOT is prefix, AND and OR are infix and left-associative, and precedence goes: NOT > AND > OR.
Note that operators are case sensitive: `AND` is a valid operator, but `and` is not.
<!-- To be enabled later
## Application
Predicates are useful for specifying the exact subset of labels to show in a panel. This admits a useful secondary function -- dragging an issue onto a panel will cause HubTurbo to automatically apply the attributes required to make it show up in that panel! In other words, the issue will be modified such that it will be matched by the filter of the target panel.
This will not work for ambiguous expressions (containing OR or NOT operators) and expressions containing predicates for which this does not make sense (`title`, `id`, `in`, `has`, etc.).
-->
## Qualifiers
- [`id`](#id)
- [`keyword`](#keyword)
- [`title`](#title)
- [`body`](#body)
- [`milestone`](#milestone)
- [`label`](#label)
- [`assignee`](#assignee)
- [`author`](#author)
- [`involves`](#involves)
- [`state`](#state)
- [`has`](#has)
- [`no`](#no)
- [`in`](#in)
- [`type`](#type)
- [`is`](#is)
- [`created`](#created)
- [`updated`](#updated)
- [`repo`](#repo)
- [`sort`](#sort)
#### Formats
- Dates are written as `YYYY-MM-DD`.
- Date ranges are written using a relational operator (e.g. `>=2014-1-1`) or as a range (e.g. `2014-1-1 .. 2014-2-1`).
- Numbers are assumed to be integers.
- Number ranges are written using a relational operator (e.g. `>5`, `<=10`).
- Repo ids are written as `owner/name`
- Sorting keys are written as a comma-separated list of possibly-negated keys. For example, `repo, ~updated, -comments`. See `sort` for more information.
### id
*Expects a number or number range*
Matches the issue with the given id number, or issues with ids in the given range.
### keyword
*Expects a string*
Matches all issues with text containing the given string. Same as not specifying a qualifier.
### title
*Expects a string*
Matches all issues with a title containing the given string.
### body
*Expects a string*
Matches all issues with a body (or description) containing the given string.
### milestone
*Expects a string*
Matches all issues associated with any milestones whose names contain the given string.
### label
*Expects a string in the form `name`, `group.name`, or `group.`*
Matches all issues with the given label. A group name may be used to qualify the label, in case labels of the same name appear in different groups. If only a group name is provided, matches issues containing any label in the given group.
### assignee
*Expects a string*
Matches all issues assigned to the given person, identified by the given alias, GitHub username, or real name, in that priority.
### author
*Expects a string*
Matches all issues created by the given person, identified by the given alias, GitHub username, or real name, in that priority.
### involves
*Expects a string*
Matches all issues involving (assigned to or created by) the given person, identified by the given alias, GitHub username, or real name, in that priority.
### state
*Expects one of `open` or `closed`*
Matches all issues of the given state.
### has
*Expects one of `label`, `milestone`, `assignee`*
Matches all issues possessing the given attribute.
### no
*Expects one of `label`, `milestone`, `assignee`*
The negation of `has`. Matches all issues without the given attribute.
### in
*Expects one of `title` or `body`*
Meta-qualifier. Changes the semantics of `keyword` so it only searches in either the title or body.
### type
*Expects one of `issue` or `pr`*
Matches issues of a given issue type. Pull requests are loosely considered issues in that the same operations work on both; this predicate allows users to distinguish them.
### is
*Expects one of `open`, `closed`, `pr`, `issue`, `merged`, `unmerged`, `read`, `unread`*
Matches issues which are either open or closed, a pull request or an issue, depending on their merged status if they are pull requests, or read or unread. Is partially an alias for `state` and `type`.
### created
*Expects a date or date range*
Matches issues which were created on a given date, or within a given date range.
### updated
*Expects a number or number range*
Matches issues which were updated in the given number of hours. For example, `updated:<24` would match issues updated in the last day. If a number `n` is given, it is implicitly translated to `<n`.
### repo
*Expects a repo id*
Matches issues of the given repository. If omitted, will match issues of the default repository instead (not all repositories).
### sort
*Expects a comma-separated list of sorting keys. For example, `repo, ~updated, -comments`. Any negation operator can be used to invert them. Keys can be any of `comments`, `repo`, `updated`, `date` (an alias for `updated`), `id`, `assignee` or a label group. Label groups can be disambiguated by appending a `.`.*
Sorts a repository by the list of keys, going from left to right. Negated keys will reverse the ordering that they describe.
## Additional features
HubTurbo automatically downloads detailed information about issues when the [`updated`](#updated) filter is specified, and then displays them within the issue cards in the form of a timeline.
When the [`updated`](#updated) filter is specified, the issues to be displayed are also automatically sorted by the latest *non-self update* i.e. the last time someone other than the currently logged-in user makes a change to the issue. This order can be explicitly overridden by specifying another sort order through the [`sort`](#sort) filter.
To use a reverse-non-self-update or combine sorting by non-self-update times with other sorting orders, use the `nonSelfUpdate` sorting key e.g. `sort:-nonSelfUpdate` or `sort:nonSelfUpdate,comments`
## Incompatibilities
HubTurbo's filter system is incompatible with GitHub's in a number of ways.
- GitHub's filter system is site-wide and so contains qualifiers for matching repositories, language, etc. HubTurbo's filters are for local use only; while `repo` applies, most other GitHub qualifiers do not:
+ `language`
+ `is:public|private` (repository)
+ `team`
- A number of qualifiers are not implemented because they would require as-yet unimplemented features in HubTurbo.
+ `in:comment`
+ `closed:[date range]`
+ `mentions:[string]`
+ `commenter:[string]`
|
Sumei1009/HubTurbo
|
docs/filters.md
|
Markdown
|
apache-2.0
| 9,854 |
/*===------- llvm/Config/llvm-config.h - llvm configuration -------*- C -*-===*/
/* */
/* The LLVM Compiler Infrastructure */
/* */
/* This file is distributed under the University of Illinois Open Source */
/* License. See LICENSE.TXT for details. */
/* */
/*===----------------------------------------------------------------------===*/
/* This file enumerates variables from the LLVM configuration so that they
can be in exported headers and won't override package specific directives.
This is a C header that can be included in the llvm-c headers. */
#ifndef LLVM_CONFIG_H
#define LLVM_CONFIG_H
/* Installation directory for binary executables */
/* #undef LLVM_BINDIR */
/* Time at which LLVM was configured */
/* #undef LLVM_CONFIGTIME */
/* Installation directory for data files */
/* #undef LLVM_DATADIR */
/* Target triple LLVM will generate code for by default */
#define LLVM_DEFAULT_TARGET_TRIPLE "i686-pc-win32"
/* Installation directory for documentation */
/* #undef LLVM_DOCSDIR */
/* Define if LLVM is built with asserts and checks that change the layout of
client-visible data structures. */
#define LLVM_ENABLE_ABI_BREAKING_CHECKS
/* Define if threads enabled */
#define LLVM_ENABLE_THREADS 1
/* Installation directory for config files */
/* #undef LLVM_ETCDIR */
/* Has gcc/MSVC atomic intrinsics */
#define LLVM_HAS_ATOMICS 1
/* Host triple LLVM will be executed on */
#define LLVM_HOST_TRIPLE "i686-pc-win32"
/* Installation directory for include files */
/* #undef LLVM_INCLUDEDIR */
/* Installation directory for .info files */
/* #undef LLVM_INFODIR */
/* Installation directory for man pages */
/* #undef LLVM_MANDIR */
/* LLVM architecture name for the native architecture, if available */
#define LLVM_NATIVE_ARCH X86
/* LLVM name for the native AsmParser init function, if available */
#define LLVM_NATIVE_ASMPARSER LLVMInitializeX86AsmParser
/* LLVM name for the native AsmPrinter init function, if available */
#define LLVM_NATIVE_ASMPRINTER LLVMInitializeX86AsmPrinter
/* LLVM name for the native Disassembler init function, if available */
#define LLVM_NATIVE_DISASSEMBLER LLVMInitializeX86Disassembler
/* LLVM name for the native Target init function, if available */
#define LLVM_NATIVE_TARGET LLVMInitializeX86Target
/* LLVM name for the native TargetInfo init function, if available */
#define LLVM_NATIVE_TARGETINFO LLVMInitializeX86TargetInfo
/* LLVM name for the native target MC init function, if available */
#define LLVM_NATIVE_TARGETMC LLVMInitializeX86TargetMC
/* Define if this is Unixish platform */
/* #undef LLVM_ON_UNIX */
/* Define if this is Win32ish platform */
#define LLVM_ON_WIN32 1
/* Installation prefix directory */
#define LLVM_PREFIX "C:/Program Files (x86)/LLVM"
/* Define if we have the Intel JIT API runtime support library */
/* #undef LLVM_USE_INTEL_JITEVENTS */
/* Define if we have the oprofile JIT-support library */
/* #undef LLVM_USE_OPROFILE */
/* Major version of the LLVM API */
#define LLVM_VERSION_MAJOR 3
/* Minor version of the LLVM API */
#define LLVM_VERSION_MINOR 8
/* Patch version of the LLVM API */
#define LLVM_VERSION_PATCH 0
/* LLVM version string */
#define LLVM_VERSION_STRING "3.8.0svn"
/* Define if we link Polly to the tools */
/* #undef LINK_POLLY_INTO_TOOLS */
#endif
|
Enseed/Reflect_build
|
build-clang/msvc/generated/include/llvm/Config/llvm-config.h
|
C
|
apache-2.0
| 3,614 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_72-internal) on Mon Mar 14 13:22:18 GMT 2016 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class org.apache.taverna.scufl2.xml.scufl.jaxb.OperationType (Apache Taverna Language APIs (Scufl2, Databundle) 0.15.1-incubating API)</title>
<meta name="date" content="2016-03-14">
<link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.taverna.scufl2.xml.scufl.jaxb.OperationType (Apache Taverna Language APIs (Scufl2, Databundle) 0.15.1-incubating API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/apache/taverna/scufl2/xml/scufl/jaxb/class-use/OperationType.html" target="_top">Frames</a></li>
<li><a href="OperationType.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.apache.taverna.scufl2.xml.scufl.jaxb.OperationType" class="title">Uses of Class<br>org.apache.taverna.scufl2.xml.scufl.jaxb.OperationType</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.apache.taverna.scufl2.xml.scufl.jaxb">org.apache.taverna.scufl2.xml.scufl.jaxb</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.apache.taverna.scufl2.xml.scufl.jaxb">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a> in <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/package-summary.html">org.apache.taverna.scufl2.xml.scufl.jaxb</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
<caption><span>Fields in <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/package-summary.html">org.apache.taverna.scufl2.xml.scufl.jaxb</a> declared as <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>protected <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a></code></td>
<td class="colLast"><span class="typeNameLabel">ArbitraryWsdlType.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/ArbitraryWsdlType.html#operation">operation</a></span></code> </td>
</tr>
</tbody>
</table>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/package-summary.html">org.apache.taverna.scufl2.xml.scufl.jaxb</a> that return <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a></code></td>
<td class="colLast"><span class="typeNameLabel">ObjectFactory.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/ObjectFactory.html#createOperationType--">createOperationType</a></span>()</code>
<div class="block">Create an instance of <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb"><code>OperationType</code></a></div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a></code></td>
<td class="colLast"><span class="typeNameLabel">ArbitraryWsdlType.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/ArbitraryWsdlType.html#getOperation--">getOperation</a></span>()</code>
<div class="block">Gets the value of the operation property.</div>
</td>
</tr>
</tbody>
</table>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/package-summary.html">org.apache.taverna.scufl2.xml.scufl.jaxb</a> that return types with arguments of type <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="http://docs.oracle.com/javase/7/docs/api/javax/xml/bind/JAXBElement.html?is-external=true" title="class or interface in javax.xml.bind">JAXBElement</a><<a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a>></code></td>
<td class="colLast"><span class="typeNameLabel">ObjectFactory.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/ObjectFactory.html#createOperation-org.apache.taverna.scufl2.xml.scufl.jaxb.OperationType-">createOperation</a></span>(<a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a> value)</code>
<div class="block">Create an instance of <a href="http://docs.oracle.com/javase/7/docs/api/javax/xml/bind/JAXBElement.html?is-external=true" title="class or interface in javax.xml.bind"><code>JAXBElement</code></a><code><</code><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb"><code>OperationType</code></a><code>></code>}</div>
</td>
</tr>
</tbody>
</table>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/package-summary.html">org.apache.taverna.scufl2.xml.scufl.jaxb</a> with parameters of type <a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="http://docs.oracle.com/javase/7/docs/api/javax/xml/bind/JAXBElement.html?is-external=true" title="class or interface in javax.xml.bind">JAXBElement</a><<a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a>></code></td>
<td class="colLast"><span class="typeNameLabel">ObjectFactory.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/ObjectFactory.html#createOperation-org.apache.taverna.scufl2.xml.scufl.jaxb.OperationType-">createOperation</a></span>(<a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a> value)</code>
<div class="block">Create an instance of <a href="http://docs.oracle.com/javase/7/docs/api/javax/xml/bind/JAXBElement.html?is-external=true" title="class or interface in javax.xml.bind"><code>JAXBElement</code></a><code><</code><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb"><code>OperationType</code></a><code>></code>}</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="typeNameLabel">ArbitraryWsdlType.</span><code><span class="memberNameLink"><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/ArbitraryWsdlType.html#setOperation-org.apache.taverna.scufl2.xml.scufl.jaxb.OperationType-">setOperation</a></span>(<a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">OperationType</a> value)</code>
<div class="block">Sets the value of the operation property.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../org/apache/taverna/scufl2/xml/scufl/jaxb/OperationType.html" title="class in org.apache.taverna.scufl2.xml.scufl.jaxb">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/apache/taverna/scufl2/xml/scufl/jaxb/class-use/OperationType.html" target="_top">Frames</a></li>
<li><a href="OperationType.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2015–2016 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
</body>
</html>
|
apache/incubator-taverna-site
|
content/javadoc/taverna-language/org/apache/taverna/scufl2/xml/scufl/jaxb/class-use/OperationType.html
|
HTML
|
apache-2.0
| 14,122 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""TFRecord sources and sinks."""
from __future__ import absolute_import
import logging
import struct
from apache_beam import coders
from apache_beam.io import filebasedsource
from apache_beam.io import fileio
from apache_beam.io.iobase import Read
from apache_beam.io.iobase import Write
from apache_beam.transforms import PTransform
import crcmod
__all__ = ['ReadFromTFRecord', 'WriteToTFRecord']
def _default_crc32c_fn(value):
  """Calculates crc32c by either snappy or crcmod based on installation.

  The concrete implementation is resolved lazily on the first call and
  cached on the function object (``_default_crc32c_fn.fn``), so the probe
  for python-snappy happens at most once per process.
  """
  crc_fn = _default_crc32c_fn.fn
  if crc_fn is None:
    try:
      import snappy  # pylint: disable=import-error
      # Prefer snappy's C-backed crc32c when the package is installed.
      crc_fn = snappy._crc32c  # pylint: disable=protected-access
    except ImportError:
      logging.warning('Couldn\'t find python-snappy so the implementation of '
                      '_TFRecordUtil._masked_crc32c is not as fast as it could '
                      'be.')
      # Fall back to crcmod's predefined crc-32c implementation.
      crc_fn = crcmod.predefined.mkPredefinedCrcFun('crc-32c')
    _default_crc32c_fn.fn = crc_fn
  return crc_fn(value)
_default_crc32c_fn.fn = None
class _TFRecordUtil(object):
  """Provides basic TFRecord encoding/decoding with consistency checks.

  For detailed TFRecord format description see:
    https://www.tensorflow.org/versions/master/api_docs/python/python_io.html#tfrecords-format-details

  Note that masks and length are represented in LittleEndian order.
  """

  @classmethod
  def _masked_crc32c(cls, value, crc32c_fn=_default_crc32c_fn):
    """Compute a masked crc32c checksum for a value.

    Args:
      value: A string for which we compute the crc.
      crc32c_fn: A function that can compute a crc32c.
        This is a performance hook that also helps with testing. Callers are
        not expected to make use of it directly.

    Returns:
      Masked crc32c checksum.
    """
    crc = crc32c_fn(value)
    # Rotate right by 15 bits, add a fixed masking constant, and keep the
    # result within 32 bits (matches the TFRecord masked-crc scheme).
    return (((crc >> 15) | (crc << 17)) + 0xa282ead8) & 0xffffffff

  @staticmethod
  def encoded_num_bytes(record):
    """Return the number of bytes consumed by a record in its encoded form."""
    # 16 = 8 (Length) + 4 (crc of length) + 4 (crc of data)
    return len(record) + 16

  @classmethod
  def write_record(cls, file_handle, value):
    """Encode a value as a TFRecord.

    Record layout: length (8 bytes, LE) | masked crc of length (4 bytes) |
    payload | masked crc of payload (4 bytes).

    Args:
      file_handle: The file to write to.
      value: A string content of the record.
    """
    encoded_length = struct.pack('<Q', len(value))
    file_handle.write('{}{}{}{}'.format(
        encoded_length,
        struct.pack('<I', cls._masked_crc32c(encoded_length)),  #
        value,
        struct.pack('<I', cls._masked_crc32c(value))))

  @classmethod
  def read_record(cls, file_handle):
    """Read a record from a TFRecords file.

    Args:
      file_handle: The file to read from.
    Returns:
      None if EOF is reached; the payload of the record otherwise.
    Raises:
      ValueError: If file appears to not be a valid TFRecords file.
    """
    # Header is length (8 bytes) plus the masked crc of the length (4 bytes).
    buf_length_expected = 12
    buf = file_handle.read(buf_length_expected)
    if not buf:
      return None  # EOF Reached.

    # Validate all length related payloads.
    if len(buf) != buf_length_expected:
      raise ValueError('Not a valid TFRecord. Fewer than %d bytes: %s' %
                       (buf_length_expected, buf.encode('hex')))
    length, length_mask_expected = struct.unpack('<QI', buf)
    length_mask_actual = cls._masked_crc32c(buf[:8])
    if length_mask_actual != length_mask_expected:
      raise ValueError('Not a valid TFRecord. Mismatch of length mask: %s' %
                       buf.encode('hex'))

    # Validate all data related payloads.
    buf_length_expected = length + 4
    buf = file_handle.read(buf_length_expected)
    if len(buf) != buf_length_expected:
      raise ValueError('Not a valid TFRecord. Fewer than %d bytes: %s' %
                       (buf_length_expected, buf.encode('hex')))
    data, data_mask_expected = struct.unpack('<%dsI' % length, buf)
    data_mask_actual = cls._masked_crc32c(data)
    if data_mask_actual != data_mask_expected:
      raise ValueError('Not a valid TFRecord. Mismatch of data mask: %s' %
                       buf.encode('hex'))

    # All validation checks passed.
    return data
class _TFRecordSource(filebasedsource.FileBasedSource):
  """A File source for reading files of TFRecords.

  For detailed TFRecords format description see:
    https://www.tensorflow.org/versions/master/api_docs/python/python_io.html#tfrecords-format-details
  """

  def __init__(self,
               file_pattern,
               coder,
               compression_type,
               validate):
    """Initialize a TFRecordSource.  See ReadFromTFRecord for details."""
    super(_TFRecordSource, self).__init__(
        file_pattern=file_pattern,
        compression_type=compression_type,
        # Each matched file is read sequentially as one unsplittable range.
        splittable=False,
        validate=validate)
    self._coder = coder

  def read_records(self, file_name, offset_range_tracker):
    """Yields decoded records from one file, claiming offsets as it goes."""
    # Because the source is declared unsplittable, the tracker must always
    # start at offset 0.
    if offset_range_tracker.start_position():
      raise ValueError('Start position not 0:%s' %
                       offset_range_tracker.start_position())

    current_offset = offset_range_tracker.start_position()
    with self.open_file(file_name) as file_handle:
      while True:
        if not offset_range_tracker.try_claim(current_offset):
          raise RuntimeError('Unable to claim position: %s' % current_offset)
        record = _TFRecordUtil.read_record(file_handle)
        if record is None:
          return  # Reached EOF
        else:
          # Advance by the framed (on-disk) size of the record, not the
          # payload size, so offsets track actual file positions.
          current_offset += _TFRecordUtil.encoded_num_bytes(record)
          yield self._coder.decode(record)
class ReadFromTFRecord(PTransform):
  """Transform for reading TFRecord sources."""

  def __init__(self,
               file_pattern,
               coder=coders.BytesCoder(),
               compression_type=fileio.CompressionTypes.AUTO,
               validate=True,
               **kwargs):
    """Initialize a ReadFromTFRecord transform.

    Args:
      file_pattern: A file glob pattern to read TFRecords from.
      coder: Coder used to decode each record.
      compression_type: Used to handle compressed input files. Default value
        is CompressionTypes.AUTO, in which case the file_path's extension will
        be used to detect the compression.
      validate: Boolean flag to verify that the files exist during the
        pipeline creation time.
      **kwargs: optional args dictionary. These are passed through to parent
        constructor.

    Returns:
      A ReadFromTFRecord transform object.
    """
    super(ReadFromTFRecord, self).__init__(**kwargs)
    # The source itself is constructed in expand(); only the constructor
    # arguments are captured here.
    self._args = (file_pattern, coder, compression_type, validate)

  def expand(self, pvalue):
    file_pattern, coder, compression_type, validate = self._args
    source = _TFRecordSource(file_pattern, coder, compression_type, validate)
    return pvalue.pipeline | Read(source)
class _TFRecordSink(fileio.FileSink):
  """Sink for writing TFRecords files.

  For detailed TFRecord format description see:
    https://www.tensorflow.org/versions/master/api_docs/python/python_io.html#tfrecords-format-details
  """

  def __init__(self, file_path_prefix, coder, file_name_suffix, num_shards,
               shard_name_template, compression_type):
    """Initialize a TFRecordSink. See WriteToTFRecord for details."""
    super(_TFRecordSink, self).__init__(
        file_path_prefix=file_path_prefix,
        coder=coder,
        file_name_suffix=file_name_suffix,
        num_shards=num_shards,
        shard_name_template=shard_name_template,
        # TFRecord output is binary data, hence the octet-stream mime type.
        mime_type='application/octet-stream',
        compression_type=compression_type)

  def write_encoded_record(self, file_handle, value):
    """Writes a single already-encoded record with TFRecord framing."""
    _TFRecordUtil.write_record(file_handle, value)
class WriteToTFRecord(PTransform):
  """Transform for writing to TFRecord sinks."""

  def __init__(self,
               file_path_prefix,
               coder=coders.BytesCoder(),
               file_name_suffix='',
               num_shards=0,
               shard_name_template=fileio.DEFAULT_SHARD_NAME_TEMPLATE,
               compression_type=fileio.CompressionTypes.AUTO,
               **kwargs):
    """Initialize WriteToTFRecord transform.

    Args:
      file_path_prefix: The file path to write to. The files written will
        begin with this prefix, followed by a shard identifier (see
        num_shards), and end in a common extension, if given by
        file_name_suffix.
      coder: Coder used to encode each record.
      file_name_suffix: Suffix for the files written.
      num_shards: The number of files (shards) used for output. If not set,
        the default value will be used.
      shard_name_template: A template string containing placeholders for
        the shard number and shard count. Currently only '' and
        '-SSSSS-of-NNNNN' are patterns allowed.
        When constructing a filename for a particular shard number, the
        upper-case letters 'S' and 'N' are replaced with the 0-padded shard
        number and shard count respectively. This argument can be '' in which
        case it behaves as if num_shards was set to 1 and only one file will
        be generated. The default pattern is '-SSSSS-of-NNNNN'.
      compression_type: Used to handle compressed output files. Typical value
        is CompressionTypes.AUTO, in which case the file_path's extension
        will be used to detect the compression.
      **kwargs: Optional args dictionary. These are passed through to parent
        constructor.

    Returns:
      A WriteToTFRecord transform object.
    """
    super(WriteToTFRecord, self).__init__(**kwargs)
    # The sink itself is constructed in expand(); only the constructor
    # arguments are captured here.
    self._args = (file_path_prefix, coder, file_name_suffix, num_shards,
                  shard_name_template, compression_type)

  def expand(self, pcoll):
    (file_path_prefix, coder, file_name_suffix, num_shards,
     shard_name_template, compression_type) = self._args
    sink = _TFRecordSink(file_path_prefix, coder, file_name_suffix,
                         num_shards, shard_name_template, compression_type)
    return pcoll | Write(sink)
|
chamikaramj/incubator-beam
|
sdks/python/apache_beam/io/tfrecordio.py
|
Python
|
apache-2.0
| 10,445 |
#!/bin/bash
# Windows Azure OS X Package: Create packages script
# Copyright (C) 2012 Microsoft Corporation. All Rights Reserved.
#
# This builds the package as well as prepares the tarball file, etc.
# This script is only used at build time, it is not part of the package.
#
# NOTE: the shebang must be bash, not sh -- pushd/popd used below are bash
# builtins and are not available in POSIX sh.

# Abort on the first failing command so a broken step (npm, tarball, cp,
# PackageMaker) cannot silently produce a partial or corrupt package.
set -e

CURRENT_NODE_DISTRIBUTION_VERSION=v0.6.17

# Check for Apple's PackageMaker
# ------------------------------
if [ ! -f /Applications/Utilities/PackageMaker.app/Contents/MacOS/PackageMaker ]; then
    echo PackageMaker needs to be installed in the Utilities folder on your Mac.
    echo If you do not yet have PackageMaker, please download it from the Apple Dev Center.
    echo
    echo If you need to download it:
    echo open http://adcdownload.apple.com/Developer_Tools/auxiliary_tools_for_xcode__february_2012/auxiliary_tools_for_xcode.dmg
    echo
    echo If you already have it, just drag it into the Utilities folder since this is hard-coded in the script.
    echo
    exit 1
fi

# Node.js validation
# ------------------
if [ ! -f /usr/local/bin/node ]; then
    echo Node.js is not installed on this machine.
    echo Please download and install it from http://nodejs.org/
    open http://nodejs.org/
    exit 1
fi

export NODE_VERSION=`/usr/local/bin/node -v`
echo The current Node.js version we are shipping is $CURRENT_NODE_DISTRIBUTION_VERSION

# Refuse to package a Node.js binary other than the pinned distribution version.
if [ ! "$NODE_VERSION" = "$CURRENT_NODE_DISTRIBUTION_VERSION" ]; then
    echo Your Node.js version $NODE_VERSION does not match the version to distribute.
    echo Aborting package preparation.
    exit 1
fi

# Ensure that all modules are present
# -----------------------------------
pushd ../../
echo Running npm update to make sure that all modules are present locally...
npm update
popd

# Tarball creation
# ----------------
scripts/createTarball.sh

# Node.js binary
# --------------
# Copy the OS node into our local out folder for packaging
cp /usr/local/bin/node out/
echo Copied your local Node.js binary version $NODE_VERSION into the output folder

# OS X Package creation
# ---------------------
echo Building "Windows Azure SDK.pkg"
/Applications/Utilities/PackageMaker.app/Contents/MacOS/PackageMaker --doc sdk.pmdoc --out "./out/Windows Azure SDK.pkg"

echo
echo The package has been built and can be found in the ./out/ folder.
open ./out
|
egamma/azure-sdk-for-node
|
tools/osx-setup/build.sh
|
Shell
|
apache-2.0
| 2,249 |
package daemon
import (
"fmt"
"sync"
"time"
"github.com/docker/docker/pkg/units"
)
// State tracks the runtime status of a container. All fields are guarded
// by the embedded mutex; use the Get*/Is*/Set* accessors from outside.
type State struct {
	sync.Mutex
	Running    bool
	Paused     bool
	Restarting bool
	Pid        int       // pid of the container process while running
	ExitCode   int       // exit code recorded by the last stop
	StartedAt  time.Time // UTC time of the last start
	FinishedAt time.Time // UTC time of the last stop/restart
	// waitChan is closed (and then replaced) on every start/stop/restart
	// transition to wake up WaitRunning/WaitStop callers.
	waitChan chan struct{}
}

// NewState returns a stopped State with a fresh wait channel.
func NewState() *State {
	return &State{
		waitChan: make(chan struct{}),
	}
}
// String returns a human-readable description of the state.
func (s *State) String() string {
	switch {
	case s.Running && s.Paused:
		return fmt.Sprintf("Up %s (Paused)", units.HumanDuration(time.Now().UTC().Sub(s.StartedAt)))
	case s.Running && s.Restarting:
		return fmt.Sprintf("Restarting (%d) %s ago", s.ExitCode, units.HumanDuration(time.Now().UTC().Sub(s.FinishedAt)))
	case s.Running:
		return fmt.Sprintf("Up %s", units.HumanDuration(time.Now().UTC().Sub(s.StartedAt)))
	case s.FinishedAt.IsZero():
		// Never started: no status to report.
		return ""
	default:
		return fmt.Sprintf("Exited (%d) %s ago", s.ExitCode, units.HumanDuration(time.Now().UTC().Sub(s.FinishedAt)))
	}
}
func wait(waitChan <-chan struct{}, timeout time.Duration) error {
if timeout < 0 {
<-waitChan
return nil
}
select {
case <-time.After(timeout):
return fmt.Errorf("Timed out: %v", timeout)
case <-waitChan:
return nil
}
}
// WaitRunning waits until state is running. If state already running it returns
// immediately. If you want wait forever you must supply negative timeout.
// Returns pid, that was passed to SetRunning
func (s *State) WaitRunning(timeout time.Duration) (int, error) {
	s.Lock()
	if s.Running {
		pid := s.Pid
		s.Unlock()
		return pid, nil
	}
	// Snapshot the channel while holding the lock: the setters replace
	// s.waitChan after closing it, so we must wait on this instance.
	waitChan := s.waitChan
	s.Unlock()
	if err := wait(waitChan, timeout); err != nil {
		return -1, err
	}
	return s.GetPid(), nil
}

// WaitStop waits until state is stopped. If state already stopped it returns
// immediately. If you want wait forever you must supply negative timeout.
// Returns exit code, that was passed to SetStopped
func (s *State) WaitStop(timeout time.Duration) (int, error) {
	s.Lock()
	if !s.Running {
		exitCode := s.ExitCode
		s.Unlock()
		return exitCode, nil
	}
	// Same channel-snapshot pattern as WaitRunning.
	waitChan := s.waitChan
	s.Unlock()
	if err := wait(waitChan, timeout); err != nil {
		return -1, err
	}
	return s.GetExitCode(), nil
}
// IsRunning reports whether the container is currently running.
func (s *State) IsRunning() bool {
	s.Lock()
	defer s.Unlock()
	return s.Running
}

// GetPid returns the pid recorded by the last SetRunning.
func (s *State) GetPid() int {
	s.Lock()
	defer s.Unlock()
	return s.Pid
}

// GetExitCode returns the exit code recorded by the last stop.
func (s *State) GetExitCode() int {
	s.Lock()
	defer s.Unlock()
	return s.ExitCode
}
// SetRunning marks the container as running with the given pid.
func (s *State) SetRunning(pid int) {
	s.Lock()
	s.setRunning(pid)
	s.Unlock()
}

// setRunning updates the fields for a freshly started process.
// Callers must hold s.Mutex.
func (s *State) setRunning(pid int) {
	s.Running = true
	s.Paused = false
	s.Restarting = false
	s.ExitCode = 0
	s.Pid = pid
	s.StartedAt = time.Now().UTC()
	close(s.waitChan) // fire waiters for start
	// Replace the channel so future waiters block until the next event.
	s.waitChan = make(chan struct{})
}

// SetStopped marks the container as stopped with the given exit code.
func (s *State) SetStopped(exitCode int) {
	s.Lock()
	s.setStopped(exitCode)
	s.Unlock()
}

// setStopped updates the fields for a finished process.
// Callers must hold s.Mutex.
func (s *State) setStopped(exitCode int) {
	s.Running = false
	s.Restarting = false
	s.Pid = 0
	s.FinishedAt = time.Now().UTC()
	s.ExitCode = exitCode
	close(s.waitChan) // fire waiters for stop
	s.waitChan = make(chan struct{})
}

// SetRestarting is when docker handles the auto restart of containers when they are
// in the middle of a stop and being restarted again
func (s *State) SetRestarting(exitCode int) {
	s.Lock()
	// we should consider the container running when it is restarting because of
	// all the checks in docker around rm/stop/etc
	s.Running = true
	s.Restarting = true
	s.Pid = 0
	s.FinishedAt = time.Now().UTC()
	s.ExitCode = exitCode
	close(s.waitChan) // fire waiters for stop
	s.waitChan = make(chan struct{})
	s.Unlock()
}
// IsRestarting reports whether the container is in the restarting state.
func (s *State) IsRestarting() bool {
	s.Lock()
	defer s.Unlock()
	return s.Restarting
}

// SetPaused marks the container as paused.
func (s *State) SetPaused() {
	s.Lock()
	defer s.Unlock()
	s.Paused = true
}

// SetUnpaused clears the paused flag.
func (s *State) SetUnpaused() {
	s.Lock()
	defer s.Unlock()
	s.Paused = false
}

// IsPaused reports whether the container is paused.
func (s *State) IsPaused() bool {
	s.Lock()
	defer s.Unlock()
	return s.Paused
}
|
mwhudson/docker
|
daemon/state.go
|
GO
|
apache-2.0
| 3,937 |
/*
* Copyright 2012-2016 bambooCORE, greenstep of copyright Chen Xin Nien
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* -----------------------------------------------------------------------
*
* author: Chen Xin Nien
* contact: [email protected]
*
*/
package com.netsteadfast.greenstep.qcharts.action.utils;
import org.apache.commons.lang3.StringUtils;
import com.netsteadfast.greenstep.base.Constants;
import com.netsteadfast.greenstep.base.exception.ControllerException;
import com.netsteadfast.greenstep.base.model.IActionFieldsCheckUtils;
public class SelectItemFieldCheckUtils implements IActionFieldsCheckUtils {

	/**
	 * A value passes the check only when it is non-blank and is not the
	 * "no selection" placeholder id of an HTML select element.
	 */
	@Override
	public boolean check(String value) throws ControllerException {
		return !StringUtils.isBlank(value)
				&& !Constants.HTML_SELECT_NO_SELECT_ID.equals(value);
	}

}
|
quangnguyen9x/bamboobsc_quangnv
|
qcharts-web/src/com/netsteadfast/greenstep/qcharts/action/utils/SelectItemFieldCheckUtils.java
|
Java
|
apache-2.0
| 1,369 |
import scala.language.existentials
import scala.reflect.runtime.universe._
import internal._
// Exercises runtime reflection (TypeTag/WeakTypeTag) over types that are
// local, anonymous, or existential. Each f*/g* pair differs only in being
// a def (fresh local class per call) vs a val (single local class).
// NOTE: the exact shape of each definition is the fixture under test —
// do not "simplify" these bodies.
object Test {
  // Mix-in that makes every instance print its (possibly synthetic) class name.
  trait ToS { final override def toString = getClass.getName }

  def f1 = { case class Bar() extends ToS; Bar }
  def f2 = { case class Bar() extends ToS; Bar() }
  def f3 = { class Bar() extends ToS; object Bar extends ToS; Bar }
  def f4 = { class Bar() extends ToS; new Bar() }
  def f5 = { object Bar extends ToS; Bar }
  def f6 = { () => { object Bar extends ToS ; Bar } }
  def f7 = { val f = { () => { object Bar extends ToS ; Bar } } ; f }

  def f8 = { trait A ; trait B extends A ; class C extends B with ToS; new C { } }
  def f9 = { trait A ; trait B ; class C extends B with A with ToS; new C { } }

  def f10 = { class A { type T1 } ; List[A#T1]() }
  def f11 = { abstract class A extends Seq[Int] ; List[A]() }
  def f12 = { abstract class A extends Seq[U forSome { type U <: Int }] ; List[A]() }

  val g1 = { case class Bar() extends ToS; Bar }
  val g2 = { case class Bar() extends ToS; Bar() }
  val g3 = { class Bar() extends ToS; object Bar extends ToS; Bar }
  val g4 = { class Bar() extends ToS; new Bar() }
  val g5 = { object Bar extends ToS; Bar }
  val g6 = { () => { object Bar extends ToS ; Bar } }
  val g7 = { val f = { () => { object Bar extends ToS ; Bar } } ; f }

  val g8 = { trait A ; trait B extends A ; class C extends B with ToS; new C { } }
  val g9 = { trait A ; trait B ; class C extends B with A with ToS; new C { } }

  val g10 = { class A { type T1 } ; List[A#T1]() }
  val g11 = { abstract class A extends Seq[Int] ; List[A]() }
  val g12 = { abstract class A extends Seq[U forSome { type U <: Int }] ; List[A]() }

  // Prints the type plus either the free-type's info or the symbol itself.
  def printTpe(t: Type) = {
    val s = if (isFreeType(t.typeSymbol)) t.typeSymbol.info.toString else t.typeSymbol.toString
    println("%s, t=%s, s=%s".format(t, t.asInstanceOf[Product].productPrefix, s))
  }
  // m requires a full TypeTag; m2 accepts the weaker WeakTypeTag, needed
  // where the type contains unresolvable local references.
  def m[T: TypeTag](x: T) = printTpe(typeOf[T])
  def m2[T: WeakTypeTag](x: T) = printTpe(implicitly[WeakTypeTag[T]].tpe)

  // tags do work for f10/g10
  def main(args: Array[String]): Unit = {
    m2(f1)
    m2(f2)
    m(f3)
    m(f4)
    m(f5)
    m(f6)
    m(f7)
    m2(f8)
    m2(f9)
    m2(f10)
    m(f11)
    m(f12)
    m2(g1)
    m2(g2)
    m(g3)
    m(g4)
    m(g5)
    m(g6)
    m(g7)
    m2(g8)
    m2(g9)
    m2(g10)
    m(g11)
    m(g12)
  }
}
// Additional existential-type shapes kept around by the test.
object Misc {
  trait Bippy { def bippy = "I'm Bippy!" }
  object o1 {
    // Existential element type bounded by Bippy; returns null typed as C.
    def f1 = { trait A extends Seq[U forSome { type U <: Bippy }] ; abstract class B extends A ; trait C extends B ; (null: C) }
    // NOTE(review): f2 would dereference the null from f1 if ever invoked;
    // here only its type matters (it is never called by main).
    def f2 = f1.head.bippy
  }
  def g1 = () => o1.f1
  def g2 = () => o1.f2
}
|
scala/scala
|
test/files/run/existentials3-new.scala
|
Scala
|
apache-2.0
| 2,619 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.mskcc.shenkers.control.track;
/**
*
* @author sol
*/
/**
 * File formats recognised when loading a genome-browser track.
 * Do not reorder: consumers may rely on ordinal values.
 */
public enum FileType {
    BAM,   // binary sequence alignments
    WIG,   // wiggle signal/coverage data
    GTF,   // gene transfer format annotations
    BED,   // browser extensible data intervals
    FASTA  // raw nucleotide/protein sequences
}
|
shenkers/CrossBrowse
|
src/main/java/org/mskcc/shenkers/control/track/FileType.java
|
Java
|
apache-2.0
| 326 |
<?php
// Bootstraps the shared database connection for the admin pages.
require_once("../config/connect.php");
?>
|
Elimean/DMXT
|
admin/manager_admin_classManager_Updata.php
|
PHP
|
apache-2.0
| 50 |
import React from 'react';
import { Box, WorldMap } from 'grommet';
export const SelectPlace = () => {
const [places, setPlaces] = React.useState();
const onSelectPlace = (place) => {
console.log('Selected', place);
setPlaces([{ color: 'graph-1', location: place }]);
};
return (
<Box align="center" pad="large">
<WorldMap onSelectPlace={onSelectPlace} places={places} />
</Box>
);
};
SelectPlace.storyName = 'Select place';
SelectPlace.parameters = {
chromatic: { disable: true },
};
export default {
title: 'Visualizations/WorldMap/Select place',
};
|
HewlettPackard/grommet
|
src/js/components/WorldMap/stories/SelectPlace.js
|
JavaScript
|
apache-2.0
| 595 |
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.openregistry.core.domain;
import java.io.Serializable;
/**
* @author Scott Battaglia
* @version $Revision$ $Date$
* @since 1.0.0
*/
public interface OrganizationalUnit extends Serializable {

    /** @return the persistent identifier of this organizational unit. */
    Long getId();

    /** @return the type classifying this organizational unit. */
    Type getOrganizationalUnitType();

    /** @return the institution-local code of this unit. */
    String getLocalCode();

    /** @return the display name of this unit. */
    String getName();

    /** @return the campus this unit belongs to. */
    Campus getCampus();

    /** @return the parent unit in the organizational hierarchy, if any. */
    OrganizationalUnit getParentOrganizationalUnit();

    // NOTE(review): the semantics of the RBHS and PHI values are not visible
    // here; presumably institution-specific markers — confirm with callers.

    String getRBHS();

    void setRBHS(String RBHS);

    String getPHI();

    void setPHI(String PHI);
}
|
sheliu/openregistry
|
openregistry-api/src/main/java/org/openregistry/core/domain/OrganizationalUnit.java
|
Java
|
apache-2.0
| 1,279 |
package com.github.czyzby.lml.parser.impl.attribute.list;
import com.badlogic.gdx.scenes.scene2d.ui.List;
import com.github.czyzby.lml.parser.LmlParser;
import com.github.czyzby.lml.parser.tag.LmlAttribute;
import com.github.czyzby.lml.parser.tag.LmlTag;
/** See {@link com.badlogic.gdx.scenes.scene2d.utils.Selection#setRequired(boolean)}. Mapped to "required".
*
* @author MJ */
public class RequiredLmlAttribute implements LmlAttribute<List<?>> {
    @Override
    @SuppressWarnings("unchecked")
    public Class<List<?>> getHandledType() {
        // The intermediate Object cast works around the compiler rejecting a
        // direct cast to Class<List<?>>; it is safe at runtime because
        // generic type arguments are erased.
        final Object rawType = List.class;
        return (Class<List<?>>) rawType;
    }

    @Override
    public void process(final LmlParser parser, final LmlTag tag, final List<?> actor, final String rawAttributeData) {
        // "required" selections refuse to become empty once an item is chosen.
        final boolean required = parser.parseBoolean(rawAttributeData, actor);
        actor.getSelection().setRequired(required);
    }
}
|
tommyettinger/SquidSetup
|
src/main/java/com/github/czyzby/lml/parser/impl/attribute/list/RequiredLmlAttribute.java
|
Java
|
apache-2.0
| 1,036 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.portal.acting;
import java.util.Map;
import org.apache.avalon.framework.configuration.ConfigurationException;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Redirector;
import org.apache.cocoon.environment.SourceResolver;
import org.apache.cocoon.portal.event.Event;
import org.apache.cocoon.portal.event.EventManager;
import org.apache.cocoon.portal.event.coplet.CopletJXPathEvent;
import org.apache.cocoon.portal.sitemap.Constants;
/**
* Using this action, you can set values in a coplet.
*
* @version $Id$
*/
public class CopletSetDataAction
    extends AbstractPortalAction {

    /**
     * Sets values on a coplet instance. Each sitemap parameter is treated as
     * a (JXPath, value) pair; blank values are skipped. The coplet id is
     * taken from the parent context when present, otherwise from the object
     * model itself.
     *
     * @see org.apache.cocoon.acting.Action#act(org.apache.cocoon.environment.Redirector, org.apache.cocoon.environment.SourceResolver, java.util.Map, java.lang.String, org.apache.avalon.framework.parameters.Parameters)
     */
    public Map act(Redirector redirector, SourceResolver resolver, Map objectModel, String source, Parameters parameters)
    throws Exception {
        // Determine the coplet id: prefer the parent context over the object model.
        final Map context = (Map) objectModel.get(ObjectModelHelper.PARENT_CONTEXT);
        final String copletId = (context != null)
                ? (String) context.get(Constants.COPLET_ID_KEY)
                : (String) objectModel.get(Constants.COPLET_ID_KEY);
        if (copletId == null) {
            throw new ConfigurationException("copletId must be passed in the object model either directly (e.g. by using ObjectModelAction) or within the parent context.");
        }

        // Publish one CopletJXPathEvent per non-empty parameter value.
        final String[] names = parameters.getNames();
        if (names != null) {
            final EventManager publisher = this.portalService.getEventManager();
            for (int i = 0; i < names.length; i++) {
                final String path = names[i];
                final String value = parameters.getParameter(path, null);
                if (value != null && value.trim().length() > 0) {
                    publisher.send(new CopletJXPathEvent(
                            this.portalService.getProfileManager().getCopletInstance(copletId),
                            path,
                            value));
                }
            }
        }
        return EMPTY_MAP;
    }
}
|
apache/cocoon
|
blocks/cocoon-portal/cocoon-portal-sitemap/src/main/java/org/apache/cocoon/portal/acting/CopletSetDataAction.java
|
Java
|
apache-2.0
| 3,337 |
<?php
// Callback endpoint: records the delivery status of a sent message.
// Expects POST params: id (sent_messageid), status, and failureReason when
// status is "Failed" or "Rejected". Echoes '1' on success, '0' on failure;
// on a PDO exception the error message is echoed before the '0'.
include('dao.php');
global $dbh;
$dbh->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);

$data = '';
$id = $_POST['id'];
$status = $_POST['status'];

// Failure-like statuses also persist the failure reason; the two former
// copy-pasted branches are merged, behavior unchanged.
if ($status == "Failed" || $status == "Rejected") {
    $failureReason = $_POST['failureReason'];
    $sql = $dbh->prepare("UPDATE tbl_totalsentmessages SET sent_status = :status, failure_reason=:reason WHERE sent_messageid = :id");
    $sql->bindParam(":reason", $failureReason);
} else {
    $sql = $dbh->prepare("UPDATE tbl_totalsentmessages SET sent_status = :status WHERE sent_messageid = :id");
}
$sql->bindParam(":status", $status);
$sql->bindParam(":id", $id);

try {
    $data = $sql->execute();
} catch(PDOException $e) {
    echo $e->getMessage();
}

echo $data ? '1' : '0';
?>
chebryan/Qsoft-Admin
|
core/callback_url.php
|
PHP
|
apache-2.0
| 1,383 |
/*!
* ${copyright}
*/
// Provides the Design Time Metadata for the sap.m.Slider control
sap.ui.define([],
	function () {
	"use strict";

	// Design-time metadata consumed by SAPUI5 design tools.
	return {
		// Translatable resource keys for the control's display name.
		name: {
			singular: "SLIDER_NAME",
			plural: "SLIDER_NAME_PLURAL"
		},
		// Palette placement and icon shown in the tool's control palette.
		palette: {
			group: "INPUT",
			icons: {
				svg: "sap/m/designtime/Slider.icon.svg"
			}
		},
		// Supported design-time actions, mapped to flexibility change types.
		actions: {
			remove: {
				changeType: "hideControl"
			},
			reveal: {
				changeType: "unhideControl"
			}
		},
		aggregations: {
			// DOM anchor used to select the tickmark scale in the editor.
			scale: {
				domRef: ":sap-domref > .sapMSliderTickmarks"
			},
			// Custom tooltips are not editable at design time.
			customTooltips: {
				ignore: true
			}
		},
		// XML fragment instantiated when the control is created from the palette.
		templates: {
			create: "sap/m/designtime/Slider.create.fragment.xml"
		}
	};
}, /* bExport= */ true);
|
SAP/openui5
|
src/sap.m/src/sap/m/designtime/Slider.designtime.js
|
JavaScript
|
apache-2.0
| 709 |
# ThriftMux Protocol (experimental)
> This config routes thriftmux from port 4400 to port 5005.
```yaml
routers:
- protocol: thriftmux
experimental: true
label: port-shifter
dtab: |
/svc => /$/inet/127.1/5005;
servers:
- port: 4400
ip: 0.0.0.0
```
protocol: `thriftmux`
Linkerd _experimentally_ supports the thriftmux protocol.
The thriftmux protocol is capable of routing traffic to a pure thrift service and
will use the [Thrift](https://twitter.github.io/finagle/guide/Protocols.html#thrift) protocol on the client.
Protocol configuration uses the same parameters as
[Thrift protocol](https://linkerd.io/config/head/linkerd#thrift-protocol).
## ThriftMux Router Parameters
See [Thrift Router Parameters](https://linkerd.io/config/head/linkerd#thrift-router-parameters)
## ThriftMux Server Parameters
See [Thrift Server Parameters](https://linkerd.io/config/head/linkerd#thrift-server-parameters)
## ThriftMux Client Parameters
See [Thrift Client Parameters](https://linkerd.io/config/head/linkerd#thrift-client-parameters)
|
linkerd/linkerd
|
linkerd/docs/protocol-thriftmux.md
|
Markdown
|
apache-2.0
| 1,046 |
/* Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#pragma once
#include <string>
#include <unordered_set>
#include "paddle/fluid/framework/ir/pass.h"
namespace paddle {
namespace framework {
namespace ir {
// Graph pass that rewrites composite ops into simpler basic ops;
// currently handles dropout (see SimplifyDropout).
class SimplifyWithBasicOpsPass : public Pass {
 protected:
  // Entry point invoked by the pass framework; rewrites |graph| in place.
  void ApplyImpl(Graph* graph) const override;

 private:
  // Attempts to simplify the dropout node |n|; nodes to be removed are
  // collected into |del_node_set|. Returns true if the node was simplified.
  bool SimplifyDropout(Graph* graph, Node* n,
                       std::unordered_set<const Node*>* del_node_set) const;

  // Look up an op node's input/output variable node by parameter name.
  Node* GetInputVar(Node* n, const std::string& name) const;
  Node* GetOutputVar(Node* n, const std::string& name) const;

  // Rewire |op| so that |new_var| takes the place of |old_var| among its
  // inputs/outputs respectively.
  void ReplaceInputVar(Node* op, Node* old_var, Node* new_var) const;
  void ReplaceOutputVar(Node* op, Node* old_var, Node* new_var) const;
};
} // namespace ir
} // namespace framework
} // namespace paddle
|
chengduoZH/Paddle
|
paddle/fluid/framework/ir/simplify_with_basic_ops_pass.h
|
C
|
apache-2.0
| 1,331 |
// Copyright 2011 The Avalon Project Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0
// that can be found in the LICENSE file.
//
// SMS encoding/decoding functions, which are based on examples from:
// http://www.dreamfabric.com/sms/
#include "pdu.h"
#include <string.h>
#include <time.h>
enum {
	BITMASK_7BITS = 0x7F,       // low seven bits of an octet (one septet)
	BITMASK_8BITS = 0xFF,       // full octet
	BITMASK_HIGH_4BITS = 0xF0,  // high nibble
	BITMASK_LOW_4BITS = 0x0F,   // low nibble

	TYPE_OF_ADDRESS_INTERNATIONAL_PHONE = 0x91,  // type-of-address: international
	TYPE_OF_ADDRESS_NATIONAL_SUBSCRIBER = 0xC8,  // type-of-address: national subscriber

	SMS_DELIVER_ONE_MESSAGE = 0x04,  // flag checked in the SMS-DELIVER first octet
	SMS_SUBMIT              = 0x11,  // first octet written for SMS-SUBMIT TPDUs
	SMS_MAX_7BIT_TEXT_LENGTH = 160,  // max septets in a single uncompressed SMS
};
// Swap the two BCD digits packed in a byte and return the resulting
// decimal value (e.g. 0x12 -> 21).
static unsigned char
SwapDecimalNibble(const unsigned char x)
{
	return (x >> 4) + (x & 0x0F) * 10;
}
// Encode/Decode PDU: Translate ASCII 7bit characters to 8bit buffer.
// SMS encoding example from: http://www.dreamfabric.com/sms/.
//
// 7-bit ASCII: "hellohello"
// [0]:h [1]:e [2]:l [3]:l [4]:o [5]:h [6]:e [7]:l [8]:l [9]:o
// 1101000 1100101 1101100 1101100 1101111 1101000 1100101 1101100 1101100 1101111
// | ||| ||||| | ||||||| ||||||
// /-------------/ ///-------/// /////-///// \------------\ ||||||| \\\\\\ .
// | ||| ||||| | ||||||| ||||||
// input buffer position
// 10000000 22111111 33322222 44443333 55555333 66666655 77777776 98888888 --999999
// | ||| ||||| | ||||||| ||||||
// 8bit encoded buffer
// 11101000 00110010 10011011 11111101 01000110 10010111 11011001 11101100 00110111
// E8 32 9B FD 46 97 D9 EC 37
// Encode PDU message by merging 7 bit ASCII characters into 8 bit octets.
// Returns the number of octets written, or -1 if output_buffer is too small.
static int
EncodePDUMessage(const char* sms_text, int sms_text_length, unsigned char* output_buffer, int buffer_size)
{
	// Check if output buffer is big enough: n septets need ceil(7n/8) octets.
	if ((sms_text_length * 7 + 7) / 8 > buffer_size)
		return -1;

	int output_buffer_length = 0;
	int carry_on_bits = 1;
	int i = 0;
	for (; i < sms_text_length - 1; ++i) {
		// Pack the remaining bits of character i together with the low
		// bits of character i+1.
		output_buffer[output_buffer_length++] =
			((sms_text[i] & BITMASK_7BITS) >> (carry_on_bits - 1)) |
			((sms_text[i + 1] & BITMASK_7BITS) << (8 - carry_on_bits));
		carry_on_bits++;
		// Every 8th character is fully absorbed into the previous octet.
		if (carry_on_bits == 8) {
			carry_on_bits = 1;
			++i;
		}
	}

	// Emit the leftover bits of the final character, if any. The original
	// test was `i <= sms_text_length`, which is always true: for message
	// lengths that are a multiple of 8 it read the NUL terminator and
	// wrote one spurious octet past the computed required size.
	if (i < sms_text_length)
		output_buffer[output_buffer_length++] = (sms_text[i] & BITMASK_7BITS) >> (carry_on_bits - 1);

	return output_buffer_length;
}
// Decode PDU message by splitting 8 bit encoded buffer into 7 bit ASCII
// characters.
static int
DecodePDUMessage(const unsigned char* buffer, int buffer_length, char* output_sms_text, int sms_text_length)
{
	int output_text_length = 0;
	// The first septet is simply the low 7 bits of the first octet.
	if (buffer_length > 0)
		output_sms_text[output_text_length++] = BITMASK_7BITS & buffer[0];

	int carry_on_bits = 1;
	int i = 1;
	for (; i < buffer_length; ++i) {
		// Each character combines high bits of the previous octet with
		// low bits of the current one.
		output_sms_text[output_text_length++] = BITMASK_7BITS & ((buffer[i] << carry_on_bits) | (buffer[i - 1] >> (8 - carry_on_bits)));

		if (output_text_length == sms_text_length) break;

		carry_on_bits++;

		// Every 7 octets yield an extra full character from the carry bits.
		if (carry_on_bits == 8) {
			carry_on_bits = 1;
			// Safe: the break above guarantees output_text_length < sms_text_length here.
			output_sms_text[output_text_length++] = buffer[i] & BITMASK_7BITS;
			if (output_text_length == sms_text_length) break;
		}
	}
	if (output_text_length < sms_text_length)  // Add last remainder.
		output_sms_text[output_text_length++] = buffer[i - 1] >> (8 - carry_on_bits);

	return output_text_length;
}
// Encode a digit based phone number for SMS based format: swapped BCD
// nibbles, the high nibble of the final octet padded with 0xF for odd
// lengths. Returns the number of octets written, or -1 on error.
static int
EncodePhoneNumber(const char* phone_number, unsigned char* output_buffer, int buffer_size)
{
	int output_buffer_length = 0;
	const int phone_number_length = strlen(phone_number);

	// Check if the output buffer is big enough.
	if ((phone_number_length + 1) / 2 > buffer_size)
		return -1;

	int i = 0;
	for (; i < phone_number_length; ++i) {
		// Reject non-digit characters. The original condition used '&&'
		// (always false), so invalid input was silently encoded.
		if (phone_number[i] < '0' || phone_number[i] > '9')
			return -1;

		if (i % 2 == 0) {
			// Even digit: new octet, low nibble set, high nibble padded 0xF.
			output_buffer[output_buffer_length++] = BITMASK_HIGH_4BITS | (phone_number[i] - '0');
		} else {
			// Odd digit: overwrite the 0xF padding in the previous octet.
			output_buffer[output_buffer_length - 1] =
				(output_buffer[output_buffer_length - 1] & BITMASK_LOW_4BITS) |
				((phone_number[i] - '0') << 4);
		}
	}

	return output_buffer_length;
}
// Decode a digit based phone number for SMS based format (swapped BCD
// nibbles). Writes a NUL-terminated string and returns its length.
static int
DecodePhoneNumber(const unsigned char* buffer, int phone_number_length, char* output_phone_number)
{
	for (int i = 0; i < phone_number_length; ++i) {
		const unsigned char octet = buffer[i / 2];
		const unsigned char nibble = (i % 2 == 0)
			? (octet & BITMASK_LOW_4BITS)
			: ((octet & BITMASK_HIGH_4BITS) >> 4);
		output_phone_number[i] = nibble + '0';
	}
	output_phone_number[phone_number_length] = '\0';  // Terminate C string.
	return phone_number_length;
}
// Encode a SMS message to PDU.
// Layout: [SMSC length + address][SMS-SUBMIT octet][msg ref][dest address]
// [PID][DCS][validity][user-data length][packed 7-bit user data].
// Returns the total number of octets written, or -1 on error.
int
pdu_encode(const char* service_center_number, const char* phone_number, const char* sms_text,
	   unsigned char* output_buffer, int buffer_size)
{
	if (buffer_size < 2)
		return -1;

	int output_buffer_length = 0;

	// 1. Set SMS center number.
	int length = 0;
	if (service_center_number && strlen(service_center_number) > 0) {
		output_buffer[1] = TYPE_OF_ADDRESS_INTERNATIONAL_PHONE;
		length = EncodePhoneNumber(service_center_number,
					   output_buffer + 2, buffer_size - 2);
		// Reject both an encoding failure and a length that cannot fit
		// in the single length octet. The original test used '&&',
		// which can never be true, so errors were silently ignored.
		if (length < 0 || length >= 254)
			return -1;
		length++;  // Add type of address.
	}
	output_buffer[0] = length;
	output_buffer_length = length + 1;
	if (output_buffer_length + 4 > buffer_size)
		return -1;  // Check if it has space for four more bytes.

	// 2. Set type of message.
	output_buffer[output_buffer_length++] = SMS_SUBMIT;
	output_buffer[output_buffer_length++] = 0x00;  // Message reference.

	// 3. Set phone number.
	output_buffer[output_buffer_length] = strlen(phone_number);
	output_buffer[output_buffer_length + 1] = TYPE_OF_ADDRESS_INTERNATIONAL_PHONE;
	length = EncodePhoneNumber(phone_number,
				   output_buffer + output_buffer_length + 2,
				   buffer_size - output_buffer_length - 2);
	if (length < 0)
		return -1;  // Propagate destination-number encoding failures.
	output_buffer_length += length + 2;
	if (output_buffer_length + 4 > buffer_size)
		return -1;  // Check if it has space for four more bytes.

	// 4. Protocol identifiers.
	output_buffer[output_buffer_length++] = 0x00;  // TP-PID: Protocol identifier.
	output_buffer[output_buffer_length++] = 0x00;  // TP-DCS: Data coding scheme.
	output_buffer[output_buffer_length++] = 0xB0;  // TP-VP: Validity: 10 days

	// 5. SMS message.
	const int sms_text_length = strlen(sms_text);
	if (sms_text_length > SMS_MAX_7BIT_TEXT_LENGTH)
		return -1;
	output_buffer[output_buffer_length++] = sms_text_length;
	length = EncodePDUMessage(sms_text, sms_text_length,
				  output_buffer + output_buffer_length,
				  buffer_size - output_buffer_length);
	if (length < 0)
		return -1;
	output_buffer_length += length;

	return output_buffer_length;
}
// Decode an SMS-DELIVER PDU into timestamp, sender number, and text.
// Returns the decoded text length, or -1 on malformed input or
// insufficient output buffer sizes.
int pdu_decode(const unsigned char* buffer, int buffer_length,
	       time_t* output_sms_time,
	       char* output_sender_phone_number, int sender_phone_number_size,
	       char* output_sms_text, int sms_text_size)
{
	if (buffer_length <= 0)
		return -1;

	// The PDU begins with the SMSC address block; skip past it.
	const int sms_deliver_start = 1 + buffer[0];
	if (sms_deliver_start + 1 > buffer_length) return -1;
	if ((buffer[sms_deliver_start] & SMS_DELIVER_ONE_MESSAGE) != SMS_DELIVER_ONE_MESSAGE) return -1;

	const int sender_number_length = buffer[sms_deliver_start + 1];
	if (sender_number_length + 1 > sender_phone_number_size) return -1;  // Buffer too small to hold decoded phone number.

	// const int sender_type_of_address = buffer[sms_deliver_start + 2];
	DecodePhoneNumber(buffer + sms_deliver_start + 3, sender_number_length, output_sender_phone_number);

	// Sender address occupies ceil(digits/2) octets.
	const int sms_pid_start = sms_deliver_start + 3 + (buffer[sms_deliver_start + 1] + 1) / 2;

	// Decode timestamp: 7 swapped-BCD octets (yy MM dd hh mm ss tz).
	struct tm sms_broken_time;
	sms_broken_time.tm_year = 100 + SwapDecimalNibble(buffer[sms_pid_start + 2]);
	sms_broken_time.tm_mon  = SwapDecimalNibble(buffer[sms_pid_start + 3]) - 1;
	sms_broken_time.tm_mday = SwapDecimalNibble(buffer[sms_pid_start + 4]);
	sms_broken_time.tm_hour = SwapDecimalNibble(buffer[sms_pid_start + 5]);
	sms_broken_time.tm_min  = SwapDecimalNibble(buffer[sms_pid_start + 6]);
	sms_broken_time.tm_sec  = SwapDecimalNibble(buffer[sms_pid_start + 7]);
	const char gmt_offset = SwapDecimalNibble(buffer[sms_pid_start + 8]);
	// GMT offset is expressed in 15 minutes increments.
	// NOTE(review): mktime interprets the broken-down time in the host's
	// local timezone, so the result also depends on the local TZ setting —
	// confirm this matches the callers' expectations.
	(*output_sms_time) = mktime(&sms_broken_time) - gmt_offset * 15 * 60;

	const int sms_start = sms_pid_start + 2 + 7;
	if (sms_start + 1 > buffer_length) return -1;  // Invalid input buffer.

	const int output_sms_text_length = buffer[sms_start];
	if (sms_text_size < output_sms_text_length) return -1;  // Cannot hold decoded buffer.

	const int decoded_sms_text_size = DecodePDUMessage(buffer + sms_start + 1, buffer_length - (sms_start + 1),
							   output_sms_text, output_sms_text_length);

	if (decoded_sms_text_size != output_sms_text_length) return -1;  // Decoder length is not as expected.

	// Add a C string end.
	if (output_sms_text_length < sms_text_size)
		output_sms_text[output_sms_text_length] = 0;
	else
		output_sms_text[sms_text_size-1] = 0;

	return output_sms_text_length;
}
|
denidoank/avalonsailing
|
sms/pdu.c
|
C
|
apache-2.0
| 9,295 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Microsoft.Scripting;
using Microsoft.Scripting.Runtime;
using Microsoft.Scripting.Utils;
using IronPython.Runtime;
using IronPython.Runtime.Operations;
using IronPython.Runtime.Types;
using System.Numerics;
[assembly: PythonModule("math", typeof(IronPython.Modules.PythonMath))]
namespace IronPython.Modules {
public static partial class PythonMath {
public const string __doc__ = "Provides common mathematical functions.";

// Mathematical constants exposed to Python code as math.pi / math.e.
public const double pi = Math.PI;
public const double e = Math.E;

// Multiply a value in degrees by this factor to obtain radians.
private const double degreesToRadians = Math.PI / 180.0;
// IEEE-754 double exponent bias minus one (0x3FE).
// NOTE(review): the usage site is not in view here — confirm intent there.
private const int Bias = 0x3FE;
/// <summary>Converts an angle given in radians to degrees.</summary>
public static double degrees(double radians) {
    double converted = radians / degreesToRadians;
    return Check(radians, converted);
}

/// <summary>Converts an angle given in degrees to radians.</summary>
public static double radians(double degrees) {
    double converted = degrees * degreesToRadians;
    return Check(degrees, converted);
}

/// <summary>Returns the floating-point remainder of v divided by w.</summary>
public static double fmod(double v, double w) {
    double remainder = v % w;
    return Check(v, w, remainder);
}
// Folds a list of non-overlapping partial sums into a single double,
// applying half-even rounding the same way CPython's math.fsum does.
// The statement order is significant for the rounding behavior.
private static double sum(List<double> partials) {
    // sum the partials the same was as CPython does
    var n = partials.Count;
    var hi = 0.0;

    if (n == 0) return hi;

    var lo = 0.0;
    // sum exact: accumulate from the largest partial downward, tracking
    // the exact rounding error of each addition in lo.
    while (n > 0) {
        var x = hi;
        var y = partials[--n];
        hi = x + y;
        lo = y - (hi - x);
        if (lo != 0.0)
            break;
    }

    if (n == 0) return hi;

    // half-even rounding: nudge hi by one ulp when the discarded error
    // and the next partial agree in sign and the doubled error is exact.
    if (lo < 0.0 && partials[n - 1] < 0.0 || lo > 0.0 && partials[n - 1] > 0.0) {
        var y = lo * 2.0;
        var x = hi + y;
        var yr = x - hi;
        if (y == yr)
            hi = x;
    }

    return hi;
}

// Computes an exactly-rounded sum of a sequence of values using Shewchuk's
// algorithm ("msum" from https://code.activestate.com/recipes/393090/):
// the running sum is kept as a list of non-overlapping partials.
public static double fsum(IEnumerable e) {
    // msum from https://code.activestate.com/recipes/393090/
    var partials = new List<double>();
    foreach (var v in e.Cast<object>().Select(o => Converter.ConvertToDouble(o))) {
        var x = v;
        var i = 0;
        for (var j = 0; j < partials.Count; j++) {
            var y = partials[j];
            // Ensure |x| >= |y| so the error term below is exact.
            if (Math.Abs(x) < Math.Abs(y)) {
                var t = x;
                x = y;
                y = t;
            }
            var hi = x + y;
            var lo = y - (hi - x);
            // Keep only nonzero rounding errors as surviving partials.
            if (lo != 0) {
                partials[i++] = lo;
            }
            x = hi;
        }
        partials.RemoveRange(i, partials.Count - i);
        partials.Add(x);
    }
    return sum(partials);
}
// Splits v into (mantissa, exponent) such that v == mantissa * 2**exponent,
// mirroring C's frexp / Python's math.frexp. NaN and infinities are
// returned unchanged with exponent 0.
public static PythonTuple frexp(double v) {
    if (Double.IsInfinity(v) || Double.IsNaN(v)) {
        return PythonTuple.MakeTuple(v, 0.0);
    }
    int exponent = 0;
    double mantissa = 0;

    if (v == 0) {
        mantissa = 0;
        exponent = 0;
    } else {
        // Decompose the raw IEEE-754 bits; DecomposeLe (defined elsewhere
        // in this partial class) handles the little-endian layout.
        byte[] vb = BitConverter.GetBytes(v);
        if (BitConverter.IsLittleEndian) {
            DecomposeLe(vb, out mantissa, out exponent);
        } else {
            // Big-endian hosts are not supported by this implementation.
            throw new NotImplementedException();
        }
    }

    return PythonTuple.MakeTuple(mantissa, exponent);
}
/// <summary>
/// Splits v into its fractional and integral parts, returned as
/// (fractional, integral) with both carrying the sign of v.
/// Infinities yield (0.0, v).
/// </summary>
public static PythonTuple modf(double v) {
    if (double.IsInfinity(v)) {
        return PythonTuple.MakeTuple(0.0, v);
    }
    double fractional = v % 1.0;
    double integral = v - fractional;
    return PythonTuple.MakeTuple(fractional, integral);
}
// Returns v * 2**w (math.ldexp). Zero and infinities are returned as-is.
public static double ldexp(double v, BigInteger w) {
    if (v == 0.0 || double.IsInfinity(v)) {
        return v;
    }
    return Check(v, v * Math.Pow(2.0, (double)w));
}

// Returns the Euclidean distance sqrt(v*v + w*w) (math.hypot); any
// infinite argument yields +inf, matching IEEE hypot semantics.
public static double hypot(double v, double w) {
    if (double.IsInfinity(v) || double.IsInfinity(w)) {
        return double.PositiveInfinity;
    }
    return Check(v, w, MathUtils.Hypot(v, w));
}
// math.pow with CPython's special-case ladder, which differs from raw
// Math.Pow for several IEEE corner cases (e.g. pow(1, nan) == 1).
public static double pow(double v, double exp) {
    if (v == 1.0 || exp == 0.0) {
        // 1**anything == anything**0 == 1, even for NaN operands
        return 1.0;
    } else if (double.IsNaN(v) || double.IsNaN(exp)) {
        return double.NaN;
    } else if (v == 0.0) {
        if (exp > 0.0) {
            return 0.0;
        }
        // 0 ** negative has no finite value
        throw PythonOps.ValueError("math domain error");
    } else if (double.IsPositiveInfinity(exp)) {
        // |v| > 1 diverges, |v| < 1 decays to 0, and (-1)**inf is defined as 1
        if (v > 1.0 || v < -1.0) {
            return double.PositiveInfinity;
        } else if (v == -1.0) {
            return 1.0;
        } else {
            return 0.0;
        }
    } else if (double.IsNegativeInfinity(exp)) {
        // mirror image of the +inf exponent cases
        if (v > 1.0 || v < -1.0) {
            return 0.0;
        } else if (v == -1.0) {
            return 1.0;
        } else {
            return double.PositiveInfinity;
        }
    }
    return Check(v, exp, Math.Pow(v, exp));
}
// Natural logarithm; non-positive input raises ValueError like CPython.
public static double log(double v0) {
    if (v0 <= 0.0) {
        throw PythonOps.ValueError("math domain error");
    }
    return Check(v0, Math.Log(v0));
}
// Logarithm of v0 in base v1 with CPython's domain/degenerate-base handling.
public static double log(double v0, double v1) {
    if (v0 <= 0.0 || v1 == 0.0) {
        throw PythonOps.ValueError("math domain error");
    } else if (v1 == 1.0) {
        // log base 1 divides by log(1) == 0
        throw PythonOps.ZeroDivisionError("float division");
    } else if (v1 == Double.PositiveInfinity) {
        return 0.0;
    }
    return Check(Math.Log(v0, v1));
}
// Natural logarithm of an arbitrary-precision integer, computed without
// first converting to double (which would overflow for huge values).
public static double log(BigInteger value) {
    if (value.Sign <= 0) {
        throw PythonOps.ValueError("math domain error");
    }
    return value.Log();
}
public static double log(object value) {
    // CPython tries the float conversion first and only then falls back to
    // the arbitrary-precision integer path, so mirror that order explicitly
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return log(val);
    } else {
        return log(Converter.ConvertToBigInteger(value));
    }
}
// Base-newBase logarithm of an arbitrary-precision integer.
public static double log(BigInteger value, double newBase) {
    if (newBase <= 0.0 || value <= 0) {
        throw PythonOps.ValueError("math domain error");
    } else if (newBase == 1.0) {
        throw PythonOps.ZeroDivisionError("float division");
    } else if (newBase == Double.PositiveInfinity) {
        return 0.0;
    }
    return Check(value.Log(newBase));
}
public static double log(object value, double newBase) {
    // float conversion first, then the BigInteger fallback (see log(object))
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return log(val, newBase);
    } else {
        return log(Converter.ConvertToBigInteger(value), newBase);
    }
}
// Base-10 logarithm; same domain rules as log().
public static double log10(double v0) {
    if (v0 <= 0.0) {
        throw PythonOps.ValueError("math domain error");
    }
    return Check(v0, Math.Log10(v0));
}
public static double log10(BigInteger value) {
    if (value.Sign <= 0) {
        throw PythonOps.ValueError("math domain error");
    }
    return value.Log10();
}
public static double log10(object value) {
    // float conversion first, then the BigInteger fallback (see log(object))
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return log10(val);
    } else {
        return log10(Converter.ConvertToBigInteger(value));
    }
}
public static double log1p(double v0) {
    // Calculate log(1.0 + v0) using William Kahan's algorithm for numerical precision
    if (double.IsPositiveInfinity(v0)) {
        return double.PositiveInfinity;
    }
    double v1 = v0 + 1.0;
    // Linear approximation for very small v0
    if (v1 == 1.0) {
        return v0;
    }
    // Apply correction factor; v0 <= -1 falls through to log()'s domain check
    return log(v1) * v0 / (v1 - 1.0);
}
public static double log1p(BigInteger value) {
    return log(value + BigInteger.One);
}
public static double log1p(object value) {
    // float conversion first, then the BigInteger fallback (see log(object))
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return log1p(val);
    } else {
        return log1p(Converter.ConvertToBigInteger(value));
    }
}
// exp(v0) - 1, computed via the identity expm1(x) == tanh(x/2) * (e**x + 1)
// to retain precision for small x where exp(x) - 1 would cancel.
public static double expm1(double v0) {
    return Check(v0, Math.Tanh(v0 / 2.0) * (Math.Exp(v0) + 1.0));
}
// Inverse hyperbolic sine.
public static double asinh(double v0) {
    if (v0 == 0.0 || double.IsInfinity(v0)) {
        // preserves the sign of zero and passes infinities through
        return v0;
    }
    // rewrote ln(v0 + sqrt(v0**2 + 1)) for precision
    if (Math.Abs(v0) > 1.0) {
        return Math.Sign(v0)*(Math.Log(Math.Abs(v0)) + Math.Log(1.0 + MathUtils.Hypot(1.0, 1.0 / v0)));
    } else {
        return Math.Log(v0 + MathUtils.Hypot(1.0, v0));
    }
}
public static double asinh(object value) {
    // float conversion first, then the BigInteger fallback, matching CPython's
    // conversion order
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return asinh(val);
    } else {
        return asinh(Converter.ConvertToBigInteger(value));
    }
}
// Inverse hyperbolic cosine; defined only for v0 >= 1.
public static double acosh(double v0) {
    if (v0 < 1.0) {
        throw PythonOps.ValueError("math domain error");
    } else if (double.IsPositiveInfinity(v0)) {
        return double.PositiveInfinity;
    }
    // rewrote ln(v0 + sqrt(v0**2 - 1)) for precision
    double c = Math.Sqrt(v0 + 1.0);
    return Math.Log(c) + Math.Log(v0 / c + Math.Sqrt(v0 - 1.0));
}
public static double acosh(object value) {
    // float conversion first, then the BigInteger fallback (see asinh(object))
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return acosh(val);
    } else {
        return acosh(Converter.ConvertToBigInteger(value));
    }
}
// Inverse hyperbolic tangent; defined only on the open interval (-1, 1).
public static double atanh(double v0) {
    if (v0 >= 1.0 || v0 <= -1.0) {
        throw PythonOps.ValueError("math domain error");
    } else if (v0 == 0.0) {
        // preserve +/-0.0
        return v0;
    }
    return Math.Log((1.0 + v0) / (1.0 - v0)) * 0.5;
}
// Integer overload: only 0 lies inside atanh's domain.
public static double atanh(BigInteger value) {
    if (value == 0) {
        return 0;
    } else {
        throw PythonOps.ValueError("math domain error");
    }
}
public static double atanh(object value) {
    // float conversion first, then the BigInteger fallback (see asinh(object))
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return atanh(val);
    } else {
        return atanh(Converter.ConvertToBigInteger(value));
    }
}
// Two-argument arctangent with explicit handling of infinite operands so the
// quadrant results match CPython (pi/4, 3*pi/4, pi/2 multiples, signed).
public static double atan2(double v0, double v1) {
    if (double.IsNaN(v0) || double.IsNaN(v1)) {
        return double.NaN;
    } else if (double.IsInfinity(v0)) {
        if (double.IsPositiveInfinity(v1)) {
            // both infinite, same "direction": +/- pi/4
            return pi * 0.25 * Math.Sign(v0);
        } else if (double.IsNegativeInfinity(v1)) {
            // both infinite, opposite x: +/- 3*pi/4
            return pi * 0.75 * Math.Sign(v0);
        } else {
            // infinite y, finite x: straight up or down
            return pi * 0.5 * Math.Sign(v0);
        }
    } else if (double.IsInfinity(v1)) {
        // finite y, infinite x: 0 or +/- pi (DoubleOps.Sign keeps the sign of -0.0)
        return v1 > 0.0 ? 0.0 : pi * DoubleOps.Sign(v0);
    }
    return Math.Atan2(v0, v1);
}
/// <summary>
/// Error function on real values
/// </summary>
/// <param name="v0">point at which to evaluate erf</param>
public static double erf(double v0) {
    return MathUtils.Erf(v0);
}
/// <summary>
/// Complementary error function on real values: erfc(x) = 1 - erf(x)
/// </summary>
/// <param name="v0">point at which to evaluate erfc</param>
public static double erfc(double v0) {
    return MathUtils.ErfComplement(v0);
}
/// <summary>
/// Factorial of an integral, non-negative float (CPython's math.factorial).
/// Returns an int when the result fits in sys.maxint, otherwise a BigInteger.
/// </summary>
public static object factorial(double v0) {
    if (v0 % 1.0 != 0.0) {
        throw PythonOps.ValueError("factorial() only accepts integral values");
    }
    if (v0 < 0.0) {
        throw PythonOps.ValueError("factorial() not defined for negative values");
    }
    // Delegate to the BigInteger overload so both overloads share a single
    // product loop and int-narrowing rule instead of duplicating them.
    return factorial((BigInteger)v0);
}
// Factorial of a non-negative integer; narrows the result to int when it
// fits within sys.maxint, matching CPython's int/long split.
public static object factorial(BigInteger value) {
    if (value < 0) {
        throw PythonOps.ValueError("factorial() not defined for negative values");
    }
    BigInteger val = 1;
    for (BigInteger mul = value; mul > BigInteger.One; mul -= BigInteger.One) {
        val *= mul;
    }
    if (val > SysModule.maxint) {
        return val;
    }
    return (int)val;
}
public static object factorial(object value) {
    // CPython tries the float conversion first and only then falls back to
    // the arbitrary-precision integer path, so mirror that order explicitly
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return factorial(val);
    } else {
        return factorial(Converter.ConvertToBigInteger(value));
    }
}
/// <summary>
/// Gamma function on real values
/// </summary>
/// <param name="v0">point at which to evaluate gamma; domain errors surface via Check()</param>
public static double gamma(double v0) {
    return Check(v0, MathUtils.Gamma(v0));
}
/// <summary>
/// Natural log of absolute value of Gamma function
/// </summary>
/// <param name="v0">point at which to evaluate lgamma</param>
public static double lgamma(double v0) {
    return Check(v0, MathUtils.LogGamma(v0));
}
// math.trunc: dispatches to the object's __trunc__ protocol method;
// objects without __trunc__ raise AttributeError like CPython.
public static object trunc(CodeContext/*!*/ context, object value) {
    object func;
    if (PythonOps.TryGetBoundAttr(value, "__trunc__", out func)) {
        return PythonOps.CallWithContext(context, func);
    } else {
        throw PythonOps.AttributeError("__trunc__");
    }
}
// math.isinf: true for +/-infinity only.
public static bool isinf(double v0) {
    return double.IsInfinity(v0);
}
// Arbitrary-precision integers are always finite.
public static bool isinf(BigInteger value) {
    return false;
}
public static bool isinf(object value) {
    // try the float conversion first (matching CPython's conversion order);
    // anything non-convertible is an integer-like value, hence finite
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return isinf(val);
    }
    return false;
}
// math.isnan: true only for NaN.
public static bool isnan(double v0) {
    return double.IsNaN(v0);
}
// Arbitrary-precision integers are never NaN.
public static bool isnan(BigInteger value) {
    return false;
}
public static bool isnan(object value) {
    // try the float conversion first (matching CPython's conversion order)
    double val;
    if (Converter.TryConvertToDouble(value, out val)) {
        return isnan(val);
    }
    return false;
}
// math.copysign: magnitude of x with the sign of y (including signed zero).
public static double copysign(double x, double y) {
    return DoubleOps.CopySign(x, y);
}
public static double copysign(object x, object y) {
    double val, sign;
    if (!Converter.TryConvertToDouble(x, out val) ||
        !Converter.TryConvertToDouble(y, out sign)) {
        // NOTE(review): the "TypeError:" prefix is redundant with the exception
        // type and differs from CPython's plain "a float is required" — confirm
        // before changing, since callers may match on the message.
        throw PythonOps.TypeError("TypeError: a float is required");
    }
    return DoubleOps.CopySign(val, sign);
}
#region Private Implementation Details
// Overwrites the 11-bit biased exponent field in the little-endian byte image
// of a double, preserving the sign bit and mantissa.
private static void SetExponentLe(byte[] v, int exp) {
    exp += Bias;  // Bias is the IEEE-754 double exponent bias (declared elsewhere in this class)
    ushort oldExp = LdExponentLe(v);
    // keep sign bit (0x8000) and the mantissa nibble (0x000f); splice in exp
    ushort newExp = (ushort)(oldExp & 0x800f | (exp << 4));
    StExponentLe(v, newExp);
}
// Extracts the unbiased exponent from the little-endian byte image.
private static int IntExponentLe(byte[] v) {
    ushort exp = LdExponentLe(v);
    return ((int)((exp & 0x7FF0) >> 4) - Bias);
}
// Loads the top 16 bits (sign + exponent + high mantissa nibble) of the image.
private static ushort LdExponentLe(byte[] v) {
    return (ushort)(v[6] | ((ushort)v[7] << 8));
}
// Loads the 52-bit mantissa from the little-endian byte image of a double.
// The halves must be widened to long BEFORE combining: in C# a shift of a
// 32-bit int by 32 is masked to a no-op (count & 31), so the old
// "i1 | (i2 << 32)" collapsed to i1 | i2, and an int i1 with bit 31 set
// would also sign-extend when widened.
private static long LdMantissaLe(byte[] v) {
    long lo = ((long)v[0] | ((long)v[1] << 8) | ((long)v[2] << 16) | ((long)v[3] << 24));
    long hi = ((long)v[4] | ((long)v[5] << 8) | ((long)(v[6] & 0xF) << 16));
    return lo | (hi << 32);
}
// Stores a 16-bit sign/exponent field back into the little-endian image.
private static void StExponentLe(byte[] v, ushort e) {
    v[6] = (byte)e;
    v[7] = (byte)(e >> 8);
}
// A double is denormalized when its exponent field is all zero but the
// mantissa is non-zero.
private static bool IsDenormalizedLe(byte[] v) {
    ushort exp = LdExponentLe(v);
    long man = LdMantissaLe(v);
    return ((exp & 0x7FF0) == 0 && (man != 0));
}
// Splits the little-endian image into frexp-style (mantissa, exponent).
// Denormals are first scaled by 2**1022 into the normal range so the
// exponent field is meaningful, then the scaling is subtracted back out.
private static void DecomposeLe(byte[] v, out double m, out int e) {
    if (IsDenormalizedLe(v)) {
        m = BitConverter.ToDouble(v, 0);
        m *= Math.Pow(2.0, 1022);
        v = BitConverter.GetBytes(m);
        e = IntExponentLe(v) - 1022;
    } else {
        e = IntExponentLe(v);
    }
    // zero the exponent field so the remaining bits read as the mantissa in [0.5, 1)
    SetExponentLe(v, 0);
    m = BitConverter.ToDouble(v, 0);
}
// Thin wrappers over PythonOps.CheckMath, which converts out-of-range /
// invalid results into the appropriate Python exceptions — presumably
// OverflowError/ValueError based on the inputs; see PythonOps for details.
private static double Check(double v) {
    return PythonOps.CheckMath(v);
}
private static double Check(double input, double output) {
    return PythonOps.CheckMath(input, output);
}
private static double Check(double in0, double in1, double output) {
    return PythonOps.CheckMath(in0, in1, output);
}
#endregion
}
}
|
slozier/ironpython2
|
Src/IronPython.Modules/math.cs
|
C#
|
apache-2.0
| 19,692 |
package org.jrivets.transaction;
/**
* The interface defines an action which can be executed in
* {@link SimpleTransaction} context.
*
* @author Dmitry Spasibenko
*
*/
public interface Action {

    /**
     * Executes the action itself. In case of failure it should throw an
     * exception describing the cause; a successful return makes this action
     * eligible for {@link #rollbackAction()} if the transaction later fails.
     *
     * @throws Throwable
     *             describes the fail cause
     */
    void doAction() throws Throwable;

    /**
     * Rolls back the action executed by <tt>doAction()</tt>. It will be invoked
     * ONLY if the action method <tt>doAction()</tt> for this object has been
     * executed successfully (did not throw an exception). The cause of calling
     * this method can be one of the following: an action after this one
     * failed, or the transaction was cancelled explicitly (the
     * <tt>SimpleTransaction.cancel()</tt> method was called).
     */
    void rollbackAction();
}
|
obattalov/jrivets-common
|
src/main/java/org/jrivets/transaction/Action.java
|
Java
|
apache-2.0
| 891 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.security.examples.pages;
import org.apache.wicket.IPageMap;
import org.apache.wicket.PageParameters;
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.model.IModel;
/**
* Base page for all pages that do not require a login.
*
* @author marrink
*
*/
public class MyUnSecurePage extends WebPage
{
    private static final long serialVersionUID = 1L;

    /**
     * Default constructor.
     */
    public MyUnSecurePage()
    {
    }

    /**
     * @param model the page model
     */
    public MyUnSecurePage(IModel< ? > model)
    {
        super(model);
    }

    /**
     * @param pageMap the pagemap this page is placed in
     */
    public MyUnSecurePage(IPageMap pageMap)
    {
        super(pageMap);
    }

    /**
     * @param parameters bookmarkable page parameters
     */
    public MyUnSecurePage(PageParameters parameters)
    {
        super(parameters);
    }

    /**
     * @param pageMap the pagemap this page is placed in
     * @param model the page model
     */
    public MyUnSecurePage(IPageMap pageMap, IModel< ? > model)
    {
        super(pageMap, model);
    }
}
|
duesenklipper/wicket-security-1.4
|
examples/all_in_one/src/main/java/org/apache/wicket/security/examples/pages/MyUnSecurePage.java
|
Java
|
apache-2.0
| 1,765 |
/*
* Copyright 2008-2014 Aerospike, Inc.
*
* Portions may be licensed to Aerospike, Inc. under one or more contributor
* license agreements.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
#pragma once
#include <stdlib.h>
#include <aerospike/as_val.h>
#include <aerospike/as_util.h>
#include <citrusleaf/alloc.h>
/******************************************************************************
* MACROS
*****************************************************************************/
#define AS_STREAM_END ((void *) 0)
/******************************************************************************
* TYPES
*****************************************************************************/
struct as_stream_hooks_s;
/**
* Stream Status Codes
*/
typedef enum as_stream_status_e {
    AS_STREAM_OK = 0,   /* operation succeeded */
    AS_STREAM_ERR = 1   /* operation failed */
} as_stream_status;

/**
 *  Stream Interface
 *
 *  To use the stream interface, you will need to create an instance
 *  via one of the implementations.
 *
 *  @ingroup aerospike_t
 */
typedef struct as_stream_s {

    /**
     *  Specifies whether the cf_free() can be used
     *  on this stream. True only for streams created by as_stream_new().
     */
    bool free;

    /**
     *  Context data for the stream.
     */
    void * data;

    /**
     *  Hooks for the stream
     */
    const struct as_stream_hooks_s * hooks;

} as_stream;

/**
 *  Stream Hooks
 *
 *  An implementation of `as_rec` should provide implementations for each
 *  of the hooks.
 */
typedef struct as_stream_hooks_s {

    /**
     *  Destroy the stream.
     */
    int (* destroy)(as_stream * stream);

    /**
     *  Read the next value from the stream.
     *  Returns AS_STREAM_END when the stream is exhausted.
     */
    as_val * (* read)(const as_stream * stream);

    /**
     *  Write a value to the stream.
     */
    as_stream_status (* write)(const as_stream * stream, as_val * value);

} as_stream_hooks;
/**
* Wrapper functions to ensure each CF allocation-related function call has a unique line.
*/
void *as_stream_malloc(size_t size);
void as_stream_free(void *ptr);
/******************************************************************************
* INSTANCE FUNCTIONS
*****************************************************************************/
/**
* Initializes a stack allocated as_stream for a given source and hooks.
*
* @param stream The stream to initialize.
* @param data The source feeding the stream
* @param hooks The hooks that interface with the source
*
* @return On success, the initialized stream. Otherwise NULL.
*
* @relatesalso as_stream
*/
static inline as_stream * as_stream_init(as_stream * stream, void * data, const as_stream_hooks * hooks)
{
    /* Stack/embedded initialization: the caller owns the struct memory,
     * so the free flag stays off and as_stream_destroy() won't release it. */
    if ( stream != NULL ) {
        stream->free = false;
        stream->data = data;
        stream->hooks = hooks;
    }
    return stream;
}
/**
* Creates a new heap allocated as_stream for a given source and hooks.
*
* @param data The source feeding the stream
* @param hooks The hooks that interface with the source
*
* @return On success, a new stream. Otherwise NULL.
*
* @relatesalso as_stream
*/
static inline as_stream * as_stream_new(void * data, const as_stream_hooks * hooks)
{
    /* Heap allocation: the stream owns its struct memory, so the free flag
     * is set and as_stream_destroy() will release it. NULL on alloc failure. */
    as_stream * stream = (as_stream *) as_stream_malloc(sizeof(as_stream));
    if ( stream != NULL ) {
        stream->free = true;
        stream->data = data;
        stream->hooks = hooks;
    }
    return stream;
}
/**
* Destroy the as_stream and associated resources.
*
* @param stream The stream to destroy.
*
* @return 0 on success, otherwise 1.
*
* @relatesalso as_stream
*/
static inline void as_stream_destroy(as_stream * stream)
{
    /* invoke the implementation's destroy hook first (default result 1 when
     * the hook is absent — see as_util_hook) */
    as_util_hook(destroy, 1, stream);
    /* then release the struct itself, but only if it was heap-allocated
     * by as_stream_new() */
    if ( stream && stream->free ) {
        as_stream_free(stream);
    }
}
/******************************************************************************
* VALUE FUNCTIONS
*****************************************************************************/
/**
* Get the source for the stream
*
* @param stream The stream to get the source from
*
* @return pointer to the source of the stream
*
* @relatesalso as_stream
*/
static inline void * as_stream_source(const as_stream * stream)
{
    /* NULL-safe accessor for the stream's backing source. */
    if ( stream == NULL ) {
        return NULL;
    }
    return stream->data;
}
/**
* Reads a value from the stream
*
* @param stream The stream to be read.
*
* @return the element read from the stream or STREAM_END
*
* @relatesalso as_stream
*/
static inline as_val * as_stream_read(const as_stream * stream)
{
    /* dispatch to the read hook; NULL when the stream or hook is missing */
    return as_util_hook(read, NULL, stream);
}
/**
* Is the stream readable? Tests whether the stream has a read function.
*
* @param stream The stream to test.
*
* @return true if the stream can be read from
*
* @relatesalso as_stream
*/
static inline bool as_stream_readable(const as_stream * stream)
{
    /* A stream is readable when it carries hooks implementing read(). */
    return stream && stream->hooks && stream->hooks->read != NULL;
}
/**
* Write a value to the stream
*
* @param stream The stream to write to.
* @param value The element to write to the stream.
*
* @return AS_STREAM_OK on success, otherwise is failure.
*
* @relatesalso as_stream
*/
static inline as_stream_status as_stream_write(const as_stream * stream, as_val * value)
{
    /* dispatch to the write hook; AS_STREAM_ERR when the stream or hook is missing */
    return as_util_hook(write, AS_STREAM_ERR, stream, value);
}
/**
* Is the stream writable? Tests whether the stream has a write function.
*
* @param stream The stream to test.
*
* @return true if the stream can be written to.
*
* @relatesalso as_stream
*/
static inline bool as_stream_writable(const as_stream * stream)
{
    /* A stream is writable when it carries hooks implementing write(). */
    return stream && stream->hooks && stream->hooks->write != NULL;
}
|
aerospike/concurrency-control-tic-tac-toe
|
node_modules/aerospike/aerospike-client-c/package/usr/local/include/aerospike/as_stream.h
|
C
|
apache-2.0
| 5,850 |
/*
* Copyright 2019, EnMasse authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.enmasse.admin.model;
import io.fabric8.kubernetes.api.model.HasMetadata;
import java.util.List;
import java.util.Map;
/**
 * Read-only view of an address-space plan resource: its resource quotas,
 * the address plans it permits, and its presentation metadata.
 */
public interface AddressSpacePlan extends HasMetadata {
    /** @return per-resource limits, keyed by resource name */
    Map<String, Double> getResourceLimits();
    /** @return names of the address plans allowed under this plan */
    List<String> getAddressPlans();
    String getShortDescription();
    String getDisplayName();
    /** @return relative ordering hint for UI listings */
    int getDisplayOrder();
    String getAddressSpaceType();
    /** @return name of the infra config this plan references */
    String getInfraConfigRef();
}
|
EnMasseProject/enmasse
|
api-model/src/main/java/io/enmasse/admin/model/AddressSpacePlan.java
|
Java
|
apache-2.0
| 572 |
################################################################################
# Copyright (c) 2015-2019 Skymind, Inc.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License, Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# SPDX-License-Identifier: Apache-2.0
################################################################################
from .progressbar import ProgressBar
import requests
import math
import os
import hashlib
def download(url, file_name):
    """Download ``url`` to ``file_name`` with a progress bar.

    The transfer is skipped when a local copy already exists with the expected
    size and, if a ``<file_name>.sha1`` sidecar is present, the expected SHA-1
    digest.  Fixes over the previous version: the stored digest is compared
    against ``sha1.hexdigest()`` (the old code compared a ``str`` to the hash
    *object*, which is never equal, so valid files were always re-downloaded);
    the file is hashed in binary mode (text mode breaks on arbitrary bytes);
    the size display guards against zero-byte files; and file handles are
    closed via ``with`` even on error.

    Returns True on success (including when the file already existed).
    """
    r = requests.get(url, stream=True)
    file_size = int(r.headers['Content-length'])

    file_exists = False
    if os.path.isfile(file_name):
        local_file_size = os.path.getsize(file_name)
        if local_file_size == file_size:
            sha1_file = file_name + '.sha1'
            if os.path.isfile(sha1_file):
                print('sha1 found')
                with open(sha1_file) as f:
                    expected_sha1 = f.read().strip()
                # Hash in binary mode, in fixed-size chunks.
                sha1 = hashlib.sha1()
                with open(file_name, 'rb') as f:
                    for buff in iter(lambda: f.read(65536), b''):
                        sha1.update(buff)
                if expected_sha1 == sha1.hexdigest():
                    file_exists = True
                else:
                    print("File corrupt. Downloading again.")
                    os.remove(file_name)
            else:
                file_exists = True
        else:
            print("File corrupt. Downloading again.")
            os.remove(file_name)

    if not file_exists:
        # Pick a human-readable unit; guard log() against zero-length bodies.
        factor = int(math.floor(math.log(file_size) / math.log(1024))) if file_size > 0 else 0
        display_file_size = str(file_size / 1024 ** factor) + \
            ['B', 'KB', 'MB', 'GB', 'TB', 'PB'][factor]
        print("Source: " + url)
        print("Destination " + file_name)
        print("Size: " + display_file_size)
        file_size_dl = 0
        block_sz = 8192
        pbar = ProgressBar(file_size)
        with open(file_name, 'wb') as f:
            for chunk in r.iter_content(chunk_size=block_sz):
                if not chunk:
                    continue
                chunk_size = len(chunk)
                file_size_dl += chunk_size
                f.write(chunk)
                pbar.update(chunk_size)
    else:
        print("File already exists - " + file_name)
    return True
|
RobAltena/deeplearning4j
|
pydl4j/pydl4j/downloader.py
|
Python
|
apache-2.0
| 3,179 |
package org.carlspring.strongbox.xml.parsers;
import org.carlspring.strongbox.url.ClasspathURLStreamHandler;
import org.carlspring.strongbox.url.ClasspathURLStreamHandlerFactory;
import org.carlspring.strongbox.xml.CustomTagService;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import java.io.*;
import java.net.URL;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author mtodorov
*/
/**
 * Thread-safe generic JAXB (un)marshalling helper.  A single
 * {@link JAXBContext} is built lazily from the configured classes (plus any
 * implementations discovered via {@link CustomTagService}) and cached for the
 * lifetime of the parser; a lock serializes marshaller/unmarshaller use,
 * which is not thread-safe in JAXB.
 *
 * @author mtodorov
 */
public class GenericParser<T>
{

    public final static boolean IS_OUTPUT_FORMATTED = true;

    private static final Logger logger = LoggerFactory.getLogger(GenericParser.class);

    // Guards creation and use of JAXB (un)marshallers.
    private ReentrantLock lock = new ReentrantLock();

    // Classes the JAXB context is built from; insertion order preserved.
    private Set<Class> classes = new LinkedHashSet<>();

    private JAXBContext context;

    static
    {
        // Register a "classpath:" URL scheme so resources can be read via parse(URL).
        final ClasspathURLStreamHandler handler = new ClasspathURLStreamHandler(ClassLoader.getSystemClassLoader());
        ClasspathURLStreamHandlerFactory factory = new ClasspathURLStreamHandlerFactory("classpath", handler);
        try
        {
            URL.setURLStreamHandlerFactory(factory);
        }
        catch (Error e)
        {
            // You can safely disregard this, as a second attempt to register an already
            // registered URLStreamHandlerFactory will throw an error. Since there's no
            // apparent way to check if it's registered, just catch and ignore the error.
        }
    }

    /**
     * Builds a parser whose context contains only the service-loader discovered classes.
     */
    public GenericParser()
    {
        this.classes.addAll(CustomTagService.getInstance().getImplementations());
    }

    /**
     * @param useServiceLoader whether to also include service-loader discovered classes
     */
    public GenericParser(boolean useServiceLoader)
    {
        if (useServiceLoader)
        {
            this.classes.addAll(CustomTagService.getInstance().getImplementations());
        }
    }

    /**
     * @param useServiceLoader whether to also include service-loader discovered classes
     * @param classes          explicit classes to include in the JAXB context
     */
    public GenericParser(boolean useServiceLoader, Class... classes)
    {
        Collections.addAll(this.classes, classes);
        if (useServiceLoader)
        {
            this.classes.addAll(CustomTagService.getInstance().getImplementations());
        }
    }

    /**
     * @param classes explicit classes to include, in addition to the
     *                service-loader discovered ones
     */
    public GenericParser(Class... classes)
    {
        Collections.addAll(this.classes, classes);
        this.classes.addAll(CustomTagService.getInstance().getImplementations());
    }

    /**
     * Unmarshals an object from a file.
     */
    public T parse(File file)
            throws JAXBException, IOException
    {
        T object = null;
        try (FileInputStream is = new FileInputStream(file))
        {
            object = parse(is);
        }

        return object;
    }

    /**
     * Unmarshals an object from a URL (including "classpath:" URLs).
     */
    public T parse(URL url)
            throws IOException, JAXBException
    {
        try (InputStream is = url.openStream())
        {
            return parse(is);
        }
    }

    /**
     * Unmarshals an object from a stream. The caller retains ownership of the stream.
     */
    public T parse(InputStream is)
            throws JAXBException
    {
        T object = null;
        try
        {
            lock.lock();

            Unmarshaller unmarshaller = getContext().createUnmarshaller();
            //noinspection unchecked
            object = (T) unmarshaller.unmarshal(is);
        }
        finally
        {
            lock.unlock();
        }

        return object;
    }

    /**
     * Marshals the object to the file at {@code path}.
     */
    public void store(T object,
                      String path)
            throws JAXBException, IOException
    {
        store(object, new File(path).getAbsoluteFile());
    }

    /**
     * Marshals the object to {@code file}.
     */
    public void store(T object,
                      File file)
            throws JAXBException, IOException
    {
        try (FileOutputStream os = new FileOutputStream(file))
        {
            store(object, os);
        }
    }

    /**
     * Marshals the object to a stream. The caller retains ownership of the stream.
     */
    public void store(T object,
                      OutputStream os)
            throws JAXBException
    {
        try
        {
            lock.lock();

            JAXBContext context = getContext();
            Marshaller marshaller = context.createMarshaller();
            marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, IS_OUTPUT_FORMATTED);

            marshaller.marshal(object, os);
        }
        finally
        {
            lock.unlock();
        }
    }

    /**
     * Serialize #object to String using JAXB marshaller.
     *
     * @param object the object to be serialized
     * @return String representation of object
     */
    public String serialize(T object)
            throws JAXBException
    {
        StringWriter writer = new StringWriter();
        try
        {
            lock.lock();

            JAXBContext context = getContext();
            Marshaller marshaller = context.createMarshaller();
            marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, IS_OUTPUT_FORMATTED);

            marshaller.marshal(object, writer);

            return writer.getBuffer().toString();
        }
        finally
        {
            lock.unlock();
        }
    }

    /**
     * Deserializes an object from its XML String representation.
     */
    @SuppressWarnings("unchecked")
    public T deserialize(String input)
            throws JAXBException
    {
        try
        {
            lock.lock();

            JAXBContext context = getContext();
            Unmarshaller m = context.createUnmarshaller();

            return (T) m.unmarshal(new StringReader(input));
        }
        finally
        {
            lock.unlock();
        }
    }

    /**
     * Replaces the cached context with one built from a single class.
     * NOTE(review): unlike the other entry points this is not performed
     * under the lock — confirm callers only use it before concurrent access.
     */
    public void setContext(Class<?> classType)
            throws JAXBException
    {
        context = JAXBContext.newInstance(classType);
    }

    /**
     * Lazily creates (and caches) the {@link JAXBContext} for the configured classes.
     *
     * @throws JAXBException if the context cannot be created. (Previously the
     *         exception was swallowed and {@code null} was returned, which
     *         surfaced later as an opaque {@link NullPointerException} in
     *         callers invoking {@code createMarshaller()} on the null context.)
     */
    public JAXBContext getContext()
            throws JAXBException
    {
        if (context == null)
        {
            try
            {
                context = JAXBContext.newInstance(classes.toArray(new Class[classes.size()]));
            }
            catch (JAXBException e)
            {
                logger.error(e.getMessage(), e);

                throw e;
            }
        }

        return context;
    }

}
|
AlexOreshkevich/strongbox
|
strongbox-configuration/src/main/java/org/carlspring/strongbox/xml/parsers/GenericParser.java
|
Java
|
apache-2.0
| 5,887 |
var zkUtil = require('../util');
var zkConstants = require('../constants');
var ZK = require('zookeeper').ZooKeeper;
/**
* @constructor
* encapsulate the lock algorithm. I didn't want it exposed in the client.
* @param {ZkClient} client client doing the locking.
* @param {String} node name of lock.
* @param {Function} callback gets executed when the lock is acquired or reaches an error. expects (error).
*/
function LockAlgorithm(client, node, callback) {
  this.client = client;     // ZkClient used for all zookeeper operations
  this.node = node;         // name of the lock (parent node)
  this.callback = callback; // invoked exactly once with (error) on acquisition or failure
}
/**
* given a sorted list of child paths, finds the one that precedes myPath.
* @param {Array} children list of children nodes.
* @param {String} myPath path to compare against.
* @return {String} valid child path (doesn't contain parent) or null if none exists.
*/
LockAlgorithm.prototype.pathBeforeMe = function(children, myPath) {
  // Scan the sorted child list for myPath and hand back the entry that
  // immediately precedes it; null when myPath is first or not present.
  var index;
  for (index = 1; index < children.length; index++) {
    if (children[index] === myPath) {
      return children[index - 1];
    }
  }
  return null;
};
/**
* checks for the presence of path. it doesn't exist, it gets created.
* @param {String} path node to ensure existence of.
* @param {Function} callback expects (error, pathName).
*/
LockAlgorithm.prototype.ensureNode = function(path, callback) {
  var self = this;
  // createPaths builds the (persistent) parent node; flags=0 means not
  // ephemeral/sequential. Presumably idempotent when the node exists —
  // TODO confirm against ZkClient.createPaths.
  this.client.createPaths(path, 'lock node', 0, function(err, pathCreated) {
    if (err) {
      callback(err);
      return;
    }
    self.client.options.log.tracef('successful parent node creation: ${path}', {'path': pathCreated});
    // assert path === pathCreated
    callback(null, pathCreated);
  });
};
/**
* creates an child node.
* @param {String} path ephemeral child node (specified by path).
* @param {String} txnId The transaction ID.
* @param {Function} callback expects (error, pathName).
*/
LockAlgorithm.prototype.createChild = function(path, txnId, callback) {
  var self = this,
      // node payload records who queued and when, as JSON [txnId, timestamp]
      lockValue = JSON.stringify([txnId, Date.now()]);
  // SEQUENCE|EPHEMERAL: zookeeper appends a monotonic suffix (our place in
  // line) and deletes the node automatically if this session dies.
  self.client.create(path, lockValue, ZK.ZOO_SEQUENCE | ZK.ZOO_EPHEMERAL, function(err, pathCreated) {
    if (err) {
      self.client.options.log.error('node creation error', {err: err, pathCreated: pathCreated});
      callback(err);
      return;
    }
    // assert pathCreated === path.
    callback(null, pathCreated);
  });
};
/**
* gets children of a particular node. errors if there are no children.
* @param {String} path the parent of the children.
* @param {Function} callback expects (error, sorted list of children). the children are not full paths, but names only.
*/
LockAlgorithm.prototype.getSortedChildren = function(path, callback) {
  // false because we don't want to watch.
  this.client._getChildren(path, false, '', function(err, children) {
    if (err) {
      callback(err);
      return;
    }
    if (children.length < 1) {
      // there should *always* be children since this method always gets called after the lock node is created.
      callback(new Error('Could not create lock node for ' + path), null);
      return;
    }
    // Sort numerically by sequence suffix — a plain lexicographic sort would
    // mis-order once the zookeeper counter grows past the padded width.
    children.sort(function(a, b) {
      // each child name is formatted like this: lock-00000000. so peel of chars before creating a number.
      return parseInt(a.substr(zkConstants.LOCK_PREFIX.length), 10) -
        parseInt(b.substr(zkConstants.LOCK_PREFIX.length), 10);
    });
    callback(null, children);
  });
};
/**
* watches watchPath for deletion. parentPath is roughly equal to the name of the lock, lockPath is the child node
* name for the lock that is to be acquired (e.g. '/this_lock/-lock000000121').
* it is perfectly reasonable for this watch to execute without executing a callback (in the event we need to wait
* for watchPath to be deleted).
* @param {String} parentPath basically the name of the lock (which is the parent node).
* @param {String} lockPath child lock that is basically a place in line.
* @param {String} watchPath the child node that we are waiting on to go away. when that happens it is our turn (we
* have the lock).
* @param {Function} callback expects (error). only purposes is to catch and report problems.
*/
LockAlgorithm.prototype.watch = function(parentPath, lockPath, watchPath, callback) {
  var self = this;
  self.client.options.log.trace1('watching: ' + watchPath);
  // _exists(..., true) both answers the question and arms a watch on watchPath.
  self.client._exists(watchPath, true, function(err, exists) {
    self.client.options.log.trace('exists', {err: err, exists: exists});
    if (err) {
      callback(err);
      return;
    }
    if (!exists) {
      // predecessor already gone: re-run the algorithm, we may hold the lock now
      self.lockAlgorithm(parentPath, lockPath);
      return;
    }
    // wait for it to be deleted, then execute the callback.
    if (self.client.waitCallbacks[watchPath]) {
      // only one waiter per node is supported by the waitCallbacks registry
      callback(new Error('Already waiting on ' + watchPath));
      return;
    }
    // set a callback that gets invoked when watchPath is deleted.
    self.client.waitCallbacks[watchPath] = function() {
      self.client.options.log.trace('Invoked wait callback');
      self.lockAlgorithm(parentPath, lockPath);
    };
  });
};
/**
* implements the lock algorithm.
* @param {String} parentPath a decorated form of the lock name.
* @param {String} lockPath a child of parentPath.
*/
LockAlgorithm.prototype.lockAlgorithm = function(parentPath, lockPath) {
  var self = this, absolutePath;
  self.getSortedChildren(parentPath, function(err, children) {
    if (err) {
      self.callback(err);
    } else {
      //log.trace1('PARENT:%s, LOCK:%s, CHILDREN: %j', parentPath, lockPath, children);
      // Standard zookeeper lock recipe: whoever owns the lowest sequence
      // number holds the lock; everyone else watches their predecessor.
      if (zkUtil.lte(zkUtil.last(lockPath), children[0])) {
        // we've got the lock!!!!
        self.client.options.log.tracef('lock acquired on ${parentPath} by ${lockPath}',
                                       {parentPath: parentPath, lockPath: lockPath});
        self.client.locks[self.node] = lockPath;
        self.callback(null);
      } else {
        // watch the child path immediately preceeding lockPath. When it is deleted or no longer exists,
        // this process owns the lock.
        absolutePath = parentPath + '/' + self.pathBeforeMe(children, zkUtil.last(lockPath));
        self.watch(parentPath, lockPath, absolutePath, function(err) {
          if (err) {
            self.callback(err);
          } // else, a watch was set.
        });
      }
    }
  });
};
/** Constructor implementing the ZooKeeper distributed-lock recipe. */
exports.LockAlgorithm = LockAlgorithm;
|
racker/service-registry
|
node_modules/zookeeper-client/lib/algorithms/lock.js
|
JavaScript
|
apache-2.0
| 6,297 |
#!/usr/bin/env bash
# Stop script if unbound variable found (use ${var:-} if intentional)
set -u
# Stop script if command returns non-zero exit code.
# Prevents hidden errors caused by missing error code propagation.
set -e
# Prints command-line help for this build wrapper script and returns.
usage()
{
  echo "Common settings:"
  echo " --configuration <value> Build configuration: 'Debug' or 'Release' (short: -c)"
  echo " --verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
  echo " --binaryLog Create MSBuild binary log (short: -bl)"
  echo ""
  echo "Actions:"
  echo " --restore Restore dependencies (short: -r)"
  echo " --build Build all projects (short: -b)"
  echo " --rebuild Rebuild all projects"
  echo " --test Run all unit tests (short: -t)"
  echo " --sign Sign build outputs"
  echo " --publish Publish artifacts (e.g. symbols)"
  echo " --pack Package build outputs into NuGet packages and Willow components"
  echo " --help Print help and exit (short: -h)"
  echo ""
  echo "Advanced settings:"
  echo " --projects <value> Project or solution file(s) to build"
  echo " --ci Set when running on CI server"
  echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
  echo " --nodeReuse <value> Sets nodereuse msbuild parameter ('true' or 'false')"
  echo " --warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
  echo ""
  echo "Command line arguments starting with '/p:' are passed through to MSBuild."
}
# Determine the absolute directory containing this script, following any
# chain of symlinks, so relative resources (tools.sh etc.) resolve correctly.
source="${BASH_SOURCE[0]}"

# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
# --- Default option values (overridden by the argument parser below) ---

# Actions: which build phases to run.
restore=false
build=false
rebuild=false
test=false
pack=false
publish=false
integration_test=false
performance_test=false
sign=false
public=false
ci=false

# MSBuild behavior toggles.
warn_as_error=true
node_reuse=true
binary_log=false

# Build inputs and settings.
projects=''
configuration='Debug'
prepare_machine=false
verbosity='minimal'

# Accumulates raw /p:, /m:, /bl: arguments passed straight to MSBuild.
properties=''
# Parse command-line arguments. Option names are matched case-insensitively;
# arguments beginning with /p:, /m: or /bl: are passed through to MSBuild
# verbatim. Unknown arguments print usage and fail.
# NOTE: `[[ $# -gt 0 ]]` is a numeric comparison; the previous `[[ $# > 0 ]]`
# form compared the strings lexicographically.
while [[ $# -gt 0 ]]; do
  opt="$(echo "$1" | awk '{print tolower($0)}')"
  case "$opt" in
    --help|-h)
      usage
      exit 0
      ;;
    --configuration|-c)
      configuration=$2
      shift
      ;;
    --verbosity|-v)
      verbosity=$2
      shift
      ;;
    --binarylog|-bl)
      binary_log=true
      ;;
    --restore|-r)
      restore=true
      ;;
    --build|-b)
      build=true
      ;;
    --rebuild)
      rebuild=true
      ;;
    --pack)
      pack=true
      ;;
    --test|-t)
      test=true
      ;;
    --integrationtest)
      integration_test=true
      ;;
    --performancetest)
      performance_test=true
      ;;
    --sign)
      sign=true
      ;;
    --publish)
      publish=true
      ;;
    --preparemachine)
      prepare_machine=true
      ;;
    --projects)
      projects=$2
      shift
      ;;
    --ci)
      ci=true
      ;;
    --warnaserror)
      warn_as_error=$2
      shift
      ;;
    --nodereuse)
      node_reuse=$2
      shift
      ;;
    /p:*|/m:*|/bl:*)
      # MSBuild pass-through arguments (properties, node count, binary log).
      properties="$properties $1"
      ;;
    *)
      echo "Invalid argument: $1"
      usage
      exit 1
      ;;
  esac
  shift
done

# CI runs always capture a binary log and disable MSBuild node reuse, since
# long-lived build nodes can hold file locks between jobs.
if [[ "$ci" == true ]]; then
  binary_log=true
  node_reuse=false
fi

# Brings in shared toolset helpers (InitializeToolset, MSBuild,
# ExitWithExitCode, $eng_root, $log_dir, ...).
. "$scriptroot/tools.sh"
# Sources the repo-specific toolset restore hook, if the repo provides one.
# $eng_root is defined by tools.sh, sourced above.
function InitializeCustomToolset {
  local script="$eng_root/restore-toolset.sh"
  if [[ -a "$script" ]]; then
    . "$script"
  fi
}
# Runs the arcade build: initializes the toolset, then invokes MSBuild with
# one boolean property per requested action. Exits the process (does not
# return) via ExitWithExitCode.
function Build {
  InitializeToolset
  InitializeCustomToolset

  # Restrict the build to specific projects/solutions when requested.
  if [[ ! -z "$projects" ]]; then
    properties="$properties /p:Projects=$projects"
  fi

  local bl=""
  if [[ "$binary_log" == true ]]; then
    bl="/bl:\"$log_dir/Build.binlog\""
  fi

  # $_InitializeToolset is the entry-point proj set by InitializeToolset.
  MSBuild $_InitializeToolset \
    $bl \
    /p:Configuration=$configuration \
    /p:RepoRoot="$repo_root" \
    /p:Restore=$restore \
    /p:Build=$build \
    /p:Rebuild=$rebuild \
    /p:Test=$test \
    /p:Pack=$pack \
    /p:IntegrationTest=$integration_test \
    /p:PerformanceTest=$performance_test \
    /p:Sign=$sign \
    /p:Publish=$publish \
    $properties

  ExitWithExitCode 0
}
# Import custom tools configuration, if present in the repo.
configure_toolset_script="$eng_root/configure-toolset.sh"
if [[ -a "$configure_toolset_script" ]]; then
  . "$configure_toolset_script"
fi

# TODO: https://github.com/dotnet/arcade/issues/1468
# Temporary workaround to avoid breaking change.
# Remove once repos are updated.
if [[ -n "${useInstalledDotNetCli:-}" ]]; then
  use_installed_dotnet_cli="$useInstalledDotNetCli"
fi

# Kick off the build; Build exits the process itself.
Build
|
VSadov/roslyn
|
eng/common/build.sh
|
Shell
|
apache-2.0
| 5,023 |
/* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 2.0.10
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.badlogic.gdx.physics.bullet;
/**
 * SWIG-generated constants identifying Bullet contact-manifold types.
 * Mirrors the C++ {@code btContactManifoldTypes} enum values.
 */
public final class btContactManifoldTypes {
  /** Lowest value reserved for contact-manifold type tags (1024). */
  public static final int MIN_CONTACT_MANIFOLD_TYPE = 1024;
  /** Type tag for {@code btPersistentManifold} (MIN_CONTACT_MANIFOLD_TYPE + 1). */
  public static final int BT_PERSISTENT_MANIFOLD_TYPE = MIN_CONTACT_MANIFOLD_TYPE + 1;
}
|
domix/libgdx
|
extensions/gdx-bullet/jni/swig-src/com/badlogic/gdx/physics/bullet/btContactManifoldTypes.java
|
Java
|
apache-2.0
| 624 |
import {Request} from '../lib/request';
import {Response} from '../lib/response';
import {AWSError} from '../lib/error';
import {Service} from '../lib/service';
import {ServiceConfigurationOptions} from '../lib/service';
import {ConfigBase as Config} from '../lib/config';
interface Blob {}
declare class IoT1ClickProjects extends Service {
/**
* Constructs a service object. This object has one method for each API operation.
*/
constructor(options?: IoT1ClickProjects.Types.ClientConfiguration)
config: Config & IoT1ClickProjects.Types.ClientConfiguration;
/**
* Associates a physical device with a placement.
*/
associateDeviceWithPlacement(params: IoT1ClickProjects.Types.AssociateDeviceWithPlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.AssociateDeviceWithPlacementResponse) => void): Request<IoT1ClickProjects.Types.AssociateDeviceWithPlacementResponse, AWSError>;
/**
* Associates a physical device with a placement.
*/
associateDeviceWithPlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.AssociateDeviceWithPlacementResponse) => void): Request<IoT1ClickProjects.Types.AssociateDeviceWithPlacementResponse, AWSError>;
/**
* Creates an empty placement.
*/
createPlacement(params: IoT1ClickProjects.Types.CreatePlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.CreatePlacementResponse) => void): Request<IoT1ClickProjects.Types.CreatePlacementResponse, AWSError>;
/**
* Creates an empty placement.
*/
createPlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.CreatePlacementResponse) => void): Request<IoT1ClickProjects.Types.CreatePlacementResponse, AWSError>;
/**
* Creates an empty project with a placement template. A project contains zero or more placements that adhere to the placement template defined in the project.
*/
createProject(params: IoT1ClickProjects.Types.CreateProjectRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.CreateProjectResponse) => void): Request<IoT1ClickProjects.Types.CreateProjectResponse, AWSError>;
/**
* Creates an empty project with a placement template. A project contains zero or more placements that adhere to the placement template defined in the project.
*/
createProject(callback?: (err: AWSError, data: IoT1ClickProjects.Types.CreateProjectResponse) => void): Request<IoT1ClickProjects.Types.CreateProjectResponse, AWSError>;
/**
* Deletes a placement. To delete a placement, it must not have any devices associated with it. When you delete a placement, all associated data becomes irretrievable.
*/
deletePlacement(params: IoT1ClickProjects.Types.DeletePlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DeletePlacementResponse) => void): Request<IoT1ClickProjects.Types.DeletePlacementResponse, AWSError>;
/**
* Deletes a placement. To delete a placement, it must not have any devices associated with it. When you delete a placement, all associated data becomes irretrievable.
*/
deletePlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DeletePlacementResponse) => void): Request<IoT1ClickProjects.Types.DeletePlacementResponse, AWSError>;
/**
* Deletes a project. To delete a project, it must not have any placements associated with it. When you delete a project, all associated data becomes irretrievable.
*/
deleteProject(params: IoT1ClickProjects.Types.DeleteProjectRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DeleteProjectResponse) => void): Request<IoT1ClickProjects.Types.DeleteProjectResponse, AWSError>;
/**
* Deletes a project. To delete a project, it must not have any placements associated with it. When you delete a project, all associated data becomes irretrievable.
*/
deleteProject(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DeleteProjectResponse) => void): Request<IoT1ClickProjects.Types.DeleteProjectResponse, AWSError>;
/**
* Describes a placement in a project.
*/
describePlacement(params: IoT1ClickProjects.Types.DescribePlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DescribePlacementResponse) => void): Request<IoT1ClickProjects.Types.DescribePlacementResponse, AWSError>;
/**
* Describes a placement in a project.
*/
describePlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DescribePlacementResponse) => void): Request<IoT1ClickProjects.Types.DescribePlacementResponse, AWSError>;
/**
* Returns an object describing a project.
*/
describeProject(params: IoT1ClickProjects.Types.DescribeProjectRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DescribeProjectResponse) => void): Request<IoT1ClickProjects.Types.DescribeProjectResponse, AWSError>;
/**
* Returns an object describing a project.
*/
describeProject(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DescribeProjectResponse) => void): Request<IoT1ClickProjects.Types.DescribeProjectResponse, AWSError>;
/**
* Removes a physical device from a placement.
*/
disassociateDeviceFromPlacement(params: IoT1ClickProjects.Types.DisassociateDeviceFromPlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DisassociateDeviceFromPlacementResponse) => void): Request<IoT1ClickProjects.Types.DisassociateDeviceFromPlacementResponse, AWSError>;
/**
* Removes a physical device from a placement.
*/
disassociateDeviceFromPlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DisassociateDeviceFromPlacementResponse) => void): Request<IoT1ClickProjects.Types.DisassociateDeviceFromPlacementResponse, AWSError>;
/**
* Returns an object enumerating the devices in a placement.
*/
getDevicesInPlacement(params: IoT1ClickProjects.Types.GetDevicesInPlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.GetDevicesInPlacementResponse) => void): Request<IoT1ClickProjects.Types.GetDevicesInPlacementResponse, AWSError>;
/**
* Returns an object enumerating the devices in a placement.
*/
getDevicesInPlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.GetDevicesInPlacementResponse) => void): Request<IoT1ClickProjects.Types.GetDevicesInPlacementResponse, AWSError>;
/**
* Lists the placement(s) of a project.
*/
listPlacements(params: IoT1ClickProjects.Types.ListPlacementsRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.ListPlacementsResponse) => void): Request<IoT1ClickProjects.Types.ListPlacementsResponse, AWSError>;
/**
* Lists the placement(s) of a project.
*/
listPlacements(callback?: (err: AWSError, data: IoT1ClickProjects.Types.ListPlacementsResponse) => void): Request<IoT1ClickProjects.Types.ListPlacementsResponse, AWSError>;
/**
* Lists the AWS IoT 1-Click project(s) associated with your AWS account and region.
*/
listProjects(params: IoT1ClickProjects.Types.ListProjectsRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.ListProjectsResponse) => void): Request<IoT1ClickProjects.Types.ListProjectsResponse, AWSError>;
/**
* Lists the AWS IoT 1-Click project(s) associated with your AWS account and region.
*/
listProjects(callback?: (err: AWSError, data: IoT1ClickProjects.Types.ListProjectsResponse) => void): Request<IoT1ClickProjects.Types.ListProjectsResponse, AWSError>;
/**
* Updates a placement with the given attributes. To clear an attribute, pass an empty value (i.e., "").
*/
updatePlacement(params: IoT1ClickProjects.Types.UpdatePlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.UpdatePlacementResponse) => void): Request<IoT1ClickProjects.Types.UpdatePlacementResponse, AWSError>;
/**
* Updates a placement with the given attributes. To clear an attribute, pass an empty value (i.e., "").
*/
updatePlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.UpdatePlacementResponse) => void): Request<IoT1ClickProjects.Types.UpdatePlacementResponse, AWSError>;
/**
* Updates a project associated with your AWS account and region. With the exception of device template names, you can pass just the values that need to be updated because the update request will change only the values that are provided. To clear a value, pass the empty string (i.e., "").
*/
updateProject(params: IoT1ClickProjects.Types.UpdateProjectRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.UpdateProjectResponse) => void): Request<IoT1ClickProjects.Types.UpdateProjectResponse, AWSError>;
/**
* Updates a project associated with your AWS account and region. With the exception of device template names, you can pass just the values that need to be updated because the update request will change only the values that are provided. To clear a value, pass the empty string (i.e., "").
*/
updateProject(callback?: (err: AWSError, data: IoT1ClickProjects.Types.UpdateProjectResponse) => void): Request<IoT1ClickProjects.Types.UpdateProjectResponse, AWSError>;
}
declare namespace IoT1ClickProjects {
export interface AssociateDeviceWithPlacementRequest {
/**
* The name of the project containing the placement in which to associate the device.
*/
projectName: ProjectName;
/**
* The name of the placement in which to associate the device.
*/
placementName: PlacementName;
/**
* The ID of the physical device to be associated with the given placement in the project. Note that a mandatory 4 character prefix is required for all deviceId values.
*/
deviceId: DeviceId;
/**
* The device template name to associate with the device ID.
*/
deviceTemplateName: DeviceTemplateName;
}
export interface AssociateDeviceWithPlacementResponse {
}
export type AttributeDefaultValue = string;
export type AttributeName = string;
export type AttributeValue = string;
export interface CreatePlacementRequest {
/**
* The name of the placement to be created.
*/
placementName: PlacementName;
/**
* The name of the project in which to create the placement.
*/
projectName: ProjectName;
/**
* Optional user-defined key/value pairs providing contextual data (such as location or function) for the placement.
*/
attributes?: PlacementAttributeMap;
}
export interface CreatePlacementResponse {
}
export interface CreateProjectRequest {
/**
* The name of the project to create.
*/
projectName: ProjectName;
/**
* An optional description for the project.
*/
description?: Description;
/**
* The schema defining the placement to be created. A placement template defines placement default attributes and device templates. You cannot add or remove device templates after the project has been created. However, you can update callbackOverrides for the device templates using the UpdateProject API.
*/
placementTemplate?: PlacementTemplate;
}
export interface CreateProjectResponse {
}
export type DefaultPlacementAttributeMap = {[key: string]: AttributeDefaultValue};
export interface DeletePlacementRequest {
/**
* The name of the empty placement to delete.
*/
placementName: PlacementName;
/**
* The project containing the empty placement to delete.
*/
projectName: ProjectName;
}
export interface DeletePlacementResponse {
}
export interface DeleteProjectRequest {
/**
* The name of the empty project to delete.
*/
projectName: ProjectName;
}
export interface DeleteProjectResponse {
}
export interface DescribePlacementRequest {
/**
* The name of the placement within a project.
*/
placementName: PlacementName;
/**
* The project containing the placement to be described.
*/
projectName: ProjectName;
}
export interface DescribePlacementResponse {
/**
* An object describing the placement.
*/
placement: PlacementDescription;
}
export interface DescribeProjectRequest {
/**
* The name of the project to be described.
*/
projectName: ProjectName;
}
export interface DescribeProjectResponse {
/**
* An object describing the project.
*/
project: ProjectDescription;
}
export type Description = string;
export type DeviceCallbackKey = string;
export type DeviceCallbackOverrideMap = {[key: string]: DeviceCallbackValue};
export type DeviceCallbackValue = string;
export type DeviceId = string;
export type DeviceMap = {[key: string]: DeviceId};
export interface DeviceTemplate {
/**
* The device type, which currently must be "button".
*/
deviceType?: DeviceType;
/**
* An optional Lambda function to invoke instead of the default Lambda function provided by the placement template.
*/
callbackOverrides?: DeviceCallbackOverrideMap;
}
export type DeviceTemplateMap = {[key: string]: DeviceTemplate};
export type DeviceTemplateName = string;
export type DeviceType = string;
export interface DisassociateDeviceFromPlacementRequest {
  /**
   * The name of the project that contains the placement.
   */
  projectName: ProjectName;
  /**
   * The name of the placement that the device should be removed from.
   */
  placementName: PlacementName;
  /**
   * The device template name identifying which device slot to clear in the
   * placement. NOTE(review): upstream doc text said "device ID", but the
   * field type is DeviceTemplateName, not DeviceId — confirm against the
   * AWS IoT 1-Click Projects API reference.
   */
  deviceTemplateName: DeviceTemplateName;
}
export interface DisassociateDeviceFromPlacementResponse {
}
export interface GetDevicesInPlacementRequest {
/**
* The name of the project containing the placement.
*/
projectName: ProjectName;
/**
* The name of the placement to get the devices from.
*/
placementName: PlacementName;
}
export interface GetDevicesInPlacementResponse {
/**
* An object containing the devices (zero or more) within the placement.
*/
devices: DeviceMap;
}
export interface ListPlacementsRequest {
/**
* The project containing the placements to be listed.
*/
projectName: ProjectName;
/**
* The token to retrieve the next set of results.
*/
nextToken?: NextToken;
/**
* The maximum number of results to return per request. If not set, a default value of 100 is used.
*/
maxResults?: MaxResults;
}
export interface ListPlacementsResponse {
/**
* An object listing the requested placements.
*/
placements: PlacementSummaryList;
/**
* The token used to retrieve the next set of results - will be effectively empty if there are no further results.
*/
nextToken?: NextToken;
}
export interface ListProjectsRequest {
/**
* The token to retrieve the next set of results.
*/
nextToken?: NextToken;
/**
* The maximum number of results to return per request. If not set, a default value of 100 is used.
*/
maxResults?: MaxResults;
}
export interface ListProjectsResponse {
/**
* An object containing the list of projects.
*/
projects: ProjectSummaryList;
/**
* The token used to retrieve the next set of results - will be effectively empty if there are no further results.
*/
nextToken?: NextToken;
}
export type MaxResults = number;
export type NextToken = string;
export type PlacementAttributeMap = {[key: string]: AttributeValue};
export interface PlacementDescription {
/**
* The name of the project containing the placement.
*/
projectName: ProjectName;
/**
* The name of the placement.
*/
placementName: PlacementName;
/**
* The user-defined attributes associated with the placement.
*/
attributes: PlacementAttributeMap;
/**
* The date when the placement was initially created, in UNIX epoch time format.
*/
createdDate: Time;
/**
* The date when the placement was last updated, in UNIX epoch time format. If the placement was not updated, then createdDate and updatedDate are the same.
*/
updatedDate: Time;
}
export type PlacementName = string;
export interface PlacementSummary {
/**
* The name of the project containing the placement.
*/
projectName: ProjectName;
/**
* The name of the placement being summarized.
*/
placementName: PlacementName;
/**
* The date when the placement was originally created, in UNIX epoch time format.
*/
createdDate: Time;
/**
* The date when the placement was last updated, in UNIX epoch time format. If the placement was not updated, then createdDate and updatedDate are the same.
*/
updatedDate: Time;
}
export type PlacementSummaryList = PlacementSummary[];
export interface PlacementTemplate {
/**
* The default attributes (key/value pairs) to be applied to all placements using this template.
*/
defaultAttributes?: DefaultPlacementAttributeMap;
/**
* An object specifying the DeviceTemplate for all placements using this (PlacementTemplate) template.
*/
deviceTemplates?: DeviceTemplateMap;
}
export interface ProjectDescription {
/**
* The name of the project for which to obtain information from.
*/
projectName: ProjectName;
/**
* The description of the project.
*/
description?: Description;
/**
* The date when the project was originally created, in UNIX epoch time format.
*/
createdDate: Time;
/**
* The date when the project was last updated, in UNIX epoch time format. If the project was not updated, then createdDate and updatedDate are the same.
*/
updatedDate: Time;
/**
* An object describing the project's placement specifications.
*/
placementTemplate?: PlacementTemplate;
}
export type ProjectName = string;
export interface ProjectSummary {
/**
* The name of the project being summarized.
*/
projectName: ProjectName;
/**
* The date when the project was originally created, in UNIX epoch time format.
*/
createdDate: Time;
/**
* The date when the project was last updated, in UNIX epoch time format. If the project was not updated, then createdDate and updatedDate are the same.
*/
updatedDate: Time;
}
export type ProjectSummaryList = ProjectSummary[];
export type Time = Date;
export interface UpdatePlacementRequest {
/**
* The name of the placement to update.
*/
placementName: PlacementName;
/**
* The name of the project containing the placement to be updated.
*/
projectName: ProjectName;
/**
* The user-defined object of attributes used to update the placement. The maximum number of key/value pairs is 50.
*/
attributes?: PlacementAttributeMap;
}
export interface UpdatePlacementResponse {
}
export interface UpdateProjectRequest {
/**
* The name of the project to be updated.
*/
projectName: ProjectName;
/**
* An optional user-defined description for the project.
*/
description?: Description;
/**
* An object defining the project update. Once a project has been created, you cannot add device template names to the project. However, for a given placementTemplate, you can update the associated callbackOverrides for the device definition using this API.
*/
placementTemplate?: PlacementTemplate;
}
export interface UpdateProjectResponse {
}
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
export type apiVersion = "2018-05-14"|"latest"|string;
export interface ClientApiVersions {
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
apiVersion?: apiVersion;
}
export type ClientConfiguration = ServiceConfigurationOptions & ClientApiVersions;
/**
* Contains interfaces for use with the IoT1ClickProjects client.
*/
export import Types = IoT1ClickProjects;
}
export = IoT1ClickProjects;
|
chrisradek/aws-sdk-js
|
clients/iot1clickprojects.d.ts
|
TypeScript
|
apache-2.0
| 20,275 |
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using soothsayer.Infrastructure;
using soothsayer.Infrastructure.IO;
using soothsayer.Migrations;
using soothsayer.Scanners;
using soothsayer.Scripts;
namespace soothsayer
{
public class OracleMigrator : IMigrator
{
private readonly IConnectionFactory _connectionFactory;
private readonly IVersionRespositoryFactory _versionRespositoryFactory;
private readonly IAppliedScriptsRepositoryFactory _appliedScriptsRepositoryFactory;
private readonly IDatabaseMetadataProviderFactory _databaseMetadataProviderFactory;
private readonly IScriptScannerFactory _scriptScannerFactory;
private readonly IScriptRunnerFactory _scriptRunnerFactory;
public OracleMigrator(
IConnectionFactory connectionFactory,
IVersionRespositoryFactory versionRespositoryFactory,
IAppliedScriptsRepositoryFactory appliedScriptsRepositoryFactory,
IDatabaseMetadataProviderFactory databaseMetadataProviderFactory,
IScriptScannerFactory scriptScannerFactory,
IScriptRunnerFactory scriptRunnerFactory)
{
_connectionFactory = connectionFactory;
_versionRespositoryFactory = versionRespositoryFactory;
_databaseMetadataProviderFactory = databaseMetadataProviderFactory;
_scriptScannerFactory = scriptScannerFactory;
_scriptRunnerFactory = scriptRunnerFactory;
_appliedScriptsRepositoryFactory = appliedScriptsRepositoryFactory;
}
public void Migrate(DatabaseConnectionInfo databaseConnectionInfo, MigrationInfo migrationInfo)
{
using (var connection = _connectionFactory.Create(databaseConnectionInfo))
{
Output.Text("Connected to oracle database on connection string '{0}'.".FormatWith(databaseConnectionInfo.ConnectionString));
Output.EmptyLine();
Output.Text("Checking for the current database version.");
var oracleMetadataProvider = _databaseMetadataProviderFactory.Create(connection);
var oracleVersioning = _versionRespositoryFactory.Create(connection);
var oracleAppliedScriptsRepository = _appliedScriptsRepositoryFactory.Create(connection);
var currentVersion = oracleMetadataProvider.SchemaExists(migrationInfo.TargetSchema) ? oracleVersioning.GetCurrentVersion(migrationInfo.TargetSchema) : null;
Output.Info("The current database version is: {0}".FormatWith(currentVersion.IsNotNull() ? currentVersion.Version.ToString(CultureInfo.InvariantCulture) : "<empty>"));
Output.EmptyLine();
Output.Info("Scanning input folder '{0}' for scripts...".FormatWith(migrationInfo.ScriptFolder));
var initScripts = ScanForScripts(migrationInfo, ScriptFolders.Init, _scriptScannerFactory.Create(ScriptFolders.Init)).ToArray();
var upScripts = ScanForScripts(migrationInfo, ScriptFolders.Up, _scriptScannerFactory.Create(ScriptFolders.Up)).ToArray();
var downScripts = ScanForScripts(migrationInfo, ScriptFolders.Down, _scriptScannerFactory.Create(ScriptFolders.Down)).ToArray();
var termScripts = ScanForScripts(migrationInfo, ScriptFolders.Term, _scriptScannerFactory.Create(ScriptFolders.Term)).ToArray();
Output.EmptyLine();
if (migrationInfo.TargetVersion.HasValue)
{
Output.Info("Target database version was provided, will target migrating the database to version {0}".FormatWith(migrationInfo.TargetVersion.Value));
}
VerifyDownScripts(upScripts, downScripts);
var storedMigrationSteps = new List<IStep>();
if (migrationInfo.UseStored)
{
Output.Info("--usestored was specified, fetching set of applied scripts stored in the target database...".FormatWith());
storedMigrationSteps = oracleAppliedScriptsRepository.GetAppliedScripts(migrationInfo.TargetSchema).ToList();
Output.Text(" {0} stored applied scripts found.".FormatWith(storedMigrationSteps.Count));
Output.EmptyLine();
}
var scriptRunner = _scriptRunnerFactory.Create(databaseConnectionInfo);
RunMigration(migrationInfo, currentVersion, initScripts, upScripts, downScripts, termScripts, storedMigrationSteps, scriptRunner, oracleMetadataProvider, oracleVersioning, oracleAppliedScriptsRepository);
if (oracleMetadataProvider.SchemaExists(migrationInfo.TargetSchema))
{
var newVersion = oracleVersioning.GetCurrentVersion(migrationInfo.TargetSchema);
Output.Info("Database version is now: {0}".FormatWith(newVersion.IsNotNull() ? newVersion.Version.ToString(CultureInfo.InvariantCulture) : "<empty>"));
}
else
{
Output.Info("Target schema '{0}' no longer exists.".FormatWith(migrationInfo.TargetSchema));
}
}
}
private static IEnumerable<Script> ScanForScripts(MigrationInfo migrationInfo, string migrationFolder, IScriptScanner scanner)
{
var environments = (migrationInfo.TargetEnvironment ?? Enumerable.Empty<string>()).ToArray();
var scripts = (scanner.Scan(migrationInfo.ScriptFolder.Whack(migrationFolder), environments) ?? Enumerable.Empty<Script>()).ToArray();
Output.Text("Found {0} '{1}' scripts.".FormatWith(scripts.Length, migrationFolder));
foreach (var script in scripts)
{
Output.Verbose(script.Name, 1);
}
return scripts;
}
private static void VerifyDownScripts(IEnumerable<Script> upScripts, IEnumerable<Script> downScripts)
{
var withoutRollback = upScripts.Where(u => downScripts.All(d => d.Version != u.Version)).ToArray();
if (withoutRollback.Any())
{
Output.Warn("The following 'up' scripts do not have a corresponding 'down' script, any rollback may not work as expected:");
foreach (var script in withoutRollback)
{
Output.Warn(script.Name, 1);
}
Output.EmptyLine();
}
}
// Runs a single migration pass. 'Up' scripts are paired with their matching
// 'down' scripts (and 'init' with 'term') by version number, then either the
// downgrade or upgrade path is executed depending on migrationInfo.Direction.
private static void RunMigration(MigrationInfo migrationInfo, DatabaseVersion currentVersion, IEnumerable<Script> initScripts, IEnumerable<Script> upScripts, IEnumerable<Script> downScripts, IEnumerable<Script> termScripts,
    IList<IStep> storedSteps, IScriptRunner scriptRunner, IDatabaseMetadataProvider databaseMetadataProvider, IVersionRespository versionRespository, IAppliedScriptsRepository appliedScriptsRepository)
{
    // Pair scripts by version; FirstOrDefault means a step whose counterpart
    // is missing simply carries a null down/term script.
    var upDownSteps = upScripts.Select(u => new DatabaseStep(u, downScripts.FirstOrDefault(d => d.Version == u.Version))).ToList();
    var initTermSteps = initScripts.Select(i => new DatabaseStep(i, termScripts.FirstOrDefault(t => t.Version == i.Version))).ToList();
    if (migrationInfo.Direction == MigrationDirection.Down)
    {
        var downMigration = new DownMigration(databaseMetadataProvider, versionRespository, appliedScriptsRepository, migrationInfo.Forced);
        if (storedSteps.Any())
        {
            // Scripts recorded in the database at apply time take precedence
            // over whatever 'down' scripts currently sit on disk.
            Output.Warn("NOTE: Using stored applied scripts to perform downgrade instead of local 'down' scripts.");
            downMigration.Migrate(storedSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
        }
        else
        {
            downMigration.Migrate(upDownSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
        }
        // No target version means a complete teardown, so termination scripts
        // run as well; otherwise the downgrade stops at the requested version.
        if (!migrationInfo.TargetVersion.HasValue)
        {
            var termMigration = new TermMigration(databaseMetadataProvider, migrationInfo.Forced);
            termMigration.Migrate(initTermSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
        }
        else
        {
            Output.Info("A target version was provided, termination scripts will not be executed.");
        }
    }
    else
    {
        // Upgrade path: run initialisation scripts, make sure the bookkeeping
        // tables exist, then apply the 'up' steps.
        var initMigration = new InitMigration(databaseMetadataProvider, migrationInfo.Forced);
        initMigration.Migrate(initTermSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
        EnsureVersioningTableIsInitialised(versionRespository, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
        EnsureAppliedScriptsTableIsInitialised(appliedScriptsRepository, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
        var upMigration = new UpMigration(versionRespository, appliedScriptsRepository, migrationInfo.Forced);
        upMigration.Migrate(upDownSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
    }
}
// Creates the applied-scripts bookkeeping table unless it already exists.
private static void EnsureAppliedScriptsTableIsInitialised(IAppliedScriptsRepository appliedScriptsRepository, string targetSchema, string targetTablespace)
{
    if (appliedScriptsRepository.AppliedScriptsTableExists(targetSchema))
    {
        return;
    }
    appliedScriptsRepository.InitialiseAppliedScriptsTable(targetSchema, targetTablespace);
}
// Creates the versioning bookkeeping table unless it already exists.
private static void EnsureVersioningTableIsInitialised(IVersionRespository versionRespository, string targetSchema, string targetTablespace)
{
    if (versionRespository.VersionTableExists(targetSchema))
    {
        return;
    }
    versionRespository.InitialiseVersioningTable(targetSchema, targetTablespace);
}
}
}
|
paybyphone/soothsayer
|
soothsayer/OracleMigrator.cs
|
C#
|
apache-2.0
| 10,677 |
package org.apache.uima.casviewer.core.internal;
import java.util.List;
/**
 * A node that contains a list of AnnotationObject(s).
 */
public class AnnotationObjectsNode {

    /** The annotations held by this node; may be null until a list is set. */
    protected List<AnnotationObject> annotationList;

    /** Creates an empty node with no annotation list. */
    public AnnotationObjectsNode() {
    }

    /**
     * Creates a node holding the given list.
     *
     * @param list the annotations this node should hold
     */
    public AnnotationObjectsNode(List<AnnotationObject> list) {
        annotationList = list;
    }

    /**
     * @return the annotationList
     */
    public List<AnnotationObject> getAnnotationList() {
        return annotationList;
    }

    /**
     * @param list the annotation list to set
     */
    public void setAnnotationList(List<AnnotationObject> list) {
        this.annotationList = list;
    }
}
|
apache/uima-sandbox
|
CasViewerEclipsePlugin/uimaj-ep-casviewer-core/src/main/java/org/apache/uima/casviewer/core/internal/AnnotationObjectsNode.java
|
Java
|
apache-2.0
| 717 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.injection.producer;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.util.Set;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.event.Observes;
import javax.enterprise.event.ObservesAsync;
import javax.enterprise.inject.CreationException;
import javax.enterprise.inject.Disposes;
import javax.enterprise.inject.spi.AnnotatedMember;
import javax.enterprise.inject.spi.InjectionPoint;
import javax.enterprise.inject.spi.Producer;
import org.jboss.weld.annotated.enhanced.EnhancedAnnotatedMethod;
import org.jboss.weld.bean.DisposalMethod;
import org.jboss.weld.bean.SessionBean;
import org.jboss.weld.exceptions.DefinitionException;
import org.jboss.weld.injection.InjectionPointFactory;
import org.jboss.weld.injection.MethodInjectionPoint;
import org.jboss.weld.injection.MethodInjectionPoint.MethodInjectionPointType;
import org.jboss.weld.logging.BeanLogger;
import org.jboss.weld.security.GetMethodAction;
import org.jboss.weld.util.reflection.Formats;
import org.jboss.weld.util.reflection.Reflections;
/**
 * {@link Producer} implementation for producer methods.
 *
 * @author Jozef Hartinger
 *
 */
public abstract class ProducerMethodProducer<X, T> extends AbstractMemberProducer<X, T> {

    private static final String PRODUCER_ANNOTATION = "@Produces";

    // The underlying method
    private final MethodInjectionPoint<T, ? super X> method;

    public ProducerMethodProducer(EnhancedAnnotatedMethod<T, ? super X> enhancedAnnotatedMethod, DisposalMethod<?, ?> disposalMethod) {
        super(enhancedAnnotatedMethod, disposalMethod);
        // Note that for producer method injection points the declaring bean is the producer method itself
        this.method = InjectionPointFactory.instance().createMethodInjectionPoint(MethodInjectionPointType.PRODUCER, enhancedAnnotatedMethod, getBean(), enhancedAnnotatedMethod.getDeclaringType().getJavaClass(), null, getBeanManager());
        // Validate before registering delegate injection points so definition
        // errors surface as early as possible.
        checkProducerMethod(enhancedAnnotatedMethod);
        checkDelegateInjectionPoints();
    }

    /**
     * Validates the producer method. A producer method must not double as an
     * observer or disposer method, and a non-static producer declared on a
     * session bean must be a business method (declared by one of the bean's
     * types); each violation raises a definition error.
     */
    protected void checkProducerMethod(EnhancedAnnotatedMethod<T, ? super X> method) {
        if (method.getEnhancedParameters(Observes.class).size() > 0) {
            // @Produces + @Observes on the same method is a definition error.
            throw BeanLogger.LOG.inconsistentAnnotationsOnMethod(PRODUCER_ANNOTATION, "@Observes", this.method,
                    Formats.formatAsStackTraceElement(method.getJavaMember()));
        } else if (method.getEnhancedParameters(ObservesAsync.class).size() > 0) {
            throw BeanLogger.LOG.inconsistentAnnotationsOnMethod(PRODUCER_ANNOTATION, "@ObservesAsync", this.method,
                    Formats.formatAsStackTraceElement(method.getJavaMember()));
        } else if (method.getEnhancedParameters(Disposes.class).size() > 0) {
            throw BeanLogger.LOG.inconsistentAnnotationsOnMethod(PRODUCER_ANNOTATION, "@Disposes", this.method,
                    Formats.formatAsStackTraceElement(method.getJavaMember()));
        } else if (getDeclaringBean() instanceof SessionBean<?> && !Modifier.isStatic(method.slim().getJavaMember().getModifiers())) {
            // Search every bean type for a method with the same name and
            // parameter types; a privileged lookup is used because the types
            // may not be publicly accessible.
            boolean methodDeclaredOnTypes = false;
            for (Type type : getDeclaringBean().getTypes()) {
                Class<?> clazz = Reflections.getRawType(type);
                try {
                    AccessController.doPrivileged(new GetMethodAction(clazz, method.getName(), method.getParameterTypesAsArray()));
                    methodDeclaredOnTypes = true;
                    break;
                } catch (PrivilegedActionException ignored) {
                    // Not declared on this type; keep searching the remaining types.
                }
            }
            if (!methodDeclaredOnTypes) {
                throw BeanLogger.LOG.methodNotBusinessMethod("Producer", this, getDeclaringBean(), Formats.formatAsStackTraceElement(method.getJavaMember()));
            }
        }
    }

    @Override
    public Set<InjectionPoint> getInjectionPoints() {
        return method.getInjectionPoints();
    }

    @Override
    protected T produce(Object receiver, CreationalContext<T> ctx) {
        // Invoke the producer method on the receiver; invocation failures are
        // wrapped in CreationException.
        return method.invoke(receiver, null, getBeanManager(), ctx, CreationException.class);
    }

    @Override
    public AnnotatedMember<? super X> getAnnotated() {
        return method.getAnnotated();
    }

    @Override
    protected DefinitionException producerWithInvalidTypeVariable(AnnotatedMember<?> member) {
        return BeanLogger.LOG.producerMethodReturnTypeInvalidTypeVariable(member, Formats.formatAsStackTraceElement(member.getJavaMember()));
    }

    @Override
    protected DefinitionException producerWithInvalidWildcard(AnnotatedMember<?> member) {
        return BeanLogger.LOG.producerMethodCannotHaveAWildcardReturnType(member, Formats.formatAsStackTraceElement(member.getJavaMember()));
    }

    @Override
    protected DefinitionException producerWithParameterizedTypeWithTypeVariableBeanTypeMustBeDependent(AnnotatedMember<?> member) {
        return BeanLogger.LOG.producerMethodWithTypeVariableReturnTypeMustBeDependent(member, Formats.formatAsStackTraceElement(member.getJavaMember()));
    }
}
|
antoinesd/weld-core
|
impl/src/main/java/org/jboss/weld/injection/producer/ProducerMethodProducer.java
|
Java
|
apache-2.0
| 5,998 |
package controllers;
import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient;
import play.shaded.ahc.org.asynchttpclient.BoundRequestBuilder;
import play.shaded.ahc.org.asynchttpclient.ListenableFuture;
import play.shaded.ahc.org.asynchttpclient.ws.WebSocket;
import play.shaded.ahc.org.asynchttpclient.ws.WebSocketListener;
import play.shaded.ahc.org.asynchttpclient.ws.WebSocketTextListener;
import play.shaded.ahc.org.asynchttpclient.ws.WebSocketUpgradeHandler;
import org.slf4j.Logger;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.function.Consumer;
/**
 * A quick wrapper around AHC WebSocket
 *
 * https://github.com/AsyncHttpClient/async-http-client/blob/2.0/client/src/main/java/org/asynchttpclient/ws/WebSocket.java
 */
public class WebSocketClient {

    private final AsyncHttpClient asyncHttpClient;

    public WebSocketClient(AsyncHttpClient c) {
        this.asyncHttpClient = c;
    }

    /** Opens a websocket connection to the given URL, notifying the listener of events. */
    public CompletableFuture<WebSocket> call(String url, WebSocketTextListener listener) throws ExecutionException, InterruptedException {
        BoundRequestBuilder request = asyncHttpClient.prepareGet(url);
        WebSocketUpgradeHandler upgrade = new WebSocketUpgradeHandler.Builder()
                .addWebSocketListener(listener)
                .build();
        return request.execute(upgrade).toCompletableFuture();
    }

    /** Listener that forwards messages to a callback and records the first error seen. */
    static class LoggingListener implements WebSocketTextListener {
        private Logger logger = org.slf4j.LoggerFactory.getLogger(LoggingListener.class);

        private final Consumer<String> onMessageCallback;
        private Throwable throwableFound = null;

        public LoggingListener(Consumer<String> onMessageCallback) {
            this.onMessageCallback = onMessageCallback;
        }

        public Throwable getThrowable() {
            return throwableFound;
        }

        public void onOpen(WebSocket websocket) {
            // No-op: connection bookkeeping is left to the caller.
        }

        public void onClose(WebSocket websocket) {
            // No-op.
        }

        public void onError(Throwable t) {
            // Remember the failure so callers can inspect it via getThrowable().
            throwableFound = t;
        }

        @Override
        public void onMessage(String s) {
            onMessageCallback.accept(s);
        }
    }
}
|
play2-maven-plugin/play2-maven-test-projects
|
play26/java/websocket-example-using-webjars-assets/test/controllers/WebSocketClient.java
|
Java
|
apache-2.0
| 2,469 |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.

# COM interop utility module
import sys
import nt
from iptest.assert_util import *
from iptest.file_util import *
from iptest.process_util import *

if is_cli:
    # Running under IronPython: load the CLR plus IronPython's test assemblies.
    import clr
    from System import Type
    from System import Activator
    from System import Exception as System_dot_Exception

    remove_ironpython_dlls(testpath.public_testdir)
    load_iron_python_dll()
    import IronPython

    load_iron_python_test()
    import IronPythonTest

    #--For asserts in IP/DLR assemblies----------------------------------------
    from System.Diagnostics import Debug, DefaultTraceListener

    class MyTraceListener(DefaultTraceListener):
        # Replaces the default trace listener so a Debug.Assert failure in the
        # DLR/IronPython assemblies aborts the run with a readable message
        # instead of showing an interactive dialog.
        def Fail(self, msg, detailMsg=''):
            print "ASSERT FAILED:", msg
            if detailMsg!='':
                print "    ", detailMsg
            sys.exit(1)

    if is_snap:
        Debug.Listeners.Clear()
        Debug.Listeners.Add(MyTraceListener())

is_pywin32 = False
if sys.platform=="win32":
    # Under CPython on Windows, fall back to pywin32 for COM when available.
    try:
        import win32com.client
        is_pywin32 = True
        if sys.prefix not in nt.environ["Path"]:
            nt.environ["Path"] += ";" + sys.prefix
    except:
        pass

#------------------------------------------------------------------------------
#--GLOBALS

# Paths to COM servers exercised by the interop tests.
windir = get_environ_variable("windir")
agentsvr_path = path_combine(windir, r"msagent\agentsvr.exe")
scriptpw_path = path_combine(windir, r"system32\scriptpw.dll")

# Representative string inputs, duplicated as both unicode and str.
STRING_VALUES = [ "", "a", "ab", "abc", "aa",
                  "a" * 100000,
                  "1", "1.0", "1L", "object", "str", "object()",
                  " ", "_", "abc ", " abc", " abc ", "ab c", "ab c",
                  "\ta", "a\t", "\n", "\t", "\na", "a\n"]
STRING_VALUES = [unicode(x) for x in STRING_VALUES] + STRING_VALUES
# Sample callables/classes used as deliberately "wrong" argument values below.
def aFunc(): pass

class KNew(object): pass

class KOld: pass

# Values that are not numbers at all; used to provoke conversion failures.
NON_NUMBER_VALUES = [ object,
                      KNew, KOld,
                      Exception,
                      object(), KNew(), KOld(),
                      aFunc, str, eval, type,
                      [], [3.14], ["abc"],
                      (), (3,), (u"xyz",),
                      xrange(5),
                      {}, {'a':1},
                      __builtins__,
                    ]

# Floating point, unsigned/signed integer, long and complex sample values.
FPN_VALUES = [ -1.23, -1.0, -0.123, -0.0, 0.123, 1.0, 1.23,
               0.0000001, 3.14159265, 1E10, 1.0E10 ]
UINT_VALUES = [ 0, 1, 2, 7, 10, 32]
INT_VALUES = [ -x for x in UINT_VALUES ] + UINT_VALUES
LONG_VALUES = [long(x) for x in INT_VALUES]
COMPLEX_VALUES = [ 3j]

#--Subclasses of Python/.NET types
# These verify that subclasses of builtin/.NET types marshal like their bases.
class Py_Str(str): pass
if is_cli:
    class Py_System_String(System.String): pass

class Py_Float(float): pass
class Py_Double(float): pass
if is_cli:
    class Py_System_Double(System.Double): pass

class Py_UShort(int): pass
class Py_ULong(long): pass
class Py_ULongLong(long): pass
class Py_Short(int): pass
class Py_Long(int): pass
if is_cli:
    class Py_System_Int32(System.Int32): pass
class Py_LongLong(long): pass

#-------Helpers----------------
def shallow_copy(in_list):
    '''Return a new list holding the same elements as in_list.

    Implemented locally because the copy module may not be available.
    '''
    return list(in_list)
def pos_num_helper(clr_type):
    '''Boundary values covering clr_type's range, from MinValue up to MaxValue.'''
    lo = clr_type.MinValue
    hi = clr_type.MaxValue
    return [lo, lo + 1, lo + 2, lo + 10,
            hi/2,
            hi - 10, hi - 2, hi - 1, hi]
def overflow_num_helper(clr_type):
    '''Values just outside clr_type's range, below MinValue and above MaxValue.'''
    lo = clr_type.MinValue
    hi = clr_type.MaxValue
    return [lo - 1, lo - 2, lo - 3, lo - 10,
            hi + 10, hi + 3, hi + 2, hi + 1]
def valueErrorTrigger(in_type):
    '''Returns the list of Python values expected to raise ValueError when
    passed to a COM method parameter of type in_type (a COM type name such as
    "BYTE" or "BSTR").  The win32 branches deliberately diverge to match known
    CPython/pywin32 incompatibilities noted inline.
    '''
    ret_val = {}
    ############################################################
    #Is there anything in Python not being able to evaluate to a bool?
    ret_val["VARIANT_BOOL"] = [ ]
    ############################################################
    ret_val["BYTE"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["BYTE"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["BYTE"] += FPN_VALUES #Merlin 323751
        ret_val["BYTE"] = [x for x in ret_val["BYTE"] if type(x) not in [unicode, str]] #INCOMPAT BUG - should be ValueError
        ret_val["BYTE"] = [x for x in ret_val["BYTE"] if not isinstance(x, KOld)] #INCOMPAT BUG - should be AttributeError
    ############################################################
    ret_val["BSTR"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["BSTR"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["BSTR"] = [] #INCOMPAT BUG
    #strip out string values
    ret_val["BSTR"] = [x for x in ret_val["BSTR"] if type(x) is not str and type(x) is not KNew and type(x) is not KOld and type(x) is not object]
    ############################################################
    ret_val["CHAR"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["CHAR"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["CHAR"] += FPN_VALUES #Merlin 323751
    ############################################################
    ret_val["FLOAT"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["FLOAT"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["FLOAT"] += UINT_VALUES + INT_VALUES #COMPAT BUG
    ############################################################
    # DOUBLE shares FLOAT's trigger values.
    ret_val["DOUBLE"] = shallow_copy(ret_val["FLOAT"])
    ############################################################
    ret_val["USHORT"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["USHORT"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["USHORT"] += FPN_VALUES #Merlin 323751
    ############################################################
    # The wider integer types reuse the narrower types' trigger values.
    ret_val["ULONG"] = shallow_copy(ret_val["USHORT"])
    ############################################################
    ret_val["ULONGLONG"] = shallow_copy(ret_val["ULONG"])
    ############################################################
    ret_val["SHORT"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["SHORT"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["SHORT"] += FPN_VALUES #Merlin 323751
    ############################################################
    ret_val["LONG"] = shallow_copy(ret_val["SHORT"])
    ############################################################
    ret_val["LONGLONG"] = shallow_copy(ret_val["LONG"])
    ############################################################
    return ret_val[in_type]
def typeErrorTrigger(in_type):
    '''Returns the list of Python values expected to raise TypeError when
    passed to a COM method parameter of type in_type.  No such values are
    currently tracked, so every known COM type name maps to a fresh empty
    list; an unknown name raises KeyError.
    '''
    com_type_names = ["VARIANT_BOOL", "BYTE", "BSTR", "CHAR",
                      "FLOAT", "DOUBLE",
                      "USHORT", "ULONG", "ULONGLONG",
                      "SHORT", "LONG", "LONGLONG"]
    ret_val = dict([(name, []) for name in com_type_names])
    return ret_val[in_type]
def overflowErrorTrigger(in_type):
    '''Returns the list of numeric values expected to raise OverflowError when
    passed to a COM method parameter of type in_type.  Values are generated
    just outside each .NET type's Min/MaxValue via overflow_num_helper.
    '''
    ret_val = {}
    ############################################################
    ret_val["VARIANT_BOOL"] = []
    ############################################################
    ret_val["BYTE"] = []
    ret_val["BYTE"] += overflow_num_helper(System.Byte)
    ############################################################
    #Doesn't seem possible to create a value (w/o 1st overflowing
    #in Python) to pass to the COM method which will overflow.
    ret_val["BSTR"] = [] #["0123456789" * 1234567890]
    ############################################################
    ret_val["CHAR"] = []
    ret_val["CHAR"] += overflow_num_helper(System.SByte)
    ############################################################
    ret_val["FLOAT"] = []
    ret_val["FLOAT"] += overflow_num_helper(System.Double)

    #Shouldn't be possible to overflow a double.
    ret_val["DOUBLE"] = []
    ############################################################
    ret_val["USHORT"] = []
    ret_val["USHORT"] += overflow_num_helper(System.UInt16)

    ret_val["ULONG"] = []
    ret_val["ULONG"] += overflow_num_helper(System.UInt32)

    # 64-bit and 32-bit signed cases disabled; see Dev10 475426.
    ret_val["ULONGLONG"] = []
    # Dev10 475426
    #ret_val["ULONGLONG"] += overflow_num_helper(System.UInt64)

    ret_val["SHORT"] = []
    ret_val["SHORT"] += overflow_num_helper(System.Int16)

    ret_val["LONG"] = []
    # Dev10 475426
    #ret_val["LONG"] += overflow_num_helper(System.Int32)

    ret_val["LONGLONG"] = []
    # Dev10 475426
    #ret_val["LONGLONG"] += overflow_num_helper(System.Int64)
    ############################################################
    return ret_val[in_type]
def pythonToCOM(in_type):
    '''
    Given a COM type (in string format), this helper function returns a list of
    lists where each sublists contains 1-N elements. Each of these elements in
    turn are of different types (compatible with in_type), but equivalent to
    one another.
    '''
    ret_val = {}
    ############################################################
    # Each entry: apply every conversion in temp_funcs to every sample value,
    # yielding rows of equivalent representations of the same value.
    temp_funcs = [int, bool, System.Boolean]  # long, Dev10 475426
    temp_values = [ 0, 1, True, False]
    ret_val["VARIANT_BOOL"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [System.Byte]
    temp_values = pos_num_helper(System.Byte)
    ret_val["BYTE"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [ str, unicode, # Py_Str, Py_System_String,
                   System.String ]
    temp_values = shallow_copy(STRING_VALUES)
    ret_val["BSTR"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [System.SByte]
    temp_values = pos_num_helper(System.SByte)
    ret_val["CHAR"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [ float, # Py_Float,
                   System.Single]
    ret_val["FLOAT"] = [ [y(x) for y in temp_funcs] for x in FPN_VALUES]
    ############################################################
    # DOUBLE additionally covers the wider double-only range, plus everything
    # FLOAT covers.
    temp_funcs = [ float, System.Double] # Py_Double, Py_System_Double,
    temp_values = [-1.0e+308, 1.0e308] + FPN_VALUES
    ret_val["DOUBLE"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ret_val["DOUBLE"] += ret_val["FLOAT"]
    ############################################################
    temp_funcs = [int, System.UInt16] # Py_UShort,
    temp_values = pos_num_helper(System.UInt16)
    ret_val["USHORT"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    # Wider integer types include the narrower types' rows as well.
    temp_funcs = [int, System.UInt32] # Py_ULong,
    temp_values = pos_num_helper(System.UInt32) + pos_num_helper(System.UInt16)
    ret_val["ULONG"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ret_val["ULONG"] += ret_val["USHORT"]
    ############################################################
    temp_funcs = [int, long, System.UInt64] # Py_ULongLong,
    temp_values = pos_num_helper(System.UInt64) + pos_num_helper(System.UInt32) + pos_num_helper(System.UInt16)
    ret_val["ULONGLONG"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ret_val["ULONGLONG"] += ret_val["ULONG"]
    ############################################################
    temp_funcs = [int, System.Int16] # Py_Short,
    temp_values = pos_num_helper(System.Int16)
    ret_val["SHORT"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [int, System.Int32] # Py_Long, Dev10 475426
    temp_values = pos_num_helper(System.Int32) + pos_num_helper(System.Int16)
    ret_val["LONG"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ret_val["LONG"] += ret_val["SHORT"]
    ############################################################
    temp_funcs = [int, long, System.Int64] # Py_LongLong, Dev10 475426
    temp_values = pos_num_helper(System.Int64) + pos_num_helper(System.Int32) + pos_num_helper(System.Int16)
    ret_val["LONGLONG"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ret_val["LONGLONG"] += ret_val["LONG"]
    ############################################################
    return ret_val[in_type]
#------------------------------------------------------------------------------
#--Override a couple of definitions from assert_util
from iptest import assert_util
DEBUG = 1  # When non-zero, skipped asserts are reported to stdout.

def assert_helper(in_dict):
    # Decides whether an assert wrapper should actually run, based on the
    # keyword arguments passed to it.  Recognised keys (all removed from
    # in_dict as a side effect): "runonly" (default True), "skip" (default
    # False) and optional "bugid" used only for the skip message.
    # Returns True when the assert should be executed.

    #add the keys if they're not there
    if not in_dict.has_key("runonly"): in_dict["runonly"] = True
    if not in_dict.has_key("skip"): in_dict["skip"] = False

    #determine whether this test will be run or not
    run = in_dict["runonly"] and not in_dict["skip"]

    #strip out the keys
    for x in ["runonly", "skip"]: in_dict.pop(x)

    if not run:
        if in_dict.has_key("bugid"):
            print "...skipped an assert due to bug", str(in_dict["bugid"])
        elif DEBUG:
            print "...skipped an assert on", sys.platform

    if in_dict.has_key("bugid"): in_dict.pop("bugid")
    return run
# Thin wrappers over assert_util's assert functions.  Each first consults
# assert_helper so individual asserts can be skipped via runonly/skip/bugid
# keyword arguments; the Error variants also print the failing call before
# re-raising to aid debugging.
def Assert(*args, **kwargs):
    if assert_helper(kwargs): assert_util.Assert(*args, **kwargs)

def AreEqual(*args, **kwargs):
    if assert_helper(kwargs): assert_util.AreEqual(*args, **kwargs)

def AssertError(*args, **kwargs):
    try:
        if assert_helper(kwargs): assert_util.AssertError(*args, **kwargs)
    except Exception, e:
        print "AssertError(" + str(args) + ", " + str(kwargs) + ") failed!"
        raise e

def AssertErrorWithMessage(*args, **kwargs):
    try:
        if assert_helper(kwargs): assert_util.AssertErrorWithMessage(*args, **kwargs)
    except Exception, e:
        print "AssertErrorWithMessage(" + str(args) + ", " + str(kwargs) + ") failed!"
        raise e

def AssertErrorWithPartialMessage(*args, **kwargs):
    try:
        if assert_helper(kwargs): assert_util.AssertErrorWithPartialMessage(*args, **kwargs)
    except Exception, e:
        print "AssertErrorWithPartialMessage(" + str(args) + ", " + str(kwargs) + ") failed!"
        raise e

def AlmostEqual(*args, **kwargs):
    if assert_helper(kwargs): assert_util.AlmostEqual(*args, **kwargs)
#------------------------------------------------------------------------------
#--HELPERS
def TryLoadExcelInteropAssembly():
    '''Best-effort load of the Excel interop assembly, preferring Office 12
    over Office 11; silently does nothing when neither is available.'''
    try:
        clr.AddReferenceByName('Microsoft.Office.Interop.Excel, Version=12.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
        return
    except:
        pass
    try:
        clr.AddReferenceByName('Microsoft.Office.Interop.Excel, Version=11.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
    except:
        pass
#------------------------------------------------------------------------------
def TryLoadWordInteropAssembly():
    '''Best-effort load of the Word interop assembly, preferring Office 12
    over Office 11; silently does nothing when neither is available.'''
    try:
        clr.AddReferenceByName('Microsoft.Office.Interop.Word, Version=12.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
        return
    except:
        pass
    try:
        clr.AddReferenceByName('Microsoft.Office.Interop.Word, Version=11.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
    except:
        pass
#------------------------------------------------------------------------------
def IsExcelInstalled():
    '''Returns True when Excel (Office 11 or 12) is registered AND its binary
    actually exists on disk.'''
    from Microsoft.Win32 import Registry
    from System.IO import File

    #Office 11 or 12 are both OK for this test. Office 12 is preferred.
    excel = Registry.LocalMachine.OpenSubKey("Software\\Microsoft\\Office\\12.0\\Excel\\InstallRoot")
    if excel is None:
        excel = Registry.LocalMachine.OpenSubKey("Software\\Microsoft\\Office\\11.0\\Excel\\InstallRoot")

    #sanity check
    if excel is None:
        return False

    #make sure it's really installed on disk
    return File.Exists(excel.GetValue("Path") + "excel.exe")
#------------------------------------------------------------------------------
def IsWordInstalled():
    '''Returns True when Word (Office 11 or 12) is registered AND its binary
    actually exists on disk.'''
    from Microsoft.Win32 import Registry
    from System.IO import File

    #Office 11 or 12 are both OK for this test. Office 12 is preferred.
    word = Registry.LocalMachine.OpenSubKey("Software\\Microsoft\\Office\\12.0\\Word\\InstallRoot")
    if word is None:
        word = Registry.LocalMachine.OpenSubKey("Software\\Microsoft\\Office\\11.0\\Word\\InstallRoot")

    #sanity check
    if word is None:
        return False

    #make sure it's really installed on disk
    return File.Exists(word.GetValue("Path") + "winword.exe")
#------------------------------------------------------------------------------
def CreateExcelApplication():
    '''Instantiates Excel through its registered COM ProgID.'''
    import System
    excel_type = System.Type.GetTypeFromProgID("Excel.Application")
    return System.Activator.CreateInstance(excel_type)
#------------------------------------------------------------------------------
def CreateWordApplication():
    '''Instantiates Word through its registered COM ProgID.'''
    import System
    word_type = System.Type.GetTypeFromProgID("Word.Application")
    return System.Activator.CreateInstance(word_type)
#------------------------------------------------------------------------------
def CreateAgentServer():
    # Creates an MS Agent server instance by loading its type library via the
    # library's registered GUID (IronPython-only path).
    import clr
    from System import Guid
    typelib = clr.LoadTypeLibrary(Guid("A7B93C73-7B81-11D0-AC5F-00C04FD97575"))
    return typelib.AgentServerObjects.AgentServer()
#------------------------------------------------------------------------------
def CreateDlrComServer():
    '''Instantiates the DlrComLibrary test server, via the CLR under
    IronPython or via pywin32 under CPython.'''
    com_type_name = "DlrComLibrary.DlrComServer"
    if is_cli:
        return getRCWFromProgID(com_type_name)
    return win32com.client.Dispatch(com_type_name)
#------------------------------------------------------------------------------
def getTypeFromProgID(prog_id):
    '''
    Returns the Type object for prog_id.
    '''
    # NOTE(review): presumably returns None for an unregistered ProgID
    # (standard System.Type.GetTypeFromProgID behavior) -- callers should check.
    return Type.GetTypeFromProgID(prog_id)
#------------------------------------------------------------------------------
def getRCWFromProgID(prog_id):
    '''
    Returns an instance of prog_id.
    '''
    if not is_cli:
        return win32com.client.Dispatch(prog_id)
    return Activator.CreateInstance(getTypeFromProgID(prog_id))
#------------------------------------------------------------------------------
def genPeverifyInteropAsm(file):
    # Generates (via tlbimp.exe) the COM interop assemblies that a peverify
    # test run expects to find in %TEMP%, based on which COM test module
    # 'file' is.  No-op unless this run will invoke peverify.

    #if this isn't a test run that will invoke peverify there's no point in
    #continuing
    if not is_peverify_run:
        return
    else:
        mod_name = file.rsplit("\\", 1)[1].split(".py")[0]
        print "Generating interop assemblies for the", mod_name, "test module which are needed in %TEMP% by peverify..."
        from System.IO import Path
        tempDir = Path.GetTempPath()
        cwd = nt.getcwd()

    #maps COM interop test module names to a list of DLLs
    module_dll_dict = {
        "excel" : [],
        "msagent" : [agentsvr_path],
        "scriptpw" : [scriptpw_path],
        "word" : [],
    }

    # All DlrComLibrary-based modules share the same test DLL; its location
    # depends on whether this is a 32-bit or 64-bit CLR.
    dlrcomlib_list = [ "dlrcomserver", "paramsinretval", "method", "obj", "prop", ]
    if is_cli32:
        temp_name = testpath.rowan_root + "\\Test\\DlrComLibrary\\Debug\\DlrComLibrary.dll"
    else:
        temp_name = testpath.rowan_root + "\\Test\\DlrComLibrary\\x64\\Release\\DlrComLibrary.dll"
    for mod_name in dlrcomlib_list: module_dll_dict[mod_name] = [ temp_name ]

    if not file_exists_in_path("tlbimp.exe"):
        print "ERROR: tlbimp.exe is not in the path!"
        sys.exit(1)

    try:
        if not module_dll_dict.has_key(mod_name):
            print "WARNING: cannot determine which interop assemblies to install!"
            print "         This may affect peverify runs adversely."
            print
            return
        else:
            # Run tlbimp from %TEMP% so the interop assemblies land there.
            nt.chdir(tempDir)
            for com_dll in module_dll_dict[mod_name]:
                if not file_exists(com_dll):
                    print "\tERROR: %s does not exist!" % (com_dll)
                    continue
                print "\trunning tlbimp on", com_dll
                run_tlbimp(com_dll)
    finally:
        # Always restore the original working directory.
        nt.chdir(cwd)
#------------------------------------------------------------------------------
#--Fake parts of System for compat tests
if sys.platform=="win32":
class System:
class Byte(int):
MinValue = 0
MaxValue = 255
class SByte(int):
MinValue = -128
MaxValue = 127
class Int16(int):
MinValue = -32768
MaxValue = 32767
class UInt16(int):
MinValue = 0
MaxValue = 65535
class Int32(int):
MinValue = -2147483648
MaxValue = 2147483647
class UInt32(long):
MinValue = 0
MaxValue = 4294967295
class Int64(long):
MinValue = -9223372036854775808L
MaxValue = 9223372036854775807L
class UInt64(long):
MinValue = 0L
MaxValue = 18446744073709551615
class Single(float):
MinValue = -3.40282e+038
MaxValue = 3.40282e+038
class Double(float):
MinValue = -1.79769313486e+308
MaxValue = 1.79769313486e+308
class String(str):
pass
class Boolean(int):
pass
#------------------------------------------------------------------------------
def run_com_test(name, file):
    # Standard entry point for a COM interop test module: run its tests, then
    # generate the interop assemblies peverify needs (no-op for normal runs).
    run_test(name)
    genPeverifyInteropAsm(file)
|
slozier/ironpython2
|
Src/IronPython/Lib/iptest/cominterop_util.py
|
Python
|
apache-2.0
| 24,101 |
package com.badlogic.gdx.backends.jglfw;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics.DisplayMode;
import com.badlogic.gdx.backends.jglfw.JglfwGraphics.JglfwDisplayMode;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.utils.Array;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
/** @author Nathan Sweet */
public class JglfwApplicationConfiguration {
	/** Title of application window. **/
	public String title = "";
	/** Initial width of the application window. **/
	public int width = 640;
	/** Initial height of the application window. **/
	public int height = 480;
	/** Initial x coordinate of the application window, -1 for center. **/
	public int x = -1;
	/** Initial y coordinate of the application window, -1 for center. **/
	public int y = -1;
	/** True to start in fullscreen. **/
	public boolean fullscreen;
	/** Monitor index to use for fullscreen. **/
	public int fullscreenMonitorIndex = -1;
	/** Number of bits per color channel. **/
	public int r = 8, g = 8, b = 8, a = 8;
	/** Number of bits for the depth buffer. **/
	public int depth = 16;
	/** Number of bits for the stencil buffer. **/
	public int stencil = 0;
	/** Number of samples for MSAA **/
	public int samples = 0;
	/** True to enable vsync. **/
	public boolean vSync = true;
	/** True if the window is resizable. **/
	public boolean resizable = true;
	/** True to attempt to use OpenGL ES 2.0. Note {@link Gdx#gl20} may be null even when this is true. **/
	public boolean useGL20;
	/** True to call System.exit() when the main loop is complete. **/
	public boolean forceExit = true;
	/** True to have a title and border around the window. **/
	public boolean undecorated;
	/** Causes the main loop to run on the EDT instead of a new thread, for easier interoperability with AWT/Swing. Broken on Linux. **/
	public boolean runOnEDT;
	/** The color to clear the window immediately after creation. **/
	public Color initialBackgroundColor = Color.BLACK;
	/** True to hide the window when it is created. The window must be shown with {@link JglfwGraphics#show()}. **/
	public boolean hidden;
	/** Target framerate when the window is in the foreground. The CPU sleeps as needed. Use 0 to never sleep. **/
	public int foregroundFPS;
	/** Target framerate when the window is in the background. The CPU sleeps as needed. Use 0 to never sleep, -1 to not render. **/
	public int backgroundFPS;
	/** Target framerate when the window is hidden or minimized. The CPU sleeps as needed. Use 0 to never sleep, -1 to not render. **/
	public int hiddenFPS = -1;

	/** Returns the display modes of the default screen device, skipping duplicates and modes whose bit depth differs from the
	 * current desktop mode. */
	static public DisplayMode[] getDisplayModes () {
		GraphicsDevice device = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice();
		java.awt.DisplayMode desktopMode = device.getDisplayMode();
		java.awt.DisplayMode[] displayModes = device.getDisplayModes();
		Array<DisplayMode> modes = new Array<DisplayMode>();
		outer:
		for (java.awt.DisplayMode mode : displayModes) {
			for (DisplayMode other : modes)
				if (other.width == mode.getWidth() && other.height == mode.getHeight() && other.bitsPerPixel == mode.getBitDepth())
					continue outer; // Duplicate.
			if (mode.getBitDepth() != desktopMode.getBitDepth()) continue;
			modes.add(new JglfwDisplayMode(mode.getWidth(), mode.getHeight(), mode.getRefreshRate(), mode.getBitDepth()));
		}
		return modes.toArray(DisplayMode.class);
	}

	/** Returns the current display mode of the desktop's default screen device. */
	static public DisplayMode getDesktopDisplayMode () {
		java.awt.DisplayMode mode = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode();
		return new JglfwDisplayMode(mode.getWidth(), mode.getHeight(), mode.getRefreshRate(), mode.getBitDepth());
	}
}
|
domix/libgdx
|
backends/gdx-backend-jglfw/src/com/badlogic/gdx/backends/jglfw/JglfwApplicationConfiguration.java
|
Java
|
apache-2.0
| 3,674 |
package com.xnx3.j2ee.entity;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import static javax.persistence.GenerationType.IDENTITY;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * FriendLog entity mapped to table {@code friend_log}. Records a single
 * friend-list action: the acting user, the affected user, a timestamp, a
 * state flag and the originating IP. Originally generated by MyEclipse
 * Persistence Tools.
 */
@Entity
@Table(name = "friend_log")
public class FriendLog implements java.io.Serializable {
	// Pin the serialized form so future refactors don't silently break
	// deserialization of previously stored instances.
	private static final long serialVersionUID = 1L;

	// Fields
	private Integer id;    // surrogate primary key (auto increment)
	private Integer self;  // acting user's id (inferred from name — confirm with callers)
	private Integer other; // affected user's id (inferred from name — confirm with callers)
	private Integer time;  // action timestamp — presumably epoch seconds; verify against writers
	private Short state;   // action state/outcome flag; semantics defined by callers
	private String ip;     // client IP address, at most 15 chars (dotted IPv4)

	// Constructors

	/** default constructor */
	public FriendLog() {
	}

	/** full constructor */
	public FriendLog(Integer self, Integer other, Integer time, Short state,
			String ip) {
		this.self = self;
		this.other = other;
		this.time = time;
		this.state = state;
		this.ip = ip;
	}

	// Property accessors
	@Id
	@GeneratedValue(strategy = IDENTITY)
	@Column(name = "id", unique = true, nullable = false)
	public Integer getId() {
		return this.id;
	}

	public void setId(Integer id) {
		this.id = id;
	}

	@Column(name = "self", nullable = false)
	public Integer getSelf() {
		return this.self;
	}

	public void setSelf(Integer self) {
		this.self = self;
	}

	@Column(name = "other", nullable = false)
	public Integer getOther() {
		return this.other;
	}

	public void setOther(Integer other) {
		this.other = other;
	}

	@Column(name = "time", nullable = false)
	public Integer getTime() {
		return this.time;
	}

	public void setTime(Integer time) {
		this.time = time;
	}

	@Column(name = "state", nullable = false)
	public Short getState() {
		return this.state;
	}

	public void setState(Short state) {
		this.state = state;
	}

	@Column(name = "ip", nullable = false, length = 15)
	public String getIp() {
		return this.ip;
	}

	public void setIp(String ip) {
		this.ip = ip;
	}

}
|
xnx3/iw_demo
|
src/com/xnx3/j2ee/entity/FriendLog.java
|
Java
|
apache-2.0
| 1,858 |
/*
* Copyright 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.iosched.ui;
import com.google.analytics.tracking.android.EasyTracker;
import gdg.devfest.app.R;
import com.google.android.apps.iosched.provider.ScheduleContract;
import com.google.android.apps.iosched.util.ImageFetcher;
import com.google.android.apps.iosched.util.UIUtils;
import com.actionbarsherlock.app.SherlockFragment;
import android.app.Activity;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import static com.google.android.apps.iosched.util.LogUtils.LOGD;
import static com.google.android.apps.iosched.util.LogUtils.makeLogTag;
/**
* A fragment that shows detail information for a developer sandbox company, including
* company name, description, logo, etc.
*/
public class VendorDetailFragment extends SherlockFragment implements
        LoaderManager.LoaderCallbacks<Cursor> {
    private static final String TAG = makeLogTag(VendorDetailFragment.class);
    // Content URI of the vendor to display, taken from the fragment arguments; null if absent.
    private Uri mVendorUri;
    // Views populated from the loader's cursor in buildUiFromCursor().
    private TextView mName;
    private ImageView mLogo;
    private TextView mUrl;
    private TextView mDesc;
    // Shared image loader used for the vendor logo; cache is closed in onStop().
    private ImageFetcher mImageFetcher;
    /** Callbacks that the host activity must implement to receive the vendor's track id. */
    public interface Callbacks {
        public void onTrackIdAvailable(String trackId);
    }
    // No-op callbacks used while the fragment is detached from an activity.
    private static Callbacks sDummyCallbacks = new Callbacks() {
        @Override
        public void onTrackIdAvailable(String trackId) {}
    };
    private Callbacks mCallbacks = sDummyCallbacks;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        final Intent intent = BaseActivity.fragmentArgumentsToIntent(getArguments());
        mVendorUri = intent.getData();
        // Without a vendor URI there is nothing to load or show.
        if (mVendorUri == null) {
            return;
        }
        mImageFetcher = UIUtils.getImageFetcher(getActivity());
        mImageFetcher.setImageFadeIn(false);
        setHasOptionsMenu(true);
    }
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if (mVendorUri == null) {
            return;
        }
        // Start background query to load vendor details
        getLoaderManager().initLoader(VendorsQuery._TOKEN, null, this);
    }
    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        // The host activity is required to implement Callbacks (see interface above).
        if (!(activity instanceof Callbacks)) {
            throw new ClassCastException("Activity must implement fragment's callbacks.");
        }
        mCallbacks = (Callbacks) activity;
    }
    @Override
    public void onDetach() {
        super.onDetach();
        // Fall back to the no-op callbacks so later events are safely ignored.
        mCallbacks = sDummyCallbacks;
    }
    @Override
    public void onStop() {
        super.onStop();
        if (mImageFetcher != null) {
            mImageFetcher.closeCache();
        }
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        ViewGroup rootView = (ViewGroup) inflater.inflate(R.layout.fragment_vendor_detail, null);
        mName = (TextView) rootView.findViewById(R.id.vendor_name);
        mLogo = (ImageView) rootView.findViewById(R.id.vendor_logo);
        mUrl = (TextView) rootView.findViewById(R.id.vendor_url);
        mDesc = (TextView) rootView.findViewById(R.id.vendor_desc);
        return rootView;
    }
    /**
     * Populates the UI from the first row of {@code cursor}. No-op when the
     * fragment is detached or the cursor is empty. Also reports a pageview to
     * analytics and forwards the vendor's track id to the host activity.
     */
    public void buildUiFromCursor(Cursor cursor) {
        if (getActivity() == null) {
            return;
        }
        if (!cursor.moveToFirst()) {
            return;
        }
        String nameString = cursor.getString(VendorsQuery.NAME);
        mName.setText(nameString);
        // Start background fetch to load vendor logo
        final String logoUrl = cursor.getString(VendorsQuery.LOGO_URL);
        if (!TextUtils.isEmpty(logoUrl)) {
            mImageFetcher.loadThumbnailImage(logoUrl, mLogo, R.drawable.sandbox_logo_empty);
        }
        mUrl.setText(cursor.getString(VendorsQuery.URL));
        mDesc.setText(cursor.getString(VendorsQuery.DESC));
        EasyTracker.getTracker().trackView("Sandbox Vendor: " + nameString);
        LOGD("Tracker", "Sandbox Vendor: " + nameString);
        mCallbacks.onTrackIdAvailable(cursor.getString(VendorsQuery.TRACK_ID));
    }
    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle data) {
        return new CursorLoader(getActivity(), mVendorUri, VendorsQuery.PROJECTION, null, null,
                null);
    }
    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
        buildUiFromCursor(cursor);
    }
    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
    }
    /**
     * {@link com.google.android.apps.iosched.provider.ScheduleContract.Vendors}
     * query parameters.
     */
    private interface VendorsQuery {
        int _TOKEN = 0x4;
        String[] PROJECTION = {
                ScheduleContract.Vendors.VENDOR_NAME,
                ScheduleContract.Vendors.VENDOR_DESC,
                ScheduleContract.Vendors.VENDOR_URL,
                ScheduleContract.Vendors.VENDOR_LOGO_URL,
                ScheduleContract.Vendors.TRACK_ID,
        };
        // Column indices matching PROJECTION order above.
        int NAME = 0;
        int DESC = 1;
        int URL = 2;
        int LOGO_URL = 3;
        int TRACK_ID = 4;
    }
}
|
printminion/gdgsched
|
android/src/com/google/android/apps/iosched/ui/VendorDetailFragment.java
|
Java
|
apache-2.0
| 6,196 |
/**
* Generated with Acceleo
*/
package org.wso2.developerstudio.eclipse.gmf.esb.parts.forms;
// Start of user code for imports
import org.eclipse.emf.common.util.Enumerator;
import org.eclipse.emf.ecore.util.EcoreAdapterFactory;
import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider;
import org.eclipse.emf.eef.runtime.EEFRuntimePlugin;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.part.impl.SectionPropertiesEditingPart;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionStep;
import org.eclipse.emf.eef.runtime.ui.utils.EditingUtils;
import org.eclipse.emf.eef.runtime.ui.widgets.EMFComboViewer;
import org.eclipse.emf.eef.runtime.ui.widgets.FormUtils;
import org.eclipse.jface.viewers.ArrayContentProvider;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.Form;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
// End of user code
/**
 * Form-based properties editing part for a TaskProperty element, generated
 * with Acceleo/EEF. Renders the property name and value as text fields and
 * the property type as a combo viewer inside a collapsible "Properties"
 * section, forwarding widget changes to the edition component.
 */
public class TaskPropertyPropertiesEditionPartForm extends SectionPropertiesEditingPart implements IFormPropertiesEditionPart, TaskPropertyPropertiesEditionPart {
	// SWT widgets backing the three edited features of TaskProperty.
	protected Text propertyName;
	protected Text propertyValue;
	protected EMFComboViewer propertyType;
	/**
	 * For {@link ISection} use only.
	 */
	public TaskPropertyPropertiesEditionPartForm() { super(); }
	/**
	 * Default constructor
	 * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
	 *
	 */
	public TaskPropertyPropertiesEditionPartForm(IPropertiesEditionComponent editionComponent) {
		super(editionComponent);
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
	 *  createFigure(org.eclipse.swt.widgets.Composite, org.eclipse.ui.forms.widgets.FormToolkit)
	 *
	 */
	public Composite createFigure(final Composite parent, final FormToolkit widgetFactory) {
		ScrolledForm scrolledForm = widgetFactory.createScrolledForm(parent);
		Form form = scrolledForm.getForm();
		view = form.getBody();
		GridLayout layout = new GridLayout();
		layout.numColumns = 3;
		view.setLayout(layout);
		createControls(widgetFactory, view);
		return scrolledForm;
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
	 *  createControls(org.eclipse.ui.forms.widgets.FormToolkit, org.eclipse.swt.widgets.Composite)
	 *
	 */
	public void createControls(final FormToolkit widgetFactory, Composite view) {
		// Composition sequence declares the section and its three editors in order.
		CompositionSequence taskPropertyStep = new BindingCompositionSequence(propertiesEditionComponent);
		CompositionStep propertiesStep = taskPropertyStep.addStep(EsbViewsRepository.TaskProperty.Properties.class);
		propertiesStep.addStep(EsbViewsRepository.TaskProperty.Properties.propertyName);
		propertiesStep.addStep(EsbViewsRepository.TaskProperty.Properties.propertyValue);
		propertiesStep.addStep(EsbViewsRepository.TaskProperty.Properties.propertyType);
		
		composer = new PartComposer(taskPropertyStep) {
			@Override
			public Composite addToPart(Composite parent, Object key) {
				if (key == EsbViewsRepository.TaskProperty.Properties.class) {
					return createPropertiesGroup(widgetFactory, parent);
				}
				if (key == EsbViewsRepository.TaskProperty.Properties.propertyName) {
					return createPropertyNameText(widgetFactory, parent);
				}
				if (key == EsbViewsRepository.TaskProperty.Properties.propertyValue) {
					return createPropertyValueText(widgetFactory, parent);
				}
				if (key == EsbViewsRepository.TaskProperty.Properties.propertyType) {
					return createPropertyTypeEMFComboViewer(widgetFactory, parent);
				}
				return parent;
			}
		};
		composer.compose(view);
	}
	/**
	 * Creates the collapsible "Properties" section that hosts the three editors.
	 */
	protected Composite createPropertiesGroup(FormToolkit widgetFactory, final Composite parent) {
		Section propertiesSection = widgetFactory.createSection(parent, Section.TITLE_BAR | Section.TWISTIE | Section.EXPANDED);
		propertiesSection.setText(EsbMessages.TaskPropertyPropertiesEditionPart_PropertiesGroupLabel);
		GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL);
		propertiesSectionData.horizontalSpan = 3;
		propertiesSection.setLayoutData(propertiesSectionData);
		Composite propertiesGroup = widgetFactory.createComposite(propertiesSection);
		GridLayout propertiesGroupLayout = new GridLayout();
		propertiesGroupLayout.numColumns = 3;
		propertiesGroup.setLayout(propertiesGroupLayout);
		propertiesSection.setClient(propertiesGroup);
		return propertiesGroup;
	}
	// Builds the "property name" text editor; commits on focus loss and on Enter.
	protected Composite createPropertyNameText(FormToolkit widgetFactory, Composite parent) {
		createDescription(parent, EsbViewsRepository.TaskProperty.Properties.propertyName, EsbMessages.TaskPropertyPropertiesEditionPart_PropertyNameLabel);
		propertyName = widgetFactory.createText(parent, ""); //$NON-NLS-1$
		propertyName.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
		widgetFactory.paintBordersFor(parent);
		GridData propertyNameData = new GridData(GridData.FILL_HORIZONTAL);
		propertyName.setLayoutData(propertyNameData);
		propertyName.addFocusListener(new FocusAdapter() {
			/**
			 * @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
			 *
			 */
			@Override
			@SuppressWarnings("synthetic-access")
			public void focusLost(FocusEvent e) {
				if (propertiesEditionComponent != null) {
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
							TaskPropertyPropertiesEditionPartForm.this,
							EsbViewsRepository.TaskProperty.Properties.propertyName,
							PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, propertyName.getText()));
					propertiesEditionComponent
							.firePropertiesChanged(new PropertiesEditionEvent(
									TaskPropertyPropertiesEditionPartForm.this,
									EsbViewsRepository.TaskProperty.Properties.propertyName,
									PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
									null, propertyName.getText()));
				}
			}
			/**
			 * @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
			 */
			@Override
			public void focusGained(FocusEvent e) {
				if (propertiesEditionComponent != null) {
					propertiesEditionComponent
							.firePropertiesChanged(new PropertiesEditionEvent(
									TaskPropertyPropertiesEditionPartForm.this,
									null,
									PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
									null, null));
				}
			}
		});
		propertyName.addKeyListener(new KeyAdapter() {
			/**
			 * @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
			 *
			 */
			@Override
			@SuppressWarnings("synthetic-access")
			public void keyPressed(KeyEvent e) {
				if (e.character == SWT.CR) {
					if (propertiesEditionComponent != null)
						propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertyPropertiesEditionPartForm.this, EsbViewsRepository.TaskProperty.Properties.propertyName, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, propertyName.getText()));
				}
			}
		});
		EditingUtils.setID(propertyName, EsbViewsRepository.TaskProperty.Properties.propertyName);
		EditingUtils.setEEFtype(propertyName, "eef::Text"); //$NON-NLS-1$
		FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.TaskProperty.Properties.propertyName, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
		// Start of user code for createPropertyNameText
		// End of user code
		return parent;
	}
	// Builds the "property value" text editor; commits on focus loss and on Enter.
	protected Composite createPropertyValueText(FormToolkit widgetFactory, Composite parent) {
		createDescription(parent, EsbViewsRepository.TaskProperty.Properties.propertyValue, EsbMessages.TaskPropertyPropertiesEditionPart_PropertyValueLabel);
		propertyValue = widgetFactory.createText(parent, ""); //$NON-NLS-1$
		propertyValue.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
		widgetFactory.paintBordersFor(parent);
		GridData propertyValueData = new GridData(GridData.FILL_HORIZONTAL);
		propertyValue.setLayoutData(propertyValueData);
		propertyValue.addFocusListener(new FocusAdapter() {
			/**
			 * @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
			 *
			 */
			@Override
			@SuppressWarnings("synthetic-access")
			public void focusLost(FocusEvent e) {
				if (propertiesEditionComponent != null) {
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
							TaskPropertyPropertiesEditionPartForm.this,
							EsbViewsRepository.TaskProperty.Properties.propertyValue,
							PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, propertyValue.getText()));
					propertiesEditionComponent
							.firePropertiesChanged(new PropertiesEditionEvent(
									TaskPropertyPropertiesEditionPartForm.this,
									EsbViewsRepository.TaskProperty.Properties.propertyValue,
									PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
									null, propertyValue.getText()));
				}
			}
			/**
			 * @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
			 */
			@Override
			public void focusGained(FocusEvent e) {
				if (propertiesEditionComponent != null) {
					propertiesEditionComponent
							.firePropertiesChanged(new PropertiesEditionEvent(
									TaskPropertyPropertiesEditionPartForm.this,
									null,
									PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
									null, null));
				}
			}
		});
		propertyValue.addKeyListener(new KeyAdapter() {
			/**
			 * @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
			 *
			 */
			@Override
			@SuppressWarnings("synthetic-access")
			public void keyPressed(KeyEvent e) {
				if (e.character == SWT.CR) {
					if (propertiesEditionComponent != null)
						propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertyPropertiesEditionPartForm.this, EsbViewsRepository.TaskProperty.Properties.propertyValue, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, propertyValue.getText()));
				}
			}
		});
		EditingUtils.setID(propertyValue, EsbViewsRepository.TaskProperty.Properties.propertyValue);
		EditingUtils.setEEFtype(propertyValue, "eef::Text"); //$NON-NLS-1$
		FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.TaskProperty.Properties.propertyValue, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
		// Start of user code for createPropertyValueText
		// End of user code
		return parent;
	}
	/**
	 * Builds the "property type" combo editor; mouse-wheel scrolling is
	 * suppressed so the selection cannot change accidentally.
	 * @generated NOT
	 */
	protected Composite createPropertyTypeEMFComboViewer(FormToolkit widgetFactory, Composite parent) {
		createDescription(parent, EsbViewsRepository.TaskProperty.Properties.propertyType, EsbMessages.TaskPropertyPropertiesEditionPart_PropertyTypeLabel);
		propertyType = new EMFComboViewer(parent);
		propertyType.setContentProvider(new ArrayContentProvider());
		propertyType.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
		GridData propertyTypeData = new GridData(GridData.FILL_HORIZONTAL);
		propertyType.getCombo().setLayoutData(propertyTypeData);
		propertyType.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {
			@Override
			public void handleEvent(Event arg0) {
				arg0.doit = false;
			}
		});
		propertyType.addSelectionChangedListener(new ISelectionChangedListener() {
			/**
			 * {@inheritDoc}
			 *
			 * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
			 *
			 */
			public void selectionChanged(SelectionChangedEvent event) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertyPropertiesEditionPartForm.this, EsbViewsRepository.TaskProperty.Properties.propertyType, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getPropertyType()));
			}
		});
		propertyType.setID(EsbViewsRepository.TaskProperty.Properties.propertyType);
		FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.TaskProperty.Properties.propertyType, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
		// Start of user code for createPropertyTypeEMFComboViewer
		// End of user code
		return parent;
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
	 *
	 */
	public void firePropertiesChanged(IPropertiesEditionEvent event) {
		// Start of user code for tab synchronization
		// End of user code
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#getPropertyName()
	 *
	 */
	public String getPropertyName() {
		return propertyName.getText();
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#setPropertyName(String newValue)
	 *
	 */
	public void setPropertyName(String newValue) {
		if (newValue != null) {
			propertyName.setText(newValue);
		} else {
			propertyName.setText(""); //$NON-NLS-1$
		}
		boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.TaskProperty.Properties.propertyName);
		if (eefElementEditorReadOnlyState && propertyName.isEnabled()) {
			propertyName.setEnabled(false);
			propertyName.setToolTipText(EsbMessages.TaskProperty_ReadOnly);
		} else if (!eefElementEditorReadOnlyState && !propertyName.isEnabled()) {
			propertyName.setEnabled(true);
		}	
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#getPropertyValue()
	 *
	 */
	public String getPropertyValue() {
		return propertyValue.getText();
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#setPropertyValue(String newValue)
	 *
	 */
	public void setPropertyValue(String newValue) {
		if (newValue != null) {
			propertyValue.setText(newValue);
		} else {
			propertyValue.setText(""); //$NON-NLS-1$
		}
		boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.TaskProperty.Properties.propertyValue);
		if (eefElementEditorReadOnlyState && propertyValue.isEnabled()) {
			propertyValue.setEnabled(false);
			propertyValue.setToolTipText(EsbMessages.TaskProperty_ReadOnly);
		} else if (!eefElementEditorReadOnlyState && !propertyValue.isEnabled()) {
			propertyValue.setEnabled(true);
		}	
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#getPropertyType()
	 *
	 */
	public Enumerator getPropertyType() {
		Enumerator selection = (Enumerator) ((StructuredSelection) propertyType.getSelection()).getFirstElement();
		return selection;
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#initPropertyType(Object input, Enumerator current)
	 */
	public void initPropertyType(Object input, Enumerator current) {
		propertyType.setInput(input);
		propertyType.modelUpdating(new StructuredSelection(current));
		boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.TaskProperty.Properties.propertyType);
		if (eefElementEditorReadOnlyState && propertyType.isEnabled()) {
			propertyType.setEnabled(false);
			propertyType.setToolTipText(EsbMessages.TaskProperty_ReadOnly);
		} else if (!eefElementEditorReadOnlyState && !propertyType.isEnabled()) {
			propertyType.setEnabled(true);
		}	
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#setPropertyType(Enumerator newValue)
	 *
	 */
	public void setPropertyType(Enumerator newValue) {
		propertyType.modelUpdating(new StructuredSelection(newValue));
		boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.TaskProperty.Properties.propertyType);
		if (eefElementEditorReadOnlyState && propertyType.isEnabled()) {
			propertyType.setEnabled(false);
			propertyType.setToolTipText(EsbMessages.TaskProperty_ReadOnly);
		} else if (!eefElementEditorReadOnlyState && !propertyType.isEnabled()) {
			propertyType.setEnabled(true);
		}	
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
	 *
	 */
	public String getTitle() {
		return EsbMessages.TaskProperty_Part_Title;
	}
	// Start of user code additional methods
	// End of user code
}
|
prabushi/devstudio-tooling-esb
|
plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src-gen/org/wso2/developerstudio/eclipse/gmf/esb/parts/forms/TaskPropertyPropertiesEditionPartForm.java
|
Java
|
apache-2.0
| 18,210 |
package CustomOreGen.Util;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
/**
 * A {@link Collection} view over a backing {@link Map} in which every value
 * is stored under a key derived from the value itself via {@link #getKey}.
 */
public abstract class MapCollection<K,V> implements Collection<V> {
    /** Map that holds every element, keyed by {@link #getKey}. */
    protected final Map<K,V> backingMap;

    /**
     * Wraps {@code backingMap}, verifying up front that every existing entry
     * is keyed consistently with {@link #getKey}.
     *
     * @throws IllegalArgumentException if an entry's key differs from the key
     *         derived from its value
     */
    public MapCollection(Map<K,V> backingMap) {
        this.backingMap = backingMap;
        for (Entry<K,V> entry : backingMap.entrySet()) {
            K actual = entry.getKey();
            V value = entry.getValue();
            K expected = this.getKey(value);
            boolean consistent = actual == expected || (actual != null && actual.equals(expected));
            if (!consistent) {
                throw new IllegalArgumentException("Backing set contains inconsistent key/value pair \'" + actual + "\' -> \'" + value + "\', expected \'" + expected + "\' -> \'" + value + "\'");
            }
        }
    }

    /** Derives the map key under which {@code value} is stored. */
    protected abstract K getKey(V value);

    public int size() {
        return backingMap.size();
    }

    public boolean isEmpty() {
        return backingMap.isEmpty();
    }

    public boolean contains(Object o) {
        try {
            @SuppressWarnings("unchecked")
            K key = getKey((V)o);
            return backingMap.containsKey(key);
        } catch (ClassCastException e) {
            // o is not of the element type, so it cannot be present.
            return false;
        }
    }

    public Iterator<V> iterator() {
        return backingMap.values().iterator();
    }

    public Object[] toArray() {
        return backingMap.values().toArray();
    }

    public <T> T[] toArray(T[] a) {
        return backingMap.values().toArray(a);
    }

    /**
     * Stores {@code v} under its derived key. Returns true when the map
     * changed: the key was absent or previously bound to a different value.
     */
    public boolean add(V v) {
        K key = getKey(v);
        if (v == null) {
            // A null value needs an explicit presence check, since put()
            // returning null is ambiguous in that case.
            boolean keyWasPresent = backingMap.containsKey(key);
            V displaced = backingMap.put(key, v);
            return !keyWasPresent || displaced != v;
        }
        return backingMap.put(key, v) != v;
    }

    public boolean remove(Object o) {
        try {
            @SuppressWarnings("unchecked")
            K key = getKey((V)o);
            return backingMap.keySet().remove(key);
        } catch (ClassCastException e) {
            return false;
        }
    }

    public boolean containsAll(Collection<?> c) {
        Iterator<?> it = c.iterator();
        while (it.hasNext()) {
            if (!contains(it.next())) {
                return false;
            }
        }
        return true;
    }

    public boolean addAll(Collection<? extends V> c) {
        boolean modified = false;
        for (V element : c) {
            if (add(element)) {
                modified = true;
            }
        }
        return modified;
    }

    public boolean removeAll(Collection<?> c) {
        boolean modified = false;
        for (Object element : c) {
            if (remove(element)) {
                modified = true;
            }
        }
        return modified;
    }

    public boolean retainAll(Collection<?> c) {
        ArrayList<K> wanted = new ArrayList<K>(backingMap.size());
        for (Object candidate : c) {
            try {
                @SuppressWarnings("unchecked")
                K key = getKey((V)candidate);
                wanted.add(key);
            } catch (ClassCastException e) {
                // Not of the element type: it cannot match anything we hold.
            }
        }
        return backingMap.keySet().retainAll(wanted);
    }

    public void clear() {
        backingMap.clear();
    }

    public int hashCode() {
        return backingMap.hashCode();
    }

    public boolean equals(Object obj) {
        if (!(obj instanceof MapCollection)) {
            return false;
        }
        return backingMap.equals(((MapCollection<?, ?>)obj).backingMap);
    }

    public String toString() {
        return backingMap.values().toString();
    }
}
|
reteo/CustomOreGen
|
src/main/java/CustomOreGen/Util/MapCollection.java
|
Java
|
artistic-2.0
| 3,588 |
# Homebrew formula for helmfile, a declarative spec runner for Helm charts.
class Helmfile < Formula
  desc "Deploy Kubernetes Helm Charts"
  homepage "https://github.com/roboll/helmfile"
  url "https://github.com/roboll/helmfile/archive/v0.142.0.tar.gz"
  sha256 "5475a041f0a1eb5777cc45e3fb06458ae76b1d4840aec89f2fed509d833d0cde"
  license "MIT"
  # Pre-built bottles per platform; skip_relocation because the Go binary
  # contains no hard-coded prefix paths.
  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "29cf096405cc834e7888ebdee9c811a3e375e8a43b2e045ec0295e8ff654bad3"
    sha256 cellar: :any_skip_relocation, arm64_big_sur:  "80e9c9d81f57b0331038108026263d6d9b184403659b66a976172e9dde916792"
    sha256 cellar: :any_skip_relocation, monterey:       "73e5bab63a7d9c0af77ccc72f8bca63cc8f72b96923ebfe41430a356cbf2cdeb"
    sha256 cellar: :any_skip_relocation, big_sur:        "ca024a40610d455dce99ef913baee47fa1d82dc821d780b94e56a54b3ecbde7b"
    sha256 cellar: :any_skip_relocation, catalina:       "7fa829db664c78079ba1c8ac19ec75b47e9664dfc55cf79d18970070e81d0fc2"
    sha256 cellar: :any_skip_relocation, x86_64_linux:   "53ff4d7a0816b82fcd87c791e6a9db70699425931bbba96181b545a837fb7fb7"
  end
  depends_on "go" => :build
  depends_on "helm"
  def install
    # Stamp the release version into the binary through the Go linker.
    system "go", "build", "-ldflags", "-X github.com/roboll/helmfile/pkg/app/version.Version=v#{version}",
           "-o", bin/"helmfile", "-v", "github.com/roboll/helmfile"
  end
  test do
    # Minimal helmfile.yaml; the test syncs the repo list and checks the
    # reported version string.
    (testpath/"helmfile.yaml").write <<-EOS
    repositories:
    - name: stable
      url: https://charts.helm.sh/stable
    releases:
    - name: vault                            # name of this release
      namespace: vault                       # target namespace
      createNamespace: true                  # helm 3.2+ automatically create release namespace (default true)
      labels:                                # Arbitrary key value pairs for filtering releases
        foo: bar
      chart: stable/vault                    # the chart being installed to create this release, referenced by `repository/chart` syntax
      version: ~1.24.1                       # the semver of the chart. range constraint is supported
    EOS
    system Formula["helm"].opt_bin/"helm", "create", "foo"
    output = "Adding repo stable https://charts.helm.sh/stable"
    assert_match output, shell_output("#{bin}/helmfile -f helmfile.yaml repos 2>&1")
    assert_match version.to_s, shell_output("#{bin}/helmfile -v")
  end
end
|
sjackman/homebrew-core
|
Formula/helmfile.rb
|
Ruby
|
bsd-2-clause
| 2,316 |
import re
# Python 2/3 compatibility hackery: on Python 3 the ``unicode`` builtin is
# gone, so alias it to ``str`` there.
try:
    unicode
except NameError:
    unicode = str


def compile_url(url):
    """Compile ``url`` into a regex object, dropping any leading slashes."""
    return re.compile(unicode(url).lstrip(u'/'))


def compile_urls(urls):
    """Compile each expression in ``urls`` via :func:`compile_url`."""
    compiled = []
    for expression in urls:
        compiled.append(compile_url(expression))
    return compiled
|
ghickman/incuna-auth
|
incuna_auth/middleware/utils.py
|
Python
|
bsd-2-clause
| 272 |
# Homebrew formula for helmsman, which drives Helm releases from a desired
# state file.
class Helmsman < Formula
  desc "Helm Charts as Code tool"
  homepage "https://github.com/Praqma/helmsman"
  # Built from a pinned git tag + revision rather than a release tarball.
  url "https://github.com/Praqma/helmsman.git",
      tag:      "v3.7.2",
      revision: "6d7e6ddb2c7747b8789dd72db7714431fe17e779"
  license "MIT"
  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "a307ed84ab8c572b9256fb54aaad6a7300f9384c94163dfeafc7419188aed4d0"
    sha256 cellar: :any_skip_relocation, big_sur:       "2f8fa84afd13560540da1be94ba442d8d140821436e14b038b8f034e561d7ca7"
    sha256 cellar: :any_skip_relocation, catalina:      "c0604c09ea08fd0aefb7ad9f24ec6df256156670fa8d30c180365c9b479cf99f"
    sha256 cellar: :any_skip_relocation, mojave:        "4841f957b4825a3501faa2ccf437d79042ea9019389ad344d4f20f9cecfe3830"
    sha256 cellar: :any_skip_relocation, x86_64_linux:  "dd61ceab712bafb407449a97d4e5e3df51bf50514f27c4bdd228796032748527"
  end
  depends_on "go" => :build
  depends_on "helm"
  depends_on "kubernetes-cli"
  def install
    # Embed the version string via the Go linker and ship the example config.
    system "go", "build", *std_go_args, "-ldflags", "-s -w -X main.version=#{version}", "./cmd/helmsman"
    pkgshare.install "examples/example.yaml"
  end
  test do
    assert_match version.to_s, shell_output("#{bin}/helmsman version")
    # Applying the example must fail early because the helm-diff plugin is
    # absent in the test environment; we assert on that specific message.
    output = shell_output("#{bin}/helmsman --apply -f #{pkgshare}/example.yaml 2>&1", 1)
    assert_match "helm diff not found", output
  end
end
|
zyedidia/homebrew-core
|
Formula/helmsman.rb
|
Ruby
|
bsd-2-clause
| 1,369 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from math import log
import argh
import numpy as np
from chemreac import ReactionDiffusion
from chemreac.integrate import run
from chemreac.util.plotting import plot_solver_linear_error
def efield_cb(x, logx=False):
    """Return a uniform electric field of -1 over the whole grid ``x``.

    ``logx`` is accepted for signature compatibility with the other
    callbacks but does not affect a constant field.
    """
    field = np.ones_like(x)
    return -field
def y0_flat_cb(x, logx=False, use_log2=False):
    """Initial profile for flat geometry: linear ramp from 17 down to 6.

    ``x`` holds the bin edges; values are evaluated at the bin centers.
    When ``logx`` is true the grid is stored in log space and is mapped
    back to linear space first (base 2 when ``use_log2``, else base e).
    """
    centers = x[:-1] + 0.5*np.diff(x)
    if logx:
        if use_log2:
            x, centers = 2**x, 2**centers
        else:
            x, centers = np.exp(x), np.exp(centers)
    span = x[-1] - x[0]
    return 17 - 11*(centers - x[0])/span
def y0_cylindrical_cb(x, logx=False, use_log2=False):
    """Initial profile for cylindrical geometry: 17 minus the log of the
    fractional distance from the inner boundary, evaluated at bin centers.

    See ``y0_flat_cb`` for the meaning of ``logx``/``use_log2``.
    """
    centers = x[:-1] + 0.5*np.diff(x)
    if logx:
        if use_log2:
            x, centers = 2**x, 2**centers
        else:
            x, centers = np.exp(x), np.exp(centers)
    frac = (centers - x[0])/(x[-1] - x[0])
    return 17 - np.log(frac)
def y0_spherical_cb(x, logx=False, use_log2=False):
    """Initial profile for spherical geometry: 3 + 0.1 over the fractional
    distance from the inner boundary, evaluated at bin centers.

    See ``y0_flat_cb`` for the meaning of ``logx``/``use_log2``.
    """
    centers = x[:-1] + 0.5*np.diff(x)
    if logx:
        if use_log2:
            x, centers = 2**x, 2**centers
        else:
            x, centers = np.exp(x), np.exp(centers)
    frac = (centers - x[0])/(x[-1] - x[0])
    return 3 + 0.1/frac
def integrate_rd(D=2e-3, t0=3., tend=7., x0=0.0, xend=1.0, mu=None, N=32,
                 nt=25, geom='f', logt=False, logy=False, logx=False,
                 random=False, nstencil=3, lrefl=False, rrefl=False,
                 num_jacobian=False, method='bdf', plot=False,
                 atol=1e-6, rtol=1e-6, efield=False, random_seed=42,
                 verbose=False, use_log2=False):
    """Integrate a single-species drift/diffusion problem toward steady state
    and compare the numerical solution against an analytic reference.

    Parameters
    ----------
    D : float
        Diffusion coefficient.
    t0, tend : float
        Start and end time of the integration; ``nt`` output points.
    x0, xend : float
        Domain boundaries; ``N`` bins.
    geom : str
        One of 'f' (flat), 'c' (cylindrical), 's' (spherical).
    logt, logy, logx : bool
        Use logarithmically transformed time / concentration / space.
    use_log2 : bool
        Use base-2 instead of base-e logarithms for the transforms.
    efield : bool
        Apply a flat electric field (only supported for flat geometry).

    Returns
    -------
    tuple of (tout, Cout, info, ave_rmsd_over_atol, rd).
    """
    if random_seed:
        np.random.seed(random_seed)
    n = 1
    mu = float(mu or x0)
    tout = np.linspace(t0, tend, nt)
    assert geom in 'fcs'
    # Setup the grid (in log space when logx is requested)
    logb = (lambda arg: log(arg)/log(2)) if use_log2 else log
    _x0 = logb(x0) if logx else x0
    _xend = logb(xend) if logx else xend
    x = np.linspace(_x0, _xend, N+1)
    if random:
        # Jitter interior grid points to exercise non-uniform grids.
        x += (np.random.random(N+1)-0.5)*(_xend-_x0)/(N+2)
    mob = 0.3
    # Initial conditions.
    # BUGFIX: forward use_log2 to the callback — previously only (x, logx)
    # was passed, so with logx=True and use_log2=True the log2-spaced grid
    # was incorrectly exponentiated with base e inside the callback.
    y0 = {
        'f': y0_flat_cb,
        'c': y0_cylindrical_cb,
        's': y0_spherical_cb
    }[geom](x, logx, use_log2)
    # Setup the system: no reactions, just transport of a single species.
    stoich_active = []
    stoich_prod = []
    k = []
    assert not lrefl
    assert not rrefl
    rd = ReactionDiffusion(
        n, stoich_active, stoich_prod, k, N,
        D=[D],
        z_chg=[1],
        mobility=[mob],
        x=x,
        geom=geom,
        logy=logy,
        logt=logt,
        logx=logx,
        nstencil=nstencil,
        lrefl=lrefl,
        rrefl=rrefl,
        use_log2=use_log2
    )
    if efield:
        if geom != 'f':
            raise ValueError("Only analytic sol. for flat drift implemented.")
        rd.efield = efield_cb(rd.xcenters, logx)
    # Analytic reference values: the initial profile, shifted by the drift.
    t = tout.copy().reshape((nt, 1))
    Cref = np.repeat(y0[np.newaxis, :, np.newaxis], nt, axis=0)
    if efield:
        Cref += t.reshape((nt, 1, 1))*mob
    # Run the integration
    integr = run(rd, y0, tout, atol=atol, rtol=rtol,
                 with_jacobian=(not num_jacobian), method=method)
    Cout, info = integr.Cout, integr.info
    if verbose:
        print(info)

    def lin_err(i=slice(None), j=slice(None)):
        # Linear-space error w.r.t. the analytic reference.
        return integr.Cout[i, :, j] - Cref[i, :, j]

    rmsd = np.sum(lin_err()**2 / N, axis=1)**0.5
    ave_rmsd_over_atol = np.average(rmsd, axis=0)/info['atol']
    # Plot results
    if plot:
        import matplotlib.pyplot as plt

        def _plot(y, c, ttl=None, apply_exp_on_y=False):
            plt.plot(rd.xcenters, rd.expb(y) if apply_exp_on_y else y, c=c)
            if N < 100:
                plt.vlines(rd.x, 0, np.ones_like(rd.x)*max(y), linewidth=.1,
                           colors='gray')
            plt.xlabel('x / m')
            plt.ylabel('C / M')
            if ttl:
                plt.title(ttl)

        for i in range(nt):
            c = 1-tout[i]/tend
            c = (1.0-c, .5-c/2, .5-c/2)  # over time: dark red -> light red
            plt.subplot(4, 1, 1)
            _plot(Cout[i, :, 0], c, 'Simulation (N={})'.format(rd.N),
                  apply_exp_on_y=logy)
            plt.subplot(4, 1, 2)
            _plot(Cref[i, :, 0], c, 'Analytic', apply_exp_on_y=logy)
            ax_err = plt.subplot(4, 1, 3)
            plot_solver_linear_error(integr, Cref, ax_err, ti=i,
                                     bi=slice(None),
                                     color=c, fill=(i == 0))
            plt.title('Linear rel error / Log abs. tol. (={})'.format(
                info['atol']))
        plt.subplot(4, 1, 4)
        tspan = [tout[0], tout[-1]]
        plt.plot(tout, rmsd[:, 0] / info['atol'], 'r')
        plt.plot(tspan, [ave_rmsd_over_atol[0]]*2, 'r--')
        plt.xlabel('Time / s')
        plt.ylabel(r'$\sqrt{\langle E^2 \rangle} / atol$')
        plt.tight_layout()
        plt.show()
    return tout, Cout, info, ave_rmsd_over_atol, rd
if __name__ == '__main__':
    # Expose integrate_rd as a command line interface; argh turns each
    # keyword argument into a corresponding --flag.
    argh.dispatch_command(integrate_rd, output_file=None)
|
bjodah/chemreac
|
examples/steady_state.py
|
Python
|
bsd-2-clause
| 4,970 |
package org.jvnet.jaxb2_commons.xml.bind.model.concrete;
import java.util.ArrayList;
import java.util.Collection;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.activation.MimeType;
import javax.xml.namespace.NamespaceContext;
import javax.xml.namespace.QName;
import org.jvnet.jaxb2_commons.lang.Validate;
import org.jvnet.jaxb2_commons.xml.bind.model.MBuiltinLeafInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MClassInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MClassTypeInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MContainer;
import org.jvnet.jaxb2_commons.xml.bind.model.MElement;
import org.jvnet.jaxb2_commons.xml.bind.model.MElementInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MElementTypeRef;
import org.jvnet.jaxb2_commons.xml.bind.model.MEnumLeafInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MModelInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MPackageInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MPropertyInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MTypeInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMAnyAttributePropertyInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMBuiltinLeafInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMClassInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMElementInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMElementOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMElementTypeRefOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMEnumConstantInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMEnumLeafInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMModelInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMPropertyInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMWildcardTypeInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MBuiltinLeafInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MClassInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MElementInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MElementOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MElementTypeRefOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MEnumConstantInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MEnumLeafInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MModelInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MPropertyInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MWildcardTypeInfoOrigin;
import com.sun.xml.bind.v2.model.core.Adapter;
import com.sun.xml.bind.v2.model.core.AttributePropertyInfo;
import com.sun.xml.bind.v2.model.core.BuiltinLeafInfo;
import com.sun.xml.bind.v2.model.core.ClassInfo;
import com.sun.xml.bind.v2.model.core.Element;
import com.sun.xml.bind.v2.model.core.ElementInfo;
import com.sun.xml.bind.v2.model.core.ElementPropertyInfo;
import com.sun.xml.bind.v2.model.core.EnumConstant;
import com.sun.xml.bind.v2.model.core.EnumLeafInfo;
import com.sun.xml.bind.v2.model.core.ID;
import com.sun.xml.bind.v2.model.core.MapPropertyInfo;
import com.sun.xml.bind.v2.model.core.PropertyInfo;
import com.sun.xml.bind.v2.model.core.ReferencePropertyInfo;
import com.sun.xml.bind.v2.model.core.TypeInfo;
import com.sun.xml.bind.v2.model.core.TypeInfoSet;
import com.sun.xml.bind.v2.model.core.TypeRef;
import com.sun.xml.bind.v2.model.core.ValuePropertyInfo;
import com.sun.xml.bind.v2.model.core.WildcardTypeInfo;
/**
 * Abstract factory that converts the JAXB RI reflection model
 * ({@link TypeInfoSet} and the related {@code com.sun.xml.bind.v2.model.core}
 * types) into the implementation-independent {@code M*} meta-model
 * ({@link MModelInfo}, {@link MClassInfo}, {@link MElementInfo}, ...).
 * Subclasses supply the environment-specific pieces: packages, local names,
 * containers, default values and list types.
 *
 * <p>The type parameters mirror the RI model types; see the inline comments
 * on the declaration below for the abbreviation of each one.</p>
 */
public abstract class CMInfoFactory<T, C extends T, TIS extends TypeInfoSet<T, C, ?, ?>,
// TI - general type info
TI extends TypeInfo<T, C>,
// BLI - built-in leaf (simple) type info
BLI extends BuiltinLeafInfo<T, C>,
// E - element
E extends Element<T, C>,
// EI - element info
EI extends ElementInfo<T, C>,
// ELI - enum leaf info
ELI extends EnumLeafInfo<T, C>,
// EC - enum constant
EC extends EnumConstant<T, C>,
// CI - class info
CI extends ClassInfo<T, C>,
// PI - property info
PI extends PropertyInfo<T, C>,
// API - attribute property info
API extends AttributePropertyInfo<T, C>,
// VPI - value property info
VPI extends ValuePropertyInfo<T, C>,
// EPI - element property info
EPI extends ElementPropertyInfo<T, C>,
// RPI - reference property info
RPI extends ReferencePropertyInfo<T, C>,
// WTI - wildcard type info
WTI extends WildcardTypeInfo<T, C>,
// TR - type reference
TR extends TypeRef<T, C>> {

	// Identity-based caches mapping each RI model object to exactly one
	// meta-model object; identity (not equals) is the correct key semantics
	// for the RI model instances.
	private final Map<BLI, MBuiltinLeafInfo<T, C>> builtinLeafInfos = new IdentityHashMap<BLI, MBuiltinLeafInfo<T, C>>();
	private final Map<CI, MClassInfo<T, C>> classInfos = new IdentityHashMap<CI, MClassInfo<T, C>>();
	private final Map<ELI, MEnumLeafInfo<T, C>> enumLeafInfos = new IdentityHashMap<ELI, MEnumLeafInfo<T, C>>();
	private final Map<EI, MElementInfo<T, C>> elementInfos = new IdentityHashMap<EI, MElementInfo<T, C>>();

	// The RI model this factory converts; never null.
	private final TIS typeInfoSet;

	public CMInfoFactory(TIS typeInfoSet) {
		Validate.notNull(typeInfoSet);
		this.typeInfoSet = typeInfoSet;
	}

	public TIS getTypeInfoSet() {
		return typeInfoSet;
	}

	/**
	 * Builds the full meta-model: builtin leafs, enums, classes and
	 * elements, in that order.
	 */
	public MModelInfo<T, C> createModel() {
		final CMModel<T, C> model = new CMModel<T, C>(
				createModelInfoOrigin(typeInfoSet));
		createBuiltinLeafInfos(model);
		createEnumLeafInfos(model);
		createClassInfos(model);
		createElementInfos(model);
		return model;
	}

	// Two passes: the first creates (and caches) every element info so that
	// cross-references resolve; the second registers them on the model.
	private void createElementInfos(final CMModel<T, C> model) {
		Iterable<? extends ElementInfo<T, C>> elements = typeInfoSet
				.getAllElements();
		for (ElementInfo<T, C> element : elements) {
			final EI ei = (EI) element;
			getElementInfo(ei);
		}
		for (ElementInfo<T, C> element : elements) {
			model.addElementInfo(getElementInfo((EI) element));
		}
	}

	// Create/cache each enum, then populate its constants, then register.
	private void createEnumLeafInfos(final CMModel<T, C> model) {
		Collection<? extends EnumLeafInfo<T, C>> enums = typeInfoSet.enums()
				.values();
		for (EnumLeafInfo<T, C> enumLeafInfo : enums) {
			@SuppressWarnings("unchecked")
			final ELI eli = (ELI) enumLeafInfo;
			getTypeInfo(eli);
		}
		for (Map.Entry<ELI, MEnumLeafInfo<T, C>> entry : enumLeafInfos
				.entrySet()) {
			populateEnumLeafInfo(entry.getKey(), entry.getValue());
		}
		for (EnumLeafInfo<T, C> enumLeafInfo : enums) {
			model.addEnumLeafInfo(getTypeInfo((ELI) enumLeafInfo));
		}
	}

	private void createBuiltinLeafInfos(final CMModel<T, C> model) {
		Collection<? extends BuiltinLeafInfo<T, C>> builtins = typeInfoSet
				.builtins().values();
		for (BuiltinLeafInfo<T, C> builtinLeafInfo : builtins) {
			@SuppressWarnings("unchecked")
			final BLI bli = (BLI) builtinLeafInfo;
			getTypeInfo(bli);
		}
		for (BuiltinLeafInfo<T, C> builtinLeafInfo : builtins) {
			model.addBuiltinLeafInfo(getTypeInfo((BLI) builtinLeafInfo));
		}
	}

	// Create/cache each class skeleton first, then populate properties (so
	// that property types may refer to other, already-created classes).
	private void createClassInfos(final CMModel<T, C> model) {
		Collection<? extends ClassInfo<T, C>> beans = typeInfoSet.beans()
				.values();
		for (ClassInfo<T, C> classInfo : beans) {
			@SuppressWarnings("unchecked")
			final CI ci = (CI) classInfo;
			getTypeInfo(ci);
		}
		for (Map.Entry<CI, MClassInfo<T, C>> entry : classInfos.entrySet()) {
			populateClassInfo(entry.getKey(), entry.getValue());
		}
		for (ClassInfo<T, C> classInfo : beans) {
			model.addClassInfo(getTypeInfo((CI) classInfo));
		}
	}

	/**
	 * Resolves the meta type for a property value, wrapping it in
	 * list/ID/IDREF decorators as dictated by the {@code list} flag and the
	 * {@code id} mode of the property.
	 */
	protected MTypeInfo<T, C> getTypeInfo(PropertyInfo<T, C> propertyInfo,
			TI typeInfo, boolean list, Adapter<T, C> adapter, ID id,
			MimeType mimeType) {
		final MTypeInfo<T, C> ti = getTypeInfo(typeInfo);
		if (list) {
			switch (id) {
			case ID:
				final MTypeInfo<T, C> tid = new CMID<T, C>(ti.getTargetType(),
						ti);
				return new CMList<T, C>(createListType(tid.getTargetType()),
						tid, null);
			case IDREF:
				// IDREF lists get a dedicated wrapper type.
				return new CMIDREFS<T, C>(createListType(ti.getTargetType()),
						ti);
			default:
				return new CMList<T, C>(createListType(ti.getTargetType()), ti,
						null);
			}
		} else {
			switch (id) {
			case ID:
				return new CMID<T, C>(ti.getTargetType(), ti);
			case IDREF:
				return new CMIDREF<T, C>(ti.getTargetType(), ti);
			default:
				return ti;
			}
		}
	}

	// Dispatch on the concrete RI type-info flavor. NOTE(review): the order
	// matters — ElementInfo must be tested before ClassInfo because the
	// casts are unchecked and rely on runtime instanceof checks.
	protected MTypeInfo<T, C> getTypeInfo(TI typeInfo) {
		if (typeInfo instanceof BuiltinLeafInfo) {
			return getTypeInfo((BLI) typeInfo);
		} else if (typeInfo instanceof EnumLeafInfo) {
			return getTypeInfo((ELI) typeInfo);
		} else if (typeInfo instanceof ElementInfo) {
			return getTypeInfo((EI) typeInfo);
		} else if (typeInfo instanceof WildcardTypeInfo) {
			return createWildcardTypeInfo((WTI) typeInfo);
		} else if (typeInfo instanceof ClassInfo) {
			return getTypeInfo((CI) typeInfo);
		} else {
			throw new UnsupportedOperationException(typeInfo.getClass()
					.getName());
		}
	}

	// Cached lookup-or-create for builtin leaf infos.
	private MBuiltinLeafInfo<T, C> getTypeInfo(BLI info) {
		MBuiltinLeafInfo<T, C> builtinLeafInfo = builtinLeafInfos.get(info);
		if (builtinLeafInfo == null) {
			builtinLeafInfo = createBuiltinLeafInfo(info);
			builtinLeafInfos.put(info, builtinLeafInfo);
		}
		return builtinLeafInfo;
	}

	// The type of an element info is the (possibly list/ID-decorated) type
	// of its content property.
	private MTypeInfo<T, C> getTypeInfo(EI info) {
		@SuppressWarnings("unchecked")
		EPI p = (EPI) info.getProperty();
		@SuppressWarnings("unchecked")
		TI contentType = (TI) info.getContentType();
		return getTypeInfo(p, contentType, p.isValueList(), p.getAdapter(),
				p.id(), p.getExpectedMimeType());
	}

	// Cached lookup-or-create for class infos.
	protected MClassInfo<T, C> getTypeInfo(CI info) {
		MClassInfo<T, C> classInfo = classInfos.get(info);
		if (classInfo == null) {
			classInfo = createClassInfo(info);
			classInfos.put(info, classInfo);
		}
		return classInfo;
	}

	// Cached lookup-or-create for enum leaf infos.
	private MEnumLeafInfo<T, C> getTypeInfo(ELI info) {
		MEnumLeafInfo<T, C> enumLeafInfo = enumLeafInfos.get(info);
		if (enumLeafInfo == null) {
			enumLeafInfo = createEnumLeafInfo(info);
			enumLeafInfos.put(info, enumLeafInfo);
		}
		return enumLeafInfo;
	}

	private void populateEnumLeafInfo(ELI info, MEnumLeafInfo<T, C> enumLeafInfo) {
		@SuppressWarnings("rawtypes")
		Iterable<? extends EnumConstant> _constants = info.getConstants();
		@SuppressWarnings("unchecked")
		final Iterable<? extends EnumConstant<T, C>> enumConstants = (Iterable<? extends EnumConstant<T, C>>) _constants;
		for (EnumConstant<?, ?> enumConstant : enumConstants) {
			enumLeafInfo.addEnumConstantInfo(createEnumContantInfo(
					enumLeafInfo, (EC) enumConstant));
		}
	}

	// Cached lookup-or-create for element infos.
	protected MElementInfo<T, C> getElementInfo(EI info) {
		MElementInfo<T, C> elementInfo = elementInfos.get(info);
		if (elementInfo == null) {
			elementInfo = createElementInfo(info);
			elementInfos.put(info, elementInfo);
		}
		return elementInfo;
	}

	// Creates the class skeleton; properties are filled in later by
	// populateClassInfo. Element name is only set for @XmlRootElement types.
	protected MClassInfo<T, C> createClassInfo(CI info) {
		return new CMClassInfo<T, C>(createClassInfoOrigin(info),
				info.getClazz(), getPackage(info), getContainer(info),
				getLocalName(info), createBaseTypeInfo(info),
				info.isElement() ? info.getElementName() : null,
				info.getTypeName());
	}

	private void populateClassInfo(CI info, MClassInfo<T, C> classInfo) {
		if (info.hasAttributeWildcard()) {
			classInfo.addProperty(createAnyAttributePropertyInfo(classInfo));
		}
		for (PropertyInfo<T, C> p : (List<? extends PropertyInfo<T, C>>) info
				.getProperties()) {
			classInfo.addProperty(createPropertyInfo(classInfo, (PI) p));
		}
	}

	protected MClassTypeInfo<T, C, ?> createBaseTypeInfo(CI info) {
		return info.getBaseClass() == null ? null : getTypeInfo((CI) info
				.getBaseClass());
	}

	// Dispatches on the RI property flavor and on element/reference
	// cardinality to the matching create*PropertyInfo factory method.
	private MPropertyInfo<T, C> createPropertyInfo(
			final MClassInfo<T, C> classInfo, PI p) {
		if (p instanceof AttributePropertyInfo) {
			@SuppressWarnings("unchecked")
			final API api = (API) p;
			return createAttributePropertyInfo(classInfo, api);
		} else if (p instanceof ValuePropertyInfo) {
			@SuppressWarnings("unchecked")
			final VPI vpi = (VPI) p;
			return createValuePropertyInfo(classInfo, vpi);
		} else if (p instanceof ElementPropertyInfo) {
			@SuppressWarnings("unchecked")
			final EPI ep = (EPI) p;
			if (ep.getTypes().size() == 1) {
				return createElementPropertyInfo(classInfo, ep);
			} else {
				return createElementsPropertyInfo(classInfo, ep);
			}
		} else if (p instanceof ReferencePropertyInfo) {
			@SuppressWarnings("unchecked")
			final RPI rp = (RPI) p;
			final Set<? extends Element<T, C>> elements = rp.getElements();
			// A reference with no elements but a DOM/typed-object wildcard
			// is modelled as an "any element" property.
			if (elements.size() == 0
					&& rp.getWildcard() != null
					&& (rp.getWildcard().allowDom || rp.getWildcard().allowTypedObject)) {
				return createAnyElementPropertyInfo(classInfo, rp);
			} else if (elements.size() == 1) {
				return createElementRefPropertyInfo(classInfo, rp);
			} else {
				return createElementRefsPropertyInfo(classInfo, rp);
			}
		} else if (p instanceof MapPropertyInfo) {
			// Map properties are not supported by this meta-model yet.
			// System.out.println("Map property: " + p.getName());
			// MapPropertyInfo<T, C> mp = (MapPropertyInfo<T, C>) p;
			throw new UnsupportedOperationException();
		} else {
			throw new AssertionError();
		}
	}

	protected MPropertyInfo<T, C> createAttributePropertyInfo(
			final MClassInfo<T, C> classInfo, final API propertyInfo) {
		return new CMAttributePropertyInfo<T, C>(
				createPropertyInfoOrigin((PI) propertyInfo), classInfo,
				propertyInfo.getName(), getTypeInfo(propertyInfo),
				propertyInfo.getXmlName(), propertyInfo.isRequired(),
				getDefaultValue(propertyInfo),
				getDefaultValueNamespaceContext(propertyInfo));
	}

	protected MPropertyInfo<T, C> createValuePropertyInfo(
			final MClassInfo<T, C> classInfo, final VPI propertyInfo) {
		return new CMValuePropertyInfo<T, C>(
				createPropertyInfoOrigin((PI) propertyInfo), classInfo,
				propertyInfo.getName(), getTypeInfo(propertyInfo), null, null);
	}

	// Single-element property: exactly one type ref.
	protected MPropertyInfo<T, C> createElementPropertyInfo(
			final MClassInfo<T, C> classInfo, final EPI ep) {
		final TR typeRef = (TR) ep.getTypes().get(0);
		return new CMElementPropertyInfo<T, C>(
				createPropertyInfoOrigin((PI) ep), classInfo, ep.getName(),
				ep.isCollection() && !ep.isValueList(), ep.isRequired(),
				getTypeInfo(ep, typeRef), typeRef.getTagName(),
				ep.getXmlName(), typeRef.isNillable(),
				getDefaultValue(typeRef),
				getDefaultValueNamespaceContext(typeRef));
	}

	// Multi-element ("choice") property: one CMElementTypeRef per type ref.
	protected MPropertyInfo<T, C> createElementsPropertyInfo(
			final MClassInfo<T, C> classInfo, final EPI ep) {
		List<? extends TR> types = (List<? extends TR>) ep.getTypes();
		final Collection<MElementTypeRef<T, C>> typedElements = new ArrayList<MElementTypeRef<T, C>>(
				types.size());
		for (TR typeRef : types) {
			typedElements.add(new CMElementTypeRef<T, C>(
					createElementTypeRefOrigin(ep, typeRef), typeRef
							.getTagName(), getTypeInfo(ep, typeRef), typeRef
							.isNillable(), getDefaultValue(typeRef),
					getDefaultValueNamespaceContext(typeRef)));
		}
		return new CMElementsPropertyInfo<T, C>(
				createPropertyInfoOrigin((PI) ep), classInfo, ep.getName(),
				ep.isCollection() && !ep.isValueList(), ep.isRequired(),
				typedElements, ep.getXmlName());
	}

	protected MPropertyInfo<T, C> createAnyElementPropertyInfo(
			final MClassInfo<T, C> classInfo, final RPI rp) {
		return new CMAnyElementPropertyInfo<T, C>(
				createPropertyInfoOrigin((PI) rp), classInfo, rp.getName(),
				rp.isCollection(), rp.isRequired(), rp.isMixed(),
				rp.getWildcard().allowDom, rp.getWildcard().allowTypedObject);
	}

	// Reference property with exactly one target element. When no wildcard
	// is present, DOM content defaults to false and typed objects to true.
	protected MPropertyInfo<T, C> createElementRefPropertyInfo(
			final MClassInfo<T, C> classInfo, final RPI rp) {
		final Element<T, C> element = rp.getElements().iterator().next();
		return new CMElementRefPropertyInfo<T, C>(
				createPropertyInfoOrigin((PI) rp), classInfo, rp.getName(),
				rp.isCollection(), rp.isRequired(), getTypeInfo(rp, element),
				element.getElementName(), rp.getXmlName(),
				rp.isMixed(), rp.getWildcard() == null ? false
						: rp.getWildcard().allowDom,
				rp.getWildcard() == null ? true
						: rp.getWildcard().allowTypedObject,
				getDefaultValue(element),
				getDefaultValueNamespaceContext(element));
	}

	// Reference property with several target elements.
	protected MPropertyInfo<T, C> createElementRefsPropertyInfo(
			final MClassInfo<T, C> classInfo, final RPI rp) {
		final List<MElement<T, C>> typedElements = new ArrayList<MElement<T, C>>();
		for (Element<T, C> e : rp.getElements()) {
			final E element = (E) e;
			typedElements.add(new CMElement<T, C>(createElementOrigin(element),
					element.getElementName(), getTypeInfo(rp, element), true,
					getDefaultValue(element),
					getDefaultValueNamespaceContext(element)));
		}
		return new CMElementRefsPropertyInfo<T, C>(
				createPropertyInfoOrigin((PI) rp), classInfo, rp.getName(),
				rp.isCollection(), rp.isRequired(), typedElements,
				rp.getXmlName(), rp.isMixed(), rp.getWildcard() == null ? false
						: rp.getWildcard().allowDom,
				rp.getWildcard() == null ? true
						: rp.getWildcard().allowTypedObject);
	}

	// The xs:anyAttribute catch-all; the JAXB-conventional property name
	// "otherAttributes" is fixed here.
	protected CMAnyAttributePropertyInfo<T, C> createAnyAttributePropertyInfo(
			final MClassInfo<T, C> classInfo) {
		return new CMAnyAttributePropertyInfo<T, C>(
				createAnyAttributePropertyInfoOrigin(), classInfo,
				"otherAttributes");
	}

	protected MTypeInfo<T, C> getTypeInfo(final ValuePropertyInfo<T, C> vp) {
		return getTypeInfo(vp, (TI) vp.ref().iterator().next(),
				vp.isCollection(), vp.getAdapter(), vp.id(),
				vp.getExpectedMimeType());
	}

	protected MTypeInfo<T, C> getTypeInfo(final AttributePropertyInfo<T, C> ap) {
		return getTypeInfo(ap, (TI) ap.ref().iterator().next(),
				ap.isCollection(), ap.getAdapter(), ap.id(),
				ap.getExpectedMimeType());
	}

	protected MTypeInfo<T, C> getTypeInfo(final ElementPropertyInfo<T, C> ep,
			final TR typeRef) {
		return getTypeInfo(ep, (TI) typeRef.getTarget(),
				ep.isValueList(), ep.getAdapter(), ep.id(), ep.getExpectedMimeType());
	}

	protected MTypeInfo<T, C> getTypeInfo(final ReferencePropertyInfo<T, C> rp,
			Element<T, C> element) {
		return getTypeInfo(rp, (TI) element, false, rp.getAdapter(), rp.id(),
				rp.getExpectedMimeType());
	}

	// Default value for an element: only resolvable when the element info
	// has a property with a single type ref; otherwise null.
	private String getDefaultValue(Element<T, C> element) {
		if (element instanceof ElementInfo) {
			final ElementInfo<T, C> elementInfo = (ElementInfo<T, C>) element;
			final ElementPropertyInfo<T, C> property = elementInfo
					.getProperty();
			if (property != null) {
				final List<? extends TR> types = (List<? extends TR>) property.getTypes();
				if (types.size() == 1) {
					final TR typeRef = types.get(0);
					return getDefaultValue(typeRef);
				}
			}
		}
		return null;
	}

	// Mirrors getDefaultValue(Element): namespace context of that default.
	private NamespaceContext getDefaultValueNamespaceContext(
			Element<T, C> element) {
		if (element instanceof ElementInfo) {
			final ElementInfo<T, C> elementInfo = (ElementInfo<T, C>) element;
			final ElementPropertyInfo<T, C> property = elementInfo
					.getProperty();
			if (property != null) {
				final List<? extends TypeRef<T, C>> types = property.getTypes();
				if (types.size() == 1) {
					final TypeRef<T, C> typeRef = types.get(0);
					return getDefaultValueNamespaceContext(typeRef);
				}
			}
		}
		return null;
	}

	// Environment-specific lookups supplied by concrete subclasses.
	protected abstract MPackageInfo getPackage(CI info);

	protected abstract String getLocalName(CI info);

	protected abstract MClassInfo<T, C> getScope(CI info);

	protected abstract MPackageInfo getPackage(ELI info);

	protected abstract String getLocalName(ELI info);

	protected abstract String getLocalName(EI info);

	protected abstract MPackageInfo getPackage(EI info);

	protected abstract MContainer getContainer(CI info);

	protected abstract MContainer getContainer(EI info);

	protected abstract MContainer getContainer(ELI info);

	//
	protected MBuiltinLeafInfo<T, C> createBuiltinLeafInfo(BLI info) {
		return new CMBuiltinLeafInfo<T, C>(createBuiltinLeafInfoOrigin(info),
				info.getType(), info.getTypeName());
	}

	protected MEnumLeafInfo<T, C> createEnumLeafInfo(final ELI info) {
		@SuppressWarnings("unchecked")
		final TI baseType = (TI) info.getBaseType();
		return new CMEnumLeafInfo<T, C>(createEnumLeafInfoOrigin(info),
				info.getClazz(), getPackage(info), getContainer(info),
				getLocalName(info), getTypeInfo(baseType),
				info.getElementName(), info.getTypeName());
	}

	protected CMEnumConstantInfo<T, C> createEnumContantInfo(
			MEnumLeafInfo<T, C> enumLeafInfo, EC enumConstant) {
		return new CMEnumConstantInfo<T, C>(
				createEnumConstantInfoOrigin(enumConstant), enumLeafInfo,
				enumConstant.getLexicalValue());
	}

	// Builds a full element info, resolving scope (if any) and the head of
	// the substitution group (if the element substitutes another).
	protected MElementInfo<T, C> createElementInfo(EI element) {
		@SuppressWarnings("unchecked")
		final CI scopeCI = (CI) element.getScope();
		final MClassInfo<T, C> scope = element.getScope() == null ? null
				: getTypeInfo(scopeCI);
		final QName substitutionHead = element.getSubstitutionHead() == null ? null
				: element.getSubstitutionHead().getElementName();
		final MElementInfo<T, C> elementInfo = new CMElementInfo<T, C>(
				createElementInfoOrigin(element), getPackage(element),
				getContainer(element), getLocalName(element),
				element.getElementName(), scope, getTypeInfo(element),
				substitutionHead, getDefaultValue(element),
				getDefaultValueNamespaceContext(element));
		return elementInfo;
	}

	protected MTypeInfo<T, C> createWildcardTypeInfo(WTI info) {
		return new CMWildcardTypeInfo<T, C>(createWildcardTypeInfoOrigin(info),
				info.getType());
	}

	// Origin factory methods: wrap the RI model object so that the
	// meta-model can point back at where each item came from. Subclasses
	// may override to supply richer origin implementations.
	protected MModelInfoOrigin createModelInfoOrigin(TIS info) {
		return new CMModelInfoOrigin<T, C, TIS>(info);
	}

	protected MBuiltinLeafInfoOrigin createBuiltinLeafInfoOrigin(BLI info) {
		return new CMBuiltinLeafInfoOrigin<T, C, BLI>(info);
	}

	protected MClassInfoOrigin createClassInfoOrigin(CI info) {
		return new CMClassInfoOrigin<T, C, CI>(info);
	}

	protected MPropertyInfoOrigin createAnyAttributePropertyInfoOrigin() {
		return new CMAnyAttributePropertyInfoOrigin();
	}

	protected MPropertyInfoOrigin createPropertyInfoOrigin(PI info) {
		return new CMPropertyInfoOrigin<T, C, PI>(info);
	}

	protected MElementOrigin createElementOrigin(E info) {
		return new CMElementOrigin<T, C, E>(info);
	}

	protected MElementTypeRefOrigin createElementTypeRefOrigin(EPI ep,
			TR typeRef) {
		return new CMElementTypeRefOrigin<T, C, EPI, TR>(ep, typeRef);
	}

	protected MElementInfoOrigin createElementInfoOrigin(EI info) {
		return new CMElementInfoOrigin<T, C, EI>(info);
	}

	protected MEnumLeafInfoOrigin createEnumLeafInfoOrigin(ELI info) {
		return new CMEnumLeafInfoOrigin<T, C, ELI>(info);
	}

	protected MEnumConstantInfoOrigin createEnumConstantInfoOrigin(EC info) {
		return new CMEnumConstantInfoOrigin<T, C, EC>(info);
	}

	protected MWildcardTypeInfoOrigin createWildcardTypeInfoOrigin(WTI info) {
		return new CMWildcardTypeInfoOrigin<T, C, WTI>(info);
	}

	// Creates the platform representation of "list of elementType".
	protected abstract T createListType(T elementType);

	/**
	 * Returns Java class for the reference type or null if it can't be found.
	 *
	 * @param referencedType
	 *            referenced type.
	 * @return Java class for the reference type or null.
	 */
	protected abstract Class<?> loadClass(T referencedType);

	protected abstract String getDefaultValue(API propertyInfo);

	protected abstract NamespaceContext getDefaultValueNamespaceContext(
			API propertyInfo);

	protected abstract String getDefaultValue(TypeRef<T, C> typeRef);

	protected abstract NamespaceContext getDefaultValueNamespaceContext(
			TypeRef<T, C> typeRef);
}
|
Stephan202/jaxb2-basics
|
runtime/src/main/java/org/jvnet/jaxb2_commons/xml/bind/model/concrete/CMInfoFactory.java
|
Java
|
bsd-2-clause
| 22,939 |
// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
info: |
    When "String" is called as part of a new expression, it is a constructor: it initialises the newly created object and
    The [[Value]] property of the newly constructed object is set to ToString(value), or to the empty string if value is not supplied
es5id: 15.5.2.1_A1_T16
description: >
    Creating string object with "new String()" initialized with .12345
    and other numbers
---*/
// Each group below checks three things for `new String(<number>)`:
// (1) the result is an object, (2) its constructor is String, and
// (3) it loosely equals the expected decimal string per ToString(Number)
// (ES5.1 9.8.1).
var __str = new String(.12345);
//////////////////////////////////////////////////////////////////////////////
//CHECK#1
if (typeof __str !== "object") {
  $ERROR('#1: __str =new String(.12345); typeof __str === "object". Actual: typeof __str ===' + typeof __str);
}
//
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//CHECK#1.5
if (__str.constructor !== String) {
  $ERROR('#1.5: __str =new String(.12345); __str.constructor === String. Actual: __str.constructor ===' + __str.constructor);
}
//
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//CHECK#2
if (__str != "0.12345") {
  $ERROR('#2: __str =new String(.12345); __str =="0.12345". Actual: __str ==' + __str);
}
//
//////////////////////////////////////////////////////////////////////////////
__str = new String(.012345);
//////////////////////////////////////////////////////////////////////////////
//CHECK#3
if (typeof __str !== "object") {
  $ERROR('#3: __str =new String(.012345); typeof __str === "object". Actual: typeof __str ===' + typeof __str);
}
//
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//CHECK#2.5
if (__str.constructor !== String) {
  $ERROR('#3.5: __str =new String(.012345); __str.constructor === String. Actual: __str.constructor ===' + __str.constructor);
}
//
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//CHECK#4
if (__str != "0.012345") {
  $ERROR('#4: __str =new String(.012345); __str =="0.012345". Actual: __str ==' + __str);
}
//
//////////////////////////////////////////////////////////////////////////////
__str = new String(.0012345);
//////////////////////////////////////////////////////////////////////////////
//CHECK#5
if (typeof __str !== "object") {
  $ERROR('#5: __str =new String(.0012345); typeof __str === "object". Actual: typeof __str ===' + typeof __str);
}
//
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//CHECK#5.5
if (__str.constructor !== String) {
  $ERROR('#5.5: __str =new String(.0012345); __str.constructor === String. Actual: __str.constructor ===' + __str.constructor);
}
//
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//CHECK#6
if (__str != "0.0012345") {
  $ERROR('#6: __str =new String(.0012345); __str =="0.0012345". Actual: __str ==' + __str);
}
//
//////////////////////////////////////////////////////////////////////////////
__str = new String(.00000012345);
//////////////////////////////////////////////////////////////////////////////
//CHECK#7
if (typeof __str !== "object") {
  $ERROR('#7: __str =new String(.00000012345); typeof __str === "object". Actual: typeof __str ===' + typeof __str);
}
//
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//CHECK#7.5
if (__str.constructor !== String) {
  $ERROR('#7.5: __str =new String(.00000012345); __str.constructor === String. Actual: __str.constructor ===' + __str.constructor);
}
//
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//CHECK#8
// Numbers smaller than 1e-6 switch to exponent notation per ES5.1 9.8.1
// step 9, hence "1.2345e-7" rather than "0.00000012345".
if (__str != "1.2345e-7") {
  $ERROR('#8: __str =new String(.00000012345); __str =="1.2345e-7". Actual: __str ==' + __str);
}
//
//////////////////////////////////////////////////////////////////////////////
|
sebastienros/jint
|
Jint.Tests.Test262/test/built-ins/String/S15.5.2.1_A1_T16.js
|
JavaScript
|
bsd-2-clause
| 4,487 |
# Homebrew cask installing Veertu Anka Flow via the vendor's pkg installer.
cask 'anka-flow' do
  version '1.1.1.79'
  sha256 '9f91222458f5b7b52bee53a62e878faed4a4894ca02fe3d37b52b79b54c523fa'
  # d1efqjhnhbvc57.cloudfront.net was verified as official when first introduced to the cask
  # The referer header is required by the vendor's CDN for the download.
  url "https://d1efqjhnhbvc57.cloudfront.net/AnkaFlow-#{version}.pkg",
      referer: 'https://veertu.com/download-anka-run/'
  appcast 'https://ankadoc.bitbucket.io/release-notes/index.html',
          checkpoint: '902e8a6a51287459ac85fb33f03569004dc105637b9c6a86827beacf53f341c9'
  name 'Veertu Anka Flow'
  homepage 'https://veertu.com/'
  depends_on macos: '>= :yosemite'
  pkg "AnkaFlow-#{version}.pkg"
  # Uninstall: unload the vendor launchd jobs, then run the bundled
  # uninstall script with -f (force) as root.
  uninstall launchctl: [
                         'com.veertu.nlimit',
                         'com.veertu.vlaunch',
                       ],
            script:    {
                         executable: '/Library/Application Support/Veertu/Anka/tools/uninstall.sh',
                         args:       ['-f'],
                         sudo:       true,
                       }
  # zap removes user-level state left behind after uninstall.
  zap trash: [
               '~/.anka',
               '~/Library/Application Support/Veertu/Anka',
               '~/Library/Logs/Anka',
               '~/Library/Preferences/com.veertu.ankaview.plist',
               '/Library/Application Support/Veertu/Anka',
             ],
      rmdir: [
               '~/Library/Application Support/Veertu',
               '/Library/Application Support/Veertu',
             ]
  caveats <<~EOS
    Installing this Cask means you have AGREED to the
    Veertu End User License Agreement at
    https://veertu.com/terms-and-conditions/
  EOS
end
|
tedski/homebrew-cask
|
Casks/anka-flow.rb
|
Ruby
|
bsd-2-clause
| 1,593 |
/*
* Copyright (C) 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "config.h"
#if ENABLE(MEDIA_STREAM)
#include "MediaTrackConstraints.h"
#include "MediaTrackConstraint.h"
#include "MediaTrackConstraintSet.h"
#include "NotImplemented.h"
using namespace JSC;
namespace WebCore {
// Wraps a MediaConstraintsImpl in a ref-counted MediaTrackConstraints facade.
RefPtr<MediaTrackConstraints> MediaTrackConstraints::create(PassRefPtr<MediaConstraintsImpl> constraints)
{
    return adoptRef(new MediaTrackConstraints(constraints));
}
MediaTrackConstraints::MediaTrackConstraints(PassRefPtr<MediaConstraintsImpl> constraints)
    : m_constraints(constraints)
{
}
// Accessor for the "optional" constraints list.  Still a stub: always
// returns an empty vector (tracked by the WebKit bug linked below).
Vector<PassRefPtr<MediaTrackConstraint>> MediaTrackConstraints::optional(bool) const
{
    // https://bugs.webkit.org/show_bug.cgi?id=121954
    notImplemented();
    return Vector<PassRefPtr<MediaTrackConstraint>>();
}
// Accessor for the "mandatory" constraint set.  Still a stub: always
// returns nullptr (same bug as optional() above).
RefPtr<MediaTrackConstraintSet> MediaTrackConstraints::mandatory(bool) const
{
    // https://bugs.webkit.org/show_bug.cgi?id=121954
    notImplemented();
    return nullptr;
}
} // namespace WebCore
#endif
|
aosm/WebCore
|
Modules/mediastream/MediaTrackConstraints.cpp
|
C++
|
bsd-2-clause
| 2,294 |
# Generated by Django 3.1.4 on 2020-12-15 15:58
from django.db import migrations
def copy_labels(apps, schema_editor):
    """Copy every trek label into common.Label and link it via ``labels2``.

    For each label attached to a trek through the legacy ``labels`` M2M,
    get-or-create a matching ``common.Label`` (carrying over ``advice`` and
    ``filter_rando``) and attach it to the trek's new ``labels2`` relation.
    """
    Trek = apps.get_model('trekking', 'Trek')
    Label = apps.get_model('common', 'Label')
    for trek in Trek.objects.all():
        for old_label in trek.labels.all():
            defaults = {'advice': old_label.advice, 'filter': old_label.filter_rando}
            new_label, _created = Label.objects.get_or_create(name=old_label.name, defaults=defaults)
            trek.labels2.add(new_label)
class Migration(migrations.Migration):
    """Data migration: copy legacy trek ``labels`` into ``labels2``.

    Forward: run :func:`copy_labels`.  Reverse: explicit no-op, so the
    migration can be unapplied without Django raising IrreversibleError
    (the copied labels are intentionally left in place).
    """

    dependencies = [
        ('trekking', '0023_trek_labels2'),
    ]

    operations = [
        # Provide a no-op reverse so ``migrate`` backwards past this point
        # works; the original RunPython had no reverse_code and was
        # irreversible.
        migrations.RunPython(copy_labels, migrations.RunPython.noop),
    ]
|
makinacorpus/Geotrek
|
geotrek/trekking/migrations/0024_copy_labels.py
|
Python
|
bsd-2-clause
| 649 |
# rc-animate
---
Animate React elements easily.
[![NPM version][npm-image]][npm-url]
[![build status][travis-image]][travis-url]
[![Test coverage][coveralls-image]][coveralls-url]
[![gemnasium deps][gemnasium-image]][gemnasium-url]
[![node version][node-image]][node-url]
[![npm download][download-image]][download-url]
[](https://saucelabs.com/u/rc_animate)
[](https://saucelabs.com/u/rc_animate)
[npm-image]: http://img.shields.io/npm/v/rc-animate.svg?style=flat-square
[npm-url]: http://npmjs.org/package/rc-animate
[travis-image]: https://img.shields.io/travis/react-component/animate.svg?style=flat-square
[travis-url]: https://travis-ci.org/react-component/animate
[coveralls-image]: https://img.shields.io/coveralls/react-component/animate.svg?style=flat-square
[coveralls-url]: https://coveralls.io/r/react-component/animate?branch=master
[gemnasium-image]: http://img.shields.io/gemnasium/react-component/animate.svg?style=flat-square
[gemnasium-url]: https://gemnasium.com/react-component/animate
[node-image]: https://img.shields.io/badge/node.js-%3E=_0.10-green.svg?style=flat-square
[node-url]: http://nodejs.org/download/
[download-image]: https://img.shields.io/npm/dm/rc-animate.svg?style=flat-square
[download-url]: https://npmjs.org/package/rc-animate
## Feature
* Supports IE8 and above, Chrome, Firefox, and Safari
## install
[](https://npmjs.org/package/rc-animate)
## Usage
```js
var Animate = require('rc-animate');
var React = require('react');
React.render(<Animate animation={{}}><p key="1">1</p><p key="2">2</p></Animate>, container);
```
## API
### props
<table class="table table-bordered table-striped">
<thead>
<tr>
<th style="width: 100px;">name</th>
<th style="width: 50px;">type</th>
<th style="width: 50px;">default</th>
<th>description</th>
</tr>
</thead>
<tbody>
<tr>
<td>component</td>
<td>React.Element/String</td>
<td>'span'</td>
<td>wrap dom node or component for children. set to '' if you do not wrap for only one child</td>
</tr>
<tr>
<td>showProp</td>
<td>String</td>
<td></td>
<td>using prop for show and hide. [demo](http://react-component.github.io/animate/examples/hide-todo.html) </td>
</tr>
<tr>
<td>exclusive</td>
<td>Boolean</td>
<td></td>
<td>whether allow only one set of animations(enter and leave) at the same time. </td>
</tr>
<tr>
<td>transitionName</td>
<td>String</td>
<td></td>
<td>transitionName, need to specify corresponding css</td>
</tr>
<tr>
<td>transitionAppear</td>
<td>Boolean</td>
<td>false</td>
<td>whether support transition appear anim</td>
</tr>
<tr>
<td>transitionEnter</td>
<td>Boolean</td>
<td>true</td>
<td>whether support transition enter anim</td>
</tr>
<tr>
<td>transitionLeave</td>
<td>Boolean</td>
<td>true</td>
<td>whether support transition leave anim</td>
</tr>
<tr>
<td>onEnd</td>
<td>function(key:String, exists:Boolean)</td>
<td>true</td>
<td>animation end callback</td>
</tr>
<tr>
<td>animation</td>
<td>Object</td>
<td>{}</td>
<td>
to animate with js. see animation format below.
</td>
</tr>
</tbody>
</table>
### animation format
An object with `appear`, `enter`, and `leave` as keys. For example:
```js
{
appear: function(node, done){
node.style.display='none';
$(node).slideUp(done);
return {
stop:function(){
// jq will call done on finish
$(node).stop(true);
}
};
},
enter: function(){
this.appear.apply(this,arguments);
},
leave: function(node, done){
node.style.display='';
$(node).slideDown(done);
return {
stop:function(){
// jq will call done on finish
$(node).stop(true);
}
};
}
}
```
## Development
```
npm install
npm start
```
## Example
http://localhost:8200/examples/index.md
online example: http://react-component.github.io/animate/examples/
## Test Case
http://localhost:8200/tests/runner.html?coverage
## Coverage
http://localhost:8200/node_modules/rc-server/node_modules/node-jscover/lib/front-end/jscoverage.html?w=http://localhost:8200/tests/runner.html?coverage
## License
rc-animate is released under the MIT license.
|
Axivity/openmovement-axsys-client
|
vendor/rc-tooltip/node_modules/rc-trigger/node_modules/rc-animate/README.md
|
Markdown
|
bsd-2-clause
| 4,852 |
# Homebrew formula for cuetools, command-line utilities for manipulating
# CUE sheets and TOC files (cueconvert, cuebreakpoints, cueprint, ...).
class Cuetools < Formula
  desc "Utilities for .cue and .toc files"
  homepage "https://github.com/svend/cuetools"
  url "https://github.com/svend/cuetools/archive/1.4.1.tar.gz"
  sha256 "24a2420f100c69a6539a9feeb4130d19532f9f8a0428a8b9b289c6da761eb107"
  head "https://github.com/svend/cuetools.git"
  bottle do
    cellar :any_skip_relocation
    sha256 "1e36c3c8d2d53947b73a9f0a0aed74145e2b1890f83764de02f1d12566d0300f" => :mojave
    sha256 "4393d6db857a9568a34de3a09ff049fbec9a55a95b029eacd24e35d6ce792074" => :high_sierra
    sha256 "9456e5957a78f993f5a8cef76aa583ac6a42a8298fb05bded243dbaf810f9a44" => :sierra
    sha256 "7f0effc75d64fca0f2695b5f7ddb4d8713cc83522d40dcd37842e83c120ac117" => :el_capitan
    sha256 "81d06ef2e3d98061f332a535b810102c1be0505371c1ac1aed711cf2ae8de5a3" => :yosemite
    sha256 "95216c0df3840b2602e61dd3bef7d4c9b65cec0315e5b23ac87329320d9f6be9" => :mavericks
  end
  # Autotools are only needed to regenerate the build system from the
  # release tarball (see autoreconf in install below).
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  # see https://github.com/svend/cuetools/pull/18
  patch :DATA
  def install
    system "autoreconf", "-i"
    system "./configure", "--prefix=#{prefix}", "--mandir=#{man}"
    system "make", "install"
  end
  # Smoke test: convert a minimal CUE sheet to TOC format and check the
  # output file appears.
  test do
    (testpath/"test.cue").write <<~EOS
      FILE "sampleimage.bin" BINARY
        TRACK 01 MODE1/2352
          INDEX 01 00:00:00
    EOS
    system "cueconvert", testpath/"test.cue", testpath/"test.toc"
    assert_predicate testpath/"test.toc", :exist?
  end
end
__END__
diff --git a/configure.ac b/configure.ac
index f54bb92..84ab467 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1,5 +1,5 @@
AC_INIT([cuetools], [1.4.0], [[email protected]])
-AM_INIT_AUTOMAKE([-Wall -Werror foreign])
+AM_INIT_AUTOMAKE([-Wall -Werror -Wno-extra-portability foreign])
AC_PROG_CC
AC_PROG_INSTALL
AC_PROG_RANLIB
|
jdubois/homebrew-core
|
Formula/cuetools.rb
|
Ruby
|
bsd-2-clause
| 1,827 |
#define SOL_CHECK_ARGUMENTS 1
#include <sol.hpp>
#include "assert.hpp"
#include <iostream>
// sol2 example: re-rooting a Lua function received inside a coroutine
// thread onto the main sol::state so it survives the coroutine's
// garbage collection and can be called later from C++.
int main(int, char*[]) {
	std::cout << "=== coroutine state transfer ===" << std::endl;
	sol::state lua;
	lua.open_libraries();
	sol::function transferred_into;
	lua["f"] = [&lua, &transferred_into](sol::object t, sol::this_state this_L) {
		std::cout << "state of main : " << (void*)lua.lua_state() << std::endl;
		std::cout << "state of function : " << (void*)this_L.lua_state() << std::endl;
		// pass original lua_State* (or sol::state/sol::state_view)
		// transfers ownership from the state of "t",
		// to the "lua" sol::state
		transferred_into = sol::function(lua, t);
	};
	lua.script(R"(
i = 0
function test()
co = coroutine.create(
function()
local g = function() i = i + 1 end
f(g)
g = nil
collectgarbage()
end
)
coroutine.resume(co)
co = nil
collectgarbage()
end
)");
	// give it a try
	lua.safe_script("test()");
	// should call 'g' from main thread, increment i by 1
	transferred_into();
	// check
	int i = lua["i"];
	c_assert(i == 1);
	std::cout << std::endl;
	return 0;
}
|
Project-OSRM/osrm-backend
|
third_party/sol2/examples/coroutine_state.cpp
|
C++
|
bsd-2-clause
| 1,131 |
/*
* w-driver-volleyball-lstm-evaluator.cpp
*
* Created on: Jul 13, 2015
* Author: msibrahi
*/
#include <iostream>
#include <vector>
#include <stdio.h>
#include <string>
#include <set>
#include <set>
#include <map>
#include <iomanip>
using std::vector;
using std::set;
using std::multiset;
using std::map;
using std::pair;
using std::string;
using std::endl;
using std::cerr;
#include "boost/algorithm/string.hpp"
#include "google/protobuf/text_format.h"
#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/net.hpp"
#include "caffe/proto/caffe.pb.h"
#include "caffe/util/db.hpp"
#include "caffe/util/io.hpp"
#include "caffe/vision_layers.hpp"
using caffe::Blob;
using caffe::Caffe;
using caffe::Datum;
using caffe::Net;
using caffe::Layer;
using caffe::LayerParameter;
using caffe::DataParameter;
using caffe::NetParameter;
using boost::shared_ptr;
namespace db = caffe::db;
#include "../src/utilities.h"
#include "../src/leveldb-reader.h"
// Print per-frame predictions, overall temporal accuracy, and two confusion
// matrices (raw counts, then row percentages) to stderr.
// truthLabels/resultLabels are parallel arrays of ground-truth vs predicted
// label ids; w is the temporal window size (used for reporting only).
void evaluate(vector<int> truthLabels, vector<int> resultLabels, int w) {
  set<int> total_labels;
  // confusion_freq_maps[truth][predicted] = occurrence count.
  map<int, map<int, int> > confusion_freq_maps;
  map<int, int> label_freq;
  int correct = 0;
  cerr<<"\n\n";
  for (int i = 0; i < (int) truthLabels.size(); ++i) {
    correct += truthLabels[i] == resultLabels[i];
    cerr << "Test " << i + 1 << ": Result = " << resultLabels[i] << " GroundTruth = " << truthLabels[i] << "\n";
    confusion_freq_maps[truthLabels[i]][resultLabels[i]]++;
    total_labels.insert(truthLabels[i]);
    total_labels.insert(resultLabels[i]);
    label_freq[truthLabels[i]]++;
  }
  cerr.setf(std::ios::fixed);
  cerr.precision(2);
  cerr<<"\n\n";
  cerr << "Total testing frames: " << truthLabels.size() << " with temporal window: " << w << "\n";
  cerr << "Temporal accuracy : " << 100.0 * correct / truthLabels.size() << " %\n";
  cerr << "\n=======================================================================================\n";
  cerr << "\nConfusion Matrix - Truth (col) / Result(row)\n\n";
  // First matrix: raw counts plus a per-row correct/total summary.
  cerr << std::setw(5) << "T/R" << ": ";
  for (auto r_label : total_labels)
    cerr << std::setw(5) << r_label;
  cerr << "\n=======================================================================================\n";
  for (auto t_label : total_labels) {
    int sum = 0;
    cerr << std::setw(5) << t_label << ": ";
    for (auto r_label : total_labels)
    {
      cerr << std::setw(5) << confusion_freq_maps[t_label][r_label];
      sum += confusion_freq_maps[t_label][r_label];
    }
    double percent = 0;
    if (label_freq[t_label] > 0)
      percent = 100.0 * confusion_freq_maps[t_label][t_label] / label_freq[t_label];
    cerr << " \t=> Total Correct = " << std::setw(5) << confusion_freq_maps[t_label][t_label] << " / " << std::setw(5) << sum << " = " << percent << " %\n";
  }
  cerr<<"\n\n";
  // Second matrix: same cells expressed as a percentage of the truth row.
  cerr << std::setw(7) << "T/R" << ": ";
  for (auto r_label : total_labels)
    cerr << std::setw(7) << r_label;
  cerr << "\n=======================================================================================\n";
  for (auto t_label : total_labels) {
    cerr << std::setw(7) << t_label << ": ";
    for (auto r_label : total_labels)
    {
      double percent = 0;
      if (label_freq[t_label] > 0)
        percent = 100.0 * confusion_freq_maps[t_label][r_label] / label_freq[t_label];
      cerr << std::setw(7) << percent;
    }
    cerr<<"\n";
  }
  cerr<<"\nTo get labels corresponding to IDs..see dataset loading logs\n";
}
// Index of the largest element of v; the first occurrence wins on ties.
// Precondition: v is non-empty (checked with assert in debug builds only).
int getArgmax(std::vector<float> &v) {
  assert(!v.empty());
  int best = 0;
  const int n = (int) v.size();
  for (int i = 1; i < n; ++i) {
    if (v[i] > v[best])
      best = i;
  }
  return best;
}
// Run num_mini_batches forward passes of a pretrained caffe net.  Each batch
// is one temporal window of frames_window frames that must all share one
// ground-truth label; the predicted label is the argmax of the blob scores
// summed over the window.  Finally prints an accuracy report via evaluate().
// Command-line arguments are consumed positionally through MostCV helpers:
// window size, CPU/GPU mode (+device id), model, prototxt, blob name, batches.
template<typename Dtype>
void feature_extraction_pipeline(int &argc, char** &argv) {
  int frames_window = MostCV::consumeIntParam(argc, argv);
  LOG(ERROR)<< "Temporal Window = " << frames_window;
  string computation_mode = MostCV::consumeStringParam(argc, argv);
  if (strcmp(computation_mode.c_str(), "GPU") == 0) {
    uint device_id = MostCV::consumeIntParam(argc, argv);
    LOG(ERROR)<< "Using GPU";
    LOG(ERROR)<< "Using Device_id = " << device_id;
    Caffe::SetDevice(device_id);
    Caffe::set_mode(Caffe::GPU);
  } else {
    LOG(ERROR)<< "Using CPU";
    Caffe::set_mode(Caffe::CPU);
  }
  string pretrained_binary_proto(MostCV::consumeStringParam(argc, argv));
  string feature_extraction_proto(MostCV::consumeStringParam(argc, argv));
  LOG(ERROR)<<"Model: "<<pretrained_binary_proto<<"\n";
  LOG(ERROR)<<"Proto: "<<feature_extraction_proto<<"\n";
  LOG(ERROR)<<"Creating the test network\n";
  shared_ptr<Net<Dtype> > feature_extraction_net(new Net<Dtype>(feature_extraction_proto, caffe::Phase::TEST));
  LOG(ERROR)<<"Loading the Model\n";
  feature_extraction_net->CopyTrainedLayersFrom(pretrained_binary_proto);
  string blob_name = MostCV::consumeStringParam(argc, argv);
  LOG(ERROR)<<"blob_name: "<<blob_name<<"\n";
  CHECK(feature_extraction_net->has_blob(blob_name)) << "Unknown feature blob name " << blob_name << " in the network " << feature_extraction_proto;
  int num_mini_batches = MostCV::consumeIntParam(argc, argv);
  LOG(ERROR)<<"num_mini_batches: "<<num_mini_batches<<"\n";
  vector<Blob<float>*> input_vec;
  int batch_size = -1;
  int dim_features = -1;
  std::set<int> labels; // every (2w+1) * batch size MUST all have same label
  vector<int> truthLabels;
  vector<int> propAvgMaxResultLabels;
  for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index) { // e.g. 100 iterations. Probably roll on data if needed
    feature_extraction_net->Forward(input_vec); // Take one batch of data (e.g. 50 images), and pass them to end of network
    // Load the Labels
    const shared_ptr<Blob<Dtype> > label_blob = feature_extraction_net->blob_by_name("label");
    batch_size = label_blob->num(); // e.g. 50 batches
    // The data layer must deliver exactly one temporal window per batch.
    assert(batch_size == frames_window);
    int current_label = -1;
    for (int n = 0; n < batch_size; ++n) {
      const Dtype* label_blob_data = label_blob->cpu_data() + label_blob->offset(n); // move offset to ith blob in batch
      current_label = label_blob_data[0]; // all will be same value
      labels.insert(current_label);
      if (n == 0)
        truthLabels.push_back(current_label);
    }
    if (labels.size() != 1) { // every 1 batch should have same value
      LOG(ERROR)<< "Something wrong. every 1 batch should have same value. New value at element " << batch_index + 1 << "\n";
      assert(false);
    }
    labels.clear();
    const shared_ptr<Blob<Dtype> > feature_blob = feature_extraction_net->blob_by_name(blob_name); // get e.g. fc7 blob for the batch
    dim_features = feature_blob->count() / batch_size;
    assert(dim_features > 1);
    const Dtype* feature_blob_data = nullptr;
    // Sum the per-frame score vectors over the window, then take the argmax
    // of the sum as the window's predicted label.
    vector<float> test_case_sum(dim_features);
    for (int n = 0; n < batch_size; ++n) {
      feature_blob_data = feature_blob->cpu_data() + feature_blob->offset(n); // move offset to ith blob in batch
      vector<float> test_case;
      for (int j = 0; j < dim_features; ++j) {
        test_case.push_back(feature_blob_data[j]);
        test_case_sum[j] += feature_blob_data[j];
      }
    }
    propAvgMaxResultLabels.push_back( getArgmax(test_case_sum) );
  }
  evaluate(truthLabels, propAvgMaxResultLabels, 1);
}
// Entry point: initialize glog, sanity-check the argument count, then hand
// everything to feature_extraction_pipeline (which consumes argv in order).
int main(int argc, char** argv) {
  ::google::InitGoogleLogging(argv[0]);
  MostCV::consumeStringParam(argc, argv); // read program entry data
  if (argc < 6) {
    LOG(ERROR)<< "At least 6 parameters expected\n";
    assert(false);
  }
  LOG(ERROR)<< "Make sure to have LD_LIBRARY_PATH pointing to LSTM implementation in case of LSTM\n\n";
  feature_extraction_pipeline<float>(argc, argv);
  return 0;
}
|
mostafa-saad/deep-activity-rec
|
eclipse-project/ibrahim16-deep-act-rec-part/apps/exePhase4.cpp
|
C++
|
bsd-2-clause
| 7,728 |
/*
* Copyright (c) 2008-2011 Juli Mallett. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <common/test.h>
#include <event/action.h>
// Fixture asserting that cancelling an Action runs the cancellation
// callback: the constructor creates and immediately cancels the action,
// and the destructor passes the test iff cancel() fired.
struct Cancelled {
	Test test_;
	bool cancelled_;
	Cancelled(TestGroup& g)
	: test_(g, "Cancellation does occur."),
	  cancelled_(false)
	{
		Action *a = cancellation(this, &Cancelled::cancel);
		a->cancel();
	}
	~Cancelled()
	{
		if (cancelled_)
			test_.pass();
	}
	// Cancellation callback; must run exactly once.
	void cancel(void)
	{
		ASSERT("/cancelled", !cancelled_);
		cancelled_ = true;
	}
};
// Fixture asserting the callback does NOT fire until cancel() is explicitly
// called: the constructor only creates the action; the destructor then
// cancels it and verifies the callback ran synchronously.
struct NotCancelled {
	Test test_;
	bool cancelled_;
	Action *action_;
	NotCancelled(TestGroup& g)
	: test_(g, "Cancellation does not occur."),
	  cancelled_(false),
	  action_(NULL)
	{
		action_ = cancellation(this, &NotCancelled::cancel);
	}
	~NotCancelled()
	{
		if (!cancelled_) {
			if (action_ != NULL) {
				action_->cancel();
				action_ = NULL;
				// cancel() is expected to have run by the time
				// Action::cancel() returns.
				ASSERT("/not/cancelled", cancelled_);
			}
		}
	}
	// Cancellation callback; marks the test passed when it finally runs.
	void cancel(void)
	{
		ASSERT("/not/cancelled", !cancelled_);
		cancelled_ = true;
		test_.pass();
	}
};
// Run both fixtures; each inner scope forces the fixture's destructor
// (where the pass/fail decision happens) before the TestGroup is torn down.
int
main(void)
{
	TestGroup g("/test/action/cancel1", "Action::cancel #1");
	{
		Cancelled _(g);
	}
	{
		NotCancelled _(g);
	}
}
|
diegows/wanproxy
|
event/test/action-cancel1/action-cancel1.cc
|
C++
|
bsd-2-clause
| 2,406 |
//
// Created by 王晓辰 on 15/10/2.
//
#include "test_dirname.h"
#include <ftxpath.h>
#include "tester.h"
// dirname() of a nested absolute path must drop only the final component.
bool test_dirname_path()
{
    const std::string input("/a/b/c/d");
    return ftx::path::dirname(input) == std::string("/a/b/c");
}
// A bare name with no separator has an empty dirname.
bool test_dirname_onename()
{
    return ftx::path::dirname(std::string("name")).empty();
}
// A relative file path keeps its leading directories as the dirname.
bool test_dirname_filepath()
{
    const std::string input("a/b/c/d.txt");
    return ftx::path::dirname(input) == std::string("a/b/c");
}
// Only the trailing slash is stripped: the dirname is the folder itself.
bool test_dirname_folderpath()
{
    const std::string input("a/b/c/folder/");
    return ftx::path::dirname(input) == std::string("a/b/c/folder");
}
// The filesystem root is its own dirname.
bool test_dirname_root()
{
    const std::string root("/");
    return ftx::path::dirname(root) == root;
}
// Run every dirname() case.  TEST_BOOL_TO_BOOL is a tester.h macro --
// presumably it logs the message and short-circuits on failure; confirm
// in tester.h.
bool test_dirname() {
    LOG_TEST_STRING("");
    TEST_BOOL_TO_BOOL(test_dirname_path(), "dir dirname failed");
    TEST_BOOL_TO_BOOL(test_dirname_onename(), "one name dirname failed");
    TEST_BOOL_TO_BOOL(test_dirname_filepath(), "file path dirname failed");
    TEST_BOOL_TO_BOOL(test_dirname_folderpath(), "folder path dirname failed");
    TEST_BOOL_TO_BOOL(test_dirname_root(), "root dirname failed");
    return true;
}
|
XiaochenFTX/ftxpath
|
test/test_dirname.cpp
|
C++
|
bsd-2-clause
| 1,239 |
/*
* Copyright (C) 2009 Google Inc. All rights reserved.
* Copyright (C) 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "RuntimeEnabledFeatures.h"
#include "DatabaseManager.h"
#include "MediaPlayer.h"
#include "SharedWorkerRepository.h"
#include "WebSocket.h"
#include <wtf/NeverDestroyed.h>
namespace WebCore {
// All flags start from the compiled-in defaults set here; embedders flip
// them at runtime via the setters declared in the header.  The #if blocks
// must mirror the conditional members in RuntimeEnabledFeatures.h.
RuntimeEnabledFeatures::RuntimeEnabledFeatures()
    : m_isLocalStorageEnabled(true)
    , m_isSessionStorageEnabled(true)
    , m_isWebkitNotificationsEnabled(false)
    , m_isApplicationCacheEnabled(true)
    , m_isDataTransferItemsEnabled(true)
    , m_isGeolocationEnabled(true)
    , m_isIndexedDBEnabled(false)
    , m_isTouchEnabled(true)
    , m_isDeviceMotionEnabled(true)
    , m_isDeviceOrientationEnabled(true)
    , m_isSpeechInputEnabled(true)
    , m_isCSSExclusionsEnabled(true)
    , m_isCSSShapesEnabled(true)
    , m_isCSSRegionsEnabled(false)
    , m_isCSSCompositingEnabled(false)
    , m_isLangAttributeAwareFormControlUIEnabled(false)
#if PLATFORM(IOS)
    , m_isPluginReplacementEnabled(true)
#else
    , m_isPluginReplacementEnabled(false)
#endif
#if ENABLE(SCRIPTED_SPEECH)
    , m_isScriptedSpeechEnabled(false)
#endif
#if ENABLE(MEDIA_STREAM)
    , m_isMediaStreamEnabled(true)
    , m_isPeerConnectionEnabled(true)
#endif
#if ENABLE(LEGACY_CSS_VENDOR_PREFIXES)
    , m_isLegacyCSSVendorPrefixesEnabled(false)
#endif
#if ENABLE(JAVASCRIPT_I18N_API)
    , m_isJavaScriptI18NAPIEnabled(false)
#endif
#if ENABLE(VIDEO_TRACK)
    , m_isVideoTrackEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_DATE)
    , m_isInputTypeDateEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_DATETIME_INCOMPLETE)
    , m_isInputTypeDateTimeEnabled(false)
#endif
#if ENABLE(INPUT_TYPE_DATETIMELOCAL)
    , m_isInputTypeDateTimeLocalEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_MONTH)
    , m_isInputTypeMonthEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_TIME)
    , m_isInputTypeTimeEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_WEEK)
    , m_isInputTypeWeekEnabled(true)
#endif
#if ENABLE(CSP_NEXT)
    , m_areExperimentalContentSecurityPolicyFeaturesEnabled(false)
#endif
#if ENABLE(FONT_LOAD_EVENTS)
    , m_isFontLoadEventsEnabled(false)
#endif
#if ENABLE(GAMEPAD)
    , m_areGamepadsEnabled(false)
#endif
{
}
// Process-wide singleton.  NeverDestroyed avoids static-destruction-order
// problems at process exit.
RuntimeEnabledFeatures& RuntimeEnabledFeatures::sharedFeatures()
{
    static NeverDestroyed<RuntimeEnabledFeatures> runtimeEnabledFeatures;
    return runtimeEnabledFeatures;
}
#if ENABLE(JAVASCRIPT_I18N_API)
bool RuntimeEnabledFeatures::javaScriptI18NAPIEnabled()
{
    return m_isJavaScriptI18NAPIEnabled;
}
#endif
#if ENABLE(VIDEO)
// The media-related DOM interfaces below are exposed only when a media
// engine is actually available on this platform/build.
bool RuntimeEnabledFeatures::audioEnabled() const
{
    return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::htmlMediaElementEnabled() const
{
    return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::htmlAudioElementEnabled() const
{
    return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::htmlVideoElementEnabled() const
{
    return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::htmlSourceElementEnabled() const
{
    return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::mediaControllerEnabled() const
{
    return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::mediaErrorEnabled() const
{
    return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::timeRangesEnabled() const
{
    return MediaPlayer::isAvailable();
}
#endif
#if ENABLE(SHARED_WORKERS)
bool RuntimeEnabledFeatures::sharedWorkerEnabled() const
{
    return SharedWorkerRepository::isAvailable();
}
#endif
#if ENABLE(WEB_SOCKETS)
bool RuntimeEnabledFeatures::webSocketEnabled() const
{
    return WebSocket::isAvailable();
}
#endif
} // namespace WebCore
|
aosm/WebCore
|
bindings/generic/RuntimeEnabledFeatures.cpp
|
C++
|
bsd-2-clause
| 5,200 |
package org.joshy.gfx.test.itunes;
/**
 * Value holder for a single track row in the iTunes-style demo UI:
 * track numbering, display metadata, and duration.
 */
class Song {
    // Position of the track on its album, and the album's total track count.
    int trackNumber;
    int totalTracks;
    // Display metadata shown in the track list.
    String name;
    String album;
    String artist;
    // Track length; the unit is whatever the caller supplies (not visible here).
    int duration;

    public Song(int trackNumber, int totalTracks, String name, String album, String artist, int duration) {
        this.trackNumber = trackNumber;
        this.totalTracks = totalTracks;
        this.duration = duration;
        this.name = name;
        this.album = album;
        this.artist = artist;
    }
}
|
tonykwok/leonardosketch.amino
|
src/org/joshy/gfx/test/itunes/Song.java
|
Java
|
bsd-2-clause
| 622 |
# Homebrew formula installing the DNAnexus Python toolkit (dx CLI) into an
# isolated virtualenv with pinned, vendored Python dependencies.
class Dxpy < Formula
  include Language::Python::Virtualenv
  desc "DNAnexus toolkit utilities and platform API bindings for Python"
  homepage "https://github.com/dnanexus/dx-toolkit"
  url "https://files.pythonhosted.org/packages/7e/d8/9529a045270fe2cee67c01fde759864b9177ecdd486d016c3a38863f3895/dxpy-0.320.0.tar.gz"
  sha256 "aef4c16d73cf9e7513d1f8e503f7e0d3ed7f2135fe6f8596a97196a8df109977"
  license "Apache-2.0"
  bottle do
    sha256 cellar: :any,                 arm64_monterey: "f833c2a2b486b3a54ba1b4f5c205aa73dc815b498053bfe1197a7c497a6e4646"
    sha256 cellar: :any,                 arm64_big_sur:  "984872c26ab277ba25764029bea2df0bb697452e589c537cc6c2fbec77fb463e"
    sha256 cellar: :any,                 monterey:       "90709e2bff817faabb7ab6e3a726bde68dadbc33bc4ef9e830154f8d5b852b4b"
    sha256 cellar: :any,                 big_sur:        "3a063ce13281a975cfc03a14b58ddd33b8ceb5dcc851c6f5f8e01e7d4c4d5fdf"
    sha256 cellar: :any,                 catalina:       "56f0159e7a3193f3baa3c6710a70ee2d141d74fb46b9b9aa6128149240d6a603"
    sha256 cellar: :any_skip_relocation, x86_64_linux:   "c6c14d2acbe34e5763b3b6352af63d3252655b479ae5cefe556dbbe51720e6c4"
  end
  depends_on "rust" => :build # for cryptography
  depends_on "[email protected]"
  depends_on "six"
  on_macos do
    depends_on "readline"
  end
  on_linux do
    depends_on "pkg-config" => :build
    depends_on "libffi"
  end
  # Pinned Python dependencies vendored into the virtualenv below.
  resource "argcomplete" do
    url "https://files.pythonhosted.org/packages/05/f8/67851ae4fe5396ba6868c5d84219b81ea6a5d53991a6853616095c30adc0/argcomplete-2.0.0.tar.gz"
    sha256 "6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20"
  end
  resource "certifi" do
    url "https://files.pythonhosted.org/packages/6c/ae/d26450834f0acc9e3d1f74508da6df1551ceab6c2ce0766a593362d6d57f/certifi-2021.10.8.tar.gz"
    sha256 "78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"
  end
  resource "cffi" do
    url "https://files.pythonhosted.org/packages/00/9e/92de7e1217ccc3d5f352ba21e52398372525765b2e0c4530e6eb2ba9282a/cffi-1.15.0.tar.gz"
    sha256 "920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"
  end
  resource "charset-normalizer" do
    url "https://files.pythonhosted.org/packages/e8/e8/b6cfd28fb430b2ec9923ad0147025bf8bbdf304b1eb3039b69f1ce44ed6e/charset-normalizer-2.0.11.tar.gz"
    sha256 "98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"
  end
  resource "cryptography" do
    url "https://files.pythonhosted.org/packages/f9/4b/1cf8e281f7ae4046a59e5e39dd7471d46db9f61bb564fddbff9084c4334f/cryptography-36.0.1.tar.gz"
    sha256 "53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638"
  end
  resource "idna" do
    url "https://files.pythonhosted.org/packages/62/08/e3fc7c8161090f742f504f40b1bccbfc544d4a4e09eb774bf40aafce5436/idna-3.3.tar.gz"
    sha256 "9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
  end
  resource "psutil" do
    url "https://files.pythonhosted.org/packages/47/b6/ea8a7728f096a597f0032564e8013b705aa992a0990becd773dcc4d7b4a7/psutil-5.9.0.tar.gz"
    sha256 "869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"
  end
  resource "pycparser" do
    url "https://files.pythonhosted.org/packages/5e/0b/95d387f5f4433cb0f53ff7ad859bd2c6051051cebbb564f139a999ab46de/pycparser-2.21.tar.gz"
    sha256 "e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
  end
  resource "python-dateutil" do
    url "https://files.pythonhosted.org/packages/4c/c4/13b4776ea2d76c115c1d1b84579f3764ee6d57204f6be27119f13a61d0a9/python-dateutil-2.8.2.tar.gz"
    sha256 "0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"
  end
  resource "requests" do
    url "https://files.pythonhosted.org/packages/e7/01/3569e0b535fb2e4a6c384bdbed00c55b9d78b5084e0fb7f4d0bf523d7670/requests-2.26.0.tar.gz"
    sha256 "b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"
  end
  resource "urllib3" do
    url "https://files.pythonhosted.org/packages/b0/b1/7bbf5181f8e3258efae31702f5eab87d8a74a72a0aa78bc8c08c1466e243/urllib3-1.26.8.tar.gz"
    sha256 "0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"
  end
  resource "websocket-client" do
    url "https://files.pythonhosted.org/packages/8b/0f/52de51b9b450ed52694208ab952d5af6ebbcbce7f166a48784095d930d8c/websocket_client-0.57.0.tar.gz"
    sha256 "d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010"
  end
  def install
    virtualenv_install_with_resources
  end
  # Smoke test: a fresh install with no config should report the default
  # API endpoint and no logged-in user/workspace.
  test do
    dxenv = <<~EOS
      API server protocol	https
      API server host		api.dnanexus.com
      API server port		443
      Current workspace	None
      Current folder	None
      Current user		None
    EOS
    assert_match dxenv, shell_output("#{bin}/dx env")
  end
end
|
filcab/homebrew-core
|
Formula/dxpy.rb
|
Ruby
|
bsd-2-clause
| 4,787 |
/* global define */
define([
'jquery',
'underscore',
'./dist',
'./axis'
], function($, _, dist, axis) {
var EditableFieldChart = dist.FieldChart.extend({
template: 'charts/editable-chart',
toolbarAnimationTime: 200,
formAnimationTime: 300,
events: _.extend({
'click .fullsize': 'toggleExpanded'
}, dist.FieldChart.prototype.events),
ui: _.extend({
toolbar: '.btn-toolbar',
fullsizeToggle: '.fullsize',
form: '.editable',
xAxis: '[name=x-Axis]',
yAxis: '[name=y-Axis]',
series: '[name=series]'
}, dist.FieldChart.prototype.ui),
onRender: function() {
if (this.options.editable === false) {
this.ui.form.detach();
this.ui.toolbar.detach();
}
else {
this.xAxis = new axis.FieldAxis({
el: this.ui.xAxis,
collection: this.collection
});
this.yAxis = new axis.FieldAxis({
el: this.ui.yAxis,
collection: this.collection
});
this.series = new axis.FieldAxis({
el: this.ui.series,
enumerableOnly: true,
collection: this.collection
});
if (this.model) {
if (this.model.get('xAxis')) {
this.ui.form.hide();
}
if (this.model.get('expanded')) {
this.expand();
}
else {
this.contract();
}
}
}
},
customizeOptions: function(options) {
this.ui.status.detach();
this.ui.heading.text(options.title.text);
options.title.text = '';
// Check if any data is present.
if (!options.series[0]) {
this.ui.chart.html('<p class=no-data>Unfortunately, there is no ' +
'data to graph here.</p>');
return;
}
this.ui.form.hide();
var statusText = [];
if (options.clustered) {
statusText.push('Clustered');
}
if (statusText[0]) {
this.ui.status.text(statusText.join(', ')).show();
this.ui.heading.append(this.$status);
}
if (this.interactive(options)) {
this.enableChartEvents();
}
$.extend(true, options, this.chartOptions);
options.chart.renderTo = this.ui.chart[0];
return options;
},
// Ensure rapid successions of this method do not occur.
changeChart: function(event) {
if (event) {
event.preventDefault();
}
var _this = this;
this.collection.when(function() {
var xAxis, yAxis, series, seriesIdx;
// TODO fix this nonsense
if (event === null || typeof event === 'undefined') {
xAxis = _this.model.get('xAxis');
if (xAxis) {
_this.xAxis.$el.val(xAxis.toString());
}
yAxis = _this.model.get('yAxis');
if (yAxis) {
_this.yAxis.$el.val(yAxis.toString());
}
series = _this.model.get('series');
if (series) {
this.series.$el.val(series.toString());
}
}
xAxis = _this.xAxis.getSelected();
yAxis = _this.yAxis.getSelected();
series = _this.series.getSelected();
if (!xAxis) return;
var url = _this.model.links.distribution;
var fields = [xAxis];
var data = 'dimension=' + xAxis.id;
if (yAxis) {
fields.push(yAxis);
data = data + '&dimension=' + yAxis.id;
}
if (series) {
if (yAxis) {
seriesIdx = 2;
}
else {
seriesIdx = 1;
}
data = data + '&dimension=' + series.id;
}
if (event && _this.model) {
_this.model.set({
xAxis: xAxis.id,
yAxis: (yAxis) ? yAxis.id : null,
series: (series) ? series.id : null
});
}
_this.update(url, data, fields, seriesIdx);
});
},
// Disable selected fields since using the same field for multiple
// axes doesn't make sense.
disableSelected: function(event) {
var $target = $(event.target);
// Changed to an empty value, unhide other dropdowns.
if (this.xAxis.el === event.target) {
this.yAxis.$('option').prop('disabled', false);
this.series.$('option').prop('disabled', false);
}
else if (this.yAxis.el === event.target) {
this.xAxis.$('option').prop('disabled', false);
this.series.$('option').prop('disabled', false);
}
else {
this.xAxis.$('option').prop('disabled', false);
this.yAxis.$('option').prop('disabled', false);
}
var value = $target.val();
if (value !== '') {
if (this.xAxis.el === event.target) {
this.yAxis.$('option[value=' + value + ']')
.prop('disabled', true).val('');
this.series.$('option[value=' + value + ']')
.prop('disabled', true).val('');
}
else if (this.yAxis.el === event.target) {
this.xAxis.$('option[value=' + value + ']')
.prop('disable', true).val('');
this.series.$('option[value=' + value + ']')
.prop('disable', true).val('');
}
else {
this.xAxis.$('option[value=' + value + ']')
.prop('disable', true).val('');
this.yAxis.$('option[value=' + value + ']')
.prop('disable', true).val('');
}
}
},
toggleExpanded: function() {
var expanded = this.model.get('expanded');
if (expanded) {
this.contract();
}
else {
this.expand();
}
this.model.save({
expanded: !expanded
});
},
resize: function() {
var chartWidth = this.ui.chart.width();
if (this.chart) {
this.chart.setSize(chartWidth, null, false);
}
},
expand: function() {
this.$fullsizeToggle.children('i')
.removeClass('icon-resize-small')
.addClass('icon-resize-full');
this.$el.addClass('expanded');
this.resize();
},
contract: function() {
this.$fullsizeToggle.children('i')
.removeClass('icon-resize-full')
.addClass('icon-resize-small');
this.$el.removeClass('expanded');
this.resize();
},
hideToolbar: function() {
this.ui.toolbar.fadeOut(this.toolbarAnimationTime);
},
showToolbar: function() {
this.ui.toolbar.fadeIn(this.toolbarAnimationTime);
},
toggleEdit: function() {
if (this.ui.form.is(':visible')) {
this.ui.form.fadeOut(this.formAnimationTime);
}
else {
this.ui.form.fadeIn(this.formAnimationTime);
}
}
});
return {
EditableFieldChart: EditableFieldChart
};
});
|
chop-dbhi/cilantro
|
src/js/cilantro/ui/charts/editable.js
|
JavaScript
|
bsd-2-clause
| 8,396 |
# frozen_string_literal: true

#
# Loads every built-in byebug command in one place, so consumers only
# need to require this single file to have all commands registered.
# Entries are kept in alphabetical order.
#
require_relative "commands/break"
require_relative "commands/catch"
require_relative "commands/condition"
require_relative "commands/continue"
require_relative "commands/debug"
require_relative "commands/delete"
require_relative "commands/disable"
require_relative "commands/display"
require_relative "commands/down"
require_relative "commands/edit"
require_relative "commands/enable"
require_relative "commands/finish"
require_relative "commands/frame"
require_relative "commands/help"
require_relative "commands/history"
require_relative "commands/info"
require_relative "commands/interrupt"
require_relative "commands/irb"
require_relative "commands/kill"
require_relative "commands/list"
require_relative "commands/method"
require_relative "commands/next"
require_relative "commands/pry"
require_relative "commands/quit"
require_relative "commands/restart"
require_relative "commands/save"
require_relative "commands/set"
require_relative "commands/show"
require_relative "commands/skip"
require_relative "commands/source"
require_relative "commands/step"
require_relative "commands/thread"
require_relative "commands/tracevar"
require_relative "commands/undisplay"
require_relative "commands/untracevar"
require_relative "commands/up"
require_relative "commands/var"
require_relative "commands/where"
|
deivid-rodriguez/byebug
|
lib/byebug/commands.rb
|
Ruby
|
bsd-2-clause
| 1,337 |
#!/usr/bin/env python3
# Example: join several CAN databases into a single target matrix by
# matching frames on their signals' start bits.

import canmatrix.formats
from canmatrix.join import join_frame_by_signal_start_bit

# Input databases to join.
files = ["../test/db_B.dbc", "../test/db_A.dbc"]

target = join_frame_by_signal_start_bit(files)

#
# export the new (target)-Matrix for example as .dbc:
#
# NOTE(review): dumpp() is canmatrix's "dump plural" entry point that
# expects a dict of matrices — confirm join_frame_by_signal_start_bit
# returns that shape rather than a single CanMatrix (which would need
# canmatrix.formats.dump()).
canmatrix.formats.dumpp(target, "target.dbc")
canmatrix.formats.dumpp(target, "target.xlsx")
|
ebroecker/canmatrix
|
examples/exampleJoin.py
|
Python
|
bsd-2-clause
| 357 |
// Registry of WMSC schema test modules, keyed by schema version.
var mappings = {
    WMSC: {
        WMSC_1_1_1: require('./1.1.1/WMSC_1_1_1')
    }
};

module.exports = mappings;
|
juanrapoport/ogc-schemas
|
scripts/tests/WMSC/WMSC.js
|
JavaScript
|
bsd-2-clause
| 84 |
package cz.metacentrum.perun.core.bl;

import cz.metacentrum.perun.core.api.Attribute;
import cz.metacentrum.perun.core.api.Member;
import cz.metacentrum.perun.core.api.PerunSession;
import cz.metacentrum.perun.core.api.User;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException;

import java.util.Calendar;
import java.util.List;
import java.util.Map;

/**
 * Business-logic interface for searching Perun entities (users, members)
 * by a map of attribute names and expected values.
 *
 * @author Michal Stava &lt;[email protected]&gt;
 */
public interface SearcherBl {

	/**
	 * Finds all users whose attributes match every entry of the given map
	 * (logical AND across all entries). Matching depends on the attribute type:
	 * <ul>
	 * <li>String: the value must match exactly (partial matching via the
	 *     symbol '*' is not supported yet, but may be added later)</li>
	 * <li>Integer: the value is an integer written as a String; exact match required</li>
	 * <li>List of String: at least one list element must match the value,
	 *     either fully or partially</li>
	 * <li>Map: a value of the form "key=value" requires an exact match of both
	 *     key and value; a bare "key" requires an exact match of the key only.
	 *     IMPORTANT: the character '=' is not allowed in keys, because the
	 *     first '=' delimits the key from the value.</li>
	 * </ul>
	 *
	 * @param sess perun session
	 * @param attributesWithSearchingValues map of attribute names to searched values (rules above)
	 * @return users whose attributes match all of the given values;
	 *         an empty list when no user matches
	 *
	 * @throws AttributeNotExistsException when a named attribute does not exist
	 * @throws InternalErrorException
	 * @throws WrongAttributeAssignmentException
	 */
	List<User> getUsers(PerunSession sess, Map<String, String> attributesWithSearchingValues) throws InternalErrorException, AttributeNotExistsException, WrongAttributeAssignmentException;

	/**
	 * Finds all users whose core attributes exactly match every entry of
	 * the given map.
	 *
	 * @param sess perun session
	 * @param coreAttributesWithSearchingValues map of core attribute names to searched values
	 * @return users matching all of the given core attribute values
	 * @throws InternalErrorException
	 * @throws AttributeNotExistsException
	 * @throws WrongAttributeAssignmentException
	 */
	List<User> getUsersForCoreAttributes(PerunSession sess, Map<String, String> coreAttributesWithSearchingValues) throws InternalErrorException, AttributeNotExistsException, WrongAttributeAssignmentException;

	/**
	 * Returns members whose membership expiration date falls on today +/- X days,
	 * compared using the given operator (defaults to "=", i.e. an exact match).
	 * For example, all already-expired members (including today) can be fetched
	 * with "&lt;=" and a zero-day shift, or with "&lt;" and a +1 day shift.
	 *
	 * Method ignores current member state, just compares expiration date !
	 *
	 * @param sess PerunSession
	 * @param operator One of "=", "&lt;", "&gt;", "&lt;=", "&gt;=". If null, "=" is anticipated.
	 * @param days X days before/after today
	 * @return Members with expiration relative to method params.
	 * @throws InternalErrorException
	 */
	List<Member> getMembersByExpiration(PerunSession sess, String operator, int days) throws InternalErrorException;

	/**
	 * Returns members whose membership expiration date falls on the given date,
	 * compared using the given operator (defaults to "=", i.e. an exact match).
	 * For example, all already-expired members (including today) can be fetched
	 * with "&lt;=" and today's date, or with "&lt;" and tomorrow's date.
	 *
	 * Method ignores current member state, just compares expiration date !
	 *
	 * @param sess PerunSession
	 * @param operator One of "=", "&lt;", "&gt;", "&lt;=", "&gt;=". If null, "=" is anticipated.
	 * @param date Date to compare expiration with (if null, current date is used).
	 * @return Members with expiration relative to method params.
	 * @throws InternalErrorException
	 */
	List<Member> getMembersByExpiration(PerunSession sess, String operator, Calendar date) throws InternalErrorException;
}
|
jirmauritz/perun
|
perun-core/src/main/java/cz/metacentrum/perun/core/bl/SearcherBl.java
|
Java
|
bsd-2-clause
| 4,338 |
cask "aleo-studio" do
  version "0.15.2"
  sha256 "ac33308a0ae210cb23cd90b67d92ccba02245d869a9be8477050e08b41a6c084"

  # Download is hosted on DigitalOcean Spaces, so the domain differs from
  # the homepage and must be explicitly marked as verified.
  url "https://aleo-studio-releases.sfo2.digitaloceanspaces.com/latest/macos/x64/Aleo%20Studio-#{version}-mac.zip",
      verified: "aleo-studio-releases.sfo2.digitaloceanspaces.com/"
  name "Aleo Studio"
  desc "IDE for zero-knowledge proofs"
  homepage "https://aleo.studio/"

  # New versions are discovered from the electron-builder update manifest.
  livecheck do
    url "https://aleo-studio-releases.sfo2.digitaloceanspaces.com/latest/macos/x64/latest-mac.yml"
    strategy :electron_builder
  end

  depends_on macos: ">= :high_sierra"

  app "Aleo Studio.app"

  # Leftover per-user state removed by `brew uninstall --zap`.
  zap trash: [
    "~/.aleo-studio",
    "~/Library/Application Support/aleo-studio",
  ]
end
|
scottsuch/homebrew-cask
|
Casks/aleo-studio.rb
|
Ruby
|
bsd-2-clause
| 707 |
<?php
/**
 * Grid controller listing the editable items of a menu component,
 * ordered by their position.
 */
class Kwc_Menu_EditableItems_Controller extends Kwf_Controller_Action_Auto_Kwc_Grid
{
    protected $_buttons = array(); // no toolbar buttons for this grid
    protected $_model = 'Kwc_Menu_EditableItems_Model';
    protected $_defaultOrder = array('field' => 'pos', 'direction' => 'ASC');

    // Columns: item position plus the linked page's name.
    protected function _initColumns()
    {
        $this->_columns->add(new Kwf_Grid_Column('pos'));
        $this->_columns->add(new Kwf_Grid_Column('name', trlKwf('Page name'), 200));
    }

    // Restricts rows to the items of the requested parent component.
    // NOTE(review): 'ignore_visible' appears to be a select flag telling the
    // model to include hidden pages rather than a real column — confirm
    // against Kwc_Menu_EditableItems_Model.
    protected function _getSelect()
    {
        $ret = parent::_getSelect();
        $ret->whereEquals('parent_component_id', $this->_getParam('componentId'));
        $ret->whereEquals('ignore_visible', true);
        return $ret;
    }
}
|
fraxachun/koala-framework
|
Kwc/Menu/EditableItems/Controller.php
|
PHP
|
bsd-2-clause
| 698 |
class Wimlib < Formula
  desc "Library to create, extract, and modify Windows Imaging files"
  homepage "https://wimlib.net/"
  url "https://wimlib.net/downloads/wimlib-1.13.1.tar.gz"
  sha256 "47f4bc645c1b6ee15068d406a90bb38aec816354e140291ccb01e536f2cdaf5f"

  bottle do
    cellar :any
    rebuild 1
    sha256 "ea449f8e0aeea806e5925974a0a3f8a04ac256a5a44edc858272a57c5f88814d" => :catalina
    sha256 "7969f20ce9f26b7435b4242fb241c2527848581469be0cad09a3f5de77b11a05" => :mojave
    sha256 "33a3397f536e339ca4177d3639b55e223040883af9d5afbbb47cc3e9b1bb87e9" => :high_sierra
    sha256 "66a39e7eaa96a26f988a0c6eba0ad614ca449b0bb5688ebd70830f8863da5244" => :sierra
  end

  depends_on "pkg-config" => :build
  depends_on "[email protected]"

  def install
    # fuse requires librt, unavailable on OSX
    args = %W[
      --disable-debug
      --disable-dependency-tracking
      --disable-silent-rules
      --prefix=#{prefix}
      --without-fuse
      --without-ntfs-3g
    ]

    system "./configure", *args
    system "make", "install"
  end

  test do
    # make a directory containing a dummy 1M file
    mkdir("foo")
    system "dd", "if=/dev/random", "of=foo/bar", "bs=1m", "count=1"

    # capture an image
    # (force UTF-8 output so wimcapture behaves consistently in the sandbox)
    ENV.append "WIMLIB_IMAGEX_USE_UTF8", "1"
    system "#{bin}/wimcapture", "foo", "bar.wim"
    assert_predicate testpath/"bar.wim", :exist?

    # get info on the image
    system "#{bin}/wiminfo", "bar.wim"
  end
end
|
wolffaxn/homebrew-core
|
Formula/wimlib.rb
|
Ruby
|
bsd-2-clause
| 1,434 |
#include "hamiltonian/Hamiltonian.hpp"

namespace cpb {
namespace {

// Visitor functors applied to the scalar-type variant held inside
// `Hamiltonian`. Each forwards a simple query to the concrete sparse
// matrix regardless of which scalar type the variant currently holds.

// True when the variant holds a non-null matrix pointer.
struct IsValid {
    template<class scalar_t>
    bool operator()(SparseMatrixRC<scalar_t> const& p) const { return p != nullptr; }
};

// Releases the held matrix (resets the shared pointer).
struct Reset {
    template<class scalar_t>
    void operator()(SparseMatrixRC<scalar_t>& p) const { p.reset(); }
};

// Produces a type-erased CSR reference to the held matrix.
struct GetSparseRef {
    template<class scalar_t>
    ComplexCsrConstRef operator()(SparseMatrixRC<scalar_t> const& m) const { return csrref(*m); }
};

// Number of stored (non-zero) matrix entries.
struct NonZeros {
    template<class scalar_t>
    idx_t operator()(SparseMatrixRC<scalar_t> const& m) const { return m->nonZeros(); }
};

struct Rows {
    template<class scalar_t>
    idx_t operator()(SparseMatrixRC<scalar_t> const& m) const { return m->rows(); }
};

struct Cols {
    template<class scalar_t>
    idx_t operator()(SparseMatrixRC<scalar_t> const& m) const { return m->cols(); }
};

} // namespace

// Hamiltonian is a thin facade: each member dispatches the matching
// visitor over the scalar-type variant.

Hamiltonian::operator bool() const {
    return var::apply_visitor(IsValid(), variant_matrix);
}

void Hamiltonian::reset() {
    return var::apply_visitor(Reset(), variant_matrix);
}

ComplexCsrConstRef Hamiltonian::csrref() const {
    return var::apply_visitor(GetSparseRef(), variant_matrix);
}

idx_t Hamiltonian::non_zeros() const {
    return var::apply_visitor(NonZeros(), variant_matrix);
}

idx_t Hamiltonian::rows() const {
    return var::apply_visitor(Rows(), variant_matrix);
}

idx_t Hamiltonian::cols() const {
    return var::apply_visitor(Cols(), variant_matrix);
}

} // namespace cpb
|
dean0x7d/pybinding
|
cppcore/src/hamiltonian/Hamiltonian.cpp
|
C++
|
bsd-2-clause
| 1,497 |
@extends('skins.bootstrap.admin.layout')

@section('module')

{{-- Admin "Site settings" screen: one form split across three tabs
     (General / Content / Banners); everything is saved together by the
     single submit button at the bottom of the fieldset. --}}
<section id="admin-site">
	{{
		Form::open(array(
			'autocomplete' => 'off',
			'role' => 'form',
			'class' => 'form-horizontal',
		))
	}}

	<div class="row">
		<div class="col-sm-12">
			<fieldset>
				<legend>{{ Lang::get('admin.site_settings') }}</legend>

				{{-- Tab navigation; each anchor targets one of the
				     tab-panes defined below. --}}
				<ul id="tabs-site" class="nav nav-tabs">
					<li class="active">
						<a href="#site-general" data-toggle="tab">{{ Lang::get('admin.general') }}</a>
					</li>
					<li>
						<a href="#site-content" data-toggle="tab">{{ Lang::get('admin.content') }}</a>
					</li>
					<li>
						<a href="#site-banners" data-toggle="tab">{{ Lang::get('admin.banners') }}</a>
					</li>
				</ul>
<div class="tab-content">
<div id="site-general" class="tab-pane fade in active">
<div class="form-group">
{{
Form::label('fqdn', Lang::get('admin.fqdn'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::text('fqdn', $site->general->fqdn, array(
'class' => 'form-control',
))
}}
<div class="help-block">
{{ Lang::get('admin.fqdn_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('title', Lang::get('admin.site_title'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::text('title', $site->general->title, array(
'class' => 'form-control',
'maxlength' => 20
))
}}
</div>
</div>
<div class="form-group">
{{
Form::label('lang', Lang::get('admin.language'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('lang', $langs, $site->general->lang, array(
'class' => 'form-control'
))
}}
</div>
</div>
<div class="form-group">
{{
Form::label('copyright', Lang::get('admin.copyright'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::textarea('copyright', $site->general->copyright, array(
'class' => 'form-control',
'rows' => 4,
))
}}
<div class="help-block">
{{ Lang::get('admin.copyright_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('ajax_nav', Lang::get('admin.ajax_nav'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('ajax_nav', array(
'1' => Lang::get('admin.enabled'),
'0' => Lang::get('admin.disabled'),
), $site->general->ajaxNav, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.ajax_nav_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('per_page', Lang::get('admin.list_length'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::text('per_page', $site->general->perPage, array(
'class' => 'form-control',
))
}}
<div class="help-block">
{{ Lang::get('admin.list_length_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('proxy', Lang::get('admin.ip_tracking'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('proxy', array(
'0' => Lang::get('admin.ignore_proxy'),
'1' => Lang::get('admin.trust_proxy'),
), $site->general->proxy, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.ip_tracking_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('csrf', Lang::get('admin.csrf_token'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('csrf', array(
'1' => Lang::get('admin.enabled'),
'0' => Lang::get('admin.disabled'),
), $site->general->csrf, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.csrf_token_exp') }}
</div>
</div>
</div>
</div>
<div id="site-content" class="tab-pane fade">
<div class="form-group">
{{
Form::label('guest_posts', Lang::get('admin.guest_posts'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('guest_posts', array(
'1' => Lang::get('admin.enabled'),
'0' => Lang::get('admin.disabled'),
), $site->general->guestPosts, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.guest_posts_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('paste_visibility', Lang::get('admin.visibility'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('paste_visibility', array(
'default' => Lang::get('admin.allow_all'),
'public' => Lang::get('admin.enforce_public'),
'private' => Lang::get('admin.enforce_private'),
), $site->general->pasteVisibility, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.visibility_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('flag_paste', Lang::get('admin.flagging'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('flag_paste', array(
'all' => Lang::get('admin.flag_all'),
'user' => Lang::get('admin.flag_user'),
'off' => Lang::get('admin.flag_off'),
), $site->general->flagPaste, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.flagging_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('allow_paste_del', Lang::get('admin.delete_pastes'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('allow_paste_del', array(
'1' => Lang::get('admin.enabled'),
'0' => Lang::get('admin.disabled'),
), $site->general->allowPasteDel, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.delete_pastes_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('allow_attachment', Lang::get('admin.attachment'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('allow_attachment', array(
'1' => Lang::get('admin.enabled'),
'0' => Lang::get('admin.disabled'),
), $site->general->allowAttachment, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.attachment_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('paste_age', Lang::get('admin.paste_age'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('paste_age', Paste::getExpiration('admin'), $site->general->pasteAge, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.paste_age_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('max_paste_size', Lang::get('admin.size_limit'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
<div class="input-group">
{{
Form::text('max_paste_size', $site->general->maxPasteSize, array(
'class' => 'form-control',
))
}}
<div class="input-group-addon">
{{ Lang::get('admin.bytes') }}
</div>
</div>
<div class="help-block">
{{ Lang::get('admin.size_limit_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('no_expire', Lang::get('admin.expiration'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('no_expire', array(
'none' => Lang::get('admin.noexpire_none'),
'user' => Lang::get('admin.noexpire_user'),
'all' => Lang::get('admin.noexpire_all'),
), $site->general->noExpire, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.expiration_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('show_exp', Lang::get('admin.show_exp'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('show_exp', array(
'1' => Lang::get('admin.enabled'),
'0' => Lang::get('admin.disabled'),
), $site->general->showExp, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.show_exp_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('paste_search', Lang::get('admin.paste_search'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('paste_search', array(
'1' => Lang::get('admin.enabled'),
'0' => Lang::get('admin.disabled'),
), $site->general->pasteSearch, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.paste_search_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('comments', Lang::get('global.comments'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('comments', array(
'1' => Lang::get('admin.enabled'),
'0' => Lang::get('admin.disabled'),
), $site->general->comments, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.comments_exp') }}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('share', Lang::get('admin.share'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::select('share', array(
'1' => Lang::get('admin.enabled'),
'0' => Lang::get('admin.disabled'),
), $site->general->share, array(
'class' => 'form-control'
))
}}
<div class="help-block">
{{ Lang::get('admin.share_exp') }}
</div>
</div>
</div>
</div>
<div id="site-banners" class="tab-pane fade">
<div class="row">
<div class="col-sm-12">
<div class="alert alert-info">
{{ Lang::get('admin.banners_exp') }}
</div>
<div class="alert alert-success">
{{{ sprintf(Lang::get('admin.allowed_tags'), $site->general->allowedTags) }}}
</div>
</div>
</div>
<div class="form-group">
{{
Form::label('banner_top', Lang::get('admin.banner_top'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::textarea('banner_top', $site->general->bannerTop, array(
'class' => 'form-control',
'rows' => 5,
))
}}
</div>
</div>
<div class="form-group">
{{
Form::label('banner_bottom', Lang::get('admin.banner_bottom'), array(
'class' => 'control-label col-sm-3 col-lg-2'
))
}}
<div class="col-sm-9 col-lg-10">
{{
Form::textarea('banner_bottom', $site->general->bannerBottom, array(
'class' => 'form-control',
'rows' => 5,
))
}}
</div>
</div>
</div>
</div>
				<hr />

				{{-- A single submit button persists the settings from all
				     three tabs at once. --}}
				<div class="form-group">
					<div class="col-sm-12">
						{{
							Form::submit(Lang::get('admin.save_all'), array(
								'name' => '_save',
								'class' => 'btn btn-primary'
							))
						}}
					</div>
				</div>
			</fieldset>
		</div>
	</div>

	{{ Form::close() }}
</section>
@stop
|
solitaryr/sticky-notes
|
app/views/skins/bootstrap/admin/site.blade.php
|
PHP
|
bsd-2-clause
| 14,466 |
Impact of disabling ICMP redirects (i.e. re-enabling fastforward) on forwarding performance
- Intel Xeon CPU E5-2697A v4 @ 2.60GHz (16c, 32t)
- Chelsio 40-Gigabit T580-LP-CR (QSFP+ 40GBASE-SR4 (MPO 1x12 Parallel Optic))
- FreeBSD 13-HEAD r354862 (19/11/2019)
- 5000 flows of smallest UDP packets (1 byte payload)
```
x net.inet.ip.redirect=1 (default): Number of inet4 packets-per-second forwarded
+ net.inet.ip.redirect=0: Number of inet4 pps forwarded
+--------------------------------------------------------------------------+
|x +|
|xx ++|
|xx ++|
|MA |
| |A|
+--------------------------------------------------------------------------+
N Min Max Median Avg Stddev
x 5 2199966.5 2309062 2210097 2230250.6 45711.484
+ 5 8211578 8259515.5 8244041 8235045.2 20946.73
Difference at 95.0% confidence
6.00479e+06 +/- 51854.8
269.243% +/- 7.86461%
(Student's t, pooled s = 35554.9)
```
|
ocochard/netbenches
|
Xeon_E5-2697Av4_16Cores-Chelsio_T580/ip.redirect/results/13-r354862/README.md
|
Markdown
|
bsd-2-clause
| 1,366 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Chrome-specific IPC messages for extensions.
// Extension-related messages that aren't specific to Chrome live in
// extensions/common/extension_messages.h.
//
// Multiply-included message file, hence no include guard.
#include <stdint.h>
#include <string>
#include "base/strings/string16.h"
#include "base/values.h"
#include "chrome/common/extensions/api/automation_internal.h"
#include "chrome/common/extensions/api/webstore/webstore_api_constants.h"
#include "chrome/common/extensions/webstore_install_result.h"
#include "extensions/common/stack_frame.h"
#include "ipc/ipc_message_macros.h"
#include "ui/accessibility/ax_enums.h"
#include "ui/accessibility/ax_node_data.h"
#include "ui/accessibility/ax_relative_bounds.h"
#include "ui/accessibility/ax_tree_data.h"
#include "ui/accessibility/ax_tree_update.h"
#include "ui/gfx/transform.h"
#include "url/gurl.h"
#define IPC_MESSAGE_START ChromeExtensionMsgStart

// Upper bounds used by the IPC param traits to validate enum values
// received over the wire from a (potentially compromised) process.
IPC_ENUM_TRAITS_MAX_VALUE(extensions::api::webstore::InstallStage,
                          extensions::api::webstore::INSTALL_STAGE_INSTALLING)
IPC_ENUM_TRAITS_MAX_VALUE(extensions::webstore_install::Result,
                          extensions::webstore_install::RESULT_LAST)

// Messages sent from the browser to the renderer.

// Toggles visual muting of the render view area. This is on when a constrained
// window is showing.
IPC_MESSAGE_ROUTED1(ChromeViewMsg_SetVisuallyDeemphasized,
                    bool /* deemphasized */)

// Sent to the renderer if install stage updates were requested for an inline
// install.
IPC_MESSAGE_ROUTED1(ExtensionMsg_InlineInstallStageChanged,
                    extensions::api::webstore::InstallStage /* stage */)

// Sent to the renderer if download progress updates were requested for an
// inline install.
IPC_MESSAGE_ROUTED1(ExtensionMsg_InlineInstallDownloadProgress,
                    int /* percent_downloaded */)

// Sent to the renderer once the installation mentioned on
// ExtensionHostMsg_InlineWebstoreInstall is complete.
IPC_MESSAGE_ROUTED4(ExtensionMsg_InlineWebstoreInstallResponse,
                    int32_t /* install id */,
                    bool /* whether the install was successful */,
                    std::string /* error */,
                    extensions::webstore_install::Result /* result */)
IPC_STRUCT_TRAITS_BEGIN(ui::AXNodeData)
IPC_STRUCT_TRAITS_MEMBER(id)
IPC_STRUCT_TRAITS_MEMBER(role)
IPC_STRUCT_TRAITS_MEMBER(state)
IPC_STRUCT_TRAITS_MEMBER(location)
IPC_STRUCT_TRAITS_MEMBER(transform)
IPC_STRUCT_TRAITS_MEMBER(string_attributes)
IPC_STRUCT_TRAITS_MEMBER(int_attributes)
IPC_STRUCT_TRAITS_MEMBER(float_attributes)
IPC_STRUCT_TRAITS_MEMBER(bool_attributes)
IPC_STRUCT_TRAITS_MEMBER(intlist_attributes)
IPC_STRUCT_TRAITS_MEMBER(html_attributes)
IPC_STRUCT_TRAITS_MEMBER(child_ids)
IPC_STRUCT_TRAITS_MEMBER(offset_container_id)
IPC_STRUCT_TRAITS_END()
IPC_STRUCT_TRAITS_BEGIN(ui::AXTreeData)
IPC_STRUCT_TRAITS_MEMBER(tree_id)
IPC_STRUCT_TRAITS_MEMBER(parent_tree_id)
IPC_STRUCT_TRAITS_MEMBER(focused_tree_id)
IPC_STRUCT_TRAITS_MEMBER(url)
IPC_STRUCT_TRAITS_MEMBER(title)
IPC_STRUCT_TRAITS_MEMBER(mimetype)
IPC_STRUCT_TRAITS_MEMBER(doctype)
IPC_STRUCT_TRAITS_MEMBER(loaded)
IPC_STRUCT_TRAITS_MEMBER(loading_progress)
IPC_STRUCT_TRAITS_MEMBER(focus_id)
IPC_STRUCT_TRAITS_MEMBER(sel_anchor_object_id)
IPC_STRUCT_TRAITS_MEMBER(sel_anchor_offset)
IPC_STRUCT_TRAITS_MEMBER(sel_anchor_affinity)
IPC_STRUCT_TRAITS_MEMBER(sel_focus_object_id)
IPC_STRUCT_TRAITS_MEMBER(sel_focus_offset)
IPC_STRUCT_TRAITS_MEMBER(sel_focus_affinity)
IPC_STRUCT_TRAITS_END()
IPC_STRUCT_TRAITS_BEGIN(ui::AXTreeUpdate)
IPC_STRUCT_TRAITS_MEMBER(has_tree_data)
IPC_STRUCT_TRAITS_MEMBER(tree_data)
IPC_STRUCT_TRAITS_MEMBER(node_id_to_clear)
IPC_STRUCT_TRAITS_MEMBER(root_id)
IPC_STRUCT_TRAITS_MEMBER(nodes)
IPC_STRUCT_TRAITS_END()
IPC_STRUCT_BEGIN(ExtensionMsg_AccessibilityEventParams)
// ID of the accessibility tree that this event applies to.
IPC_STRUCT_MEMBER(int, tree_id)
// The global offset of all coordinates in this accessibility tree.
IPC_STRUCT_MEMBER(gfx::Vector2d, location_offset)
// The tree update.
IPC_STRUCT_MEMBER(ui::AXTreeUpdate, update)
// Type of event.
IPC_STRUCT_MEMBER(ui::AXEvent, event_type)
// ID of the node that the event applies to.
IPC_STRUCT_MEMBER(int, id)
// The source of this event.
IPC_STRUCT_MEMBER(ui::AXEventFrom, event_from)
IPC_STRUCT_END()
IPC_STRUCT_BEGIN(ExtensionMsg_AccessibilityLocationChangeParams)
// ID of the accessibility tree that this event applies to.
IPC_STRUCT_MEMBER(int, tree_id)
// ID of the object whose location is changing.
IPC_STRUCT_MEMBER(int, id)
// The object's new location info.
IPC_STRUCT_MEMBER(ui::AXRelativeBounds, new_location)
IPC_STRUCT_END()
// Forward an accessibility message to an extension process where an
// extension is using the automation API to listen for accessibility events.
IPC_MESSAGE_ROUTED2(ExtensionMsg_AccessibilityEvent,
ExtensionMsg_AccessibilityEventParams,
bool /* is_active_profile */)
// Forward an accessibility location change message to an extension process
// where an extension is using the automation API to listen for
// accessibility events.
IPC_MESSAGE_ROUTED1(ExtensionMsg_AccessibilityLocationChange,
ExtensionMsg_AccessibilityLocationChangeParams)
// Messages sent from the renderer to the browser.
// Sent by the renderer to implement chrome.webstore.install().
IPC_MESSAGE_ROUTED4(ExtensionHostMsg_InlineWebstoreInstall,
int32_t /* install id */,
int32_t /* return route id */,
std::string /* Web Store item ID */,
int /* listeners_mask */)
|
ssaroha/node-webrtc
|
third_party/webrtc/include/chromium/src/chrome/common/extensions/chrome_extension_messages.h
|
C
|
bsd-2-clause
| 5,967 |
package cz.metacentrum.perun.webgui.tabs.cabinettabs;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.client.ui.*;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.UiElements;
import cz.metacentrum.perun.webgui.client.mainmenu.MainMenu;
import cz.metacentrum.perun.webgui.client.resources.*;
import cz.metacentrum.perun.webgui.json.GetEntityById;
import cz.metacentrum.perun.webgui.json.JsonCallbackEvents;
import cz.metacentrum.perun.webgui.json.JsonUtils;
import cz.metacentrum.perun.webgui.json.cabinetManager.*;
import cz.metacentrum.perun.webgui.model.Author;
import cz.metacentrum.perun.webgui.model.Category;
import cz.metacentrum.perun.webgui.model.Publication;
import cz.metacentrum.perun.webgui.model.Thanks;
import cz.metacentrum.perun.webgui.tabs.CabinetTabs;
import cz.metacentrum.perun.webgui.tabs.TabItem;
import cz.metacentrum.perun.webgui.tabs.TabItemWithUrl;
import cz.metacentrum.perun.webgui.tabs.UrlMapper;
import cz.metacentrum.perun.webgui.widgets.CustomButton;
import cz.metacentrum.perun.webgui.widgets.ListBoxWithObjects;
import cz.metacentrum.perun.webgui.widgets.TabMenu;
import java.sql.Date;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
/**
* Tab which shows publication's details.
*
* @author Pavel Zlamal <[email protected]>
*/
public class PublicationDetailTabItem implements TabItem, TabItemWithUrl {

	/**
	 * Perun web session
	 */
	private PerunWebSession session = PerunWebSession.getInstance();

	/**
	 * Content widget - should be simple panel
	 */
	private SimplePanel contentWidget = new SimplePanel();

	/**
	 * Title widget
	 */
	private Label titleWidget = new Label("Loading publication");

	// data
	private Publication publication;
	private int publicationId;
	// TRUE when opened from the user's own section; affects breadcrumbs only.
	private boolean fromSelf = false; // accessed from perun admin by default

	/**
	 * Creates a tab instance
	 *
	 * @param pub publication
	 */
	public PublicationDetailTabItem(Publication pub){
		this.publication = pub;
		this.publicationId = pub.getId();
	}

	/**
	 * Creates a tab instance
	 * @param pub publication
	 * @param fromSelf TRUE if accessed from user section / FALSE otherwise
	 */
	public PublicationDetailTabItem(Publication pub, boolean fromSelf){
		this.publication = pub;
		this.publicationId = pub.getId();
		this.fromSelf = fromSelf;
	}

	/**
	 * Creates a tab instance and loads the publication asynchronously by ID.
	 * The tab reports itself as prepared only once the callback has filled
	 * {@link #publication} (see {@link #isPrepared()}).
	 *
	 * @param publicationId publication
	 * @param fromSelf TRUE if accessed from user section / FALSE otherwise
	 */
	public PublicationDetailTabItem(int publicationId, boolean fromSelf){
		this.publicationId = publicationId;
		this.fromSelf = fromSelf;
		GetEntityById call = new GetEntityById(PerunEntity.PUBLICATION, publicationId, new JsonCallbackEvents(){
			public void onFinished(JavaScriptObject jso){
				publication = jso.cast();
			}
		});
		// do not use cache this time because of update publ. method !!
		call.retrieveData();
	}

	/**
	 * Ready once the publication has been loaded (relevant for the
	 * ID-based constructor, which loads it asynchronously).
	 */
	public boolean isPrepared(){
		return !(publication == null);
	}

	@Override
	public boolean isRefreshParentOnClose() {
		return false;
	}

	@Override
	public void onClose() {
	}

	/**
	 * Builds the tab content: an editable details form when the publication
	 * is unlocked, a read-only view when locked, plus authors and
	 * acknowledgement sub-sections. Re-invoked after successful updates to
	 * refresh the view.
	 */
	public Widget draw() {

		// show only part of title
		titleWidget.setText(Utils.getStrippedStringWithEllipsis(publication.getTitle()));

		// MAIN PANEL
		ScrollPanel sp = new ScrollPanel();
		sp.addStyleName("perun-tableScrollPanel");
		VerticalPanel vp = new VerticalPanel();
		vp.addStyleName("perun-table");
		sp.add(vp);

		// resize perun table to correct size on screen
		session.getUiElements().resizePerunTable(sp, 350, this);

		// content
		final FlexTable ft = new FlexTable();
		ft.setStyleName("inputFormFlexTable");

		if (publication.getLocked() == false) {

			// UNLOCKED: editable form; row 0 is reserved for the save button.
			ft.setHTML(1, 0, "Id / Origin:");
			ft.setHTML(2, 0, "Title:");
			ft.setHTML(3, 0, "Year:");
			ft.setHTML(4, 0, "Category:");
			ft.setHTML(5, 0, "Rank:");
			ft.setHTML(6, 0, "ISBN / ISSN:");
			ft.setHTML(7, 0, "DOI:");
			ft.setHTML(8, 0, "Full cite:");
			ft.setHTML(9, 0, "Created by:");
			ft.setHTML(10, 0, "Created date:");

			for (int i=0; i<ft.getRowCount(); i++) {
				ft.getFlexCellFormatter().setStyleName(i, 0, "itemName");
			}
			ft.getFlexCellFormatter().setWidth(1, 0, "100px");

			final ListBoxWithObjects<Category> listbox = new ListBoxWithObjects<Category>();
			// fill listbox
			JsonCallbackEvents events = new JsonCallbackEvents(){
				public void onFinished(JavaScriptObject jso) {
					for (Category cat : JsonUtils.<Category>jsoAsList(jso)){
						listbox.addItem(cat);
						// if right, selected
						if (publication.getCategoryId() == cat.getId()) {
							listbox.setSelected(cat, true);
						}
					}
				}
			};
			GetCategories categories = new GetCategories(events);
			categories.retrieveData();

			final TextBox rank = new TextBox();
			rank.setWidth("30px");
			rank.setMaxLength(4);
			rank.setText(String.valueOf(publication.getRank()));

			final TextBox title = new TextBox();
			title.setMaxLength(1024);
			title.setText(publication.getTitle());
			title.setWidth("500px");

			final TextBox year = new TextBox();
			year.setText(String.valueOf(publication.getYear()));
			year.setMaxLength(4);
			year.setWidth("30px");

			final TextBox isbn = new TextBox();
			isbn.setText(publication.getIsbn());
			isbn.setMaxLength(32);

			final TextBox doi = new TextBox();
			doi.setText(publication.getDoi());
			doi.setMaxLength(256);

			final TextArea main = new TextArea();
			main.setText(publication.getMain());
			main.setSize("500px", "70px");
			// set max length
			main.getElement().setAttribute("maxlength", "4000");

			ft.setHTML(1, 1, publication.getId()+" / <Strong>Ext. Id: </strong>"+publication.getExternalId()+" <Strong>System: </strong>"+ SafeHtmlUtils.fromString(publication.getPublicationSystemName()).asString());
			ft.setWidget(2, 1, title);
			ft.setWidget(3, 1, year);
			ft.setWidget(4, 1, listbox);

			if (session.isPerunAdmin()) {
				// only perunadmin can change rank
				ft.setWidget(5, 1, rank);
			} else {
				ft.setHTML(5, 1, SafeHtmlUtils.fromString(String.valueOf(publication.getRank()) +"").asString());
			}

			ft.setWidget(6, 1, isbn);
			ft.setWidget(7, 1, doi);
			ft.setWidget(8, 1, main);
			ft.setHTML(9, 1, SafeHtmlUtils.fromString((publication.getCreatedBy() != null) ? publication.getCreatedBy() : "").asString());
			ft.setHTML(10, 1, SafeHtmlUtils.fromString((String.valueOf(publication.getCreatedDate()) != null) ? String.valueOf(publication.getCreatedDate()) : "").asString());

			// update button - copies form values into a clone of the
			// publication and sends it to the server; redraws on success
			final CustomButton change = TabMenu.getPredefinedButton(ButtonType.SAVE, "Save changes in publication details");
			change.addClickHandler(new ClickHandler() {
				public void onClick(ClickEvent event) {
					Publication pub = JsonUtils.clone(publication).cast();
					if (!JsonUtils.checkParseInt(year.getText())){
						JsonUtils.cantParseIntConfirm("YEAR", year.getText());
					} else {
						pub.setYear(Integer.parseInt(year.getText()));
					}
					if (session.isPerunAdmin()) {
						pub.setRank(Double.parseDouble(rank.getText()));
					}
					pub.setCategoryId(listbox.getSelectedObject().getId());
					pub.setTitle(title.getText());
					pub.setMain(main.getText());
					pub.setIsbn(isbn.getText());
					pub.setDoi(doi.getText());
					UpdatePublication upCall = new UpdatePublication(JsonCallbackEvents.disableButtonEvents(change, new JsonCallbackEvents(){
						public void onFinished(JavaScriptObject jso) {
							// refresh page content
							Publication p = jso.cast();
							publication = p;
							draw();
						}
					}));
					upCall.updatePublication(pub);
				}
			});
			ft.setWidget(0, 0, change);

		} else {

			// LOCKED: read-only view with a notice in row 0.
			ft.getFlexCellFormatter().setColSpan(0, 0, 2);
			ft.setWidget(0, 0, new HTML(new Image(SmallIcons.INSTANCE.lockIcon())+" <strong>Publication is locked. Ask administrator to perform any changes for you at [email protected].</strong>"));

			ft.setHTML(1, 0, "Id / Origin:");
			ft.setHTML(2, 0, "Title:");
			ft.setHTML(3, 0, "Year:");
			ft.setHTML(4, 0, "Category:");
			ft.setHTML(5, 0, "Rank:");
			ft.setHTML(6, 0, "ISBN / ISSN:");
			ft.setHTML(7, 0, "DOI:");
			ft.setHTML(8, 0, "Full cite:");
			ft.setHTML(9, 0, "Created by:");
			ft.setHTML(10, 0, "Created date:");

			for (int i=0; i<ft.getRowCount(); i++) {
				ft.getFlexCellFormatter().setStyleName(i, 0, "itemName");
			}
			ft.getFlexCellFormatter().setWidth(1, 0, "100px");

			ft.setHTML(1, 1, publication.getId()+" / <Strong>Ext. Id: </strong>"+publication.getExternalId()+" <Strong>System: </strong>"+SafeHtmlUtils.fromString(publication.getPublicationSystemName()).asString());
			ft.setHTML(2, 1, SafeHtmlUtils.fromString((publication.getTitle() != null) ? publication.getTitle() : "").asString());
			ft.setHTML(3, 1, SafeHtmlUtils.fromString((String.valueOf(publication.getYear()) != null) ? String.valueOf(publication.getYear()) : "").asString());
			ft.setHTML(4, 1, SafeHtmlUtils.fromString((publication.getCategoryName() != null) ? publication.getCategoryName() : "").asString());
			ft.setHTML(5, 1, SafeHtmlUtils.fromString(String.valueOf(publication.getRank()) + " (default is 0)").asString());
			ft.setHTML(6, 1, SafeHtmlUtils.fromString((publication.getIsbn() != null) ? publication.getIsbn() : "").asString());
			ft.setHTML(7, 1, SafeHtmlUtils.fromString((publication.getDoi() != null) ? publication.getDoi() : "").asString());
			ft.setHTML(8, 1, SafeHtmlUtils.fromString((publication.getMain() != null) ? publication.getMain() : "").asString());
			ft.setHTML(9, 1, SafeHtmlUtils.fromString((publication.getCreatedBy() != null) ? publication.getCreatedBy() : "").asString());
			ft.setHTML(10, 1, SafeHtmlUtils.fromString((String.valueOf(publication.getCreatedDate()) != null) ? String.valueOf(publication.getCreatedDate()) : "").asString());

		}

		// LOCK / UNLOCK button for PerunAdmin
		if (session.isPerunAdmin()) {
			final CustomButton lock;
			if (publication.getLocked()) {
				// replaces the locked-notice layout set above
				lock = new CustomButton("Unlock", "Allow editing of publication details (for users).", SmallIcons.INSTANCE.lockOpenIcon());
				ft.setWidget(0, 0, lock);
				ft.getFlexCellFormatter().setColSpan(0, 0, 1);
				ft.setWidget(0, 1, new HTML(new Image(SmallIcons.INSTANCE.lockIcon())+" Publication is locked."));
			} else {
				lock = new CustomButton("Lock", "Deny editing of publication details (for users).", SmallIcons.INSTANCE.lockIcon());
				ft.setWidget(0, 1, lock);
			}
			lock.addClickHandler(new ClickHandler(){
				public void onClick(ClickEvent event) {
					LockUnlockPublications upCall = new LockUnlockPublications(JsonCallbackEvents.disableButtonEvents(lock, new JsonCallbackEvents(){
						public void onFinished(JavaScriptObject jso) {
							// refresh page content
							publication.setLocked(!publication.getLocked());
							draw();
						}
					}));
					Publication p = JsonUtils.clone(publication).cast();
					upCall.lockUnlockPublication(!publication.getLocked(), p);
				}
			});
		}

		DisclosurePanel dp = new DisclosurePanel();
		dp.setWidth("100%");
		dp.setContent(ft);
		dp.setOpen(true);

		FlexTable detailsHeader = new FlexTable();
		detailsHeader.setWidget(0, 0, new Image(LargeIcons.INSTANCE.bookIcon()));
		detailsHeader.setHTML(0, 1, "<h3>Details</h3>");
		dp.setHeader(detailsHeader);

		vp.add(dp);
		vp.add(loadAuthorsSubTab());
		vp.add(loadThanksSubTab());

		this.contentWidget.setWidget(sp);

		return getWidget();
	}

	/**
	 * Returns widget with authors management for publication
	 *
	 * @return widget
	 */
	private Widget loadAuthorsSubTab(){

		DisclosurePanel dp = new DisclosurePanel();
		dp.setWidth("100%");
		dp.setOpen(true);
		VerticalPanel vp = new VerticalPanel();
		vp.setSize("100%", "100%");
		dp.setContent(vp);

		FlexTable header = new FlexTable();
		header.setWidget(0, 0, new Image(LargeIcons.INSTANCE.userGreenIcon()));
		header.setHTML(0, 1, "<h3>Authors / Reported by</h3>");
		dp.setHeader(header);

		// menu
		TabMenu menu = new TabMenu();

		// callback
		final FindAuthorsByPublicationId call = new FindAuthorsByPublicationId(publication.getId());
		call.setCheckable(false);
		if (!publication.getLocked()) {
			// editable if not locked
			vp.add(menu);
			vp.setCellHeight(menu, "30px");
			call.setCheckable(true);
		}

		final CustomButton addButton = new CustomButton("Add myself", "Add you as author of publication", SmallIcons.INSTANCE.addIcon());
		addButton.addClickHandler(new ClickHandler() {
			@Override
			public void onClick(ClickEvent event) {
				JsonCallbackEvents events = JsonCallbackEvents.refreshTableEvents(call);
				CreateAuthorship request = new CreateAuthorship(JsonCallbackEvents.disableButtonEvents(addButton, events));
				request.createAuthorship(publicationId, session.getActiveUser().getId());
			}
		});
		menu.addWidget(addButton);

		CustomButton addOthersButton = new CustomButton("Add others", "Add more authors", SmallIcons.INSTANCE.addIcon());
		addOthersButton.addClickHandler(new ClickHandler() {
			@Override
			public void onClick(ClickEvent event) {
				session.getTabManager().addTabToCurrentTab(new AddAuthorTabItem(publication, JsonCallbackEvents.refreshTableEvents(call)), true);
			}
		});
		menu.addWidget(addOthersButton);

		// fill table
		CellTable<Author> table = call.getEmptyTable();
		call.retrieveData();

		// remove button is enabled only when some rows are selected
		final CustomButton removeButton = TabMenu.getPredefinedButton(ButtonType.REMOVE, "Remove select author(s) from publication");
		removeButton.setEnabled(false);
		JsonUtils.addTableManagedButton(call, table, removeButton);
		menu.addWidget(removeButton);
		removeButton.addClickHandler(new ClickHandler() {
			@Override
			public void onClick(ClickEvent event) {
				final ArrayList<Author> list = call.getTableSelectedList();
				String text = "Following users will be removed from publication's authors. They will lose any benefit granted by publication's rank.";
				UiElements.showDeleteConfirm(list, text, new ClickHandler() {
					@Override
					public void onClick(ClickEvent event) {
						// TODO - SHOULD HAVE ONLY ONE CALLBACK TO CORE
						for(int i=0; i<list.size(); i++){
							// calls the request; only the last one refreshes the table
							if (i == list.size()-1) {
								DeleteAuthorship request = new DeleteAuthorship(JsonCallbackEvents.disableButtonEvents(removeButton, JsonCallbackEvents.refreshTableEvents(call)));
								request.deleteAuthorship(publicationId, list.get(i).getId());
							} else {
								DeleteAuthorship request = new DeleteAuthorship();
								request.deleteAuthorship(publicationId, list.get(i).getId());
							}
						}
					}
				});
			}
		});

		ScrollPanel sp = new ScrollPanel();
		sp.add(table);
		table.addStyleName("perun-table");
		sp.addStyleName("perun-tableScrollPanel");

		vp.add(sp);

		return dp;
	}

	/**
	 * Returns thanks management widget for publication
	 *
	 * @return widget
	 */
	private Widget loadThanksSubTab(){

		DisclosurePanel dp = new DisclosurePanel();
		dp.setWidth("100%");
		dp.setOpen(true);
		VerticalPanel vp = new VerticalPanel();
		vp.setSize("100%", "100%");
		dp.setContent(vp);

		FlexTable header = new FlexTable();
		header.setWidget(0, 0, new Image(LargeIcons.INSTANCE.smallBusinessIcon()));
		header.setHTML(0, 1, "<h3>Acknowledgement</h3>");
		dp.setHeader(header);

		// menu
		TabMenu menu = new TabMenu();

		// callback
		final GetRichThanksByPublicationId thanksCall = new GetRichThanksByPublicationId(publicationId);
		thanksCall.setCheckable(false);
		if (!publication.getLocked()) {
			// editable if not locked
			vp.add(menu);
			vp.setCellHeight(menu, "30px");
			thanksCall.setCheckable(true);
		}
		CellTable<Thanks> table = thanksCall.getTable();

		menu.addWidget(TabMenu.getPredefinedButton(ButtonType.ADD, "Add acknowledgement to publication", new ClickHandler() {
			@Override
			public void onClick(ClickEvent event) {
				session.getTabManager().addTabToCurrentTab(new CreateThanksTabItem(publication, JsonCallbackEvents.refreshTableEvents(thanksCall)), true);
			}
		}));

		// remove button is enabled only when some rows are selected
		final CustomButton removeButton = TabMenu.getPredefinedButton(ButtonType.REMOVE, "Remove acknowledgement from publication");
		removeButton.setEnabled(false);
		JsonUtils.addTableManagedButton(thanksCall, table, removeButton);
		menu.addWidget(removeButton);
		removeButton.addClickHandler(new ClickHandler() {
			@Override
			public void onClick(ClickEvent event) {
				final ArrayList<Thanks> list = thanksCall.getTableSelectedList();
				String text = "Following acknowledgements will be removed from publication.";
				UiElements.showDeleteConfirm(list, text, new ClickHandler() {
					@Override
					public void onClick(ClickEvent event) {
						// TODO - SHOULD HAVE ONLY ONE CALLBACK TO CORE
						for(int i=0; i<list.size(); i++){
							// calls the request; only the last one refreshes the table
							if (i == list.size()-1) {
								DeleteThanks request = new DeleteThanks(JsonCallbackEvents.disableButtonEvents(removeButton, JsonCallbackEvents.refreshTableEvents(thanksCall)));
								request.deleteThanks(list.get(i).getId());
							} else {
								DeleteThanks request = new DeleteThanks(JsonCallbackEvents.disableButtonEvents(removeButton));
								request.deleteThanks(list.get(i).getId());
							}
						}
					}
				});
			}
		});

		table.addStyleName("perun-table");
		ScrollPanel sp = new ScrollPanel();
		sp.add(table);
		sp.addStyleName("perun-tableScrollPanel");

		vp.add(sp);

		return dp;
	}

	/**
	 * Returns the tab's content widget.
	 */
	public Widget getWidget() {
		return this.contentWidget;
	}

	/**
	 * Returns the tab's title widget (publication title, possibly shortened).
	 */
	public Widget getTitle() {
		return this.titleWidget;
	}

	/**
	 * Returns the tab's icon.
	 */
	public ImageResource getIcon() {
		return SmallIcons.INSTANCE.bookIcon();
	}

	/**
	 * Hash depends only on {@code publicationId} - consistent with
	 * {@link #equals(Object)} below.
	 */
	@Override
	public int hashCode() {
		final int prime = 613;
		int result = 1;
		result = prime * result * 22 * publicationId;
		return result;
	}

	/**
	 * Tabs are equal when they show the same publication (same ID).
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		PublicationDetailTabItem other = (PublicationDetailTabItem)obj;
		if (publicationId != other.publicationId)
			return false;
		return true;
	}

	public boolean multipleInstancesEnabled() {
		return false;
	}

	/**
	 * Sets breadcrumbs according to where the tab was opened from
	 * (user's own publications vs. perun admin view).
	 */
	public void open() {
		if (fromSelf) {
			session.getUiElements().getBreadcrumbs().setLocation(MainMenu.USER, "My publications", CabinetTabs.URL+UrlMapper.TAB_NAME_SEPARATOR+"userpubs?user=" + session.getUser().getId(), publication.getTitle(), getUrlWithParameters());
		} else {
			session.getUiElements().getBreadcrumbs().setLocation(MainMenu.PERUN_ADMIN, "Publications", CabinetTabs.URL+UrlMapper.TAB_NAME_SEPARATOR+"all", publication.getTitle(), getUrlWithParameters());
		}
	}

	/**
	 * Any authenticated user ("self" role) may view the tab.
	 */
	public boolean isAuthorized() {
		if (session.isSelf()) {
			return true;
		} else {
			return false;
		}
	}

	public final static String URL = "pbl";

	public String getUrl()
	{
		return URL;
	}

	/**
	 * URL with the publication id and the "self" flag encoded as parameters.
	 */
	public String getUrlWithParameters() {
		return CabinetTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + getUrl() + "?id=" + publicationId + "&self="+fromSelf;
	}

	/**
	 * Factory used by the URL mapper - recreates the tab from URL parameters.
	 */
	static public PublicationDetailTabItem load(Map<String, String> parameters) {
		int pubId = Integer.parseInt(parameters.get("id"));
		boolean fromSelf = Boolean.parseBoolean(parameters.get("self"));
		return new PublicationDetailTabItem(pubId, fromSelf);
	}
}
|
stavamichal/perun
|
perun-web-gui/src/main/java/cz/metacentrum/perun/webgui/tabs/cabinettabs/PublicationDetailTabItem.java
|
Java
|
bsd-2-clause
| 19,457 |
@echo off
:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: Copyright (c) 2015, Synopsys, Inc. :
:: All rights reserved. :
:: :
:: Redistribution and use in source and binary forms, with or without :
:: modification, are permitted provided that the following conditions are :
:: met: :
:: :
:: 1. Redistributions of source code must retain the above copyright :
:: notice, this list of conditions and the following disclaimer. :
:: :
:: 2. Redistributions in binary form must reproduce the above copyright :
:: notice, this list of conditions and the following disclaimer in the :
:: documentation and/or other materials provided with the distribution. :
:: :
:: THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS :
:: "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT :
:: LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR :
:: A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT :
:: HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, :
:: SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT :
:: LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, :
:: DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY :
:: THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT :
:: (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE :
:: OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. :
::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::

:: Stub for the configuration-management "unlock" command: this adapter
:: provides no unlock operation, so just report that to the caller.
:: ("%%" prints a literal "%" in a batch file.)
echo %% Unlock is not supported
|
kit-transue/software-emancipation-discover
|
config/config/NT/sourcesafe/cm_unlock.bat
|
Batchfile
|
bsd-2-clause
| 2,096 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_CHROMEOS_FILEAPI_FILE_SYSTEM_BACKEND_H_
#define CHROME_BROWSER_CHROMEOS_FILEAPI_FILE_SYSTEM_BACKEND_H_
#include <stdint.h>
#include <memory>
#include <string>
#include <vector>
#include "base/compiler_specific.h"
#include "base/files/file_path.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "storage/browser/fileapi/file_system_backend.h"
#include "storage/browser/fileapi/task_runner_bound_observer_list.h"
#include "storage/common/fileapi/file_system_types.h"
namespace storage {
class CopyOrMoveFileValidatorFactory;
class ExternalMountPoints;
class FileSystemURL;
class WatcherManager;
} // namespace storage
namespace chromeos {

class FileSystemBackendDelegate;
class FileAccessPermissions;

// FileSystemBackend is a Chrome OS specific implementation of
// ExternalFileSystemBackend. This class is responsible for a
// number of things, including:
//
// - Add system mount points
// - Grant/revoke/check file access permissions
// - Create FileSystemOperation per file system type
// - Create FileStreamReader/Writer per file system type
//
// Chrome OS specific mount points:
//
// "Downloads" is a mount point for user's Downloads directory on the local
// disk, where downloaded files are stored by default.
//
// "archive" is a mount point for an archive file, such as a zip file. This
// mount point exposes contents of an archive file via cros_disks and AVFS
// <http://avf.sourceforge.net/>.
//
// "removable" is a mount point for removable media such as an SD card.
// Insertion and removal of removable media are handled by cros_disks.
//
// "oem" is a read-only mount point for a directory containing OEM data.
//
// "drive" is a mount point for Google Drive. Drive is integrated with the
// FileSystem API layer via drive::FileSystemProxy. This mount point is added
// by drive::DriveIntegrationService.
//
// These mount points are placed under the "external" namespace, and file
// system URLs for these mount points look like:
//
//   filesystem:<origin>/external/<mount_name>/...
//
class FileSystemBackend : public storage::ExternalFileSystemBackend {
 public:
  using storage::FileSystemBackend::OpenFileSystemCallback;

  // Each |*_delegate| serves one backing file system type; see the member
  // declarations below. |system_mount_points| should outlive
  // FileSystemBackend instance.
  FileSystemBackend(
      std::unique_ptr<FileSystemBackendDelegate> drive_delegate,
      std::unique_ptr<FileSystemBackendDelegate> file_system_provider_delegate,
      std::unique_ptr<FileSystemBackendDelegate> mtp_delegate,
      std::unique_ptr<FileSystemBackendDelegate> arc_content_delegate,
      scoped_refptr<storage::ExternalMountPoints> mount_points,
      storage::ExternalMountPoints* system_mount_points);
  ~FileSystemBackend() override;

  // Adds system mount points, such as "archive", and "removable". This
  // function is no-op if these mount points are already present.
  void AddSystemMountPoints();

  // Returns true if CrosMountpointProvider can handle |url|, i.e. its
  // file system type matches with what this provider supports.
  // This could be called on any threads.
  static bool CanHandleURL(const storage::FileSystemURL& url);

  // storage::FileSystemBackend overrides.
  bool CanHandleType(storage::FileSystemType type) const override;
  void Initialize(storage::FileSystemContext* context) override;
  void ResolveURL(const storage::FileSystemURL& url,
                  storage::OpenFileSystemMode mode,
                  const OpenFileSystemCallback& callback) override;
  storage::AsyncFileUtil* GetAsyncFileUtil(
      storage::FileSystemType type) override;
  storage::WatcherManager* GetWatcherManager(
      storage::FileSystemType type) override;
  storage::CopyOrMoveFileValidatorFactory* GetCopyOrMoveFileValidatorFactory(
      storage::FileSystemType type,
      base::File::Error* error_code) override;
  storage::FileSystemOperation* CreateFileSystemOperation(
      const storage::FileSystemURL& url,
      storage::FileSystemContext* context,
      base::File::Error* error_code) const override;
  bool SupportsStreaming(const storage::FileSystemURL& url) const override;
  bool HasInplaceCopyImplementation(
      storage::FileSystemType type) const override;
  // Reader starts at |offset|; |max_bytes_to_read| bounds how much may be
  // read from |path|.
  std::unique_ptr<storage::FileStreamReader> CreateFileStreamReader(
      const storage::FileSystemURL& path,
      int64_t offset,
      int64_t max_bytes_to_read,
      const base::Time& expected_modification_time,
      storage::FileSystemContext* context) const override;
  std::unique_ptr<storage::FileStreamWriter> CreateFileStreamWriter(
      const storage::FileSystemURL& url,
      int64_t offset,
      storage::FileSystemContext* context) const override;
  storage::FileSystemQuotaUtil* GetQuotaUtil() override;
  const storage::UpdateObserverList* GetUpdateObservers(
      storage::FileSystemType type) const override;
  const storage::ChangeObserverList* GetChangeObservers(
      storage::FileSystemType type) const override;
  const storage::AccessObserverList* GetAccessObservers(
      storage::FileSystemType type) const override;

  // storage::ExternalFileSystemBackend overrides.
  bool IsAccessAllowed(const storage::FileSystemURL& url) const override;
  std::vector<base::FilePath> GetRootDirectories() const override;
  void GrantFileAccessToExtension(const std::string& extension_id,
                                  const base::FilePath& virtual_path) override;
  void RevokeAccessForExtension(const std::string& extension_id) override;
  bool GetVirtualPath(const base::FilePath& filesystem_path,
                      base::FilePath* virtual_path) const override;
  void GetRedirectURLForContents(
      const storage::FileSystemURL& url,
      const storage::URLCallback& callback) const override;
  storage::FileSystemURL CreateInternalURL(
      storage::FileSystemContext* context,
      const base::FilePath& entry_path) const override;

 private:
  // Tracks per-extension file access grants (see Grant/Revoke above).
  std::unique_ptr<FileAccessPermissions> file_access_permissions_;
  std::unique_ptr<storage::AsyncFileUtil> local_file_util_;

  // The delegate instance for the drive file system related operations.
  std::unique_ptr<FileSystemBackendDelegate> drive_delegate_;

  // The delegate instance for the provided file system related operations.
  std::unique_ptr<FileSystemBackendDelegate> file_system_provider_delegate_;

  // The delegate instance for the MTP file system related operations.
  std::unique_ptr<FileSystemBackendDelegate> mtp_delegate_;

  // The delegate instance for the ARC content file system related operations.
  std::unique_ptr<FileSystemBackendDelegate> arc_content_delegate_;

  // Mount points specific to the owning context (i.e. per-profile mount
  // points).
  //
  // It is legal to have mount points with the same name as in
  // system_mount_points_. Also, mount point paths may overlap with mount point
  // paths in system_mount_points_. In both cases mount points in
  // |mount_points_| will have a priority.
  // E.g. if |mount_points_| map 'foo1' to '/foo/foo1' and
  // |file_system_mount_points_| map 'xxx' to '/foo/foo1/xxx', |GetVirtualPaths|
  // will resolve '/foo/foo1/xxx/yyy' as 'foo1/xxx/yyy' (i.e. the mapping from
  // |mount_points_| will be used).
  scoped_refptr<storage::ExternalMountPoints> mount_points_;

  // Globally visible mount points. System MountPonts instance should outlive
  // all FileSystemBackend instances, so raw pointer is safe.
  storage::ExternalMountPoints* system_mount_points_;

  DISALLOW_COPY_AND_ASSIGN(FileSystemBackend);
};

}  // namespace chromeos
#endif // CHROME_BROWSER_CHROMEOS_FILEAPI_FILE_SYSTEM_BACKEND_H_
|
ssaroha/node-webrtc
|
third_party/webrtc/include/chromium/src/chrome/browser/chromeos/fileapi/file_system_backend.h
|
C
|
bsd-2-clause
| 7,738 |
/*-
* Copyright (c) 2003, 2005 Alan L. Cox <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $FreeBSD: release/9.1.0/sys/amd64/include/sf_buf.h 142840 2005-02-28 23:38:15Z peter $
*/
#ifndef _MACHINE_SF_BUF_H_
#define _MACHINE_SF_BUF_H_
#include <vm/vm.h>
#include <vm/vm_param.h>
#include <vm/vm_page.h>
/*
* On this machine, the only purpose for which sf_buf is used is to implement
* an opaque pointer required by the machine-independent parts of the kernel.
* That pointer references the vm_page that is "mapped" by the sf_buf. The
* actual mapping is provided by the direct virtual-to-physical mapping.
*/
struct sf_buf;
static __inline vm_offset_t
sf_buf_kva(struct sf_buf *sf)
{
return (PHYS_TO_DMAP(VM_PAGE_TO_PHYS((vm_page_t)sf)));
}
/*
 * Return the vm_page "mapped" by the sf_buf.  An sf_buf is an opaque
 * alias for its backing vm_page on this architecture, so this is a
 * plain cast.
 */
static __inline vm_page_t
sf_buf_page(struct sf_buf *sf)
{

	return ((vm_page_t)sf);
}
#endif /* !_MACHINE_SF_BUF_H_ */
|
splbio/wanproxy
|
network/uinet/sys/amd64/include/sf_buf.h
|
C
|
bsd-2-clause
| 2,166 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
# Register all ModelAdmin classes from installed apps with the admin site.
admin.autodiscover()

urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    # Empty prefix acts as a catch-all, so this entry must stay last.
    url(r'', include('project.core.urls', namespace='core')),
)
|
pombredanne/django-boilerplate-1
|
project/urls.py
|
Python
|
bsd-2-clause
| 248 |
"""Auxiliary functions."""
import cPickle as pickle
import os
import sys
import gzip
import urllib
import numpy
import theano
import theano.tensor as T
import theano.sandbox.cuda
from athenet.utils import BIN_DIR, DATA_DIR
def load_data_from_pickle(filename):
    """Load pickled data from a file, transparently handling gzip.

    Tries to read the file as a gzipped pickle first and falls back to a
    plain pickle when the gzip header is missing.

    :param filename: File with pickled data, may be gzipped.
    :return: Data loaded from file.
    """
    try:
        # gzip.open() succeeds lazily; a non-gzip file only fails once the
        # pickle starts reading, raising IOError/OSError.
        with gzip.open(filename, 'rb') as f:
            return pickle.load(f)
    except (IOError, OSError):
        # Not gzipped -- retry as a plain pickle file. The 'with' blocks
        # guarantee both handles are closed even on error (the original
        # leaked the gzip handle on the fallback path and used a bare
        # 'except:' that swallowed every exception type).
        with open(filename, 'rb') as f:
            return pickle.load(f)
def save_data_to_pickle(data, filename):
    """Pickle *data* and write it to a gzip-compressed file.

    :param data: Data to be saved.
    :param filename: Name of file to save data.
    """
    out = gzip.open(filename, 'wb')
    try:
        pickle.dump(data, out)
    finally:
        out.close()
def load_data(filename, url=None):
    """Load data from file, downloading the file first if it is missing.

    :param filename: File with pickled data, may be gzipped.
    :param url: Url for downloading file.
    :return: Unpickled data, or None when the file is absent and no url
             was supplied.
    """
    if not os.path.isfile(filename):
        # No local copy; fetch it when a url was provided, give up otherwise.
        if not url:
            return None
        download_file(filename, url)
    return load_data_from_pickle(filename)
def download_file(filename, url):
    """Download file from given url.

    Creates the target directory when it does not exist and blocks until
    the download completes.

    :param filename: Name of a file to be downloaded.
    :param url: Url for downloading file.
    """
    directory = os.path.dirname(filename)
    if not os.path.exists(directory):
        os.makedirs(directory)
    # Trailing comma suppresses the newline (Python 2 print statement),
    # so 'Done' ends up on the same line after the download finishes.
    print 'Downloading ' + os.path.basename(filename) + '...',
    sys.stdout.flush()
    urllib.urlretrieve(url, filename)
    print 'Done'
def get_data_path(name):
    """Return absolute path to the data file.

    :param name: Name of the file, relative to DATA_DIR.
    :return: Full path to the file.
    """
    return os.path.join(DATA_DIR, name)
def get_bin_path(name):
    """Return absolute path to the binary data file.

    :param name: Name of the file, relative to BIN_DIR.
    :return: Full path to the file.
    """
    return os.path.join(BIN_DIR, name)
def zero_fraction(network):
    """Return the fraction of zero-valued weights in the network.

    Biases are not considered.

    :param network: Network for which we count fraction of zeros.
    :return: Fraction of zeros.
    """
    total = 0
    zeros = 0
    for layer in network.weighted_layers:
        weights = layer.W
        total += numpy.size(weights)
        zeros += numpy.size(weights) - numpy.count_nonzero(weights)
    return float(zeros) / float(total)
def count_zeros_in_layer(layer):
    """Number of zero-valued entries in the layer's weight array."""
    weights = layer.W
    return weights.size - numpy.count_nonzero(weights)
def count_zeros(network):
    """
    Returns zeros in weights of Network.
    Biases are not considered.

    :param network: Network for which we count zeros.
    :return: List of number of weights being zero for each layer.
    """
    per_layer = [count_zeros_in_layer(layer)
                 for layer in network.weighted_layers]
    return numpy.array(per_layer)
# Length of the most recently written line; overwrite() uses it to know
# how many characters must be blanked out on the next call.
len_prev = 0

def overwrite(text='', length=None):
    """Write text in a current line, overwriting previously written text.

    Previously written text also needs to be written using this function for
    it to work properly. Otherwise optional argument length can be given to
    specify length of a previous text.

    :param string text: Text to be written.
    :param integer length: Length of a previous text.
    """
    global len_prev
    if length is None:
        length = len_prev
    # '\r' returns the cursor to the start of the line; the spaces erase
    # the previous text before the new text is printed over it.
    print '\r' + ' '*length,
    print '\r' + text,
    len_prev = len(text)
def cudnn_available():
    """Check if cuDNN is available.

    :return: True, if cuDNN is available, False otherwise.
    """
    try:
        return theano.sandbox.cuda.dnn_available()
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any theano/CUDA failure means "no cuDNN".
        return False
def reshape_for_padding(layer_input, image_shape, batch_size, padding,
                        value=0.0):
    """Surround a batch of images with a constant-valued border.

    :param theano.tensor4 layer_input: input in shape
                                       (batch_size, number of channels,
                                       height, width)
    :param tuple of integers image_shape: shape of input images in format
                                          (height, width, number of channels)
    :param integer batch_size: size of input batch size
    :param pair of integers padding: padding to be applied to layer_input
    :param float value: value of new fields
    :returns: padded layer_input
    :rtype: theano.tensor4
    """
    if padding == (0, 0):
        # Nothing to pad -- avoid building a useless graph node.
        return layer_input
    height, width, n_channels = image_shape
    pad_h, pad_w = padding
    padded_h = height + 2*pad_h
    padded_w = width + 2*pad_w
    # Allocate a tensor filled with the padding value, then paste the
    # original input into its centre.
    fill_value = numpy.array(value, dtype=theano.config.floatX)
    canvas = T.alloc(fill_value, batch_size, n_channels, padded_h, padded_w)
    return T.set_subtensor(
        canvas[:, :, pad_h:pad_h + height, pad_w:pad_w + width], layer_input)
def convolution(layer_input, w_shared, stride, n_groups, image_shape,
                padding, batch_size, filter_shape):
    """Returns result of applying grouped convolution to layer_input.

    Input channels and filters are split into ``n_groups`` groups; group
    ``i`` of the filters is convolved only with group ``i`` of the input
    channels, and the per-group results are concatenated along the
    channel axis.

    :param theano.tensor4 layer_input: input of convolution in format
                                       (batch_size, number of channels,
                                       height, width)
    :param theano.tensor4 w_shared: weights in format
                                    (number of output channels,
                                     number of input channels,
                                     height, width)
    :param pair of integers stride: stride of convolution
    :param integer n_groups: number of groups in convolution
    :param image_shape: shape of single image in layer_input in format
                        (height, width, number of channels)
    :type image_shape: tuple of 3 integers
    :param pair of integers padding: padding of convolution
    :param integer batch_size: size of batch of layer_input
    :param filter_shape: shape of single filter in format
                         (height, width, number of output channels)
    :type filter_shape: tuple of 3 integers
    """
    n_channels = image_shape[2]
    n_filters = filter_shape[2]
    # Python 2 integer division: assumes channel and filter counts are
    # exact multiples of n_groups -- TODO confirm callers guarantee this.
    n_group_channels = n_channels / n_groups
    n_group_filters = n_filters / n_groups
    h, w = image_shape[0:2]
    pad_h, pad_w = padding
    # The per-group image shape includes the padding margins, so the
    # input appears to be expected already padded -- presumably via
    # reshape_for_padding; verify against callers.
    group_image_shape = (batch_size, n_group_channels,
                         h + 2*pad_h, w + 2*pad_w)
    h, w = filter_shape[0:2]
    group_filter_shape = (n_group_filters, n_group_channels, h, w)
    # Convolve each channel group with its matching filter group, then
    # stitch the group outputs back together along the channel axis.
    conv_outputs = [T.nnet.conv.conv2d(
        input=layer_input[:, i*n_group_channels:(i+1)*n_group_channels,
                          :, :],
        filters=w_shared[i*n_group_filters:(i+1)*n_group_filters,
                         :, :, :],
        filter_shape=group_filter_shape,
        image_shape=group_image_shape,
        subsample=stride
    ) for i in xrange(n_groups)]
    return T.concatenate(conv_outputs, axis=1)
|
heurezjusz/Athena
|
athenet/utils/misc.py
|
Python
|
bsd-2-clause
| 7,019 |
#pragma once
//========================================================================
// ObstacleEntity.h
//
// This code is part of Minotower Games Engine 1.0v
//
// (c) Copyright 2012 Muralev Evgeny
//========================================================================
#include "Entities\Entity.h"
// Static obstacle entity: behaves exactly like the base Entity and exists
// only to give obstacles their own concrete type in the entity system.
class ObstacleEntity : public Entity
{
public:
	// Pass-through constructor; no additional state is introduced.
	ObstacleEntity(EntityId id) : Entity(id) {}
};
|
Solidstatewater/Anubis-Engine
|
VSMDemo/Source/Game/Entities/ObstacleEntity/ObstacleEntity.h
|
C
|
bsd-2-clause
| 403 |
<?php
// Ad-hoc object: stdClass accepts new public properties on assignment.
$player = new stdClass();
$player->name = "Chuck";
$player->score = 0;
$player->score++;
print_r($player);
// The same shape modelled with an explicit class and default property
// values for comparison.
class Player {
    public $name = "Sally";
    public $score = 0;
}
$p2 = new Player();
$p2->score++;
print_r($p2);
|
csev/wa4e
|
code/objects/scratch.php
|
PHP
|
bsd-2-clause
| 236 |
using Example.Domain.Domain;
using LightBDD.Framework;
using LightBDD.Framework.Parameters;
using LightBDD.XUnit2;
namespace Example.LightBDD.XUnit2.Features
{
    // Step definitions for the Calculator feature. LightBDD composes these
    // methods into scenarios declared in the other half of this partial
    // class.
    public partial class Calculator_feature : FeatureFixture
    {
        private Calculator _calculator;
        // GIVEN step: creates a fresh calculator for the scenario.
        private void Given_a_calculator()
        {
            _calculator = new Calculator();
        }
        // THEN step: records the addition result for LightBDD to verify.
        private void Then_adding_X_to_Y_should_give_RESULT(int x, int y, Verifiable<int> result)
        {
            result.SetActual(() => _calculator.Add(x, y));
        }
        // THEN step: records the division result for LightBDD to verify.
        private void Then_dividing_X_by_Y_should_give_RESULT(int x, int y, Verifiable<int> result)
        {
            result.SetActual(() => _calculator.Divide(x, y));
        }
        // THEN step: records the multiplication result; negative inputs mark
        // the scenario as ignored because the feature does not support them.
        private void Then_multiplying_X_by_Y_should_give_RESULT(int x, int y, Verifiable<int> result)
        {
            if (x < 0 || y < 0)
                StepExecution.Current.IgnoreScenario("Negative numbers are not supported yet");
            result.SetActual(() => _calculator.Multiply(x, y));
        }
    }
}
|
Suremaker/LightBDD
|
examples/Example.LightBDD.XUnit2/Features/Calculator_feature.Steps.cs
|
C#
|
bsd-2-clause
| 1,059 |
```
x forwarding.inet4.pps
+ forwarding.inet6.pps
+--------------------------------------------------------------------------+
|+ + + x + + x x x x |
| |___________________A__M_______________||
||_______________M______A_____________________| |
+--------------------------------------------------------------------------+
N Min Max Median Avg Stddev
x 5 7749927 8192920.5 8063114 8030532.5 169100.49
+ 5 7564846 7980960.5 7705865 7761727.3 194270.78
Difference at 95.0% confidence
-268805 +/- 265613
-3.34729% +/- 3.26028%
(Student's t, pooled s = 182121)
```
```
x ../../../kern.random.harvest.mask/results/fbsd11.1/511-default.pps
+ forwarding.inet4.pps
+--------------------------------------------------------------------------+
|xxx x + ++++|
||A_| |
| |_AM_||
+--------------------------------------------------------------------------+
N Min Max Median Avg Stddev
x 5 3473029 3692134 3492298 3538525 90877.729
+ 5 7749927 8192920.5 8063114 8030532.5 169100.49
Difference at 95.0% confidence
4.49201e+06 +/- 197977
126.946% +/- 7.77288%
(Student's t, pooled s = 135746)
```
|
ocochard/netbenchs
|
Xeon_E5-2650v4_2x12Cores-Mellanox_ConnectX-3/forwarding-pf-ipfw/results/fbsd11.1-yandex/README.md
|
Markdown
|
bsd-2-clause
| 1,602 |
package com.danielwestheide.slickeffecttypes
import com.danielwestheide.slickeffecttypes.db.DatabaseModule
import com.danielwestheide.slickeffecttypes.statuses._
import scala.concurrent.Await
import scala.concurrent.duration._
// Demo entry point: creates the schema, posts a status, categorises it,
// and prints the author's recent statuses, blocking on each step.
object App extends App with SchemaCreation {
  val databaseModule = new DatabaseModule
  val statusModule = new StatusModule(databaseModule)
  createSchema(databaseModule)
  // Post a status and block until the insert completes.
  val status = Await.result(
    statusModule.statusService.postStatus(
      "danielw",
      "Played around with Slick effect types today",
      "scala"),
    3.seconds)
  // Attach an extra category to the freshly created status.
  val update = statusModule.statusService.categorize(status.id, "phantom-types")
  Await.result(update, 1.second)
  // Read back and print the author's first page of statuses.
  Await.result(statusModule.statusReadService
    .statusesByAuthor("danielw", 0, 10), 100.milliseconds) foreach println
}
|
dwestheide/slick-effect-types
|
src/main/scala/com/danielwestheide/slickeffecttypes/App.scala
|
Scala
|
bsd-2-clause
| 818 |
# Homebrew formula for NSD, NLnet Labs' authoritative-only DNS server.
class Nsd < Formula
  desc "Name server daemon"
  homepage "https://www.nlnetlabs.nl/projects/nsd/"
  url "https://www.nlnetlabs.nl/downloads/nsd/nsd-4.2.2.tar.gz"
  sha256 "83b333940a25fe6d453bcac6ea39edfa244612a879117c4a624c97eb250246fb"
  revision 1
  bottle do
    sha256 "ca69db82461ccb04f94857b03715b967f80896539d7abce2b2cebb4dc3124082" => :catalina
    sha256 "70d66d7396db21ace3541a6be4b556b9f339570e71fa817941df594c1205686a" => :mojave
    sha256 "9fbd67d1d673c34b06f0f597e45b02121e98d286733e9df1d553daf8292b9b25" => :high_sierra
    sha256 "9391533efaae88803ac27fc7f450523d7a40ab02b4da422e15f8c6bb925cc6cb" => :sierra
  end
  depends_on "libevent"
  depends_on "[email protected]"
  def install
    # Point the build at Homebrew's libevent and OpenSSL, and keep config
    # and runtime state under the Homebrew prefix.
    system "./configure", "--prefix=#{prefix}",
                          "--sysconfdir=#{etc}",
                          "--localstatedir=#{var}",
                          "--with-libevent=#{Formula["libevent"].opt_prefix}",
                          "--with-ssl=#{Formula["[email protected]"].opt_prefix}"
    system "make", "install"
  end
  test do
    # Smoke test: printing the version exercises the binary without
    # starting a daemon.
    system "#{sbin}/nsd", "-v"
  end
end
|
zmwangx/homebrew-core
|
Formula/nsd.rb
|
Ruby
|
bsd-2-clause
| 1,093 |
# Homebrew formula for ejabberd, an Erlang/OTP XMPP application server.
class Ejabberd < Formula
  desc "XMPP application server"
  homepage "https://www.ejabberd.im"
  url "https://static.process-one.net/ejabberd/downloads/20.03/ejabberd-20.03.tgz"
  sha256 "1a54ef1cdc391a25b81c3ed3e9e8aa43434a8d4549b4e86c54af3ecf121d0144"
  bottle do
    cellar :any
    sha256 "9518b32a672d6a756a29594892982c175a54c32aee9b5253c20e57dcf7760c44" => :catalina
    sha256 "74e8c73f8032241b193fb8e4b54a5164efb0aca5fb357857392feeee05d758f6" => :mojave
    sha256 "9b6bf1b79ee1d24bf099b93173d4b4c461be63834cd79185be1e3bd3884bd9e7" => :high_sierra
  end
  head do
    url "https://github.com/processone/ejabberd.git"
    depends_on "autoconf" => :build
    depends_on "automake" => :build
  end
  depends_on "erlang"
  depends_on "gd"
  depends_on "libyaml"
  depends_on "[email protected]"
  def install
    # Install the Erlang application under lib/ejabberd; man pages and the
    # ejabberdctl script go to the standard Homebrew locations.
    ENV["TARGET_DIR"] = ENV["DESTDIR"] = "#{lib}/ejabberd/erlang/lib/ejabberd-#{version}"
    ENV["MAN_DIR"] = man
    ENV["SBIN_DIR"] = sbin
    args = ["--prefix=#{prefix}",
            "--sysconfdir=#{etc}",
            "--localstatedir=#{var}",
            "--enable-pgsql",
            "--enable-mysql",
            "--enable-odbc",
            "--enable-pam"]
    # autogen.sh only exists in the git checkout used by --HEAD builds.
    system "./autogen.sh" if build.head?
    system "./configure", *args
    # Set CPP to work around cpp shim issue:
    # https://github.com/Homebrew/brew/issues/5153
    system "make", "CPP=clang -E"
    ENV.deparallelize
    system "make", "install"
    (etc/"ejabberd").mkpath
  end
  def post_install
    # Runtime state directories expected by ejabberdctl.
    (var/"lib/ejabberd").mkpath
    (var/"spool/ejabberd").mkpath
  end
  def caveats
    <<~EOS
      If you face nodedown problems, concat your machine name to:
         /private/etc/hosts
      after 'localhost'.
    EOS
  end
  plist_options :manual => "#{HOMEBREW_PREFIX}/sbin/ejabberdctl start"
  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
        <key>EnvironmentVariables</key>
        <dict>
          <key>HOME</key>
          <string>#{var}/lib/ejabberd</string>
        </dict>
        <key>Label</key>
        <string>#{plist_name}</string>
        <key>ProgramArguments</key>
        <array>
          <string>#{opt_sbin}/ejabberdctl</string>
          <string>start</string>
        </array>
        <key>RunAtLoad</key>
        <true/>
        <key>WorkingDirectory</key>
        <string>#{var}/lib/ejabberd</string>
      </dict>
      </plist>
    EOS
  end
  test do
    system sbin/"ejabberdctl", "ping"
  end
end
|
BrewTestBot/homebrew-core
|
Formula/ejabberd.rb
|
Ruby
|
bsd-2-clause
| 2,601 |
# encoding: utf-8
from django.db.utils import IntegrityError, DatabaseError
# Resolve the active user model: Django >= 1.5 supports swappable user
# models via get_user_model(); older versions only provide auth.User.
try:
    from django.contrib.auth import get_user_model
except ImportError:  # django < 1.5
    from django.contrib.auth.models import User
else:
    User = get_user_model()
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: adds a unique constraint on PollAnswerUser
    (poll_answer, user) so a user can vote for a given poll answer at
    most once.
    """

    def forwards(self, orm):
        # Adding unique constraint on 'PollAnswerUser', fields ['poll_answer', 'user']
        db.create_unique('pybb_pollansweruser', ['poll_answer_id', 'user_id'])

    def backwards(self, orm):
        # Removing unique constraint on 'PollAnswerUser', fields ['poll_answer', 'user']
        db.delete_unique('pybb_pollansweruser', ['poll_answer_id', 'user_id'])

    # Frozen ORM snapshot generated by South; do not edit by hand. User
    # references are parameterised so the migration works with a custom
    # user model.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'pybb.attachment': {
            'Meta': {'object_name': 'Attachment'},
            'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['pybb.Post']"}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        },
        'pybb.category': {
            'Meta': {'ordering': "['position']", 'object_name': 'Category'},
            'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
        },
        'pybb.forum': {
            'Meta': {'ordering': "['position']", 'object_name': 'Forum'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'forums'", 'to': "orm['pybb.Category']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'headline': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name), 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'readed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'readed_forums'", 'symmetrical': 'False', 'through': "orm['pybb.ForumReadTracker']", 'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name)}),
            'topic_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        'pybb.forumreadtracker': {
            'Meta': {'object_name': 'ForumReadTracker'},
            'forum': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pybb.Forum']", 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'time_stamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name)})
        },
        'pybb.pollanswer': {
            'Meta': {'object_name': 'PollAnswer'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'text': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'topic': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_answers'", 'to': "orm['pybb.Topic']"})
        },
        'pybb.pollansweruser': {
            'Meta': {'unique_together': "(('poll_answer', 'user'),)", 'object_name': 'PollAnswerUser'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'poll_answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'users'", 'to': "orm['pybb.PollAnswer']"}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_answers'", 'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name)})
        },
        'pybb.post': {
            'Meta': {'ordering': "['created']", 'object_name': 'Post'},
            'body': ('django.db.models.fields.TextField', [], {}),
            'body_html': ('django.db.models.fields.TextField', [], {}),
            'body_text': ('django.db.models.fields.TextField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'on_moderation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'topic': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['pybb.Topic']"}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name)}),
            'user_ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15', 'blank': 'True'})
        },
        'pybb.profile': {
            'Meta': {'object_name': 'Profile'},
            'autosubscribe': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'avatar': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'default': "'ru-RU'", 'max_length': '10', 'blank': 'True'}),
            'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'show_signatures': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'signature': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'blank': 'True'}),
            'signature_html': ('django.db.models.fields.TextField', [], {'max_length': '1054', 'blank': 'True'}),
            'time_zone': ('django.db.models.fields.FloatField', [], {'default': '3.0'}),
            'user': ('annoying.fields.AutoOneToOneField', [], {'related_name': "'pybb_profile'", 'unique': 'True', 'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name)})
        },
        'pybb.topic': {
            'Meta': {'ordering': "['-created']", 'object_name': 'Topic'},
            'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'forum': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'topics'", 'to': "orm['pybb.Forum']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'on_moderation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'poll_question': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'poll_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'readed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'readed_topics'", 'symmetrical': 'False', 'through': "orm['pybb.TopicReadTracker']", 'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name)}),
            'sticky': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'subscribers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'subscriptions'", 'blank': 'True', 'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name)}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name)}),
            'views': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
        },
        'pybb.topicreadtracker': {
            'Meta': {'object_name': 'TopicReadTracker'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'time_stamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'topic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['pybb.Topic']", 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s.%s']"% (User._meta.app_label, User._meta.object_name)})
        }
    }
    complete_apps = ['pybb']
|
zekone/dj_pybb
|
pybb/migrations/0023_auto__add_unique_pollansweruser_poll_answer_user.py
|
Python
|
bsd-2-clause
| 12,958 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.