/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spi.discovery.tcp.ipfinder.kubernetes;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.resources.LoggerResource;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinderAdapter;
import javax.net.ssl.*;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* IP finder for automatic lookup of Ignite nodes running in a Kubernetes environment. All Ignite nodes have to be
* deployed as Kubernetes pods in order to be discovered. An application that uses Ignite client nodes as a gateway to
* the cluster is required to be containerized as well. Applications and Ignite nodes running outside of Kubernetes
* will not be able to reach their containerized counterparts.
* <p>
* The implementation is based on a distinct Kubernetes service that has to be created and deployed prior to the
* Ignite nodes' startup. The service maintains a list of the endpoints (internal IP addresses) of all containerized
* Ignite pods running so far. The name of the service must be equal to the name set with {@link #setServiceName(String)},
* which is 'ignite' by default.
* <p>
* As for Ignite pods, it's recommended to label them in such a way that the service can use the label in its selector
* configuration to exclude endpoints of irrelevant Kubernetes pods running in parallel.
* <p>
* The IP finder, in its turn, calls this service to retrieve the Ignite pods' IP addresses. The port will be
* either the one that is set with {@link TcpDiscoverySpi#setLocalPort(int)} or {@link TcpDiscoverySpi#DFLT_PORT}.
* Make sure that all Ignite pods use the same discovery port, otherwise they will not be able to discover each
* other using this IP finder.
* <h2 class="header">Optional configuration</h2>
* <ul>
* <li>The Kubernetes service name for IP addresses lookup (see {@link #setServiceName(String)})</li>
* <li>The Kubernetes service namespace for IP addresses lookup (see {@link #setNamespace(String)})</li>
* <li>The host name of the Kubernetes API server (see {@link #setMasterUrl(String)})</li>
* <li>The path to the service token (see {@link #setAccountToken(String)})</li>
* <li>Whether to include not-ready pods (see {@link #includeNotReadyAddresses(boolean)})</li>
* </ul>
* <p>
* Both {@link #registerAddresses(Collection)} and {@link #unregisterAddresses(Collection)} have no effect.
* <p>
* Note that this IP finder works only when it is used in a Kubernetes environment. Choose another implementation of
* {@link org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder} for local or home network tests.
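* <h2 class="header">Usage example</h2>
* A minimal configuration sketch (the values shown are the defaults and are purely illustrative):
* <pre>{@code
* TcpDiscoveryKubernetesIpFinder ipFinder = new TcpDiscoveryKubernetesIpFinder();
* ipFinder.setServiceName("ignite");
* ipFinder.setNamespace("default");
*
* TcpDiscoverySpi discoverySpi = new TcpDiscoverySpi();
* discoverySpi.setIpFinder(ipFinder);
* }</pre>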
*/
public class TcpDiscoveryKubernetesIpFinder extends TcpDiscoveryIpFinderAdapter {
/** Grid logger. */
@LoggerResource
private IgniteLogger log;
/** Init routine guard. */
private final AtomicBoolean initGuard = new AtomicBoolean();
/** Init routine latch. */
private final CountDownLatch initLatch = new CountDownLatch(1);
/** Trust manager. */
private TrustManager[] trustAll = new TrustManager[] {
new X509TrustManager() {
@Override public void checkServerTrusted(X509Certificate[] certs, String authType) {}
@Override public void checkClientTrusted(X509Certificate[] certs, String authType) {}
@Override public X509Certificate[] getAcceptedIssuers() { return null; }
}
};
/** Host verifier. */
private HostnameVerifier trustAllHosts = new HostnameVerifier() {
@Override public boolean verify(String hostname, SSLSession session) {
return true;
}
};
/** Ignite's Kubernetes Service name. */
private String serviceName = "ignite";
/** Kubernetes namespace the Ignite service belongs to. */
private String namespace = "default";
/** Kubernetes API server URL in a string form. */
private String master = "https://kubernetes.default.svc.cluster.local:443";
/** Account token location. */
private String accountToken = "/var/run/secrets/kubernetes.io/serviceaccount/token";
/** Kubernetes API server URL. */
private URL url;
/** SSL context. */
private SSLContext ctx;
/** Whether addresses of pods in not-ready state should be included. */
private boolean includeNotReadyAddresses;
/**
* Creates an instance of Kubernetes IP finder.
*/
public TcpDiscoveryKubernetesIpFinder() {
setShared(true);
}
/** {@inheritDoc} */
@Override public Collection<InetSocketAddress> getRegisteredAddresses() throws IgniteSpiException {
init();
Collection<InetSocketAddress> addrs = new ArrayList<>();
try {
if (log.isDebugEnabled())
log.debug("Getting Apache Ignite endpoints from: " + url);
HttpsURLConnection conn = (HttpsURLConnection)url.openConnection();
conn.setHostnameVerifier(trustAllHosts);
conn.setSSLSocketFactory(ctx.getSocketFactory());
conn.addRequestProperty("Authorization", "Bearer " + serviceAccountToken(accountToken));
// Sending the request and processing a response.
ObjectMapper mapper = new ObjectMapper();
Endpoints endpoints = mapper.readValue(conn.getInputStream(), Endpoints.class);
if (endpoints != null && endpoints.subsets != null && !endpoints.subsets.isEmpty()) {
for (Subset subset : endpoints.subsets) {
addrs.addAll(parseAddresses(subset.addresses));
if (includeNotReadyAddresses)
addrs.addAll(parseAddresses(subset.notReadyAddresses));
}
}
}
catch (Exception e) {
throw new IgniteSpiException("Failed to retrieve Ignite pods IP addresses.", e);
}
return addrs;
}
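/**
* Converts the endpoint addresses returned by the Kubernetes API into socket addresses.
*
* @param addresses Endpoint addresses reported by the lookup service (can be {@code null}).
* @return Socket addresses with port 0; the actual discovery port is determined by {@link TcpDiscoverySpi}
* as described in the class documentation.
*/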
private Collection<InetSocketAddress> parseAddresses(List<Address> addresses) {
Collection<InetSocketAddress> addrs = new ArrayList<>();
if (addresses != null && !addresses.isEmpty()) {
for (Address address : addresses) {
addrs.add(new InetSocketAddress(address.ip, 0));
if (log.isDebugEnabled())
log.debug("Added an address to the list: " + address.ip);
}
}
return addrs;
}
/** {@inheritDoc} */
@Override public void registerAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
// No-op
}
/** {@inheritDoc} */
@Override public void unregisterAddresses(Collection<InetSocketAddress> addrs) throws IgniteSpiException {
// No-op
}
/**
* Sets the name of Kubernetes service for Ignite pods' IP addresses lookup. The name of the service must be equal
* to the name set in service's Kubernetes configuration. If this parameter is not changed then the name of the
* service has to be set to 'ignite' in the corresponding Kubernetes configuration.
*
* @param service Kubernetes service name for IP addresses lookup. If it's not set then 'ignite' is used by default.
*/
public void setServiceName(String service) {
this.serviceName = service;
}
/**
* Sets the namespace the Kubernetes service belongs to. By default, the service is assumed to be running in the
* Kubernetes 'default' namespace.
*
* @param namespace The Kubernetes service namespace for IP addresses lookup.
*/
public void setNamespace(String namespace) {
this.namespace = namespace;
}
/**
* Sets the host name of the Kubernetes API server. By default the following host name is used:
* 'https://kubernetes.default.svc.cluster.local:443'.
*
* @param master The host name of the Kubernetes API server.
*/
public void setMasterUrl(String master) {
this.master = master;
}
/**
* Specifies the path to the service token file. By default the following account token is used:
* '/var/run/secrets/kubernetes.io/serviceaccount/token'.
*
* @param accountToken The path to the service token file.
*/
public void setAccountToken(String accountToken) {
this.accountToken = accountToken;
}
/**
* Specifies whether addresses of not-ready pods should be included. Default is false.
*
* @param includeNotReadyAddresses Flag to include not-ready pods.
*/
public void includeNotReadyAddresses(boolean includeNotReadyAddresses) {
this.includeNotReadyAddresses = includeNotReadyAddresses;
}
/**
* Kubernetes IP finder initialization.
*
* @throws IgniteSpiException In case of error.
*/
private void init() throws IgniteSpiException {
if (initGuard.compareAndSet(false, true)) {
if (serviceName == null || serviceName.isEmpty() ||
namespace == null || namespace.isEmpty() ||
master == null || master.isEmpty() ||
accountToken == null || accountToken.isEmpty()) {
throw new IgniteSpiException(
"One or more configuration parameters are invalid [setServiceName=" +
serviceName + ", setNamespace=" + namespace + ", setMasterUrl=" +
master + ", setAccountToken=" + accountToken + "]");
}
try {
// Preparing the URL and SSL context to be used for connection purposes.
String path = String.format("/api/v1/namespaces/%s/endpoints/%s", namespace, serviceName);
url = new URL(master + path);
ctx = SSLContext.getInstance("SSL");
ctx.init(null, trustAll, new SecureRandom());
}
catch (Exception e) {
throw new IgniteSpiException("Failed to connect to Ignite's Kubernetes Service.", e);
}
finally {
initLatch.countDown();
}
}
else {
try {
U.await(initLatch);
}
catch (IgniteInterruptedCheckedException e) {
throw new IgniteSpiException("Thread has been interrupted.", e);
}
if (url == null || ctx == null)
throw new IgniteSpiException("IP finder has not been initialized properly.");
}
}
/**
* Reads content of the service account token file.
*
* @param file The path to the service account token.
* @return Service account token.
*/
private String serviceAccountToken(String file) {
try {
return new String(Files.readAllBytes(Paths.get(file)));
} catch (IOException e) {
throw new IgniteSpiException("Failed to load services account token [setAccountToken= " + file + "]", e);
}
}
/**
* Object used by Jackson for processing of Kubernetes lookup service's response.
*/
@JsonIgnoreProperties(ignoreUnknown = true)
private static class Address {
/** */
public String ip;
}
/**
* Object used by Jackson for processing of Kubernetes lookup service's response.
*/
@JsonIgnoreProperties(ignoreUnknown = true)
private static class Subset {
/** */
public List<Address> addresses;
/** */
public List<Address> notReadyAddresses;
}
/**
* Object used by Jackson for processing of Kubernetes lookup service's response.
*/
@JsonIgnoreProperties(ignoreUnknown = true)
private static class Endpoints {
/** */
public List<Subset> subsets;
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.orc.DwrfPageSourceFactory;
import com.facebook.presto.hive.orc.DwrfRecordCursorProvider;
import com.facebook.presto.hive.orc.OrcPageSourceFactory;
import com.facebook.presto.hive.orc.OrcRecordCursorProvider;
import com.facebook.presto.hive.rcfile.RcFilePageSourceFactory;
import com.facebook.presto.spi.ConnectorPageSource;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.TupleDomain;
import com.facebook.presto.spi.type.TimeZoneKey;
import com.facebook.presto.type.TypeRegistry;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
import org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat;
import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.joda.time.DateTimeZone;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Properties;
import java.util.TimeZone;
import static com.facebook.presto.hive.HiveTestUtils.getTypes;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
import static java.util.Locale.ENGLISH;
import static java.util.stream.Collectors.toList;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_INPUT_FORMAT;
import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB;
import static org.testng.Assert.assertEquals;
public class TestHiveFileFormats
extends AbstractTestHiveFileFormats
{
private static final TimeZoneKey TIME_ZONE_KEY = TimeZoneKey.getTimeZoneKey(DateTimeZone.getDefault().getID());
private static final ConnectorSession SESSION = new ConnectorSession("user", TIME_ZONE_KEY, ENGLISH, System.currentTimeMillis(), null);
private static final TypeRegistry TYPE_MANAGER = new TypeRegistry();
@BeforeClass(alwaysRun = true)
public void setUp()
throws Exception
{
// ensure the expected timezone is configured for this VM
assertEquals(TimeZone.getDefault().getID(),
"Asia/Katmandu",
"Timezone not configured correctly. Add -Duser.timezone=Asia/Katmandu to your JVM arguments");
}
@Test
public void testRCText()
throws Exception
{
HiveOutputFormat<?, ?> outputFormat = new RCFileOutputFormat();
InputFormat<?, ?> inputFormat = new RCFileInputFormat<>();
@SuppressWarnings("deprecation")
SerDe serde = new ColumnarSerDe();
File file = File.createTempFile("presto_test", "rc-text");
try {
FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS);
testCursorProvider(new ColumnarTextHiveRecordCursorProvider(), split, inputFormat, serde, TEST_COLUMNS);
testCursorProvider(new GenericHiveRecordCursorProvider(), split, inputFormat, serde, TEST_COLUMNS);
}
finally {
//noinspection ResultOfMethodCallIgnored
file.delete();
}
}
@Test(enabled = false)
public void testRcTextPageSource()
throws Exception
{
HiveOutputFormat<?, ?> outputFormat = new RCFileOutputFormat();
InputFormat<?, ?> inputFormat = new RCFileInputFormat<>();
@SuppressWarnings("deprecation")
SerDe serde = new ColumnarSerDe();
File file = File.createTempFile("presto_test", "rc-text");
file.delete();
try {
FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS);
testPageSourceFactory(new RcFilePageSourceFactory(TYPE_MANAGER), split, inputFormat, serde, TEST_COLUMNS);
}
finally {
//noinspection ResultOfMethodCallIgnored
file.delete();
}
}
@Test
public void testRCBinary()
throws Exception
{
HiveOutputFormat<?, ?> outputFormat = new RCFileOutputFormat();
InputFormat<?, ?> inputFormat = new RCFileInputFormat<>();
@SuppressWarnings("deprecation")
SerDe serde = new LazyBinaryColumnarSerDe();
File file = File.createTempFile("presto_test", "rc-binary");
try {
FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS);
testCursorProvider(new ColumnarBinaryHiveRecordCursorProvider(), split, inputFormat, serde, TEST_COLUMNS);
testCursorProvider(new GenericHiveRecordCursorProvider(), split, inputFormat, serde, TEST_COLUMNS);
}
finally {
//noinspection ResultOfMethodCallIgnored
file.delete();
}
}
@Test(enabled = false)
public void testRcBinaryPageSource()
throws Exception
{
HiveOutputFormat<?, ?> outputFormat = new RCFileOutputFormat();
InputFormat<?, ?> inputFormat = new RCFileInputFormat<>();
@SuppressWarnings("deprecation")
SerDe serde = new LazyBinaryColumnarSerDe();
File file = File.createTempFile("presto_test", "rc-binary");
file.delete();
try {
FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS);
testPageSourceFactory(new RcFilePageSourceFactory(TYPE_MANAGER), split, inputFormat, serde, TEST_COLUMNS);
}
finally {
//noinspection ResultOfMethodCallIgnored
file.delete();
}
}
@Test
public void testOrc()
throws Exception
{
HiveOutputFormat<?, ?> outputFormat = new org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat();
InputFormat<?, ?> inputFormat = new org.apache.hadoop.hive.ql.io.orc.OrcInputFormat();
@SuppressWarnings("deprecation")
SerDe serde = new org.apache.hadoop.hive.ql.io.orc.OrcSerde();
File file = File.createTempFile("presto_test", "orc");
file.delete();
try {
FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS);
testCursorProvider(new OrcRecordCursorProvider(), split, inputFormat, serde, TEST_COLUMNS);
}
finally {
//noinspection ResultOfMethodCallIgnored
file.delete();
}
}
@Test
public void testOrcDataStream()
throws Exception
{
HiveOutputFormat<?, ?> outputFormat = new org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat();
InputFormat<?, ?> inputFormat = new org.apache.hadoop.hive.ql.io.orc.OrcInputFormat();
@SuppressWarnings("deprecation")
SerDe serde = new org.apache.hadoop.hive.ql.io.orc.OrcSerde();
File file = File.createTempFile("presto_test", "orc");
file.delete();
try {
FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS);
testPageSourceFactory(new OrcPageSourceFactory(TYPE_MANAGER), split, inputFormat, serde, TEST_COLUMNS);
}
finally {
//noinspection ResultOfMethodCallIgnored
file.delete();
}
}
@Test
public void testParquet()
throws Exception
{
List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, new Predicate<TestColumn>()
{
@Override
public boolean apply(TestColumn testColumn)
{
// Write of complex hive data to Parquet is broken
if (testColumn.getName().equals("t_complex")) {
return false;
}
// Parquet does not support DATE, TIMESTAMP, or BINARY
ObjectInspector objectInspector = testColumn.getObjectInspector();
return !hasType(objectInspector, PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP, PrimitiveCategory.BINARY);
}
}));
HiveOutputFormat<?, ?> outputFormat = new MapredParquetOutputFormat();
InputFormat<?, ?> inputFormat = new MapredParquetInputFormat();
@SuppressWarnings("deprecation")
SerDe serde = new ParquetHiveSerDe();
File file = File.createTempFile("presto_test", "parquet");
file.delete();
try {
FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns);
HiveRecordCursorProvider cursorProvider = new ParquetRecordCursorProvider();
testCursorProvider(cursorProvider, split, inputFormat, serde, testColumns);
}
finally {
//noinspection ResultOfMethodCallIgnored
file.delete();
}
}
@Test
public void testDwrf()
throws Exception
{
List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, new Predicate<TestColumn>()
{
@Override
public boolean apply(TestColumn testColumn)
{
ObjectInspector objectInspector = testColumn.getObjectInspector();
return !hasType(objectInspector, PrimitiveCategory.DATE);
}
}));
HiveOutputFormat<?, ?> outputFormat = new com.facebook.hive.orc.OrcOutputFormat();
InputFormat<?, ?> inputFormat = new com.facebook.hive.orc.OrcInputFormat();
@SuppressWarnings("deprecation")
SerDe serde = new com.facebook.hive.orc.OrcSerde();
File file = File.createTempFile("presto_test", "dwrf");
file.delete();
try {
FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns);
testCursorProvider(new DwrfRecordCursorProvider(), split, inputFormat, serde, testColumns);
}
finally {
//noinspection ResultOfMethodCallIgnored
file.delete();
}
}
@Test
public void testDwrfDataStream()
throws Exception
{
List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, new Predicate<TestColumn>()
{
@Override
public boolean apply(TestColumn testColumn)
{
ObjectInspector objectInspector = testColumn.getObjectInspector();
return !hasType(objectInspector, PrimitiveCategory.DATE);
}
}));
HiveOutputFormat<?, ?> outputFormat = new com.facebook.hive.orc.OrcOutputFormat();
InputFormat<?, ?> inputFormat = new com.facebook.hive.orc.OrcInputFormat();
@SuppressWarnings("deprecation")
SerDe serde = new com.facebook.hive.orc.OrcSerde();
File file = File.createTempFile("presto_test", "dwrf");
file.delete();
try {
FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns);
testPageSourceFactory(new DwrfPageSourceFactory(TYPE_MANAGER), split, inputFormat, serde, testColumns);
}
finally {
//noinspection ResultOfMethodCallIgnored
file.delete();
}
}
private void testCursorProvider(HiveRecordCursorProvider cursorProvider,
FileSplit split,
InputFormat<?, ?> inputFormat,
@SuppressWarnings("deprecation") SerDe serde,
List<TestColumn> testColumns)
throws IOException
{
Properties splitProperties = new Properties();
splitProperties.setProperty(FILE_INPUT_FORMAT, inputFormat.getClass().getName());
splitProperties.setProperty(SERIALIZATION_LIB, serde.getClass().getName());
splitProperties.setProperty("columns", Joiner.on(',').join(transform(filter(testColumns, not(TestColumn::isPartitionKey)), TestColumn::getName)));
splitProperties.setProperty("columns.types", Joiner.on(',').join(transform(filter(testColumns, not(TestColumn::isPartitionKey)), TestColumn::getType)));
List<HivePartitionKey> partitionKeys = testColumns.stream()
.filter(TestColumn::isPartitionKey)
.map(input -> new HivePartitionKey(input.getName(), HiveType.getHiveType(input.getObjectInspector()), (String) input.getWriteValue()))
.collect(toList());
HiveRecordCursor cursor = cursorProvider.createHiveRecordCursor(
"test",
new Configuration(),
SESSION,
split.getPath(),
split.getStart(),
split.getLength(),
splitProperties,
getColumnHandles(testColumns),
partitionKeys,
TupleDomain.<HiveColumnHandle>all(),
DateTimeZone.getDefault(),
TYPE_MANAGER).get();
checkCursor(cursor, testColumns);
}
private void testPageSourceFactory(HivePageSourceFactory sourceFactory, FileSplit split, InputFormat<?, ?> inputFormat, SerDe serde, List<TestColumn> testColumns)
throws IOException
{
Properties splitProperties = new Properties();
splitProperties.setProperty(FILE_INPUT_FORMAT, inputFormat.getClass().getName());
splitProperties.setProperty(SERIALIZATION_LIB, serde.getClass().getName());
splitProperties.setProperty("columns", Joiner.on(',').join(transform(filter(testColumns, not(TestColumn::isPartitionKey)), TestColumn::getName)));
splitProperties.setProperty("columns.types", Joiner.on(',').join(transform(filter(testColumns, not(TestColumn::isPartitionKey)), TestColumn::getType)));
List<HivePartitionKey> partitionKeys = testColumns.stream()
.filter(TestColumn::isPartitionKey)
.map(input -> new HivePartitionKey(input.getName(), HiveType.getHiveType(input.getObjectInspector()), (String) input.getWriteValue()))
.collect(toList());
List<HiveColumnHandle> columnHandles = getColumnHandles(testColumns);
ConnectorPageSource pageSource = sourceFactory.createPageSource(
new Configuration(),
SESSION,
split.getPath(),
split.getStart(),
split.getLength(),
splitProperties,
columnHandles,
partitionKeys,
TupleDomain.<HiveColumnHandle>all(),
DateTimeZone.getDefault()
).get();
checkPageSource(pageSource, testColumns, getTypes(columnHandles));
}
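/**
* Recursively checks whether the given object inspector, or any nested list element, map key/value,
* or struct field inspector, has one of the given primitive categories.
*/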
public static boolean hasType(ObjectInspector objectInspector, PrimitiveCategory... types)
{
if (objectInspector instanceof PrimitiveObjectInspector) {
PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector) objectInspector;
PrimitiveCategory primitiveCategory = primitiveInspector.getPrimitiveCategory();
for (PrimitiveCategory type : types) {
if (primitiveCategory == type) {
return true;
}
}
return false;
}
if (objectInspector instanceof ListObjectInspector) {
ListObjectInspector listInspector = (ListObjectInspector) objectInspector;
return hasType(listInspector.getListElementObjectInspector(), types);
}
if (objectInspector instanceof MapObjectInspector) {
MapObjectInspector mapInspector = (MapObjectInspector) objectInspector;
return hasType(mapInspector.getMapKeyObjectInspector(), types) ||
hasType(mapInspector.getMapValueObjectInspector(), types);
}
if (objectInspector instanceof StructObjectInspector) {
for (StructField field : ((StructObjectInspector) objectInspector).getAllStructFieldRefs()) {
if (hasType(field.getFieldObjectInspector(), types)) {
return true;
}
}
return false;
}
throw new IllegalArgumentException("Unknown object inspector type " + objectInspector);
}
}
// Copyright 2011 The Whiley Project Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package wycc.io;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.math.BigInteger;
import java.nio.charset.CharsetDecoder;
import java.util.ArrayList;
import java.util.List;
/**
* <p>
* Provides a generic mechanism for turning an input file or string into a list
* of tokens. Every token records the text that constituted it, as well as its
* start and end position in the stream.
* </p>
* <p>
* There are several use cases for this class. The most important use case is
* obviously lexing a source file so that it can be fed into a parser and then
* into the remainder of a compiler. However, other use cases include pretty
* printers which read a stream and format it according to certain rules for
* indentation, etc.
* </p>
* <p>
* This class also provides several standard tokens and rules which are common
* across the various languages used within the Whiley compiler system.
* </p>
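* <p>
* A minimal usage sketch (the rule selection and the input below are illustrative only):
* </p>
* <pre>{@code
* Rule[] rules = {
*     new AbstractLexer.WhitespaceRule(),
*     new AbstractLexer.OperatorRule(new String[]{"==", "+", "-"}),
*     new AbstractLexer.DecimalRule(),
*     new AbstractLexer.IdentifierRule()
* };
* AbstractLexer lexer = new AbstractLexer(rules, new java.io.StringReader("x == 1"));
* List<Token> tokens = lexer.scan();
* // tokens: Identifier("x"), Spaces, Operator("=="), Spaces, Number("1"), NewLine
* }</pre>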
*
* @author David J. Pearce
*
*/
public class AbstractLexer {
/**
* The input text (the source is read fully into this buffer).
*/
private final StringBuffer input;
/**
* Lexing rules which determine how the input stream is broken down into
* tokens.
*/
private final Rule[] rules;
/**
* Construct from an input stream using UTF-8 as the default character
* encoding.
*
* @param instream
* @throws IOException
*/
public AbstractLexer(Rule[] rules, InputStream instream) throws IOException {
this(rules, new InputStreamReader(instream, "UTF-8"));
}
/**
* Construct from an input stream using a given character set decoder.
*
* @param instream
* @throws IOException
*/
public AbstractLexer(Rule[] rules, InputStream instream, CharsetDecoder decoder)
throws IOException {
this(rules, new InputStreamReader(instream, decoder));
}
/**
* Construct from a reader (which already has some notion of character
* encoding included).
*
* @param reader
* @throws IOException
*/
public AbstractLexer(Rule[] rules, Reader reader) throws IOException {
BufferedReader in = new BufferedReader(reader);
StringBuffer text = new StringBuffer();
java.lang.String tmp;
while ((tmp = in.readLine()) != null) {
text.append(tmp);
text.append("\n");
}
this.input = text;
this.rules = rules;
}
/**
* Scan the given input stream and produce a list of tokens, or an error.
*
* @return
*/
public List<Token> scan() throws Error {
ArrayList<Token> tokens = new ArrayList<>();
int pos = 0;
while (pos < input.length()) {
int start = pos;
for (int i = 0; i != rules.length; ++i) {
Rule rule = rules[i];
int left = input.length() - pos;
if (left >= rule.lookahead()) {
Token t = rule.match(input, pos);
if (t != null) {
tokens.add(t);
pos = pos + t.text.length();
break; // restart rule application loop
}
}
}
if(pos == start) {
throw new Error("unrecognised token encountered (" + input.charAt(pos) + ")",pos);
}
}
return tokens;
}
/**
* A lexer rule is responsible for matching a given character sequence and
* turning it into a token.
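*
* <p>
* A minimal sketch of a custom rule (the token choice is illustrative):
* </p>
* <pre>{@code
* Rule semicolonRule = new Rule() {
*     public int lookahead() { return 1; }
*     public Token match(StringBuffer buffer, int start) {
*         return buffer.charAt(start) == ';' ? new Token.Operator(";", start) : null;
*     }
* };
* }</pre>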
*
* @author David J. Pearce
*
*/
public static abstract class Rule {
/**
* Determines the maximum amount of lookahead required by this rule. The
* system will guarantee there is enough lookahead space in the input
* before calling the rule.
*
* @return
*/
public abstract int lookahead();
/**
* Attempt to match this rule at a given position in the input stream.
* Observe that upon a successful match (i.e. when the returned value is
* not <code>null</code>) the stream will be advanced to
* <code>Token.end + 1</code>.
*
* @param buffer
* @param start
* @return
*/
public abstract Token match(StringBuffer buffer, int start) throws Error;
}
// ===================================================================
// Standard Rules
// ===================================================================
/**
* Standard rule for parsing Whitespace.
*
* @author David J. Pearce
*
*/
public static class WhitespaceRule extends Rule {
@Override
public int lookahead() {
return 1;
}
@Override
public Token match(StringBuffer input, int pos) {
int start = pos;
if(pos < input.length()) {
// First, look for new lines
if(input.charAt(pos) == '\n') {
pos++;
return new Token.NewLine("\n",start);
} else if((pos+1) < input.length() && input.charAt(pos) == '\r' && input.charAt(pos+1) == '\n') {
return new Token.NewLine("\r\n",start);
}
// Second, look for spaces
if(input.charAt(pos) == ' ') {
while (pos < input.length()
&& input.charAt(pos) == ' ') {
pos++;
}
return new Token.Spaces(input.substring(start, pos), start);
}
// Third, look for tabs
if(input.charAt(pos) == '\t') {
while (pos < input.length()
&& input.charAt(pos) == '\t') {
pos++;
}
return new Token.Tabs(input.substring(start, pos), start);
}
}
return null;
}
}
/**
* Standard rule for parsing Operators.
*
* @author David J. Pearce
*
*/
public static class OperatorRule extends Rule {
private final java.lang.String[] operators;
private final int minLookahead;
public OperatorRule(java.lang.String[] operators) {
this.operators = operators;
int min = Integer.MAX_VALUE;
for(int i=0;i!=operators.length;++i) {
min = Math.min(operators[i].length(),min);
}
this.minLookahead = min;
}
@Override
public int lookahead() {
return minLookahead;
}
@Override
public Token match(StringBuffer input, int pos) {
int start = pos;
int nRemaining = input.length() - pos;
for (int i = 0; i != operators.length; ++i) {
java.lang.String operator = operators[i];
if (operator.length() <= nRemaining
&& matchString(input, pos, operator)) {
return new Token.Operator(operator, start);
}
}
return null;
}
}
/**
* Standard rule for parsing keywords.
*
* @author David J. Pearce
*
*/
public static class KeywordRule extends Rule {
private final java.lang.String[] keywords;
private final int minLookahead;
public KeywordRule(java.lang.String[] keywords) {
this.keywords = keywords;
int min = Integer.MAX_VALUE;
for(int i=0;i!=keywords.length;++i) {
min = Math.min(keywords[i].length(),min);
}
this.minLookahead = min;
}
@Override
public int lookahead() {
return minLookahead;
}
@Override
public Token match(StringBuffer input, int pos) {
int start = pos;
while (pos < input.length()
&& Character.isLetter(input.charAt(pos))) {
pos = pos + 1;
}
java.lang.String word = input.substring(start, pos);
for (int i = 0; i != keywords.length; ++i) {
java.lang.String keyword = keywords[i];
if (keyword.equals(word)) {
return new Token.Keyword(keyword, start);
}
}
return null;
}
}
/**
* A standard rule for parsing identifiers. Identifiers may not start with a
* numeric character or an operator, but they may start with e.g. '$' or
* '_' and, obviously, any alphabetic character.
*
* @author David J. Pearce
*
*/
public static class IdentifierRule extends Rule {
@Override
public int lookahead() {
return 1;
}
@Override
public Token match(StringBuffer input, int pos) {
int start = pos;
if (!Character.isJavaIdentifierStart(input.charAt(pos))) {
return null;
}
pos = pos + 1;
while (pos < input.length()
&& Character.isJavaIdentifierPart(input.charAt(pos))) {
pos++;
}
java.lang.String text = input.substring(start, pos);
return new Token.Identifier(text, start);
}
}
/**
* A standard rule for parsing strings which begin with quote marks. For
* example, <code>"Hello World"</code>. This rule correctly handles escape
* sequences, such as "\n", "\t" and "\\", etc.
*
* @author David J. Pearce
*
*/
public static class StringRule extends Rule {
@Override
public int lookahead() {
return 1;
}
@Override
public Token match(StringBuffer input, int pos) throws Error {
if(input.charAt(pos) != '\"') { return null; }
int start = pos;
boolean flag = false;
pos ++;
while(pos < input.length()) {
char c = input.charAt(pos);
if (flag) {
// skip the character following an escape backslash
flag = false;
pos = pos + 1;
continue;
}
if (c == '\\') {
flag = true;
pos = pos + 1;
continue;
}
if (c == '\"') {
java.lang.String v = input.substring(start,++pos);
return new Token.String(scan(v, pos - v.length()),start);
}
pos = pos + 1;
}
throw new Error("unexpected end-of-string",pos-1);
}
private java.lang.String scan(java.lang.String v, int start) throws Error {
// Second, step through the string and replace escaped characters
int end = v.length()-1;
for (int i = 1; i < end; i++) {
if (v.charAt(i) == '\\') {
if (v.length() <= i + 1) {
throw new Error("unexpected end-of-string",start+i);
} else {
char replace = 0;
int len = 2;
switch (v.charAt(i + 1)) {
case 'b' :
replace = '\b';
break;
case 't' :
replace = '\t';
break;
case 'n' :
replace = '\n';
break;
case 'f' :
replace = '\f';
break;
case 'r' :
replace = '\r';
break;
case '"' :
replace = '\"';
break;
case '\'' :
replace = '\'';
break;
case '\\' :
replace = '\\';
break;
case 'u' :
len = 6; // unicode escapes are six digits long,
// including "slash u"
java.lang.String unicode = v.substring(i + 2, i + 6);
replace = (char) Integer.parseInt(unicode, 16); // unicode
break;
default :
throw new Error("unknown escape character",start+i);
}
v = v.substring(0, i) + replace + v.substring(i + len);
}
}
}
return v;
}
}
/**
* A standard rule for parsing characters which begin with single quote
* marks. For example, <code>'H'</code>. This rule correctly handles escape
* sequences, such as '\n', '\t' and '\\', etc.
*
* @author David J. Pearce
*
*/
public static class CharRule extends Rule {
@Override
public int lookahead() {
return 1;
}
@Override
public Token match(StringBuffer input, int pos) throws Error {
char ans = ' '; // set to keep javac out of trouble.
int start = pos;
boolean addflag = false;
boolean escflag = false;
boolean gotflag = false;
boolean ovflag = false;
if (input.charAt(pos) != '\'') { return null; }
pos++;
while (pos < input.length()) {
char c = input.charAt(pos);
if (addflag) {
// consume the character following an escape backslash
addflag = false;
ans = c;
ovflag = gotflag;
gotflag = true;
pos = pos + 1;
continue;
}
if (c == '\\') {
addflag = true;
escflag = true;
pos = pos + 1;
continue;
}
if (c == '\'') {
break;
}
ans = c;
ovflag = gotflag;
gotflag = true;
pos = pos + 1;
}
if (!( pos < input.length())) {
throw new Error("unexpected end-of-character", pos-1);
}
if (!gotflag) {
throw new Error("empty character", pos-1);
}
if (ovflag) {
throw new Error("character overflow", pos-1);
}
if (escflag) {
// escape code
switch(ans) {
case 't':
ans = '\t';
break;
case 'n':
ans = '\n';
break;
default:
throw new Error("unrecognised escape character",pos-1);
}
}
return new Token.Char(ans, input.substring(start, pos + 1), start);
}
}
/**
* A standard rule for parsing numbers represented in decimal (i.e. base
* 10).
*
* @author David J. Pearce
*
*/
public static class DecimalRule extends Rule {
@Override
public int lookahead() {
return 1;
}
@Override
public Token match(StringBuffer input, int pos) throws Error {
int start = pos;
if(!Character.isDigit(input.charAt(pos))) {
return null;
}
while (pos < input.length() && Character.isDigit(input.charAt(pos))) {
pos = pos + 1;
}
BigInteger beforePoint = new BigInteger(input.substring(start, pos));
BigInteger afterPoint = null;
if (pos < input.length() && input.charAt(pos) == '.') {
pos = pos + 1;
int dotStart = pos;
if (pos < input.length() && Character.isDigit(input.charAt(pos))) {
while (pos < input.length()
&& Character.isDigit(input.charAt(pos))) {
pos = pos + 1;
}
afterPoint = new BigInteger(input.substring(dotStart, pos));
} else {
// this is the case for a range, e.g. 0..1
pos = pos - 1;
}
}
return new Token.Number(10,beforePoint,afterPoint,input.substring(start,pos),start);
}
}
/**
* Standard rule for parsing line comments with user-definable syntax.
*
* @author David J. Pearce
*
*/
public static class LineCommentRule extends Rule {
private java.lang.String syntax;
public LineCommentRule(java.lang.String syntax) {
this.syntax = syntax;
}
@Override
public int lookahead() {
return syntax.length();
}
@Override
public Token match(StringBuffer input, int pos) throws Error {
// first, check whether this rule applies or not.
if(!matchString(input,pos,syntax)) {
return null;
}
// second scan until the end-of-line is reached.
int start = pos;
while (pos < input.length() && input.charAt(pos) != '\n') {
pos++;
}
return new Token.LineComment(input.substring(start, pos), start);
}
}
/**
* Standard rule for parsing block comments with user-definable start and
* end syntax.
*
* @author David J. Pearce
*
*/
public static class BlockCommentRule extends Rule {
private java.lang.String startSyntax;
private java.lang.String endSyntax;
public BlockCommentRule(java.lang.String startSyntax, java.lang.String endSyntax) {
this.startSyntax = startSyntax;
this.endSyntax = endSyntax;
}
@Override
public int lookahead() {
return startSyntax.length();
}
@Override
public Token match(StringBuffer input, int pos) throws Error {
// first, check whether this rule applies or not.
if (!matchString(input, pos, startSyntax)) {
return null;
}
// second, parse the block comment!
int start = pos;
while ((pos + 1) < input.length()
&& !matchString(input, pos, endSyntax)) {
pos++;
}
pos += endSyntax.length();
return new Token.BlockComment(input.substring(start, pos), start);
}
}
// ===================================================================
// Helper Classes / Methods
// ===================================================================
private static boolean matchString(StringBuffer input, int pos,
java.lang.String syntax) {
int diff = input.length() - pos;
if(syntax.length() > diff) {
return false;
} else {
for (int i = 0; i != syntax.length(); ++i) {
if (syntax.charAt(i) != input.charAt(pos + i)) {
return false;
}
}
return true;
}
}
/**
* Used to report lexing errors.
*
* @author David J. Pearce
*
*/
public static class Error extends Exception {
private final int position;
public Error(java.lang.String msg, int position) {
super(msg);
this.position = position;
}
public int getPosition() {
return position;
}
}
}
/*
* WSO2 API Manager - Admin
* This document specifies a **RESTful API** for WSO2 **API Manager** - Admin Portal. Please see [full swagger definition](https://raw.githubusercontent.com/wso2/carbon-apimgt/v6.1.66/components/apimgt/org.wso2.carbon.apimgt.rest.api.admin/src/main/resources/admin-api.yaml) of the API which is written using [swagger 2.0](http://swagger.io/) specification.
*
* OpenAPI spec version: 0.11.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package org.wso2.carbon.apimgt.samples.utils.admin.rest.client.api;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ApiCallback;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ApiClient;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ApiException;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ApiResponse;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.Configuration;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.Pair;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ProgressRequestBody;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ProgressResponseBody;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.model.ApplicationThrottlePolicy;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.model.ApplicationThrottlePolicyList;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.model.Error;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ApplicationPolicyCollectionApi {
private ApiClient apiClient;
public ApplicationPolicyCollectionApi() {
this(Configuration.getDefaultApiClient());
}
public ApplicationPolicyCollectionApi(ApiClient apiClient) {
this.apiClient = apiClient;
}
public ApiClient getApiClient() {
return apiClient;
}
public void setApiClient(ApiClient apiClient) {
this.apiClient = apiClient;
}
/* Build call for throttlingPoliciesApplicationGet */
private com.squareup.okhttp.Call throttlingPoliciesApplicationGetCall(String accept, String ifNoneMatch, String ifModifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
Object localVarPostBody = null;
// create path and map variables
String localVarPath = "/throttling/policies/application".replaceAll("\\{format\\}","json");
List<Pair> localVarQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
if (accept != null)
localVarHeaderParams.put("Accept", apiClient.parameterToString(accept));
if (ifNoneMatch != null)
localVarHeaderParams.put("If-None-Match", apiClient.parameterToString(ifNoneMatch));
if (ifModifiedSince != null)
localVarHeaderParams.put("If-Modified-Since", apiClient.parameterToString(ifModifiedSince));
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
if(progressListener != null) {
apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
@Override
public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
return originalResponse.newBuilder()
.body(new ProgressResponseBody(originalResponse.body(), progressListener))
.build();
}
});
}
String[] localVarAuthNames = new String[] { };
return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call throttlingPoliciesApplicationGetValidateBeforeCall(String accept, String ifNoneMatch, String ifModifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
com.squareup.okhttp.Call call = throttlingPoliciesApplicationGetCall(accept, ifNoneMatch, ifModifiedSince, progressListener, progressRequestListener);
return call;
}
/**
* Get all Application Throttling Policies
* Retrieves all existing application throttling policies.
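* <p>
* Illustrative usage sketch (client configuration omitted; the Accept value shown is the default):
* <pre>{@code
* ApplicationPolicyCollectionApi api = new ApplicationPolicyCollectionApi();
* ApplicationThrottlePolicyList policies =
*     api.throttlingPoliciesApplicationGet("application/json", null, null);
* }</pre>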
* @param accept Media types acceptable for the response. Default is application/json. (optional, default to application/json)
* @param ifNoneMatch Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource (Will be supported in future). (optional)
* @param ifModifiedSince Validator for conditional requests; based on Last Modified header of the formerly retrieved variant of the resource (Will be supported in future). (optional)
* @return ApplicationThrottlePolicyList
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public ApplicationThrottlePolicyList throttlingPoliciesApplicationGet(String accept, String ifNoneMatch, String ifModifiedSince) throws ApiException {
ApiResponse<ApplicationThrottlePolicyList> resp = throttlingPoliciesApplicationGetWithHttpInfo(accept, ifNoneMatch, ifModifiedSince);
return resp.getData();
}
/**
* Get all Application Throttling Policies
* Retrieves all existing application throttling policies.
* @param accept Media types acceptable for the response. Default is application/json. (optional, default to application/json)
* @param ifNoneMatch Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource (Will be supported in future). (optional)
* @param ifModifiedSince Validator for conditional requests; based on Last Modified header of the formerly retrieved variant of the resource (Will be supported in future). (optional)
* @return ApiResponse<ApplicationThrottlePolicyList>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public ApiResponse<ApplicationThrottlePolicyList> throttlingPoliciesApplicationGetWithHttpInfo(String accept, String ifNoneMatch, String ifModifiedSince) throws ApiException {
com.squareup.okhttp.Call call = throttlingPoliciesApplicationGetValidateBeforeCall(accept, ifNoneMatch, ifModifiedSince, null, null);
Type localVarReturnType = new TypeToken<ApplicationThrottlePolicyList>(){}.getType();
return apiClient.execute(call, localVarReturnType);
}
/**
* Get all Application Throttling Policies (asynchronously)
* Retrieves all existing application throttling policies.
* @param accept Media types acceptable for the response. Default is application/json. (optional, default to application/json)
* @param ifNoneMatch Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource (Will be supported in future). (optional)
* @param ifModifiedSince Validator for conditional requests; based on Last Modified header of the formerly retrieved variant of the resource (Will be supported in future). (optional)
* @param callback The callback to be executed when the API call finishes
* @return The request call
* @throws ApiException If fail to process the API call, e.g. serializing the request body object
*/
public com.squareup.okhttp.Call throttlingPoliciesApplicationGetAsync(String accept, String ifNoneMatch, String ifModifiedSince, final ApiCallback<ApplicationThrottlePolicyList> callback) throws ApiException {
ProgressResponseBody.ProgressListener progressListener = null;
ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
if (callback != null) {
progressListener = new ProgressResponseBody.ProgressListener() {
@Override
public void update(long bytesRead, long contentLength, boolean done) {
callback.onDownloadProgress(bytesRead, contentLength, done);
}
};
progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
@Override
public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
callback.onUploadProgress(bytesWritten, contentLength, done);
}
};
}
com.squareup.okhttp.Call call = throttlingPoliciesApplicationGetValidateBeforeCall(accept, ifNoneMatch, ifModifiedSince, progressListener, progressRequestListener);
Type localVarReturnType = new TypeToken<ApplicationThrottlePolicyList>(){}.getType();
apiClient.executeAsync(call, localVarReturnType, callback);
return call;
}
/* Build call for throttlingPoliciesApplicationPost */
private com.squareup.okhttp.Call throttlingPoliciesApplicationPostCall(ApplicationThrottlePolicy body, String contentType, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
Object localVarPostBody = body;
// create path and map variables
String localVarPath = "/throttling/policies/application".replaceAll("\\{format\\}","json");
List<Pair> localVarQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
if (contentType != null)
localVarHeaderParams.put("Content-Type", apiClient.parameterToString(contentType));
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
if(progressListener != null) {
apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
@Override
public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
return originalResponse.newBuilder()
.body(new ProgressResponseBody(originalResponse.body(), progressListener))
.build();
}
});
}
String[] localVarAuthNames = new String[] { };
return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call throttlingPoliciesApplicationPostValidateBeforeCall(ApplicationThrottlePolicy body, String contentType, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
// verify the required parameter 'body' is set
if (body == null) {
throw new ApiException("Missing the required parameter 'body' when calling throttlingPoliciesApplicationPost(Async)");
}
// verify the required parameter 'contentType' is set
if (contentType == null) {
throw new ApiException("Missing the required parameter 'contentType' when calling throttlingPoliciesApplicationPost(Async)");
}
com.squareup.okhttp.Call call = throttlingPoliciesApplicationPostCall(body, contentType, progressListener, progressRequestListener);
return call;
}
/**
* Add an Application Throttling Policy
* This operation can be used to add a new application level throttling policy.
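* <p>
* Illustrative usage sketch (assumes the generated {@code ApplicationThrottlePolicy} model's no-argument
* constructor and that its fields are populated elsewhere):
* <pre>{@code
* ApplicationPolicyCollectionApi api = new ApplicationPolicyCollectionApi();
* ApplicationThrottlePolicy policy = new ApplicationThrottlePolicy();
* // ... populate the policy fields ...
* ApplicationThrottlePolicy created = api.throttlingPoliciesApplicationPost(policy, "application/json");
* }</pre>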
* @param body Application level policy object that should be added (required)
* @param contentType Media type of the entity in the body. Default is application/json. (required)
* @return ApplicationThrottlePolicy
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public ApplicationThrottlePolicy throttlingPoliciesApplicationPost(ApplicationThrottlePolicy body, String contentType) throws ApiException {
ApiResponse<ApplicationThrottlePolicy> resp = throttlingPoliciesApplicationPostWithHttpInfo(body, contentType);
return resp.getData();
}
/**
* Add an Application Throttling Policy
* This operation can be used to add a new application level throttling policy.
* @param body Application level policy object that should be added (required)
* @param contentType Media type of the entity in the body. Default is application/json. (required)
* @return ApiResponse<ApplicationThrottlePolicy>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
public ApiResponse<ApplicationThrottlePolicy> throttlingPoliciesApplicationPostWithHttpInfo(ApplicationThrottlePolicy body, String contentType) throws ApiException {
com.squareup.okhttp.Call call = throttlingPoliciesApplicationPostValidateBeforeCall(body, contentType, null, null);
Type localVarReturnType = new TypeToken<ApplicationThrottlePolicy>(){}.getType();
return apiClient.execute(call, localVarReturnType);
}
/**
* Add an Application Throttling Policy (asynchronously)
* This operation can be used to add a new application level throttling policy.
* @param body Application level policy object that should be added (required)
* @param contentType Media type of the entity in the body. Default is application/json. (required)
* @param callback The callback to be executed when the API call finishes
* @return The request call
* @throws ApiException If fail to process the API call, e.g. serializing the request body object
*/
public com.squareup.okhttp.Call throttlingPoliciesApplicationPostAsync(ApplicationThrottlePolicy body, String contentType, final ApiCallback<ApplicationThrottlePolicy> callback) throws ApiException {
ProgressResponseBody.ProgressListener progressListener = null;
ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
if (callback != null) {
progressListener = new ProgressResponseBody.ProgressListener() {
@Override
public void update(long bytesRead, long contentLength, boolean done) {
callback.onDownloadProgress(bytesRead, contentLength, done);
}
};
progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
@Override
public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
callback.onUploadProgress(bytesWritten, contentLength, done);
}
};
}
com.squareup.okhttp.Call call = throttlingPoliciesApplicationPostValidateBeforeCall(body, contentType, progressListener, progressRequestListener);
Type localVarReturnType = new TypeToken<ApplicationThrottlePolicy>(){}.getType();
apiClient.executeAsync(call, localVarReturnType, callback);
return call;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.segment.standby.client;
import static org.apache.jackrabbit.oak.api.Type.BINARIES;
import static org.apache.jackrabbit.oak.api.Type.BINARY;
import java.io.IOException;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState;
import org.apache.jackrabbit.oak.plugins.segment.RecordId;
import org.apache.jackrabbit.oak.plugins.segment.SegmentBlob;
import org.apache.jackrabbit.oak.plugins.segment.SegmentNodeState;
import org.apache.jackrabbit.oak.plugins.segment.SegmentStore;
import org.apache.jackrabbit.oak.plugins.segment.standby.store.RemoteSegmentLoader;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStateDiff;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class StandbyApplyDiff implements NodeStateDiff {
private static final Logger log = LoggerFactory
.getLogger(StandbyApplyDiff.class);
private final NodeBuilder builder;
private final SegmentStore store;
private final RemoteSegmentLoader loader;
private final String path;
    /**
     * Read-only traversal of the diff that serves two purposes: it logs all
     * the content changes, and it drills down to the proper level so that
     * missing binaries can be synced if needed.
     */
private final boolean logOnly;
public StandbyApplyDiff(NodeBuilder builder, SegmentStore store,
RemoteSegmentLoader loader) {
this(builder, store, loader, "/", false);
}
private StandbyApplyDiff(NodeBuilder builder, SegmentStore store,
RemoteSegmentLoader loader, String path, boolean logOnly) {
this.builder = builder;
this.store = store;
this.loader = loader;
this.path = path;
this.logOnly = logOnly;
}
@Override
public boolean propertyAdded(PropertyState after) {
if (!loader.isRunning()) {
return false;
}
if (!logOnly) {
builder.setProperty(binaryCheck(after));
} else {
binaryCheck(after);
}
return true;
}
@Override
public boolean propertyChanged(PropertyState before, PropertyState after) {
if (!loader.isRunning()) {
return false;
}
if (!logOnly) {
builder.setProperty(binaryCheck(after));
} else {
binaryCheck(after);
}
return true;
}
@Override
public boolean propertyDeleted(PropertyState before) {
if (!loader.isRunning()) {
return false;
}
if (!logOnly) {
builder.removeProperty(before.getName());
}
return true;
}
private PropertyState binaryCheck(PropertyState property) {
Type<?> type = property.getType();
if (type == BINARY) {
binaryCheck(property.getValue(Type.BINARY), property.getName());
} else if (type == BINARIES) {
for (Blob blob : property.getValue(BINARIES)) {
binaryCheck(blob, property.getName());
}
}
return property;
}
private void binaryCheck(Blob b, String pName) {
if (b instanceof SegmentBlob) {
SegmentBlob sb = (SegmentBlob) b;
// verify if the blob exists
if (sb.isExternal() && b.getReference() == null) {
String blobId = sb.getBlobId();
if (blobId != null) {
readBlob(blobId, pName);
}
}
} else {
log.warn("Unknown Blob {} at {}, ignoring", b.getClass().getName(),
path + "#" + pName);
}
}
private void readBlob(String blobId, String pName) {
Blob read = loader.readBlob(blobId);
if (read != null) {
try {
store.getBlobStore().writeBlob(read.getNewStream());
} catch (IOException f) {
throw new IllegalStateException("Unable to persist blob "
+ blobId + " at " + path + "#" + pName, f);
}
} else {
throw new IllegalStateException("Unable to load remote blob "
+ blobId + " at " + path + "#" + pName);
}
}
@Override
public boolean childNodeAdded(String name, NodeState after) {
if (!loader.isRunning()) {
return false;
}
if (after instanceof SegmentNodeState) {
if (log.isTraceEnabled()) {
log.trace("childNodeAdded {}, RO:{}", path + name, logOnly);
}
if (!logOnly) {
RecordId id = ((SegmentNodeState) after).getRecordId();
builder.setChildNode(name, new SegmentNodeState(id));
}
return after.compareAgainstBaseState(EmptyNodeState.EMPTY_NODE,
new StandbyApplyDiff(builder.getChildNode(name), store,
loader, path + name + "/", true));
}
return false;
}
@Override
public boolean childNodeChanged(String name, NodeState before,
NodeState after) {
if (!loader.isRunning()) {
return false;
}
if (after instanceof SegmentNodeState) {
RecordId id = ((SegmentNodeState) after).getRecordId();
if (log.isTraceEnabled()) {
RecordId oldId = ((SegmentNodeState) before).getRecordId();
log.trace("childNodeChanged {}, {} -> {}, RO:{}", path + name,
oldId, id, logOnly);
}
if (!logOnly) {
builder.setChildNode(name, new SegmentNodeState(id));
}
return after.compareAgainstBaseState(before, new StandbyApplyDiff(
builder.getChildNode(name), store, loader, path + name
+ "/", true));
}
return false;
}
@Override
public boolean childNodeDeleted(String name, NodeState before) {
if (!loader.isRunning()) {
return false;
}
log.trace("childNodeDeleted {}, RO:{}", path + name, logOnly);
if (!logOnly) {
builder.getChildNode(name).remove();
}
return true;
}
}
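/*
 * Illustrative usage sketch only (not part of the original file). It mirrors the
 * recursive compareAgainstBaseState(...) calls above: the remote head state is diffed
 * against the local head and this visitor copies the changed records into the builder,
 * pulling missing external binaries through the RemoteSegmentLoader on the way. How the
 * two NodeState heads and the builder are obtained is assumed to happen elsewhere in
 * the standby sync code.
 */
class StandbyApplyDiffUsageSketch {
    static void applyRemoteChanges(NodeState localHead, NodeState remoteHead,
                                   NodeBuilder builder, SegmentStore store,
                                   RemoteSegmentLoader loader) {
        // Walks the diff from the root path "/" in read-write mode (logOnly == false).
        remoteHead.compareAgainstBaseState(localHead,
                new StandbyApplyDiff(builder, store, loader));
    }
}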
|
|
/*
* Note: this was copied from Doug Lea's CVS repository
* http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/jsr166e/
*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
package org.cinchapi.vendor.jsr166e.extra;
import static java.lang.Double.doubleToRawLongBits;
import static java.lang.Double.longBitsToDouble;
/**
* A {@code double} value that may be updated atomically. See the
* {@link java.util.concurrent.atomic} package specification for
* description of the properties of atomic variables. An {@code AtomicDouble} is
* used in applications such as atomic accumulation,
* and cannot be used as a replacement for a {@link Double}. However,
* this class does extend {@code Number} to allow uniform access by
* tools and utilities that deal with numerically-based classes.
*
* <p id="bitEquals">
* This class compares primitive {@code double} values in methods such as
* {@link #compareAndSet} by comparing their bitwise representation using
* {@link Double#doubleToRawLongBits}, which differs from both the primitive
* double {@code ==} operator and from {@link Double#equals}, as if implemented
* by:
*
* <pre>
* {@code
* static boolean bitEquals(double x, double y) {
* long xBits = Double.doubleToRawLongBits(x);
* long yBits = Double.doubleToRawLongBits(y);
* return xBits == yBits;
* }}
* </pre>
*
* @see org.cinchapi.vendor.jsr166e.DoubleAdder
* @see org.cinchapi.vendor.jsr166e.DoubleMaxUpdater
*
* @author Doug Lea
* @author Martin Buchholz
*/
public class AtomicDouble extends Number implements java.io.Serializable {
private static final long serialVersionUID = -8405198993435143622L;
private transient volatile long value;
/**
* Creates a new {@code AtomicDouble} with the given initial value.
*
* @param initialValue the initial value
*/
public AtomicDouble(double initialValue) {
value = doubleToRawLongBits(initialValue);
}
/**
* Creates a new {@code AtomicDouble} with initial value {@code 0.0}.
*/
public AtomicDouble() {
// assert doubleToRawLongBits(0.0) == 0L;
}
/**
* Gets the current value.
*
* @return the current value
*/
public final double get() {
return longBitsToDouble(value);
}
/**
* Sets to the given value.
*
* @param newValue the new value
*/
public final void set(double newValue) {
long next = doubleToRawLongBits(newValue);
value = next;
}
/**
* Eventually sets to the given value.
*
* @param newValue the new value
*/
public final void lazySet(double newValue) {
long next = doubleToRawLongBits(newValue);
unsafe.putOrderedLong(this, valueOffset, next);
}
/**
* Atomically sets to the given value and returns the old value.
*
* @param newValue the new value
* @return the previous value
*/
public final double getAndSet(double newValue) {
long next = doubleToRawLongBits(newValue);
while (true) {
long current = value;
if(unsafe.compareAndSwapLong(this, valueOffset, current, next))
return longBitsToDouble(current);
}
}
/**
* Atomically sets the value to the given updated value
* if the current value is <a href="#bitEquals">bitwise equal</a>
* to the expected value.
*
* @param expect the expected value
* @param update the new value
* @return {@code true} if successful. False return indicates that
* the actual value was not bitwise equal to the expected value.
*/
public final boolean compareAndSet(double expect, double update) {
return unsafe.compareAndSwapLong(this, valueOffset,
doubleToRawLongBits(expect), doubleToRawLongBits(update));
}
/**
* Atomically sets the value to the given updated value
* if the current value is <a href="#bitEquals">bitwise equal</a>
* to the expected value.
*
* <p>
* <a href=
* "http://download.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/package-summary.html#Spurious"
* > May fail spuriously and does not provide ordering guarantees</a>, so is
* only rarely an appropriate alternative to {@code compareAndSet}.
*
* @param expect the expected value
* @param update the new value
* @return {@code true} if successful
*/
public final boolean weakCompareAndSet(double expect, double update) {
return compareAndSet(expect, update);
}
/**
* Atomically adds the given value to the current value.
*
* @param delta the value to add
* @return the previous value
*/
public final double getAndAdd(double delta) {
while (true) {
long current = value;
double currentVal = longBitsToDouble(current);
double nextVal = currentVal + delta;
long next = doubleToRawLongBits(nextVal);
if(unsafe.compareAndSwapLong(this, valueOffset, current, next))
return currentVal;
}
}
/**
* Atomically adds the given value to the current value.
*
* @param delta the value to add
* @return the updated value
*/
public final double addAndGet(double delta) {
while (true) {
long current = value;
double currentVal = longBitsToDouble(current);
double nextVal = currentVal + delta;
long next = doubleToRawLongBits(nextVal);
if(unsafe.compareAndSwapLong(this, valueOffset, current, next))
return nextVal;
}
}
/**
* Returns the String representation of the current value.
*
* @return the String representation of the current value
*/
public String toString() {
return Double.toString(get());
}
/**
* Returns the value of this {@code AtomicDouble} as an {@code int} after a
* narrowing primitive conversion.
*/
public int intValue() {
return (int) get();
}
/**
* Returns the value of this {@code AtomicDouble} as a {@code long} after a
* narrowing primitive conversion.
*/
public long longValue() {
return (long) get();
}
/**
* Returns the value of this {@code AtomicDouble} as a {@code float} after a
* narrowing primitive conversion.
*/
public float floatValue() {
return (float) get();
}
/**
* Returns the value of this {@code AtomicDouble} as a {@code double}.
*/
public double doubleValue() {
return get();
}
/**
* Saves the state to a stream (that is, serializes it).
*
* @param s the stream
* @throws java.io.IOException if an I/O error occurs
* @serialData The current value is emitted (a {@code double}).
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
s.defaultWriteObject();
s.writeDouble(get());
}
/**
* Reconstitutes the instance from a stream (that is, deserializes it).
*
* @param s the stream
* @throws ClassNotFoundException if the class of a serialized object
* could not be found
* @throws java.io.IOException if an I/O error occurs
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
set(s.readDouble());
}
// Unsafe mechanics
private static final sun.misc.Unsafe unsafe = getUnsafe();
private static final long valueOffset;
static {
try {
valueOffset = unsafe.objectFieldOffset(AtomicDouble.class
.getDeclaredField("value"));
}
catch (Exception ex) {
throw new Error(ex);
}
}
/**
* Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
* Replace with a simple call to Unsafe.getUnsafe when integrating
* into a jdk.
*
* @return a sun.misc.Unsafe
*/
private static sun.misc.Unsafe getUnsafe() {
try {
return sun.misc.Unsafe.getUnsafe();
}
catch (SecurityException tryReflectionInstead) {}
try {
return java.security.AccessController
.doPrivileged(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k
.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if(k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}
});
}
catch (java.security.PrivilegedActionException e) {
throw new RuntimeException("Could not initialize intrinsics",
e.getCause());
}
}
}
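/*
 * Illustrative usage sketch only (not part of the original file). It exercises the
 * accumulation methods above and the bitwise compareAndSet semantics described in the
 * class javadoc.
 */
class AtomicDoubleUsageSketch {
    public static void main(String[] args) {
        AtomicDouble total = new AtomicDouble(0.0);
        total.addAndGet(2.5);                 // total is now 2.5
        double before = total.getAndAdd(1.5); // returns 2.5, total becomes 4.0
        // compareAndSet compares raw bits, so e.g. +0.0 and -0.0 are not considered equal.
        boolean swapped = total.compareAndSet(4.0, 10.0);
        System.out.println(before + " " + swapped + " " + total.get()); // 2.5 true 10.0
    }
}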
|
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.core;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.file.PsiPackageImpl;
import com.intellij.psi.impl.file.impl.JavaFileManager;
import com.intellij.psi.search.GlobalSearchScope;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author yole
*/
public class CoreJavaFileManager implements JavaFileManager {
private static final Logger LOG = Logger.getInstance("#com.intellij.core.CoreJavaFileManager");
private final List<VirtualFile> myClasspath = new ArrayList<VirtualFile>();
private final PsiManager myPsiManager;
public CoreJavaFileManager(PsiManager psiManager) {
myPsiManager = psiManager;
}
private List<VirtualFile> roots() {
return myClasspath;
}
@Override
public PsiPackage findPackage(@NotNull String packageName) {
final List<VirtualFile> files = findDirectoriesByPackageName(packageName);
if (!files.isEmpty()) {
return new PsiPackageImpl(myPsiManager, packageName);
}
return null;
}
private List<VirtualFile> findDirectoriesByPackageName(String packageName) {
List<VirtualFile> result = new ArrayList<VirtualFile>();
String dirName = packageName.replace(".", "/");
for (VirtualFile root : roots()) {
VirtualFile classDir = root.findFileByRelativePath(dirName);
if (classDir != null) {
result.add(classDir);
}
}
return result;
}
@Nullable
public PsiPackage getPackage(PsiDirectory dir) {
final VirtualFile file = dir.getVirtualFile();
for (VirtualFile root : myClasspath) {
if (VfsUtilCore.isAncestor(root, file, false)) {
String relativePath = FileUtil.getRelativePath(root.getPath(), file.getPath(), '/');
if (relativePath == null) continue;
return new PsiPackageImpl(myPsiManager, relativePath.replace('/', '.'));
}
}
return null;
}
@Override
public PsiClass findClass(@NotNull String qName, @NotNull GlobalSearchScope scope) {
for (VirtualFile root : roots()) {
final PsiClass psiClass = findClassInClasspathRoot(qName, root, myPsiManager, scope);
if (psiClass != null) {
return psiClass;
}
}
return null;
}
@Nullable
public static PsiClass findClassInClasspathRoot(@NotNull String qName,
@NotNull VirtualFile root,
@NotNull PsiManager psiManager,
@NotNull GlobalSearchScope scope) {
String pathRest = qName;
VirtualFile cur = root;
while (true) {
int dot = pathRest.indexOf('.');
if (dot < 0) break;
String pathComponent = pathRest.substring(0, dot);
VirtualFile child = cur.findChild(pathComponent);
if (child == null) break;
pathRest = pathRest.substring(dot + 1);
cur = child;
}
String classNameWithInnerClasses = pathRest;
String topLevelClassName = substringBeforeFirstDot(classNameWithInnerClasses);
VirtualFile vFile = cur.findChild(topLevelClassName + ".class");
if (vFile == null) vFile = cur.findChild(topLevelClassName + ".java");
if (vFile == null) {
return null;
}
if (!vFile.isValid()) {
LOG.error("Invalid child of valid parent: " + vFile.getPath() + "; " + root.isValid() + " path=" + root.getPath());
return null;
}
if (!scope.contains(vFile)) {
return null;
}
final PsiFile file = psiManager.findFile(vFile);
if (!(file instanceof PsiClassOwner)) {
return null;
}
return findClassInPsiFile(classNameWithInnerClasses, (PsiClassOwner)file);
}
@NotNull
private static String substringBeforeFirstDot(@NotNull String classNameWithInnerClasses) {
int dot = classNameWithInnerClasses.indexOf('.');
if (dot < 0) {
return classNameWithInnerClasses;
}
else {
return classNameWithInnerClasses.substring(0, dot);
}
}
@Nullable
private static PsiClass findClassInPsiFile(@NotNull String classNameWithInnerClassesDotSeparated, @NotNull PsiClassOwner file) {
for (PsiClass topLevelClass : file.getClasses()) {
PsiClass candidate = findClassByTopLevelClass(classNameWithInnerClassesDotSeparated, topLevelClass);
if (candidate != null) {
return candidate;
}
}
return null;
}
@Nullable
private static PsiClass findClassByTopLevelClass(@NotNull String className, @NotNull PsiClass topLevelClass) {
if (className.indexOf('.') < 0) {
return className.equals(topLevelClass.getName()) ? topLevelClass : null;
}
Iterator<String> segments = StringUtil.split(className, ".").iterator();
if (!segments.hasNext() || !segments.next().equals(topLevelClass.getName())) {
return null;
}
PsiClass curClass = topLevelClass;
while (segments.hasNext()) {
String innerClassName = segments.next();
PsiClass innerClass = curClass.findInnerClassByName(innerClassName, false);
if (innerClass == null) {
return null;
}
curClass = innerClass;
}
return curClass;
}
@NotNull
@Override
public PsiClass[] findClasses(@NotNull String qName, @NotNull GlobalSearchScope scope) {
List<PsiClass> result = new ArrayList<PsiClass>();
for (VirtualFile file : roots()) {
final PsiClass psiClass = findClassInClasspathRoot(qName, file, myPsiManager, scope);
if (psiClass != null) {
result.add(psiClass);
}
}
return result.toArray(new PsiClass[result.size()]);
}
@NotNull
@Override
public Collection<String> getNonTrivialPackagePrefixes() {
return Collections.emptyList();
}
@NotNull
@Override
public Collection<PsiJavaModule> findModules(@NotNull String moduleName, @NotNull GlobalSearchScope scope) {
return Collections.emptySet();
}
public void addToClasspath(VirtualFile root) {
myClasspath.add(root);
}
}
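/*
 * Illustrative usage sketch only (not part of the original file). It shows how the
 * manager above is typically wired up: register classpath roots, then resolve a fully
 * qualified name. Obtaining the PsiManager and the root VirtualFile is assumed to be
 * handled by the surrounding core environment.
 */
class CoreJavaFileManagerUsageSketch {
    @Nullable
    static PsiClass resolve(@NotNull PsiManager psiManager,
                            @NotNull VirtualFile classpathRoot,
                            @NotNull String qualifiedName) {
        CoreJavaFileManager fileManager = new CoreJavaFileManager(psiManager);
        fileManager.addToClasspath(classpathRoot);
        // Resolution walks the package directories under each root, then tries
        // <TopLevel>.class / <TopLevel>.java before drilling into inner classes.
        return fileManager.findClass(qualifiedName, GlobalSearchScope.allScope(psiManager.getProject()));
    }
}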
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/services/keyword_plan_campaign_keyword_service.proto
package com.google.ads.googleads.v10.services;
/**
* <pre>
* Response message for a Keyword Plan campaign keyword mutate.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse}
*/
public final class MutateKeywordPlanCampaignKeywordsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse)
MutateKeywordPlanCampaignKeywordsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use MutateKeywordPlanCampaignKeywordsResponse.newBuilder() to construct.
private MutateKeywordPlanCampaignKeywordsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MutateKeywordPlanCampaignKeywordsResponse() {
results_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new MutateKeywordPlanCampaignKeywordsResponse();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MutateKeywordPlanCampaignKeywordsResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18: {
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
results_ = new java.util.ArrayList<com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult>();
mutable_bitField0_ |= 0x00000001;
}
results_.add(
input.readMessage(com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.parser(), extensionRegistry));
break;
}
case 26: {
com.google.rpc.Status.Builder subBuilder = null;
if (partialFailureError_ != null) {
subBuilder = partialFailureError_.toBuilder();
}
partialFailureError_ = input.readMessage(com.google.rpc.Status.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(partialFailureError_);
partialFailureError_ = subBuilder.buildPartial();
}
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
results_ = java.util.Collections.unmodifiableList(results_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.services.KeywordPlanCampaignKeywordServiceProto.internal_static_google_ads_googleads_v10_services_MutateKeywordPlanCampaignKeywordsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.services.KeywordPlanCampaignKeywordServiceProto.internal_static_google_ads_googleads_v10_services_MutateKeywordPlanCampaignKeywordsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse.class, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse.Builder.class);
}
public static final int PARTIAL_FAILURE_ERROR_FIELD_NUMBER = 3;
private com.google.rpc.Status partialFailureError_;
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
* @return Whether the partialFailureError field is set.
*/
@java.lang.Override
public boolean hasPartialFailureError() {
return partialFailureError_ != null;
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
* @return The partialFailureError.
*/
@java.lang.Override
public com.google.rpc.Status getPartialFailureError() {
return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
*/
@java.lang.Override
public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() {
return getPartialFailureError();
}
public static final int RESULTS_FIELD_NUMBER = 2;
private java.util.List<com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult> results_;
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult> getResultsList() {
return results_;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResultOrBuilder>
getResultsOrBuilderList() {
return results_;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
@java.lang.Override
public int getResultsCount() {
return results_.size();
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult getResults(int index) {
return results_.get(index);
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResultOrBuilder getResultsOrBuilder(
int index) {
return results_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < results_.size(); i++) {
output.writeMessage(2, results_.get(i));
}
if (partialFailureError_ != null) {
output.writeMessage(3, getPartialFailureError());
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < results_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, results_.get(i));
}
if (partialFailureError_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, getPartialFailureError());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse)) {
return super.equals(obj);
}
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse other = (com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse) obj;
if (hasPartialFailureError() != other.hasPartialFailureError()) return false;
if (hasPartialFailureError()) {
if (!getPartialFailureError()
.equals(other.getPartialFailureError())) return false;
}
if (!getResultsList()
.equals(other.getResultsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPartialFailureError()) {
hash = (37 * hash) + PARTIAL_FAILURE_ERROR_FIELD_NUMBER;
hash = (53 * hash) + getPartialFailureError().hashCode();
}
if (getResultsCount() > 0) {
hash = (37 * hash) + RESULTS_FIELD_NUMBER;
hash = (53 * hash) + getResultsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Response message for a Keyword Plan campaign keyword mutate.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse)
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.services.KeywordPlanCampaignKeywordServiceProto.internal_static_google_ads_googleads_v10_services_MutateKeywordPlanCampaignKeywordsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.services.KeywordPlanCampaignKeywordServiceProto.internal_static_google_ads_googleads_v10_services_MutateKeywordPlanCampaignKeywordsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse.class, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse.Builder.class);
}
// Construct using com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getResultsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (partialFailureErrorBuilder_ == null) {
partialFailureError_ = null;
} else {
partialFailureError_ = null;
partialFailureErrorBuilder_ = null;
}
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
resultsBuilder_.clear();
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v10.services.KeywordPlanCampaignKeywordServiceProto.internal_static_google_ads_googleads_v10_services_MutateKeywordPlanCampaignKeywordsResponse_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse getDefaultInstanceForType() {
return com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse build() {
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse buildPartial() {
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse result = new com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse(this);
int from_bitField0_ = bitField0_;
if (partialFailureErrorBuilder_ == null) {
result.partialFailureError_ = partialFailureError_;
} else {
result.partialFailureError_ = partialFailureErrorBuilder_.build();
}
if (resultsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
results_ = java.util.Collections.unmodifiableList(results_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.results_ = results_;
} else {
result.results_ = resultsBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse) {
return mergeFrom((com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse other) {
if (other == com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse.getDefaultInstance()) return this;
if (other.hasPartialFailureError()) {
mergePartialFailureError(other.getPartialFailureError());
}
if (resultsBuilder_ == null) {
if (!other.results_.isEmpty()) {
if (results_.isEmpty()) {
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureResultsIsMutable();
results_.addAll(other.results_);
}
onChanged();
}
} else {
if (!other.results_.isEmpty()) {
if (resultsBuilder_.isEmpty()) {
resultsBuilder_.dispose();
resultsBuilder_ = null;
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000001);
resultsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getResultsFieldBuilder() : null;
} else {
resultsBuilder_.addAllMessages(other.results_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private com.google.rpc.Status partialFailureError_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> partialFailureErrorBuilder_;
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
* @return Whether the partialFailureError field is set.
*/
public boolean hasPartialFailureError() {
return partialFailureErrorBuilder_ != null || partialFailureError_ != null;
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
* @return The partialFailureError.
*/
public com.google.rpc.Status getPartialFailureError() {
if (partialFailureErrorBuilder_ == null) {
return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
} else {
return partialFailureErrorBuilder_.getMessage();
}
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
*/
public Builder setPartialFailureError(com.google.rpc.Status value) {
if (partialFailureErrorBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
partialFailureError_ = value;
onChanged();
} else {
partialFailureErrorBuilder_.setMessage(value);
}
return this;
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
*/
public Builder setPartialFailureError(
com.google.rpc.Status.Builder builderForValue) {
if (partialFailureErrorBuilder_ == null) {
partialFailureError_ = builderForValue.build();
onChanged();
} else {
partialFailureErrorBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
*/
public Builder mergePartialFailureError(com.google.rpc.Status value) {
if (partialFailureErrorBuilder_ == null) {
if (partialFailureError_ != null) {
partialFailureError_ =
com.google.rpc.Status.newBuilder(partialFailureError_).mergeFrom(value).buildPartial();
} else {
partialFailureError_ = value;
}
onChanged();
} else {
partialFailureErrorBuilder_.mergeFrom(value);
}
return this;
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
*/
public Builder clearPartialFailureError() {
if (partialFailureErrorBuilder_ == null) {
partialFailureError_ = null;
onChanged();
} else {
partialFailureError_ = null;
partialFailureErrorBuilder_ = null;
}
return this;
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
*/
public com.google.rpc.Status.Builder getPartialFailureErrorBuilder() {
onChanged();
return getPartialFailureErrorFieldBuilder().getBuilder();
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
*/
public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() {
if (partialFailureErrorBuilder_ != null) {
return partialFailureErrorBuilder_.getMessageOrBuilder();
} else {
return partialFailureError_ == null ?
com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
}
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
getPartialFailureErrorFieldBuilder() {
if (partialFailureErrorBuilder_ == null) {
partialFailureErrorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
getPartialFailureError(),
getParentForChildren(),
isClean());
partialFailureError_ = null;
}
return partialFailureErrorBuilder_;
}
private java.util.List<com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult> results_ =
java.util.Collections.emptyList();
private void ensureResultsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
results_ = new java.util.ArrayList<com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult>(results_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResultOrBuilder> resultsBuilder_;
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public java.util.List<com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult> getResultsList() {
if (resultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(results_);
} else {
return resultsBuilder_.getMessageList();
}
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public int getResultsCount() {
if (resultsBuilder_ == null) {
return results_.size();
} else {
return resultsBuilder_.getCount();
}
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult getResults(int index) {
if (resultsBuilder_ == null) {
return results_.get(index);
} else {
return resultsBuilder_.getMessage(index);
}
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public Builder setResults(
int index, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.set(index, value);
onChanged();
} else {
resultsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public Builder setResults(
int index, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.set(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public Builder addResults(com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(value);
onChanged();
} else {
resultsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public Builder addResults(
int index, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(index, value);
onChanged();
} else {
resultsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public Builder addResults(
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public Builder addResults(
int index, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public Builder addAllResults(
java.lang.Iterable<? extends com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult> values) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, results_);
onChanged();
} else {
resultsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public Builder clearResults() {
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
resultsBuilder_.clear();
}
return this;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public Builder removeResults(int index) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.remove(index);
onChanged();
} else {
resultsBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder getResultsBuilder(
int index) {
return getResultsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResultOrBuilder getResultsOrBuilder(
int index) {
if (resultsBuilder_ == null) {
return results_.get(index); } else {
return resultsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public java.util.List<? extends com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResultOrBuilder>
getResultsOrBuilderList() {
if (resultsBuilder_ != null) {
return resultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(results_);
}
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder addResultsBuilder() {
return getResultsFieldBuilder().addBuilder(
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.getDefaultInstance());
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder addResultsBuilder(
int index) {
return getResultsFieldBuilder().addBuilder(
index, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.getDefaultInstance());
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult results = 2;</code>
*/
public java.util.List<com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder>
getResultsBuilderList() {
return getResultsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResultOrBuilder>
getResultsFieldBuilder() {
if (resultsBuilder_ == null) {
resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.Builder, com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResultOrBuilder>(
results_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
results_ = null;
}
return resultsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse)
private static final com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse();
}
public static com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<MutateKeywordPlanCampaignKeywordsResponse>
PARSER = new com.google.protobuf.AbstractParser<MutateKeywordPlanCampaignKeywordsResponse>() {
@java.lang.Override
public MutateKeywordPlanCampaignKeywordsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MutateKeywordPlanCampaignKeywordsResponse(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<MutateKeywordPlanCampaignKeywordsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MutateKeywordPlanCampaignKeywordsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
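/*
 * Illustrative usage sketch only (not part of the generated file). It round-trips the
 * message above through its wire format: build an instance, serialize it, and parse it
 * back with the generated parser. The single default-instance result is a placeholder.
 */
class MutateKeywordPlanCampaignKeywordsResponseUsageSketch {
    static void roundTrip() throws com.google.protobuf.InvalidProtocolBufferException {
        com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse response =
            com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse.newBuilder()
                .addResults(com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordResult.getDefaultInstance())
                .build();
        byte[] wire = response.toByteArray();
        com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse parsed =
            com.google.ads.googleads.v10.services.MutateKeywordPlanCampaignKeywordsResponse.parseFrom(wire);
        // Each element of getResultsList() corresponds to one mutated campaign keyword.
        System.out.println(parsed.getResultsCount() + " result(s), partial failure: "
            + parsed.hasPartialFailureError());
    }
}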
|
|
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.cluster;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.List;
import org.junit.Assert;
import org.apache.curator.test.TestingServer;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.jboss.netty.channel.Channel;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.navercorp.pinpoint.common.util.NetUtils;
import com.navercorp.pinpoint.rpc.client.MessageListener;
import com.navercorp.pinpoint.rpc.client.PinpointSocket;
import com.navercorp.pinpoint.rpc.client.PinpointSocketFactory;
import com.navercorp.pinpoint.rpc.packet.RequestPacket;
import com.navercorp.pinpoint.rpc.packet.SendPacket;
import com.navercorp.pinpoint.web.config.WebConfig;
import com.navercorp.pinpoint.web.server.PinpointSocketManager;
import com.navercorp.pinpoint.web.util.PinpointWebTestUtils;
/**
* @author Taejin Koo
*/
public class ClusterTest {
private static final Logger LOGGER = LoggerFactory.getLogger(ClusterTest.class);
    // Some tests may fail when executed in a local environment.
    // If failures happen, copy pinpoint-web.properties from resource-test to resource-local; the tests will then succeed.
private static final String DEFAULT_IP = PinpointWebTestUtils.getRepresentationLocalV4Ip();
private static String CLUSTER_NODE_PATH;
private static int acceptorPort;
private static int zookeeperPort;
private static String zookeeperAddress;
private static TestingServer ts = null;
static PinpointSocketManager socketManager;
@BeforeClass
public static void setUp() throws Exception {
acceptorPort = PinpointWebTestUtils.findAvailablePort();
zookeeperPort = PinpointWebTestUtils.findAvailablePort(acceptorPort + 1);
zookeeperAddress = DEFAULT_IP + ":" + zookeeperPort;
CLUSTER_NODE_PATH = "/pinpoint-cluster/web/" + DEFAULT_IP + ":" + acceptorPort;
LOGGER.info("CLUSTER_NODE_PATH:{}", CLUSTER_NODE_PATH);
WebConfig config = mock(WebConfig.class);
when(config.isClusterEnable()).thenReturn(true);
when(config.getClusterTcpPort()).thenReturn(acceptorPort);
when(config.getClusterZookeeperAddress()).thenReturn(zookeeperAddress);
when(config.getClusterZookeeperRetryInterval()).thenReturn(60000);
when(config.getClusterZookeeperSessionTimeout()).thenReturn(3000);
socketManager = new PinpointSocketManager(config);
socketManager.start();
ts = createZookeeperServer(zookeeperPort);
}
@AfterClass
public static void tearDown() throws Exception {
closeZookeeperServer(ts);
socketManager.stop();
}
@Before
public void before() throws IOException {
ts.stop();
}
@Test
public void clusterTest1() throws Exception {
ts.restart();
Thread.sleep(5000);
ZooKeeper zookeeper = new ZooKeeper(zookeeperAddress, 5000, null);
getNodeAndCompareContents(zookeeper);
if (zookeeper != null) {
zookeeper.close();
}
}
@Test
public void clusterTest2() throws Exception {
ts.restart();
Thread.sleep(5000);
ZooKeeper zookeeper = new ZooKeeper(zookeeperAddress, 5000, null);
getNodeAndCompareContents(zookeeper);
ts.stop();
Thread.sleep(5000);
try {
zookeeper.getData(CLUSTER_NODE_PATH, null, null);
Assert.fail();
} catch (KeeperException e) {
Assert.assertEquals(KeeperException.Code.CONNECTIONLOSS, e.code());
            // expected: the connection is lost because the ZooKeeper server was stopped
}
ts.restart();
getNodeAndCompareContents(zookeeper);
if (zookeeper != null) {
zookeeper.close();
}
}
@Test
public void clusterTest3() throws Exception {
ts.restart();
PinpointSocketFactory factory = null;
PinpointSocket socket = null;
ZooKeeper zookeeper = null;
try {
Thread.sleep(5000);
zookeeper = new ZooKeeper(zookeeperAddress, 5000, null);
getNodeAndCompareContents(zookeeper);
Assert.assertEquals(0, socketManager.getCollectorList().size());
factory = new PinpointSocketFactory();
factory.setMessageListener(new SimpleListener());
socket = factory.connect(DEFAULT_IP, acceptorPort);
Thread.sleep(1000);
Assert.assertEquals(1, socketManager.getCollectorList().size());
} finally {
closePinpointSocket(factory, socket);
if (zookeeper != null) {
zookeeper.close();
}
}
}
private static TestingServer createZookeeperServer(int port) throws Exception {
TestingServer mockZookeeperServer = new TestingServer(port);
mockZookeeperServer.start();
return mockZookeeperServer;
}
private static void closeZookeeperServer(TestingServer mockZookeeperServer) throws Exception {
try {
if (mockZookeeperServer != null) {
mockZookeeperServer.close();
}
} catch (Exception e) {
e.printStackTrace();
}
}
private void getNodeAndCompareContents(ZooKeeper zookeeper) throws KeeperException, InterruptedException {
LOGGER.info("getNodeAndCompareContents() {}", CLUSTER_NODE_PATH);
        byte[] contents = zookeeper.getData(CLUSTER_NODE_PATH, null, null);
        String[] registeredIpList = new String(contents).split("\r\n");
        List<String> ipList = NetUtils.getLocalV4IpList();
        Assert.assertEquals(registeredIpList.length, ipList.size());
        for (String ip : registeredIpList) {
Assert.assertTrue(ipList.contains(ip));
}
}
private void closePinpointSocket(PinpointSocketFactory factory, PinpointSocket socket) {
if (socket != null) {
socket.close();
}
if (factory != null) {
factory.release();
}
}
class SimpleListener implements MessageListener {
@Override
public void handleSend(SendPacket sendPacket, Channel channel) {
}
@Override
public void handleRequest(RequestPacket requestPacket, Channel channel) {
            // no-op: this test does not expect request packets
}
}
}
|
|
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package mapregion.diskRegion;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import objects.ObjectHelper;
import hydra.CacheHelper;
import hydra.ConfigPrms;
import hydra.HydraThreadLocal;
import hydra.Log;
import hydra.RegionHelper;
import hydra.RemoteTestModule;
import hydra.TestTask;
import util.TestException;
import util.TestHelper;
import cacheperf.CachePerfPrms;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.distributed.DistributedSystem;
import mapregion.*;
public class TempPerfForDiskReg
{
static TempPerfForDiskReg testInstance;
static DistributedSystem ds;
static private Cache cache;
static Region diskRegion;
static String regionName;
static RegionAttributes attr;
static int totalThreads = 0;
public TempPerfForDiskReg(){}//end of constructor
//////////////hydra init task methods//////////////
public synchronized static void HydraTask_initialize()
{
if (testInstance == null) {
testInstance = new TempPerfForDiskReg();
testInstance.initialize();
}
testInstance.initHydraThreadLocals();
}//end of HydraTask_initialize
protected void initialize()
{
try {
////initialize cache
initCache();
attr = RegionHelper.getRegionAttributes(ConfigPrms.getRegionConfig());
regionName = RegionHelper.getRegionDescription(ConfigPrms.getRegionConfig()).getRegionName();
TestTask task = RemoteTestModule.getCurrentThread().getCurrentTask();
totalThreads = task.getTotalThreads();
///create region...
if (TempPerfForDiskReg.diskRegion == null) {
diskRegion = RegionHelper.createRegion(regionName, attr);
}
}
catch (Exception ex) {
throw new TestException(TestHelper.getStackTrace(ex));
}
}//end of initialize
private static HydraThreadLocal localkeycount = new HydraThreadLocal();
protected void initHydraThreadLocals() {
int startPoint = getStartPoint();
setKeyCount(startPoint);
}
/**
* Sets the key count for a thread's workload.
*/
protected void setKeyCount( int n ) {
localkeycount.set( new Integer( n ) );
}
/**
* Gets the key count for a thread's workload.
*/
protected int getKeyCount() {
Integer n = (Integer) localkeycount.get();
if ( n == null ) {
n = new Integer(0);
localkeycount.set( n );
}
return n.intValue();
}
protected synchronized int getStartPoint() {
startPoint++;
return startPoint;
}
/**
* Connects to DistributedSystem and creates cache
*/
private synchronized void initCache()
{
try {
if (cache == null || cache.isClosed()) {
cache = CacheHelper.createCache(ConfigPrms.getCacheConfig());
}
}
catch (Exception ex) {
throw new TestException(TestHelper.getStackTrace(ex));
}
}//end of initCache
public static void HydraTask_PerformPuts()
{
testInstance.performPutOperations();
}//end of HydraTask_PerformPuts
private static volatile long totalPuts=0;
static Object totalLock = "total";
static int maxKeys = CachePerfPrms.getMaxKeys();
static volatile long totalTime=0;
static volatile int startPoint = 0;
static ArrayList al = new ArrayList();
protected void performPutOperations ()
{
try
{
Object key=null, val=null;
String objectType = MapPrms.getObjectType();
long start, end;
long puts = 0;
int putKeyInt= 0;
int startPoint = getKeyCount();
startPoint = startPoint * maxKeys;
start = System.currentTimeMillis();
do{
putKeyInt++;
key = ObjectHelper.createName(startPoint + putKeyInt);
val = ObjectHelper.createObject(objectType, (startPoint + putKeyInt));
diskRegion.put(key, val);
puts++;
} while (putKeyInt < maxKeys);
end = System.currentTimeMillis();
synchronized (totalLock) {
al.add( new Long((long)(end - start)));
totalPuts = (totalPuts+puts);
totalTime =totalTime + (end - start);
}
}
catch(Exception ex){
throw new TestException(TestHelper.getStackTrace(ex));
}
}//end of performPutOperations
public synchronized static void HydraTask_CloseTask(){
testInstance.printValues();
testInstance.closeCache();
}//end of closeTask
private static FileOutputStream file;
private static BufferedWriter wr;
protected void printValues() {
try{
Date date = new Date(System.currentTimeMillis());
file = new FileOutputStream("./diskRegPerf-"+(date.toGMTString().substring(0, date.toGMTString().indexOf("200")+4).replace(' ', '-'))+".txt");
wr = new BufferedWriter(new OutputStreamWriter(file));
      double avgTime = (double) totalTime / totalThreads; // cast avoids integer truncation before storing the average
wr.write("====================DISK REGION PERFORMANCE REPORT===============================");
wr.newLine();
wr.flush();
wr.write(" TOTAL NUMBER OF THREADS ARE: "+totalThreads);
wr.flush();
wr.newLine();
wr.write(" TOTAL NUMBER OF PUTS IS: "+totalPuts);
wr.flush();
wr.newLine();
wr.write(" TOTAL TIME TAKEN IS (total time per thread): "+avgTime);
wr.flush();
wr.newLine();
wr.write(" AVERAGE NUMBER OF PUTS PER SECOND IS: "+((totalPuts*1000)/(avgTime)));
wr.flush();
//write in vm logs too!
Log.getLogWriter().info(" TOTAL NUMBER OF THREADS ARE: "+totalThreads);
Log.getLogWriter().info(" key count for this thread is: "+getKeyCount());
Log.getLogWriter().info(" TOTAL NUMBER OF PUTS IS: "+totalPuts);
Log.getLogWriter().info(" TOTAL TIME TAKEN IS (total time per thread): "+avgTime);
Log.getLogWriter().info(" AVERAGE NUMBER OF PUTS PER SECOND IS: "+((totalPuts*1000)/(avgTime)));
}
catch (Exception ex) {
throw new TestException(TestHelper.getStackTrace(ex));
}
Iterator itr = al.iterator();
while(itr.hasNext()){
System.out.println( ( (Long)itr.next() ).longValue() );
}
System.out.println( "-------------------------------------------------------" );
}
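  /*
   * Worked example of the report arithmetic above (illustrative numbers only, not measured data):
   * with totalThreads = 4, totalPuts = 100000 and totalTime = 20000 ms accumulated across all
   * threads, avgTime = 20000 / 4 = 5000 ms, so the reported rate is
   * (100000 * 1000) / 5000 = 20000 puts per second.
   */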
public void closeCache()
{
try {
      if (cache != null && !cache.isClosed()) { // '&&' prevents a NullPointerException when the cache was never created
CacheHelper.closeCache();
}
}
catch (Exception ex) {
throw new TestException(TestHelper.getStackTrace(ex));
}
}//end of closeCache
}//end of TempPerfForDiskReg
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import junit.framework.*;
import org.apache.ignite.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.util.lang.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.spi.communication.tcp.*;
import org.apache.ignite.spi.discovery.tcp.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;
import org.apache.ignite.testframework.*;
import org.apache.ignite.testframework.junits.common.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import static org.apache.ignite.cache.CacheAtomicityMode.*;
import static org.apache.ignite.cache.CacheMode.*;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.*;
/**
*
*/
public class IgniteCacheManyClientsTest extends GridCommonAbstractTest {
/** */
protected static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
/** */
private static final int SRVS = 4;
/** */
private boolean client;
/** */
private boolean clientDiscovery;
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(gridName);
cfg.setConnectorConfiguration(null);
cfg.setPeerClassLoadingEnabled(false);
cfg.setTimeServerPortRange(200);
((TcpCommunicationSpi)cfg.getCommunicationSpi()).setLocalPortRange(200);
((TcpCommunicationSpi)cfg.getCommunicationSpi()).setSharedMemoryPort(-1);
((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder);
((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinderCleanFrequency(10 * 60_000);
((TcpDiscoverySpi)cfg.getDiscoverySpi()).setJoinTimeout(2 * 60_000);
if (!clientDiscovery)
((TcpDiscoverySpi)cfg.getDiscoverySpi()).setForceServerMode(true);
cfg.setClientMode(client);
CacheConfiguration ccfg = new CacheConfiguration();
ccfg.setCacheMode(PARTITIONED);
ccfg.setAtomicityMode(ATOMIC);
ccfg.setWriteSynchronizationMode(PRIMARY_SYNC);
ccfg.setBackups(1);
cfg.setCacheConfiguration(ccfg);
return cfg;
}
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
startGrids(SRVS);
}
/** {@inheritDoc} */
@Override protected void afterTestsStopped() throws Exception {
super.afterTestsStopped();
stopAllGrids();
}
/** {@inheritDoc} */
@Override protected long getTestTimeout() {
return 10 * 60_000;
}
/**
     * @throws Throwable If failed.
*/
public void testManyClients() throws Throwable {
manyClientsPutGet();
}
/**
     * @throws Throwable If failed.
*/
public void testManyClientsClientDiscovery() throws Throwable {
clientDiscovery = true;
manyClientsPutGet();
}
/**
* @throws Exception If failed.
*/
public void testManyClientsSequentiallyClientDiscovery() throws Exception {
clientDiscovery = true;
manyClientsSequentially();
}
/**
* @throws Exception If failed.
*/
private void manyClientsSequentially() throws Exception {
client = true;
List<Ignite> clients = new ArrayList<>();
final int CLIENTS = 50;
int idx = SRVS;
ThreadLocalRandom rnd = ThreadLocalRandom.current();
for (int i = 0; i < CLIENTS; i++) {
Ignite ignite = startGrid(idx++);
log.info("Started node: " + ignite.name());
assertTrue(ignite.configuration().isClientMode());
clients.add(ignite);
IgniteCache<Object, Object> cache = ignite.cache(null);
Integer key = rnd.nextInt(0, 1000);
cache.put(key, i);
assertNotNull(cache.get(key));
}
log.info("All clients started.");
try {
checkNodes(SRVS + CLIENTS);
}
finally {
for (Ignite client : clients)
client.close();
}
}
/**
* @param expCnt Expected number of nodes.
*/
private void checkNodes(int expCnt) {
assertEquals(expCnt, G.allGrids().size());
long topVer = -1L;
for (Ignite ignite : G.allGrids()) {
log.info("Check node: " + ignite.name());
if (topVer == -1L)
topVer = ignite.cluster().topologyVersion();
else
assertEquals("Unexpected topology version for node: " + ignite.name(),
topVer,
ignite.cluster().topologyVersion());
assertEquals("Unexpected number of nodes for node: " + ignite.name(),
expCnt,
ignite.cluster().nodes().size());
}
}
/**
     * @throws Throwable If failed.
*/
private void manyClientsPutGet() throws Throwable {
client = true;
final AtomicInteger idx = new AtomicInteger(SRVS);
final AtomicBoolean stop = new AtomicBoolean();
final AtomicReference<Throwable> err = new AtomicReference<>();
final int THREADS = 50;
final CountDownLatch latch = new CountDownLatch(THREADS);
try {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
@Override public Object call() throws Exception {
boolean counted = false;
try {
int nodeIdx = idx.getAndIncrement();
Thread.currentThread().setName("client-thread-node-" + nodeIdx);
try (Ignite ignite = startGrid(nodeIdx)) {
log.info("Started node: " + ignite.name());
assertTrue(ignite.configuration().isClientMode());
IgniteCache<Object, Object> cache = ignite.cache(null);
ThreadLocalRandom rnd = ThreadLocalRandom.current();
int iter = 0;
Integer key = rnd.nextInt(0, 1000);
cache.put(key, iter++);
assertNotNull(cache.get(key));
latch.countDown();
counted = true;
while (!stop.get() && err.get() == null) {
key = rnd.nextInt(0, 1000);
cache.put(key, iter++);
assertNotNull(cache.get(key));
Thread.sleep(1);
}
log.info("Stopping node: " + ignite.name());
}
return null;
}
catch (Throwable e) {
err.compareAndSet(null, e);
log.error("Unexpected error in client thread: " + e, e);
throw e;
}
finally {
if (!counted)
latch.countDown();
}
}
}, THREADS, "client-thread");
assertTrue(latch.await(getTestTimeout(), TimeUnit.MILLISECONDS));
log.info("All clients started.");
Thread.sleep(10_000);
Throwable err0 = err.get();
if (err0 != null)
throw err0;
boolean wait = GridTestUtils.waitForCondition(new GridAbsPredicate() {
@Override public boolean apply() {
try {
checkNodes(SRVS + THREADS);
return true;
}
catch (AssertionFailedError e) {
log.info("Check failed, will retry: " + e);
}
return false;
}
}, 10_000);
if (!wait)
checkNodes(SRVS + THREADS);
log.info("Stop clients.");
stop.set(true);
fut.get();
}
catch (Throwable e) {
log.error("Unexpected error: " + e, e);
throw e;
}
finally {
stop.set(true);
}
}
}
|
|
package com.planet_ink.coffee_mud.Abilities.Prayers;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Prayer_AnimateMummy extends Prayer
{
@Override
public String ID()
{
return "Prayer_AnimateMummy";
}
private final static String localizedName = CMLib.lang().L("Animate Mummy");
@Override
public String name()
{
return localizedName;
}
@Override
public int classificationCode()
{
return Ability.ACODE_PRAYER | Ability.DOMAIN_DEATHLORE;
}
@Override
public int abstractQuality()
{
return Ability.QUALITY_INDIFFERENT;
}
@Override
public int enchantQuality()
{
return Ability.QUALITY_INDIFFERENT;
}
@Override
public long flags()
{
return Ability.FLAG_UNHOLY;
}
@Override
protected int canTargetCode()
{
return CAN_ITEMS;
}
	private final static String localizedDisplayText = CMLib.lang().L("Newly animate dead");
	@Override
	public String displayText()
	{
		return localizedDisplayText;
	}
@Override
public void unInvoke()
{
final Physical P=affected;
super.unInvoke();
if((P instanceof MOB)&&(this.canBeUninvoked)&&(this.unInvoked))
{
if((!P.amDestroyed())&&(((MOB)P).amFollowing()==null))
{
final Room R=CMLib.map().roomLocation(P);
if(!CMLib.law().doesHavePriviledgesHere(invoker(), R))
{
if((R!=null)&&(!((MOB)P).amDead()))
R.showHappens(CMMsg.MSG_OK_ACTION, P,L("<S-NAME> wander(s) off."));
P.destroy();
}
}
}
}
@Override
public boolean tick(final Tickable ticking, final int tickID)
{
final int tickSet = super.tickDown;
if(!super.tick(ticking, tickID))
return false;
if(ticking instanceof MOB)
{
final MOB mob=(MOB)ticking;
if(mob.amFollowing() != null)
super.tickDown = tickSet;
}
return true;
}
public int getUndeadLevel(final MOB mob, final double baseLvl, final double corpseLevel)
{
final ExpertiseLibrary exLib=CMLib.expertises();
final double deathLoreExpertiseLevel = super.getXLEVELLevel(mob);
final double appropriateLoreExpertiseLevel = super.getX1Level(mob);
final double charLevel = mob.phyStats().level();
final double maxDeathLoreExpertiseLevel = exLib.getHighestListableStageBySkill(mob,ID(),ExpertiseLibrary.XType.LEVEL);
final double maxApproLoreExpertiseLevel = exLib.getHighestListableStageBySkill(mob,ID(),ExpertiseLibrary.XType.X1);
double lvl = 0;
if ((maxApproLoreExpertiseLevel > 0)
&& (maxDeathLoreExpertiseLevel > 0))
{
lvl = (charLevel * (10 + appropriateLoreExpertiseLevel) / (10 + maxApproLoreExpertiseLevel))
-(baseLvl+4+(2*maxDeathLoreExpertiseLevel));
}
if(lvl < 0.0)
lvl = 0.0;
lvl += baseLvl + (2*deathLoreExpertiseLevel);
if(lvl > corpseLevel)
lvl = corpseLevel;
return (int)Math.round(lvl);
}
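	/*
	 * Worked example of the level formula above (illustrative numbers only): for a level 30 caster
	 * with no deathlore or appropriate-lore expertise, both max expertise stages are 0, so the first
	 * branch is skipped and lvl = baseLvl = 19; since 19 does not exceed a corpse level of 30, the
	 * mummy is animated at level 19. A weaker corpse (say level 15) would cap the result at 15.
	 */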
@Override
public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
{
final Physical target=getAnyTarget(mob,commands,givenTarget,Wearable.FILTER_UNWORNONLY);
if(target==null)
return false;
if(target==mob)
{
mob.tell(L("@x1 doesn't look dead yet.",target.name(mob)));
return false;
}
if(!(target instanceof DeadBody))
{
mob.tell(L("You can't animate that."));
return false;
}
final DeadBody body=(DeadBody)target;
if(body.isPlayerCorpse()||(body.getMobName().length()==0)
||((body.charStats()!=null)&&(body.charStats().getMyRace()!=null)&&(body.charStats().getMyRace().racialCategory().equalsIgnoreCase("Undead"))))
{
mob.tell(L("You can't animate that."));
return false;
}
String race="a";
if((body.charStats()!=null)&&(body.charStats().getMyRace()!=null))
race=CMLib.english().startWithAorAn(body.charStats().getMyRace().name()).toLowerCase();
String description=body.getMobDescription();
if(description.trim().length()==0)
description="It looks dead.";
else
description+="\n\rIt also looks dead.";
if(body.basePhyStats().level()<20)
{
mob.tell(L("This creature is too weak to create a mummy from."));
return false;
}
if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
return false;
final boolean success=proficiencyCheck(mob,0,auto);
if(success)
{
final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),
auto?"":L("^S<S-NAME> @x1 to animate <T-NAMESELF> as a mummy.^?",prayForWord(mob)));
if(mob.location().okMessage(mob,msg))
{
mob.location().send(mob,msg);
final String undeadRace = ((body.charStats()!=null) && (body.charStats().getMyRace() != null) && (body.charStats().getMyRace().useRideClass())) ?
"GenRideableUndead" : "GenUndead";
final MOB newMOB=CMClass.getMOB(undeadRace);
newMOB.setName(L("@x1 mummy",race));
newMOB.setDescription(description);
newMOB.setDisplayText(L("@x1 mummy is here",race));
newMOB.basePhyStats().setLevel(getUndeadLevel(mob,19,body.phyStats().level()));
newMOB.baseCharStats().setStat(CharStats.STAT_GENDER,body.charStats().getStat(CharStats.STAT_GENDER));
newMOB.baseCharStats().setMyRace(CMClass.getRace("Undead"));
newMOB.baseCharStats().setBodyPartsFromStringAfterRace(body.charStats().getBodyPartsAsString());
final Ability P=CMClass.getAbility("Prop_StatTrainer");
if(P!=null)
{
P.setMiscText("NOTEACH STR=20 INT=10 WIS=10 CON=10 DEX=20 CHA=2");
newMOB.addNonUninvokableEffect(P);
}
newMOB.recoverCharStats();
newMOB.basePhyStats().setAttackAdjustment(CMLib.leveler().getLevelAttack(newMOB));
newMOB.basePhyStats().setDamage(CMLib.leveler().getLevelMOBDamage(newMOB));
newMOB.basePhyStats().setSensesMask(PhyStats.CAN_SEE_DARK|PhyStats.CAN_SEE_INVISIBLE|PhyStats.CAN_SEE_SNEAKERS);
CMLib.factions().setAlignment(newMOB,Faction.Align.EVIL);
newMOB.baseState().setHitPoints(30*newMOB.basePhyStats().level());
newMOB.baseState().setMovement(CMLib.leveler().getLevelMove(newMOB));
newMOB.basePhyStats().setArmor(CMLib.leveler().getLevelMOBArmor(newMOB));
newMOB.addNonUninvokableEffect(CMClass.getAbility("Prop_ModExperience","0"));
newMOB.addTattoo("SYSTEM_SUMMONED");
newMOB.baseState().setMana(100);
newMOB.recoverCharStats();
newMOB.recoverPhyStats();
newMOB.recoverMaxState();
newMOB.resetToMaxState();
newMOB.addAbility(CMClass.getAbility("Disease_MummyRot"));
Behavior B=CMClass.getBehavior("CombatAbilities");
newMOB.addBehavior(B);
B=CMClass.getBehavior("Aggressive");
if(B!=null)
{
B.setParms("+NAMES \"-"+mob.Name()+"\" -LEVEL +>"+newMOB.basePhyStats().level());
newMOB.addBehavior(B);
}
newMOB.text();
newMOB.bringToLife(mob.location(),true);
CMLib.beanCounter().clearZeroMoney(newMOB,null);
newMOB.setMoneyVariation(0);
//newMOB.location().showOthers(newMOB,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> appears!"));
int it=0;
while(it<newMOB.location().numItems())
{
final Item item=newMOB.location().getItem(it);
if((item!=null)&&(item.container()==body))
{
final CMMsg msg2=CMClass.getMsg(newMOB,body,item,CMMsg.MSG_GET,null);
newMOB.location().send(newMOB,msg2);
final CMMsg msg4=CMClass.getMsg(newMOB,item,null,CMMsg.MSG_GET,null);
newMOB.location().send(newMOB,msg4);
final CMMsg msg3=CMClass.getMsg(newMOB,item,null,CMMsg.MSG_WEAR,null);
newMOB.location().send(newMOB,msg3);
if(!newMOB.isMine(item))
it++;
else
it=0;
}
else
it++;
}
body.destroy();
newMOB.setStartRoom(null);
beneficialAffect(mob,newMOB,0,0);
mob.location().show(newMOB,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> begin(s) to rise!"));
mob.location().recoverRoomStats();
}
}
else
return beneficialWordsFizzle(mob,target,L("<S-NAME> @x1 to animate <T-NAMESELF>, but fail(s) miserably.",prayForWord(mob)));
// return whether it worked
return success;
}
}
|
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util;
import com.intellij.ReviseWhenPortedToJDK;
import com.intellij.diagnostic.ThreadDumper;
import com.intellij.openapi.util.ThrowableComputable;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.TestOnly;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* @author cdr
*/
public class ConcurrencyUtil {
/**
   * Invokes all tasks using the given thread pool and waits for them to complete, avoiding thread starvation along the way
* (see <a href="http://gafter.blogspot.com/2006/11/thread-pool-puzzler.html">"A Thread Pool Puzzler"</a>).
*/
public static <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> tasks, ExecutorService executorService) throws Throwable {
if (executorService == null) {
for (Callable<T> task : tasks) {
task.call();
}
return null;
}
List<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
boolean done = false;
try {
for (Callable<T> t : tasks) {
Future<T> future = executorService.submit(t);
futures.add(future);
}
// force not started futures to execute using the current thread
for (Future f : futures) {
((Runnable)f).run();
}
for (Future f : futures) {
try {
f.get();
}
catch (CancellationException ignore) {
}
catch (ExecutionException e) {
Throwable cause = e.getCause();
if (cause != null) {
throw cause;
}
}
}
done = true;
}
finally {
if (!done) {
for (Future f : futures) {
f.cancel(false);
}
}
}
return futures;
}
/**
* @return defaultValue if there is no entry in the map (in that case defaultValue is placed into the map),
* or corresponding value if entry already exists.
*/
@NotNull
public static <K, V> V cacheOrGet(@NotNull ConcurrentMap<K, V> map, @NotNull final K key, @NotNull final V defaultValue) {
V v = map.get(key);
if (v != null) return v;
V prev = map.putIfAbsent(key, defaultValue);
return prev == null ? defaultValue : prev;
}
/**
* @return defaultValue if the reference contains null (in that case defaultValue is placed there), or reference value otherwise.
*/
@ReviseWhenPortedToJDK("8") // todo "replace with return ref.updateAndGet(prev -> prev == null ? defaultValue : prev)"
@NotNull
public static <T> T cacheOrGet(@NotNull AtomicReference<T> ref, @NotNull T defaultValue) {
T value = ref.get();
while (value == null) {
value = ref.compareAndSet(null, defaultValue) ? defaultValue : ref.get();
}
return value;
}
@NotNull
public static ThreadPoolExecutor newSingleThreadExecutor(@NotNull @NonNls String name) {
return newSingleThreadExecutor(name, Thread.NORM_PRIORITY);
}
@NotNull
public static ThreadPoolExecutor newSingleThreadExecutor(@NonNls @NotNull String name, int priority) {
return new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>(), newNamedThreadFactory(name, true, priority));
}
@NotNull
public static ScheduledThreadPoolExecutor newSingleScheduledThreadExecutor(@NotNull @NonNls String name) {
return newSingleScheduledThreadExecutor(name, Thread.NORM_PRIORITY);
}
@NotNull
public static ScheduledThreadPoolExecutor newSingleScheduledThreadExecutor(@NonNls @NotNull String name, int priority) {
ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1, newNamedThreadFactory(name, true, priority));
executor.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
executor.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
return executor;
}
@NotNull
public static ThreadFactory newNamedThreadFactory(@NonNls @NotNull final String name, final boolean isDaemon, final int priority) {
return new ThreadFactory() {
@NotNull
@Override
public Thread newThread(@NotNull Runnable r) {
Thread thread = new Thread(r, name);
thread.setDaemon(isDaemon);
thread.setPriority(priority);
return thread;
}
};
}
@NotNull
public static ThreadFactory newNamedThreadFactory(@NonNls @NotNull final String name) {
return new ThreadFactory() {
@NotNull
@Override
public Thread newThread(@NotNull final Runnable r) {
return new Thread(r, name);
}
};
}
/**
   * Waits for all tasks in the {@code executor} to finish within the specified {@code timeout}
*/
@TestOnly
public static void awaitQuiescence(@NotNull ThreadPoolExecutor executor, long timeout, @NotNull TimeUnit unit) {
executor.setKeepAliveTime(1, TimeUnit.NANOSECONDS); // no need for zombies in tests
executor.setCorePoolSize(0); // interrupt idle workers
ReentrantLock mainLock = ReflectionUtil.getField(executor.getClass(), executor, ReentrantLock.class, "mainLock");
Set workers;
mainLock.lock();
try {
HashSet workersField = ReflectionUtil.getField(executor.getClass(), executor, HashSet.class, "workers");
workers = new HashSet(workersField); // to be able to iterate thread-safely outside the lock
}
finally {
mainLock.unlock();
}
for (Object worker : workers) {
Thread thread = ReflectionUtil.getField(worker.getClass(), worker, Thread.class, "thread");
try {
thread.join(unit.toMillis(timeout));
}
catch (InterruptedException e) {
String trace = "Thread leaked: " + thread+"; " + thread.getState()+" ("+ thread.isAlive()+")\n--- its stacktrace:\n";
for (final StackTraceElement stackTraceElement : thread.getStackTrace()) {
trace += " at "+stackTraceElement +"\n";
}
trace += "---\n";
System.err.println("Executor " + executor + " is still active after " + unit.toSeconds(timeout) + " seconds://///\n" +
"Thread "+thread+" dump:\n" + trace+
"all thread dump:\n"+ThreadDumper.dumpThreadsToString() + "\n/////");
break;
}
}
}
public static void joinAll(@NotNull Collection<? extends Thread> threads) throws RuntimeException {
for (Thread thread : threads) {
try {
thread.join();
}
catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
public static void joinAll(@NotNull Thread... threads) throws RuntimeException {
joinAll(Arrays.asList(threads));
}
@NotNull
@Contract(pure = true)
public static Runnable underThreadNameRunnable(@NotNull final String name, @NotNull final Runnable runnable) {
return new Runnable() {
@Override
public void run() {
runUnderThreadName(name, runnable);
}
};
}
public static void runUnderThreadName(@NotNull final String name, @NotNull final Runnable runnable) {
Thread currentThread = Thread.currentThread();
String oldThreadName = currentThread.getName();
if (name.equals(oldThreadName)) {
runnable.run();
}
else {
currentThread.setName(name);
try {
runnable.run();
}
finally {
currentThread.setName(oldThreadName);
}
}
}
@NotNull
public static Runnable once(@NotNull final Runnable delegate) {
final AtomicBoolean done = new AtomicBoolean(false);
return new Runnable() {
@Override
public void run() {
if (done.compareAndSet(false, true)) {
delegate.run();
}
}
};
}
public static <T, E extends Throwable> T withLock(@NotNull Lock lock, @NotNull ThrowableComputable<T, E> runnable) throws E {
lock.lock();
try {
return runnable.compute();
}
finally {
lock.unlock();
}
}
public static <E extends Throwable> void withLock(@NotNull Lock lock, @NotNull ThrowableRunnable<E> runnable) throws E {
lock.lock();
try {
runnable.run();
}
finally {
lock.unlock();
}
}
}
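/*
 * A minimal usage sketch, not part of the original class: it assumes it is compiled in this same
 * file/package so the existing imports apply. The pool name and the task bodies are illustrative.
 */
class ConcurrencyUtilExample {
  public static void main(String[] args) throws Throwable {
    ThreadPoolExecutor pool = ConcurrencyUtil.newSingleThreadExecutor("example-pool");
    try {
      List<Callable<Integer>> tasks = new ArrayList<Callable<Integer>>();
      for (int i = 0; i < 4; i++) {
        final int n = i;
        tasks.add(new Callable<Integer>() {
          @Override
          public Integer call() {
            return n * n; // trivial work item
          }
        });
      }
      // invokeAll() also runs not-yet-started futures on the calling thread to avoid starvation
      List<Future<Integer>> futures = ConcurrencyUtil.invokeAll(tasks, pool);
      for (Future<Integer> f : futures) {
        System.out.println(f.get());
      }
      // once() guarantees the wrapped runnable executes at most one time
      Runnable oneShot = ConcurrencyUtil.once(new Runnable() {
        @Override
        public void run() {
          System.out.println("runs only once");
        }
      });
      oneShot.run();
      oneShot.run(); // second call is a no-op
    }
    finally {
      pool.shutdownNow();
    }
  }
}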
|
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.waf.model;
import java.io.Serializable;
/**
*
*/
public class ListWebACLsResult implements Serializable, Cloneable {
/**
* <p>
* If you have more <code>WebACL</code> objects than the number that you
* specified for <code>Limit</code> in the request, the response includes a
* <code>NextMarker</code> value. To list more <code>WebACL</code> objects,
* submit another <code>ListWebACLs</code> request, and specify the
* <code>NextMarker</code> value from the response in the
* <code>NextMarker</code> value in the next request.
* </p>
*/
private String nextMarker;
/**
* <p>
* An array of <a>WebACLSummary</a> objects.
* </p>
*/
private java.util.List<WebACLSummary> webACLs;
/**
* <p>
* If you have more <code>WebACL</code> objects than the number that you
* specified for <code>Limit</code> in the request, the response includes a
* <code>NextMarker</code> value. To list more <code>WebACL</code> objects,
* submit another <code>ListWebACLs</code> request, and specify the
* <code>NextMarker</code> value from the response in the
* <code>NextMarker</code> value in the next request.
* </p>
*
* @param nextMarker
* If you have more <code>WebACL</code> objects than the number that
* you specified for <code>Limit</code> in the request, the response
* includes a <code>NextMarker</code> value. To list more
* <code>WebACL</code> objects, submit another
* <code>ListWebACLs</code> request, and specify the
* <code>NextMarker</code> value from the response in the
* <code>NextMarker</code> value in the next request.
*/
public void setNextMarker(String nextMarker) {
this.nextMarker = nextMarker;
}
/**
* <p>
* If you have more <code>WebACL</code> objects than the number that you
* specified for <code>Limit</code> in the request, the response includes a
* <code>NextMarker</code> value. To list more <code>WebACL</code> objects,
* submit another <code>ListWebACLs</code> request, and specify the
* <code>NextMarker</code> value from the response in the
* <code>NextMarker</code> value in the next request.
* </p>
*
* @return If you have more <code>WebACL</code> objects than the number that
* you specified for <code>Limit</code> in the request, the response
* includes a <code>NextMarker</code> value. To list more
* <code>WebACL</code> objects, submit another
* <code>ListWebACLs</code> request, and specify the
* <code>NextMarker</code> value from the response in the
* <code>NextMarker</code> value in the next request.
*/
public String getNextMarker() {
return this.nextMarker;
}
/**
* <p>
* If you have more <code>WebACL</code> objects than the number that you
* specified for <code>Limit</code> in the request, the response includes a
* <code>NextMarker</code> value. To list more <code>WebACL</code> objects,
* submit another <code>ListWebACLs</code> request, and specify the
* <code>NextMarker</code> value from the response in the
* <code>NextMarker</code> value in the next request.
* </p>
*
* @param nextMarker
* If you have more <code>WebACL</code> objects than the number that
* you specified for <code>Limit</code> in the request, the response
* includes a <code>NextMarker</code> value. To list more
* <code>WebACL</code> objects, submit another
* <code>ListWebACLs</code> request, and specify the
* <code>NextMarker</code> value from the response in the
* <code>NextMarker</code> value in the next request.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListWebACLsResult withNextMarker(String nextMarker) {
setNextMarker(nextMarker);
return this;
}
/**
* <p>
* An array of <a>WebACLSummary</a> objects.
* </p>
*
* @return An array of <a>WebACLSummary</a> objects.
*/
public java.util.List<WebACLSummary> getWebACLs() {
return webACLs;
}
/**
* <p>
* An array of <a>WebACLSummary</a> objects.
* </p>
*
* @param webACLs
* An array of <a>WebACLSummary</a> objects.
*/
public void setWebACLs(java.util.Collection<WebACLSummary> webACLs) {
if (webACLs == null) {
this.webACLs = null;
return;
}
this.webACLs = new java.util.ArrayList<WebACLSummary>(webACLs);
}
/**
* <p>
* An array of <a>WebACLSummary</a> objects.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setWebACLs(java.util.Collection)} or
* {@link #withWebACLs(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param webACLs
* An array of <a>WebACLSummary</a> objects.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListWebACLsResult withWebACLs(WebACLSummary... webACLs) {
if (this.webACLs == null) {
setWebACLs(new java.util.ArrayList<WebACLSummary>(webACLs.length));
}
for (WebACLSummary ele : webACLs) {
this.webACLs.add(ele);
}
return this;
}
/**
* <p>
* An array of <a>WebACLSummary</a> objects.
* </p>
*
* @param webACLs
* An array of <a>WebACLSummary</a> objects.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListWebACLsResult withWebACLs(
java.util.Collection<WebACLSummary> webACLs) {
setWebACLs(webACLs);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getNextMarker() != null)
sb.append("NextMarker: " + getNextMarker() + ",");
if (getWebACLs() != null)
sb.append("WebACLs: " + getWebACLs());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListWebACLsResult == false)
return false;
ListWebACLsResult other = (ListWebACLsResult) obj;
if (other.getNextMarker() == null ^ this.getNextMarker() == null)
return false;
if (other.getNextMarker() != null
&& other.getNextMarker().equals(this.getNextMarker()) == false)
return false;
if (other.getWebACLs() == null ^ this.getWebACLs() == null)
return false;
if (other.getWebACLs() != null
&& other.getWebACLs().equals(this.getWebACLs()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getNextMarker() == null) ? 0 : getNextMarker().hashCode());
hashCode = prime * hashCode
+ ((getWebACLs() == null) ? 0 : getWebACLs().hashCode());
return hashCode;
}
@Override
public ListWebACLsResult clone() {
try {
return (ListWebACLsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!", e);
}
}
}
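/*
 * A minimal usage sketch (not part of the generated SDK file): it shows the fluent with* style of
 * this result class and how NextMarker signals that another ListWebACLs page is available. The
 * marker string and the empty summary list below are placeholders.
 */
class ListWebACLsResultExample {
    public static void main(String[] args) {
        ListWebACLsResult result = new ListWebACLsResult()
                .withNextMarker("example-marker")
                .withWebACLs(new java.util.ArrayList<WebACLSummary>());
        // A non-null NextMarker means the service truncated the listing at Limit;
        // pass it as NextMarker in the next request to fetch the following page.
        if (result.getNextMarker() != null) {
            System.out.println("More WebACLs after marker: " + result.getNextMarker());
        }
        System.out.println(result); // toString() prints the marker and the summary list
    }
}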
|
|
/*
Copyright 2011-2013 The Cassandra Consortium (cassandra-fp7.eu)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package eu.cassandra.utils;
import java.util.Arrays;
import org.apache.log4j.Logger;
/**
* This is an auxiliary class used during the washing machine identification
* procedure. It is used for storing the values of consecutive minutes where the
* reactive power is over zero.
*
* @author Antonios Chrysopoulos
* @version 0.9, Date: 29.07.2013
*/
public class ConsecutiveValues
{
static Logger log = Logger.getLogger(ConsecutiveValues.class);
/**
* The starting minute of the set of consecutive minutes with positive
* reactive values.
*/
private int start = -1;
/**
* The ending minute of the set of consecutive minutes with positive
* reactive values
*/
private int end = -1;
/**
* The array that contains the active power values of the consecutive minutes.
*/
private double[] pValues;
/**
* The array that contains the reactive power values of the consecutive
* minutes.
*/
private double[] qValues;
/**
   * A metric showing how consistent the reactive power value is within the set.
*/
private double difference;
/**
* The number of consecutive minutes.
*/
private int numberOfElements = 0;
/**
* The maximum value of the reactive power array.
*/
private double maxQ = 0;
/**
* A constructor of a consecutive values set of minutes.
*
* @param start
* The starting minute of the consecutive set of minutes
* @param end
* The ending minute of the consecutive set of minutes
* @param pValues
* The array that contains the active power values of the consecutive
* minutes.
* @param qValues
* The array that contains the reactive power values of the
* consecutive minutes.
*/
public ConsecutiveValues (int start, int end, double[] pValues,
double[] qValues)
{
this.start = start;
this.end = end;
this.pValues = pValues;
this.qValues = qValues;
fillMetrics();
}
/**
*
* This function is used as a getter for the start minute of the consecutive
* minutes set.
*
* @return the start minute of the set.
*/
public int getStart ()
{
return start;
}
/**
*
* This function is used as a getter for the end minute of the consecutive
* minutes set.
*
* @return the end minute of the set.
*/
public int getEnd ()
{
return end;
}
/**
*
* This function is used as a getter for the difference metric of the
* consecutive
* minutes set.
*
* @return the difference metric of the set.
*/
public double getDifference ()
{
return difference;
}
/**
*
* This function is used as a getter for the number of minutes.
*
* @return the number of minutes of the set.
*/
public int getNumberOfElements ()
{
return numberOfElements;
}
/**
*
* This function is used as a getter for the max reactive value of the
* consecutive minutes set.
*
* @return the max reactive value of the set.
*/
public double getMaxQ ()
{
return maxQ;
}
/**
*
* This function is used as a getter for the active power consumption during
* this consecutive set.
*
* @return the active power consumption during this period.
*/
public double[] getPValues ()
{
return pValues;
}
/**
*
* This function is used as a getter for the reactive power consumption during
* this consecutive set.
*
* @return the reactive power consumption during this period.
*/
public double[] getQValues ()
{
return qValues;
}
/**
* This function is used for the calculation of the several metrics that are
* used for the washing machine identification procedure.
*/
private void fillMetrics ()
{
double[] diffActive = new double[pValues.length - 1];
double[] diffReactive = new double[pValues.length - 1];
double[] tempReactive = Arrays.copyOf(qValues, qValues.length);
double metric2 = 0;
for (int i = 0; i < diffActive.length; i++) {
diffActive[i] = pValues[i + 1] - pValues[i];
diffReactive[i] = qValues[i + 1] - qValues[i];
if (diffActive[i] * diffReactive[i] < 0)
        tempReactive[i + 1] = tempReactive[i]; // carry the previous reactive value forward when active and reactive power change in opposite directions
}
qValues = Arrays.copyOf(tempReactive, tempReactive.length);
maxQ = Utils.findMax(qValues);
numberOfElements = tempReactive.length;
for (int i = 0; i < tempReactive.length; i++) {
tempReactive[i] /= maxQ;
metric2 += tempReactive[i];
}
difference = 100 * ((numberOfElements - metric2) / numberOfElements);
}
/**
* This function is used to show the attributes and the details of the
* consecutive values object.
*/
public void status ()
{
log.debug("");
log.debug("Start: " + start + " End: " + end);
log.debug("PValues: " + Arrays.toString(pValues));
log.debug("QValues: " + Arrays.toString(qValues));
log.debug("Difference: " + difference);
log.debug("Number of Elements: " + numberOfElements);
log.debug("MaxQ: " + maxQ);
log.debug("");
}
}
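/*
 * A small usage sketch (not part of the original class): the minute indices and power values are
 * made up, and the example assumes the rest of the eu.cassandra.utils package (e.g. Utils.findMax,
 * used internally by fillMetrics) is on the classpath.
 */
class ConsecutiveValuesExample {
  public static void main(String[] args) {
    // Four consecutive minutes (100..103) with strictly positive reactive power.
    double[] activePower = { 500.0, 520.0, 510.0, 530.0 };
    double[] reactivePower = { 60.0, 70.0, 65.0, 80.0 };
    ConsecutiveValues values = new ConsecutiveValues(100, 103, activePower, reactivePower);
    System.out.println("Minutes: " + values.getStart() + " - " + values.getEnd());
    System.out.println("Number of elements: " + values.getNumberOfElements());
    System.out.println("Max reactive power: " + values.getMaxQ());
    System.out.println("Difference metric (%): " + values.getDifference());
  }
}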
|
|
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.inputmethod.keyboard.internal;
import android.content.res.TypedArray;
import android.util.Log;
import android.util.SparseArray;
import com.android.inputmethod.latin.CollectionUtils;
import com.android.inputmethod.latin.R;
import com.android.inputmethod.latin.XmlParseUtils;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.util.HashMap;
public final class KeyStylesSet {
private static final String TAG = KeyStylesSet.class.getSimpleName();
private static final boolean DEBUG = false;
private final HashMap<String, KeyStyle> mStyles = CollectionUtils.newHashMap();
private final KeyboardTextsSet mTextsSet;
private final KeyStyle mEmptyKeyStyle;
private static final String EMPTY_STYLE_NAME = "<empty>";
public KeyStylesSet(final KeyboardTextsSet textsSet) {
mTextsSet = textsSet;
mEmptyKeyStyle = new EmptyKeyStyle(textsSet);
mStyles.put(EMPTY_STYLE_NAME, mEmptyKeyStyle);
}
private static final class EmptyKeyStyle extends KeyStyle {
EmptyKeyStyle(final KeyboardTextsSet textsSet) {
super(textsSet);
}
@Override
public String[] getStringArray(final TypedArray a, final int index) {
return parseStringArray(a, index);
}
@Override
public String getString(final TypedArray a, final int index) {
return parseString(a, index);
}
@Override
public int getInt(final TypedArray a, final int index, final int defaultValue) {
return a.getInt(index, defaultValue);
}
@Override
public int getFlag(final TypedArray a, final int index) {
return a.getInt(index, 0);
}
}
private static final class DeclaredKeyStyle extends KeyStyle {
private final HashMap<String, KeyStyle> mStyles;
private final String mParentStyleName;
private final SparseArray<Object> mStyleAttributes = CollectionUtils.newSparseArray();
public DeclaredKeyStyle(final String parentStyleName, final KeyboardTextsSet textsSet,
final HashMap<String, KeyStyle> styles) {
super(textsSet);
mParentStyleName = parentStyleName;
mStyles = styles;
}
@Override
public String[] getStringArray(final TypedArray a, final int index) {
if (a.hasValue(index)) {
return parseStringArray(a, index);
}
final Object value = mStyleAttributes.get(index);
if (value != null) {
return (String[])value;
}
final KeyStyle parentStyle = mStyles.get(mParentStyleName);
return parentStyle.getStringArray(a, index);
}
@Override
public String getString(final TypedArray a, final int index) {
if (a.hasValue(index)) {
return parseString(a, index);
}
final Object value = mStyleAttributes.get(index);
if (value != null) {
return (String)value;
}
final KeyStyle parentStyle = mStyles.get(mParentStyleName);
return parentStyle.getString(a, index);
}
@Override
public int getInt(final TypedArray a, final int index, final int defaultValue) {
if (a.hasValue(index)) {
return a.getInt(index, defaultValue);
}
final Object value = mStyleAttributes.get(index);
if (value != null) {
return (Integer)value;
}
final KeyStyle parentStyle = mStyles.get(mParentStyleName);
return parentStyle.getInt(a, index, defaultValue);
}
@Override
public int getFlag(final TypedArray a, final int index) {
int flags = a.getInt(index, 0);
final Object value = mStyleAttributes.get(index);
if (value != null) {
flags |= (Integer)value;
}
final KeyStyle parentStyle = mStyles.get(mParentStyleName);
return flags | parentStyle.getFlag(a, index);
}
public void readKeyAttributes(final TypedArray keyAttr) {
// TODO: Currently not all Key attributes can be declared as style.
readString(keyAttr, R.styleable.Keyboard_Key_code);
readString(keyAttr, R.styleable.Keyboard_Key_altCode);
readString(keyAttr, R.styleable.Keyboard_Key_keyLabel);
readString(keyAttr, R.styleable.Keyboard_Key_keyOutputText);
readString(keyAttr, R.styleable.Keyboard_Key_keyHintLabel);
readStringArray(keyAttr, R.styleable.Keyboard_Key_moreKeys);
readStringArray(keyAttr, R.styleable.Keyboard_Key_additionalMoreKeys);
readFlag(keyAttr, R.styleable.Keyboard_Key_keyLabelFlags);
readString(keyAttr, R.styleable.Keyboard_Key_keyIcon);
readString(keyAttr, R.styleable.Keyboard_Key_keyIconDisabled);
readString(keyAttr, R.styleable.Keyboard_Key_keyIconPreview);
readInt(keyAttr, R.styleable.Keyboard_Key_maxMoreKeysColumn);
readInt(keyAttr, R.styleable.Keyboard_Key_backgroundType);
readFlag(keyAttr, R.styleable.Keyboard_Key_keyActionFlags);
}
private void readString(final TypedArray a, final int index) {
if (a.hasValue(index)) {
mStyleAttributes.put(index, parseString(a, index));
}
}
private void readInt(final TypedArray a, final int index) {
if (a.hasValue(index)) {
mStyleAttributes.put(index, a.getInt(index, 0));
}
}
private void readFlag(final TypedArray a, final int index) {
if (a.hasValue(index)) {
final Integer value = (Integer)mStyleAttributes.get(index);
mStyleAttributes.put(index, a.getInt(index, 0) | (value != null ? value : 0));
}
}
private void readStringArray(final TypedArray a, final int index) {
if (a.hasValue(index)) {
mStyleAttributes.put(index, parseStringArray(a, index));
}
}
}
public void parseKeyStyleAttributes(final TypedArray keyStyleAttr, final TypedArray keyAttrs,
final XmlPullParser parser) throws XmlPullParserException {
final String styleName = keyStyleAttr.getString(R.styleable.Keyboard_KeyStyle_styleName);
if (DEBUG) {
Log.d(TAG, String.format("<%s styleName=%s />",
KeyboardBuilder.TAG_KEY_STYLE, styleName));
if (mStyles.containsKey(styleName)) {
Log.d(TAG, "key-style " + styleName + " is overridden at "
+ parser.getPositionDescription());
}
}
String parentStyleName = EMPTY_STYLE_NAME;
if (keyStyleAttr.hasValue(R.styleable.Keyboard_KeyStyle_parentStyle)) {
parentStyleName = keyStyleAttr.getString(R.styleable.Keyboard_KeyStyle_parentStyle);
if (!mStyles.containsKey(parentStyleName)) {
throw new XmlParseUtils.ParseException(
"Unknown parentStyle " + parentStyleName, parser);
}
}
final DeclaredKeyStyle style = new DeclaredKeyStyle(parentStyleName, mTextsSet, mStyles);
style.readKeyAttributes(keyAttrs);
mStyles.put(styleName, style);
}
public KeyStyle getKeyStyle(final TypedArray keyAttr, final XmlPullParser parser)
throws XmlParseUtils.ParseException {
if (!keyAttr.hasValue(R.styleable.Keyboard_Key_keyStyle)) {
return mEmptyKeyStyle;
}
final String styleName = keyAttr.getString(R.styleable.Keyboard_Key_keyStyle);
if (!mStyles.containsKey(styleName)) {
throw new XmlParseUtils.ParseException("Unknown key style: " + styleName, parser);
}
return mStyles.get(styleName);
}
}
|
|
package com.intellij.compiler.instrumentation;
import org.jetbrains.org.objectweb.asm.ClassReader;
import org.jetbrains.org.objectweb.asm.ClassVisitor;
import org.jetbrains.org.objectweb.asm.MethodVisitor;
import org.jetbrains.org.objectweb.asm.Opcodes;
import java.io.*;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* @author Eugene Zhuravlev
* Date: 2/16/12
*/
public class InstrumentationClassFinder {
private static final PseudoClass[] EMPTY_PSEUDOCLASS_ARRAY = new PseudoClass[0];
private static final String CLASS_RESOURCE_EXTENSION = ".class";
private static final URL[] URL_EMPTY_ARRAY = new URL[0];
private final Map<String, PseudoClass> myLoaded = new HashMap<String, PseudoClass>(); // className -> class object
private final ClassFinderClasspath myPlatformClasspath;
private final ClassFinderClasspath myClasspath;
private final URL[] myPlatformUrls;
private final URL[] myClasspathUrls;
private ClassLoader myLoader;
private byte[] myBuffer;
public InstrumentationClassFinder(final URL[] cp) {
this(URL_EMPTY_ARRAY, cp);
}
public InstrumentationClassFinder(final URL[] platformUrls, final URL[] classpathUrls) {
myPlatformUrls = platformUrls;
myClasspathUrls = classpathUrls;
myPlatformClasspath = new ClassFinderClasspath(platformUrls);
myClasspath = new ClassFinderClasspath(classpathUrls);
}
public static URL createJDKPlatformUrl(String jdkHomePath) throws MalformedURLException {
return new URL(ClassFinderClasspath.Loader.JRT_PROTOCOL, null, jdkHomePath.replace(File.separatorChar, '/'));
}
// compatibility with legacy code requiring ClassLoader
public ClassLoader getLoader() {
ClassLoader loader = myLoader;
if (loader != null) {
return loader;
}
final URLClassLoader platformLoader = myPlatformUrls.length > 0 ? new URLClassLoader(myPlatformUrls, null) : null;
final ClassLoader cpLoader = new URLClassLoader(myClasspathUrls, platformLoader);
loader = new ClassLoader(cpLoader) {
public InputStream getResourceAsStream(String name) {
        InputStream is = super.getResourceAsStream(name);
if (is == null) {
try {
is = InstrumentationClassFinder.this.getResourceAsStream(name);
}
catch (IOException ignored) {
}
}
return is;
}
protected Class findClass(String name) throws ClassNotFoundException {
final InputStream is = lookupClassBeforeClasspath(name.replace('.', '/'));
if (is == null) {
throw new ClassNotFoundException("Class not found: " + name.replace('/', '.')); // ensure presentable class name in error message
}
try {
final byte[] bytes = loadBytes(is);
return defineClass(name, bytes, 0, bytes.length);
}
finally {
try {
is.close();
}
catch (IOException ignored) {
}
}
}
};
myLoader = loader;
return loader;
}
public void releaseResources() {
myPlatformClasspath.releaseResources();
myClasspath.releaseResources();
myLoaded.clear();
myBuffer = null;
myLoader = null;
}
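/**
 * Loads a lightweight class descriptor, caching both successful results and misses (as {@code NULL_OBJ})
 * so that repeatedly requested missing classes do not trigger new classpath scans.
 */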
public PseudoClass loadClass(final String name) throws IOException, ClassNotFoundException{
final String internalName = name.replace('.', '/'); // normalize
final PseudoClass aClass = myLoaded.get(internalName);
if (aClass != null && aClass != PseudoClass.NULL_OBJ) {
return aClass;
}
final InputStream is = aClass == null? getClassBytesStream(internalName) : null;
if (is == null) {
if (aClass == null) {
myLoaded.put(internalName, PseudoClass.NULL_OBJ);
}
// ensure presentable class name in error message
throw new ClassNotFoundException("Class not found: " + name.replace('/', '.')) {
@Override
public synchronized Throwable fillInStackTrace() {
return this;
}
};
}
try {
final PseudoClass result = loadPseudoClass(is);
myLoaded.put(internalName, result);
return result;
}
finally {
is.close();
}
}
public void cleanCachedData(String className) {
myLoaded.remove(className.replace('.', '/'));
}
public InputStream getClassBytesAsStream(String className) throws IOException {
final String internalName = className.replace('.', '/'); // normalize
final PseudoClass aClass = myLoaded.get(internalName);
if (aClass == PseudoClass.NULL_OBJ) {
return null;
}
InputStream bytes = null;
try {
bytes = getClassBytesStream(internalName);
}
finally {
if (aClass == null && bytes == null) {
myLoaded.put(internalName, PseudoClass.NULL_OBJ);
}
}
return bytes;
}
private InputStream getClassBytesStream(String internalName) throws IOException {
InputStream is = null;
// first look into platformCp
final String resourceName = internalName + CLASS_RESOURCE_EXTENSION;
Resource resource = myPlatformClasspath.getResource(resourceName);
if (resource != null) {
is = resource.getInputStream();
}
// second look into memory and classpath
if (is == null) {
is = lookupClassBeforeClasspath(internalName);
}
if (is == null) {
resource = myClasspath.getResource(resourceName);
if (resource != null) {
is = resource.getInputStream();
}
}
if (is == null) {
is = lookupClassAfterClasspath(internalName);
}
return is;
}
public InputStream getResourceAsStream(String resourceName) throws IOException {
InputStream is = null;
Resource resource = myPlatformClasspath.getResource(resourceName);
if (resource != null) {
is = resource.getInputStream();
}
if (is == null) {
resource = myClasspath.getResource(resourceName);
if (resource != null) {
is = resource.getInputStream();
}
}
return is;
}
protected InputStream lookupClassBeforeClasspath(final String internalClassName) {
return null;
}
protected InputStream lookupClassAfterClasspath(final String internalClassName) {
return null;
}
private PseudoClass loadPseudoClass(InputStream is) throws IOException {
final ClassReader reader = new ClassReader(is);
final V visitor = new V();
reader.accept(visitor, ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
return new PseudoClass(this, visitor.myName, visitor.mySuperclassName, visitor.myInterfaces, visitor.myModifiers, visitor.myMethods);
}
public static class PseudoClass {
static final PseudoClass NULL_OBJ = new PseudoClass(null, null, null, null, 0, null);
private final String myName;
private final String mySuperClass;
private final String[] myInterfaces;
private final int myModifiers;
private final List<PseudoMethod> myMethods;
private final InstrumentationClassFinder myFinder;
private PseudoClass(InstrumentationClassFinder finder,
final String name,
final String superClass,
final String[] interfaces,
final int modifiers,
List<PseudoMethod> methods) {
myName = name;
mySuperClass = superClass;
myInterfaces = interfaces;
myModifiers = modifiers;
myMethods = methods;
myFinder = finder;
}
public int getModifiers() {
return myModifiers;
}
public boolean isInterface() {
return (myModifiers & Opcodes.ACC_INTERFACE) > 0;
}
public String getName() {
return myName;
}
public List<PseudoMethod> getMethods() {
return myMethods;
}
public List<PseudoMethod> findMethods(String name) {
final List<PseudoMethod> result = new ArrayList<PseudoMethod>();
for (PseudoMethod method : myMethods) {
if (method.getName().equals(name)){
result.add(method);
}
}
return result;
}
public PseudoMethod findMethod(String name, String descriptor) {
for (PseudoMethod method : myMethods) {
if (method.getName().equals(name) && method.getSignature().equals(descriptor)){
return method;
}
}
return null;
}
public PseudoMethod findMethodInHierarchy(final String name, final String descriptor) throws IOException, ClassNotFoundException {
// first find in superclasses
for (PseudoClass c = this; c != null; c = c.getSuperClass()) {
final PseudoMethod method = c.findMethod(name, descriptor);
if (method != null) {
return method;
}
}
// second, check interfaces
for (PseudoClass iface : getInterfaces()) {
final PseudoMethod method = findInterfaceMethodRecursively(iface, name, descriptor);
if (method != null) {
return method;
}
}
return null;
}
private static PseudoMethod findInterfaceMethodRecursively(PseudoClass fromIface, final String name, final String descriptor) throws IOException, ClassNotFoundException {
PseudoMethod method = fromIface.findMethod(name, descriptor);
if (method != null) {
return method;
}
for (PseudoClass superIface : fromIface.getInterfaces()) {
method = findInterfaceMethodRecursively(superIface, name, descriptor);
if (method != null) {
return method;
}
}
return null;
}
public InstrumentationClassFinder getFinder() {
return myFinder;
}
public PseudoClass getSuperClass() throws IOException, ClassNotFoundException {
final String superClass = mySuperClass;
return superClass != null? myFinder.loadClass(superClass) : null;
}
public PseudoClass[] getInterfaces() throws IOException, ClassNotFoundException {
if (myInterfaces == null) {
return EMPTY_PSEUDOCLASS_ARRAY;
}
final PseudoClass[] result = new PseudoClass[myInterfaces.length];
for (int i = 0; i < result.length; i++) {
result[i] = myFinder.loadClass(myInterfaces[i]);
}
return result;
}
public boolean equals (final Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
return getName().equals(((PseudoClass)o).getName());
}
private boolean isSubclassOf(final PseudoClass x) throws IOException, ClassNotFoundException {
for (PseudoClass c = this; c != null; c = c.getSuperClass()) {
final PseudoClass superClass = c.getSuperClass();
if (superClass != null && superClass.equals(x)) {
return true;
}
}
return false;
}
private boolean implementsInterface(final PseudoClass x) throws IOException, ClassNotFoundException {
for (PseudoClass c = this; c != null; c = c.getSuperClass()) {
final PseudoClass[] tis = c.getInterfaces();
for (final PseudoClass ti : tis) {
if (ti.equals(x) || ti.implementsInterface(x)) {
return true;
}
}
}
return false;
}
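/**
 * Mirrors {@link Class#isAssignableFrom} semantics: true if {@code x} is this class, one of its subclasses,
 * an implementor of this interface, or if {@code x} is an interface and this class is java/lang/Object.
 */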
public boolean isAssignableFrom(final PseudoClass x) throws IOException, ClassNotFoundException {
if (this.equals(x)) {
return true;
}
if (x.isSubclassOf(this)) {
return true;
}
if (x.implementsInterface(this)) {
return true;
}
if (x.isInterface() && "java/lang/Object".equals(getName())) {
return true;
}
return false;
}
public boolean hasDefaultPublicConstructor() {
for (PseudoMethod method : myMethods) {
if ("<init>".equals(method.getName()) && "()V".equals(method.getSignature())) {
return true;
}
}
return false;
}
public String getDescriptor() {
return "L" + myName + ";";
}
}
public static final class PseudoMethod {
private final int myAccess;
private final String myName;
private final String mySignature;
public PseudoMethod(int access, String name, String signature) {
myAccess = access;
myName = name;
mySignature = signature;
}
public int getModifiers() {
return myAccess;
}
public String getName() {
return myName;
}
public String getSignature() {
return mySignature;
}
}
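/** Minimal ASM class visitor that records the class name, superclass, interfaces, modifiers and the signatures of public methods. */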
private static class V extends ClassVisitor {
public String mySuperclassName = null;
public String[] myInterfaces = null;
public String myName = null;
public int myModifiers;
private final List<PseudoMethod> myMethods = new ArrayList<PseudoMethod>();
private V() {
super(Opcodes.ASM5);
}
public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
if ((access & Opcodes.ACC_PUBLIC) > 0) {
myMethods.add(new PseudoMethod(access, name, desc));
}
return super.visitMethod(access, name, desc, signature, exceptions);
}
public void visit(int version, int access, String pName, String signature, String pSuperName, String[] pInterfaces) {
mySuperclassName = pSuperName;
myInterfaces = pInterfaces;
myName = pName;
myModifiers = access;
}
}
public interface Resource {
InputStream getInputStream() throws IOException;
}
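/**
 * Lazily materialized classpath: URLs are converted into Loaders (directory, jar or jrt image)
 * on demand as resource lookups walk further down the path.
 */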
static class ClassFinderClasspath {
private final Stack<URL> myUrls = new Stack<URL>();
private final List<Loader> myLoaders = new ArrayList<Loader>();
private final Map<URL,Loader> myLoadersMap = new HashMap<URL, Loader>();
public ClassFinderClasspath(URL[] urls) {
if (urls.length > 0) {
for (int i = urls.length - 1; i >= 0; i--) {
myUrls.push(urls[i]);
}
}
}
public Resource getResource(String s) {
int i = 0;
for (Loader loader; (loader = getLoader(i)) != null; i++) {
Resource resource = loader.getResource(s);
if (resource != null) {
return resource;
}
}
return null;
}
public void releaseResources() {
for (Loader loader : myLoaders) {
loader.releaseResources();
}
myLoaders.clear();
myLoadersMap.clear();
}
private synchronized Loader getLoader(int i) {
while (myLoaders.size() < i + 1) {
URL url;
synchronized (myUrls) {
if (myUrls.empty()) {
return null;
}
url = myUrls.pop();
}
if (myLoadersMap.containsKey(url)) {
continue;
}
Loader loader;
try {
loader = getLoader(url, myLoaders.size());
if (loader == null) {
continue;
}
}
catch (IOException ioexception) {
continue;
}
myLoaders.add(loader);
myLoadersMap.put(url, loader);
}
return myLoaders.get(i);
}
private static Loader getLoader(final URL url, int index) throws IOException {
String s;
try {
s = url.toURI().getSchemeSpecificPart();
}
catch (URISyntaxException thisShouldNotHappen) {
thisShouldNotHappen.printStackTrace();
s = url.getFile();
}
if (s != null && s.length() > 0) {
final String protocol = url.getProtocol();
if (Loader.JRT_PROTOCOL.equals(protocol)) {
final Loader jrtLoader = JrtClassHolder.create(url, index);
if (jrtLoader != null) {
return jrtLoader;
}
}
if (new File(s).isDirectory()) {
return Loader.FILE_PROTOCOL.equals(protocol) ? new FileLoader(url, index) : null;
}
}
// by default treat the url as a jar archive
return new JarLoader(url, index);
}
abstract static class Loader {
protected static final String JAR_PROTOCOL = "jar";
protected static final String FILE_PROTOCOL = "file";
protected static final String JRT_PROTOCOL = "jrt";
private final URL myURL;
private final int myIndex;
protected Loader(URL url, int index) {
myURL = url;
myIndex = index;
}
protected URL getBaseURL() {
return myURL;
}
public abstract Resource getResource(final String name);
public abstract void releaseResources();
public int getIndex() {
return myIndex;
}
}
private static class FileLoader extends Loader {
private final File myRootDir;
@SuppressWarnings({"HardCodedStringLiteral"})
FileLoader(URL url, int index) throws IOException {
super(url, index);
if (!FILE_PROTOCOL.equals(url.getProtocol())) {
throw new IllegalArgumentException("url");
}
else {
final String s = unescapePercentSequences(url.getFile().replace('/', File.separatorChar));
myRootDir = new File(s);
}
}
public void releaseResources() {
}
public Resource getResource(final String name) {
try {
final URL url = new URL(getBaseURL(), name);
if (!url.getFile().startsWith(getBaseURL().getFile())) {
return null;
}
final File file = new File(myRootDir, name.replace('/', File.separatorChar));
if (file.exists()) {
return new Resource() {
@Override
public InputStream getInputStream() throws IOException {
return new BufferedInputStream(new FileInputStream(file));
}
public String toString() {
return file.getAbsolutePath();
}
};
}
}
catch (Exception ignored) {
}
return null;
}
public String toString() {
return "FileLoader [" + myRootDir + "]";
}
}
private static class JarLoader extends Loader {
private final URL myURL;
private ZipFile myZipFile;
JarLoader(URL url, int index) throws IOException {
super(new URL(JAR_PROTOCOL, "", -1, url + "!/"), index);
myURL = url;
}
public void releaseResources() {
final ZipFile zipFile = myZipFile;
if (zipFile != null) {
myZipFile = null;
try {
zipFile.close();
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
}
private ZipFile acquireZipFile() throws IOException {
ZipFile zipFile = myZipFile;
if (zipFile == null) {
zipFile = doGetZipFile();
myZipFile = zipFile;
}
return zipFile;
}
private ZipFile doGetZipFile() throws IOException {
if (FILE_PROTOCOL.equals(myURL.getProtocol())) {
String s = unescapePercentSequences(myURL.getFile().replace('/', File.separatorChar));
if (!new File(s).exists()) {
throw new FileNotFoundException(s);
}
return new ZipFile(s);
}
return null;
}
public Resource getResource(String name) {
try {
final ZipFile file = acquireZipFile();
if (file != null) {
final ZipEntry entry = file.getEntry(name);
if (entry != null) {
return new Resource() {
@Override
public InputStream getInputStream() throws IOException {
try {
final ZipFile file = acquireZipFile();
if (file != null) {
final InputStream inputStream = file.getInputStream(entry);
if (inputStream != null) {
return new FilterInputStream(inputStream) {};
}
}
}
catch (IOException e) {
e.printStackTrace();
}
return null;
}
public String toString() {
return "JarLoader [" + myURL + "!/" + entry.getName() + "]";
}
};
}
}
}
catch (Exception e) {
return null;
}
return null;
}
}
}
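/** Decodes %XX percent-escaped sequences (as found in file URLs) into UTF-8 characters, leaving malformed sequences as literal text. */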
private static String unescapePercentSequences(String s) {
if (s.indexOf('%') == -1) {
return s;
}
StringBuilder decoded = new StringBuilder();
final int len = s.length();
int i = 0;
while (i < len) {
char c = s.charAt(i);
if (c == '%') {
List<Integer> bytes = new ArrayList<Integer>();
while (i + 2 < len && s.charAt(i) == '%') {
final int d1 = decode(s.charAt(i + 1));
final int d2 = decode(s.charAt(i + 2));
if (d1 != -1 && d2 != -1) {
bytes.add(((d1 & 0xf) << 4 | d2 & 0xf));
i += 3;
}
else {
break;
}
}
if (!bytes.isEmpty()) {
final byte[] bytesArray = new byte[bytes.size()];
for (int j = 0; j < bytes.size(); j++) {
bytesArray[j] = (byte)bytes.get(j).intValue();
}
try {
decoded.append(new String(bytesArray, "UTF-8"));
continue;
}
catch (UnsupportedEncodingException ignored) {
}
}
}
decoded.append(c);
i++;
}
return decoded.toString();
}
private static int decode(char c) {
if ((c >= '0') && (c <= '9')){
return c - '0';
}
if ((c >= 'a') && (c <= 'f')){
return c - 'a' + 10;
}
if ((c >= 'A') && (c <= 'F')){
return c - 'A' + 10;
}
return -1;
}
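/** Reads the stream fully into a byte array, reusing a shared instance buffer; on I/O error the bytes read so far are returned. */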
public byte[] loadBytes(InputStream stream) {
byte[] buf = myBuffer;
if (buf == null) {
buf = new byte[512];
myBuffer = buf;
}
final ByteArrayOutputStream result = new ByteArrayOutputStream();
try {
while (true) {
int n = stream.read(buf, 0, buf.length);
if (n <= 0) {
break;
}
result.write(buf, 0, n);
}
result.close();
}
catch (IOException ignored) {
}
return result.toByteArray();
}
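/**
 * Reflectively resolves the optional JrtLoader implementation so jrt: URLs can be handled when that class
 * is present on the classpath; {@code create()} simply returns null otherwise.
 */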
private static final class JrtClassHolder {
public static final Class<? extends ClassFinderClasspath.Loader> ourClass;
public static final Constructor<? extends ClassFinderClasspath.Loader> ourConstructor;
static {
Class<? extends ClassFinderClasspath.Loader> aClass = null;
Constructor<? extends ClassFinderClasspath.Loader> constructor = null;
try {
aClass = (Class<? extends ClassFinderClasspath.Loader>)Class.forName("com.intellij.compiler.instrumentation.JrtLoader");
constructor = aClass.getDeclaredConstructor(URL.class, int.class);
constructor.setAccessible(true);
}
catch (Throwable ignored) {
}
ourClass = aClass;
ourConstructor = constructor;
}
public static ClassFinderClasspath.Loader create(URL url, int index) {
if (ourConstructor != null) {
try {
return ourConstructor.newInstance(url, index);
}
catch (Throwable ignored) {
}
}
return null;
}
}
}
|
|
//
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.http;
import org.eclipse.jetty.io.Buffer;
import org.eclipse.jetty.io.BufferCache;
/* ------------------------------------------------------------------------------- */
/** Well-known HTTP header names, their ordinals and cached {@link Buffer} constants for fast lookup.
 */
public class HttpHeaders extends BufferCache
{
/* ------------------------------------------------------------ */
/** General Fields.
*/
public final static String
CONNECTION= "Connection",
CACHE_CONTROL= "Cache-Control",
DATE= "Date",
PRAGMA= "Pragma",
PROXY_CONNECTION = "Proxy-Connection",
TRAILER= "Trailer",
TRANSFER_ENCODING= "Transfer-Encoding",
UPGRADE= "Upgrade",
VIA= "Via",
WARNING= "Warning",
NEGOTIATE= "Negotiate";
/* ------------------------------------------------------------ */
/** Entity Fields.
*/
public final static String ALLOW= "Allow",
CONTENT_ENCODING= "Content-Encoding",
CONTENT_LANGUAGE= "Content-Language",
CONTENT_LENGTH= "Content-Length",
CONTENT_LOCATION= "Content-Location",
CONTENT_MD5= "Content-MD5",
CONTENT_RANGE= "Content-Range",
CONTENT_TYPE= "Content-Type",
EXPIRES= "Expires",
LAST_MODIFIED= "Last-Modified";
/* ------------------------------------------------------------ */
/** Request Fields.
*/
public final static String ACCEPT= "Accept",
ACCEPT_CHARSET= "Accept-Charset",
ACCEPT_ENCODING= "Accept-Encoding",
ACCEPT_LANGUAGE= "Accept-Language",
AUTHORIZATION= "Authorization",
EXPECT= "Expect",
FORWARDED= "Forwarded",
FROM= "From",
HOST= "Host",
IF_MATCH= "If-Match",
IF_MODIFIED_SINCE= "If-Modified-Since",
IF_NONE_MATCH= "If-None-Match",
IF_RANGE= "If-Range",
IF_UNMODIFIED_SINCE= "If-Unmodified-Since",
KEEP_ALIVE= "Keep-Alive",
MAX_FORWARDS= "Max-Forwards",
PROXY_AUTHORIZATION= "Proxy-Authorization",
RANGE= "Range",
REQUEST_RANGE= "Request-Range",
REFERER= "Referer",
TE= "TE",
USER_AGENT= "User-Agent",
X_FORWARDED_FOR= "X-Forwarded-For",
X_FORWARDED_PROTO= "X-Forwarded-Proto",
X_FORWARDED_SERVER= "X-Forwarded-Server",
X_FORWARDED_HOST= "X-Forwarded-Host";
/* ------------------------------------------------------------ */
/** Response Fields.
*/
public final static String ACCEPT_RANGES= "Accept-Ranges",
AGE= "Age",
ETAG= "ETag",
LOCATION= "Location",
PROXY_AUTHENTICATE= "Proxy-Authenticate",
RETRY_AFTER= "Retry-After",
SERVER= "Server",
SERVLET_ENGINE= "Servlet-Engine",
VARY= "Vary",
WWW_AUTHENTICATE= "WWW-Authenticate";
/* ------------------------------------------------------------ */
/** Other Fields.
*/
public final static String COOKIE= "Cookie",
SET_COOKIE= "Set-Cookie",
SET_COOKIE2= "Set-Cookie2",
MIME_VERSION= "MIME-Version",
IDENTITY= "identity";
public final static int CONNECTION_ORDINAL= 1,
DATE_ORDINAL= 2,
PRAGMA_ORDINAL= 3,
TRAILER_ORDINAL= 4,
TRANSFER_ENCODING_ORDINAL= 5,
UPGRADE_ORDINAL= 6,
VIA_ORDINAL= 7,
WARNING_ORDINAL= 8,
ALLOW_ORDINAL= 9,
CONTENT_ENCODING_ORDINAL= 10,
CONTENT_LANGUAGE_ORDINAL= 11,
CONTENT_LENGTH_ORDINAL= 12,
CONTENT_LOCATION_ORDINAL= 13,
CONTENT_MD5_ORDINAL= 14,
CONTENT_RANGE_ORDINAL= 15,
CONTENT_TYPE_ORDINAL= 16,
EXPIRES_ORDINAL= 17,
LAST_MODIFIED_ORDINAL= 18,
ACCEPT_ORDINAL= 19,
ACCEPT_CHARSET_ORDINAL= 20,
ACCEPT_ENCODING_ORDINAL= 21,
ACCEPT_LANGUAGE_ORDINAL= 22,
AUTHORIZATION_ORDINAL= 23,
EXPECT_ORDINAL= 24,
FORWARDED_ORDINAL= 25,
FROM_ORDINAL= 26,
HOST_ORDINAL= 27,
IF_MATCH_ORDINAL= 28,
IF_MODIFIED_SINCE_ORDINAL= 29,
IF_NONE_MATCH_ORDINAL= 30,
IF_RANGE_ORDINAL= 31,
IF_UNMODIFIED_SINCE_ORDINAL= 32,
KEEP_ALIVE_ORDINAL= 33,
MAX_FORWARDS_ORDINAL= 34,
PROXY_AUTHORIZATION_ORDINAL= 35,
RANGE_ORDINAL= 36,
REQUEST_RANGE_ORDINAL= 37,
REFERER_ORDINAL= 38,
TE_ORDINAL= 39,
USER_AGENT_ORDINAL= 40,
X_FORWARDED_FOR_ORDINAL= 41,
ACCEPT_RANGES_ORDINAL= 42,
AGE_ORDINAL= 43,
ETAG_ORDINAL= 44,
LOCATION_ORDINAL= 45,
PROXY_AUTHENTICATE_ORDINAL= 46,
RETRY_AFTER_ORDINAL= 47,
SERVER_ORDINAL= 48,
SERVLET_ENGINE_ORDINAL= 49,
VARY_ORDINAL= 50,
WWW_AUTHENTICATE_ORDINAL= 51,
COOKIE_ORDINAL= 52,
SET_COOKIE_ORDINAL= 53,
SET_COOKIE2_ORDINAL= 54,
MIME_VERSION_ORDINAL= 55,
IDENTITY_ORDINAL= 56,
CACHE_CONTROL_ORDINAL=57,
PROXY_CONNECTION_ORDINAL=58,
X_FORWARDED_PROTO_ORDINAL=59,
X_FORWARDED_SERVER_ORDINAL=60,
X_FORWARDED_HOST_ORDINAL=61;
public final static HttpHeaders CACHE= new HttpHeaders();
public final static Buffer
HOST_BUFFER=CACHE.add(HOST,HOST_ORDINAL),
ACCEPT_BUFFER=CACHE.add(ACCEPT,ACCEPT_ORDINAL),
ACCEPT_CHARSET_BUFFER=CACHE.add(ACCEPT_CHARSET,ACCEPT_CHARSET_ORDINAL),
ACCEPT_ENCODING_BUFFER=CACHE.add(ACCEPT_ENCODING,ACCEPT_ENCODING_ORDINAL),
ACCEPT_LANGUAGE_BUFFER=CACHE.add(ACCEPT_LANGUAGE,ACCEPT_LANGUAGE_ORDINAL),
CONTENT_LENGTH_BUFFER=CACHE.add(CONTENT_LENGTH,CONTENT_LENGTH_ORDINAL),
CONNECTION_BUFFER=CACHE.add(CONNECTION,CONNECTION_ORDINAL),
CACHE_CONTROL_BUFFER=CACHE.add(CACHE_CONTROL,CACHE_CONTROL_ORDINAL),
DATE_BUFFER=CACHE.add(DATE,DATE_ORDINAL),
PRAGMA_BUFFER=CACHE.add(PRAGMA,PRAGMA_ORDINAL),
TRAILER_BUFFER=CACHE.add(TRAILER,TRAILER_ORDINAL),
TRANSFER_ENCODING_BUFFER=CACHE.add(TRANSFER_ENCODING,TRANSFER_ENCODING_ORDINAL),
UPGRADE_BUFFER=CACHE.add(UPGRADE,UPGRADE_ORDINAL),
VIA_BUFFER=CACHE.add(VIA,VIA_ORDINAL),
WARNING_BUFFER=CACHE.add(WARNING,WARNING_ORDINAL),
ALLOW_BUFFER=CACHE.add(ALLOW,ALLOW_ORDINAL),
CONTENT_ENCODING_BUFFER=CACHE.add(CONTENT_ENCODING,CONTENT_ENCODING_ORDINAL),
CONTENT_LANGUAGE_BUFFER=CACHE.add(CONTENT_LANGUAGE,CONTENT_LANGUAGE_ORDINAL),
CONTENT_LOCATION_BUFFER=CACHE.add(CONTENT_LOCATION,CONTENT_LOCATION_ORDINAL),
CONTENT_MD5_BUFFER=CACHE.add(CONTENT_MD5,CONTENT_MD5_ORDINAL),
CONTENT_RANGE_BUFFER=CACHE.add(CONTENT_RANGE,CONTENT_RANGE_ORDINAL),
CONTENT_TYPE_BUFFER=CACHE.add(CONTENT_TYPE,CONTENT_TYPE_ORDINAL),
EXPIRES_BUFFER=CACHE.add(EXPIRES,EXPIRES_ORDINAL),
LAST_MODIFIED_BUFFER=CACHE.add(LAST_MODIFIED,LAST_MODIFIED_ORDINAL),
AUTHORIZATION_BUFFER=CACHE.add(AUTHORIZATION,AUTHORIZATION_ORDINAL),
EXPECT_BUFFER=CACHE.add(EXPECT,EXPECT_ORDINAL),
FORWARDED_BUFFER=CACHE.add(FORWARDED,FORWARDED_ORDINAL),
FROM_BUFFER=CACHE.add(FROM,FROM_ORDINAL),
IF_MATCH_BUFFER=CACHE.add(IF_MATCH,IF_MATCH_ORDINAL),
IF_MODIFIED_SINCE_BUFFER=CACHE.add(IF_MODIFIED_SINCE,IF_MODIFIED_SINCE_ORDINAL),
IF_NONE_MATCH_BUFFER=CACHE.add(IF_NONE_MATCH,IF_NONE_MATCH_ORDINAL),
IF_RANGE_BUFFER=CACHE.add(IF_RANGE,IF_RANGE_ORDINAL),
IF_UNMODIFIED_SINCE_BUFFER=CACHE.add(IF_UNMODIFIED_SINCE,IF_UNMODIFIED_SINCE_ORDINAL),
KEEP_ALIVE_BUFFER=CACHE.add(KEEP_ALIVE,KEEP_ALIVE_ORDINAL),
MAX_FORWARDS_BUFFER=CACHE.add(MAX_FORWARDS,MAX_FORWARDS_ORDINAL),
PROXY_AUTHORIZATION_BUFFER=CACHE.add(PROXY_AUTHORIZATION,PROXY_AUTHORIZATION_ORDINAL),
RANGE_BUFFER=CACHE.add(RANGE,RANGE_ORDINAL),
REQUEST_RANGE_BUFFER=CACHE.add(REQUEST_RANGE,REQUEST_RANGE_ORDINAL),
REFERER_BUFFER=CACHE.add(REFERER,REFERER_ORDINAL),
TE_BUFFER=CACHE.add(TE,TE_ORDINAL),
USER_AGENT_BUFFER=CACHE.add(USER_AGENT,USER_AGENT_ORDINAL),
X_FORWARDED_FOR_BUFFER=CACHE.add(X_FORWARDED_FOR,X_FORWARDED_FOR_ORDINAL),
X_FORWARDED_PROTO_BUFFER=CACHE.add(X_FORWARDED_PROTO,X_FORWARDED_PROTO_ORDINAL),
X_FORWARDED_SERVER_BUFFER=CACHE.add(X_FORWARDED_SERVER,X_FORWARDED_SERVER_ORDINAL),
X_FORWARDED_HOST_BUFFER=CACHE.add(X_FORWARDED_HOST,X_FORWARDED_HOST_ORDINAL),
ACCEPT_RANGES_BUFFER=CACHE.add(ACCEPT_RANGES,ACCEPT_RANGES_ORDINAL),
AGE_BUFFER=CACHE.add(AGE,AGE_ORDINAL),
ETAG_BUFFER=CACHE.add(ETAG,ETAG_ORDINAL),
LOCATION_BUFFER=CACHE.add(LOCATION,LOCATION_ORDINAL),
PROXY_AUTHENTICATE_BUFFER=CACHE.add(PROXY_AUTHENTICATE,PROXY_AUTHENTICATE_ORDINAL),
RETRY_AFTER_BUFFER=CACHE.add(RETRY_AFTER,RETRY_AFTER_ORDINAL),
SERVER_BUFFER=CACHE.add(SERVER,SERVER_ORDINAL),
SERVLET_ENGINE_BUFFER=CACHE.add(SERVLET_ENGINE,SERVLET_ENGINE_ORDINAL),
VARY_BUFFER=CACHE.add(VARY,VARY_ORDINAL),
WWW_AUTHENTICATE_BUFFER=CACHE.add(WWW_AUTHENTICATE,WWW_AUTHENTICATE_ORDINAL),
COOKIE_BUFFER=CACHE.add(COOKIE,COOKIE_ORDINAL),
SET_COOKIE_BUFFER=CACHE.add(SET_COOKIE,SET_COOKIE_ORDINAL),
SET_COOKIE2_BUFFER=CACHE.add(SET_COOKIE2,SET_COOKIE2_ORDINAL),
MIME_VERSION_BUFFER=CACHE.add(MIME_VERSION,MIME_VERSION_ORDINAL),
IDENTITY_BUFFER=CACHE.add(IDENTITY,IDENTITY_ORDINAL),
PROXY_CONNECTION_BUFFER=CACHE.add(PROXY_CONNECTION,PROXY_CONNECTION_ORDINAL);
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.openwire;
import javax.jms.Connection;
import javax.jms.Destination;
import javax.jms.InvalidDestinationException;
import javax.jms.JMSException;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.Session;
import javax.jms.TemporaryQueue;
import javax.jms.TemporaryTopic;
import javax.jms.TextMessage;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
public class SimpleOpenWireTest extends BasicOpenWireTest
{
@Rule
public ExpectedException thrown = ExpectedException.none();
@Override
@Before
public void setUp() throws Exception
{
this.realStore = true;
super.setUp();
}
@Test
public void testSimpleQueue() throws Exception
{
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
System.out.println("creating queue: " + queueName);
Destination dest = new ActiveMQQueue(queueName);
System.out.println("creating producer...");
MessageProducer producer = session.createProducer(dest);
final int num = 1;
final String msgBase = "MfromAMQ-";
for (int i = 0; i < num; i++)
{
TextMessage msg = session.createTextMessage("MfromAMQ-" + i);
producer.send(msg);
System.out.println("sent: ");
}
//receive
MessageConsumer consumer = session.createConsumer(dest);
System.out.println("receiving messages...");
for (int i = 0; i < num; i++)
{
TextMessage msg = (TextMessage) consumer.receive(5000);
System.out.println("received: " + msg);
String content = msg.getText();
System.out.println("content: " + content);
assertEquals(msgBase + i, content);
}
assertNull(consumer.receive(1000));
session.close();
}
@Test
public void testSimpleTopic() throws Exception
{
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
System.out.println("creating queue: " + topicName);
Destination dest = new ActiveMQTopic(topicName);
MessageConsumer consumer1 = session.createConsumer(dest);
MessageConsumer consumer2 = session.createConsumer(dest);
MessageProducer producer = session.createProducer(dest);
final int num = 1;
final String msgBase = "MfromAMQ-";
for (int i = 0; i < num; i++)
{
TextMessage msg = session.createTextMessage("MfromAMQ-" + i);
producer.send(msg);
System.out.println("Sent a message");
}
//receive
System.out.println("receiving messages...");
for (int i = 0; i < num; i++)
{
TextMessage msg = (TextMessage) consumer1.receive(5000);
System.out.println("received: " + msg);
String content = msg.getText();
assertEquals(msgBase + i, content);
}
assertNull(consumer1.receive(500));
System.out.println("receiving messages...");
for (int i = 0; i < num; i++)
{
TextMessage msg = (TextMessage) consumer2.receive(5000);
System.out.println("received: " + msg);
String content = msg.getText();
assertEquals(msgBase + i, content);
}
assertNull(consumer2.receive(500));
session.close();
}
@Test
public void testSimpleTempTopic() throws Exception
{
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
System.out.println("creating temp topic");
TemporaryTopic tempTopic = session.createTemporaryTopic();
System.out.println("create consumer 1");
MessageConsumer consumer1 = session.createConsumer(tempTopic);
System.out.println("create consumer 2");
MessageConsumer consumer2 = session.createConsumer(tempTopic);
System.out.println("create producer");
MessageProducer producer = session.createProducer(tempTopic);
System.out.println("sending messages");
final int num = 1;
final String msgBase = "MfromAMQ-";
for (int i = 0; i < num; i++)
{
TextMessage msg = session.createTextMessage("MfromAMQ-" + i);
producer.send(msg);
System.out.println("Sent a message");
}
//receive
System.out.println("receiving messages...");
for (int i = 0; i < num; i++)
{
TextMessage msg = (TextMessage) consumer1.receive(5000);
System.out.println("received: " + msg);
String content = msg.getText();
assertEquals(msgBase + i, content);
}
assertNull(consumer1.receive(500));
System.out.println("receiving messages...");
for (int i = 0; i < num; i++)
{
TextMessage msg = (TextMessage) consumer2.receive(5000);
System.out.println("received: " + msg);
String content = msg.getText();
assertEquals(msgBase + i, content);
}
assertNull(consumer2.receive(500));
session.close();
}
@Test
public void testSimpleTempQueue() throws Exception
{
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
System.out.println("creating temp queue");
TemporaryQueue tempQueue = session.createTemporaryQueue();
System.out.println("create consumer 1");
MessageConsumer consumer1 = session.createConsumer(tempQueue);
System.out.println("create producer");
MessageProducer producer = session.createProducer(tempQueue);
System.out.println("sending messages");
final int num = 1;
final String msgBase = "MfromAMQ-";
for (int i = 0; i < num; i++)
{
TextMessage msg = session.createTextMessage("MfromAMQ-" + i);
producer.send(msg);
System.out.println("Sent a message");
}
//receive
System.out.println("receiving messages...");
for (int i = 0; i < num; i++)
{
TextMessage msg = (TextMessage) consumer1.receive(5000);
System.out.println("received: " + msg);
String content = msg.getText();
assertEquals(msgBase + i, content);
}
assertNull(consumer1.receive(500));
session.close();
}
@Test
public void testInvalidDestinationExceptionWhenNoQueueExistsOnCreateProducer() throws Exception
{
AddressSettings addressSetting = new AddressSettings();
addressSetting.setAutoCreateJmsQueues(false);
server.getAddressSettingsRepository().addMatch("jms.queue.foo", addressSetting);
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
Queue queue = session.createQueue("foo");
thrown.expect(InvalidDestinationException.class);
thrown.expect(JMSException.class);
session.createProducer(queue);
session.close();
}
@Test
public void testAutoDestinationCreationOnProducerSend() throws JMSException
{
AddressSettings addressSetting = new AddressSettings();
addressSetting.setAutoCreateJmsQueues(true);
String address = "foo";
server.getAddressSettingsRepository().addMatch("jms.queue." + address, addressSetting);
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
TextMessage message = session.createTextMessage("bar");
Queue queue = new ActiveMQQueue(address);
MessageProducer producer = session.createProducer(null);
producer.send(queue, message);
MessageConsumer consumer = session.createConsumer(queue);
TextMessage message1 = (TextMessage) consumer.receive(1000);
assertTrue(message1.getText().equals(message.getText()));
}
@Test
public void testAutoDestinationCreationOnConsumer() throws JMSException
{
AddressSettings addressSetting = new AddressSettings();
addressSetting.setAutoCreateJmsQueues(true);
String address = "foo";
server.getAddressSettingsRepository().addMatch("jms.queue." + address, addressSetting);
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
TextMessage message = session.createTextMessage("bar");
Queue queue = new ActiveMQQueue(address);
MessageConsumer consumer = session.createConsumer(queue);
MessageProducer producer = session.createProducer(null);
producer.send(queue, message);
TextMessage message1 = (TextMessage) consumer.receive(1000);
assertTrue(message1.getText().equals(message.getText()));
}
@Test
public void testAutoDestinationNoCreationOnConsumer() throws JMSException
{
AddressSettings addressSetting = new AddressSettings();
addressSetting.setAutoCreateJmsQueues(false);
String address = "foo";
server.getAddressSettingsRepository().addMatch("jms.queue." + address, addressSetting);
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
TextMessage message = session.createTextMessage("bar");
Queue queue = new ActiveMQQueue(address);
try
{
MessageConsumer consumer = session.createConsumer(queue);
fail("supposed to throw an exception here");
}
catch (JMSException e)
{
// expected: the destination does not exist and auto queue creation is disabled
}
}
/**
* This is the example shipped with the distribution
*
* @throws Exception
*/
@Test
public void testOpenWireExample() throws Exception
{
Connection exConn = null;
try
{
String urlString = "tcp://" + OWHOST + ":" + OWPORT + "?wireFormat.cacheEnabled=true";
ActiveMQConnectionFactory exFact = new ActiveMQConnectionFactory(urlString);
// Step 2. Perform a lookup on the queue
Queue queue = new ActiveMQQueue(durableQueueName);
// Step 4. Create a JMS Connection
exConn = exFact.createConnection();
// Step 10. Start the Connection
exConn.start();
// Step 5. Create a JMS Session
Session session = exConn.createSession(false, Session.AUTO_ACKNOWLEDGE);
// Step 6. Create a JMS Message Producer
MessageProducer producer = session.createProducer(queue);
// Step 7. Create a Text Message
TextMessage message = session.createTextMessage("This is a text message");
//System.out.println("Sent message: " + message.getText());
// Step 8. Send the Message
producer.send(message);
// Step 9. Create a JMS Message Consumer
MessageConsumer messageConsumer = session.createConsumer(queue);
// Step 11. Receive the message
TextMessage messageReceived = (TextMessage) messageConsumer.receive(5000);
System.out.println("Received message: " + messageReceived);
assertEquals("This is a text message", messageReceived.getText());
}
finally
{
if (exConn != null)
{
exConn.close();
}
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.loadtests.dsi;
import org.apache.ignite.*;
import org.apache.ignite.cluster.*;
import org.apache.ignite.compute.*;
import org.apache.ignite.internal.util.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.testframework.*;
import org.jetbrains.annotations.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
/**
 * DSI load test client. Each client repeatedly submits request/response tasks to server nodes
 * and records transaction count, latency and submit-time statistics.
 */
public class GridDsiClient implements Callable {
/** Stats update interval in seconds. */
private static final int UPDATE_INTERVAL_SEC = 10;
/** Grid. */
private static Ignite g;
/** Transaction count. */
private static AtomicLong txCnt = new AtomicLong();
/** Latency. */
private static AtomicLong latency = new AtomicLong();
/** Submit time. */
private static GridAtomicLong submitTime = new GridAtomicLong();
/** Server stats. */
private static volatile T3<Long, Integer, Integer> srvStats;
/** Finish flag. */
private static AtomicBoolean finish = new AtomicBoolean();
/** Terminal ID. */
private String terminalId;
/** Node ID. */
private UUID nodeId;
/**
* Client constructor.
*
* @param terminalId Terminal ID.
* @param nodeId Node ID.
*/
GridDsiClient(String terminalId, UUID nodeId) {
this.terminalId = terminalId;
this.nodeId = nodeId;
}
/**
* Predicate to look for server node.
*
* @return {@code true} if node segment is 'server'.
*/
public static IgnitePredicate<ClusterNode> serverNode() {
return new IgnitePredicate<ClusterNode>() {
@Override public boolean apply(ClusterNode node) {
return "server".equals(node.attribute("segment"));
}
};
}
/**
* Predicate to look for client node.
*
* @return {@code true} if node segment is 'client'.
*/
public static IgnitePredicate<ClusterNode> clientNode() {
return new IgnitePredicate<ClusterNode>() {
@Override public boolean apply(ClusterNode node) {
return "client".equals(node.attribute("segment"));
}
};
}
/** {@inheritDoc} */
@SuppressWarnings({"unchecked", "InfiniteLoopStatement"})
@Nullable @Override public Object call() throws Exception {
IgniteCompute comp = g.compute(g.cluster().forPredicate(serverNode())).withAsync();
while (!finish.get()) {
try {
long t0 = System.currentTimeMillis();
long submitTime1 = t0;
comp.execute(GridDsiRequestTask.class, new GridDsiMessage(terminalId, nodeId));
ComputeTaskFuture<T3<Long, Integer, Integer>> f1 = comp.future();
submitTime.setIfGreater(System.currentTimeMillis() - submitTime1);
T3<Long, Integer, Integer> res1 = f1.get();
submitTime1 = System.currentTimeMillis();
comp.execute(GridDsiResponseTask.class, new GridDsiMessage(terminalId, nodeId));
ComputeTaskFuture<T3<Long, Integer, Integer>> f2 = comp.future();
submitTime.setIfGreater(System.currentTimeMillis() - submitTime1);
T3<Long, Integer, Integer> res2 = f2.get();
long t1 = System.currentTimeMillis();
txCnt.incrementAndGet();
latency.addAndGet(t1 - t0);
if (res1 != null)
srvStats = res1;
if (res2 != null)
srvStats = res2;
}
catch (IgniteException e) {
e.printStackTrace();
}
}
return null;
}
/**
* Method to print request statistics.
*/
private static void displayReqCount() {
new Thread(new Runnable() {
@SuppressWarnings({"BusyWait", "InfiniteLoopStatement"})
@Override public void run() {
int interval = 30;
while (true) {
long cnt0 = txCnt.get();
long lt0 = latency.get();
try {
Thread.sleep(interval * 1000);
}
catch (InterruptedException e) {
e.printStackTrace();
}
long cnt1 = txCnt.get();
long lt1 = latency.get();
X.println(">>>");
X.println(">>> Transaction/s: " + (cnt1 - cnt0) / interval);
X.println(
">>> Avg Latency: " + ((cnt1 - cnt0) > 0 ? (lt1 - lt0) / (cnt1 - cnt0) + "ms" : "invalid"));
X.println(">>> Max Submit Time: " + submitTime.getAndSet(0));
}
}
}).start();
}
/**
* Execute DSI load client.
*
* @param args Command line arguments: the first (required) is the number of client threads,
*      the second is the test duration in seconds, the third is an optional results output file,
*      and the fourth points to the Spring XML configuration file (a default is used if omitted).
* @throws Exception If client fails.
*/
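// Hypothetical invocation (thread count, duration in seconds, output file, Spring config; values are illustrative only):
//   java org.apache.ignite.loadtests.dsi.GridDsiClient 64 300 dsi-client.csv modules/core/src/test/config/load/dsi-load-client.xml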
@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
GridFileLock fileLock = GridLoadTestUtils.fileLock();
fileLock.lock(true); // Get shared lock, allowing multiple instances.
try {
Ignition.start(args.length < 4 ? "modules/core/src/test/config/load/dsi-load-client.xml" : args[3]);
Thread collector = null;
Thread timer = null;
try {
g = Ignition.ignite("dsi");
int noThreads = Integer.parseInt(args[0]);
final int duration = args.length < 2 ? 0 : Integer.parseInt(args[1]);
final String outputFileName = args.length < 3 ? null : args[2];
X.println("Thread count: " + noThreads);
Collection<ClusterNode> srvNodes = g.cluster().forPredicate(serverNode()).nodes();
if (srvNodes.isEmpty()) {
X.println("No server nodes available");
System.exit(-1);
}
X.println("No of servers: " + srvNodes.size());
int srvMaxNoTerminals = noThreads / srvNodes.size();
if (srvMaxNoTerminals * srvNodes.size() != noThreads) {
noThreads = srvMaxNoTerminals * srvNodes.size();
X.println("Using " + noThreads + " threads instead to ensure equal distribution of terminals");
}
Collection<Callable<Object>> clients = new ArrayList<>(noThreads);
// No two clients should use the same simulator.
HashMap<UUID, Collection<String>> terminals = (HashMap<UUID, Collection<String>>)
g.jcache("CLIENT_PARTITIONED_CACHE").get("terminals");
if (terminals == null) {
X.println(">>> Terminals map has not been initialized.");
terminals = new HashMap<>(srvNodes.size());
// Distribute terminals evenly across all servers.
for (ClusterNode node : srvNodes) {
UUID srvrId = node.id();
X.println(">>> Node ID: " + srvrId);
Collection<String> list = terminals.get(srvrId);
if (list == null)
list = new ArrayList<>(0);
int terminalsPerSrv = 0;
int tid = 0; // Terminal ID.
while (true) {
String terminalId = String.valueOf(++tid);
// Server partition cache.
if (!srvrId.equals(g.cluster().mapKeyToNode("PARTITIONED_CACHE", terminalId).id()))
continue;
if (terminalsPerSrv < srvMaxNoTerminals) {
list.add(terminalId);
clients.add(new GridDsiClient(terminalId, srvrId));
terminalsPerSrv++;
X.println("Terminal ID: " + terminalId);
}
else
break;
}
terminals.put(srvrId, list);
}
g.jcache("CLIENT_PARTITIONED_CACHE").put("terminals", terminals);
}
else {
X.println(">>> Terminals map has been initialized.");
for (Map.Entry<UUID, Collection<String>> e : terminals.entrySet()) {
X.println(">>> Node ID: " + e.getKey());
for (String s : e.getValue()) {
clients.add(new GridDsiClient(s, e.getKey()));
X.println("Terminal ID: " + s);
}
}
}
if (duration > 0) {
timer = new Thread(new Runnable() {
@Override public void run() {
try {
Thread.sleep(duration * 1000);
finish.set(true);
}
catch (InterruptedException ignored) {
// No-op.
}
}
});
timer.start();
}
collector = new Thread(new Runnable() {
@SuppressWarnings({"BusyWait", "InfiniteLoopStatement"})
@Override public void run() {
long txPerSecond = -1;
long avgLatency = -1;
long maxSubmitTime = -1;
T3<Long, Integer, Integer> sst = null;
try {
while (!finish.get()) {
long cnt0 = txCnt.get();
long lt0 = latency.get();
Thread.sleep(UPDATE_INTERVAL_SEC * 1000);
long cnt1 = txCnt.get();
long lt1 = latency.get();
X.println(">>>");
txPerSecond = (cnt1 - cnt0) / UPDATE_INTERVAL_SEC;
X.println(">>> Transaction/s: " + txPerSecond);
avgLatency = (cnt1 - cnt0) > 0 ? (lt1 - lt0) / (cnt1 - cnt0) : -1;
X.println(
">>> Avg Latency: " + (avgLatency >= 0 ? avgLatency + "ms" : "invalid"));
maxSubmitTime = submitTime.getAndSet(0);
X.println(">>> Max Submit Time: " + maxSubmitTime);
sst = srvStats;
if (sst != null)
X.println(String.format(">>> Server stats: [tx/sec=%d, nearSize=%d, dhtSize=%d]",
sst.get1(), sst.get2(), sst.get3()));
}
}
catch (InterruptedException ignored) {
X.println(">>> Interrupted.");
Thread.currentThread().interrupt();
}
// Output data to a file, if specified.
if (outputFileName != null) {
X.println("Writing client results to a file: " + outputFileName);
try {
GridLoadTestUtils.appendLineToFile(
outputFileName,
"%s,%d,%d,%d",
GridLoadTestUtils.DATE_TIME_FORMAT.format(new Date()),
txPerSecond,
avgLatency,
maxSubmitTime);
}
catch (IOException e) {
X.println("Failed to write client results: ", e);
}
if (sst != null) {
String srvOutputFileName = outputFileName + "-server";
X.println("Writing server results to a file: " + srvOutputFileName);
try {
GridLoadTestUtils.appendLineToFile(
srvOutputFileName,
"%s,%d,%d,%d",
GridLoadTestUtils.DATE_TIME_FORMAT.format(new Date()),
sst.get1(),
sst.get2(),
sst.get3());
}
catch (IOException e) {
X.println("Failed to write server results: ", e);
}
}
}
}
});
collector.start();
ExecutorService pool = Executors.newFixedThreadPool(noThreads);
pool.invokeAll(clients);
collector.interrupt();
pool.shutdown();
}
finally {
if (collector != null && !collector.isInterrupted())
collector.interrupt();
if (timer != null)
timer.interrupt();
Ignition.stopAll(true);
}
}
finally {
fileLock.close();
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.utils;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListenableFutureTask;
import org.junit.Assert;
import org.junit.Test;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.utils.btree.*;
import static com.google.common.base.Predicates.notNull;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
import static java.util.Comparator.naturalOrder;
import static java.util.Comparator.reverseOrder;
import static org.apache.cassandra.utils.btree.BTree.iterable;
import static org.apache.cassandra.utils.Clock.Global.currentTimeMillis;
import static org.junit.Assert.assertTrue;
public class LongBTreeTest
{
private static final boolean DEBUG = false;
private static int perThreadTrees = 100;
private static int minTreeSize = 4;
private static int maxTreeSize = 10000;
private static float generateTreeByUpdateChance = 0.8f;
private static float generateTreeByCopyChance = 0.1f;
private static float generateTreeByBuilderChance = 0.1f;
private static float generateTreeTotalChance = generateTreeByUpdateChance + generateTreeByCopyChance + generateTreeByBuilderChance;
private static int threads = DEBUG ? 1 : Runtime.getRuntime().availableProcessors() * 8;
private static final MetricRegistry metrics = new MetricRegistry();
private static final Timer BTREE_TIMER = metrics.timer(MetricRegistry.name(BTree.class, "BTREE"));
private static final Timer TREE_TIMER = metrics.timer(MetricRegistry.name(BTree.class, "TREE"));
private static final ExecutorService MODIFY = Executors.newFixedThreadPool(threads, new NamedThreadFactory("MODIFY"));
private static final ExecutorService COMPARE = DEBUG ? MODIFY : Executors.newFixedThreadPool(threads, new NamedThreadFactory("COMPARE"));
private static final RandomAbort<Integer> SPORADIC_ABORT = new RandomAbort<>(new Random(), 0.0001f);
static
{
System.setProperty("cassandra.btree.fanfactor", "4");
}
/************************** TEST ACCESS ********************************************/
@Test
public void testSearchIterator() throws InterruptedException
{
final int perTreeSelections = 100;
testRandomSelection(perThreadTrees, perTreeSelections, testSearchIteratorFactory());
}
private BTreeTestFactory testSearchIteratorFactory()
{
return (test) -> {
IndexedSearchIterator<Integer, Integer> iter1 = test.testAsSet.iterator();
IndexedSearchIterator<Integer, Integer> iter2 = test.testAsList.iterator();
return (key) ->
{
Integer found1 = iter1.hasNext() ? iter1.next(key) : null;
Integer found2 = iter2.hasNext() ? iter2.next(key) : null;
Assert.assertSame(found1, found2);
if (found1 != null)
Assert.assertEquals(iter1.indexOfCurrent(), iter2.indexOfCurrent());
int index = Collections.binarySearch(test.canonicalList, key, test.comparator);
if (index < 0)
{
Assert.assertNull(found1);
}
else
{
Assert.assertEquals(key, found1);
Assert.assertEquals(index, iter1.indexOfCurrent());
}
// check that by advancing the same key again we get null, but only do it on one of the two iterators
// to ensure they both advance differently
if (ThreadLocalRandom.current().nextBoolean())
Assert.assertNull(iter1.next(key));
else
Assert.assertNull(iter2.next(key));
};
};
}
@Test
public void testInequalityLookups() throws InterruptedException
{
final int perTreeSelections = 2;
testRandomSelectionOfSet(perThreadTrees, perTreeSelections, testInequalityLookupsFactory());
}
private BTreeSetTestFactory testInequalityLookupsFactory()
{
return (test, canonical) -> {
if (!canonical.isEmpty() || !test.isEmpty())
{
Assert.assertEquals(canonical.isEmpty(), test.isEmpty());
Assert.assertEquals(canonical.first(), test.first());
Assert.assertEquals(canonical.last(), test.last());
}
return (key) ->
{
Assert.assertEquals(test.ceiling(key), canonical.ceiling(key));
Assert.assertEquals(test.higher(key), canonical.higher(key));
Assert.assertEquals(test.floor(key), canonical.floor(key));
Assert.assertEquals(test.lower(key), canonical.lower(key));
};
};
}
@Test
public void testListIndexes() throws InterruptedException
{
testRandomSelectionOfList(perThreadTrees, 4, testListIndexesFactory());
}
private BTreeListTestFactory testListIndexesFactory()
{
return (test, canonical, cmp) ->
(key) ->
{
int javaIndex = Collections.binarySearch(canonical, key, cmp);
int btreeIndex = test.indexOf(key);
Assert.assertEquals(javaIndex, btreeIndex);
if (javaIndex >= 0)
Assert.assertEquals(canonical.get(javaIndex), test.get(btreeIndex));
};
}
@Test
public void testToArray() throws InterruptedException
{
testRandomSelection(perThreadTrees, 4,
(selection) ->
{
Integer[] array = new Integer[selection.canonicalList.size() + 1];
selection.testAsList.toArray(array, 1);
Assert.assertEquals(null, array[0]);
for (int j = 0; j < selection.canonicalList.size(); j++)
Assert.assertEquals(selection.canonicalList.get(j), array[j + 1]);
});
}
private static final class CountingFunction implements Function<Integer, Integer>
{
final Function<Integer, Integer> wrapped;
int count = 0;
protected CountingFunction(Function<Integer, Integer> wrapped)
{
this.wrapped = wrapped;
}
public Integer apply(Integer integer)
{
count++;
return wrapped.apply(integer);
}
}
@Test
public void testTransformAndFilter() throws InterruptedException
{
testRandomSelection(perThreadTrees, 4, false, false, false,
(selection) ->
{
Map<Integer, Integer> update = new LinkedHashMap<>();
for (Integer i : selection.testKeys)
update.put(i, new Integer(i));
CountingFunction function;
Object[] original = selection.testAsSet.tree();
Object[] transformed;
// test replacing none, leaving all present
function = new CountingFunction((x) -> x);
transformed = BTree.transformAndFilter(original, function);
Assert.assertEquals(BTree.size(original), function.count);
Assert.assertSame(original, transformed);
// test replacing some, leaving all present
function = new CountingFunction((x) -> update.containsKey(x) ? update.get(x) : x);
transformed = BTree.transformAndFilter(original, function);
Assert.assertEquals(BTree.size(original), function.count);
assertSame(transform(selection.canonicalList, function.wrapped), iterable(transformed));
// test replacing some, removing some
function = new CountingFunction(update::get);
transformed = BTree.transformAndFilter(original, function);
Assert.assertEquals(BTree.size(original), function.count);
assertSame(filter(transform(selection.canonicalList, function.wrapped), notNull()), iterable(transformed));
// test replacing none, removing some
function = new CountingFunction((x) -> update.containsKey(x) ? null : x);
transformed = BTree.transformAndFilter(selection.testAsList.tree(), function);
Assert.assertEquals(BTree.size(original), function.count);
assertSame(filter(transform(selection.canonicalList, function.wrapped), notNull()), iterable(transformed));
});
}
private static void assertSame(Iterable<Integer> i1, Iterable<Integer> i2)
{
assertSame(i1.iterator(), i2.iterator());
}
private static void assertSame(Iterator<Integer> i1, Iterator<Integer> i2)
{
while (i1.hasNext() && i2.hasNext())
Assert.assertSame(i1.next(), i2.next());
Assert.assertEquals(i1.hasNext(), i2.hasNext());
}
private void testRandomSelectionOfList(int perThreadTrees, int perTreeSelections, BTreeListTestFactory testRun) throws InterruptedException
{
testRandomSelection(perThreadTrees, perTreeSelections,
(BTreeTestFactory) (selection) -> testRun.get(selection.testAsList, selection.canonicalList, selection.comparator));
}
private void testRandomSelectionOfSet(int perThreadTrees, int perTreeSelections, BTreeSetTestFactory testRun) throws InterruptedException
{
testRandomSelection(perThreadTrees, perTreeSelections,
(BTreeTestFactory) (selection) -> testRun.get(selection.testAsSet, selection.canonicalSet));
}
static interface BTreeSetTestFactory
{
TestEachKey get(BTreeSet<Integer> test, NavigableSet<Integer> canonical);
}
static interface BTreeListTestFactory
{
TestEachKey get(BTreeSet<Integer> test, List<Integer> canonical, Comparator<Integer> comparator);
}
static interface BTreeTestFactory
{
TestEachKey get(RandomSelection test);
}
static interface TestEachKey
{
void testOne(Integer value);
}
private void run(BTreeTestFactory testRun, RandomSelection selection)
{
TestEachKey testEachKey = testRun.get(selection);
for (Integer key : selection.testKeys)
testEachKey.testOne(key);
}
private void run(BTreeSetTestFactory testRun, RandomSelection selection)
{
TestEachKey testEachKey = testRun.get(selection.testAsSet, selection.canonicalSet);
for (Integer key : selection.testKeys)
testEachKey.testOne(key);
}
private void testRandomSelection(int perThreadTrees, int perTreeSelections, BTreeTestFactory testRun) throws InterruptedException
{
testRandomSelection(perThreadTrees, perTreeSelections, (RandomSelection selection) -> run(testRun, selection));
}
private void testRandomSelection(int perThreadTrees, int perTreeSelections, Consumer<RandomSelection> testRun) throws InterruptedException
{
testRandomSelection(perThreadTrees, perTreeSelections, true, true, true, testRun);
}
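// narrow: additionally exercise random head/tail/sub-range views of each generated tree;
// mixInNotPresentItems: generated test keys may also include values absent from the tree.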
private void testRandomSelection(int perThreadTrees, int perTreeSelections, boolean narrow, boolean mixInNotPresentItems, boolean permitReversal, Consumer<RandomSelection> testRun) throws InterruptedException
{
int threads = Runtime.getRuntime().availableProcessors();
final CountDownLatch latch = new CountDownLatch(threads);
final AtomicLong errors = new AtomicLong();
final AtomicLong count = new AtomicLong();
final long totalCount = threads * perThreadTrees * perTreeSelections;
for (int t = 0 ; t < threads ; t++)
{
Runnable runnable = () ->
{
try
{
for (int i = 0 ; i < perThreadTrees ; i++)
{
// not easy to usefully log the seed, as tests run in parallel; it would need to be passed through to exceptions
long seed = ThreadLocalRandom.current().nextLong();
Random random = new Random(seed);
RandomTree tree = randomTree(minTreeSize, maxTreeSize, random);
for (int j = 0 ; j < perTreeSelections ; j++)
{
testRun.accept(tree.select(narrow, mixInNotPresentItems, permitReversal));
count.incrementAndGet();
}
}
}
catch (Throwable t1)
{
errors.incrementAndGet();
t1.printStackTrace();
}
latch.countDown();
};
MODIFY.execute(runnable);
}
while (latch.getCount() > 0)
{
for (int i = 0 ; i < 10L ; i++)
{
latch.await(1L, TimeUnit.SECONDS);
Assert.assertEquals(0, errors.get());
}
log("%.1f%% complete %s", 100 * count.get() / (double) totalCount, errors.get() > 0 ? ("Errors: " + errors.get()) : "");
}
}
private static class RandomSelection
{
final List<Integer> testKeys;
final NavigableSet<Integer> canonicalSet;
final List<Integer> canonicalList;
final BTreeSet<Integer> testAsSet;
final BTreeSet<Integer> testAsList;
final Comparator<Integer> comparator;
private RandomSelection(List<Integer> testKeys, NavigableSet<Integer> canonicalSet, BTreeSet<Integer> testAsSet,
List<Integer> canonicalList, BTreeSet<Integer> testAsList, Comparator<Integer> comparator)
{
this.testKeys = testKeys;
this.canonicalList = canonicalList;
this.canonicalSet = canonicalSet;
this.testAsSet = testAsSet;
this.testAsList = testAsList;
this.comparator = comparator;
}
}
private static class RandomTree
{
final Random random;
final NavigableSet<Integer> canonical;
final BTreeSet<Integer> test;
private RandomTree(NavigableSet<Integer> canonical, BTreeSet<Integer> test, Random random)
{
this.canonical = canonical;
this.test = test;
this.random = random;
}
RandomSelection select(boolean narrow, boolean mixInNotPresentItems, boolean permitReversal)
{
NavigableSet<Integer> canonicalSet = this.canonical;
BTreeSet<Integer> testAsSet = this.test;
List<Integer> canonicalList = new ArrayList<>(canonicalSet);
BTreeSet<Integer> testAsList = this.test;
Assert.assertEquals(canonicalSet.size(), testAsSet.size());
Assert.assertEquals(canonicalList.size(), testAsList.size());
// sometimes select keys first, so we cover full range
List<Integer> allKeys = randomKeys(canonical, mixInNotPresentItems, random);
List<Integer> keys = allKeys;
int narrowCount = random.nextInt(3);
while (narrow && canonicalList.size() > 10 && keys.size() > 10 && narrowCount-- > 0)
{
boolean useLb = random.nextBoolean();
boolean useUb = random.nextBoolean();
if (!(useLb | useUb))
continue;
// select a range smaller than the total span when we have more narrowing iterations left
int indexRange = keys.size() / (narrowCount + 1);
boolean lbInclusive = true;
Integer lbKey = canonicalList.get(0);
int lbKeyIndex = 0, lbIndex = 0;
boolean ubInclusive = true;
Integer ubKey = canonicalList.get(canonicalList.size() - 1);
int ubKeyIndex = keys.size(), ubIndex = canonicalList.size();
if (useLb)
{
lbKeyIndex = random.nextInt(indexRange - 1);
Integer candidate = keys.get(lbKeyIndex);
if (useLb = (candidate > lbKey && candidate <= ubKey))
{
lbInclusive = random.nextBoolean();
lbKey = keys.get(lbKeyIndex);
lbIndex = Collections.binarySearch(canonicalList, lbKey);
if (lbIndex >= 0 && !lbInclusive) lbIndex++;
else if (lbIndex < 0) lbIndex = -1 - lbIndex;
}
}
if (useUb)
{
int lb = Math.max(lbKeyIndex, keys.size() - indexRange);
ubKeyIndex = random.nextInt(keys.size() - (1 + lb)) + lb;
Integer candidate = keys.get(ubKeyIndex);
if (useUb = (candidate < ubKey && candidate >= lbKey))
{
ubInclusive = random.nextBoolean();
ubKey = keys.get(ubKeyIndex);
ubIndex = Collections.binarySearch(canonicalList, ubKey);
if (ubIndex >= 0 && ubInclusive) { ubIndex++; }
else if (ubIndex < 0) ubIndex = -1 - ubIndex;
}
}
if (ubIndex < lbIndex) { ubIndex = lbIndex; ubKey = lbKey; ubInclusive = false; }
canonicalSet = !useLb ? canonicalSet.headSet(ubKey, ubInclusive)
: !useUb ? canonicalSet.tailSet(lbKey, lbInclusive)
: canonicalSet.subSet(lbKey, lbInclusive, ubKey, ubInclusive);
testAsSet = !useLb ? testAsSet.headSet(ubKey, ubInclusive)
: !useUb ? testAsSet.tailSet(lbKey, lbInclusive)
: testAsSet.subSet(lbKey, lbInclusive, ubKey, ubInclusive);
keys = keys.subList(lbKeyIndex, ubKeyIndex);
canonicalList = canonicalList.subList(lbIndex, ubIndex);
testAsList = testAsList.subList(lbIndex, ubIndex);
Assert.assertEquals(canonicalSet.size(), testAsSet.size());
Assert.assertEquals(canonicalList.size(), testAsList.size());
}
// possibly restore full set of keys, to test case where we are provided existing keys that are out of bounds
if (keys != allKeys && random.nextBoolean())
keys = allKeys;
Comparator<Integer> comparator = naturalOrder();
if (permitReversal && random.nextBoolean())
{
if (allKeys != keys)
keys = new ArrayList<>(keys);
if (canonicalSet != canonical)
canonicalList = new ArrayList<>(canonicalList);
Collections.reverse(keys);
Collections.reverse(canonicalList);
testAsList = testAsList.descendingSet();
canonicalSet = canonicalSet.descendingSet();
testAsSet = testAsSet.descendingSet();
comparator = reverseOrder();
}
Assert.assertEquals(canonicalSet.size(), testAsSet.size());
Assert.assertEquals(canonicalList.size(), testAsList.size());
if (!canonicalSet.isEmpty())
{
Assert.assertEquals(canonicalSet.first(), canonicalList.get(0));
Assert.assertEquals(canonicalSet.last(), canonicalList.get(canonicalList.size() - 1));
Assert.assertEquals(canonicalSet.first(), testAsSet.first());
Assert.assertEquals(canonicalSet.last(), testAsSet.last());
Assert.assertEquals(canonicalSet.first(), testAsList.get(0));
Assert.assertEquals(canonicalSet.last(), testAsList.get(testAsList.size() - 1));
}
return new RandomSelection(keys, canonicalSet, testAsSet, canonicalList, testAsList, comparator);
}
}
private static RandomTree randomTree(int minSize, int maxSize, Random random)
{
// perform most of our tree constructions via update, as this is more efficient; since every run uses this
// we test builder disproportionately more often than if it had its own test anyway
int maxIntegerValue = random.nextInt(Integer.MAX_VALUE - 1) + 1;
float f = random.nextFloat() / generateTreeTotalChance;
f -= generateTreeByUpdateChance;
if (f < 0)
return randomTreeByUpdate(minSize, maxSize, maxIntegerValue, random);
f -= generateTreeByCopyChance;
if (f < 0)
return randomTreeByCopy(minSize, maxSize, maxIntegerValue, random);
return randomTreeByBuilder(minSize, maxSize, maxIntegerValue, random);
}
private static RandomTree randomTreeByCopy(int minSize, int maxSize, int maxIntegerValue, Random random)
{
assert minSize > 3;
TreeSet<Integer> canonical = new TreeSet<>();
int targetSize = random.nextInt(maxSize - minSize) + minSize;
int curSize = 0;
while (curSize < targetSize)
{
Integer next = random.nextInt(maxIntegerValue);
if (canonical.add(next))
++curSize;
}
return new RandomTree(canonical, BTreeSet.<Integer>wrap(BTree.build(canonical, UpdateFunction.noOp()), naturalOrder()), random);
}
private static RandomTree randomTreeByUpdate(int minSize, int maxSize, int maxIntegerValue, Random random)
{
assert minSize > 3;
TreeSet<Integer> canonical = new TreeSet<>();
int targetSize = random.nextInt(maxSize - minSize) + minSize;
int maxModificationSize = random.nextInt(targetSize - 2) + 2;
Object[] accumulate = BTree.empty();
int curSize = 0;
while (curSize < targetSize)
{
int nextSize = maxModificationSize == 1 ? 1 : random.nextInt(maxModificationSize - 1) + 1;
TreeSet<Integer> build = new TreeSet<>();
for (int i = 0 ; i < nextSize ; i++)
{
Integer next = random.nextInt(maxIntegerValue);
build.add(next);
canonical.add(next);
}
accumulate = BTree.update(accumulate, naturalOrder(), build, UpdateFunction.<Integer>noOp());
curSize += nextSize;
maxModificationSize = Math.min(maxModificationSize, targetSize - curSize);
}
return new RandomTree(canonical, BTreeSet.<Integer>wrap(accumulate, naturalOrder()), random);
}
private static RandomTree randomTreeByBuilder(int minSize, int maxSize, int maxIntegerValue, Random random)
{
assert minSize > 3;
BTree.Builder<Integer> builder = BTree.builder(naturalOrder());
int targetSize = random.nextInt(maxSize - minSize) + minSize;
int maxModificationSize = (int) Math.sqrt(targetSize);
TreeSet<Integer> canonical = new TreeSet<>();
int curSize = 0;
TreeSet<Integer> ordered = new TreeSet<>();
List<Integer> shuffled = new ArrayList<>();
while (curSize < targetSize)
{
int nextSize = maxModificationSize <= 1 ? 1 : random.nextInt(maxModificationSize - 1) + 1;
// leave a random selection of previous values
(random.nextBoolean() ? ordered.headSet(random.nextInt()) : ordered.tailSet(random.nextInt())).clear();
shuffled = new ArrayList<>(shuffled.subList(0, shuffled.size() < 2 ? 0 : random.nextInt(shuffled.size() / 2)));
for (int i = 0 ; i < nextSize ; i++)
{
Integer next = random.nextInt(maxIntegerValue);
ordered.add(next);
shuffled.add(next);
canonical.add(next);
}
switch (random.nextInt(5))
{
case 0:
builder.addAll(ordered);
break;
case 1:
builder.addAll(BTreeSet.of(ordered));
break;
case 2:
for (Integer i : ordered)
builder.add(i);
break;
case 3:
builder.addAll(shuffled);
break;
case 4:
for (Integer i : shuffled)
builder.add(i);
}
curSize += nextSize;
maxModificationSize = Math.min(maxModificationSize, targetSize - curSize);
}
BTreeSet<Integer> btree = BTreeSet.<Integer>wrap(builder.build(), naturalOrder());
Assert.assertEquals(canonical.size(), btree.size());
return new RandomTree(canonical, btree, random);
}
// select a random subset of the keys, optionally mixing in fake keys that fall in between those that are present,
// so that lookups for values not in the tree are also exercised
private static List<Integer> randomKeys(Iterable<Integer> canonical, boolean mixInNotPresentItems, Random random)
{
boolean useFake = mixInNotPresentItems && random.nextBoolean();
final float fakeRatio = random.nextFloat();
List<Integer> results = new ArrayList<>();
Long fakeLb = (long) Integer.MIN_VALUE, fakeUb = null;
Integer max = null;
for (Integer v : canonical)
{
if ( !useFake
|| (fakeUb == null ? v - 1 : fakeUb) <= fakeLb + 1
|| random.nextFloat() < fakeRatio)
{
// if we cannot safely construct a fake value, or our randomizer says not to, we emit the next real value
results.add(v);
fakeLb = v.longValue();
fakeUb = null;
}
else
{
// otherwise we emit a fake value in the range immediately following the last real value, and not
// exceeding the real value that would have come next (ignoring any other real values suppressed since)
if (fakeUb == null)
fakeUb = v.longValue() - 1;
long mid = (fakeLb + fakeUb) / 2;
assert mid < fakeUb;
results.add((int) mid);
fakeLb = mid;
}
max = v;
}
if (useFake && max != null && max < Integer.MAX_VALUE)
results.add(max + 1);
final float useChance = random.nextFloat();
return Lists.newArrayList(filter(results, (x) -> random.nextFloat() < useChance));
}
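// Illustrative sketch (not part of the original test): for a canonical set {10, 20, 30} one possible
// outcome is [10, 14, 30, 31] - 10 and 30 are real keys, 14 stands in for 20 (a midpoint between the
// previous real value 10 and 19), and 31 is one past the maximum; the list is then randomly subsampled.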
/************************** TEST MUTATION ********************************************/
@Test
public void testBuildNewTree()
{
int max = 10000;
final List<Integer> list = new ArrayList<>(max);
final NavigableSet<Integer> set = new TreeSet<>();
BTreeSetTestFactory test = testInequalityLookupsFactory();
for (int i = 0 ; i < max ; ++i)
{
list.add(i);
set.add(i);
Object[] tree = BTree.build(list, UpdateFunction.noOp());
Assert.assertTrue(BTree.isWellFormed(tree, Comparator.naturalOrder()));
BTreeSet<Integer> btree = new BTreeSet<>(tree, Comparator.naturalOrder());
RandomSelection selection = new RandomSelection(list, set, btree, list, btree, Comparator.naturalOrder());
run(test, selection);
}
}
@Test
public void testOversizedMiddleInsert()
{
TreeSet<Integer> canon = new TreeSet<>();
for (int i = 0 ; i < 10000000 ; i++)
canon.add(i);
Object[] btree = BTree.build(Arrays.asList(Integer.MIN_VALUE, Integer.MAX_VALUE), UpdateFunction.noOp());
btree = BTree.update(btree, naturalOrder(), canon, UpdateFunction.<Integer>noOp());
canon.add(Integer.MIN_VALUE);
canon.add(Integer.MAX_VALUE);
assertTrue(BTree.isWellFormed(btree, naturalOrder()));
testEqual("Oversize", BTree.iterator(btree), canon.iterator());
}
@Test
public void testIndividualInsertsSmallOverlappingRange() throws ExecutionException, InterruptedException
{
testInsertions(50, 1, 1, true);
}
@Test
public void testBatchesSmallOverlappingRange() throws ExecutionException, InterruptedException
{
testInsertions(50, 1, 5, true);
}
@Test
public void testIndividualInsertsMediumSparseRange() throws ExecutionException, InterruptedException
{
testInsertions(perThreadTrees / 10, 500, 10, 1, true);
}
@Test
public void testBatchesMediumSparseRange() throws ExecutionException, InterruptedException
{
testInsertions(500, 10, 10, true);
}
@Test
public void testLargeBatchesLargeRange() throws ExecutionException, InterruptedException
{
testInsertions(perThreadTrees / 10, Math.max(maxTreeSize, 5000), 3, 100, true);
}
@Test
public void testRandomRangeAndBatches() throws ExecutionException, InterruptedException
{
ThreadLocalRandom random = ThreadLocalRandom.current();
int treeSize = random.nextInt(maxTreeSize / 10, maxTreeSize * 10);
for (int i = 0 ; i < perThreadTrees / 10 ; i++)
testInsertions(threads * 10, treeSize, random.nextInt(1, 100) / 10f, treeSize / 100, true);
}
@Test
public void testSlicingSmallRandomTrees() throws ExecutionException, InterruptedException
{
testInsertions(50, 10, 10, false);
}
private static void testInsertions(int perTestCount, float testKeyRatio, int modificationBatchSize, boolean quickEquality) throws ExecutionException, InterruptedException
{
int tests = perThreadTrees * threads;
testInsertions(tests, perTestCount, testKeyRatio, modificationBatchSize, quickEquality);
}
private static void testInsertions(int tests, int perTestCount, float testKeyRatio, int modificationBatchSize, boolean quickEquality) throws ExecutionException, InterruptedException
{
int batchesPerTest = perTestCount / modificationBatchSize;
int testKeyRange = (int) (perTestCount * testKeyRatio);
long totalCount = (long) perTestCount * tests;
log("Performing %d tests of %d operations, with %.2f max size/key-range ratio in batches of ~%d ops",
tests, perTestCount, 1 / testKeyRatio, modificationBatchSize);
// if we're not doing quick-equality, we can spam with garbage for all the checks we perform, so we'll split the work into smaller chunks
int chunkSize = quickEquality ? tests : (int) (100000 / Math.pow(perTestCount, 2));
for (int chunk = 0 ; chunk < tests ; chunk += chunkSize)
{
final List<ListenableFutureTask<List<ListenableFuture<?>>>> outer = new ArrayList<>();
for (int i = 0 ; i < chunkSize ; i++)
{
int maxRunLength = modificationBatchSize == 1 ? 1 : ThreadLocalRandom.current().nextInt(1, modificationBatchSize);
outer.add(doOneTestInsertions(testKeyRange, maxRunLength, modificationBatchSize, batchesPerTest, quickEquality));
}
final List<ListenableFuture<?>> inner = new ArrayList<>();
long complete = 0;
int reportInterval = Math.max(1000, (int) (totalCount / 10000));
long lastReportAt = 0;
for (ListenableFutureTask<List<ListenableFuture<?>>> f : outer)
{
inner.addAll(f.get());
complete += perTestCount;
if (complete - lastReportAt >= reportInterval)
{
long done = (chunk * perTestCount) + complete;
float ratio = done / (float) totalCount;
log("Completed %.1f%% (%d of %d operations)", ratio * 100, done, totalCount);
lastReportAt = complete;
}
}
Futures.allAsList(inner).get();
}
Snapshot snap = BTREE_TIMER.getSnapshot();
log("btree: %.2fns, %.2fns, %.2fns", snap.getMedian(), snap.get95thPercentile(), snap.get999thPercentile());
snap = TREE_TIMER.getSnapshot();
log("java: %.2fns, %.2fns, %.2fns", snap.getMedian(), snap.get95thPercentile(), snap.get999thPercentile());
log("Done");
}
private static ListenableFutureTask<List<ListenableFuture<?>>> doOneTestInsertions(final int upperBound, final int maxRunLength, final int averageModsPerIteration, final int iterations, final boolean quickEquality)
{
ListenableFutureTask<List<ListenableFuture<?>>> f = ListenableFutureTask.create(new Callable<List<ListenableFuture<?>>>()
{
@Override
public List<ListenableFuture<?>> call()
{
final List<ListenableFuture<?>> r = new ArrayList<>();
NavigableMap<Integer, Integer> canon = new TreeMap<>();
Object[] btree = BTree.empty();
final TreeMap<Integer, Integer> buffer = new TreeMap<>();
ThreadLocalRandom rnd = ThreadLocalRandom.current();
for (int i = 0 ; i < iterations ; i++)
{
buffer.clear();
int mods = rnd.nextInt(1, averageModsPerIteration * 2);
while (mods > 0)
{
int v = rnd.nextInt(upperBound);
int rc = Math.max(0, Math.min(mods, maxRunLength) - 1);
int c = 1 + (rc <= 0 ? 0 : rnd.nextInt(rc));
for (int j = 0 ; j < c ; j++)
{
buffer.put(v, v);
v++;
}
mods -= c;
}
Timer.Context ctxt;
ctxt = TREE_TIMER.time();
canon.putAll(buffer);
ctxt.stop();
ctxt = BTREE_TIMER.time();
Object[] next = null;
while (next == null)
next = BTree.update(btree, naturalOrder(), buffer.keySet(), SPORADIC_ABORT);
btree = next;
ctxt.stop();
if (!BTree.isWellFormed(btree, naturalOrder()))
{
log("ERROR: Not well formed");
throw new AssertionError("Not well formed!");
}
if (quickEquality)
testEqual("", BTree.iterator(btree), canon.keySet().iterator());
else
r.addAll(testAllSlices("RND", btree, new TreeSet<>(canon.keySet())));
}
return r;
}
});
if (DEBUG)
f.run();
else
MODIFY.execute(f);
return f;
}
@Test
public void testSlicingAllSmallTrees() throws ExecutionException, InterruptedException
{
Object[] cur = BTree.empty();
TreeSet<Integer> canon = new TreeSet<>();
// we set FAN_FACTOR to 4, so 128 items is four levels deep, three fully populated
for (int i = 0 ; i < 128 ; i++)
{
String id = String.format("[0..%d)", canon.size());
log("Testing " + id);
Futures.allAsList(testAllSlices(id, cur, canon)).get();
Object[] next = null;
while (next == null)
next = BTree.update(cur, naturalOrder(), Arrays.asList(i), SPORADIC_ABORT);
cur = next;
canon.add(i);
}
}
private static List<ListenableFuture<?>> testAllSlices(String id, Object[] btree, NavigableSet<Integer> canon)
{
List<ListenableFuture<?>> waitFor = new ArrayList<>();
testAllSlices(id + " ASC", new BTreeSet<>(btree, naturalOrder()), canon, true, waitFor);
testAllSlices(id + " DSC", new BTreeSet<Integer>(btree, naturalOrder()).descendingSet(), canon.descendingSet(), false, waitFor);
return waitFor;
}
private static void testAllSlices(String id, NavigableSet<Integer> btree, NavigableSet<Integer> canon, boolean ascending, List<ListenableFuture<?>> results)
{
testOneSlice(id, btree, canon, results);
for (Integer lb : range(canon.size(), Integer.MIN_VALUE, ascending))
{
// test head/tail sets
testOneSlice(String.format("%s->[..%d)", id, lb), btree.headSet(lb, true), canon.headSet(lb, true), results);
testOneSlice(String.format("%s->(..%d)", id, lb), btree.headSet(lb, false), canon.headSet(lb, false), results);
testOneSlice(String.format("%s->(%d..]", id, lb), btree.tailSet(lb, true), canon.tailSet(lb, true), results);
testOneSlice(String.format("%s->(%d..]", id, lb), btree.tailSet(lb, false), canon.tailSet(lb, false), results);
for (Integer ub : range(canon.size(), lb, ascending))
{
// test subsets
testOneSlice(String.format("%s->[%d..%d]", id, lb, ub), btree.subSet(lb, true, ub, true), canon.subSet(lb, true, ub, true), results);
testOneSlice(String.format("%s->(%d..%d]", id, lb, ub), btree.subSet(lb, false, ub, true), canon.subSet(lb, false, ub, true), results);
testOneSlice(String.format("%s->[%d..%d)", id, lb, ub), btree.subSet(lb, true, ub, false), canon.subSet(lb, true, ub, false), results);
testOneSlice(String.format("%s->(%d..%d)", id, lb, ub), btree.subSet(lb, false, ub, false), canon.subSet(lb, false, ub, false), results);
}
}
}
private static void testOneSlice(final String id, final NavigableSet<Integer> test, final NavigableSet<Integer> canon, List<ListenableFuture<?>> results)
{
ListenableFutureTask<?> f = ListenableFutureTask.create(new Runnable()
{
@Override
public void run()
{
test(id + " Count", test.size(), canon.size());
testEqual(id, test.iterator(), canon.iterator());
testEqual(id + "->DSCI", test.descendingIterator(), canon.descendingIterator());
testEqual(id + "->DSCS", test.descendingSet().iterator(), canon.descendingSet().iterator());
testEqual(id + "->DSCS->DSCI", test.descendingSet().descendingIterator(), canon.descendingSet().descendingIterator());
}
}, null);
results.add(f);
if (DEBUG)
f.run();
else
COMPARE.execute(f);
}
private static void test(String id, int test, int expect)
{
if (test != expect)
{
log("%s: Expected %d, Got %d", id, expect, test);
}
}
private static <V> void testEqual(String id, Iterator<V> btree, Iterator<V> canon)
{
boolean equal = true;
while (btree.hasNext() && canon.hasNext())
{
Object i = btree.next();
Object j = canon.next();
if (!Objects.equals(i, j))
{
log("%s: Expected %d, Got %d", id, j, i);
equal = false;
}
}
while (btree.hasNext())
{
log("%s: Expected <Nil>, Got %d", id, btree.next());
equal = false;
}
while (canon.hasNext())
{
log("%s: Expected %d, Got Nil", id, canon.next());
equal = false;
}
if (!equal)
throw new AssertionError("Not equal");
}
// should only be called on sets that range from 0->N or N->0
private static final Iterable<Integer> range(final int size, final int from, final boolean ascending)
{
return new Iterable<Integer>()
{
int cur;
int delta;
int end;
{
if (ascending)
{
end = size + 1;
cur = from == Integer.MIN_VALUE ? -1 : from;
delta = 1;
}
else
{
end = -2;
cur = from == Integer.MIN_VALUE ? size : from;
delta = -1;
}
}
@Override
public Iterator<Integer> iterator()
{
return new Iterator<Integer>()
{
@Override
public boolean hasNext()
{
return cur != end;
}
@Override
public Integer next()
{
Integer r = cur;
cur += delta;
return r;
}
@Override
public void remove()
{
throw new UnsupportedOperationException();
}
};
}
};
}
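// Illustrative note (derived from the initializer above): range(3, Integer.MIN_VALUE, true) iterates
// -1, 0, 1, 2, 3 and range(3, Integer.MIN_VALUE, false) iterates 3, 2, 1, 0, -1, so the slice tests
// above probe one position beyond either end of a set spanning 0..N.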
private static final class RandomAbort<V> implements UpdateFunction<V, V>
{
final Random rnd;
final float chance;
private RandomAbort(Random rnd, float chance)
{
this.rnd = rnd;
this.chance = chance;
}
public V apply(V replacing, V update)
{
return update;
}
public boolean abortEarly()
{
return rnd.nextFloat() < chance;
}
public void allocated(long heapSize)
{
}
public V apply(V v)
{
return v;
}
}
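// Descriptive note (an assumption, not from the original source): an UpdateFunction whose abortEarly()
// randomly returns true makes BTree.update give up part-way and return null, which is why callers such
// as doOneTestInsertions retry in a loop:
// Object[] next = null;
// while (next == null)
// next = BTree.update(btree, naturalOrder(), buffer.keySet(), SPORADIC_ABORT);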
public static void main(String[] args) throws ExecutionException, InterruptedException, InvocationTargetException, IllegalAccessException
{
for (String arg : args)
{
if (arg.startsWith("fan="))
System.setProperty("cassandra.btree.fanfactor", arg.substring(4));
else if (arg.startsWith("min="))
minTreeSize = Integer.parseInt(arg.substring(4));
else if (arg.startsWith("max="))
maxTreeSize = Integer.parseInt(arg.substring(4));
else if (arg.startsWith("count="))
perThreadTrees = Integer.parseInt(arg.substring(6));
else
exit();
}
List<Method> methods = new ArrayList<>();
for (Method m : LongBTreeTest.class.getDeclaredMethods())
{
if (m.getParameters().length > 0)
continue;
for (Annotation annotation : m.getAnnotations())
if (annotation.annotationType() == Test.class)
methods.add(m);
}
LongBTreeTest test = new LongBTreeTest();
Collections.sort(methods, (a, b) -> a.getName().compareTo(b.getName()));
log(Lists.transform(methods, (m) -> m.getName()).toString());
for (Method m : methods)
{
log(m.getName());
m.invoke(test);
}
log("success");
}
private static void exit()
{
log("usage: fan=<int> min=<int> max=<int> count=<int>");
log("fan: btree fanout");
log("min: minimum btree size (must be >= 4)");
log("max: maximum btree size (must be >= 4)");
log("count: number of trees to assign each core, for each test");
}
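// Example invocation (illustrative sketch; classpath and package prefix omitted):
// java ... LongBTreeTest fan=8 min=4 max=10000 count=100
// runs every @Test method with a btree fanout of 8, tree sizes between 4 and 10000,
// and 100 trees per core for each test.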
private static void log(String formatstr, Object ... args)
{
args = Arrays.copyOf(args, args.length + 1);
System.arraycopy(args, 0, args, 1, args.length - 1);
args[0] = currentTimeMillis();
System.out.printf("%tT: " + formatstr + "\n", args);
}
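// Illustrative note (hypothetical timestamp): log("%.1f%% complete", 50.0) prints a line such as
// "12:34:56: 50.0% complete", because the current time is prepended as the first format argument.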
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.tooling.util;
import java.io.File;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.camel.util.json.JsonObject;
import org.apache.camel.util.json.Jsoner;
/**
* A helper class for <a href="http://json-schema.org/">JSON schema</a>.
*/
public final class JSonSchemaHelper {
private static final String VALID_CHARS = ".,-='/\\!&%():;#${}";
private JSonSchemaHelper() {
}
public static String toJson(String name, String displayName, String kind, Boolean required, String type, String defaultValue, String description,
Boolean deprecated, String deprecationNote, Boolean secret, String group, String label, boolean enumType, Set<String> enums,
boolean oneOfType, Set<String> oneOfTypes, boolean asPredicate, String optionalPrefix, String prefix, boolean multiValue,
String configurationClass, String configurationField) {
String typeName = JSonSchemaHelper.getType(type, enumType);
StringBuilder sb = new StringBuilder();
sb.append(Strings.doubleQuote(name));
sb.append(": { \"kind\": ");
sb.append(Strings.doubleQuote(kind));
// compute a display name if we don't have anything
if (Strings.isNullOrEmpty(displayName)) {
displayName = Strings.asTitle(name);
}
// we want display name early so it's easier to spot
sb.append(", \"displayName\": ");
sb.append(Strings.doubleQuote(displayName));
// we want group early so it's easier to spot
if (!Strings.isNullOrEmpty(group)) {
sb.append(", \"group\": ");
sb.append(Strings.doubleQuote(group));
}
// we want label early so it's easier to spot
if (!Strings.isNullOrEmpty(label)) {
sb.append(", \"label\": ");
sb.append(Strings.doubleQuote(label));
}
if (required != null) {
// boolean type
sb.append(", \"required\": ");
sb.append(required.toString());
}
sb.append(", \"type\": ");
if ("enum".equals(typeName)) {
String actualType = JSonSchemaHelper.getType(type, false);
sb.append(Strings.doubleQuote(actualType));
sb.append(", \"javaType\": \"").append(type).append("\"");
sb.append(", \"enum\": [ ");
sb.append(enums.stream().map(Strings::doubleQuote).collect(Collectors.joining(", ")));
sb.append(" ]");
} else if (oneOfType) {
sb.append(Strings.doubleQuote(typeName));
sb.append(", \"javaType\": \"").append(type).append("\"");
sb.append(", \"oneOf\": [ ");
sb.append(oneOfTypes.stream().map(Strings::doubleQuote).collect(Collectors.joining(", ")));
sb.append(" ]");
} else if ("array".equals(typeName)) {
sb.append(Strings.doubleQuote("array"));
sb.append(", \"javaType\": \"").append(type).append("\"");
} else {
sb.append(Strings.doubleQuote(typeName));
sb.append(", \"javaType\": \"").append(type).append("\"");
}
if (!Strings.isNullOrEmpty(optionalPrefix)) {
sb.append(", \"optionalPrefix\": ");
String text = safeDefaultValue(optionalPrefix);
sb.append(Strings.doubleQuote(text));
}
if (!Strings.isNullOrEmpty(prefix)) {
sb.append(", \"prefix\": ");
String text = safeDefaultValue(prefix);
sb.append(Strings.doubleQuote(text));
}
if (multiValue) {
// boolean value
sb.append(", \"multiValue\": true");
}
if (deprecated != null) {
sb.append(", \"deprecated\": ");
// boolean value
sb.append(deprecated.toString());
}
if (!Strings.isNullOrEmpty(deprecationNote)) {
sb.append(", \"deprecationNote\": ");
sb.append(Strings.doubleQuote(deprecationNote));
}
if (secret != null) {
sb.append(", \"secret\": ");
// boolean value
sb.append(secret.toString());
}
if (!Strings.isNullOrEmpty(defaultValue)) {
sb.append(", \"defaultValue\": ");
String text = safeDefaultValue(defaultValue);
// the type can either be boolean, integer, number or text based
if ("boolean".equals(typeName) || "integer".equals(typeName) || "number".equals(typeName)) {
sb.append(text);
} else {
// text should be quoted
sb.append(Strings.doubleQuote(text));
}
}
// for expressions we want to know if it must be used as predicate or not
boolean predicate = "expression".equals(kind) || asPredicate;
if (predicate) {
sb.append(", \"asPredicate\": ");
if (asPredicate) {
sb.append("true");
} else {
sb.append("false");
}
}
if (!Strings.isNullOrEmpty(configurationClass)) {
sb.append(", \"configurationClass\": ");
sb.append(Strings.doubleQuote(configurationClass));
}
if (!Strings.isNullOrEmpty(configurationField)) {
sb.append(", \"configurationField\": ");
sb.append(Strings.doubleQuote(configurationField));
}
if (!Strings.isNullOrEmpty(description)) {
sb.append(", \"description\": ");
String text = sanitizeDescription(description, false);
sb.append(Strings.doubleQuote(text));
}
sb.append(" }");
return sb.toString();
}
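// Illustrative sketch (hypothetical option, and assuming Strings.asTitle("timeout") yields "Timeout"):
// calling toJson with name "timeout", kind "parameter", required true, type "java.lang.Integer",
// defaultValue "30000", group "common", deprecated false, secret false and a short description
// produces one row roughly like:
// "timeout": { "kind": "parameter", "displayName": "Timeout", "group": "common", "required": true,
// "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false,
// "defaultValue": 30000, "description": "Timeout in millis" }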
/**
* Gets the JSon schema type.
*
* @param type the java type
* @param enumType whether the type should be regarded as an enum
* @return the json schema type, never null; returns <tt>object</tt> as the generic fallback type
*/
public static String getType(String type, boolean enumType) {
if (enumType) {
return "enum";
} else if (type == null) {
// return generic type for unknown type
return "object";
} else if (type.equals(URI.class.getName()) || type.equals(URL.class.getName())) {
return "string";
} else if (type.equals(File.class.getName())) {
return "string";
} else if (type.equals(Date.class.getName())) {
return "string";
} else if (type.startsWith("java.lang.Class")) {
return "string";
} else if (type.startsWith("java.util.List") || type.startsWith("java.util.Collection")) {
return "array";
}
String primitive = getPrimitiveType(type);
if (primitive != null) {
return primitive;
}
return "object";
}
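// For example (derived from the rules above): java.util.List and java.util.Collection map to "array";
// java.net.URI, java.net.URL, java.io.File, java.util.Date and java.lang.Class map to "string";
// primitives and their wrappers are resolved via getPrimitiveType; anything unknown falls back to "object".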
/**
* Gets the JSon schema primitive type.
*
* @param name the java type
* @return the json schema primitive type, or <tt>null</tt> if not a primitive
*/
public static String getPrimitiveType(String name) {
// special for byte[] or Object[] as it's common to use
if ("java.lang.byte[]".equals(name) || "byte[]".equals(name)) {
return "string";
} else if ("java.lang.Byte[]".equals(name) || "Byte[]".equals(name)) {
return "array";
} else if ("java.lang.Object[]".equals(name) || "Object[]".equals(name)) {
return "array";
} else if ("java.lang.String[]".equals(name) || "String[]".equals(name)) {
return "array";
} else if ("java.lang.Character".equals(name) || "Character".equals(name) || "char".equals(name)) {
return "string";
} else if ("java.lang.String".equals(name) || "String".equals(name)) {
return "string";
} else if ("java.lang.Boolean".equals(name) || "Boolean".equals(name) || "boolean".equals(name)) {
return "boolean";
} else if ("java.lang.Integer".equals(name) || "Integer".equals(name) || "int".equals(name)) {
return "integer";
} else if ("java.lang.Long".equals(name) || "Long".equals(name) || "long".equals(name)) {
return "integer";
} else if ("java.lang.Short".equals(name) || "Short".equals(name) || "short".equals(name)) {
return "integer";
} else if ("java.lang.Byte".equals(name) || "Byte".equals(name) || "byte".equals(name)) {
return "integer";
} else if ("java.lang.Float".equals(name) || "Float".equals(name) || "float".equals(name)) {
return "number";
} else if ("java.lang.Double".equals(name) || "Double".equals(name) || "double".equals(name)) {
return "number";
}
return null;
}
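// For example (derived from the mapping above): getPrimitiveType("java.lang.Boolean") returns "boolean",
// getPrimitiveType("int") returns "integer", getPrimitiveType("byte[]") returns "string",
// getPrimitiveType("String[]") returns "array", and an unknown type such as "com.example.Foo"
// (a hypothetical class) returns null.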
/**
* Sanitizes the javadoc to remove invalid characters so it can be used as a json description
*
* @param javadoc the javadoc
* @param summary whether to keep only the summary, i.e. the text up to the first empty line or sentence ending with a dot
* @return the text that is valid as json
*/
public static String sanitizeDescription(String javadoc, boolean summary) {
if (Strings.isNullOrEmpty(javadoc)) {
return null;
}
// let's just keep what java accepts as identifier parts, plus a small set of valid punctuation characters
StringBuilder sb = new StringBuilder();
// split into lines
String[] lines = javadoc.split("\n");
boolean first = true;
for (String line : lines) {
line = line.trim();
if (line.startsWith("**")) {
continue;
}
// remove leading javadoc *
if (line.startsWith("*")) {
line = line.substring(1);
line = line.trim();
}
// terminate if we reach @param, @return or @deprecated as we only want the javadoc summary
if (line.startsWith("@param") || line.startsWith("@return") || line.startsWith("@deprecated")) {
break;
}
// skip lines that are javadoc references
if (line.startsWith("@")) {
continue;
}
// remove all XML tags
line = line.replaceAll("<.*?>", "");
// remove all inlined javadoc links, eg such as {@link org.apache.camel.spi.Registry}
// use #? to remove a leading # in case it's a local reference
line = line.replaceAll("\\{\\@\\w+\\s#?([\\w.#(\\d,)]+)\\}", "$1");
// we are starting from a new line, so add a whitespace
if (!first) {
sb.append(' ');
}
// create a new line
StringBuilder cb = new StringBuilder();
for (char c : line.toCharArray()) {
if (Character.isJavaIdentifierPart(c) || VALID_CHARS.indexOf(c) != -1) {
cb.append(c);
} else if (Character.isWhitespace(c)) {
// always use space as whitespace, also for line feeds etc
cb.append(' ');
}
}
// append data
String s = cb.toString().trim();
sb.append(s);
boolean empty = Strings.isNullOrEmpty(s);
boolean endWithDot = s.endsWith(".");
boolean haveText = sb.length() > 0;
if (haveText && summary && (empty || endWithDot)) {
// if we only want a summary, then skip at first empty line we encounter, or if the sentence ends with a dot
break;
}
first = false;
}
String s = sb.toString();
// remove double whitespaces, and trim
s = s.replaceAll("\\s+", " ");
// unescape http links
s = s.replaceAll("\\\\(http:|https:)", "$1");
return s.trim();
}
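// Illustrative sketch (hypothetical javadoc input): for the lines
// "* Sets the {@link org.apache.camel.spi.Registry} to use. <p>"
// "* May be overridden per route."
// sanitizeDescription(javadoc, true) keeps only the summary "Sets the org.apache.camel.spi.Registry to use."
// while sanitizeDescription(javadoc, false) also appends "May be overridden per route."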
/**
* Parses the json schema to split it into a list of rows, where each row contains key value pairs with the metadata
*
* @param group the group to parse from such as <tt>component</tt>, <tt>componentProperties</tt>, or <tt>properties</tt>.
* @param json the json
* @param parseProperties whether to flatten each property of the group into its own row, or emit one single-entry row per key
* @return a list of all the rows, where each row is a set of key value pairs with metadata
*/
@SuppressWarnings("unchecked")
public static List<Map<String, String>> parseJsonSchema(String group, String json, boolean parseProperties) {
List<Map<String, String>> answer = new ArrayList<>();
if (json == null) {
return answer;
}
// convert into a List<Map<String, String>> structure which is expected as output from this parser
try {
JsonObject output = (JsonObject) Jsoner.deserialize(json);
for (String key : output.keySet()) {
Map<?, ?> row = output.getMap(key);
if (key.equals(group)) {
if (parseProperties) {
// flatten each entry in the row with the name as the key, and its value as the content (it's a map as well)
for (Object obj : row.entrySet()) {
Map.Entry<?, ?> entry = (Map.Entry<?, ?>) obj;
Map<String, String> newRow = new LinkedHashMap<>();
newRow.put("name", entry.getKey().toString());
Map<String, String> newData = transformMap((Map<?, ?>) entry.getValue());
newRow.putAll(newData);
answer.add(newRow);
}
} else {
// flatten each entry in the row as a list of single Map<key, value> elements
Map<?, ?> newData = transformMap(row);
for (Object obj : newData.entrySet()) {
Map.Entry<?, ?> entry = (Map.Entry<?, ?>) obj;
Map<String, String> newRow = new LinkedHashMap<>();
newRow.put(entry.getKey().toString(), entry.getValue().toString());
answer.add(newRow);
}
}
}
}
} catch (Exception e) {
// wrap parsing exceptions as runtime
throw new RuntimeException("Cannot parse json", e);
}
return answer;
}
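// Illustrative sketch (hypothetical json): given
// { "properties": { "host": { "kind": "path", "type": "string" } } }
// parseJsonSchema("properties", json, true) returns a single row {name=host, kind=path, type=string},
// while parseJsonSchema("properties", json, false) returns one single-entry row per key of the group,
// with the nested map rendered via its toString.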
private static String escapeJson(String value) {
// need to safely encode \r as \\r so it's escaped
// need to safely encode \n as \\n so it's escaped
// need to safely encode \t as \\t so it's escaped
return value
.replace("\\r", "\\\\r")
.replace("\\n", "\\\\n")
.replace("\\t", "\\\\t");
}
/**
* The default value may need to be escaped to be safe for json
*/
private static String safeDefaultValue(String value) {
if ("\"".equals(value)) {
return "\\\"";
} else if ("\\".equals(value)) {
return "\\\\";
} else {
return value;
}
}
private static Map<String, String> transformMap(Map<?, ?> jsonMap) {
Map<String, String> answer = new LinkedHashMap<>();
for (Object rowObj : jsonMap.entrySet()) {
Map.Entry<?, ?> rowEntry = (Map.Entry<?, ?>) rowObj;
// if it's a list type then it's an enum, and we need to render it as a single comma-separated line
// to be backwards compatible
Object newValue = rowEntry.getValue();
if (newValue instanceof List) {
List<?> list = (List<?>) newValue;
newValue = list.stream().map(Object::toString)
.collect(Collectors.joining(","));
}
// ensure value is escaped
String value = escapeJson(newValue.toString());
answer.put(rowEntry.getKey().toString(), value);
}
return answer;
}
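// For example (derived from the code above): an entry "enum": ["InOnly", "InOut"] becomes the single
// string value "InOnly,InOut", and literal \r, \n and \t sequences in values are escaped once more so
// they remain safe to embed in json.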
/**
* Gets the value with the key in a safe way, e.g. returning an empty string if there was no value for the key.
*/
public static String getSafeValue(String key, List<Map<String, String>> rows) {
for (Map<String, String> row : rows) {
String value = row.get(key);
if (value != null) {
return value;
}
}
return "";
}
/**
* Gets the value with the key in a safe way, e.g. returning an empty string if there was no value for the key.
*/
public static String getSafeValue(String key, Map<String, String> rows) {
String value = rows.get(key);
if (value != null) {
return value;
}
return "";
}
public static String getPropertyDefaultValue(List<Map<String, String>> rows, String name) {
for (Map<String, String> row : rows) {
String defaultValue = null;
boolean found = false;
if (row.containsKey("name")) {
found = name.equals(row.get("name"));
}
if (row.containsKey("defaultValue")) {
defaultValue = row.get("defaultValue");
}
if (found) {
return defaultValue;
}
}
return null;
}
public static String getPropertyDescriptionValue(List<Map<String, String>> rows, String name) {
for (Map<String, String> row : rows) {
String description = null;
boolean found = false;
if (row.containsKey("name")) {
found = name.equals(row.get("name"));
}
if (row.containsKey("description")) {
description = row.get("description");
}
if (found) {
return description;
}
}
return null;
}
public static String getPropertyJavaType(List<Map<String, String>> rows, String name) {
for (Map<String, String> row : rows) {
String javaType = null;
boolean found = false;
if (row.containsKey("name")) {
found = name.equals(row.get("name"));
}
if (row.containsKey("javaType")) {
javaType = row.get("javaType");
}
if (found) {
return javaType;
}
}
return null;
}
public static String getPropertyType(List<Map<String, String>> rows, String name) {
for (Map<String, String> row : rows) {
String type = null;
boolean found = false;
if (row.containsKey("name")) {
found = name.equals(row.get("name"));
}
if (row.containsKey("type")) {
type = row.get("type");
}
if (found) {
return type;
}
}
return null;
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.io;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
/**
* HiveInputFormat is a parameterized InputFormat which looks at the path name
* and determines the correct InputFormat for that path name from
* mapredPlan.pathToPartitionInfo(). It can be used to read files with different
* input formats in the same map-reduce job.
*/
public class HiveInputFormat<K extends WritableComparable, V extends Writable>
implements InputFormat<K, V>, JobConfigurable {
public static final String CLASS_NAME = HiveInputFormat.class.getName();
public static final Log LOG = LogFactory.getLog(CLASS_NAME);
/**
* HiveInputSplit encapsulates an InputSplit with its corresponding
* inputFormatClass. The reason that it derives from FileSplit is to make sure
* "map.input.file" is set correctly in MapTask.
*/
public static class HiveInputSplit extends FileSplit implements InputSplit,
Configurable {
InputSplit inputSplit;
String inputFormatClassName;
public HiveInputSplit() {
// This is the only public constructor of FileSplit
super((Path) null, 0, 0, (String[]) null);
}
public HiveInputSplit(InputSplit inputSplit, String inputFormatClassName) {
// This is the only public constructor of FileSplit
super((Path) null, 0, 0, (String[]) null);
this.inputSplit = inputSplit;
this.inputFormatClassName = inputFormatClassName;
}
public InputSplit getInputSplit() {
return inputSplit;
}
public String inputFormatClassName() {
return inputFormatClassName;
}
@Override
public Path getPath() {
if (inputSplit instanceof FileSplit) {
return ((FileSplit) inputSplit).getPath();
}
return new Path("");
}
/** The position of the first byte in the file to process. */
@Override
public long getStart() {
if (inputSplit instanceof FileSplit) {
return ((FileSplit) inputSplit).getStart();
}
return 0;
}
@Override
public String toString() {
return inputFormatClassName + ":" + inputSplit.toString();
}
@Override
public long getLength() {
long r = 0;
try {
r = inputSplit.getLength();
} catch (Exception e) {
throw new RuntimeException(e);
}
return r;
}
@Override
public String[] getLocations() throws IOException {
return inputSplit.getLocations();
}
@Override
public void readFields(DataInput in) throws IOException {
String inputSplitClassName = in.readUTF();
try {
inputSplit = (InputSplit) ReflectionUtils.newInstance(conf
.getClassByName(inputSplitClassName), conf);
} catch (Exception e) {
throw new IOException(
"Cannot create an instance of InputSplit class = "
+ inputSplitClassName + ":" + e.getMessage(), e);
}
inputSplit.readFields(in);
inputFormatClassName = in.readUTF();
}
@Override
public void write(DataOutput out) throws IOException {
out.writeUTF(inputSplit.getClass().getName());
inputSplit.write(out);
out.writeUTF(inputFormatClassName);
}
Configuration conf;
@Override
public Configuration getConf() {
return conf;
}
@Override
public void setConf(Configuration conf) {
this.conf = conf;
}
}
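// Illustrative sketch (hypothetical variables, not from the original source): a split produced by the
// underlying format is wrapped together with that format's class name, e.g.
// InputSplit wrapped = new HiveInputSplit(fileSplit, TextInputFormat.class.getName());
// so that getRecordReader below can look the class up again and instantiate the matching InputFormat.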
JobConf job;
public void configure(JobConf job) {
this.job = job;
}
/**
* A cache of InputFormat instances.
*/
protected static Map<Class, InputFormat<WritableComparable, Writable>> inputFormats;
public static InputFormat<WritableComparable, Writable> getInputFormatFromCache(
Class inputFormatClass, JobConf job) throws IOException {
if (inputFormats == null) {
inputFormats = new HashMap<Class, InputFormat<WritableComparable, Writable>>();
}
if (!inputFormats.containsKey(inputFormatClass)) {
try {
InputFormat<WritableComparable, Writable> newInstance = (InputFormat<WritableComparable, Writable>) ReflectionUtils
.newInstance(inputFormatClass, job);
inputFormats.put(inputFormatClass, newInstance);
} catch (Exception e) {
throw new IOException("Cannot create an instance of InputFormat class "
+ inputFormatClass.getName() + " as specified in mapredWork!", e);
}
}
return inputFormats.get(inputFormatClass);
}
public RecordReader getRecordReader(InputSplit split, JobConf job,
Reporter reporter) throws IOException {
HiveInputSplit hsplit = (HiveInputSplit) split;
InputSplit inputSplit = hsplit.getInputSplit();
String inputFormatClassName = null;
Class inputFormatClass = null;
try {
inputFormatClassName = hsplit.inputFormatClassName();
inputFormatClass = job.getClassByName(inputFormatClassName);
} catch (Exception e) {
throw new IOException("cannot find class " + inputFormatClassName, e);
}
if (this.mrwork == null) {
init(job);
}
boolean nonNative = false;
PartitionDesc part = pathToPartitionInfo.get(hsplit.getPath().toString());
if ((part != null) && (part.getTableDesc() != null)) {
Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job);
nonNative = part.getTableDesc().isNonNative();
}
pushProjectionsAndFilters(job, inputFormatClass, hsplit.getPath()
.toString(), hsplit.getPath().toUri().getPath(), nonNative);
InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
RecordReader innerReader = null;
try {
innerReader = inputFormat.getRecordReader(inputSplit, job, reporter);
} catch (Exception e) {
innerReader = HiveIOExceptionHandlerUtil
.handleRecordReaderCreationException(e, job);
}
HiveRecordReader<K,V> rr = new HiveRecordReader(innerReader, job);
rr.initIOContext(hsplit, job, inputFormatClass, innerReader);
return rr;
}
protected Map<String, PartitionDesc> pathToPartitionInfo;
MapWork mrwork = null;
protected void init(JobConf job) {
mrwork = Utilities.getMapWork(job);
pathToPartitionInfo = mrwork.getPathToPartitionInfo();
}
/*
* AddSplitsForGroup collects separate calls to setInputPaths into one where possible.
* The reason for this is that it is faster on some InputFormats. E.g.: Orc will start
* a threadpool to do the work, and calling it multiple times would needlessly create a lot
* of thread pools.
*/
private void addSplitsForGroup(List<Path> dirs, TableScanOperator tableScan, JobConf conf,
InputFormat inputFormat, Class<? extends InputFormat> inputFormatClass, int splits,
TableDesc table, List<InputSplit> result) throws IOException {
Utilities.copyTableJobPropertiesToConf(table, conf);
if (tableScan != null) {
pushFilters(conf, tableScan);
}
FileInputFormat.setInputPaths(conf, dirs.toArray(new Path[dirs.size()]));
conf.setInputFormat(inputFormat.getClass());
int headerCount = 0;
int footerCount = 0;
if (table != null) {
headerCount = Utilities.getHeaderCount(table);
footerCount = Utilities.getFooterCount(table, conf);
if (headerCount != 0 || footerCount != 0) {
// Input file has a header or footer, so it cannot be split.
conf.setLong(
ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMINSPLITSIZE"),
Long.MAX_VALUE);
}
}
InputSplit[] iss = inputFormat.getSplits(conf, splits);
for (InputSplit is : iss) {
result.add(new HiveInputSplit(is, inputFormatClass.getName()));
}
}
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
PerfLogger perfLogger = PerfLogger.getPerfLogger();
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.GET_SPLITS);
init(job);
Path[] dirs = FileInputFormat.getInputPaths(job);
if (dirs.length == 0) {
// on tez we avoid duplicating the file info in FileInputFormat.
if (HiveConf.getVar(job, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
try {
List<Path> paths = Utilities.getInputPathsTez(job, mrwork);
dirs = paths.toArray(new Path[paths.size()]);
} catch (Exception e) {
throw new IOException("Could not create input files", e);
}
} else {
throw new IOException("No input paths specified in job");
}
}
JobConf newjob = new JobConf(job);
List<InputSplit> result = new ArrayList<InputSplit>();
List<Path> currentDirs = new ArrayList<Path>();
Class<? extends InputFormat> currentInputFormatClass = null;
TableDesc currentTable = null;
TableScanOperator currentTableScan = null;
// for each dir, get the InputFormat, and do getSplits.
for (Path dir : dirs) {
PartitionDesc part = getPartitionDescFromPath(pathToPartitionInfo, dir);
Class<? extends InputFormat> inputFormatClass = part.getInputFileFormatClass();
TableDesc table = part.getTableDesc();
TableScanOperator tableScan = null;
List<String> aliases =
mrwork.getPathToAliases().get(dir.toUri().toString());
// Make filter pushdown information available to getSplits.
if ((aliases != null) && (aliases.size() == 1)) {
Operator op = mrwork.getAliasToWork().get(aliases.get(0));
if ((op != null) && (op instanceof TableScanOperator)) {
tableScan = (TableScanOperator) op;
// push down projections.
ColumnProjectionUtils.appendReadColumns(
newjob, tableScan.getNeededColumnIDs(), tableScan.getNeededColumns());
// push down filters
pushFilters(newjob, tableScan);
}
}
if (!currentDirs.isEmpty() &&
inputFormatClass.equals(currentInputFormatClass) &&
table.equals(currentTable) &&
tableScan == currentTableScan) {
currentDirs.add(dir);
continue;
}
if (!currentDirs.isEmpty()) {
LOG.info("Generating splits");
addSplitsForGroup(currentDirs, currentTableScan, newjob,
getInputFormatFromCache(currentInputFormatClass, job),
currentInputFormatClass, currentDirs.size()*(numSplits / dirs.length),
currentTable, result);
}
currentDirs.clear();
currentDirs.add(dir);
currentTableScan = tableScan;
currentTable = table;
currentInputFormatClass = inputFormatClass;
}
if (dirs.length != 0) {
LOG.info("Generating splits");
addSplitsForGroup(currentDirs, currentTableScan, newjob,
getInputFormatFromCache(currentInputFormatClass, job),
currentInputFormatClass, currentDirs.size()*(numSplits / dirs.length),
currentTable, result);
}
LOG.info("number of splits " + result.size());
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.GET_SPLITS);
return result.toArray(new HiveInputSplit[result.size()]);
}
protected static PartitionDesc getPartitionDescFromPath(
Map<String, PartitionDesc> pathToPartitionInfo, Path dir)
throws IOException {
PartitionDesc partDesc = pathToPartitionInfo.get(dir.toString());
if (partDesc == null) {
partDesc = pathToPartitionInfo.get(dir.toUri().getPath());
}
if (partDesc == null) {
throw new IOException("cannot find dir = " + dir.toString()
+ " in " + pathToPartitionInfo);
}
return partDesc;
}
public static void pushFilters(JobConf jobConf, TableScanOperator tableScan) {
TableScanDesc scanDesc = tableScan.getConf();
if (scanDesc == null) {
return;
}
// construct column name list and types for reference by filter push down
Utilities.setColumnNameList(jobConf, tableScan);
Utilities.setColumnTypeList(jobConf, tableScan);
// push down filters
ExprNodeGenericFuncDesc filterExpr = (ExprNodeGenericFuncDesc)scanDesc.getFilterExpr();
if (filterExpr == null) {
return;
}
String filterText = filterExpr.getExprString();
String filterExprSerialized = Utilities.serializeExpression(filterExpr);
if (LOG.isDebugEnabled()) {
LOG.debug("Filter text = " + filterText);
LOG.debug("Filter expression = " + filterExprSerialized);
}
jobConf.set(
TableScanDesc.FILTER_TEXT_CONF_STR,
filterText);
jobConf.set(
TableScanDesc.FILTER_EXPR_CONF_STR,
filterExprSerialized);
}
protected void pushProjectionsAndFilters(JobConf jobConf, Class inputFormatClass,
String splitPath, String splitPathWithNoSchema) {
pushProjectionsAndFilters(jobConf, inputFormatClass, splitPath,
splitPathWithNoSchema, false);
}
protected void pushProjectionsAndFilters(JobConf jobConf, Class inputFormatClass,
String splitPath, String splitPathWithNoSchema, boolean nonNative) {
if (this.mrwork == null) {
init(job);
}
if(this.mrwork.getPathToAliases() == null) {
return;
}
ArrayList<String> aliases = new ArrayList<String>();
Iterator<Entry<String, ArrayList<String>>> iterator = this.mrwork
.getPathToAliases().entrySet().iterator();
while (iterator.hasNext()) {
Entry<String, ArrayList<String>> entry = iterator.next();
String key = entry.getKey();
boolean match;
if (nonNative) {
// For non-native tables, we need to do an exact match to avoid
// HIVE-1903. (The table location contains no files, and the string
// representation of its path does not have a trailing slash.)
match =
splitPath.equals(key) || splitPathWithNoSchema.equals(key);
} else {
// But for native tables, we need to do a prefix match for
// subdirectories. (Unlike non-native tables, prefix mixups don't seem
// to be a potential problem here since we are always dealing with the
// path to something deeper than the table location.)
match =
splitPath.startsWith(key) || splitPathWithNoSchema.startsWith(key);
}
if (match) {
ArrayList<String> list = entry.getValue();
for (String val : list) {
aliases.add(val);
}
}
}
for (String alias : aliases) {
Operator<? extends OperatorDesc> op = this.mrwork.getAliasToWork().get(
alias);
if (op instanceof TableScanOperator) {
TableScanOperator ts = (TableScanOperator) op;
// push down projections.
ColumnProjectionUtils.appendReadColumns(
jobConf, ts.getNeededColumnIDs(), ts.getNeededColumns());
// push down filters
pushFilters(jobConf, ts);
}
}
}
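// Illustrative note (hypothetical paths): with an alias registered under the key "hdfs://nn/warehouse/t",
// a split for a native table at "hdfs://nn/warehouse/t/part=1/file0" matches by prefix, whereas a
// non-native table only matches when the split path equals the key exactly.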
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.Session;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.spi.block.SortOrder;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.DeterminismEvaluator;
import com.facebook.presto.sql.planner.PartitioningScheme;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.ApplyNode;
import com.facebook.presto.sql.planner.plan.AssignUniqueId;
import com.facebook.presto.sql.planner.plan.DeleteNode;
import com.facebook.presto.sql.planner.plan.DistinctLimitNode;
import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode;
import com.facebook.presto.sql.planner.plan.ExceptNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.GroupIdNode;
import com.facebook.presto.sql.planner.plan.IndexJoinNode;
import com.facebook.presto.sql.planner.plan.IndexSourceNode;
import com.facebook.presto.sql.planner.plan.IntersectNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.MarkDistinctNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.plan.RowNumberNode;
import com.facebook.presto.sql.planner.plan.SampleNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SetOperationNode;
import com.facebook.presto.sql.planner.plan.SimplePlanRewriter;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableFinishNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.TopNRowNumberNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.UnnestNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.ExpressionRewriter;
import com.facebook.presto.sql.tree.ExpressionTreeRewriter;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.NullLiteral;
import com.facebook.presto.sql.tree.SymbolReference;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import static com.facebook.presto.util.ImmutableCollectors.toImmutableList;
import static com.facebook.presto.util.ImmutableCollectors.toImmutableSet;
import static java.util.Objects.requireNonNull;
/**
* Re-maps symbol references that are just aliases of each other (e.g., due to projections like {@code $0 := $1})
* <p>
* E.g.,
* <p>
* {@code Output[$0, $1] -> Project[$0 := $2, $1 := $3 * 100] -> Aggregate[$2, $3 := sum($4)] -> ...}
* <p>
* gets rewritten as
* <p>
* {@code Output[$2, $1] -> Project[$2, $1 := $3 * 100] -> Aggregate[$2, $3 := sum($4)] -> ...}
*/
public class UnaliasSymbolReferences
implements PlanOptimizer
{
@Override
public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator)
{
requireNonNull(plan, "plan is null");
requireNonNull(session, "session is null");
requireNonNull(types, "types is null");
requireNonNull(symbolAllocator, "symbolAllocator is null");
requireNonNull(idAllocator, "idAllocator is null");
return SimplePlanRewriter.rewriteWith(new Rewriter(), plan);
}
private static class Rewriter
extends SimplePlanRewriter<Void>
{
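// Alias map from a symbol to its canonical replacement; entries are recorded while rewriting
// Project nodes, so parents rewritten afterwards see only the canonical names.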
private final Map<Symbol, Symbol> mapping = new HashMap<>();
@Override
public PlanNode visitAggregation(AggregationNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
ImmutableMap.Builder<Symbol, Signature> functionInfos = ImmutableMap.builder();
ImmutableMap.Builder<Symbol, FunctionCall> functionCalls = ImmutableMap.builder();
ImmutableMap.Builder<Symbol, Symbol> masks = ImmutableMap.builder();
for (Map.Entry<Symbol, FunctionCall> entry : node.getAggregations().entrySet()) {
Symbol symbol = entry.getKey();
Symbol canonical = canonicalize(symbol);
FunctionCall canonicalCall = (FunctionCall) canonicalize(entry.getValue());
functionCalls.put(canonical, canonicalCall);
functionInfos.put(canonical, node.getFunctions().get(symbol));
}
for (Map.Entry<Symbol, Symbol> entry : node.getMasks().entrySet()) {
masks.put(canonicalize(entry.getKey()), canonicalize(entry.getValue()));
}
List<List<Symbol>> groupingSets = node.getGroupingSets().stream()
.map(this::canonicalizeAndDistinct)
.collect(toImmutableList());
return new AggregationNode(
node.getId(),
source,
functionCalls.build(),
functionInfos.build(),
masks.build(),
groupingSets,
node.getStep(),
canonicalize(node.getSampleWeight()),
node.getConfidence(),
canonicalize(node.getHashSymbol()),
canonicalize(node.getGroupIdSymbol()));
}
@Override
public PlanNode visitGroupId(GroupIdNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
List<List<Symbol>> groupingSetsSymbols = node.getGroupingSets().stream()
.map(this::canonicalize)
.collect(Collectors.toList());
ImmutableMap.Builder<Symbol, Symbol> newPassthroughMap = ImmutableMap.builder();
for (Symbol inputSymbol : node.getIdentityMappings().keySet()) {
newPassthroughMap.put(canonicalize(inputSymbol), canonicalize(node.getIdentityMappings().get(inputSymbol)));
}
return new GroupIdNode(node.getId(), source, groupingSetsSymbols, newPassthroughMap.build(), canonicalize(node.getGroupIdSymbol()));
}
@Override
public PlanNode visitExplainAnalyze(ExplainAnalyzeNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
return new ExplainAnalyzeNode(node.getId(), source, canonicalize(node.getOutputSymbol()));
}
@Override
public PlanNode visitMarkDistinct(MarkDistinctNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
List<Symbol> symbols = canonicalizeAndDistinct(node.getDistinctSymbols());
return new MarkDistinctNode(node.getId(), source, canonicalize(node.getMarkerSymbol()), symbols, canonicalize(node.getHashSymbol()));
}
@Override
public PlanNode visitUnnest(UnnestNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
ImmutableMap.Builder<Symbol, List<Symbol>> builder = ImmutableMap.builder();
for (Map.Entry<Symbol, List<Symbol>> entry : node.getUnnestSymbols().entrySet()) {
builder.put(canonicalize(entry.getKey()), entry.getValue());
}
return new UnnestNode(node.getId(), source, canonicalizeAndDistinct(node.getReplicateSymbols()), builder.build(), node.getOrdinalitySymbol());
}
@Override
public PlanNode visitWindow(WindowNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
ImmutableMap.Builder<Symbol, WindowNode.Function> functions = ImmutableMap.builder();
ImmutableMap.Builder<WindowNode.Function, WindowNode.Frame> frames = ImmutableMap.builder();
for (Map.Entry<Symbol, WindowNode.Function> entry : node.getWindowFunctions().entrySet()) {
Symbol symbol = entry.getKey();
FunctionCall canonicalFunctionCall = (FunctionCall) canonicalize(entry.getValue().getFunctionCall());
Signature signature = entry.getValue().getSignature();
WindowNode.Frame canonicalFrame = canonicalize(entry.getValue().getFrame());
functions.put(canonicalize(symbol), new WindowNode.Function(canonicalFunctionCall, signature, canonicalFrame));
}
ImmutableMap.Builder<Symbol, SortOrder> orderings = ImmutableMap.builder();
for (Map.Entry<Symbol, SortOrder> entry : node.getOrderings().entrySet()) {
orderings.put(canonicalize(entry.getKey()), entry.getValue());
}
return new WindowNode(
node.getId(),
source,
canonicalizeAndDistinct(node.getSpecification()),
functions.build(),
canonicalize(node.getHashSymbol()),
canonicalize(node.getPrePartitionedInputs()),
node.getPreSortedOrderPrefix());
}
private WindowNode.Frame canonicalize(WindowNode.Frame frame)
{
return new WindowNode.Frame(frame.getType(),
frame.getStartType(), canonicalize(frame.getStartValue()),
frame.getEndType(), canonicalize(frame.getEndValue()));
}
@Override
public PlanNode visitTableScan(TableScanNode node, RewriteContext<Void> context)
{
Expression originalConstraint = null;
if (node.getOriginalConstraint() != null) {
originalConstraint = canonicalize(node.getOriginalConstraint());
}
return new TableScanNode(
node.getId(),
node.getTable(),
node.getOutputSymbols(),
node.getAssignments(),
node.getLayout(),
node.getCurrentConstraint(),
originalConstraint);
}
@Override
public PlanNode visitExchange(ExchangeNode node, RewriteContext<Void> context)
{
List<PlanNode> sources = node.getSources().stream()
.map(context::rewrite)
.collect(toImmutableList());
List<List<Symbol>> inputs = new ArrayList<>();
for (int i = 0; i < node.getInputs().size(); i++) {
inputs.add(new ArrayList<>());
}
Set<Symbol> addedOutputs = new HashSet<>();
ImmutableList.Builder<Symbol> outputs = ImmutableList.builder();
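// Keep only the first occurrence of each canonical output, together with the matching input column from every source.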
for (int symbolIndex = 0; symbolIndex < node.getOutputSymbols().size(); symbolIndex++) {
Symbol canonicalOutput = canonicalize(node.getOutputSymbols().get(symbolIndex));
if (addedOutputs.add(canonicalOutput)) {
outputs.add(canonicalOutput);
for (int i = 0; i < node.getInputs().size(); i++) {
List<Symbol> input = node.getInputs().get(i);
inputs.get(i).add(canonicalize(input.get(symbolIndex)));
}
}
}
PartitioningScheme partitioningScheme = new PartitioningScheme(
node.getPartitioningScheme().getPartitioning().translate(this::canonicalize),
outputs.build(),
canonicalize(node.getPartitioningScheme().getHashColumn()),
node.getPartitioningScheme().isReplicateNulls(),
node.getPartitioningScheme().getBucketToPartition());
return new ExchangeNode(node.getId(), node.getType(), node.getScope(), partitioningScheme, sources, inputs);
}
@Override
public PlanNode visitRemoteSource(RemoteSourceNode node, RewriteContext<Void> context)
{
return new RemoteSourceNode(node.getId(), node.getSourceFragmentIds(), canonicalizeAndDistinct(node.getOutputSymbols()));
}
@Override
public PlanNode visitLimit(LimitNode node, RewriteContext<Void> context)
{
return context.defaultRewrite(node);
}
@Override
public PlanNode visitDistinctLimit(DistinctLimitNode node, RewriteContext<Void> context)
{
return new DistinctLimitNode(node.getId(), context.rewrite(node.getSource()), node.getLimit(), node.isPartial(), canonicalize(node.getHashSymbol()));
}
@Override
public PlanNode visitSample(SampleNode node, RewriteContext<Void> context)
{
return new SampleNode(node.getId(), context.rewrite(node.getSource()), node.getSampleRatio(), node.getSampleType(), node.isRescaled(), canonicalize(node.getSampleWeightSymbol()));
}
@Override
public PlanNode visitValues(ValuesNode node, RewriteContext<Void> context)
{
return context.defaultRewrite(node);
}
@Override
public PlanNode visitDelete(DeleteNode node, RewriteContext<Void> context)
{
return new DeleteNode(node.getId(), context.rewrite(node.getSource()), node.getTarget(), canonicalize(node.getRowId()), node.getOutputSymbols());
}
@Override
public PlanNode visitTableFinish(TableFinishNode node, RewriteContext<Void> context)
{
return context.defaultRewrite(node);
}
@Override
public PlanNode visitRowNumber(RowNumberNode node, RewriteContext<Void> context)
{
return new RowNumberNode(node.getId(), context.rewrite(node.getSource()), canonicalizeAndDistinct(node.getPartitionBy()), canonicalize(node.getRowNumberSymbol()), node.getMaxRowCountPerPartition(), canonicalize(node.getHashSymbol()));
}
@Override
public PlanNode visitTopNRowNumber(TopNRowNumberNode node, RewriteContext<Void> context)
{
return new TopNRowNumberNode(
node.getId(),
context.rewrite(node.getSource()),
canonicalizeAndDistinct(node.getSpecification()),
canonicalize(node.getRowNumberSymbol()),
node.getMaxRowCountPerPartition(),
node.isPartial(),
canonicalize(node.getHashSymbol()));
}
@Override
public PlanNode visitFilter(FilterNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
return new FilterNode(node.getId(), source, canonicalize(node.getPredicate()));
}
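// Detects identity projections ($x := $y) and repeated deterministic expressions, records them as
// aliases, and keeps a single assignment per canonical symbol.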
@Override
public PlanNode visitProject(ProjectNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
Map<Expression, Symbol> computedExpressions = new HashMap<>();
Map<Symbol, Expression> assignments = new LinkedHashMap<>();
for (Map.Entry<Symbol, Expression> entry : node.getAssignments().entrySet()) {
Expression expression = canonicalize(entry.getValue());
if (expression instanceof SymbolReference) {
// Always map a trivial symbol projection
Symbol symbol = Symbol.from(expression);
if (!symbol.equals(entry.getKey())) {
map(entry.getKey(), symbol);
}
}
else if (DeterminismEvaluator.isDeterministic(expression) && !(expression instanceof NullLiteral)) {
// Try to map same deterministic expressions within a projection into the same symbol
// Omit NullLiterals since those have ambiguous types
Symbol computedSymbol = computedExpressions.get(expression);
if (computedSymbol == null) {
// If we haven't seen the expression before in this projection, record it
computedExpressions.put(expression, entry.getKey());
}
else {
// If we have seen the expression before and if it is deterministic
// then we can rewrite references to the current symbol in terms of the parallel computedSymbol in the projection
map(entry.getKey(), computedSymbol);
}
}
Symbol canonical = canonicalize(entry.getKey());
if (!assignments.containsKey(canonical)) {
assignments.put(canonical, expression);
}
}
return new ProjectNode(node.getId(), source, assignments);
}
@Override
public PlanNode visitOutput(OutputNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
List<Symbol> canonical = Lists.transform(node.getOutputSymbols(), this::canonicalize);
return new OutputNode(node.getId(), source, node.getColumnNames(), canonical);
}
@Override
public PlanNode visitEnforceSingleRow(EnforceSingleRowNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
return new EnforceSingleRowNode(node.getId(), source);
}
@Override
public PlanNode visitAssignUniqueId(AssignUniqueId node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
return new AssignUniqueId(node.getId(), source, node.getIdColumn());
}
@Override
public PlanNode visitApply(ApplyNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getInput());
PlanNode subquery = context.rewrite(node.getSubquery());
List<Symbol> canonicalCorrelation = Lists.transform(node.getCorrelation(), this::canonicalize);
return new ApplyNode(node.getId(), source, subquery, canonicalCorrelation);
}
@Override
public PlanNode visitTopN(TopNNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
ImmutableList.Builder<Symbol> symbols = ImmutableList.builder();
ImmutableMap.Builder<Symbol, SortOrder> orderings = ImmutableMap.builder();
for (Symbol symbol : node.getOrderBy()) {
Symbol canonical = canonicalize(symbol);
symbols.add(canonical);
orderings.put(canonical, node.getOrderings().get(symbol));
}
return new TopNNode(node.getId(), source, node.getCount(), symbols.build(), orderings.build(), node.isPartial());
}
@Override
public PlanNode visitSort(SortNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
ImmutableList.Builder<Symbol> symbols = ImmutableList.builder();
ImmutableMap.Builder<Symbol, SortOrder> orderings = ImmutableMap.builder();
for (Symbol symbol : node.getOrderBy()) {
Symbol canonical = canonicalize(symbol);
symbols.add(canonical);
orderings.put(canonical, node.getOrderings().get(symbol));
}
return new SortNode(node.getId(), source, symbols.build(), orderings.build());
}
@Override
public PlanNode visitJoin(JoinNode node, RewriteContext<Void> context)
{
PlanNode left = context.rewrite(node.getLeft());
PlanNode right = context.rewrite(node.getRight());
return new JoinNode(node.getId(), node.getType(), left, right, canonicalizeJoinCriteria(node.getCriteria()), node.getFilter().map(this::canonicalize), canonicalize(node.getLeftHashSymbol()), canonicalize(node.getRightHashSymbol()));
}
@Override
public PlanNode visitSemiJoin(SemiJoinNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
PlanNode filteringSource = context.rewrite(node.getFilteringSource());
return new SemiJoinNode(node.getId(), source, filteringSource, canonicalize(node.getSourceJoinSymbol()), canonicalize(node.getFilteringSourceJoinSymbol()), canonicalize(node.getSemiJoinOutput()), canonicalize(node.getSourceHashSymbol()), canonicalize(node.getFilteringSourceHashSymbol()));
}
@Override
public PlanNode visitIndexSource(IndexSourceNode node, RewriteContext<Void> context)
{
return new IndexSourceNode(node.getId(), node.getIndexHandle(), node.getTableHandle(), node.getLayout(), canonicalize(node.getLookupSymbols()), node.getOutputSymbols(), node.getAssignments(), node.getEffectiveTupleDomain());
}
@Override
public PlanNode visitIndexJoin(IndexJoinNode node, RewriteContext<Void> context)
{
PlanNode probeSource = context.rewrite(node.getProbeSource());
PlanNode indexSource = context.rewrite(node.getIndexSource());
return new IndexJoinNode(node.getId(), node.getType(), probeSource, indexSource, canonicalizeIndexJoinCriteria(node.getCriteria()), canonicalize(node.getProbeHashSymbol()), canonicalize(node.getIndexHashSymbol()));
}
@Override
public PlanNode visitUnion(UnionNode node, RewriteContext<Void> context)
{
return new UnionNode(node.getId(), rewriteSources(node, context).build(), canonicalizeSetOperationSymbolMap(node.getSymbolMapping()), canonicalize(node.getOutputSymbols()));
}
@Override
public PlanNode visitIntersect(IntersectNode node, RewriteContext<Void> context)
{
return new IntersectNode(node.getId(), rewriteSources(node, context).build(), canonicalizeSetOperationSymbolMap(node.getSymbolMapping()), canonicalize(node.getOutputSymbols()));
}
@Override
public PlanNode visitExcept(ExceptNode node, RewriteContext<Void> context)
{
return new ExceptNode(node.getId(), rewriteSources(node, context).build(), canonicalizeSetOperationSymbolMap(node.getSymbolMapping()), canonicalize(node.getOutputSymbols()));
}
private ImmutableList.Builder<PlanNode> rewriteSources(SetOperationNode node, RewriteContext<Void> context)
{
ImmutableList.Builder<PlanNode> rewrittenSources = ImmutableList.builder();
for (PlanNode source : node.getSources()) {
rewrittenSources.add(context.rewrite(source));
}
return rewrittenSources;
}
@Override
public PlanNode visitTableWriter(TableWriterNode node, RewriteContext<Void> context)
{
PlanNode source = context.rewrite(node.getSource());
// Intentionally does not use canonicalizeAndDistinct as that would remove columns
ImmutableList<Symbol> columns = node.getColumns().stream()
.map(this::canonicalize)
.collect(toImmutableList());
return new TableWriterNode(
node.getId(),
source,
node.getTarget(),
columns,
node.getColumnNames(),
node.getOutputSymbols(),
canonicalize(node.getSampleWeightSymbol()),
node.getPartitioningScheme().map(this::canonicalizePartitionFunctionBinding));
}
@Override
protected PlanNode visitPlan(PlanNode node, RewriteContext<Void> context)
{
throw new UnsupportedOperationException("Unsupported plan node " + node.getClass().getSimpleName());
}
private void map(Symbol symbol, Symbol canonical)
{
Preconditions.checkArgument(!symbol.equals(canonical), "Can't map symbol to itself: %s", symbol);
mapping.put(symbol, canonical);
}
private Optional<Symbol> canonicalize(Optional<Symbol> symbol)
{
if (symbol.isPresent()) {
return Optional.of(canonicalize(symbol.get()));
}
return Optional.empty();
}
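// Follows the alias chain until a symbol with no further mapping is found.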
private Symbol canonicalize(Symbol symbol)
{
Symbol canonical = symbol;
while (mapping.containsKey(canonical)) {
canonical = mapping.get(canonical);
}
return canonical;
}
private Expression canonicalize(Expression value)
{
return ExpressionTreeRewriter.rewriteWith(new ExpressionRewriter<Void>()
{
@Override
public Expression rewriteSymbolReference(SymbolReference node, Void context, ExpressionTreeRewriter<Void> treeRewriter)
{
Symbol canonical = canonicalize(Symbol.from(node));
return canonical.toSymbolReference();
}
}, value);
}
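// Canonicalizes the symbols and removes duplicates while preserving first-seen order.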
private List<Symbol> canonicalizeAndDistinct(List<Symbol> outputs)
{
Set<Symbol> added = new HashSet<>();
ImmutableList.Builder<Symbol> builder = ImmutableList.builder();
for (Symbol symbol : outputs) {
Symbol canonical = canonicalize(symbol);
if (added.add(canonical)) {
builder.add(canonical);
}
}
return builder.build();
}
private WindowNode.Specification canonicalizeAndDistinct(WindowNode.Specification specification)
{
ImmutableMap.Builder<Symbol, SortOrder> orderings = ImmutableMap.builder();
for (Map.Entry<Symbol, SortOrder> entry : specification.getOrderings().entrySet()) {
orderings.put(canonicalize(entry.getKey()), entry.getValue());
}
return new WindowNode.Specification(
canonicalizeAndDistinct(specification.getPartitionBy()),
canonicalizeAndDistinct(specification.getOrderBy()),
orderings.build());
}
private List<Symbol> canonicalize(List<Symbol> symbols)
{
return symbols.stream()
.map(this::canonicalize)
.collect(toImmutableList());
}
private Set<Symbol> canonicalize(Set<Symbol> symbols)
{
return symbols.stream()
.map(this::canonicalize)
.collect(toImmutableSet());
}
private List<JoinNode.EquiJoinClause> canonicalizeJoinCriteria(List<JoinNode.EquiJoinClause> criteria)
{
ImmutableList.Builder<JoinNode.EquiJoinClause> builder = ImmutableList.builder();
for (JoinNode.EquiJoinClause clause : criteria) {
builder.add(new JoinNode.EquiJoinClause(canonicalize(clause.getLeft()), canonicalize(clause.getRight())));
}
return builder.build();
}
private List<IndexJoinNode.EquiJoinClause> canonicalizeIndexJoinCriteria(List<IndexJoinNode.EquiJoinClause> criteria)
{
ImmutableList.Builder<IndexJoinNode.EquiJoinClause> builder = ImmutableList.builder();
for (IndexJoinNode.EquiJoinClause clause : criteria) {
builder.add(new IndexJoinNode.EquiJoinClause(canonicalize(clause.getProbe()), canonicalize(clause.getIndex())));
}
return builder.build();
}
private ListMultimap<Symbol, Symbol> canonicalizeSetOperationSymbolMap(ListMultimap<Symbol, Symbol> setOperationSymbolMap)
{
ImmutableListMultimap.Builder<Symbol, Symbol> builder = ImmutableListMultimap.builder();
for (Map.Entry<Symbol, Collection<Symbol>> entry : setOperationSymbolMap.asMap().entrySet()) {
builder.putAll(canonicalize(entry.getKey()), Iterables.transform(entry.getValue(), this::canonicalize));
}
return builder.build();
}
private PartitioningScheme canonicalizePartitionFunctionBinding(PartitioningScheme scheme)
{
return new PartitioningScheme(
scheme.getPartitioning().translate(this::canonicalize),
canonicalize(scheme.getOutputLayout()),
canonicalize(scheme.getHashColumn()),
scheme.isReplicateNulls(),
scheme.getBucketToPartition());
}
}
}
|
|
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.workbench.forms.display.backend.provider;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.enterprise.inject.Instance;
import org.jbpm.workbench.forms.display.api.KieWorkbenchFormRenderingSettings;
import org.jbpm.workbench.forms.display.backend.provider.model.Invoice;
import org.jbpm.workbench.forms.display.backend.provider.model.InvoiceLine;
import org.jbpm.workbench.forms.service.providing.RenderingSettings;
import org.junit.Before;
import org.junit.Test;
import org.kie.internal.task.api.ContentMarshallerContext;
import org.kie.soup.project.datamodel.commons.util.RawMVELEvaluator;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.BackendFormRenderingContextManagerImpl;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.marshalling.FieldValueMarshaller;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.marshalling.FieldValueMarshallerRegistry;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.marshalling.FieldValueMarshallerRegistryImpl;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.marshalling.TextAreaFormFieldValueMarshaller;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.marshalling.models.MultipleSubFormFieldValueMarshaller;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.marshalling.models.SubFormFieldValueMarshaller;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.marshalling.time.DateMultipleInputFieldValueMarshaller;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.marshalling.time.DateMultipleSelectorFieldValueMarshaller;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.marshalling.time.LocalDateFieldValueMarshaller;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.validation.impl.ContextModelConstraintsExtractorImpl;
import org.kie.workbench.common.forms.dynamic.service.context.generation.dynamic.BackendFormRenderingContextManager;
import org.kie.workbench.common.forms.dynamic.service.shared.impl.MapModelRenderingContext;
import org.kie.workbench.common.forms.fields.test.TestFieldManager;
import org.kie.workbench.common.forms.fields.test.TestMetaDataEntryManager;
import org.kie.workbench.common.forms.jbpm.server.service.formGeneration.impl.runtime.BPMNRuntimeFormGeneratorService;
import org.kie.workbench.common.forms.jbpm.server.service.impl.DynamicBPMNFormGeneratorImpl;
import org.kie.workbench.common.forms.jbpm.service.bpmn.DynamicBPMNFormGenerator;
import org.kie.workbench.common.forms.services.backend.serialization.FormDefinitionSerializer;
import org.kie.workbench.common.forms.services.backend.serialization.impl.FieldSerializer;
import org.kie.workbench.common.forms.services.backend.serialization.impl.FormDefinitionSerializerImpl;
import org.kie.workbench.common.forms.services.backend.serialization.impl.FormModelSerializer;
import org.mockito.Mock;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public abstract class AbstractFormsValuesProcessorTest<PROCESSOR extends KieWorkbenchFormsValuesProcessor<SETTINGS>, SETTINGS extends RenderingSettings> {
protected static final String SERVER_TEMPLATE_ID = "serverTemplateId";
protected static final String ID = "id";
protected static final String NAME = "name";
protected static final String ADDRESS = "address";
protected static final String PRODUCT = "product";
protected static final String QUANTITY = "quantity";
protected static final String PRICE = "price";
protected static final String TOTAL = "total";
protected static final String CLIENT = "client";
protected static final String LINES = "lines";
protected static final String COMMENTS = "comments";
protected static final String DATE = "date";
protected static final String INVOICE = "invoice";
protected static final int EXPECTED_MODEL_VALIDATIONS = 3;
protected static final int EXPECTED_FORMS = 4;
@Mock
protected ContentMarshallerContext marshallerContext;
private FieldValueMarshallerRegistry registry;
protected DynamicBPMNFormGenerator dynamicBPMNFormGenerator;
protected BackendFormRenderingContextManagerImpl backendFormRenderingContextManager;
protected BPMNRuntimeFormGeneratorService runtimeFormGeneratorService;
protected KieWorkbenchFormRenderingSettings kieWorkbenchFormRenderingSettings;
protected SETTINGS renderingSettings;
protected PROCESSOR processor;
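// Builds the marshaller registry, backend rendering context manager and BPMN form generator
// shared by all subclasses before each test.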
@Before
public void init() {
SubFormFieldValueMarshaller subFormFieldValueMarshaller = new SubFormFieldValueMarshaller();
MultipleSubFormFieldValueMarshaller multipleSubFormFieldValueMarshaller = new MultipleSubFormFieldValueMarshaller();
List<FieldValueMarshaller> marshallers = Arrays.asList(subFormFieldValueMarshaller,
multipleSubFormFieldValueMarshaller,
new DateMultipleInputFieldValueMarshaller(),
new DateMultipleSelectorFieldValueMarshaller(),
new LocalDateFieldValueMarshaller(),
new TextAreaFormFieldValueMarshaller());
Instance<FieldValueMarshaller<?, ?, ?>> marshallersInstance = mock(Instance.class);
when(marshallersInstance.iterator()).then(proc -> marshallers.iterator());
registry = new FieldValueMarshallerRegistryImpl(marshallersInstance);
subFormFieldValueMarshaller.setRegistry(registry);
multipleSubFormFieldValueMarshaller.setRegistry(registry);
backendFormRenderingContextManager = new BackendFormRenderingContextManagerImpl(registry, new ContextModelConstraintsExtractorImpl());
runtimeFormGeneratorService = new BPMNRuntimeFormGeneratorService(new TestFieldManager(), new RawMVELEvaluator());
dynamicBPMNFormGenerator = new DynamicBPMNFormGeneratorImpl(runtimeFormGeneratorService);
processor = getProcessorInstance(new FormDefinitionSerializerImpl(new FieldSerializer(),
new FormModelSerializer(),
new TestMetaDataEntryManager()),
backendFormRenderingContextManager,
dynamicBPMNFormGenerator);
when(marshallerContext.getClassloader()).thenReturn(this.getClass().getClassLoader());
}
@Test
public void testGenerateRenderingContextWithExistingForms() {
Map<String, String> formData = new HashMap<>();
formData.put("invoice",
Invoice.class.getName());
renderingSettings = getFullRenderingSettings();
kieWorkbenchFormRenderingSettings = processor.generateRenderingContext(renderingSettings);
checkGeneratedContext();
}
@Test
public void testGenerateRenderingContextWithoutForms() {
renderingSettings = getRenderingSettingsWithoutForms();
kieWorkbenchFormRenderingSettings = processor.generateRenderingContext(renderingSettings,
true);
checkGeneratedContext();
}
protected void checkGeneratedContext() {
assertNotNull(kieWorkbenchFormRenderingSettings);
MapModelRenderingContext formRenderingContext = kieWorkbenchFormRenderingSettings.getRenderingContext();
assertNotNull(formRenderingContext);
assertFalse(formRenderingContext.getModelConstraints().isEmpty());
assertEquals(EXPECTED_MODEL_VALIDATIONS,
formRenderingContext.getModelConstraints().size());
assertFalse(formRenderingContext.getAvailableForms().isEmpty());
assertEquals(EXPECTED_FORMS,
formRenderingContext.getAvailableForms().size());
}
@Test
public void testProcessFormValues() {
testGenerateRenderingContextWithExistingForms();
Map<String, Object> formValues = getFormValues();
Map<String, Object> outputValues = processor.generateRuntimeValuesMap(kieWorkbenchFormRenderingSettings.getTimestamp(),
formValues);
assertNotNull(outputValues);
assertFalse(outputValues.isEmpty());
assertNotNull(outputValues.get(INVOICE));
assertTrue(outputValues.get(INVOICE) instanceof Invoice);
Invoice invoice = (Invoice) outputValues.get(INVOICE);
Map<String, Object> invoiceMap = (Map<String, Object>) formValues.get(INVOICE);
Map<String, Object> clientMap = (Map<String, Object>) invoiceMap.get(CLIENT);
assertNotNull(invoice.getClient());
assertEquals(clientMap.get("id"),
invoice.getClient().getId());
assertEquals(clientMap.get("name"),
invoice.getClient().getName());
assertEquals(clientMap.get("address"),
invoice.getClient().getAddress());
List<Map<String, Object>> linesMap = (List<Map<String, Object>>) invoiceMap.get(LINES);
assertNotNull(invoice.getLines());
assertEquals(linesMap.size(),
invoice.getLines().size());
Map<String, Object> lineMap = linesMap.get(0);
InvoiceLine line = invoice.getLines().get(0);
assertEquals(lineMap.get("product"),
line.getProduct());
assertEquals(lineMap.get("quantity"),
line.getQuantity());
assertEquals(lineMap.get("price"),
line.getPrice());
assertEquals(lineMap.get("total"),
line.getTotal());
assertEquals(invoiceMap.get("comments"),
invoice.getComments());
assertEquals(invoiceMap.get("total"),
invoice.getTotal());
assertEquals(invoiceMap.get("date"),
invoice.getDate());
}
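// Simulated form submission: a nested map holding client data, one invoice line, totals, comments and a date.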
protected Map<String, Object> getFormValues() {
Map<String, Object> formValues = new HashMap<>();
Map<String, Object> clientMap = new HashMap<>();
clientMap.put(ID,
1234L);
clientMap.put(NAME,
"John Snow");
clientMap.put(ADDRESS,
"Winterfell");
List<Map<String, Object>> linesMap = new ArrayList<>();
Map<String, Object> lineMap = new HashMap<>();
lineMap.put(PRODUCT,
"Really Dangerous Sword");
lineMap.put(QUANTITY,
1);
lineMap.put(PRICE,
100.5);
lineMap.put(TOTAL,
100.5);
linesMap.add(lineMap);
Map<String, Object> invoiceMap = new HashMap<>();
invoiceMap.put(CLIENT,
clientMap);
invoiceMap.put(LINES,
linesMap);
invoiceMap.put(TOTAL,
100.5);
invoiceMap.put(COMMENTS,
"Everything was perfect");
invoiceMap.put(DATE,
new Date());
formValues.put(INVOICE,
invoiceMap);
return formValues;
}
abstract SETTINGS getFullRenderingSettings();
abstract SETTINGS getRenderingSettingsWithoutForms();
abstract PROCESSOR getProcessorInstance(FormDefinitionSerializer serializer,
BackendFormRenderingContextManager backendFormRenderingContextManager,
DynamicBPMNFormGenerator dynamicBPMNFormGenerator);
}
|
|
package com.vladsch.flexmark.ext.attributes;
import com.vladsch.flexmark.formatter.Formatter;
import com.vladsch.flexmark.parser.Parser;
import com.vladsch.flexmark.util.ast.Document;
import com.vladsch.flexmark.util.data.DataHolder;
import com.vladsch.flexmark.util.data.MutableDataSet;
import org.junit.Test;
import java.util.Collections;
import static org.junit.Assert.assertEquals;
public class MergeAttributesTest {
private static DataHolder OPTIONS = new MutableDataSet()
.set(Parser.EXTENSIONS, Collections.singletonList(AttributesExtension.create()))
.set(Parser.BLANK_LINES_IN_AST, true)
.set(Parser.PARSE_INNER_HTML_COMMENTS, true)
.set(Parser.HEADING_NO_ATX_SPACE, true)
.set(Formatter.MAX_TRAILING_BLANK_LINES, 0);
private static Formatter FORMATTER = Formatter.builder(OPTIONS).build();
private static Parser PARSER = Parser.builder(OPTIONS).build();
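// Parses each markdown source into its own Document, merge-renders them with the shared formatter,
// and compares the combined output against the expected text.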
private static void assertMerged(String expected, String... markdownSources) {
int iMax = markdownSources.length;
Document[] documents = new Document[iMax];
for (int i = 0; i < iMax; i++) {
documents[i] = PARSER.parse(markdownSources[i]);
}
String mergedOutput = FORMATTER.mergeRender(documents, 1);
assertEquals("Merged results differ", expected, mergedOutput);
}
@Test
public void test_IdAttributeConflict1() {
testIdAttributeConflict();
}
private void testIdAttributeConflict() {
assertMerged(
"{#fig:test}\n" +
"\n" +
"[Figure](#fig:test).\n" +
"\n" +
"{#fig:test1}\n" +
"\n" +
"[Figure](#fig:test1).\n" +
"\n",
"{#fig:test}\n" +
"\n" +
"[Figure](#fig:test).\n" +
"\n",
"{#fig:test} \n" +
"\n" +
"[Figure](#fig:test).\n" +
"\n");
}
@Test
public void test_IdAttributeConflict2() {
testIdAttributeConflict();
testIdAttributeConflict();
}
@Test
public void test_UndefinedIdConflict1() {
testUndefinedIdConflict();
}
private void testUndefinedIdConflict() {
assertMerged(
"{#fig:test}\n" +
"\n" +
"[Figure](#fig:test).\n" +
"\n" +
"[Figure](#fig:test1).\n" +
"\n" +
"{#fig:test1}\n" +
"\n" +
"[Figure](#fig:test1).\n" +
"\n",
"{#fig:test} \n" +
"\n" +
"[Figure](#fig:test).\n" +
"\n" +
"[Figure](#fig:test1).\n" +
"\n",
"{#fig:test} \n" +
"\n" +
"[Figure](#fig:test).\n" +
"\n");
}
@Test
public void test_UndefinedIdConflict2() {
testUndefinedIdConflict();
testUndefinedIdConflict();
}
// Header attribute id adjustment
void testAtxHeadingConflict() {
assertMerged(
"# Atx Heading\n" +
"\n" +
"[link](#atx-heading)\n" +
"\n" +
"# Atx Heading {.atx-heading1}\n" +
"\n" +
"[link](#atx-heading1)\n" +
"\n",
"# Atx Heading\n" +
"[link](#atx-heading)\n" +
"\n",
"# Atx Heading\n" +
"[link](#atx-heading)\n" +
"\n");
}
@Test
public void test_AtxHeadingConflict1() {
testAtxHeadingConflict();
}
@Test
public void test_AtxHeadingConflict2() {
testAtxHeadingConflict();
testAtxHeadingConflict();
}
void testSetextHeadingConflict() {
assertMerged(
"Setext Heading\n" +
"==============\n" +
"\n" +
"[link](#setext-heading)\n" +
"\n" +
"Setext Heading {.setext-heading1}\n" +
"=================================\n" +
"\n" +
"[link](#setext-heading1)\n" +
"\n",
"Setext Heading\n" +
"=======\n" +
"[link](#setext-heading)\n" +
"\n",
"Setext Heading\n" +
"=======\n" +
"[link](#setext-heading)\n" +
"\n");
}
@Test
public void test_SetextHeadingConflict1() {
testSetextHeadingConflict();
}
@Test
public void test_SetextHeadingConflict2() {
testSetextHeadingConflict();
testSetextHeadingConflict();
}
// Header attribute id adjustment
void testAtxHeadingExplicitConflict() {
assertMerged(
"# Atx Heading {#atx-explicit}\n" +
"\n" +
"[link](#atx-explicit)\n" +
"\n" +
"# Atx Heading {#atx-explicit1}\n" +
"\n" +
"[link](#atx-explicit1)\n" +
"\n",
"# Atx Heading {#atx-explicit}\n" +
"[link](#atx-explicit)\n" +
"\n",
"# Atx Heading {#atx-explicit}\n" +
"[link](#atx-explicit)\n" +
"\n");
}
@Test
public void test_AtxHeadingExplicitConflict1() {
testAtxHeadingExplicitConflict();
}
@Test
public void test_AtxHeadingExplicitConflict2() {
testAtxHeadingExplicitConflict();
testAtxHeadingExplicitConflict();
}
void testSetextHeadingExplicitConflict() {
assertMerged(
"Setext Heading\n" +
"==============\n" +
"\n" +
"[link](#setext-heading)\n" +
"\n" +
"Setext Heading {.setext-heading1}\n" +
"=================================\n" +
"\n" +
"[link](#setext-heading1)\n" +
"\n",
"Setext Heading\n" +
"=======\n" +
"[link](#setext-heading)\n" +
"\n",
"Setext Heading\n" +
"=======\n" +
"[link](#setext-heading)\n" +
"\n");
}
@Test
public void test_SetextHeadingExplicitConflict1() {
testSetextHeadingExplicitConflict();
}
@Test
public void test_SetextHeadingExplicitConflict2() {
testSetextHeadingExplicitConflict();
testSetextHeadingExplicitConflict();
}
private void testHtmlPreservation() {
assertMerged(
"# Heading {style=\"font-size: 26pt\"}\n" +
"\n" +
"\\<CUSTOMER_ADDRESS\\> {.addresse}\n" +
"\n" +
"<br />\n" +
"\n" +
"<br />\n" +
"\n" +
"[](http://example.com)\n" +
"\n",
"# Heading {style=\"font-size: 26pt\"}\n" +
"\n" +
"\\<CUSTOMER_ADDRESS\\>\n" +
"{.addresse}\n" +
"\n" +
"<br />\n" +
"\n",
"<br />\n" +
"\n" +
"[](http://example.com)\n" +
"\n"
);
}
@Test
public void testHtmlPreservation1() {
testHtmlPreservation();
}
@Test
public void testHtmlPreservation2() {
testHtmlPreservation();
testHtmlPreservation();
}
private void testHtmlPreservationLink() {
assertMerged(
"[](http://example.com)\n" +
"\n" +
"<br />\n" +
"\n" +
"<br />\n" +
"\n" +
"[](http://example.com)\n" +
"\n",
"[](http://example.com)\n" +
"\n" +
"<br />\n" +
"\n",
"<br />\n" +
"\n" +
"[](http://example.com)\n" +
"\n"
);
}
@Test
public void testHtmlPreservationLink1() {
testHtmlPreservationLink();
}
@Test
public void testHtmlPreservationLink2() {
testHtmlPreservationLink();
testHtmlPreservationLink();
}
}
|
|
package text2;
import java.io.IOException;
import java.io.InputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.util.LinkedList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Arrays;
import text.Zip;
import text.FileHelper;
public class Code
{
@FunctionalInterface
interface Handler
{
public void action(String[] en, String[] ch);
}
static void processZipFile(Handler handler) throws IOException {
Zip.readFile("iciba-1-txt.zip", (entry, inputStream) -> {
String path = entry.getName();
if (entry.isDirectory()) return true;
if (path.startsWith("__MACOSX/")) return true;
if (path.endsWith(".DS_Store")) return true;
if (path.endsWith("FILES.txt")) return true;
if (! path.endsWith(".txt")) return true;
processFile(inputStream, handler);
return true;
});
}
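// Assumes the input alternates an English line and its Chinese counterpart; stops at end of file.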
static void processFile(InputStream inputStream, Handler handler) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
while (true) {
String en1 = reader.readLine();
String ch1 = reader.readLine();
if (en1 == null || ch1 == null) break;
en1 = removeTags(en1, false);
ch1 = removeTags(ch1, true);
String[] en = splitEn(en1);
String[] ch = splitCh(ch1);
handler.action(en, ch);
}
}
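// Splits an English sentence on whitespace and lowercases every token.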
static String[] splitEn(String sen) {
String[] en = sen.split("\\s+");
for (int i = 0; i < en.length; ++ i) {
en[i] = en[i].toLowerCase();
}
return en;
/*
LinkedList<String> list = new LinkedList<>();
for (String e : en) {
e = e.trim();
int len = e.length();
if (len == 0) continue;
list.addAll(splitEn1(e));
}
return list.toArray(new String[0]);
*/
}
/*
static String frontChars = ",.-\"?";
static String endChars = ",.'-\"?!";
static HashSet<Character> frontCharSet = null;
static HashSet<Character> endCharSet = null;
static LinkedList<String> splitEn1(String en) {
if (frontCharSet == null) {
frontCharSet = new HashSet<Character>();
for (int i = 0; i < frontChars.length(); ++ i) {
frontCharSet.add(frontChars.charAt(i));
}
endCharSet = new HashSet<Character>();
for (int i = 0; i < endChars.length(); ++ i) {
endCharSet.add(endChars.charAt(i));
}
}
LinkedList<String> end = new LinkedList<>();
int len = en.length();
while (len > 0) {
if (! endCharSet.contains(en.charAt(len - 1))) break;
-- len;
end.addFirst("" + en.charAt(len));
en = en.substring(0, len);
}
LinkedList<String> front = new LinkedList<>();
while (len > 0) {
if (! frontCharSet.contains(en.charAt(0))) break;
-- len;
front.addLast("" + en.charAt(0));
en = en.substring(1, len + 1);
}
if (len > 0) front.addLast(en);
front.addAll(end);
return front;
}
*/
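// Splits a Chinese sentence into single characters, one token per character.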
static String[] splitCh(String sen) {
String[] ch = new String[sen.length()];
for (int i = 0; i < ch.length; ++ i) {
ch[i] = "" + sen.charAt(i);
}
return ch;
}
static String removeTags(String text, boolean removeSpaces) {
text = text.replaceAll("<[^>]+>", "");
if (removeSpaces) text = text.replaceAll(" ", "");
return text;
}
//////////////////////////////////////
public static void main(String... args) throws IOException {
System.err.println("Counting occurrences ...");
processZipFile(Code::countWords);
System.err.println("Assigning word IDs ...");
writeInfo();
System.err.println("Printing code ...");
System.out.println(sentenceCount);
processZipFile(Code::writeCode);
}
static int sentenceCount = 0;
static HashMap<String, Integer> enCounter = new HashMap<>();
static HashMap<String, Integer> chCounter = new HashMap<>();
static HashMap<String, Integer> enId = new HashMap<>();
static HashMap<String, Integer> chId = new HashMap<>();
static HashMap<String, Integer> enLenCounter = new HashMap<>();
static HashMap<String, Integer> chLenCounter = new HashMap<>();
static void increaseCount(HashMap<String, Integer> counter, String word) {
if (counter.containsKey(word)) {
counter.put(word, counter.get(word) + 1);
}
else {
counter.put(word, 1);
}
}
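// Counts token frequencies for both languages plus the distribution of sentence lengths,
// and tracks the total number of sentence pairs.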
static void countWords(String[] en, String[] ch) {
for (int i = 0; i < en.length; ++ i) increaseCount(enCounter, en[i]);
for (int i = 0; i < ch.length; ++ i) increaseCount(chCounter, ch[i]);
increaseCount(enLenCounter, "" + en.length);
increaseCount(chLenCounter, "" + ch.length);
++ sentenceCount;
}
static void writeInfo() throws IOException {
writeInfo(enCounter, "vocab.en.txt", enId);
System.err.println("#en = " + enId.size());
writeInfo(enLenCounter, "len.en.txt", null); // max=56
writeInfo(chCounter, "vocab.ch.txt", chId);
System.err.println("#ch = " + chId.size());
writeInfo(chLenCounter, "len.ch.txt", null); // max=188
}
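// Sorts tokens by descending frequency, optionally writes "token<TAB>count" lines,
// and assigns 1-based ids in frequency order when an id map is given.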
static void writeInfo(HashMap<String, Integer> counter, String filename, HashMap<String, Integer> id) throws IOException {
String[] key = counter.keySet().toArray(new String[0]);
Arrays.sort(key, new Comparator<String>() {
public int compare(String t1, String t2) {
return counter.get(t2) - counter.get(t1);
}
});
if (filename != null) {
// use try-with-resources so the file is closed even if a write fails
try (BufferedWriter writer = new BufferedWriter(new FileWriter(filename))) {
for (int i = 0; i < key.length; ++ i) {
writer.write(key[i] + "\t" + counter.get(key[i]) + "\n");
}
}
}
if (id != null) {
for (int i = 0; i < key.length; ++ i) {
id.put(key[i], i + 1);
}
}
}
static void writeCode(String[] en, String[] ch) {
writeCode1(en, enId);
writeCode1(ch, chId);
}
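// Prints one encoded sentence: its length followed by the id of each token.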
static void writeCode1(String[] words, HashMap<String, Integer> id) {
System.out.print(words.length);
for (int i = 0; i < words.length; ++ i) {
System.out.print(" " + id.get(words[i]));
//System.out.print(" " + words[i]);
}
System.out.println();
}
}
|
|
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.SchemaOrgTypeImpl;
import com.google.schemaorg.ValueType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.DateTime;
import com.google.schemaorg.core.datatype.Integer;
import com.google.schemaorg.core.datatype.Number;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.GoogConstants;
import com.google.schemaorg.goog.PopularityScoreSpecification;
/** Implementation of {@link VideoObject}. */
public class VideoObjectImpl extends MediaObjectImpl implements VideoObject {
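// Names of all schema.org core and Google extension properties that VideoObject accepts.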
private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet();
private static ImmutableSet<String> initializePropertySet() {
ImmutableSet.Builder<String> builder = ImmutableSet.builder();
builder.add(CoreConstants.PROPERTY_ABOUT);
builder.add(CoreConstants.PROPERTY_ACCESSIBILITY_API);
builder.add(CoreConstants.PROPERTY_ACCESSIBILITY_CONTROL);
builder.add(CoreConstants.PROPERTY_ACCESSIBILITY_FEATURE);
builder.add(CoreConstants.PROPERTY_ACCESSIBILITY_HAZARD);
builder.add(CoreConstants.PROPERTY_ACCOUNTABLE_PERSON);
builder.add(CoreConstants.PROPERTY_ACTOR);
builder.add(CoreConstants.PROPERTY_ACTORS);
builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE);
builder.add(CoreConstants.PROPERTY_AGGREGATE_RATING);
builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME);
builder.add(CoreConstants.PROPERTY_ALTERNATIVE_HEADLINE);
builder.add(CoreConstants.PROPERTY_ASSOCIATED_ARTICLE);
builder.add(CoreConstants.PROPERTY_ASSOCIATED_MEDIA);
builder.add(CoreConstants.PROPERTY_AUDIENCE);
builder.add(CoreConstants.PROPERTY_AUDIO);
builder.add(CoreConstants.PROPERTY_AUTHOR);
builder.add(CoreConstants.PROPERTY_AWARD);
builder.add(CoreConstants.PROPERTY_AWARDS);
builder.add(CoreConstants.PROPERTY_BITRATE);
builder.add(CoreConstants.PROPERTY_CAPTION);
builder.add(CoreConstants.PROPERTY_CHARACTER);
builder.add(CoreConstants.PROPERTY_CITATION);
builder.add(CoreConstants.PROPERTY_COMMENT);
builder.add(CoreConstants.PROPERTY_COMMENT_COUNT);
builder.add(CoreConstants.PROPERTY_CONTENT_LOCATION);
builder.add(CoreConstants.PROPERTY_CONTENT_RATING);
builder.add(CoreConstants.PROPERTY_CONTENT_SIZE);
builder.add(CoreConstants.PROPERTY_CONTENT_URL);
builder.add(CoreConstants.PROPERTY_CONTRIBUTOR);
builder.add(CoreConstants.PROPERTY_COPYRIGHT_HOLDER);
builder.add(CoreConstants.PROPERTY_COPYRIGHT_YEAR);
builder.add(CoreConstants.PROPERTY_CREATOR);
builder.add(CoreConstants.PROPERTY_DATE_CREATED);
builder.add(CoreConstants.PROPERTY_DATE_MODIFIED);
builder.add(CoreConstants.PROPERTY_DATE_PUBLISHED);
builder.add(CoreConstants.PROPERTY_DESCRIPTION);
builder.add(CoreConstants.PROPERTY_DIRECTOR);
builder.add(CoreConstants.PROPERTY_DIRECTORS);
builder.add(CoreConstants.PROPERTY_DISCUSSION_URL);
builder.add(CoreConstants.PROPERTY_DURATION);
builder.add(CoreConstants.PROPERTY_EDITOR);
builder.add(CoreConstants.PROPERTY_EDUCATIONAL_ALIGNMENT);
builder.add(CoreConstants.PROPERTY_EDUCATIONAL_USE);
builder.add(CoreConstants.PROPERTY_EMBED_URL);
builder.add(CoreConstants.PROPERTY_ENCODES_CREATIVE_WORK);
builder.add(CoreConstants.PROPERTY_ENCODING);
builder.add(CoreConstants.PROPERTY_ENCODING_FORMAT);
builder.add(CoreConstants.PROPERTY_ENCODINGS);
builder.add(CoreConstants.PROPERTY_EXAMPLE_OF_WORK);
builder.add(CoreConstants.PROPERTY_EXPIRES);
builder.add(CoreConstants.PROPERTY_FILE_FORMAT);
builder.add(CoreConstants.PROPERTY_GENRE);
builder.add(CoreConstants.PROPERTY_HAS_PART);
builder.add(CoreConstants.PROPERTY_HEADLINE);
builder.add(CoreConstants.PROPERTY_HEIGHT);
builder.add(CoreConstants.PROPERTY_IMAGE);
builder.add(CoreConstants.PROPERTY_IN_LANGUAGE);
builder.add(CoreConstants.PROPERTY_INTERACTION_STATISTIC);
builder.add(CoreConstants.PROPERTY_INTERACTIVITY_TYPE);
builder.add(CoreConstants.PROPERTY_IS_BASED_ON_URL);
builder.add(CoreConstants.PROPERTY_IS_FAMILY_FRIENDLY);
builder.add(CoreConstants.PROPERTY_IS_PART_OF);
builder.add(CoreConstants.PROPERTY_KEYWORDS);
builder.add(CoreConstants.PROPERTY_LEARNING_RESOURCE_TYPE);
builder.add(CoreConstants.PROPERTY_LICENSE);
builder.add(CoreConstants.PROPERTY_LOCATION_CREATED);
builder.add(CoreConstants.PROPERTY_MAIN_ENTITY);
builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE);
builder.add(CoreConstants.PROPERTY_MENTIONS);
builder.add(CoreConstants.PROPERTY_MUSIC_BY);
builder.add(CoreConstants.PROPERTY_NAME);
builder.add(CoreConstants.PROPERTY_OFFERS);
builder.add(CoreConstants.PROPERTY_PLAYER_TYPE);
builder.add(CoreConstants.PROPERTY_POSITION);
builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION);
builder.add(CoreConstants.PROPERTY_PRODUCER);
builder.add(CoreConstants.PROPERTY_PRODUCTION_COMPANY);
builder.add(CoreConstants.PROPERTY_PROVIDER);
builder.add(CoreConstants.PROPERTY_PUBLICATION);
builder.add(CoreConstants.PROPERTY_PUBLISHER);
builder.add(CoreConstants.PROPERTY_PUBLISHING_PRINCIPLES);
builder.add(CoreConstants.PROPERTY_RECORDED_AT);
builder.add(CoreConstants.PROPERTY_REGIONS_ALLOWED);
builder.add(CoreConstants.PROPERTY_RELEASED_EVENT);
builder.add(CoreConstants.PROPERTY_REQUIRES_SUBSCRIPTION);
builder.add(CoreConstants.PROPERTY_REVIEW);
builder.add(CoreConstants.PROPERTY_REVIEWS);
builder.add(CoreConstants.PROPERTY_SAME_AS);
builder.add(CoreConstants.PROPERTY_SCHEMA_VERSION);
builder.add(CoreConstants.PROPERTY_SOURCE_ORGANIZATION);
builder.add(CoreConstants.PROPERTY_TEXT);
builder.add(CoreConstants.PROPERTY_THUMBNAIL);
builder.add(CoreConstants.PROPERTY_THUMBNAIL_URL);
builder.add(CoreConstants.PROPERTY_TIME_REQUIRED);
builder.add(CoreConstants.PROPERTY_TRANSCRIPT);
builder.add(CoreConstants.PROPERTY_TRANSLATOR);
builder.add(CoreConstants.PROPERTY_TYPICAL_AGE_RANGE);
builder.add(CoreConstants.PROPERTY_UPLOAD_DATE);
builder.add(CoreConstants.PROPERTY_URL);
builder.add(CoreConstants.PROPERTY_VERSION);
builder.add(CoreConstants.PROPERTY_VIDEO);
builder.add(CoreConstants.PROPERTY_VIDEO_FRAME_SIZE);
builder.add(CoreConstants.PROPERTY_VIDEO_QUALITY);
builder.add(CoreConstants.PROPERTY_WIDTH);
builder.add(CoreConstants.PROPERTY_WORK_EXAMPLE);
builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION);
builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE);
return builder.build();
}
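// The builder exposes typed overloads for every supported property plus a String overload
// that wraps the raw value as schema.org Text.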
static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<VideoObject.Builder>
implements VideoObject.Builder {
@Override
public VideoObject.Builder addAbout(Thing value) {
return addProperty(CoreConstants.PROPERTY_ABOUT, value);
}
@Override
public VideoObject.Builder addAbout(Thing.Builder value) {
return addProperty(CoreConstants.PROPERTY_ABOUT, value.build());
}
@Override
public VideoObject.Builder addAbout(String value) {
return addProperty(CoreConstants.PROPERTY_ABOUT, Text.of(value));
}
@Override
public VideoObject.Builder addAccessibilityAPI(Text value) {
return addProperty(CoreConstants.PROPERTY_ACCESSIBILITY_API, value);
}
@Override
public VideoObject.Builder addAccessibilityAPI(String value) {
return addProperty(CoreConstants.PROPERTY_ACCESSIBILITY_API, Text.of(value));
}
@Override
public VideoObject.Builder addAccessibilityControl(Text value) {
return addProperty(CoreConstants.PROPERTY_ACCESSIBILITY_CONTROL, value);
}
@Override
public VideoObject.Builder addAccessibilityControl(String value) {
return addProperty(CoreConstants.PROPERTY_ACCESSIBILITY_CONTROL, Text.of(value));
}
@Override
public VideoObject.Builder addAccessibilityFeature(Text value) {
return addProperty(CoreConstants.PROPERTY_ACCESSIBILITY_FEATURE, value);
}
@Override
public VideoObject.Builder addAccessibilityFeature(String value) {
return addProperty(CoreConstants.PROPERTY_ACCESSIBILITY_FEATURE, Text.of(value));
}
@Override
public VideoObject.Builder addAccessibilityHazard(Text value) {
return addProperty(CoreConstants.PROPERTY_ACCESSIBILITY_HAZARD, value);
}
@Override
public VideoObject.Builder addAccessibilityHazard(String value) {
return addProperty(CoreConstants.PROPERTY_ACCESSIBILITY_HAZARD, Text.of(value));
}
@Override
public VideoObject.Builder addAccountablePerson(Person value) {
return addProperty(CoreConstants.PROPERTY_ACCOUNTABLE_PERSON, value);
}
@Override
public VideoObject.Builder addAccountablePerson(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_ACCOUNTABLE_PERSON, value.build());
}
@Override
public VideoObject.Builder addAccountablePerson(String value) {
return addProperty(CoreConstants.PROPERTY_ACCOUNTABLE_PERSON, Text.of(value));
}
@Override
public VideoObject.Builder addActor(Person value) {
return addProperty(CoreConstants.PROPERTY_ACTOR, value);
}
@Override
public VideoObject.Builder addActor(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_ACTOR, value.build());
}
@Override
public VideoObject.Builder addActor(String value) {
return addProperty(CoreConstants.PROPERTY_ACTOR, Text.of(value));
}
@Override
public VideoObject.Builder addActors(Person value) {
return addProperty(CoreConstants.PROPERTY_ACTORS, value);
}
@Override
public VideoObject.Builder addActors(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_ACTORS, value.build());
}
@Override
public VideoObject.Builder addActors(String value) {
return addProperty(CoreConstants.PROPERTY_ACTORS, Text.of(value));
}
@Override
public VideoObject.Builder addAdditionalType(URL value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value);
}
@Override
public VideoObject.Builder addAdditionalType(String value) {
return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value));
}
@Override
public VideoObject.Builder addAggregateRating(AggregateRating value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value);
}
@Override
public VideoObject.Builder addAggregateRating(AggregateRating.Builder value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value.build());
}
@Override
public VideoObject.Builder addAggregateRating(String value) {
return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, Text.of(value));
}
@Override
public VideoObject.Builder addAlternateName(Text value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value);
}
@Override
public VideoObject.Builder addAlternateName(String value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value));
}
@Override
public VideoObject.Builder addAlternativeHeadline(Text value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATIVE_HEADLINE, value);
}
@Override
public VideoObject.Builder addAlternativeHeadline(String value) {
return addProperty(CoreConstants.PROPERTY_ALTERNATIVE_HEADLINE, Text.of(value));
}
@Override
public VideoObject.Builder addAssociatedArticle(NewsArticle value) {
return addProperty(CoreConstants.PROPERTY_ASSOCIATED_ARTICLE, value);
}
@Override
public VideoObject.Builder addAssociatedArticle(NewsArticle.Builder value) {
return addProperty(CoreConstants.PROPERTY_ASSOCIATED_ARTICLE, value.build());
}
@Override
public VideoObject.Builder addAssociatedArticle(String value) {
return addProperty(CoreConstants.PROPERTY_ASSOCIATED_ARTICLE, Text.of(value));
}
@Override
public VideoObject.Builder addAssociatedMedia(MediaObject value) {
return addProperty(CoreConstants.PROPERTY_ASSOCIATED_MEDIA, value);
}
@Override
public VideoObject.Builder addAssociatedMedia(MediaObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_ASSOCIATED_MEDIA, value.build());
}
@Override
public VideoObject.Builder addAssociatedMedia(String value) {
return addProperty(CoreConstants.PROPERTY_ASSOCIATED_MEDIA, Text.of(value));
}
@Override
public VideoObject.Builder addAudience(Audience value) {
return addProperty(CoreConstants.PROPERTY_AUDIENCE, value);
}
@Override
public VideoObject.Builder addAudience(Audience.Builder value) {
return addProperty(CoreConstants.PROPERTY_AUDIENCE, value.build());
}
@Override
public VideoObject.Builder addAudience(String value) {
return addProperty(CoreConstants.PROPERTY_AUDIENCE, Text.of(value));
}
@Override
public VideoObject.Builder addAudio(AudioObject value) {
return addProperty(CoreConstants.PROPERTY_AUDIO, value);
}
@Override
public VideoObject.Builder addAudio(AudioObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_AUDIO, value.build());
}
@Override
public VideoObject.Builder addAudio(String value) {
return addProperty(CoreConstants.PROPERTY_AUDIO, Text.of(value));
}
@Override
public VideoObject.Builder addAuthor(Organization value) {
return addProperty(CoreConstants.PROPERTY_AUTHOR, value);
}
@Override
public VideoObject.Builder addAuthor(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_AUTHOR, value.build());
}
@Override
public VideoObject.Builder addAuthor(Person value) {
return addProperty(CoreConstants.PROPERTY_AUTHOR, value);
}
@Override
public VideoObject.Builder addAuthor(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_AUTHOR, value.build());
}
@Override
public VideoObject.Builder addAuthor(String value) {
return addProperty(CoreConstants.PROPERTY_AUTHOR, Text.of(value));
}
@Override
public VideoObject.Builder addAward(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARD, value);
}
@Override
public VideoObject.Builder addAward(String value) {
return addProperty(CoreConstants.PROPERTY_AWARD, Text.of(value));
}
@Override
public VideoObject.Builder addAwards(Text value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, value);
}
@Override
public VideoObject.Builder addAwards(String value) {
return addProperty(CoreConstants.PROPERTY_AWARDS, Text.of(value));
}
@Override
public VideoObject.Builder addBitrate(Text value) {
return addProperty(CoreConstants.PROPERTY_BITRATE, value);
}
@Override
public VideoObject.Builder addBitrate(String value) {
return addProperty(CoreConstants.PROPERTY_BITRATE, Text.of(value));
}
@Override
public VideoObject.Builder addCaption(Text value) {
return addProperty(CoreConstants.PROPERTY_CAPTION, value);
}
@Override
public VideoObject.Builder addCaption(String value) {
return addProperty(CoreConstants.PROPERTY_CAPTION, Text.of(value));
}
@Override
public VideoObject.Builder addCharacter(Person value) {
return addProperty(CoreConstants.PROPERTY_CHARACTER, value);
}
@Override
public VideoObject.Builder addCharacter(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_CHARACTER, value.build());
}
@Override
public VideoObject.Builder addCharacter(String value) {
return addProperty(CoreConstants.PROPERTY_CHARACTER, Text.of(value));
}
@Override
public VideoObject.Builder addCitation(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_CITATION, value);
}
@Override
public VideoObject.Builder addCitation(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_CITATION, value.build());
}
@Override
public VideoObject.Builder addCitation(Text value) {
return addProperty(CoreConstants.PROPERTY_CITATION, value);
}
@Override
public VideoObject.Builder addCitation(String value) {
return addProperty(CoreConstants.PROPERTY_CITATION, Text.of(value));
}
@Override
public VideoObject.Builder addComment(Comment value) {
return addProperty(CoreConstants.PROPERTY_COMMENT, value);
}
@Override
public VideoObject.Builder addComment(Comment.Builder value) {
return addProperty(CoreConstants.PROPERTY_COMMENT, value.build());
}
@Override
public VideoObject.Builder addComment(String value) {
return addProperty(CoreConstants.PROPERTY_COMMENT, Text.of(value));
}
@Override
public VideoObject.Builder addCommentCount(Integer value) {
return addProperty(CoreConstants.PROPERTY_COMMENT_COUNT, value);
}
@Override
public VideoObject.Builder addCommentCount(String value) {
return addProperty(CoreConstants.PROPERTY_COMMENT_COUNT, Text.of(value));
}
@Override
public VideoObject.Builder addContentLocation(Place value) {
return addProperty(CoreConstants.PROPERTY_CONTENT_LOCATION, value);
}
@Override
public VideoObject.Builder addContentLocation(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTENT_LOCATION, value.build());
}
@Override
public VideoObject.Builder addContentLocation(String value) {
return addProperty(CoreConstants.PROPERTY_CONTENT_LOCATION, Text.of(value));
}
@Override
public VideoObject.Builder addContentRating(Text value) {
return addProperty(CoreConstants.PROPERTY_CONTENT_RATING, value);
}
@Override
public VideoObject.Builder addContentRating(String value) {
return addProperty(CoreConstants.PROPERTY_CONTENT_RATING, Text.of(value));
}
@Override
public VideoObject.Builder addContentSize(Text value) {
return addProperty(CoreConstants.PROPERTY_CONTENT_SIZE, value);
}
@Override
public VideoObject.Builder addContentSize(String value) {
return addProperty(CoreConstants.PROPERTY_CONTENT_SIZE, Text.of(value));
}
@Override
public VideoObject.Builder addContentUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_CONTENT_URL, value);
}
@Override
public VideoObject.Builder addContentUrl(String value) {
return addProperty(CoreConstants.PROPERTY_CONTENT_URL, Text.of(value));
}
@Override
public VideoObject.Builder addContributor(Organization value) {
return addProperty(CoreConstants.PROPERTY_CONTRIBUTOR, value);
}
@Override
public VideoObject.Builder addContributor(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTRIBUTOR, value.build());
}
@Override
public VideoObject.Builder addContributor(Person value) {
return addProperty(CoreConstants.PROPERTY_CONTRIBUTOR, value);
}
@Override
public VideoObject.Builder addContributor(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_CONTRIBUTOR, value.build());
}
@Override
public VideoObject.Builder addContributor(String value) {
return addProperty(CoreConstants.PROPERTY_CONTRIBUTOR, Text.of(value));
}
@Override
public VideoObject.Builder addCopyrightHolder(Organization value) {
return addProperty(CoreConstants.PROPERTY_COPYRIGHT_HOLDER, value);
}
@Override
public VideoObject.Builder addCopyrightHolder(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_COPYRIGHT_HOLDER, value.build());
}
@Override
public VideoObject.Builder addCopyrightHolder(Person value) {
return addProperty(CoreConstants.PROPERTY_COPYRIGHT_HOLDER, value);
}
@Override
public VideoObject.Builder addCopyrightHolder(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_COPYRIGHT_HOLDER, value.build());
}
@Override
public VideoObject.Builder addCopyrightHolder(String value) {
return addProperty(CoreConstants.PROPERTY_COPYRIGHT_HOLDER, Text.of(value));
}
@Override
public VideoObject.Builder addCopyrightYear(Number value) {
return addProperty(CoreConstants.PROPERTY_COPYRIGHT_YEAR, value);
}
@Override
public VideoObject.Builder addCopyrightYear(String value) {
return addProperty(CoreConstants.PROPERTY_COPYRIGHT_YEAR, Text.of(value));
}
@Override
public VideoObject.Builder addCreator(Organization value) {
return addProperty(CoreConstants.PROPERTY_CREATOR, value);
}
@Override
public VideoObject.Builder addCreator(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_CREATOR, value.build());
}
@Override
public VideoObject.Builder addCreator(Person value) {
return addProperty(CoreConstants.PROPERTY_CREATOR, value);
}
@Override
public VideoObject.Builder addCreator(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_CREATOR, value.build());
}
@Override
public VideoObject.Builder addCreator(String value) {
return addProperty(CoreConstants.PROPERTY_CREATOR, Text.of(value));
}
@Override
public VideoObject.Builder addDateCreated(Date value) {
return addProperty(CoreConstants.PROPERTY_DATE_CREATED, value);
}
@Override
public VideoObject.Builder addDateCreated(DateTime value) {
return addProperty(CoreConstants.PROPERTY_DATE_CREATED, value);
}
@Override
public VideoObject.Builder addDateCreated(String value) {
return addProperty(CoreConstants.PROPERTY_DATE_CREATED, Text.of(value));
}
@Override
public VideoObject.Builder addDateModified(Date value) {
return addProperty(CoreConstants.PROPERTY_DATE_MODIFIED, value);
}
@Override
public VideoObject.Builder addDateModified(DateTime value) {
return addProperty(CoreConstants.PROPERTY_DATE_MODIFIED, value);
}
@Override
public VideoObject.Builder addDateModified(String value) {
return addProperty(CoreConstants.PROPERTY_DATE_MODIFIED, Text.of(value));
}
@Override
public VideoObject.Builder addDatePublished(Date value) {
return addProperty(CoreConstants.PROPERTY_DATE_PUBLISHED, value);
}
@Override
public VideoObject.Builder addDatePublished(String value) {
return addProperty(CoreConstants.PROPERTY_DATE_PUBLISHED, Text.of(value));
}
@Override
public VideoObject.Builder addDescription(Text value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value);
}
@Override
public VideoObject.Builder addDescription(String value) {
return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value));
}
@Override
public VideoObject.Builder addDirector(Person value) {
return addProperty(CoreConstants.PROPERTY_DIRECTOR, value);
}
@Override
public VideoObject.Builder addDirector(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_DIRECTOR, value.build());
}
@Override
public VideoObject.Builder addDirector(String value) {
return addProperty(CoreConstants.PROPERTY_DIRECTOR, Text.of(value));
}
@Override
public VideoObject.Builder addDirectors(Person value) {
return addProperty(CoreConstants.PROPERTY_DIRECTORS, value);
}
@Override
public VideoObject.Builder addDirectors(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_DIRECTORS, value.build());
}
@Override
public VideoObject.Builder addDirectors(String value) {
return addProperty(CoreConstants.PROPERTY_DIRECTORS, Text.of(value));
}
@Override
public VideoObject.Builder addDiscussionUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_DISCUSSION_URL, value);
}
@Override
public VideoObject.Builder addDiscussionUrl(String value) {
return addProperty(CoreConstants.PROPERTY_DISCUSSION_URL, Text.of(value));
}
@Override
public VideoObject.Builder addDuration(Duration value) {
return addProperty(CoreConstants.PROPERTY_DURATION, value);
}
@Override
public VideoObject.Builder addDuration(Duration.Builder value) {
return addProperty(CoreConstants.PROPERTY_DURATION, value.build());
}
@Override
public VideoObject.Builder addDuration(String value) {
return addProperty(CoreConstants.PROPERTY_DURATION, Text.of(value));
}
@Override
public VideoObject.Builder addEditor(Person value) {
return addProperty(CoreConstants.PROPERTY_EDITOR, value);
}
@Override
public VideoObject.Builder addEditor(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_EDITOR, value.build());
}
@Override
public VideoObject.Builder addEditor(String value) {
return addProperty(CoreConstants.PROPERTY_EDITOR, Text.of(value));
}
@Override
public VideoObject.Builder addEducationalAlignment(AlignmentObject value) {
return addProperty(CoreConstants.PROPERTY_EDUCATIONAL_ALIGNMENT, value);
}
@Override
public VideoObject.Builder addEducationalAlignment(AlignmentObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_EDUCATIONAL_ALIGNMENT, value.build());
}
@Override
public VideoObject.Builder addEducationalAlignment(String value) {
return addProperty(CoreConstants.PROPERTY_EDUCATIONAL_ALIGNMENT, Text.of(value));
}
@Override
public VideoObject.Builder addEducationalUse(Text value) {
return addProperty(CoreConstants.PROPERTY_EDUCATIONAL_USE, value);
}
@Override
public VideoObject.Builder addEducationalUse(String value) {
return addProperty(CoreConstants.PROPERTY_EDUCATIONAL_USE, Text.of(value));
}
@Override
public VideoObject.Builder addEmbedUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_EMBED_URL, value);
}
@Override
public VideoObject.Builder addEmbedUrl(String value) {
return addProperty(CoreConstants.PROPERTY_EMBED_URL, Text.of(value));
}
@Override
public VideoObject.Builder addEncodesCreativeWork(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_ENCODES_CREATIVE_WORK, value);
}
@Override
public VideoObject.Builder addEncodesCreativeWork(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_ENCODES_CREATIVE_WORK, value.build());
}
@Override
public VideoObject.Builder addEncodesCreativeWork(String value) {
return addProperty(CoreConstants.PROPERTY_ENCODES_CREATIVE_WORK, Text.of(value));
}
@Override
public VideoObject.Builder addEncoding(MediaObject value) {
return addProperty(CoreConstants.PROPERTY_ENCODING, value);
}
@Override
public VideoObject.Builder addEncoding(MediaObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_ENCODING, value.build());
}
@Override
public VideoObject.Builder addEncoding(String value) {
return addProperty(CoreConstants.PROPERTY_ENCODING, Text.of(value));
}
@Override
public VideoObject.Builder addEncodingFormat(Text value) {
return addProperty(CoreConstants.PROPERTY_ENCODING_FORMAT, value);
}
@Override
public VideoObject.Builder addEncodingFormat(String value) {
return addProperty(CoreConstants.PROPERTY_ENCODING_FORMAT, Text.of(value));
}
@Override
public VideoObject.Builder addEncodings(MediaObject value) {
return addProperty(CoreConstants.PROPERTY_ENCODINGS, value);
}
@Override
public VideoObject.Builder addEncodings(MediaObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_ENCODINGS, value.build());
}
@Override
public VideoObject.Builder addEncodings(String value) {
return addProperty(CoreConstants.PROPERTY_ENCODINGS, Text.of(value));
}
@Override
public VideoObject.Builder addExampleOfWork(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_EXAMPLE_OF_WORK, value);
}
@Override
public VideoObject.Builder addExampleOfWork(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_EXAMPLE_OF_WORK, value.build());
}
@Override
public VideoObject.Builder addExampleOfWork(String value) {
return addProperty(CoreConstants.PROPERTY_EXAMPLE_OF_WORK, Text.of(value));
}
@Override
public VideoObject.Builder addExpires(Date value) {
return addProperty(CoreConstants.PROPERTY_EXPIRES, value);
}
@Override
public VideoObject.Builder addExpires(String value) {
return addProperty(CoreConstants.PROPERTY_EXPIRES, Text.of(value));
}
@Override
public VideoObject.Builder addFileFormat(Text value) {
return addProperty(CoreConstants.PROPERTY_FILE_FORMAT, value);
}
@Override
public VideoObject.Builder addFileFormat(String value) {
return addProperty(CoreConstants.PROPERTY_FILE_FORMAT, Text.of(value));
}
@Override
public VideoObject.Builder addGenre(Text value) {
return addProperty(CoreConstants.PROPERTY_GENRE, value);
}
@Override
public VideoObject.Builder addGenre(URL value) {
return addProperty(CoreConstants.PROPERTY_GENRE, value);
}
@Override
public VideoObject.Builder addGenre(String value) {
return addProperty(CoreConstants.PROPERTY_GENRE, Text.of(value));
}
@Override
public VideoObject.Builder addHasPart(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_HAS_PART, value);
}
@Override
public VideoObject.Builder addHasPart(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_HAS_PART, value.build());
}
@Override
public VideoObject.Builder addHasPart(String value) {
return addProperty(CoreConstants.PROPERTY_HAS_PART, Text.of(value));
}
@Override
public VideoObject.Builder addHeadline(Text value) {
return addProperty(CoreConstants.PROPERTY_HEADLINE, value);
}
@Override
public VideoObject.Builder addHeadline(String value) {
return addProperty(CoreConstants.PROPERTY_HEADLINE, Text.of(value));
}
@Override
public VideoObject.Builder addHeight(Distance value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, value);
}
@Override
public VideoObject.Builder addHeight(Distance.Builder value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, value.build());
}
@Override
public VideoObject.Builder addHeight(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, value);
}
@Override
public VideoObject.Builder addHeight(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, value.build());
}
@Override
public VideoObject.Builder addHeight(String value) {
return addProperty(CoreConstants.PROPERTY_HEIGHT, Text.of(value));
}
@Override
public VideoObject.Builder addImage(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public VideoObject.Builder addImage(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value.build());
}
@Override
public VideoObject.Builder addImage(URL value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, value);
}
@Override
public VideoObject.Builder addImage(String value) {
return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value));
}
@Override
public VideoObject.Builder addInLanguage(Language value) {
return addProperty(CoreConstants.PROPERTY_IN_LANGUAGE, value);
}
@Override
public VideoObject.Builder addInLanguage(Language.Builder value) {
return addProperty(CoreConstants.PROPERTY_IN_LANGUAGE, value.build());
}
@Override
public VideoObject.Builder addInLanguage(Text value) {
return addProperty(CoreConstants.PROPERTY_IN_LANGUAGE, value);
}
@Override
public VideoObject.Builder addInLanguage(String value) {
return addProperty(CoreConstants.PROPERTY_IN_LANGUAGE, Text.of(value));
}
@Override
public VideoObject.Builder addInteractionStatistic(InteractionCounter value) {
return addProperty(CoreConstants.PROPERTY_INTERACTION_STATISTIC, value);
}
@Override
public VideoObject.Builder addInteractionStatistic(InteractionCounter.Builder value) {
return addProperty(CoreConstants.PROPERTY_INTERACTION_STATISTIC, value.build());
}
@Override
public VideoObject.Builder addInteractionStatistic(String value) {
return addProperty(CoreConstants.PROPERTY_INTERACTION_STATISTIC, Text.of(value));
}
@Override
public VideoObject.Builder addInteractivityType(Text value) {
return addProperty(CoreConstants.PROPERTY_INTERACTIVITY_TYPE, value);
}
@Override
public VideoObject.Builder addInteractivityType(String value) {
return addProperty(CoreConstants.PROPERTY_INTERACTIVITY_TYPE, Text.of(value));
}
@Override
public VideoObject.Builder addIsBasedOnUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_IS_BASED_ON_URL, value);
}
@Override
public VideoObject.Builder addIsBasedOnUrl(String value) {
return addProperty(CoreConstants.PROPERTY_IS_BASED_ON_URL, Text.of(value));
}
@Override
public VideoObject.Builder addIsFamilyFriendly(Boolean value) {
return addProperty(CoreConstants.PROPERTY_IS_FAMILY_FRIENDLY, value);
}
@Override
public VideoObject.Builder addIsFamilyFriendly(String value) {
return addProperty(CoreConstants.PROPERTY_IS_FAMILY_FRIENDLY, Text.of(value));
}
@Override
public VideoObject.Builder addIsPartOf(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_IS_PART_OF, value);
}
@Override
public VideoObject.Builder addIsPartOf(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_IS_PART_OF, value.build());
}
@Override
public VideoObject.Builder addIsPartOf(String value) {
return addProperty(CoreConstants.PROPERTY_IS_PART_OF, Text.of(value));
}
@Override
public VideoObject.Builder addKeywords(Text value) {
return addProperty(CoreConstants.PROPERTY_KEYWORDS, value);
}
@Override
public VideoObject.Builder addKeywords(String value) {
return addProperty(CoreConstants.PROPERTY_KEYWORDS, Text.of(value));
}
@Override
public VideoObject.Builder addLearningResourceType(Text value) {
return addProperty(CoreConstants.PROPERTY_LEARNING_RESOURCE_TYPE, value);
}
@Override
public VideoObject.Builder addLearningResourceType(String value) {
return addProperty(CoreConstants.PROPERTY_LEARNING_RESOURCE_TYPE, Text.of(value));
}
@Override
public VideoObject.Builder addLicense(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_LICENSE, value);
}
@Override
public VideoObject.Builder addLicense(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_LICENSE, value.build());
}
@Override
public VideoObject.Builder addLicense(URL value) {
return addProperty(CoreConstants.PROPERTY_LICENSE, value);
}
@Override
public VideoObject.Builder addLicense(String value) {
return addProperty(CoreConstants.PROPERTY_LICENSE, Text.of(value));
}
@Override
public VideoObject.Builder addLocationCreated(Place value) {
return addProperty(CoreConstants.PROPERTY_LOCATION_CREATED, value);
}
@Override
public VideoObject.Builder addLocationCreated(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_LOCATION_CREATED, value.build());
}
@Override
public VideoObject.Builder addLocationCreated(String value) {
return addProperty(CoreConstants.PROPERTY_LOCATION_CREATED, Text.of(value));
}
@Override
public VideoObject.Builder addMainEntity(Thing value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY, value);
}
@Override
public VideoObject.Builder addMainEntity(Thing.Builder value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY, value.build());
}
@Override
public VideoObject.Builder addMainEntity(String value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY, Text.of(value));
}
@Override
public VideoObject.Builder addMainEntityOfPage(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public VideoObject.Builder addMainEntityOfPage(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build());
}
@Override
public VideoObject.Builder addMainEntityOfPage(URL value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value);
}
@Override
public VideoObject.Builder addMainEntityOfPage(String value) {
return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value));
}
@Override
public VideoObject.Builder addMentions(Thing value) {
return addProperty(CoreConstants.PROPERTY_MENTIONS, value);
}
@Override
public VideoObject.Builder addMentions(Thing.Builder value) {
return addProperty(CoreConstants.PROPERTY_MENTIONS, value.build());
}
@Override
public VideoObject.Builder addMentions(String value) {
return addProperty(CoreConstants.PROPERTY_MENTIONS, Text.of(value));
}
@Override
public VideoObject.Builder addMusicBy(MusicGroup value) {
return addProperty(CoreConstants.PROPERTY_MUSIC_BY, value);
}
@Override
public VideoObject.Builder addMusicBy(MusicGroup.Builder value) {
return addProperty(CoreConstants.PROPERTY_MUSIC_BY, value.build());
}
@Override
public VideoObject.Builder addMusicBy(Person value) {
return addProperty(CoreConstants.PROPERTY_MUSIC_BY, value);
}
@Override
public VideoObject.Builder addMusicBy(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_MUSIC_BY, value.build());
}
@Override
public VideoObject.Builder addMusicBy(String value) {
return addProperty(CoreConstants.PROPERTY_MUSIC_BY, Text.of(value));
}
@Override
public VideoObject.Builder addName(Text value) {
return addProperty(CoreConstants.PROPERTY_NAME, value);
}
@Override
public VideoObject.Builder addName(String value) {
return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value));
}
@Override
public VideoObject.Builder addOffers(Offer value) {
return addProperty(CoreConstants.PROPERTY_OFFERS, value);
}
@Override
public VideoObject.Builder addOffers(Offer.Builder value) {
return addProperty(CoreConstants.PROPERTY_OFFERS, value.build());
}
@Override
public VideoObject.Builder addOffers(String value) {
return addProperty(CoreConstants.PROPERTY_OFFERS, Text.of(value));
}
@Override
public VideoObject.Builder addPlayerType(Text value) {
return addProperty(CoreConstants.PROPERTY_PLAYER_TYPE, value);
}
@Override
public VideoObject.Builder addPlayerType(String value) {
return addProperty(CoreConstants.PROPERTY_PLAYER_TYPE, Text.of(value));
}
@Override
public VideoObject.Builder addPosition(Integer value) {
return addProperty(CoreConstants.PROPERTY_POSITION, value);
}
@Override
public VideoObject.Builder addPosition(Text value) {
return addProperty(CoreConstants.PROPERTY_POSITION, value);
}
@Override
public VideoObject.Builder addPosition(String value) {
return addProperty(CoreConstants.PROPERTY_POSITION, Text.of(value));
}
@Override
public VideoObject.Builder addPotentialAction(Action value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value);
}
@Override
public VideoObject.Builder addPotentialAction(Action.Builder value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build());
}
@Override
public VideoObject.Builder addPotentialAction(String value) {
return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value));
}
@Override
public VideoObject.Builder addProducer(Organization value) {
return addProperty(CoreConstants.PROPERTY_PRODUCER, value);
}
@Override
public VideoObject.Builder addProducer(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_PRODUCER, value.build());
}
@Override
public VideoObject.Builder addProducer(Person value) {
return addProperty(CoreConstants.PROPERTY_PRODUCER, value);
}
@Override
public VideoObject.Builder addProducer(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_PRODUCER, value.build());
}
@Override
public VideoObject.Builder addProducer(String value) {
return addProperty(CoreConstants.PROPERTY_PRODUCER, Text.of(value));
}
@Override
public VideoObject.Builder addProductionCompany(Organization value) {
return addProperty(CoreConstants.PROPERTY_PRODUCTION_COMPANY, value);
}
@Override
public VideoObject.Builder addProductionCompany(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_PRODUCTION_COMPANY, value.build());
}
@Override
public VideoObject.Builder addProductionCompany(String value) {
return addProperty(CoreConstants.PROPERTY_PRODUCTION_COMPANY, Text.of(value));
}
@Override
public VideoObject.Builder addProvider(Organization value) {
return addProperty(CoreConstants.PROPERTY_PROVIDER, value);
}
@Override
public VideoObject.Builder addProvider(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_PROVIDER, value.build());
}
@Override
public VideoObject.Builder addProvider(Person value) {
return addProperty(CoreConstants.PROPERTY_PROVIDER, value);
}
@Override
public VideoObject.Builder addProvider(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_PROVIDER, value.build());
}
@Override
public VideoObject.Builder addProvider(String value) {
return addProperty(CoreConstants.PROPERTY_PROVIDER, Text.of(value));
}
@Override
public VideoObject.Builder addPublication(PublicationEvent value) {
return addProperty(CoreConstants.PROPERTY_PUBLICATION, value);
}
@Override
public VideoObject.Builder addPublication(PublicationEvent.Builder value) {
return addProperty(CoreConstants.PROPERTY_PUBLICATION, value.build());
}
@Override
public VideoObject.Builder addPublication(String value) {
return addProperty(CoreConstants.PROPERTY_PUBLICATION, Text.of(value));
}
@Override
public VideoObject.Builder addPublisher(Organization value) {
return addProperty(CoreConstants.PROPERTY_PUBLISHER, value);
}
@Override
public VideoObject.Builder addPublisher(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_PUBLISHER, value.build());
}
@Override
public VideoObject.Builder addPublisher(Person value) {
return addProperty(CoreConstants.PROPERTY_PUBLISHER, value);
}
@Override
public VideoObject.Builder addPublisher(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_PUBLISHER, value.build());
}
@Override
public VideoObject.Builder addPublisher(String value) {
return addProperty(CoreConstants.PROPERTY_PUBLISHER, Text.of(value));
}
@Override
public VideoObject.Builder addPublishingPrinciples(URL value) {
return addProperty(CoreConstants.PROPERTY_PUBLISHING_PRINCIPLES, value);
}
@Override
public VideoObject.Builder addPublishingPrinciples(String value) {
return addProperty(CoreConstants.PROPERTY_PUBLISHING_PRINCIPLES, Text.of(value));
}
@Override
public VideoObject.Builder addRecordedAt(Event value) {
return addProperty(CoreConstants.PROPERTY_RECORDED_AT, value);
}
@Override
public VideoObject.Builder addRecordedAt(Event.Builder value) {
return addProperty(CoreConstants.PROPERTY_RECORDED_AT, value.build());
}
@Override
public VideoObject.Builder addRecordedAt(String value) {
return addProperty(CoreConstants.PROPERTY_RECORDED_AT, Text.of(value));
}
@Override
public VideoObject.Builder addRegionsAllowed(Place value) {
return addProperty(CoreConstants.PROPERTY_REGIONS_ALLOWED, value);
}
@Override
public VideoObject.Builder addRegionsAllowed(Place.Builder value) {
return addProperty(CoreConstants.PROPERTY_REGIONS_ALLOWED, value.build());
}
@Override
public VideoObject.Builder addRegionsAllowed(String value) {
return addProperty(CoreConstants.PROPERTY_REGIONS_ALLOWED, Text.of(value));
}
@Override
public VideoObject.Builder addReleasedEvent(PublicationEvent value) {
return addProperty(CoreConstants.PROPERTY_RELEASED_EVENT, value);
}
@Override
public VideoObject.Builder addReleasedEvent(PublicationEvent.Builder value) {
return addProperty(CoreConstants.PROPERTY_RELEASED_EVENT, value.build());
}
@Override
public VideoObject.Builder addReleasedEvent(String value) {
return addProperty(CoreConstants.PROPERTY_RELEASED_EVENT, Text.of(value));
}
@Override
public VideoObject.Builder addRequiresSubscription(Boolean value) {
return addProperty(CoreConstants.PROPERTY_REQUIRES_SUBSCRIPTION, value);
}
@Override
public VideoObject.Builder addRequiresSubscription(String value) {
return addProperty(CoreConstants.PROPERTY_REQUIRES_SUBSCRIPTION, Text.of(value));
}
@Override
public VideoObject.Builder addReview(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value);
}
@Override
public VideoObject.Builder addReview(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, value.build());
}
@Override
public VideoObject.Builder addReview(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEW, Text.of(value));
}
@Override
public VideoObject.Builder addReviews(Review value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value);
}
@Override
public VideoObject.Builder addReviews(Review.Builder value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, value.build());
}
@Override
public VideoObject.Builder addReviews(String value) {
return addProperty(CoreConstants.PROPERTY_REVIEWS, Text.of(value));
}
@Override
public VideoObject.Builder addSameAs(URL value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, value);
}
@Override
public VideoObject.Builder addSameAs(String value) {
return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value));
}
@Override
public VideoObject.Builder addSchemaVersion(Text value) {
return addProperty(CoreConstants.PROPERTY_SCHEMA_VERSION, value);
}
@Override
public VideoObject.Builder addSchemaVersion(URL value) {
return addProperty(CoreConstants.PROPERTY_SCHEMA_VERSION, value);
}
@Override
public VideoObject.Builder addSchemaVersion(String value) {
return addProperty(CoreConstants.PROPERTY_SCHEMA_VERSION, Text.of(value));
}
@Override
public VideoObject.Builder addSourceOrganization(Organization value) {
return addProperty(CoreConstants.PROPERTY_SOURCE_ORGANIZATION, value);
}
@Override
public VideoObject.Builder addSourceOrganization(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_SOURCE_ORGANIZATION, value.build());
}
@Override
public VideoObject.Builder addSourceOrganization(String value) {
return addProperty(CoreConstants.PROPERTY_SOURCE_ORGANIZATION, Text.of(value));
}
@Override
public VideoObject.Builder addText(Text value) {
return addProperty(CoreConstants.PROPERTY_TEXT, value);
}
@Override
public VideoObject.Builder addText(String value) {
return addProperty(CoreConstants.PROPERTY_TEXT, Text.of(value));
}
@Override
public VideoObject.Builder addThumbnail(ImageObject value) {
return addProperty(CoreConstants.PROPERTY_THUMBNAIL, value);
}
@Override
public VideoObject.Builder addThumbnail(ImageObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_THUMBNAIL, value.build());
}
@Override
public VideoObject.Builder addThumbnail(String value) {
return addProperty(CoreConstants.PROPERTY_THUMBNAIL, Text.of(value));
}
@Override
public VideoObject.Builder addThumbnailUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_THUMBNAIL_URL, value);
}
@Override
public VideoObject.Builder addThumbnailUrl(String value) {
return addProperty(CoreConstants.PROPERTY_THUMBNAIL_URL, Text.of(value));
}
@Override
public VideoObject.Builder addTimeRequired(Duration value) {
return addProperty(CoreConstants.PROPERTY_TIME_REQUIRED, value);
}
@Override
public VideoObject.Builder addTimeRequired(Duration.Builder value) {
return addProperty(CoreConstants.PROPERTY_TIME_REQUIRED, value.build());
}
@Override
public VideoObject.Builder addTimeRequired(String value) {
return addProperty(CoreConstants.PROPERTY_TIME_REQUIRED, Text.of(value));
}
@Override
public VideoObject.Builder addTranscript(Text value) {
return addProperty(CoreConstants.PROPERTY_TRANSCRIPT, value);
}
@Override
public VideoObject.Builder addTranscript(String value) {
return addProperty(CoreConstants.PROPERTY_TRANSCRIPT, Text.of(value));
}
@Override
public VideoObject.Builder addTranslator(Organization value) {
return addProperty(CoreConstants.PROPERTY_TRANSLATOR, value);
}
@Override
public VideoObject.Builder addTranslator(Organization.Builder value) {
return addProperty(CoreConstants.PROPERTY_TRANSLATOR, value.build());
}
@Override
public VideoObject.Builder addTranslator(Person value) {
return addProperty(CoreConstants.PROPERTY_TRANSLATOR, value);
}
@Override
public VideoObject.Builder addTranslator(Person.Builder value) {
return addProperty(CoreConstants.PROPERTY_TRANSLATOR, value.build());
}
@Override
public VideoObject.Builder addTranslator(String value) {
return addProperty(CoreConstants.PROPERTY_TRANSLATOR, Text.of(value));
}
@Override
public VideoObject.Builder addTypicalAgeRange(Text value) {
return addProperty(CoreConstants.PROPERTY_TYPICAL_AGE_RANGE, value);
}
@Override
public VideoObject.Builder addTypicalAgeRange(String value) {
return addProperty(CoreConstants.PROPERTY_TYPICAL_AGE_RANGE, Text.of(value));
}
@Override
public VideoObject.Builder addUploadDate(Date value) {
return addProperty(CoreConstants.PROPERTY_UPLOAD_DATE, value);
}
@Override
public VideoObject.Builder addUploadDate(String value) {
return addProperty(CoreConstants.PROPERTY_UPLOAD_DATE, Text.of(value));
}
@Override
public VideoObject.Builder addUrl(URL value) {
return addProperty(CoreConstants.PROPERTY_URL, value);
}
@Override
public VideoObject.Builder addUrl(String value) {
return addProperty(CoreConstants.PROPERTY_URL, Text.of(value));
}
@Override
public VideoObject.Builder addVersion(Number value) {
return addProperty(CoreConstants.PROPERTY_VERSION, value);
}
@Override
public VideoObject.Builder addVersion(String value) {
return addProperty(CoreConstants.PROPERTY_VERSION, Text.of(value));
}
@Override
public VideoObject.Builder addVideo(VideoObject value) {
return addProperty(CoreConstants.PROPERTY_VIDEO, value);
}
@Override
public VideoObject.Builder addVideo(VideoObject.Builder value) {
return addProperty(CoreConstants.PROPERTY_VIDEO, value.build());
}
@Override
public VideoObject.Builder addVideo(String value) {
return addProperty(CoreConstants.PROPERTY_VIDEO, Text.of(value));
}
@Override
public VideoObject.Builder addVideoFrameSize(Text value) {
return addProperty(CoreConstants.PROPERTY_VIDEO_FRAME_SIZE, value);
}
@Override
public VideoObject.Builder addVideoFrameSize(String value) {
return addProperty(CoreConstants.PROPERTY_VIDEO_FRAME_SIZE, Text.of(value));
}
@Override
public VideoObject.Builder addVideoQuality(Text value) {
return addProperty(CoreConstants.PROPERTY_VIDEO_QUALITY, value);
}
@Override
public VideoObject.Builder addVideoQuality(String value) {
return addProperty(CoreConstants.PROPERTY_VIDEO_QUALITY, Text.of(value));
}
@Override
public VideoObject.Builder addWidth(Distance value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, value);
}
@Override
public VideoObject.Builder addWidth(Distance.Builder value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, value.build());
}
@Override
public VideoObject.Builder addWidth(QuantitativeValue value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, value);
}
@Override
public VideoObject.Builder addWidth(QuantitativeValue.Builder value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, value.build());
}
@Override
public VideoObject.Builder addWidth(String value) {
return addProperty(CoreConstants.PROPERTY_WIDTH, Text.of(value));
}
@Override
public VideoObject.Builder addWorkExample(CreativeWork value) {
return addProperty(CoreConstants.PROPERTY_WORK_EXAMPLE, value);
}
@Override
public VideoObject.Builder addWorkExample(CreativeWork.Builder value) {
return addProperty(CoreConstants.PROPERTY_WORK_EXAMPLE, value.build());
}
@Override
public VideoObject.Builder addWorkExample(String value) {
return addProperty(CoreConstants.PROPERTY_WORK_EXAMPLE, Text.of(value));
}
@Override
public VideoObject.Builder addDetailedDescription(Article value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value);
}
@Override
public VideoObject.Builder addDetailedDescription(Article.Builder value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build());
}
@Override
public VideoObject.Builder addDetailedDescription(String value) {
return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value));
}
@Override
public VideoObject.Builder addPopularityScore(PopularityScoreSpecification value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value);
}
@Override
public VideoObject.Builder addPopularityScore(PopularityScoreSpecification.Builder value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build());
}
@Override
public VideoObject.Builder addPopularityScore(String value) {
return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value));
}
@Override
public VideoObject build() {
return new VideoObjectImpl(properties, reverseMap);
}
}
public VideoObjectImpl(
Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) {
super(properties, reverseMap);
}
@Override
public String getFullTypeName() {
return CoreConstants.TYPE_VIDEO_OBJECT;
}
@Override
public boolean includesProperty(String property) {
return PROPERTY_SET.contains(CoreConstants.NAMESPACE + property)
|| PROPERTY_SET.contains(GoogConstants.NAMESPACE + property)
|| PROPERTY_SET.contains(property);
}
@Override
public ImmutableList<SchemaOrgType> getActorList() {
return getProperty(CoreConstants.PROPERTY_ACTOR);
}
@Override
public ImmutableList<SchemaOrgType> getActorsList() {
return getProperty(CoreConstants.PROPERTY_ACTORS);
}
@Override
public ImmutableList<SchemaOrgType> getCaptionList() {
return getProperty(CoreConstants.PROPERTY_CAPTION);
}
@Override
public ImmutableList<SchemaOrgType> getDirectorList() {
return getProperty(CoreConstants.PROPERTY_DIRECTOR);
}
@Override
public ImmutableList<SchemaOrgType> getDirectorsList() {
return getProperty(CoreConstants.PROPERTY_DIRECTORS);
}
@Override
public ImmutableList<SchemaOrgType> getMusicByList() {
return getProperty(CoreConstants.PROPERTY_MUSIC_BY);
}
@Override
public ImmutableList<SchemaOrgType> getThumbnailList() {
return getProperty(CoreConstants.PROPERTY_THUMBNAIL);
}
@Override
public ImmutableList<SchemaOrgType> getTranscriptList() {
return getProperty(CoreConstants.PROPERTY_TRANSCRIPT);
}
@Override
public ImmutableList<SchemaOrgType> getVideoFrameSizeList() {
return getProperty(CoreConstants.PROPERTY_VIDEO_FRAME_SIZE);
}
@Override
public ImmutableList<SchemaOrgType> getVideoQualityList() {
return getProperty(CoreConstants.PROPERTY_VIDEO_QUALITY);
}
}
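/*
 * Illustrative sketch (not part of the generated source): a VideoObject.Builder is normally
 * obtained from the library's factory entry point (not shown in this file) and chained
 * fluently before build(). Assuming such a builder has already been obtained:
 *
 *   VideoObject video = builder
 *       .addName("Example clip")                        // String overloads wrap values via Text.of(...)
 *       .addContentUrl("https://example.com/clip.mp4")
 *       .addUploadDate("2015-01-01")
 *       .build();
 */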
|
|
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.completion.util.ParenthesesInsertHandler;
import com.intellij.codeInsight.generation.GenerateMembersUtil;
import com.intellij.codeInsight.lookup.*;
import com.intellij.codeInsight.template.impl.TemplateManagerImpl;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.ElementPattern;
import com.intellij.patterns.PsiJavaPatterns;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.SuggestedNameInfo;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.PropertyUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.refactoring.introduceField.InplaceIntroduceFieldPopup;
import com.intellij.refactoring.introduceVariable.IntroduceVariableBase;
import com.intellij.util.ArrayUtil;
import com.intellij.util.PlatformIcons;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static com.intellij.patterns.PlatformPatterns.psiElement;
import static com.intellij.patterns.PsiJavaPatterns.psiClass;
import static com.intellij.patterns.PsiJavaPatterns.psiField;
import static com.intellij.patterns.StandardPatterns.or;
/**
 * Contributes name suggestions while the user is typing the name of a newly declared local
 * variable, parameter, field or method. Suggestions are derived from the declared type and the
 * project's code style settings, from unresolved references in the surrounding scope and from
 * existing field names.
 *
 * @author peter
 */
public class JavaMemberNameCompletionContributor extends CompletionContributor {
public static final ElementPattern<PsiElement> INSIDE_TYPE_PARAMS_PATTERN = psiElement().
afterLeaf(psiElement().withText("?").andOr(
psiElement().afterLeaf("<", ","),
psiElement().afterSiblingSkipping(psiElement().whitespaceCommentEmptyOrError(), psiElement(PsiAnnotation.class))));
static final int MAX_SCOPE_SIZE_TO_SEARCH_UNRESOLVED = 50000;
@Override
public void fillCompletionVariants(@NotNull CompletionParameters parameters, @NotNull CompletionResultSet result) {
if (parameters.getCompletionType() != CompletionType.BASIC && parameters.getCompletionType() != CompletionType.SMART) {
return;
}
if (parameters.getInvocationCount() == 0 && TemplateManagerImpl.getTemplateState(parameters.getEditor()) != null) {
return;
}
PsiElement position = parameters.getPosition();
final Set<LookupElement> lookupSet = new THashSet<LookupElement>();
if (psiElement(PsiIdentifier.class).andNot(INSIDE_TYPE_PARAMS_PATTERN).withParent(
or(psiElement(PsiLocalVariable.class), psiElement(PsiParameter.class))).accepts(position)) {
completeLocalVariableName(lookupSet, result.getPrefixMatcher(), (PsiVariable)parameters.getPosition().getParent(),
parameters.getInvocationCount() >= 1);
for (final LookupElement item : lookupSet) {
if (item instanceof LookupItem) {
((LookupItem)item).setAutoCompletionPolicy(AutoCompletionPolicy.GIVE_CHANCE_TO_OVERWRITE);
}
}
}
if (psiElement(PsiIdentifier.class).withParent(PsiField.class).andNot(INSIDE_TYPE_PARAMS_PATTERN).accepts(position)) {
final PsiField variable = (PsiField)parameters.getPosition().getParent();
completeMethodName(lookupSet, variable, result.getPrefixMatcher());
completeFieldName(lookupSet, variable, result.getPrefixMatcher(), parameters.getInvocationCount() >= 1);
}
if (PsiJavaPatterns.psiElement().nameIdentifierOf(PsiJavaPatterns.psiMethod().withParent(PsiClass.class)).accepts(position)) {
completeMethodName(lookupSet, parameters.getPosition().getParent(), result.getPrefixMatcher());
}
for (final LookupElement item : lookupSet) {
result.addElement(item);
}
}
private static void completeLocalVariableName(Set<LookupElement> set, PrefixMatcher matcher, PsiVariable var, boolean includeOverlapped) {
FeatureUsageTracker.getInstance().triggerFeatureUsed("editing.completion.variable.name");
Project project = var.getProject();
final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
final VariableKind variableKind = codeStyleManager.getVariableKind(var);
String propertyName = null;
if (variableKind == VariableKind.PARAMETER) {
final PsiMethod method = PsiTreeUtil.getParentOfType(var, PsiMethod.class);
if (method != null) {
propertyName = PropertyUtil.getPropertyName(method);
}
if (method != null && method.getName().startsWith("with")) {
propertyName = StringUtil.decapitalize(method.getName().substring(4));
}
}
final PsiType type = var.getType();
SuggestedNameInfo suggestedNameInfo = codeStyleManager.suggestVariableName(variableKind, propertyName, null, type, StringUtil.isEmpty(matcher.getPrefix()));
suggestedNameInfo = codeStyleManager.suggestUniqueVariableName(suggestedNameInfo, var, false);
final String[] suggestedNames = suggestedNameInfo.names;
addLookupItems(set, suggestedNameInfo, matcher, project, suggestedNames);
if (!hasStartMatches(set, matcher)) {
if (type.equalsToText(CommonClassNames.JAVA_LANG_OBJECT) && matcher.prefixMatches("object")) {
set.add(withInsertHandler(suggestedNameInfo, LookupElementBuilder.create("object")));
}
if (type.equalsToText(CommonClassNames.JAVA_LANG_STRING) && matcher.prefixMatches("string")) {
set.add(withInsertHandler(suggestedNameInfo, LookupElementBuilder.create("string")));
}
}
if (!hasStartMatches(set, matcher) && includeOverlapped) {
addLookupItems(set, null, matcher, project, getOverlappedNameVersions(matcher.getPrefix(), suggestedNames, ""));
}
PsiElement parent = PsiTreeUtil.getParentOfType(var, PsiCodeBlock.class);
    if (parent == null) parent = PsiTreeUtil.getParentOfType(var, PsiMethod.class, PsiLambdaExpression.class);
addLookupItems(set, suggestedNameInfo, matcher, project, getUnresolvedReferences(parent, false));
if (var instanceof PsiParameter && parent instanceof PsiMethod) {
addSuggestionsInspiredByFieldNames(set, matcher, var, project, codeStyleManager);
}
PsiExpression initializer = var.getInitializer();
if (initializer != null) {
SuggestedNameInfo initializerSuggestions = IntroduceVariableBase.getSuggestedName(type, initializer);
addLookupItems(set, initializerSuggestions, matcher, project, initializerSuggestions.names);
}
}
private static boolean hasStartMatches(PrefixMatcher matcher, Set<String> set) {
for (String s : set) {
if (matcher.isStartMatch(s)) {
return true;
}
}
return false;
}
private static boolean hasStartMatches(Set<LookupElement> set, PrefixMatcher matcher) {
for (LookupElement lookupElement : set) {
if (matcher.isStartMatch(lookupElement)) {
return true;
}
}
return false;
}
private static void addSuggestionsInspiredByFieldNames(Set<LookupElement> set,
PrefixMatcher matcher,
PsiVariable var,
Project project,
JavaCodeStyleManager codeStyleManager) {
PsiClass psiClass = PsiTreeUtil.getParentOfType(var, PsiClass.class);
if (psiClass == null) {
return;
}
for (PsiField field : psiClass.getFields()) {
if (field.getType().isAssignableFrom(var.getType())) {
String prop = codeStyleManager.variableNameToPropertyName(field.getName(), VariableKind.FIELD);
addLookupItems(set, null, matcher, project, codeStyleManager.propertyNameToVariableName(prop, VariableKind.PARAMETER));
}
}
}
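  /**
   * Builds additional name suggestions that preserve what the user has already typed: each
   * suggested name is spliced onto {@code prefix} at their longest overlap (see
   * {@link #getOverlap(String, String)}), so prefix "myStr" and suggestion "string" yield
   * "myString". Suggestions that already start with the prefix (ignoring case) are kept as-is,
   * and anything after the last occurrence of the required {@code suffix} is trimmed off so
   * suggestions still end with it.
   */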
private static String[] getOverlappedNameVersions(final String prefix, final String[] suggestedNames, String suffix) {
final List<String> newSuggestions = new ArrayList<String>();
int longestOverlap = 0;
for (String suggestedName : suggestedNames) {
if (suggestedName.length() < 3) {
continue;
}
if (suggestedName.toUpperCase().startsWith(prefix.toUpperCase())) {
newSuggestions.add(suggestedName);
longestOverlap = prefix.length();
}
suggestedName = String.valueOf(Character.toUpperCase(suggestedName.charAt(0))) + suggestedName.substring(1);
final int overlap = getOverlap(suggestedName, prefix);
if (overlap < longestOverlap) continue;
if (overlap > longestOverlap) {
newSuggestions.clear();
longestOverlap = overlap;
}
String suggestion = prefix.substring(0, prefix.length() - overlap) + suggestedName;
final int lastIndexOfSuffix = suggestion.lastIndexOf(suffix);
if (lastIndexOfSuffix >= 0 && suffix.length() < suggestion.length() - lastIndexOfSuffix) {
suggestion = suggestion.substring(0, lastIndexOfSuffix) + suffix;
}
if (!newSuggestions.contains(suggestion)) {
newSuggestions.add(suggestion);
}
}
return ArrayUtil.toStringArray(newSuggestions);
}
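  /**
   * Returns the length of the longest overlap between the end of {@code prefix} and the start of
   * {@code propertyName}, e.g. {@code getOverlap("String", "myStr") == 3} ("Str").
   */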
private static int getOverlap(final String propertyName, final String prefix) {
int overlap = 0;
int propertyNameLen = propertyName.length();
int prefixLen = prefix.length();
for (int j = 1; j < prefixLen && j < propertyNameLen; j++) {
if (prefix.substring(prefixLen - j).equals(propertyName.substring(0, j))) {
overlap = j;
}
}
return overlap;
}
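  /**
   * Collects the names of unresolved references inside {@code parentOfType} as naming candidates.
   * With {@code referenceOnMethod} set, only unresolved method-call names are gathered; otherwise
   * only unresolved non-call references are. Scopes whose text is longer than
   * {@link #MAX_SCOPE_SIZE_TO_SEARCH_UNRESOLVED} characters are skipped entirely.
   */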
private static String[] getUnresolvedReferences(final PsiElement parentOfType, final boolean referenceOnMethod) {
if (parentOfType != null && parentOfType.getTextLength() > MAX_SCOPE_SIZE_TO_SEARCH_UNRESOLVED) return ArrayUtil.EMPTY_STRING_ARRAY;
final Set<String> unresolvedRefs = new LinkedHashSet<String>();
if (parentOfType != null) {
parentOfType.accept(new JavaRecursiveElementWalkingVisitor() {
@Override public void visitReferenceExpression(PsiReferenceExpression reference) {
final PsiElement parent = reference.getParent();
if (parent instanceof PsiReference) return;
if (referenceOnMethod && parent instanceof PsiMethodCallExpression &&
reference == ((PsiMethodCallExpression)parent).getMethodExpression()) {
if (reference.resolve() == null) {
ContainerUtil.addIfNotNull(unresolvedRefs, reference.getReferenceName());
}
}
          else if (!referenceOnMethod && !(parent instanceof PsiMethodCallExpression) && reference.resolve() == null) {
ContainerUtil.addIfNotNull(unresolvedRefs, reference.getReferenceName());
}
}
});
}
return ArrayUtil.toStringArray(unresolvedRefs);
}
private static void completeFieldName(Set<LookupElement> set, PsiField var, final PrefixMatcher matcher, boolean includeOverlapped) {
FeatureUsageTracker.getInstance().triggerFeatureUsed("editing.completion.variable.name");
Project project = var.getProject();
JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
final VariableKind variableKind = JavaCodeStyleManager.getInstance(project).getVariableKind(var);
final String prefix = matcher.getPrefix();
if (PsiType.VOID.equals(var.getType()) || psiField().inClass(psiClass().isInterface().andNot(psiClass().isAnnotationType())).accepts(var)) {
completeVariableNameForRefactoring(project, set, matcher, var.getType(), variableKind, includeOverlapped, true);
return;
}
SuggestedNameInfo suggestedNameInfo = codeStyleManager.suggestVariableName(variableKind, null, null, var.getType());
final String[] suggestedNames = suggestedNameInfo.names;
addLookupItems(set, suggestedNameInfo, matcher, project, suggestedNames);
if (!hasStartMatches(set, matcher) && includeOverlapped) {
// use suggested names as suffixes
final String requiredSuffix = codeStyleManager.getSuffixByVariableKind(variableKind);
      if (variableKind != VariableKind.STATIC_FINAL_FIELD) {
        for (int i = 0; i < suggestedNames.length; i++) {
          suggestedNames[i] = codeStyleManager.variableNameToPropertyName(suggestedNames[i], variableKind);
        }
      }
addLookupItems(set, null, matcher, project, getOverlappedNameVersions(prefix, suggestedNames, requiredSuffix));
}
addLookupItems(set, suggestedNameInfo, matcher, project, getUnresolvedReferences(var.getParent(), false));
PsiExpression initializer = var.getInitializer();
PsiClass containingClass = var.getContainingClass();
if (initializer != null && containingClass != null) {
SuggestedNameInfo initializerSuggestions = InplaceIntroduceFieldPopup.
suggestFieldName(var.getType(), null, initializer, var.hasModifierProperty(PsiModifier.STATIC), containingClass);
addLookupItems(set, initializerSuggestions, matcher, project, initializerSuggestions.names);
}
}
public static void completeVariableNameForRefactoring(Project project,
Set<LookupElement> set,
PrefixMatcher matcher,
PsiType varType,
VariableKind varKind, final boolean includeOverlapped, final boolean methodPrefix) {
JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
SuggestedNameInfo suggestedNameInfo = codeStyleManager.suggestVariableName(varKind, null, null, varType);
final String[] strings = completeVariableNameForRefactoring(codeStyleManager, matcher, varType, varKind, suggestedNameInfo,
includeOverlapped, methodPrefix);
addLookupItems(set, suggestedNameInfo, matcher, project, strings);
}
public static String[] completeVariableNameForRefactoring(JavaCodeStyleManager codeStyleManager,
final PrefixMatcher matcher,
@Nullable final PsiType varType,
final VariableKind varKind,
SuggestedNameInfo suggestedNameInfo,
final boolean includeOverlapped, final boolean methodPrefix) {
Set<String> result = new LinkedHashSet<String>();
final String[] suggestedNames = suggestedNameInfo.names;
for (final String suggestedName : suggestedNames) {
if (matcher.prefixMatches(suggestedName)) {
result.add(suggestedName);
}
}
if (!hasStartMatches(matcher, result) && PsiType.VOID != varType && includeOverlapped) {
// use suggested names as suffixes
final String requiredSuffix = codeStyleManager.getSuffixByVariableKind(varKind);
final String prefix = matcher.getPrefix();
if (varKind != VariableKind.STATIC_FINAL_FIELD || methodPrefix) {
for (int i = 0; i < suggestedNames.length; i++) {
suggestedNames[i] = codeStyleManager.variableNameToPropertyName(suggestedNames[i], varKind);
}
}
ContainerUtil.addAll(result, getOverlappedNameVersions(prefix, suggestedNames, requiredSuffix));
}
return ArrayUtil.toStringArray(result);
}
  private static void completeMethodName(Set<LookupElement> set, PsiElement element, final PrefixMatcher matcher) {
    if (element instanceof PsiMethod) {
final PsiMethod method = (PsiMethod)element;
if (method.isConstructor()) {
final PsiClass containingClass = method.getContainingClass();
if (containingClass != null) {
final String name = containingClass.getName();
if (StringUtil.isNotEmpty(name)) {
addLookupItems(set, null, matcher, element.getProject(), name);
}
}
return;
}
}
PsiClass ourClassParent = PsiTreeUtil.getParentOfType(element, PsiClass.class);
if (ourClassParent == null) return;
if (ourClassParent.isAnnotationType() && matcher.prefixMatches(PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME)) {
set.add(LookupElementBuilder.create(PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME)
.withIcon(PlatformIcons.METHOD_ICON)
.withTailText("()")
.withInsertHandler(ParenthesesInsertHandler.NO_PARAMETERS));
}
addLookupItems(set, null, matcher, element.getProject(), getUnresolvedReferences(ourClassParent, true));
addLookupItems(set, null, matcher, element.getProject(), getPropertiesHandlersNames(
ourClassParent,
((PsiModifierListOwner)element).hasModifierProperty(PsiModifier.STATIC),
PsiUtil.getTypeByPsiElement(element), element));
}
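  /**
   * Suggests accessor names derived from the surrounding class's fields: getter names for fields
   * whose type matches {@code varType}, and setter names when {@code varType} is void. Accessors
   * whose exact prototype already exists in the class hierarchy are skipped, as are non-static
   * fields in a static context.
   */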
private static String[] getPropertiesHandlersNames(final PsiClass psiClass,
final boolean staticContext,
final PsiType varType,
final PsiElement element) {
final List<String> propertyHandlers = new ArrayList<String>();
for (final PsiField field : psiClass.getFields()) {
if (field == element) continue;
if (StringUtil.isEmpty(field.getName())) continue;
PsiUtilCore.ensureValid(field);
PsiType fieldType = field.getType();
PsiUtil.ensureValidType(fieldType);
final PsiModifierList modifierList = field.getModifierList();
if (staticContext && (modifierList != null && !modifierList.hasModifierProperty(PsiModifier.STATIC))) continue;
if (fieldType.equals(varType)) {
final String getterName = PropertyUtil.suggestGetterName(field);
if ((psiClass.findMethodsByName(getterName, true).length == 0 ||
psiClass.findMethodBySignature(GenerateMembersUtil.generateGetterPrototype(field), true) == null)) {
propertyHandlers.add(getterName);
}
}
if (PsiType.VOID.equals(varType)) {
final String setterName = PropertyUtil.suggestSetterName(field);
if ((psiClass.findMethodsByName(setterName, true).length == 0 ||
psiClass.findMethodBySignature(GenerateMembersUtil.generateSetterPrototype(field), true) == null)) {
propertyHandlers.add(setterName);
}
}
}
return ArrayUtil.toStringArray(propertyHandlers);
}
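  /**
   * Adds the given names as lookup elements, keeping only those that match the prefix and are
   * valid Java identifiers, skipping names already present in the set and ranking earlier names
   * higher. When a {@link SuggestedNameInfo} callback is given, the chosen name is reported back
   * to it via {@code nameChosen} on insertion.
   */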
private static void addLookupItems(Set<LookupElement> lookupElements, @Nullable final SuggestedNameInfo callback, PrefixMatcher matcher, Project project, String... strings) {
outer:
for (int i = 0; i < strings.length; i++) {
String name = strings[i];
if (!matcher.prefixMatches(name) || !PsiNameHelper.getInstance(project).isIdentifier(name, LanguageLevel.HIGHEST)) {
continue;
}
for (LookupElement lookupElement : lookupElements) {
if (lookupElement.getAllLookupStrings().contains(name)) {
continue outer;
}
}
LookupElement element = PrioritizedLookupElement.withPriority(LookupElementBuilder.create(name).withAutoCompletionPolicy(AutoCompletionPolicy.GIVE_CHANCE_TO_OVERWRITE), -i);
if (callback != null) {
element = withInsertHandler(callback, element);
}
lookupElements.add(element);
}
}
private static LookupElementDecorator<LookupElement> withInsertHandler(final SuggestedNameInfo callback, LookupElement element) {
return LookupElementDecorator.withInsertHandler(element, new InsertHandler<LookupElementDecorator<LookupElement>>() {
@Override
public void handleInsert(InsertionContext context, LookupElementDecorator<LookupElement> item) {
TailType tailType = LookupItem.getDefaultTailType(context.getCompletionChar());
if (tailType != null) {
context.setAddCompletionChar(false);
tailType.processTail(context.getEditor(), context.getTailOffset());
}
callback.nameChosen(item.getLookupString());
}
});
}
}
|
|
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.thrift2;
import static org.apache.hadoop.hbase.util.Bytes.getBytes;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.OperationWithAttributes;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Scan.ReadType;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.thrift2.generated.TAppend;
import org.apache.hadoop.hbase.thrift2.generated.TAuthorization;
import org.apache.hadoop.hbase.thrift2.generated.TBloomFilterType;
import org.apache.hadoop.hbase.thrift2.generated.TCellVisibility;
import org.apache.hadoop.hbase.thrift2.generated.TColumn;
import org.apache.hadoop.hbase.thrift2.generated.TColumnFamilyDescriptor;
import org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement;
import org.apache.hadoop.hbase.thrift2.generated.TColumnValue;
import org.apache.hadoop.hbase.thrift2.generated.TCompareOperator;
import org.apache.hadoop.hbase.thrift2.generated.TCompressionAlgorithm;
import org.apache.hadoop.hbase.thrift2.generated.TConsistency;
import org.apache.hadoop.hbase.thrift2.generated.TDataBlockEncoding;
import org.apache.hadoop.hbase.thrift2.generated.TDelete;
import org.apache.hadoop.hbase.thrift2.generated.TDeleteType;
import org.apache.hadoop.hbase.thrift2.generated.TDurability;
import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.thrift2.generated.THRegionInfo;
import org.apache.hadoop.hbase.thrift2.generated.THRegionLocation;
import org.apache.hadoop.hbase.thrift2.generated.TIncrement;
import org.apache.hadoop.hbase.thrift2.generated.TKeepDeletedCells;
import org.apache.hadoop.hbase.thrift2.generated.TMutation;
import org.apache.hadoop.hbase.thrift2.generated.TNamespaceDescriptor;
import org.apache.hadoop.hbase.thrift2.generated.TPut;
import org.apache.hadoop.hbase.thrift2.generated.TReadType;
import org.apache.hadoop.hbase.thrift2.generated.TResult;
import org.apache.hadoop.hbase.thrift2.generated.TRowMutations;
import org.apache.hadoop.hbase.thrift2.generated.TScan;
import org.apache.hadoop.hbase.thrift2.generated.TServerName;
import org.apache.hadoop.hbase.thrift2.generated.TTableDescriptor;
import org.apache.hadoop.hbase.thrift2.generated.TTableName;
import org.apache.hadoop.hbase.thrift2.generated.TTimeRange;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@InterfaceAudience.Private
public final class ThriftUtilities {
private final static Cell[] EMPTY_CELL_ARRAY = new Cell[]{};
private final static Result EMPTY_RESULT = Result.create(EMPTY_CELL_ARRAY);
private final static Result EMPTY_RESULT_STALE = Result.create(EMPTY_CELL_ARRAY, null, true);
private ThriftUtilities() {
throw new UnsupportedOperationException("Can't initialize class");
}
/**
* Creates a {@link Get} (HBase) from a {@link TGet} (Thrift).
*
* This ignores any timestamps set on {@link TColumn} objects.
*
* @param in the <code>TGet</code> to convert
*
* @return <code>Get</code> object
*
* @throws IOException if an invalid time range or max version parameter is given
*/
public static Get getFromThrift(TGet in) throws IOException {
Get out = new Get(in.getRow());
// Timestamp overwrites time range if both are set
if (in.isSetTimestamp()) {
out.setTimestamp(in.getTimestamp());
} else if (in.isSetTimeRange()) {
out.setTimeRange(in.getTimeRange().getMinStamp(), in.getTimeRange().getMaxStamp());
}
if (in.isSetMaxVersions()) {
out.readVersions(in.getMaxVersions());
}
if (in.isSetFilterString()) {
ParseFilter parseFilter = new ParseFilter();
out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
}
if (in.isSetAttributes()) {
addAttributes(out,in.getAttributes());
}
if (in.isSetAuthorizations()) {
out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
}
if (in.isSetConsistency()) {
out.setConsistency(consistencyFromThrift(in.getConsistency()));
}
if (in.isSetTargetReplicaId()) {
out.setReplicaId(in.getTargetReplicaId());
}
if (in.isSetCacheBlocks()) {
out.setCacheBlocks(in.isCacheBlocks());
}
if (in.isSetStoreLimit()) {
out.setMaxResultsPerColumnFamily(in.getStoreLimit());
}
if (in.isSetStoreOffset()) {
out.setRowOffsetPerColumnFamily(in.getStoreOffset());
}
if (in.isSetExistence_only()) {
out.setCheckExistenceOnly(in.isExistence_only());
}
if (in.isSetColumns()) {
for (TColumn column : in.getColumns()) {
if (column.isSetQualifier()) {
out.addColumn(column.getFamily(), column.getQualifier());
} else {
out.addFamily(column.getFamily());
}
}
}
if (in.isSetFilterBytes()) {
out.setFilter(filterFromThrift(in.getFilterBytes()));
}
return out;
}
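  /*
   * Illustrative sketch of the conversion above (not part of the original class); the row key,
   * column family and version count are made up for the example:
   *
   *   TGet tGet = new TGet(ByteBuffer.wrap(Bytes.toBytes("row-1")));
   *   tGet.addToColumns(new TColumn(ByteBuffer.wrap(Bytes.toBytes("cf"))));
   *   tGet.setMaxVersions(3);
   *   Get get = ThriftUtilities.getFromThrift(tGet);   // may throw IOException
   */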
/**
* Converts multiple {@link TGet}s (Thrift) into a list of {@link Get}s (HBase).
*
* @param in list of <code>TGet</code>s to convert
*
* @return list of <code>Get</code> objects
*
* @throws IOException if an invalid time range or max version parameter is given
* @see #getFromThrift(TGet)
*/
public static List<Get> getsFromThrift(List<TGet> in) throws IOException {
List<Get> out = new ArrayList<>(in.size());
for (TGet get : in) {
out.add(getFromThrift(get));
}
return out;
}
/**
* Creates a {@link TResult} (Thrift) from a {@link Result} (HBase).
*
* @param in the <code>Result</code> to convert
*
* @return converted result, returns an empty result if the input is <code>null</code>
*/
public static TResult resultFromHBase(Result in) {
Cell[] raw = in.rawCells();
TResult out = new TResult();
byte[] row = in.getRow();
if (row != null) {
out.setRow(in.getRow());
}
List<TColumnValue> columnValues = new ArrayList<>(raw.length);
for (Cell kv : raw) {
TColumnValue col = new TColumnValue();
col.setFamily(CellUtil.cloneFamily(kv));
col.setQualifier(CellUtil.cloneQualifier(kv));
col.setTimestamp(kv.getTimestamp());
col.setValue(CellUtil.cloneValue(kv));
col.setType(kv.getType().getCode());
if (kv.getTagsLength() > 0) {
col.setTags(PrivateCellUtil.cloneTags(kv));
}
columnValues.add(col);
}
out.setColumnValues(columnValues);
out.setStale(in.isStale());
out.setPartial(in.mayHaveMoreCellsInRow());
return out;
}
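  /*
   * Illustrative sketch of the conversion above (not part of the original class); 'table' stands
   * for a hypothetical org.apache.hadoop.hbase.client.Table handle:
   *
   *   Result result = table.get(get);
   *   TResult tResult = ThriftUtilities.resultFromHBase(result);
   *   // tResult.getColumnValues() now mirrors result.rawCells()
   */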
/**
* Converts multiple {@link Result}s (HBase) into a list of {@link TResult}s (Thrift).
*
* @param in array of <code>Result</code>s to convert
*
* @return list of converted <code>TResult</code>s
*
* @see #resultFromHBase(Result)
*/
public static List<TResult> resultsFromHBase(Result[] in) {
List<TResult> out = new ArrayList<>(in.length);
for (Result result : in) {
out.add(resultFromHBase(result));
}
return out;
}
/**
* Creates a {@link Put} (HBase) from a {@link TPut} (Thrift)
*
* @param in the <code>TPut</code> to convert
*
* @return converted <code>Put</code>
*/
public static Put putFromThrift(TPut in) {
Put out;
if (in.isSetTimestamp()) {
out = new Put(in.getRow(), in.getTimestamp());
} else {
out = new Put(in.getRow());
}
if (in.isSetDurability()) {
out.setDurability(durabilityFromThrift(in.getDurability()));
}
for (TColumnValue columnValue : in.getColumnValues()) {
try {
if (columnValue.isSetTimestamp()) {
out.add(CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
.setRow(out.getRow())
.setFamily(columnValue.getFamily())
.setQualifier(columnValue.getQualifier())
.setTimestamp(columnValue.getTimestamp())
.setType(Cell.Type.Put)
.setValue(columnValue.getValue())
.build());
} else {
out.add(CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
.setRow(out.getRow())
.setFamily(columnValue.getFamily())
.setQualifier(columnValue.getQualifier())
.setTimestamp(out.getTimestamp())
.setType(Cell.Type.Put)
.setValue(columnValue.getValue())
.build());
}
} catch (IOException e) {
        throw new IllegalArgumentException(e);
}
}
if (in.isSetAttributes()) {
addAttributes(out,in.getAttributes());
}
if (in.getCellVisibility() != null) {
out.setCellVisibility(new CellVisibility(in.getCellVisibility().getExpression()));
}
return out;
}
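  /*
   * Illustrative sketch of the conversion above (not part of the original class); the row key,
   * family, qualifier and value are made up for the example:
   *
   *   TColumnValue cv = new TColumnValue();
   *   cv.setFamily(Bytes.toBytes("cf")).setQualifier(Bytes.toBytes("q")).setValue(Bytes.toBytes("v"));
   *   TPut tPut = new TPut();
   *   tPut.setRow(Bytes.toBytes("row-1"));
   *   tPut.addToColumnValues(cv);
   *   Put put = ThriftUtilities.putFromThrift(tPut);
   */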
/**
* Converts multiple {@link TPut}s (Thrift) into a list of {@link Put}s (HBase).
*
* @param in list of <code>TPut</code>s to convert
*
* @return list of converted <code>Put</code>s
*
* @see #putFromThrift(TPut)
*/
public static List<Put> putsFromThrift(List<TPut> in) {
List<Put> out = new ArrayList<>(in.size());
for (TPut put : in) {
out.add(putFromThrift(put));
}
return out;
}
/**
* Creates a {@link Delete} (HBase) from a {@link TDelete} (Thrift).
*
* @param in the <code>TDelete</code> to convert
*
* @return converted <code>Delete</code>
*/
public static Delete deleteFromThrift(TDelete in) {
Delete out;
if (in.isSetColumns()) {
out = new Delete(in.getRow());
for (TColumn column : in.getColumns()) {
if (in.isSetDeleteType()) {
switch (in.getDeleteType()) {
case DELETE_COLUMN:
if (column.isSetTimestamp()) {
out.addColumn(column.getFamily(), column.getQualifier(), column.getTimestamp());
} else {
out.addColumn(column.getFamily(), column.getQualifier());
}
break;
case DELETE_COLUMNS:
if (column.isSetTimestamp()) {
out.addColumns(column.getFamily(), column.getQualifier(), column.getTimestamp());
} else {
out.addColumns(column.getFamily(), column.getQualifier());
}
break;
case DELETE_FAMILY:
if (column.isSetTimestamp()) {
out.addFamily(column.getFamily(), column.getTimestamp());
} else {
out.addFamily(column.getFamily());
}
break;
case DELETE_FAMILY_VERSION:
if (column.isSetTimestamp()) {
out.addFamilyVersion(column.getFamily(), column.getTimestamp());
} else {
throw new IllegalArgumentException(
"Timestamp is required for TDelete with DeleteFamilyVersion type");
}
break;
default:
throw new IllegalArgumentException("DeleteType is required for TDelete");
}
} else {
throw new IllegalArgumentException("DeleteType is required for TDelete");
}
}
} else {
if (in.isSetTimestamp()) {
out = new Delete(in.getRow(), in.getTimestamp());
} else {
out = new Delete(in.getRow());
}
}
if (in.isSetAttributes()) {
addAttributes(out,in.getAttributes());
}
if (in.isSetDurability()) {
out.setDurability(durabilityFromThrift(in.getDurability()));
}
return out;
}
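  /*
   * Illustrative sketch of the conversion above (not part of the original class); with no columns
   * set the TDelete maps to a whole-row Delete for the made-up row key:
   *
   *   TDelete tDelete = new TDelete(ByteBuffer.wrap(Bytes.toBytes("row-1")));
   *   Delete delete = ThriftUtilities.deleteFromThrift(tDelete);
   */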
/**
* Converts multiple {@link TDelete}s (Thrift) into a list of {@link Delete}s (HBase).
*
* @param in list of <code>TDelete</code>s to convert
*
* @return list of converted <code>Delete</code>s
*
* @see #deleteFromThrift(TDelete)
*/
public static List<Delete> deletesFromThrift(List<TDelete> in) {
List<Delete> out = new ArrayList<>(in.size());
for (TDelete delete : in) {
out.add(deleteFromThrift(delete));
}
return out;
}
public static TDeleteType deleteTypeFromHBase(Cell.Type type) {
switch (type) {
case Delete: return TDeleteType.DELETE_COLUMN;
case DeleteColumn: return TDeleteType.DELETE_COLUMNS;
case DeleteFamily: return TDeleteType.DELETE_FAMILY;
case DeleteFamilyVersion: return TDeleteType.DELETE_FAMILY_VERSION;
      default: throw new IllegalArgumentException("Unknown delete type " + type);
    }
  }
public static TDelete deleteFromHBase(Delete in) {
TDelete out = new TDelete(ByteBuffer.wrap(in.getRow()));
List<TColumn> columns = new ArrayList<>(in.getFamilyCellMap().entrySet().size());
long rowTimestamp = in.getTimestamp();
if (rowTimestamp != HConstants.LATEST_TIMESTAMP) {
out.setTimestamp(rowTimestamp);
}
for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())),
ByteBuffer.wrap(attribute.getValue()));
}
if (in.getDurability() != Durability.USE_DEFAULT) {
out.setDurability(durabilityFromHBase(in.getDurability()));
}
// Delete the whole row
if (in.getFamilyCellMap().size() == 0) {
return out;
}
TDeleteType type = null;
for (Map.Entry<byte[], List<Cell>> familyEntry:
in.getFamilyCellMap().entrySet()) {
byte[] family = familyEntry.getKey();
TColumn column = new TColumn(ByteBuffer.wrap(familyEntry.getKey()));
for (Cell cell: familyEntry.getValue()) {
TDeleteType cellDeleteType = deleteTypeFromHBase(cell.getType());
if (type == null) {
type = cellDeleteType;
        } else if (type != cellDeleteType) {
          throw new RuntimeException("Only one delete type is supported, but two delete types "
              + "were found: one is " + type + " and the other is " + cellDeleteType);
}
byte[] qualifier = CellUtil.cloneQualifier(cell);
long timestamp = cell.getTimestamp();
column.setFamily(family);
if (qualifier != null) {
column.setQualifier(qualifier);
}
if (timestamp != HConstants.LATEST_TIMESTAMP) {
column.setTimestamp(timestamp);
}
}
columns.add(column);
}
out.setColumns(columns);
out.setDeleteType(type);
return out;
}
/**
* Creates a {@link RowMutations} (HBase) from a {@link TRowMutations} (Thrift)
*
* @param in the <code>TRowMutations</code> to convert
*
* @return converted <code>RowMutations</code>
*/
public static RowMutations rowMutationsFromThrift(TRowMutations in) throws IOException {
List<TMutation> mutations = in.getMutations();
RowMutations out = new RowMutations(in.getRow(), mutations.size());
for (TMutation mutation : mutations) {
if (mutation.isSetPut()) {
out.add(putFromThrift(mutation.getPut()));
}
if (mutation.isSetDeleteSingle()) {
out.add(deleteFromThrift(mutation.getDeleteSingle()));
}
}
return out;
}
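  /*
   * Illustrative sketch of the conversion above (not part of the original class), reusing the tPut
   * from the earlier sketch; the row key must match the one used by the wrapped mutations:
   *
   *   TMutation tMutation = new TMutation();
   *   tMutation.setPut(tPut);
   *   TRowMutations tRowMutations = new TRowMutations(ByteBuffer.wrap(Bytes.toBytes("row-1")),
   *       java.util.Collections.singletonList(tMutation));
   *   RowMutations rowMutations = ThriftUtilities.rowMutationsFromThrift(tRowMutations);
   */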
public static Scan scanFromThrift(TScan in) throws IOException {
Scan out = new Scan();
if (in.isSetStartRow()) {
out.setStartRow(in.getStartRow());
}
if (in.isSetStopRow()) {
out.setStopRow(in.getStopRow());
}
if (in.isSetCaching()) {
out.setCaching(in.getCaching());
}
if (in.isSetMaxVersions()) {
out.setMaxVersions(in.getMaxVersions());
}
if (in.isSetColumns()) {
for (TColumn column : in.getColumns()) {
if (column.isSetQualifier()) {
out.addColumn(column.getFamily(), column.getQualifier());
} else {
out.addFamily(column.getFamily());
}
}
}
TTimeRange timeRange = in.getTimeRange();
if (timeRange != null &&
timeRange.isSetMinStamp() && timeRange.isSetMaxStamp()) {
out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp());
}
if (in.isSetBatchSize()) {
out.setBatch(in.getBatchSize());
}
if (in.isSetFilterString()) {
ParseFilter parseFilter = new ParseFilter();
out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
}
if (in.isSetAttributes()) {
addAttributes(out,in.getAttributes());
}
if (in.isSetAuthorizations()) {
out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
}
if (in.isSetReversed()) {
out.setReversed(in.isReversed());
}
if (in.isSetCacheBlocks()) {
out.setCacheBlocks(in.isCacheBlocks());
}
if (in.isSetColFamTimeRangeMap()) {
Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = in.getColFamTimeRangeMap();
if (MapUtils.isNotEmpty(colFamTimeRangeMap)) {
for (Map.Entry<ByteBuffer, TTimeRange> entry : colFamTimeRangeMap.entrySet()) {
out.setColumnFamilyTimeRange(Bytes.toBytes(entry.getKey()),
entry.getValue().getMinStamp(), entry.getValue().getMaxStamp());
}
}
}
if (in.isSetReadType()) {
out.setReadType(readTypeFromThrift(in.getReadType()));
}
if (in.isSetLimit()) {
out.setLimit(in.getLimit());
}
if (in.isSetConsistency()) {
out.setConsistency(consistencyFromThrift(in.getConsistency()));
}
if (in.isSetTargetReplicaId()) {
out.setReplicaId(in.getTargetReplicaId());
}
if (in.isSetFilterBytes()) {
out.setFilter(filterFromThrift(in.getFilterBytes()));
}
return out;
}
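  /*
   * Illustrative sketch of the conversion above (not part of the original class); the start/stop
   * rows and caching value are made up for the example:
   *
   *   TScan tScan = new TScan();
   *   tScan.setStartRow(Bytes.toBytes("row-0"));
   *   tScan.setStopRow(Bytes.toBytes("row-9"));
   *   tScan.setCaching(100);
   *   Scan scan = ThriftUtilities.scanFromThrift(tScan);   // may throw IOException
   */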
public static byte[] filterFromHBase(Filter filter) throws IOException {
FilterProtos.Filter filterPB = ProtobufUtil.toFilter(filter);
return filterPB.toByteArray();
}
public static Filter filterFromThrift(byte[] filterBytes) throws IOException {
FilterProtos.Filter filterPB = FilterProtos.Filter.parseFrom(filterBytes);
return ProtobufUtil.toFilter(filterPB);
}
public static TScan scanFromHBase(Scan in) throws IOException {
TScan out = new TScan();
out.setStartRow(in.getStartRow());
out.setStopRow(in.getStopRow());
out.setCaching(in.getCaching());
out.setMaxVersions(in.getMaxVersions());
for (Map.Entry<byte[], NavigableSet<byte[]>> family : in.getFamilyMap().entrySet()) {
if (family.getValue() != null && !family.getValue().isEmpty()) {
for (byte[] qualifier : family.getValue()) {
TColumn column = new TColumn();
column.setFamily(family.getKey());
column.setQualifier(qualifier);
out.addToColumns(column);
}
} else {
TColumn column = new TColumn();
column.setFamily(family.getKey());
out.addToColumns(column);
}
}
TTimeRange tTimeRange = new TTimeRange();
tTimeRange.setMinStamp(in.getTimeRange().getMin()).setMaxStamp(in.getTimeRange().getMax());
out.setTimeRange(tTimeRange);
out.setBatchSize(in.getBatch());
for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())),
ByteBuffer.wrap(attribute.getValue()));
}
try {
Authorizations authorizations = in.getAuthorizations();
if (authorizations != null) {
TAuthorization tAuthorization = new TAuthorization();
tAuthorization.setLabels(authorizations.getLabels());
out.setAuthorizations(tAuthorization);
}
} catch (DeserializationException e) {
throw new RuntimeException(e);
}
out.setReversed(in.isReversed());
out.setCacheBlocks(in.getCacheBlocks());
out.setReadType(readTypeFromHBase(in.getReadType()));
out.setLimit(in.getLimit());
out.setConsistency(consistencyFromHBase(in.getConsistency()));
out.setTargetReplicaId(in.getReplicaId());
for (Map.Entry<byte[], TimeRange> entry : in.getColumnFamilyTimeRange().entrySet()) {
if (entry.getValue() != null) {
TTimeRange timeRange = new TTimeRange();
timeRange.setMinStamp(entry.getValue().getMin()).setMaxStamp(entry.getValue().getMax());
out.putToColFamTimeRangeMap(ByteBuffer.wrap(entry.getKey()), timeRange);
}
}
if (in.getFilter() != null) {
try {
out.setFilterBytes(filterFromHBase(in.getFilter()));
} catch (IOException ioE) {
throw new RuntimeException(ioE);
}
}
return out;
}
public static Increment incrementFromThrift(TIncrement in) throws IOException {
Increment out = new Increment(in.getRow());
for (TColumnIncrement column : in.getColumns()) {
out.addColumn(column.getFamily(), column.getQualifier(), column.getAmount());
}
if (in.isSetAttributes()) {
addAttributes(out,in.getAttributes());
}
if (in.isSetDurability()) {
out.setDurability(durabilityFromThrift(in.getDurability()));
}
if(in.getCellVisibility() != null) {
out.setCellVisibility(new CellVisibility(in.getCellVisibility().getExpression()));
}
if (in.isSetReturnResults()) {
out.setReturnResults(in.isReturnResults());
}
return out;
}
public static Append appendFromThrift(TAppend append) throws IOException {
Append out = new Append(append.getRow());
for (TColumnValue column : append.getColumns()) {
out.addColumn(column.getFamily(), column.getQualifier(), column.getValue());
}
if (append.isSetAttributes()) {
addAttributes(out, append.getAttributes());
}
if (append.isSetDurability()) {
out.setDurability(durabilityFromThrift(append.getDurability()));
}
if(append.getCellVisibility() != null) {
out.setCellVisibility(new CellVisibility(append.getCellVisibility().getExpression()));
}
if (append.isSetReturnResults()) {
out.setReturnResults(append.isReturnResults());
}
return out;
}
public static THRegionLocation regionLocationFromHBase(HRegionLocation hrl) {
RegionInfo hri = hrl.getRegion();
ServerName serverName = hrl.getServerName();
THRegionInfo thRegionInfo = new THRegionInfo();
THRegionLocation thRegionLocation = new THRegionLocation();
TServerName tServerName = new TServerName();
tServerName.setHostName(serverName.getHostname());
tServerName.setPort(serverName.getPort());
tServerName.setStartCode(serverName.getStartcode());
thRegionInfo.setTableName(hri.getTable().getName());
thRegionInfo.setEndKey(hri.getEndKey());
thRegionInfo.setStartKey(hri.getStartKey());
thRegionInfo.setOffline(hri.isOffline());
thRegionInfo.setSplit(hri.isSplit());
thRegionInfo.setReplicaId(hri.getReplicaId());
thRegionLocation.setRegionInfo(thRegionInfo);
thRegionLocation.setServerName(tServerName);
return thRegionLocation;
}
public static List<THRegionLocation> regionLocationsFromHBase(List<HRegionLocation> locations) {
List<THRegionLocation> tlocations = new ArrayList<>(locations.size());
for (HRegionLocation hrl:locations) {
tlocations.add(regionLocationFromHBase(hrl));
}
return tlocations;
}
/**
* Adds all the attributes into the Operation object
*/
private static void addAttributes(OperationWithAttributes op,
Map<ByteBuffer, ByteBuffer> attributes) {
if (attributes == null || attributes.isEmpty()) {
return;
}
for (Map.Entry<ByteBuffer, ByteBuffer> entry : attributes.entrySet()) {
String name = Bytes.toStringBinary(getBytes(entry.getKey()));
byte[] value = getBytes(entry.getValue());
op.setAttribute(name, value);
}
}
private static Durability durabilityFromThrift(TDurability tDurability) {
switch (tDurability.getValue()) {
case 0: return Durability.USE_DEFAULT;
case 1: return Durability.SKIP_WAL;
case 2: return Durability.ASYNC_WAL;
case 3: return Durability.SYNC_WAL;
case 4: return Durability.FSYNC_WAL;
default: return Durability.USE_DEFAULT;
}
}
public static CompareOperator compareOpFromThrift(TCompareOperator tCompareOp) {
switch (tCompareOp.getValue()) {
case 0: return CompareOperator.LESS;
case 1: return CompareOperator.LESS_OR_EQUAL;
case 2: return CompareOperator.EQUAL;
case 3: return CompareOperator.NOT_EQUAL;
case 4: return CompareOperator.GREATER_OR_EQUAL;
case 5: return CompareOperator.GREATER;
case 6: return CompareOperator.NO_OP;
default: return null;
}
}
private static ReadType readTypeFromThrift(TReadType tReadType) {
switch (tReadType.getValue()) {
case 1: return ReadType.DEFAULT;
case 2: return ReadType.STREAM;
case 3: return ReadType.PREAD;
default: return null;
}
}
private static TReadType readTypeFromHBase(ReadType readType) {
switch (readType) {
case DEFAULT: return TReadType.DEFAULT;
case STREAM: return TReadType.STREAM;
case PREAD: return TReadType.PREAD;
default: return TReadType.DEFAULT;
}
}
private static Consistency consistencyFromThrift(TConsistency tConsistency) {
switch (tConsistency.getValue()) {
case 1: return Consistency.STRONG;
case 2: return Consistency.TIMELINE;
default: return Consistency.STRONG;
}
}
public static TableName tableNameFromThrift(TTableName tableName) {
return TableName.valueOf(tableName.getNs(), tableName.getQualifier());
}
public static TableName[] tableNamesArrayFromThrift(List<TTableName> tableNames) {
TableName[] out = new TableName[tableNames.size()];
int index = 0;
for (TTableName tableName : tableNames) {
out[index++] = tableNameFromThrift(tableName);
}
return out;
}
public static List<TableName> tableNamesFromThrift(List<TTableName> tableNames) {
List<TableName> out = new ArrayList<>(tableNames.size());
for (TTableName tableName : tableNames) {
out.add(tableNameFromThrift(tableName));
}
return out;
}
public static TTableName tableNameFromHBase(TableName table) {
TTableName tableName = new TTableName();
tableName.setNs(table.getNamespace());
tableName.setQualifier(table.getQualifier());
return tableName;
}
public static List<TTableName> tableNamesFromHBase(List<TableName> in) {
List<TTableName> out = new ArrayList<>(in.size());
for (TableName tableName : in) {
out.add(tableNameFromHBase(tableName));
}
return out;
}
public static List<TTableName> tableNamesFromHBase(TableName[] in) {
List<TTableName> out = new ArrayList<>(in.length);
for (TableName tableName : in) {
out.add(tableNameFromHBase(tableName));
}
return out;
}
public static byte[][] splitKeyFromThrift(List<ByteBuffer> in) {
if (in == null || in.size() == 0) {
return null;
}
byte[][] out = new byte[in.size()][];
int index = 0;
for (ByteBuffer key : in) {
out[index++] = key.array();
}
return out;
}
public static BloomType bloomFilterFromThrift(TBloomFilterType in) {
switch (in.getValue()) {
case 0: return BloomType.NONE;
case 1: return BloomType.ROW;
case 2: return BloomType.ROWCOL;
case 3: return BloomType.ROWPREFIX_FIXED_LENGTH;
default: return BloomType.ROW;
}
}
public static Compression.Algorithm compressionAlgorithmFromThrift(TCompressionAlgorithm in) {
switch (in.getValue()) {
case 0: return Compression.Algorithm.LZO;
case 1: return Compression.Algorithm.GZ;
case 2: return Compression.Algorithm.NONE;
case 3: return Compression.Algorithm.SNAPPY;
case 4: return Compression.Algorithm.LZ4;
case 5: return Compression.Algorithm.BZIP2;
case 6: return Compression.Algorithm.ZSTD;
default: return Compression.Algorithm.NONE;
}
}
public static DataBlockEncoding dataBlockEncodingFromThrift(TDataBlockEncoding in) {
switch (in.getValue()) {
case 0: return DataBlockEncoding.NONE;
case 2: return DataBlockEncoding.PREFIX;
case 3: return DataBlockEncoding.DIFF;
case 4: return DataBlockEncoding.FAST_DIFF;
case 7: return DataBlockEncoding.ROW_INDEX_V1;
default: return DataBlockEncoding.NONE;
}
}
public static KeepDeletedCells keepDeletedCellsFromThrift(TKeepDeletedCells in) {
switch (in.getValue()) {
case 0: return KeepDeletedCells.FALSE;
case 1: return KeepDeletedCells.TRUE;
case 2: return KeepDeletedCells.TTL;
default: return KeepDeletedCells.FALSE;
}
}
public static ColumnFamilyDescriptor columnFamilyDescriptorFromThrift(
TColumnFamilyDescriptor in) {
ColumnFamilyDescriptorBuilder builder = ColumnFamilyDescriptorBuilder
.newBuilder(in.getName());
if (in.isSetAttributes()) {
for (Map.Entry<ByteBuffer, ByteBuffer> attribute : in.getAttributes().entrySet()) {
builder.setValue(attribute.getKey().array(), attribute.getValue().array());
}
}
if (in.isSetConfiguration()) {
for (Map.Entry<String, String> conf : in.getConfiguration().entrySet()) {
builder.setConfiguration(conf.getKey(), conf.getValue());
}
}
if (in.isSetBlockSize()) {
builder.setBlocksize(in.getBlockSize());
}
if (in.isSetBloomnFilterType()) {
builder.setBloomFilterType(bloomFilterFromThrift(in.getBloomnFilterType()));
}
if (in.isSetCompressionType()) {
builder.setCompressionType(compressionAlgorithmFromThrift(in.getCompressionType()));
}
if (in.isSetDfsReplication()) {
builder.setDFSReplication(in.getDfsReplication());
}
if (in.isSetDataBlockEncoding()) {
builder.setDataBlockEncoding(dataBlockEncodingFromThrift(in.getDataBlockEncoding()));
}
if (in.isSetKeepDeletedCells()) {
builder.setKeepDeletedCells(keepDeletedCellsFromThrift(in.getKeepDeletedCells()));
}
if (in.isSetMaxVersions()) {
builder.setMaxVersions(in.getMaxVersions());
}
if (in.isSetMinVersions()) {
builder.setMinVersions(in.getMinVersions());
}
if (in.isSetScope()) {
builder.setScope(in.getScope());
}
if (in.isSetTimeToLive()) {
builder.setTimeToLive(in.getTimeToLive());
}
if (in.isSetBlockCacheEnabled()) {
builder.setBlockCacheEnabled(in.isBlockCacheEnabled());
}
if (in.isSetCacheBloomsOnWrite()) {
builder.setCacheBloomsOnWrite(in.isCacheBloomsOnWrite());
}
if (in.isSetCacheDataOnWrite()) {
builder.setCacheDataOnWrite(in.isCacheDataOnWrite());
}
if (in.isSetCacheIndexesOnWrite()) {
builder.setCacheIndexesOnWrite(in.isCacheIndexesOnWrite());
}
if (in.isSetCompressTags()) {
builder.setCompressTags(in.isCompressTags());
}
if (in.isSetEvictBlocksOnClose()) {
builder.setEvictBlocksOnClose(in.isEvictBlocksOnClose());
}
if (in.isSetInMemory()) {
builder.setInMemory(in.isInMemory());
}
return builder.build();
}
public static NamespaceDescriptor namespaceDescriptorFromThrift(TNamespaceDescriptor in) {
NamespaceDescriptor.Builder builder = NamespaceDescriptor.create(in.getName());
if (in.isSetConfiguration()) {
for (Map.Entry<String, String> conf : in.getConfiguration().entrySet()) {
builder.addConfiguration(conf.getKey(), conf.getValue());
}
}
return builder.build();
}
public static TNamespaceDescriptor namespaceDescriptorFromHBase(NamespaceDescriptor in) {
TNamespaceDescriptor out = new TNamespaceDescriptor();
out.setName(in.getName());
for (Map.Entry<String, String> conf : in.getConfiguration().entrySet()) {
out.putToConfiguration(conf.getKey(), conf.getValue());
}
return out;
}
public static List<TNamespaceDescriptor> namespaceDescriptorsFromHBase(
NamespaceDescriptor[] in) {
List<TNamespaceDescriptor> out = new ArrayList<>(in.length);
for (NamespaceDescriptor descriptor : in) {
out.add(namespaceDescriptorFromHBase(descriptor));
}
return out;
}
public static TableDescriptor tableDescriptorFromThrift(TTableDescriptor in) {
TableDescriptorBuilder builder = TableDescriptorBuilder
.newBuilder(tableNameFromThrift(in.getTableName()));
for (TColumnFamilyDescriptor column : in.getColumns()) {
builder.setColumnFamily(columnFamilyDescriptorFromThrift(column));
}
if (in.isSetAttributes()) {
for (Map.Entry<ByteBuffer, ByteBuffer> attribute : in.getAttributes().entrySet()) {
builder.setValue(attribute.getKey().array(), attribute.getValue().array());
}
}
if (in.isSetDurability()) {
builder.setDurability(durabilityFromThrift(in.getDurability()));
}
return builder.build();
}
public static HTableDescriptor hTableDescriptorFromThrift(TTableDescriptor in) {
return new HTableDescriptor(tableDescriptorFromThrift(in));
}
public static HTableDescriptor[] hTableDescriptorsFromThrift(List<TTableDescriptor> in) {
HTableDescriptor[] out = new HTableDescriptor[in.size()];
int index = 0;
for (TTableDescriptor tTableDescriptor : in) {
out[index++] = hTableDescriptorFromThrift(tTableDescriptor);
}
return out;
}
public static List<TableDescriptor> tableDescriptorsFromThrift(List<TTableDescriptor> in) {
List<TableDescriptor> out = new ArrayList<>();
for (TTableDescriptor tableDescriptor : in) {
out.add(tableDescriptorFromThrift(tableDescriptor));
}
return out;
}
private static TDurability durabilityFromHBase(Durability durability) {
switch (durability) {
case USE_DEFAULT: return TDurability.USE_DEFAULT;
case SKIP_WAL: return TDurability.SKIP_WAL;
case ASYNC_WAL: return TDurability.ASYNC_WAL;
case SYNC_WAL: return TDurability.SYNC_WAL;
case FSYNC_WAL: return TDurability.FSYNC_WAL;
default: return null;
}
}
public static TTableDescriptor tableDescriptorFromHBase(TableDescriptor in) {
TTableDescriptor out = new TTableDescriptor();
out.setTableName(tableNameFromHBase(in.getTableName()));
Map<Bytes, Bytes> attributes = in.getValues();
for (Map.Entry<Bytes, Bytes> attribute : attributes.entrySet()) {
out.putToAttributes(ByteBuffer.wrap(attribute.getKey().get()),
ByteBuffer.wrap(attribute.getValue().get()));
}
for (ColumnFamilyDescriptor column : in.getColumnFamilies()) {
out.addToColumns(columnFamilyDescriptorFromHBase(column));
}
out.setDurability(durabilityFromHBase(in.getDurability()));
return out;
}
public static List<TTableDescriptor> tableDescriptorsFromHBase(List<TableDescriptor> in) {
List<TTableDescriptor> out = new ArrayList<>(in.size());
for (TableDescriptor descriptor : in) {
out.add(tableDescriptorFromHBase(descriptor));
}
return out;
}
public static List<TTableDescriptor> tableDescriptorsFromHBase(TableDescriptor[] in) {
List<TTableDescriptor> out = new ArrayList<>(in.length);
for (TableDescriptor descriptor : in) {
out.add(tableDescriptorFromHBase(descriptor));
}
return out;
}
public static TBloomFilterType bloomFilterFromHBase(BloomType in) {
switch (in) {
case NONE: return TBloomFilterType.NONE;
case ROW: return TBloomFilterType.ROW;
case ROWCOL: return TBloomFilterType.ROWCOL;
case ROWPREFIX_FIXED_LENGTH: return TBloomFilterType.ROWPREFIX_FIXED_LENGTH;
default: return TBloomFilterType.ROW;
}
}
public static TCompressionAlgorithm compressionAlgorithmFromHBase(Compression.Algorithm in) {
switch (in) {
case LZO: return TCompressionAlgorithm.LZO;
case GZ: return TCompressionAlgorithm.GZ;
case NONE: return TCompressionAlgorithm.NONE;
case SNAPPY: return TCompressionAlgorithm.SNAPPY;
case LZ4: return TCompressionAlgorithm.LZ4;
case BZIP2: return TCompressionAlgorithm.BZIP2;
case ZSTD: return TCompressionAlgorithm.ZSTD;
default: return TCompressionAlgorithm.NONE;
}
}
public static TDataBlockEncoding dataBlockEncodingFromHBase(DataBlockEncoding in) {
switch (in) {
case NONE: return TDataBlockEncoding.NONE;
case PREFIX: return TDataBlockEncoding.PREFIX;
case DIFF: return TDataBlockEncoding.DIFF;
case FAST_DIFF: return TDataBlockEncoding.FAST_DIFF;
case ROW_INDEX_V1: return TDataBlockEncoding.ROW_INDEX_V1;
default: return TDataBlockEncoding.NONE;
}
}
public static TKeepDeletedCells keepDeletedCellsFromHBase(KeepDeletedCells in) {
switch (in) {
case FALSE: return TKeepDeletedCells.FALSE;
case TRUE: return TKeepDeletedCells.TRUE;
case TTL: return TKeepDeletedCells.TTL;
default: return TKeepDeletedCells.FALSE;
}
}
public static TColumnFamilyDescriptor columnFamilyDescriptorFromHBase(
ColumnFamilyDescriptor in) {
TColumnFamilyDescriptor out = new TColumnFamilyDescriptor();
out.setName(in.getName());
for (Map.Entry<Bytes, Bytes> attribute : in.getValues().entrySet()) {
out.putToAttributes(ByteBuffer.wrap(attribute.getKey().get()),
ByteBuffer.wrap(attribute.getValue().get()));
}
for (Map.Entry<String, String> conf : in.getConfiguration().entrySet()) {
out.putToConfiguration(conf.getKey(), conf.getValue());
}
out.setBlockSize(in.getBlocksize());
out.setBloomnFilterType(bloomFilterFromHBase(in.getBloomFilterType()));
out.setCompressionType(compressionAlgorithmFromHBase(in.getCompressionType()));
out.setDfsReplication(in.getDFSReplication());
out.setDataBlockEncoding(dataBlockEncodingFromHBase(in.getDataBlockEncoding()));
out.setKeepDeletedCells(keepDeletedCellsFromHBase(in.getKeepDeletedCells()));
out.setMaxVersions(in.getMaxVersions());
out.setMinVersions(in.getMinVersions());
out.setScope(in.getScope());
out.setTimeToLive(in.getTimeToLive());
out.setBlockCacheEnabled(in.isBlockCacheEnabled());
out.setCacheBloomsOnWrite(in.isCacheBloomsOnWrite());
out.setCacheDataOnWrite(in.isCacheDataOnWrite());
out.setCacheIndexesOnWrite(in.isCacheIndexesOnWrite());
out.setCompressTags(in.isCompressTags());
out.setEvictBlocksOnClose(in.isEvictBlocksOnClose());
out.setInMemory(in.isInMemory());
return out;
}
private static TConsistency consistencyFromHBase(Consistency consistency) {
switch (consistency) {
case STRONG: return TConsistency.STRONG;
case TIMELINE: return TConsistency.TIMELINE;
default: return TConsistency.STRONG;
}
}
public static TGet getFromHBase(Get in) {
TGet out = new TGet();
out.setRow(in.getRow());
TTimeRange tTimeRange = new TTimeRange();
tTimeRange.setMaxStamp(in.getTimeRange().getMax()).setMinStamp(in.getTimeRange().getMin());
out.setTimeRange(tTimeRange);
out.setMaxVersions(in.getMaxVersions());
for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())),
ByteBuffer.wrap(attribute.getValue()));
}
try {
Authorizations authorizations = in.getAuthorizations();
if (authorizations != null) {
TAuthorization tAuthorization = new TAuthorization();
tAuthorization.setLabels(authorizations.getLabels());
out.setAuthorizations(tAuthorization);
}
} catch (DeserializationException e) {
throw new RuntimeException(e);
}
out.setConsistency(consistencyFromHBase(in.getConsistency()));
out.setTargetReplicaId(in.getReplicaId());
out.setCacheBlocks(in.getCacheBlocks());
out.setStoreLimit(in.getMaxResultsPerColumnFamily());
out.setStoreOffset(in.getRowOffsetPerColumnFamily());
out.setExistence_only(in.isCheckExistenceOnly());
for (Map.Entry<byte[], NavigableSet<byte[]>> family : in.getFamilyMap().entrySet()) {
if (family.getValue() != null && !family.getValue().isEmpty()) {
for (byte[] qualifier : family.getValue()) {
TColumn column = new TColumn();
column.setFamily(family.getKey());
column.setQualifier(qualifier);
out.addToColumns(column);
}
} else {
TColumn column = new TColumn();
column.setFamily(family.getKey());
out.addToColumns(column);
}
}
if (in.getFilter() != null) {
try {
out.setFilterBytes(filterFromHBase(in.getFilter()));
} catch (IOException ioE) {
throw new RuntimeException(ioE);
}
}
return out;
}
public static Cell toCell(ExtendedCellBuilder cellBuilder, byte[] row, TColumnValue columnValue) {
return cellBuilder.clear()
.setRow(row)
.setFamily(columnValue.getFamily())
.setQualifier(columnValue.getQualifier())
.setTimestamp(columnValue.getTimestamp())
.setType(columnValue.getType())
.setValue(columnValue.getValue())
.setTags(columnValue.getTags())
.build();
}
public static Result resultFromThrift(TResult in) {
if (in == null) {
return null;
}
if (!in.isSetColumnValues() || in.getColumnValues().isEmpty()){
return in.isStale() ? EMPTY_RESULT_STALE : EMPTY_RESULT;
}
List<Cell> cells = new ArrayList<>(in.getColumnValues().size());
ExtendedCellBuilder builder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
for (TColumnValue columnValue : in.getColumnValues()) {
cells.add(toCell(builder, in.getRow(), columnValue));
}
return Result.create(cells, null, in.isStale(), in.isPartial());
}
public static TPut putFromHBase(Put in) {
TPut out = new TPut();
out.setRow(in.getRow());
if (in.getTimestamp() != HConstants.LATEST_TIMESTAMP) {
out.setTimestamp(in.getTimestamp());
}
if (in.getDurability() != Durability.USE_DEFAULT) {
out.setDurability(durabilityFromHBase(in.getDurability()));
}
for (Map.Entry<byte [], List<Cell>> entry : in.getFamilyCellMap().entrySet()) {
byte[] family = entry.getKey();
for (Cell cell : entry.getValue()) {
TColumnValue columnValue = new TColumnValue();
columnValue.setFamily(family)
.setQualifier(CellUtil.cloneQualifier(cell))
.setType(cell.getType().getCode())
.setTimestamp(cell.getTimestamp())
.setValue(CellUtil.cloneValue(cell));
if (cell.getTagsLength() != 0) {
columnValue.setTags(CellUtil.cloneTags(cell));
}
out.addToColumnValues(columnValue);
}
}
for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())),
ByteBuffer.wrap(attribute.getValue()));
}
try {
CellVisibility cellVisibility = in.getCellVisibility();
if (cellVisibility != null) {
TCellVisibility tCellVisibility = new TCellVisibility();
tCellVisibility.setExpression(cellVisibility.getExpression());
out.setCellVisibility(tCellVisibility);
}
} catch (DeserializationException e) {
throw new RuntimeException(e);
}
return out;
}
public static List<TPut> putsFromHBase(List<Put> in) {
List<TPut> out = new ArrayList<>(in.size());
for (Put put : in) {
out.add(putFromHBase(put));
}
return out;
}
public static NamespaceDescriptor[] namespaceDescriptorsFromThrift(
List<TNamespaceDescriptor> in) {
NamespaceDescriptor[] out = new NamespaceDescriptor[in.size()];
int index = 0;
for (TNamespaceDescriptor descriptor : in) {
out[index++] = namespaceDescriptorFromThrift(descriptor);
}
return out;
}
public static List<TDelete> deletesFromHBase(List<Delete> in) {
List<TDelete> out = new ArrayList<>(in.size());
for (Delete delete : in) {
out.add(deleteFromHBase(delete));
}
return out;
}
public static TAppend appendFromHBase(Append in) throws IOException {
TAppend out = new TAppend();
out.setRow(in.getRow());
if (in.getDurability() != Durability.USE_DEFAULT) {
out.setDurability(durabilityFromHBase(in.getDurability()));
}
for (Map.Entry<byte [], List<Cell>> entry : in.getFamilyCellMap().entrySet()) {
byte[] family = entry.getKey();
for (Cell cell : entry.getValue()) {
TColumnValue columnValue = new TColumnValue();
columnValue.setFamily(family)
.setQualifier(CellUtil.cloneQualifier(cell))
.setType(cell.getType().getCode())
.setTimestamp(cell.getTimestamp())
.setValue(CellUtil.cloneValue(cell));
if (cell.getTagsLength() != 0) {
columnValue.setTags(CellUtil.cloneTags(cell));
}
out.addToColumns(columnValue);
}
}
for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())),
ByteBuffer.wrap(attribute.getValue()));
}
try {
CellVisibility cellVisibility = in.getCellVisibility();
if (cellVisibility != null) {
TCellVisibility tCellVisibility = new TCellVisibility();
tCellVisibility.setExpression(cellVisibility.getExpression());
out.setCellVisibility(tCellVisibility);
}
} catch (DeserializationException e) {
throw new RuntimeException(e);
}
out.setReturnResults(in.isReturnResults());
return out;
}
public static TIncrement incrementFromHBase(Increment in) throws IOException {
TIncrement out = new TIncrement();
out.setRow(in.getRow());
if (in.getDurability() != Durability.USE_DEFAULT) {
out.setDurability(durabilityFromHBase(in.getDurability()));
}
for (Map.Entry<byte [], List<Cell>> entry : in.getFamilyCellMap().entrySet()) {
byte[] family = entry.getKey();
for (Cell cell : entry.getValue()) {
TColumnIncrement columnValue = new TColumnIncrement();
columnValue.setFamily(family).setQualifier(CellUtil.cloneQualifier(cell));
columnValue.setAmount(
Bytes.toLong(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
out.addToColumns(columnValue);
}
}
for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())),
ByteBuffer.wrap(attribute.getValue()));
}
try {
CellVisibility cellVisibility = in.getCellVisibility();
if (cellVisibility != null) {
TCellVisibility tCellVisibility = new TCellVisibility();
tCellVisibility.setExpression(cellVisibility.getExpression());
out.setCellVisibility(tCellVisibility);
}
} catch (DeserializationException e) {
throw new RuntimeException(e);
}
out.setReturnResults(in.isReturnResults());
return out;
}
public static TRowMutations rowMutationsFromHBase(RowMutations in) {
TRowMutations tRowMutations = new TRowMutations();
tRowMutations.setRow(in.getRow());
for (Mutation mutation : in.getMutations()) {
TMutation tMutation = new TMutation();
if (mutation instanceof Put) {
tMutation.setPut(ThriftUtilities.putFromHBase((Put)mutation));
} else if (mutation instanceof Delete) {
tMutation.setDeleteSingle(ThriftUtilities.deleteFromHBase((Delete)mutation));
} else {
        throw new IllegalArgumentException(
            "Only Put and Delete are supported in mutateRow, but mutation=" + mutation);
}
tRowMutations.addToMutations(tMutation);
}
return tRowMutations;
}
public static TCompareOperator compareOpFromHBase(CompareOperator compareOp) {
switch (compareOp) {
case LESS: return TCompareOperator.LESS;
case LESS_OR_EQUAL: return TCompareOperator.LESS_OR_EQUAL;
case EQUAL: return TCompareOperator.EQUAL;
case NOT_EQUAL: return TCompareOperator.NOT_EQUAL;
case GREATER_OR_EQUAL: return TCompareOperator.GREATER_OR_EQUAL;
case GREATER: return TCompareOperator.GREATER;
case NO_OP: return TCompareOperator.NO_OP;
default: return null;
}
}
public static List<ByteBuffer> splitKeyFromHBase(byte[][] in) {
if (in == null || in.length == 0) {
return null;
}
List<ByteBuffer> out = new ArrayList<>(in.length);
for (byte[] key : in) {
out.add(ByteBuffer.wrap(key));
}
return out;
}
public static Result[] resultsFromThrift(List<TResult> in) {
Result[] out = new Result[in.size()];
int index = 0;
for (TResult tResult : in) {
out[index++] = resultFromThrift(tResult);
}
return out;
}
public static List<TGet> getsFromHBase(List<Get> in) {
List<TGet> out = new ArrayList<>(in.size());
for (Get get : in) {
out.add(getFromHBase(get));
}
return out;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.utils.memory;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.utils.concurrent.OpOrder;
import org.apache.cassandra.utils.concurrent.WaitQueue;
public abstract class MemtableAllocator
{
private final SubAllocator onHeap;
private final SubAllocator offHeap;
volatile LifeCycle state = LifeCycle.LIVE;
enum LifeCycle
{
LIVE, DISCARDING, DISCARDED;
LifeCycle transition(LifeCycle targetState)
{
switch (targetState)
{
case DISCARDING:
assert this == LifeCycle.LIVE;
return LifeCycle.DISCARDING;
case DISCARDED:
assert this == LifeCycle.DISCARDING;
return LifeCycle.DISCARDED;
default:
throw new IllegalStateException();
}
}
}
MemtableAllocator(SubAllocator onHeap, SubAllocator offHeap)
{
this.onHeap = onHeap;
this.offHeap = offHeap;
}
public abstract Row.Builder rowBuilder(OpOrder.Group opGroup);
public abstract DecoratedKey clone(DecoratedKey key, OpOrder.Group opGroup);
public abstract DataReclaimer reclaimer();
public abstract EnsureOnHeap ensureOnHeap();
public SubAllocator onHeap()
{
return onHeap;
}
public SubAllocator offHeap()
{
return offHeap;
}
/**
* Mark this allocator reclaiming; this will permit any outstanding allocations to temporarily
* overshoot the maximum memory limit so that flushing can begin immediately
*/
public void setDiscarding()
{
state = state.transition(LifeCycle.DISCARDING);
// mark the memory owned by this allocator as reclaiming
onHeap.markAllReclaiming();
offHeap.markAllReclaiming();
}
/**
* Indicate the memory and resources owned by this allocator are no longer referenced,
* and can be reclaimed/reused.
*/
public void setDiscarded()
{
state = state.transition(LifeCycle.DISCARDED);
// release any memory owned by this allocator; automatically signals waiters
onHeap.releaseAll();
offHeap.releaseAll();
}
public boolean isLive()
{
return state == LifeCycle.LIVE;
}
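    /*
     * Illustrative lifecycle sketch (not part of the original class), assuming 'allocator' is an
     * instance of a concrete MemtableAllocator subclass and 'opGroup' an active OpOrder.Group:
     *
     *   allocator.onHeap().allocate(size, opGroup);   // served while LIVE
     *   allocator.setDiscarding();                    // LIVE -> DISCARDING, memory marked reclaiming
     *   allocator.setDiscarded();                     // DISCARDING -> DISCARDED, memory released
     */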
public static interface DataReclaimer
{
public DataReclaimer reclaim(Row row);
public DataReclaimer reclaimImmediately(Row row);
public DataReclaimer reclaimImmediately(DecoratedKey key);
public void cancel();
public void commit();
}
public static final DataReclaimer NO_OP = new DataReclaimer()
{
public DataReclaimer reclaim(Row update)
{
return this;
}
public DataReclaimer reclaimImmediately(Row update)
{
return this;
}
public DataReclaimer reclaimImmediately(DecoratedKey key)
{
return this;
}
@Override
public void cancel()
{}
@Override
public void commit()
{}
};
    /** Tracks the memory this allocator owns from a parent pool, permitting it to be marked reclaiming and released */
public static final class SubAllocator
{
// the tracker we are owning memory from
private final MemtablePool.SubPool parent;
// the amount of memory/resource owned by this object
private volatile long owns;
// the amount of memory we are reporting to collect; this may be inaccurate, but is close
// and is used only to ensure that once we have reclaimed we mark the tracker with the same amount
private volatile long reclaiming;
SubAllocator(MemtablePool.SubPool parent)
{
this.parent = parent;
}
// should only be called once we know we will never allocate to the object again.
// currently no corroboration/enforcement of this is performed.
void releaseAll()
{
parent.released(ownsUpdater.getAndSet(this, 0));
parent.reclaimed(reclaimingUpdater.getAndSet(this, 0));
}
// like allocate, but permits allocations to be negative
public void adjust(long size, OpOrder.Group opGroup)
{
if (size <= 0)
released(-size);
else
allocate(size, opGroup);
}
// allocate memory in the tracker, and mark ourselves as owning it
public void allocate(long size, OpOrder.Group opGroup)
{
assert size >= 0;
while (true)
{
if (parent.tryAllocate(size))
{
acquired(size);
return;
}
WaitQueue.Signal signal = opGroup.isBlockingSignal(parent.hasRoom().register());
boolean allocated = parent.tryAllocate(size);
if (allocated || opGroup.isBlocking())
{
signal.cancel();
if (allocated) // if we allocated, take ownership
acquired(size);
                    else // otherwise we're blocking, so we're permitted to overshoot our constraints and allocate without blocking
allocated(size);
return;
}
else
signal.awaitUninterruptibly();
}
}
// retroactively mark an amount allocated and acquired in the tracker, and owned by us
private void allocated(long size)
{
parent.allocated(size);
ownsUpdater.addAndGet(this, size);
}
// retroactively mark an amount acquired in the tracker, and owned by us
private void acquired(long size)
{
parent.acquired(size);
ownsUpdater.addAndGet(this, size);
}
void released(long size)
{
parent.released(size);
ownsUpdater.addAndGet(this, -size);
}
// mark everything we currently own as reclaiming, both here and in our parent
void markAllReclaiming()
{
while (true)
{
long cur = owns;
long prev = reclaiming;
if (!reclaimingUpdater.compareAndSet(this, prev, cur))
continue;
parent.reclaiming(cur - prev);
return;
}
}
public long owns()
{
return owns;
}
public float ownershipRatio()
{
float r = owns / (float) parent.limit;
if (Float.isNaN(r))
return 0;
return r;
}
private static final AtomicLongFieldUpdater<SubAllocator> ownsUpdater = AtomicLongFieldUpdater.newUpdater(SubAllocator.class, "owns");
private static final AtomicLongFieldUpdater<SubAllocator> reclaimingUpdater = AtomicLongFieldUpdater.newUpdater(SubAllocator.class, "reclaiming");
}
}
|
|
/*
* Copyright 2017. Dmitry Malkovich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dmitrymalkovich.android.githubanalytics.data.source.local;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.dmitrymalkovich.android.githubanalytics.Utils;
import com.dmitrymalkovich.android.githubanalytics.data.source.local.contract.ClonesContract;
import com.dmitrymalkovich.android.githubanalytics.data.source.local.contract.ReferrerContract;
import com.dmitrymalkovich.android.githubanalytics.data.source.local.contract.RepositoryContract;
import com.dmitrymalkovich.android.githubanalytics.data.source.local.contract.StargazersContract;
import com.dmitrymalkovich.android.githubanalytics.data.source.local.contract.TrendingContract;
import com.dmitrymalkovich.android.githubanalytics.data.source.local.contract.UserContract;
import com.dmitrymalkovich.android.githubanalytics.data.source.local.contract.ViewsContract;
public class GithubDataProvider extends ContentProvider {
private static final int REPOSITORIES = 300;
private static final int REFERRERS = 400;
private static final int CLONES = 401;
private static final int VIEWS = 402;
private static final int TRENDING = 500;
private static final int STARGAZERS = 600;
private static final int REPOSITORIES_STARGAZERS = 700;
private static final int USERS = 801;
private static final UriMatcher sUriMatcher = buildUriMatcher();
private static final SQLiteQueryBuilder sRepositoryByVisitorsAndStarsQueryBuilder;
public static String LOG_TAG = GithubDataProvider.class.getSimpleName();
static {
sRepositoryByVisitorsAndStarsQueryBuilder = new SQLiteQueryBuilder();
sRepositoryByVisitorsAndStarsQueryBuilder.setTables(
RepositoryContract.RepositoryEntry.TABLE_NAME
+ " LEFT JOIN (SELECT stargazers.repository_id, COUNT(stargazers.timestamp) as stars FROM stargazers WHERE timestamp >= "
+ Utils.today() +
" GROUP BY stargazers.repository_id) as stargazers ON stargazers.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT stargazers.repository_id, COUNT(stargazers.timestamp) as stars FROM stargazers WHERE timestamp >= "
+ Utils.yesterday() + " AND timestamp < " + Utils.today() +
" GROUP BY stargazers.repository_id) as stargazers_yesterday ON stargazers_yesterday.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT stargazers.repository_id, COUNT(stargazers.timestamp) as stars FROM stargazers WHERE timestamp >= "
+ Utils.twoWeeksAgo() +
" GROUP BY stargazers.repository_id) as stargazers_two_weeks ON stargazers_two_weeks.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT traffic_views.repository_id, traffic_views.uniques, traffic_views.count FROM traffic_views WHERE timestamp >= "
+ Utils.today() +
") as traffic_views ON traffic_views.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT traffic_views.repository_id, traffic_views.uniques, traffic_views.count FROM traffic_views WHERE timestamp >= "
+ Utils.yesterday() + " AND timestamp < " + Utils.today() +
") as traffic_views_yesterday ON traffic_views_yesterday.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT traffic_views.repository_id, SUM(traffic_views.uniques) as uniques, SUM(traffic_views.count) as count FROM traffic_views WHERE timestamp >= "
+ Utils.twoWeeksAgo() +
" GROUP BY traffic_views.repository_id) as traffic_views_two_weeks ON traffic_views_two_weeks.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT traffic_clones.repository_id, traffic_clones.uniques, traffic_clones.count FROM traffic_clones WHERE timestamp >= "
+ Utils.today() +
") as traffic_clones ON traffic_clones.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT traffic_clones.repository_id, traffic_clones.uniques, traffic_clones.count FROM traffic_clones WHERE timestamp >= "
                    + Utils.yesterday() + " AND timestamp < " + Utils.today() +
") as traffic_clones_yesterday ON traffic_clones_yesterday.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT traffic_clones.repository_id, SUM(traffic_clones.uniques) as uniques, SUM(traffic_clones.count) as count FROM traffic_clones WHERE timestamp >= "
+ Utils.twoWeeksAgo() +
" GROUP BY traffic_clones.repository_id) as traffic_clones_two_weeks ON traffic_clones_two_weeks.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT traffic_paths._id, traffic_paths.repository_id, traffic_paths.referrer, MAX(traffic_paths.count) as count, traffic_paths.uniques as uniques FROM traffic_paths GROUP BY traffic_paths.repository_id) as traffic_paths_1 ON traffic_paths_1.repository_id = repository.repository_id"
+ " LEFT JOIN (SELECT traffic_paths._id, traffic_paths.repository_id, traffic_paths.referrer, " +
" MAX(traffic_paths.count) as count, traffic_paths.uniques as uniques " +
" FROM traffic_paths WHERE traffic_paths.count " +
" < (SELECT MAX(tp2.count) FROM traffic_paths as tp2 WHERE tp2.repository_id = traffic_paths.repository_id GROUP BY tp2.repository_id) " +
" GROUP BY traffic_paths.repository_id) as traffic_paths_2 " +
" ON traffic_paths_2.repository_id = repository.repository_id "
);
}
private GithubAnalyticsDbHelper mOpenHelper;
private static UriMatcher buildUriMatcher() {
final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
final String authority = RepositoryContract.CONTENT_AUTHORITY;
matcher.addURI(authority, RepositoryContract.PATH_REPOSITORY, REPOSITORIES);
matcher.addURI(authority, ReferrerContract.PATH_REFERRERS, REFERRERS);
matcher.addURI(authority, ViewsContract.PATH_VIEWS, VIEWS);
matcher.addURI(authority, ClonesContract.PATH_CLONES, CLONES);
matcher.addURI(authority, TrendingContract.PATH_TRENDING, TRENDING);
matcher.addURI(authority, StargazersContract.PATH_STARGAZERS, STARGAZERS);
matcher.addURI(authority, RepositoryContract.PATH_REPOSITORY_STARGAZERS, REPOSITORIES_STARGAZERS);
matcher.addURI(authority, UserContract.PATH_USERS, USERS);
return matcher;
}
@Override
public boolean onCreate() {
mOpenHelper = new GithubAnalyticsDbHelper(getContext());
return true;
}
@Nullable
@Override
public Cursor query(@NonNull Uri uri, String[] projection, String selection, String[] selectionArgs,
String sortOrder) {
Cursor cursor;
switch (sUriMatcher.match(uri)) {
case REPOSITORIES: {
cursor = performQuery(RepositoryContract.RepositoryEntry.TABLE_NAME,
projection, selection, selectionArgs, sortOrder);
break;
}
case REFERRERS: {
cursor = performQuery(ReferrerContract.ReferrerEntry.TABLE_NAME,
projection, selection, selectionArgs, sortOrder);
break;
}
case CLONES: {
cursor = performQuery(ClonesContract.ClonesEntry.TABLE_NAME,
projection, selection, selectionArgs, sortOrder);
break;
}
case VIEWS: {
cursor = performQuery(ViewsContract.ViewsEntry.TABLE_NAME,
projection, selection, selectionArgs, sortOrder);
break;
}
case TRENDING: {
cursor = performQuery(TrendingContract.TrendingEntry.TABLE_NAME,
projection, selection, selectionArgs, sortOrder);
break;
}
case STARGAZERS: {
cursor = performQuery(StargazersContract.Entry.TABLE_NAME,
projection, selection, selectionArgs, sortOrder);
break;
}
case REPOSITORIES_STARGAZERS: {
cursor = performQuery(sRepositoryByVisitorsAndStarsQueryBuilder,
projection, selection, selectionArgs, sortOrder);
break;
}
case USERS: {
cursor = performQuery(UserContract.UsersEntry.TABLE_NAME,
projection, selection, selectionArgs, sortOrder);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
if (getContext() != null) {
cursor.setNotificationUri(getContext().getContentResolver(), uri);
}
return cursor;
}
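    // Illustrative only (hypothetical call site): clients read the aggregated
    // repository/stargazers join through a ContentResolver rather than running the SQL above
    // directly, e.g.
    //   Cursor c = context.getContentResolver().query(
    //       RepositoryContract.RepositoryEntry.CONTENT_URI_REPOSITORY_STARGAZERS,
    //       null, null, null, null);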
private Cursor performQuery(SQLiteQueryBuilder queryBuilder, String[] projection, String selection,
String[] selectionArgs, String sortOrder) {
return queryBuilder.query(
mOpenHelper.getReadableDatabase(),
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
}
private Cursor performQuery(String tableName, String[] projection, String selection,
String[] selectionArgs, String sortOrder) {
return mOpenHelper.getReadableDatabase().query(
tableName,
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
}
@Nullable
@Override
public String getType(@NonNull Uri uri) {
final int match = sUriMatcher.match(uri);
switch (match) {
case REPOSITORIES:
return RepositoryContract.RepositoryEntry.CONTENT_TYPE;
case REFERRERS:
return ReferrerContract.ReferrerEntry.CONTENT_TYPE;
case CLONES:
return ClonesContract.ClonesEntry.CONTENT_TYPE;
case VIEWS:
return ViewsContract.ViewsEntry.CONTENT_TYPE;
case TRENDING:
return TrendingContract.TrendingEntry.CONTENT_TYPE;
case STARGAZERS:
return StargazersContract.Entry.CONTENT_TYPE;
case REPOSITORIES_STARGAZERS:
return RepositoryContract.RepositoryEntry.CONTENT_TYPE_REPOSITORY_STARGAZERS;
case USERS:
return UserContract.UsersEntry.CONTENT_TYPE;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
}
@Nullable
@Override
public Uri insert(@NonNull Uri uri, ContentValues values) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
Uri returnUri;
switch (match) {
case REPOSITORIES: {
long id = db.insert(RepositoryContract.RepositoryEntry.TABLE_NAME, null, values);
if (id > 0) {
returnUri = RepositoryContract.RepositoryEntry.buildRepositoryUri(id);
} else {
throw new android.database.SQLException("Failed to insert row into " + uri);
}
break;
}
case REFERRERS: {
long id = db.insert(ReferrerContract.ReferrerEntry.TABLE_NAME, null, values);
if (id > 0) {
returnUri = ReferrerContract.ReferrerEntry.buildUri(id);
} else {
throw new android.database.SQLException("Failed to insert row into " + uri);
}
break;
}
case VIEWS: {
long id = db.insert(ViewsContract.ViewsEntry.TABLE_NAME, null, values);
if (id > 0) {
returnUri = ViewsContract.ViewsEntry.buildUri(id);
} else {
throw new android.database.SQLException("Failed to insert row into " + uri);
}
break;
}
case CLONES: {
long id = db.insert(ClonesContract.ClonesEntry.TABLE_NAME, null, values);
if (id > 0) {
returnUri = ClonesContract.ClonesEntry.buildUri(id);
} else {
throw new android.database.SQLException("Failed to insert row into " + uri);
}
break;
}
case TRENDING: {
long id = db.insert(TrendingContract.TrendingEntry.TABLE_NAME, null, values);
if (id > 0) {
returnUri = TrendingContract.TrendingEntry.buildUri(id);
} else {
throw new android.database.SQLException("Failed to insert row into " + uri);
}
break;
}
case STARGAZERS: {
long id = db.insert(StargazersContract.Entry.TABLE_NAME, null, values);
if (id > 0) {
returnUri = StargazersContract.Entry.buildUri(id);
} else {
throw new android.database.SQLException("Failed to insert row into " + uri);
}
break;
}
case USERS: {
long id = db.insert(UserContract.UsersEntry.TABLE_NAME, null, values);
if (id > 0) {
returnUri = UserContract.UsersEntry.buildUri(id);
} else {
throw new android.database.SQLException("Failed to insert row into " + uri);
}
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
if (getContext() != null) {
getContext().getContentResolver().notifyChange(uri, null);
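            // Rows in these tables feed the joined repository/stargazers view, so observers of
            // that derived URI are notified as well.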
if (match == STARGAZERS || match == REPOSITORIES || match == CLONES
|| match == VIEWS || match == REFERRERS) {
getContext().getContentResolver().notifyChange(
RepositoryContract.RepositoryEntry.CONTENT_URI_REPOSITORY_STARGAZERS, null);
}
}
return returnUri;
}
@Override
public int delete(@NonNull Uri uri, String sourceSelection, String[] selectionArgs) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
int rowsDeleted;
String selection = sourceSelection;
if (null == selection) {
selection = "1";
}
switch (match) {
case REPOSITORIES:
rowsDeleted = db.delete(
RepositoryContract.RepositoryEntry.TABLE_NAME, selection, selectionArgs);
break;
case REFERRERS:
rowsDeleted = db.delete(
ReferrerContract.ReferrerEntry.TABLE_NAME, selection, selectionArgs);
break;
case CLONES:
rowsDeleted = db.delete(
ClonesContract.ClonesEntry.TABLE_NAME, selection, selectionArgs);
break;
case VIEWS:
rowsDeleted = db.delete(
ViewsContract.ViewsEntry.TABLE_NAME, selection, selectionArgs);
break;
case TRENDING:
rowsDeleted = db.delete(
TrendingContract.TrendingEntry.TABLE_NAME, selection, selectionArgs);
break;
case STARGAZERS:
rowsDeleted = db.delete(
StargazersContract.Entry.TABLE_NAME, selection, selectionArgs);
break;
case USERS:
rowsDeleted = db.delete(
UserContract.UsersEntry.TABLE_NAME, selection, selectionArgs);
break;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
if (rowsDeleted != 0 && getContext() != null) {
getContext().getContentResolver().notifyChange(uri, null);
}
return rowsDeleted;
}
@Override
public int update(@NonNull Uri uri, ContentValues values, String selection, String[] selectionArgs) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
int rowsUpdated;
switch (match) {
case REPOSITORIES:
rowsUpdated = db.update(RepositoryContract.RepositoryEntry.TABLE_NAME, values, selection,
selectionArgs);
break;
case REFERRERS:
rowsUpdated = db.update(ReferrerContract.ReferrerEntry.TABLE_NAME, values, selection,
selectionArgs);
break;
case CLONES:
rowsUpdated = db.update(ClonesContract.ClonesEntry.TABLE_NAME, values, selection,
selectionArgs);
break;
case VIEWS:
rowsUpdated = db.update(ViewsContract.ViewsEntry.TABLE_NAME, values, selection,
selectionArgs);
break;
case TRENDING:
rowsUpdated = db.update(TrendingContract.TrendingEntry.TABLE_NAME, values, selection,
selectionArgs);
break;
case STARGAZERS:
rowsUpdated = db.update(StargazersContract.Entry.TABLE_NAME, values, selection,
selectionArgs);
break;
case USERS:
rowsUpdated = db.update(UserContract.UsersEntry.TABLE_NAME, values, selection,
selectionArgs);
break;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
if (rowsUpdated != 0 && getContext() != null) {
getContext().getContentResolver().notifyChange(uri, null);
if (match == STARGAZERS || match == REPOSITORIES || match == CLONES
|| match == VIEWS || match == REFERRERS) {
getContext().getContentResolver().notifyChange(
RepositoryContract.RepositoryEntry.CONTENT_URI_REPOSITORY_STARGAZERS, null);
getContext().getContentResolver().notifyChange(
RepositoryContract.RepositoryEntry.CONTENT_URI, null);
}
}
return rowsUpdated;
}
}
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.storage.file.share;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.storage.common.StorageSharedKeyCredential;
import com.azure.storage.common.sas.AccountSasPermission;
import com.azure.storage.common.sas.AccountSasResourceType;
import com.azure.storage.common.sas.AccountSasService;
import com.azure.storage.common.sas.AccountSasSignatureValues;
import com.azure.storage.file.share.models.ShareAccessTier;
import com.azure.storage.file.share.models.ShareServiceProperties;
import com.azure.storage.file.share.models.ListSharesOptions;
import com.azure.storage.file.share.options.ShareCreateOptions;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Collections;
import java.util.Map;
/**
* Contains code snippets when generating javadocs through doclets for {@link ShareServiceClient} and {@link ShareServiceAsyncClient}.
*/
public class ShareServiceJavaDocCodeSamples {
private String key1 = "key1";
private String value1 = "val1";
/**
* Generates code sample for {@link ShareServiceClient} instantiation.
*/
public void initialization() {
// BEGIN: com.azure.storage.file.share.ShareServiceClient.instantiation
ShareServiceClient client = new ShareServiceClientBuilder()
.connectionString("${connectionString}")
.endpoint("${endpoint}")
.buildClient();
// END: com.azure.storage.file.share.ShareServiceClient.instantiation
}
/**
* Generates code sample for {@link ShareServiceAsyncClient} instantiation.
*/
public void asyncInitialization() {
        // BEGIN: com.azure.storage.file.share.ShareServiceAsyncClient.instantiation
ShareServiceAsyncClient client = new ShareServiceClientBuilder()
.connectionString("${connectionString}")
.endpoint("${endpoint}")
.buildAsyncClient();
        // END: com.azure.storage.file.share.ShareServiceAsyncClient.instantiation
}
/**
* Generates code sample for creating a {@link ShareServiceClient} with SAS token.
* @return An instance of {@link ShareServiceClient}
*/
public ShareServiceClient createClientWithSASToken() {
// BEGIN: com.azure.storage.file.share.ShareServiceClient.instantiation.sastoken
ShareServiceClient fileServiceClient = new ShareServiceClientBuilder()
.endpoint("https://${accountName}.file.core.windows.net?${SASToken}")
.buildClient();
// END: com.azure.storage.file.share.ShareServiceClient.instantiation.sastoken
return fileServiceClient;
}
/**
* Generates code sample for creating a {@link ShareServiceClient} with SAS token.
     * {@code SASTokenQueryParams} is the query-parameter portion of a SAS token.
* @return An instance of {@link ShareServiceClient}
*/
public ShareServiceClient createClientWithCredential() {
// BEGIN: com.azure.storage.file.share.ShareServiceClient.instantiation.credential
ShareServiceClient fileServiceClient = new ShareServiceClientBuilder()
.endpoint("https://{accountName}.file.core.windows.net")
.sasToken("${SASTokenQueryParams}")
.buildClient();
// END: com.azure.storage.file.share.ShareServiceClient.instantiation.credential
return fileServiceClient;
}
/**
     * Generates code sample for creating a {@link ShareServiceClient} with a {@code connectionString} that resolves to a {@link StorageSharedKeyCredential}.
* @return An instance of {@link ShareServiceClient}
*/
public ShareServiceClient createClientWithConnectionString() {
// BEGIN: com.azure.storage.file.share.ShareServiceClient.instantiation.connectionstring
String connectionString = "DefaultEndpointsProtocol=https;AccountName={name};AccountKey={key};"
+ "EndpointSuffix={core.windows.net}";
ShareServiceClient fileServiceClient = new ShareServiceClientBuilder()
.connectionString(connectionString)
.buildClient();
// END: com.azure.storage.file.share.ShareServiceClient.instantiation.connectionstring
return fileServiceClient;
}
/**
* Generates a code sample for using {@link ShareServiceClient#createShare(String)}
*/
public void createShare() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.createShare#string
fileServiceClient.createShare("myshare");
System.out.println("Creating the share completed.");
// END: com.azure.storage.file.share.ShareServiceClient.createShare#string
}
/**
* Generates a code sample for using {@link ShareServiceClient#createShareWithResponse(String, Map, Integer,
* Duration, Context)} with metadata
*/
public void createShareWithMetadata() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: ShareServiceClient.createShareWithResponse#string-map-integer-duration-context
Response<ShareClient> response = fileServiceClient.createShareWithResponse("test",
Collections.singletonMap("share", "metadata"), null, Duration.ofSeconds(1),
new Context(key1, value1));
System.out.printf("Creating the share completed with status code %d", response.getStatusCode());
// END: ShareServiceClient.createShareWithResponse#string-map-integer-duration-context
}
/**
* Generates a code sample for using {@link ShareServiceClient#createShareWithResponse(String, ShareCreateOptions,
* Duration, Context)} with metadata
*/
public void createShareWithOptions() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: ShareServiceClient.createShareWithResponse#String-ShareCreateOptions-Duration-Context
Response<ShareClient> response = fileServiceClient.createShareWithResponse("test",
new ShareCreateOptions().setMetadata(Collections.singletonMap("share", "metadata")).setQuotaInGb(1)
.setAccessTier(ShareAccessTier.HOT), Duration.ofSeconds(1), new Context(key1, value1));
System.out.printf("Creating the share completed with status code %d", response.getStatusCode());
// END: ShareServiceClient.createShareWithResponse#String-ShareCreateOptions-Duration-Context
}
/**
* Generates a code sample for using {@link ShareServiceClient#listShares()}
*/
public void listShares() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.listShares
fileServiceClient.listShares().forEach(
shareItem -> System.out.printf("Share %s exists in the account", shareItem.getName())
);
// END: com.azure.storage.file.share.ShareServiceClient.listShares
}
/**
     * Generates a code sample for using {@link ShareServiceClient#listShares(ListSharesOptions, Duration, Context)} filtered by prefix.
*/
public void listSharesWithPrefix() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: ShareServiceClient.listShares#ListSharesOptions-Duration-Context1
fileServiceClient.listShares(new ListSharesOptions().setPrefix("azure"), Duration.ofSeconds(1),
new Context(key1, value1)).forEach(
shareItem -> System.out.printf("Share %s exists in the account", shareItem.getName())
);
// END: ShareServiceClient.listShares#ListSharesOptions-Duration-Context1
}
/**
* Generates a code sample for using {@link ShareServiceClient#listShares(ListSharesOptions, Duration, Context)}
     * that includes metadata and snapshots.
*/
public void listSharesWithMetadataAndSnapshot() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: ShareServiceClient.listShares#ListSharesOptions-Duration-Context2
fileServiceClient.listShares(new ListSharesOptions().setIncludeMetadata(true)
.setIncludeSnapshots(true), Duration.ofSeconds(1), new Context(key1, value1)).forEach(
shareItem -> System.out.printf("Share %s exists in the account", shareItem.getName())
);
// END: ShareServiceClient.listShares#ListSharesOptions-Duration-Context2
}
/**
* Generates a code sample for using {@link ShareServiceClient#deleteShare(String)}
*/
public void deleteShare() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.deleteShare#string
fileServiceClient.deleteShare("myshare");
// END: com.azure.storage.file.share.ShareServiceClient.deleteShare#string
}
/**
* Generates a code sample for using {@link ShareServiceClient#deleteShareWithResponse(String, String,
* Duration, Context)}
*/
public void deleteShareMaxOverload() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.deleteShareWithResponse#string-string-duration-context
OffsetDateTime midnight = OffsetDateTime.of(LocalDateTime.now(), ZoneOffset.UTC);
Response<Void> response = fileServiceClient.deleteShareWithResponse("test", midnight.toString(),
Duration.ofSeconds(1), new Context(key1, value1));
System.out.printf("Deleting the snapshot completed with status code %d", response.getStatusCode());
// END: com.azure.storage.file.share.ShareServiceClient.deleteShareWithResponse#string-string-duration-context
}
/**
* Generates a code sample for using {@link ShareServiceClient#getProperties()}
*/
public void getProperties() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.getProperties
ShareServiceProperties properties = fileServiceClient.getProperties();
System.out.printf("Hour metrics enabled: %b, Minute metrics enabled: %b", properties.getHourMetrics().isEnabled(),
properties.getMinuteMetrics().isEnabled());
// END: com.azure.storage.file.share.ShareServiceClient.getProperties
}
/**
* Generates a code sample for using {@link ShareServiceClient#getPropertiesWithResponse(Duration, Context)}
*/
public void getPropertiesWithResponse() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.getPropertiesWithResponse#duration-context
ShareServiceProperties properties = fileServiceClient.getPropertiesWithResponse(
Duration.ofSeconds(1), new Context(key1, value1)).getValue();
System.out.printf("Hour metrics enabled: %b, Minute metrics enabled: %b", properties.getHourMetrics().isEnabled(),
properties.getMinuteMetrics().isEnabled());
// END: com.azure.storage.file.share.ShareServiceClient.getPropertiesWithResponse#duration-context
}
/**
* Generates a code sample for using {@link ShareServiceClient#setProperties(ShareServiceProperties)}
*/
public void setProperties() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.setProperties#fileServiceProperties
ShareServiceProperties properties = fileServiceClient.getProperties();
properties.getMinuteMetrics().setEnabled(true);
properties.getHourMetrics().setEnabled(true);
fileServiceClient.setProperties(properties);
System.out.println("Setting File service properties completed.");
// END: com.azure.storage.file.share.ShareServiceClient.setProperties#fileServiceProperties
}
/**
     * Generates a code sample for using {@link ShareServiceClient#setPropertiesWithResponse(ShareServiceProperties, Duration, Context)}.
*/
public void setPropertiesWithResponse() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.setPropertiesWithResponse#fileServiceProperties-Context
ShareServiceProperties properties = fileServiceClient.getPropertiesWithResponse(
Duration.ofSeconds(1), new Context(key1, value1)).getValue();
properties.getMinuteMetrics().setEnabled(true);
properties.getHourMetrics().setEnabled(true);
Response<Void> response = fileServiceClient.setPropertiesWithResponse(properties,
Duration.ofSeconds(1), new Context(key1, value1));
System.out.printf("Setting File service properties completed with status code %d", response.getStatusCode());
// END: com.azure.storage.file.share.ShareServiceClient.setPropertiesWithResponse#fileServiceProperties-Context
}
/**
     * Generates a code sample for using {@link ShareServiceClient#setPropertiesWithResponse(ShareServiceProperties, Duration, Context)} to clear CORS in the file service.
*/
public void clearProperties() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.setPropertiesWithResponse#fileServiceProperties-Context.clearCORS
ShareServiceProperties properties = fileServiceClient.getProperties();
properties.setCors(Collections.emptyList());
Response<Void> response = fileServiceClient.setPropertiesWithResponse(properties,
Duration.ofSeconds(1), new Context(key1, value1));
System.out.printf("Setting File service properties completed with status code %d", response.getStatusCode());
// END: com.azure.storage.file.share.ShareServiceClient.setPropertiesWithResponse#fileServiceProperties-Context.clearCORS
}
/**
* Code snippet for {@link ShareServiceClient#generateAccountSas(AccountSasSignatureValues)}
*/
public void generateAccountSas() {
ShareServiceClient fileServiceClient = createClientWithCredential();
// BEGIN: com.azure.storage.file.share.ShareServiceClient.generateAccountSas#AccountSasSignatureValues
AccountSasPermission permissions = new AccountSasPermission()
.setListPermission(true)
.setReadPermission(true);
AccountSasResourceType resourceTypes = new AccountSasResourceType().setContainer(true);
AccountSasService services = new AccountSasService().setBlobAccess(true).setFileAccess(true);
OffsetDateTime expiryTime = OffsetDateTime.now().plus(Duration.ofDays(2));
AccountSasSignatureValues sasValues =
new AccountSasSignatureValues(expiryTime, permissions, services, resourceTypes);
// Client must be authenticated via StorageSharedKeyCredential
String sas = fileServiceClient.generateAccountSas(sasValues);
// END: com.azure.storage.file.share.ShareServiceClient.generateAccountSas#AccountSasSignatureValues
}
/**
* Code snippet for {@link ShareServiceClient#undeleteShare(String, String)}.
*/
public void undeleteShare() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
Context context = new Context("Key", "Value");
// BEGIN: com.azure.storage.file.share.ShareServiceClient.undeleteShare#String-String
ListSharesOptions listSharesOptions = new ListSharesOptions();
listSharesOptions.setIncludeDeleted(true);
fileServiceClient.listShares(listSharesOptions, Duration.ofSeconds(1), context).forEach(
deletedShare -> {
ShareClient shareClient = fileServiceClient.undeleteShare(
deletedShare.getName(), deletedShare.getVersion());
}
);
// END: com.azure.storage.file.share.ShareServiceClient.undeleteShare#String-String
}
/**
* Code snippet for {@link ShareServiceClient#undeleteShareWithResponse(String, String, Duration, Context)}.
*/
public void undeleteShareWithResponse() {
ShareServiceClient fileServiceClient = createClientWithSASToken();
Context context = new Context("Key", "Value");
// BEGIN: com.azure.storage.file.share.ShareServiceClient.undeleteShareWithResponse#String-String-Duration-Context
ListSharesOptions listSharesOptions = new ListSharesOptions();
listSharesOptions.setIncludeDeleted(true);
fileServiceClient.listShares(listSharesOptions, Duration.ofSeconds(1), context).forEach(
deletedShare -> {
ShareClient shareClient = fileServiceClient.undeleteShareWithResponse(
deletedShare.getName(), deletedShare.getVersion(), Duration.ofSeconds(1), context).getValue();
}
);
// END: com.azure.storage.file.share.ShareServiceClient.undeleteShareWithResponse#String-String-Duration-Context
}
}
|
|
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.ymex.kits.cache;
import java.io.BufferedInputStream;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Array;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
******************************************************************************
* Taken from the JB source code, can be found in:
* libcore/luni/src/main/java/libcore/io/DiskLruCache.java
* or direct link:
* https://android.googlesource.com/platform/libcore/+/android-4.1.1_r1/luni/src/main/java/libcore/io/DiskLruCache.java
******************************************************************************
*
* A cache that uses a bounded amount of space on a filesystem. Each cache
* entry has a string key and a fixed number of values. Values are byte
* sequences, accessible as streams or files. Each value must be between {@code
* 0} and {@code Integer.MAX_VALUE} bytes in length.
*
* <p>The cache stores its data in a directory on the filesystem. This
* directory must be exclusive to the cache; the cache may delete or overwrite
* files from its directory. It is an error for multiple processes to use the
* same cache directory at the same time.
*
* <p>This cache limits the number of bytes that it will store on the
* filesystem. When the number of stored bytes exceeds the limit, the cache will
* remove entries in the background until the limit is satisfied. The limit is
* not strict: the cache may temporarily exceed it while waiting for files to be
* deleted. The limit does not include filesystem overhead or the cache
* journal so space-sensitive applications should set a conservative limit.
*
* <p>Clients call {@link #edit} to create or update the values of an entry. An
* entry may have only one editor at one time; if a value is not available to be
* edited then {@link #edit} will return null.
* <ul>
* <li>When an entry is being <strong>created</strong> it is necessary to
* supply a full set of values; the empty value should be used as a
* placeholder if necessary.
* <li>When an entry is being <strong>edited</strong>, it is not necessary
* to supply data for every value; values default to their previous
* value.
* </ul>
* Every {@link #edit} call must be matched by a call to {@link Editor#commit}
* or {@link Editor#abort}. Committing is atomic: a read observes the full set
* of values as they were before or after the commit, but never a mix of values.
*
* <p>Clients call {@link #get} to read a snapshot of an entry. The read will
* observe the value at the time that {@link #get} was called. Updates and
* removals after the call do not impact ongoing reads.
*
* <p>This class is tolerant of some I/O errors. If files are missing from the
* filesystem, the corresponding entries will be dropped from the cache. If
* an error occurs while writing a cache value, the edit will fail silently.
* Callers should handle other problems by catching {@code IOException} and
* responding appropriately.
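 *
 * <p>A minimal usage sketch (the directory, sizes, and key below are illustrative and not part
 * of the original documentation):
 * <pre>   {@code
 *   DiskLruCache cache = DiskLruCache.open(cacheDir, 1, 1, 10 * 1024 * 1024); // 1 value per entry, 10 MiB limit
 *   DiskLruCache.Editor editor = cache.edit("some-key");     // null if another edit is in progress
 *   if (editor != null) {
 *       editor.set(0, "cached value");
 *       editor.commit();                                      // or editor.abort() on failure
 *   }
 *   DiskLruCache.Snapshot snapshot = cache.get("some-key");   // null if absent or not yet readable
 *   if (snapshot != null) {
 *       String value = snapshot.getString(0);
 *       snapshot.close();
 *   }
 * }</pre>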
*/
public final class DiskLruCache implements Closeable {
static final String JOURNAL_FILE = "journal";
static final String JOURNAL_FILE_TMP = "journal.tmp";
static final String MAGIC = "libcore.io.DiskLruCache";
static final String VERSION_1 = "1";
static final long ANY_SEQUENCE_NUMBER = -1;
private static final String CLEAN = "CLEAN";
private static final String DIRTY = "DIRTY";
private static final String REMOVE = "REMOVE";
private static final String READ = "READ";
private static final Charset UTF_8 = Charset.forName("UTF-8");
private static final int IO_BUFFER_SIZE = 8 * 1024;
/*
* This cache uses a journal file named "journal". A typical journal file
* looks like this:
* libcore.io.DiskLruCache
* 1
* 100
* 2
*
* CLEAN 3400330d1dfc7f3f7f4b8d4d803dfcf6 832 21054
* DIRTY 335c4c6028171cfddfbaae1a9c313c52
* CLEAN 335c4c6028171cfddfbaae1a9c313c52 3934 2342
* REMOVE 335c4c6028171cfddfbaae1a9c313c52
* DIRTY 1ab96a171faeeee38496d8b330771a7a
* CLEAN 1ab96a171faeeee38496d8b330771a7a 1600 234
* READ 335c4c6028171cfddfbaae1a9c313c52
* READ 3400330d1dfc7f3f7f4b8d4d803dfcf6
*
* The first five lines of the journal form its header. They are the
* constant string "libcore.io.DiskLruCache", the disk cache's version,
* the application's version, the value count, and a blank line.
*
* Each of the subsequent lines in the file is a record of the state of a
* cache entry. Each line contains space-separated values: a state, a key,
* and optional state-specific values.
* o DIRTY lines track that an entry is actively being created or updated.
* Every successful DIRTY action should be followed by a CLEAN or REMOVE
* action. DIRTY lines without a matching CLEAN or REMOVE indicate that
* temporary files may need to be deleted.
* o CLEAN lines track a cache entry that has been successfully published
* and may be read. A publish line is followed by the lengths of each of
* its values.
* o READ lines track accesses for LRU.
* o REMOVE lines track entries that have been deleted.
*
* The journal file is appended to as cache operations occur. The journal may
* occasionally be compacted by dropping redundant lines. A temporary file named
* "journal.tmp" will be used during compaction; that file should be deleted if
* it exists when the cache is opened.
*/
private final File directory;
private final File journalFile;
private final File journalFileTmp;
private final int appVersion;
private final long maxSize;
private final int valueCount;
private long size = 0;
private Writer journalWriter;
private final LinkedHashMap<String, Entry> lruEntries
= new LinkedHashMap<String, Entry>(0, 0.75f, true);
private int redundantOpCount;
/**
* To differentiate between old and current snapshots, each entry is given
* a sequence number each time an edit is committed. A snapshot is stale if
* its sequence number is not equal to its entry's sequence number.
*/
private long nextSequenceNumber = 0;
/* From java.util.Arrays */
@SuppressWarnings("unchecked")
private static <T> T[] copyOfRange(T[] original, int start, int end) {
final int originalLength = original.length; // For exception priority compatibility.
if (start > end) {
throw new IllegalArgumentException();
}
if (start < 0 || start > originalLength) {
throw new ArrayIndexOutOfBoundsException();
}
final int resultLength = end - start;
final int copyLength = Math.min(resultLength, originalLength - start);
final T[] result = (T[]) Array
.newInstance(original.getClass().getComponentType(), resultLength);
System.arraycopy(original, start, result, 0, copyLength);
return result;
}
/**
* Returns the remainder of 'reader' as a string, closing it when done.
*/
public static String readFully(Reader reader) throws IOException {
try {
StringWriter writer = new StringWriter();
char[] buffer = new char[1024];
int count;
while ((count = reader.read(buffer)) != -1) {
writer.write(buffer, 0, count);
}
return writer.toString();
} finally {
reader.close();
}
}
/**
* Returns the ASCII characters up to but not including the next "\r\n", or
* "\n".
*
* @throws EOFException if the stream is exhausted before the next newline
* character.
*/
public static String readAsciiLine(InputStream in) throws IOException {
// TODO: support UTF-8 here instead
StringBuilder result = new StringBuilder(80);
while (true) {
int c = in.read();
if (c == -1) {
throw new EOFException();
} else if (c == '\n') {
break;
}
result.append((char) c);
}
int length = result.length();
if (length > 0 && result.charAt(length - 1) == '\r') {
result.setLength(length - 1);
}
return result.toString();
}
/**
* Closes 'closeable', ignoring any checked exceptions. Does nothing if 'closeable' is null.
*/
public static void closeQuietly(Closeable closeable) {
if (closeable != null) {
try {
closeable.close();
} catch (RuntimeException rethrown) {
throw rethrown;
} catch (Exception ignored) {
}
}
}
/**
* Recursively delete everything in {@code dir}.
*/
// TODO: this should specify paths as Strings rather than as Files
public static void deleteContents(File dir) throws IOException {
File[] files = dir.listFiles();
if (files == null) {
throw new IllegalArgumentException("not a directory: " + dir);
}
for (File file : files) {
if (file.isDirectory()) {
deleteContents(file);
}
if (!file.delete()) {
throw new IOException("failed to delete file: " + file);
}
}
}
/** This cache uses a single background thread to evict entries. */
private final ExecutorService executorService = new ThreadPoolExecutor(0, 1,
60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
private final Callable<Void> cleanupCallable = new Callable<Void>() {
@Override public Void call() throws Exception {
synchronized (DiskLruCache.this) {
if (journalWriter == null) {
return null; // closed
}
trimToSize();
if (journalRebuildRequired()) {
rebuildJournal();
redundantOpCount = 0;
}
}
return null;
}
};
private DiskLruCache(File directory, int appVersion, int valueCount, long maxSize) {
this.directory = directory;
this.appVersion = appVersion;
this.journalFile = new File(directory, JOURNAL_FILE);
this.journalFileTmp = new File(directory, JOURNAL_FILE_TMP);
this.valueCount = valueCount;
this.maxSize = maxSize;
}
/**
* Opens the cache in {@code directory}, creating a cache if none exists
* there.
*
* @param directory a writable directory
     * @param appVersion the application's version; when it changes, any existing cache contents are discarded
     * @param valueCount the number of values per cache entry. Must be positive.
     * @param maxSize the maximum number of bytes this cache should use to store its data
* @throws IOException if reading or writing the cache directory fails
*/
public static DiskLruCache open(File directory, int appVersion, int valueCount, long maxSize)
throws IOException {
if (maxSize <= 0) {
throw new IllegalArgumentException("maxSize <= 0");
}
if (valueCount <= 0) {
throw new IllegalArgumentException("valueCount <= 0");
}
// prefer to pick up where we left off
DiskLruCache cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
if (cache.journalFile.exists()) {
try {
cache.readJournal();
cache.processJournal();
cache.journalWriter = new BufferedWriter(new FileWriter(cache.journalFile, true),
IO_BUFFER_SIZE);
return cache;
} catch (IOException journalIsCorrupt) {
// System.logW("DiskLruCache " + directory + " is corrupt: "
// + journalIsCorrupt.getMessage() + ", removing");
cache.delete();
}
}
// create a new empty cache
directory.mkdirs();
cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
cache.rebuildJournal();
return cache;
}
private void readJournal() throws IOException {
InputStream in = new BufferedInputStream(new FileInputStream(journalFile), IO_BUFFER_SIZE);
try {
String magic = readAsciiLine(in);
String version = readAsciiLine(in);
String appVersionString = readAsciiLine(in);
String valueCountString = readAsciiLine(in);
String blank = readAsciiLine(in);
if (!MAGIC.equals(magic)
|| !VERSION_1.equals(version)
|| !Integer.toString(appVersion).equals(appVersionString)
|| !Integer.toString(valueCount).equals(valueCountString)
|| !"".equals(blank)) {
throw new IOException("unexpected journal header: ["
+ magic + ", " + version + ", " + valueCountString + ", " + blank + "]");
}
while (true) {
try {
readJournalLine(readAsciiLine(in));
} catch (EOFException endOfJournal) {
break;
}
}
} finally {
closeQuietly(in);
}
}
private void readJournalLine(String line) throws IOException {
String[] parts = line.split(" ");
if (parts.length < 2) {
throw new IOException("unexpected journal line: " + line);
}
String key = parts[1];
if (parts[0].equals(REMOVE) && parts.length == 2) {
lruEntries.remove(key);
return;
}
Entry entry = lruEntries.get(key);
if (entry == null) {
entry = new Entry(key);
lruEntries.put(key, entry);
}
if (parts[0].equals(CLEAN) && parts.length == 2 + valueCount) {
entry.readable = true;
entry.currentEditor = null;
entry.setLengths(copyOfRange(parts, 2, parts.length));
} else if (parts[0].equals(DIRTY) && parts.length == 2) {
entry.currentEditor = new Editor(entry);
} else if (parts[0].equals(READ) && parts.length == 2) {
// this work was already done by calling lruEntries.get()
} else {
throw new IOException("unexpected journal line: " + line);
}
}
/**
* Computes the initial size and collects garbage as a part of opening the
* cache. Dirty entries are assumed to be inconsistent and will be deleted.
*/
private void processJournal() throws IOException {
deleteIfExists(journalFileTmp);
for (Iterator<Entry> i = lruEntries.values().iterator(); i.hasNext(); ) {
Entry entry = i.next();
if (entry.currentEditor == null) {
for (int t = 0; t < valueCount; t++) {
size += entry.lengths[t];
}
} else {
entry.currentEditor = null;
for (int t = 0; t < valueCount; t++) {
deleteIfExists(entry.getCleanFile(t));
deleteIfExists(entry.getDirtyFile(t));
}
i.remove();
}
}
}
/**
* Creates a new journal that omits redundant information. This replaces the
* current journal if it exists.
*/
private synchronized void rebuildJournal() throws IOException {
if (journalWriter != null) {
journalWriter.close();
}
Writer writer = new BufferedWriter(new FileWriter(journalFileTmp), IO_BUFFER_SIZE);
writer.write(MAGIC);
writer.write("\n");
writer.write(VERSION_1);
writer.write("\n");
writer.write(Integer.toString(appVersion));
writer.write("\n");
writer.write(Integer.toString(valueCount));
writer.write("\n");
writer.write("\n");
for (Entry entry : lruEntries.values()) {
if (entry.currentEditor != null) {
writer.write(DIRTY + ' ' + entry.key + '\n');
} else {
writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
}
}
writer.close();
journalFileTmp.renameTo(journalFile);
journalWriter = new BufferedWriter(new FileWriter(journalFile, true), IO_BUFFER_SIZE);
}
private static void deleteIfExists(File file) throws IOException {
// try {
// Libcore.os.remove(file.getPath());
// } catch (ErrnoException errnoException) {
// if (errnoException.errno != OsConstants.ENOENT) {
// throw errnoException.rethrowAsIOException();
// }
// }
if (file.exists() && !file.delete()) {
throw new IOException();
}
}
/**
* Returns a snapshot of the entry named {@code key}, or null if it doesn't
     * exist or is not currently readable. If a value is returned, it is moved to
* the head of the LRU queue.
*/
public synchronized Snapshot get(String key) throws IOException {
checkNotClosed();
validateKey(key);
Entry entry = lruEntries.get(key);
if (entry == null) {
return null;
}
if (!entry.readable) {
return null;
}
/*
* Open all streams eagerly to guarantee that we see a single published
* snapshot. If we opened streams lazily then the streams could come
* from different edits.
*/
InputStream[] ins = new InputStream[valueCount];
try {
for (int i = 0; i < valueCount; i++) {
ins[i] = new FileInputStream(entry.getCleanFile(i));
}
} catch (FileNotFoundException e) {
// a file must have been deleted manually!
return null;
}
redundantOpCount++;
journalWriter.append(READ + ' ' + key + '\n');
if (journalRebuildRequired()) {
executorService.submit(cleanupCallable);
}
return new Snapshot(key, entry.sequenceNumber, ins);
}
/**
* Returns an editor for the entry named {@code key}, or null if another
* edit is in progress.
*/
public Editor edit(String key) throws IOException {
return edit(key, ANY_SEQUENCE_NUMBER);
}
private synchronized Editor edit(String key, long expectedSequenceNumber) throws IOException {
checkNotClosed();
validateKey(key);
Entry entry = lruEntries.get(key);
if (expectedSequenceNumber != ANY_SEQUENCE_NUMBER
&& (entry == null || entry.sequenceNumber != expectedSequenceNumber)) {
return null; // snapshot is stale
}
if (entry == null) {
entry = new Entry(key);
lruEntries.put(key, entry);
} else if (entry.currentEditor != null) {
return null; // another edit is in progress
}
Editor editor = new Editor(entry);
entry.currentEditor = editor;
// flush the journal before creating files to prevent file leaks
journalWriter.write(DIRTY + ' ' + key + '\n');
journalWriter.flush();
return editor;
}
/**
* Returns the directory where this cache stores its data.
*/
public File getDirectory() {
return directory;
}
/**
* Returns the maximum number of bytes that this cache should use to store
* its data.
*/
public long maxSize() {
return maxSize;
}
/**
* Returns the number of bytes currently being used to store the values in
* this cache. This may be greater than the max size if a background
* deletion is pending.
*/
public synchronized long size() {
return size;
}
private synchronized void completeEdit(Editor editor, boolean success) throws IOException {
Entry entry = editor.entry;
if (entry.currentEditor != editor) {
throw new IllegalStateException();
}
// if this edit is creating the entry for the first time, every index must have a value
if (success && !entry.readable) {
for (int i = 0; i < valueCount; i++) {
if (!entry.getDirtyFile(i).exists()) {
editor.abort();
throw new IllegalStateException("edit didn't create file " + i);
}
}
}
for (int i = 0; i < valueCount; i++) {
File dirty = entry.getDirtyFile(i);
if (success) {
if (dirty.exists()) {
File clean = entry.getCleanFile(i);
dirty.renameTo(clean);
long oldLength = entry.lengths[i];
long newLength = clean.length();
entry.lengths[i] = newLength;
size = size - oldLength + newLength;
}
} else {
deleteIfExists(dirty);
}
}
redundantOpCount++;
entry.currentEditor = null;
if (entry.readable | success) {
entry.readable = true;
journalWriter.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
if (success) {
entry.sequenceNumber = nextSequenceNumber++;
}
} else {
lruEntries.remove(entry.key);
journalWriter.write(REMOVE + ' ' + entry.key + '\n');
}
if (size > maxSize || journalRebuildRequired()) {
executorService.submit(cleanupCallable);
}
}
/**
* We only rebuild the journal when it will halve the size of the journal
* and eliminate at least 2000 ops.
*/
private boolean journalRebuildRequired() {
final int REDUNDANT_OP_COMPACT_THRESHOLD = 2000;
return redundantOpCount >= REDUNDANT_OP_COMPACT_THRESHOLD
&& redundantOpCount >= lruEntries.size();
}
/**
* Drops the entry for {@code key} if it exists and can be removed. Entries
* actively being edited cannot be removed.
*
* @return true if an entry was removed.
*/
public synchronized boolean remove(String key) throws IOException {
checkNotClosed();
validateKey(key);
Entry entry = lruEntries.get(key);
if (entry == null || entry.currentEditor != null) {
return false;
}
for (int i = 0; i < valueCount; i++) {
File file = entry.getCleanFile(i);
if (!file.delete()) {
throw new IOException("failed to delete " + file);
}
size -= entry.lengths[i];
entry.lengths[i] = 0;
}
redundantOpCount++;
journalWriter.append(REMOVE + ' ' + key + '\n');
lruEntries.remove(key);
if (journalRebuildRequired()) {
executorService.submit(cleanupCallable);
}
return true;
}
/**
* Returns true if this cache has been closed.
*/
public boolean isClosed() {
return journalWriter == null;
}
private void checkNotClosed() {
if (journalWriter == null) {
throw new IllegalStateException("cache is closed");
}
}
/**
* Force buffered operations to the filesystem.
*/
public synchronized void flush() throws IOException {
checkNotClosed();
trimToSize();
journalWriter.flush();
}
/**
* Closes this cache. Stored values will remain on the filesystem.
*/
public synchronized void close() throws IOException {
if (journalWriter == null) {
return; // already closed
}
for (Entry entry : new ArrayList<Entry>(lruEntries.values())) {
if (entry.currentEditor != null) {
entry.currentEditor.abort();
}
}
trimToSize();
journalWriter.close();
journalWriter = null;
}
private void trimToSize() throws IOException {
while (size > maxSize) {
// Map.Entry<String, Entry> toEvict = lruEntries.eldest();
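            // lruEntries is an access-ordered LinkedHashMap, so the iterator's first entry is the
            // least recently used one.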
final Map.Entry<String, Entry> toEvict = lruEntries.entrySet().iterator().next();
remove(toEvict.getKey());
}
}
/**
* Closes the cache and deletes all of its stored values. This will delete
* all files in the cache directory including files that weren't created by
* the cache.
*/
public void delete() throws IOException {
close();
deleteContents(directory);
}
private void validateKey(String key) {
if (key.contains(" ") || key.contains("\n") || key.contains("\r")) {
throw new IllegalArgumentException(
"keys must not contain spaces or newlines: \"" + key + "\"");
}
}
private static String inputStreamToString(InputStream in) throws IOException {
return readFully(new InputStreamReader(in, UTF_8));
}
/**
* A snapshot of the values for an entry.
*/
public final class Snapshot implements Closeable {
private final String key;
private final long sequenceNumber;
private final InputStream[] ins;
private Snapshot(String key, long sequenceNumber, InputStream[] ins) {
this.key = key;
this.sequenceNumber = sequenceNumber;
this.ins = ins;
}
/**
* Returns an editor for this snapshot's entry, or null if either the
* entry has changed since this snapshot was created or if another edit
* is in progress.
*/
public Editor edit() throws IOException {
return DiskLruCache.this.edit(key, sequenceNumber);
}
/**
* Returns the unbuffered stream with the value for {@code index}.
*/
public InputStream getInputStream(int index) {
return ins[index];
}
/**
* Returns the string value for {@code index}.
*/
public String getString(int index) throws IOException {
return inputStreamToString(getInputStream(index));
}
@Override public void close() {
for (InputStream in : ins) {
closeQuietly(in);
}
}
}
/**
* Edits the values for an entry.
*/
public final class Editor {
private final Entry entry;
private boolean hasErrors;
private Editor(Entry entry) {
this.entry = entry;
}
/**
* Returns an unbuffered input stream to read the last committed value,
* or null if no value has been committed.
*/
public InputStream newInputStream(int index) throws IOException {
synchronized (DiskLruCache.this) {
if (entry.currentEditor != this) {
throw new IllegalStateException();
}
if (!entry.readable) {
return null;
}
return new FileInputStream(entry.getCleanFile(index));
}
}
/**
* Returns the last committed value as a string, or null if no value
* has been committed.
*/
public String getString(int index) throws IOException {
InputStream in = newInputStream(index);
return in != null ? inputStreamToString(in) : null;
}
/**
* Returns a new unbuffered output stream to write the value at
* {@code index}. If the underlying output stream encounters errors
* when writing to the filesystem, this edit will be aborted when
* {@link #commit} is called. The returned output stream does not throw
* IOExceptions.
*/
public OutputStream newOutputStream(int index) throws IOException {
synchronized (DiskLruCache.this) {
if (entry.currentEditor != this) {
throw new IllegalStateException();
}
return new FaultHidingOutputStream(new FileOutputStream(entry.getDirtyFile(index)));
}
}
/**
* Sets the value at {@code index} to {@code value}.
*/
public void set(int index, String value) throws IOException {
Writer writer = null;
try {
writer = new OutputStreamWriter(newOutputStream(index), UTF_8);
writer.write(value);
} finally {
closeQuietly(writer);
}
}
/**
* Commits this edit so it is visible to readers. This releases the
* edit lock so another edit may be started on the same key.
*/
public void commit() throws IOException {
if (hasErrors) {
completeEdit(this, false);
remove(entry.key); // the previous entry is stale
} else {
completeEdit(this, true);
}
}
/**
* Aborts this edit. This releases the edit lock so another edit may be
* started on the same key.
*/
public void abort() throws IOException {
completeEdit(this, false);
}
private class FaultHidingOutputStream extends FilterOutputStream {
private FaultHidingOutputStream(OutputStream out) {
super(out);
}
@Override public void write(int oneByte) {
try {
out.write(oneByte);
} catch (IOException e) {
hasErrors = true;
}
}
@Override public void write(byte[] buffer, int offset, int length) {
try {
out.write(buffer, offset, length);
} catch (IOException e) {
hasErrors = true;
}
}
@Override public void close() {
try {
out.close();
} catch (IOException e) {
hasErrors = true;
}
}
@Override public void flush() {
try {
out.flush();
} catch (IOException e) {
hasErrors = true;
}
}
}
}
private final class Entry {
private final String key;
/** Lengths of this entry's files. */
private final long[] lengths;
/** True if this entry has ever been published */
private boolean readable;
/** The ongoing edit or null if this entry is not being edited. */
private Editor currentEditor;
/** The sequence number of the most recently committed edit to this entry. */
private long sequenceNumber;
private Entry(String key) {
this.key = key;
this.lengths = new long[valueCount];
}
public String getLengths() throws IOException {
StringBuilder result = new StringBuilder();
for (long size : lengths) {
result.append(' ').append(size);
}
return result.toString();
}
/**
* Set lengths using decimal numbers like "10123".
*/
private void setLengths(String[] strings) throws IOException {
if (strings.length != valueCount) {
throw invalidLengths(strings);
}
try {
for (int i = 0; i < strings.length; i++) {
lengths[i] = Long.parseLong(strings[i]);
}
} catch (NumberFormatException e) {
throw invalidLengths(strings);
}
}
private IOException invalidLengths(String[] strings) throws IOException {
throw new IOException("unexpected journal line: " + Arrays.toString(strings));
}
public File getCleanFile(int i) {
return new File(directory, key + "." + i);
}
public File getDirtyFile(int i) {
return new File(directory, key + "." + i + ".tmp");
}
}
}
|
|
package org.apache.helix.webapp;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.logging.Level;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.helix.webapp.resources.ClusterResource;
import org.apache.helix.webapp.resources.ClustersResource;
import org.apache.helix.webapp.resources.ConfigResource;
import org.apache.helix.webapp.resources.ConstraintResource;
import org.apache.helix.webapp.resources.ControllerResource;
import org.apache.helix.webapp.resources.ControllerStatusUpdateResource;
import org.apache.helix.webapp.resources.CurrentStateResource;
import org.apache.helix.webapp.resources.CurrentStatesResource;
import org.apache.helix.webapp.resources.ErrorResource;
import org.apache.helix.webapp.resources.ErrorsResource;
import org.apache.helix.webapp.resources.ExternalViewResource;
import org.apache.helix.webapp.resources.IdealStateResource;
import org.apache.helix.webapp.resources.InstanceResource;
import org.apache.helix.webapp.resources.InstancesResource;
import org.apache.helix.webapp.resources.ResourceGroupResource;
import org.apache.helix.webapp.resources.ResourceGroupsResource;
import org.apache.helix.webapp.resources.SchedulerTasksResource;
import org.apache.helix.webapp.resources.StateModelResource;
import org.apache.helix.webapp.resources.StateModelsResource;
import org.apache.helix.webapp.resources.StatusUpdateResource;
import org.apache.helix.webapp.resources.StatusUpdatesResource;
import org.apache.helix.webapp.resources.ZkChildResource;
import org.apache.helix.webapp.resources.ZkPathResource;
import org.restlet.Application;
import org.restlet.Context;
import org.restlet.Request;
import org.restlet.Response;
import org.restlet.Restlet;
import org.restlet.data.MediaType;
import org.restlet.representation.StringRepresentation;
import org.restlet.routing.Router;
import org.restlet.routing.Template;
public class RestAdminApplication extends Application {
public static final String HELP = "help";
public static final String ZKSERVERADDRESS = "zkSvr";
public static final String PORT = "port";
public static final String ZKCLIENT = "zkClient";
public static final int DEFAULT_PORT = 8100;
static {
org.restlet.engine.Engine.setLogLevel(Level.SEVERE);
}
public RestAdminApplication() {
super();
}
public RestAdminApplication(Context context) {
super(context);
}
@Override
public Restlet createInboundRoot() {
Router router = new Router(getContext());
router.setDefaultMatchingMode(Template.MODE_EQUALS);
router.attach("/clusters", ClustersResource.class);
router.attach("/clusters/{clusterName}", ClusterResource.class);
router.attach("/clusters/{clusterName}/resourceGroups", ResourceGroupsResource.class);
router.attach("/clusters/{clusterName}/resourceGroups/{resourceName}",
ResourceGroupResource.class);
router.attach("/clusters/{clusterName}/instances", InstancesResource.class);
router.attach("/clusters/{clusterName}/instances/{instanceName}", InstanceResource.class);
router.attach("/clusters/{clusterName}/instances/{instanceName}/currentState/{resourceName}",
CurrentStateResource.class);
router.attach("/clusters/{clusterName}/instances/{instanceName}/statusUpdate/{resourceName}",
StatusUpdateResource.class);
router.attach("/clusters/{clusterName}/instances/{instanceName}/errors/{resourceName}",
ErrorResource.class);
router.attach("/clusters/{clusterName}/instances/{instanceName}/currentState",
CurrentStatesResource.class);
router.attach("/clusters/{clusterName}/instances/{instanceName}/statusUpdate",
StatusUpdatesResource.class);
router.attach("/clusters/{clusterName}/instances/{instanceName}/errors", ErrorsResource.class);
router.attach("/clusters/{clusterName}/resourceGroups/{resourceName}/idealState",
IdealStateResource.class);
router.attach("/clusters/{clusterName}/resourceGroups/{resourceName}/externalView",
ExternalViewResource.class);
router.attach("/clusters/{clusterName}/StateModelDefs/{modelName}", StateModelResource.class);
router.attach("/clusters/{clusterName}/StateModelDefs", StateModelsResource.class);
router.attach("/clusters/{clusterName}/SchedulerTasks", SchedulerTasksResource.class);
router.attach("/clusters/{clusterName}/Controller", ControllerResource.class);
router.attach("/clusters/{clusterName}/Controller/statusUpdates/{MessageType}/{MessageId}",
ControllerStatusUpdateResource.class);
router.attach("/clusters/{clusterName}/configs", ConfigResource.class);
router.attach("/clusters/{clusterName}/configs/{scope}", ConfigResource.class);
router.attach("/clusters/{clusterName}/configs/{scope}/{scopeKey1}", ConfigResource.class);
router.attach("/clusters/{clusterName}/configs/{scope}/{scopeKey1}/{scopeKey2}",
ConfigResource.class);
router.attach("/clusters/{clusterName}/constraints/{constraintType}", ConstraintResource.class);
router.attach("/clusters/{clusterName}/constraints/{constraintType}/{constraintId}",
ConstraintResource.class);
router.attach("/zkPath", ZkPathResource.class).setMatchingMode(Template.MODE_STARTS_WITH);
router.attach("/zkChild", ZkChildResource.class).setMatchingMode(Template.MODE_STARTS_WITH);
Restlet mainpage = new Restlet() {
@Override
public void handle(Request request, Response response) {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("<html>");
stringBuilder.append("<head><title>Restlet Cluster Management page</title></head>");
stringBuilder.append("<body bgcolor=white>");
stringBuilder.append("<table border=\"0\">");
stringBuilder.append("<tr>");
stringBuilder.append("<td>");
stringBuilder.append("<h1>Rest cluster management interface V1</h1>");
stringBuilder.append("</td>");
stringBuilder.append("</tr>");
stringBuilder.append("</table>");
stringBuilder.append("</body>");
stringBuilder.append("</html>");
response.setEntity(new StringRepresentation(stringBuilder.toString(), MediaType.TEXT_HTML));
}
};
router.attach("", mainpage);
return router;
}
public static void printUsage(Options cliOptions) {
HelpFormatter helpFormatter = new HelpFormatter();
helpFormatter.printHelp("java " + RestAdminApplication.class.getName(), cliOptions);
}
@SuppressWarnings("static-access")
private static Options constructCommandLineOptions() {
Option helpOption =
OptionBuilder.withLongOpt(HELP).withDescription("Prints command-line options info")
.create();
helpOption.setArgs(0);
helpOption.setRequired(false);
helpOption.setArgName("print help message");
Option zkServerOption =
OptionBuilder.withLongOpt(ZKSERVERADDRESS).withDescription("Provide zookeeper address")
.create();
zkServerOption.setArgs(1);
zkServerOption.setRequired(true);
zkServerOption.setArgName("ZookeeperServerAddress(Required)");
Option portOption =
OptionBuilder.withLongOpt(PORT).withDescription("Provide web service port").create();
portOption.setArgs(1);
portOption.setRequired(false);
portOption.setArgName("web service port, default: " + DEFAULT_PORT);
Options options = new Options();
options.addOption(helpOption);
options.addOption(zkServerOption);
options.addOption(portOption);
return options;
}
public static void processCommandLineArgs(String[] cliArgs) throws Exception {
CommandLineParser cliParser = new GnuParser();
Options cliOptions = constructCommandLineOptions();
CommandLine cmd = null;
try {
cmd = cliParser.parse(cliOptions, cliArgs);
} catch (ParseException pe) {
System.err.println("RestAdminApplication: failed to parse command-line options: "
+ pe.toString());
printUsage(cliOptions);
System.exit(1);
}
int port = DEFAULT_PORT;
if (cmd.hasOption(HELP)) {
printUsage(cliOptions);
return;
} else if (cmd.hasOption(PORT)) {
port = Integer.parseInt(cmd.getOptionValue(PORT));
}
HelixAdminWebApp app = new HelixAdminWebApp(cmd.getOptionValue(ZKSERVERADDRESS), port);
app.start();
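// Keep the admin web application alive: block the main thread until it is interrupted,
// then shut the application down cleanly in the finally block.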
try {
Thread.currentThread().join();
} finally {
app.stop();
}
}
/**
* Entry point: parses the command-line arguments and starts the REST admin web application.
*
* @param args command-line arguments (zkSvr is required; port is optional)
* @throws Exception if the arguments cannot be parsed or the web application fails to start
*/
public static void main(String[] args) throws Exception {
processCommandLineArgs(args);
}
}
|
|
package us.myles.ViaVersion;
import com.google.gson.JsonObject;
import com.google.inject.Inject;
import org.spongepowered.api.Game;
import org.spongepowered.api.config.DefaultConfig;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.game.state.GameAboutToStartServerEvent;
import org.spongepowered.api.plugin.Plugin;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.scheduler.SpongeExecutorService;
import org.spongepowered.api.text.serializer.TextSerializers;
import us.myles.ViaVersion.api.Via;
import us.myles.ViaVersion.api.ViaAPI;
import us.myles.ViaVersion.api.command.ViaCommandSender;
import us.myles.ViaVersion.api.configuration.ConfigurationProvider;
import us.myles.ViaVersion.api.platform.TaskId;
import us.myles.ViaVersion.api.platform.ViaPlatform;
import us.myles.ViaVersion.dump.PluginInfo;
import us.myles.ViaVersion.sponge.VersionInfo;
import us.myles.ViaVersion.sponge.commands.SpongeCommandHandler;
import us.myles.ViaVersion.sponge.commands.SpongeCommandSender;
import us.myles.ViaVersion.sponge.platform.*;
import us.myles.ViaVersion.sponge.util.LoggerWrapper;
import us.myles.ViaVersion.util.GsonUtil;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
@Plugin(id = "viaversion",
name = "ViaVersion",
version = VersionInfo.VERSION,
authors = {"_MylesC", "Matsv"},
description = "Allow newer Minecraft versions to connect to an older server version.",
dependencies = {}
)
public class SpongePlugin implements ViaPlatform {
@Inject
private Game game;
@Inject
private PluginContainer container;
@Inject
@DefaultConfig(sharedRoot = false)
private File defaultConfig;
private SpongeViaAPI api = new SpongeViaAPI();
private SpongeExecutorService asyncExecutor;
private SpongeExecutorService syncExecutor;
private SpongeConfigAPI conf;
private Logger logger;
@Listener
public void onServerStart(GameAboutToStartServerEvent event) {
// Setup Logger
logger = new LoggerWrapper(container.getLogger());
// Setup Plugin
conf = new SpongeConfigAPI(defaultConfig.getParentFile());
syncExecutor = game.getScheduler().createSyncExecutor(this);
asyncExecutor = game.getScheduler().createAsyncExecutor(this);
SpongeCommandHandler commandHandler = new SpongeCommandHandler();
game.getCommandManager().register(this, commandHandler, Arrays.asList("viaversion", "viaver", "vvsponge"));
getLogger().info("ViaVersion " + getPluginVersion() + " is now loaded, injecting!");
// Init platform
Via.init(ViaManager.builder()
.platform(this)
.commandHandler(commandHandler)
.injector(new SpongeViaInjector())
.loader(new SpongeViaLoader(this))
.build());
// Inject!
Via.getManager().init();
}
@Override
public Logger getLogger() {
return logger;
}
@Override
public String getPlatformName() {
return game.getPlatform().getImplementation().getName();
}
@Override
public String getPlatformVersion() {
return game.getPlatform().getImplementation().getVersion().orElse("Unknown Version");
}
@Override
public String getPluginVersion() {
return container.getVersion().orElse("Unknown Version");
}
@Override
public TaskId runAsync(Runnable runnable) {
asyncExecutor.execute(runnable);
return new SpongeTaskId(null);
}
@Override
public TaskId runSync(Runnable runnable) {
syncExecutor.execute(runnable);
return new SpongeTaskId(null);
}
@Override
public TaskId runRepeatingSync(Runnable runnable, Long ticks) {
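// Sponge's executor schedules by wall-clock time rather than server ticks, so convert
// ticks to milliseconds here (one Minecraft tick is 50 ms, i.e. 20 ticks per second).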
Long time = ticks * 50L;
return new SpongeTaskId(syncExecutor.scheduleAtFixedRate(runnable, time, time, TimeUnit.MILLISECONDS).getTask());
}
@Override
public void cancelTask(TaskId taskId) {
if (taskId == null) return;
if (taskId.getObject() == null) return;
if (taskId instanceof SpongeTaskId) {
((SpongeTaskId) taskId).getObject().cancel();
}
}
@Override
public ViaCommandSender[] getOnlinePlayers() {
ViaCommandSender[] array = new ViaCommandSender[game.getServer().getOnlinePlayers().size()];
int i = 0;
for (Player player : game.getServer().getOnlinePlayers()) {
array[i++] = new SpongeCommandSender(player);
}
return array;
}
@Override
public void sendMessage(UUID uuid, String message) {
for (Player player : game.getServer().getOnlinePlayers()) {
if (player.getUniqueId().equals(uuid))
player.sendMessage(TextSerializers.LEGACY_FORMATTING_CODE.deserialize(message));
}
}
@Override
public boolean kickPlayer(UUID uuid, String message) {
for (Player player : game.getServer().getOnlinePlayers()) {
if (player.getUniqueId().equals(uuid)) {
player.kick(TextSerializers.LEGACY_FORMATTING_CODE.deserialize(message));
return true;
}
}
return false;
}
@Override
public boolean isPluginEnabled() {
return true;
}
@Override
public ViaAPI getApi() {
return api;
}
@Override
public SpongeConfigAPI getConf() {
return conf;
}
@Override
public ConfigurationProvider getConfigurationProvider() {
return conf;
}
@Override
public void onReload() {
getLogger().severe("ViaVersion is already loaded, this should work fine. If you get any console errors, try rebooting.");
}
@Override
public JsonObject getDump() {
JsonObject platformSpecific = new JsonObject();
List<PluginInfo> plugins = new ArrayList<>();
for (PluginContainer p : game.getPluginManager().getPlugins()) {
plugins.add(new PluginInfo(
true,
p.getName(),
p.getVersion().orElse("Unknown Version"),
p.getInstance().isPresent() ? p.getInstance().get().getClass().getCanonicalName() : "Unknown",
p.getAuthors()
));
}
platformSpecific.add("plugins", GsonUtil.getGson().toJsonTree(plugins));
return platformSpecific;
}
@Override
public boolean isOldClientsAllowed() {
return true;
}
}
|
|
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.zipfile;
import static org.pentaho.di.job.entry.validator.AbstractFileValidator.putVariableSpace;
import static org.pentaho.di.job.entry.validator.AndValidator.putValidators;
import static org.pentaho.di.job.entry.validator.FileDoesNotExistValidator.putFailIfExists;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.fileDoesNotExistValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notBlankValidator;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.DecimalFormat;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSelectInfo;
import org.apache.commons.vfs.FileSelector;
import org.apache.commons.vfs.FileSystemException;
import org.apache.commons.vfs.FileType;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.job.entry.validator.ValidatorContext;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* This defines a 'zip file' job entry. Its main use is to zip the files in a directory and then process the zipped
* files afterwards (delete or move them).
*
* @author Samatar Hassan
* @since 27-02-2007
*
*/
public class JobEntryZipFile extends JobEntryBase implements Cloneable, JobEntryInterface {
private static Class<?> PKG = JobEntryZipFile.class; // for i18n purposes, needed by Translator2!!
private String zipFilename;
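// Numeric option codes used throughout this job entry (derived from the logic further below):
// compressionRate: 0 = no compression, 1 = default, 2 = best compression, 3 = best speed
// ifZipFileExists: 0 = create a new file with a timestamped name, 1 = append, 2 = do nothing, 3 = fail
// afterZip: 1 = delete the source files, 2 = move them to the "move to" directory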
public int compressionRate;
public int ifZipFileExists;
public int afterZip;
private String wildCard;
private String excludeWildCard;
private String sourceDirectory;
private String movetoDirectory;
private boolean addFileToResult;
private boolean isFromPrevious;
private boolean createParentFolder;
private boolean addDate;
private boolean addTime;
private boolean specifyFormat;
private String dateTimeFormat;
private boolean createMoveToDirectory;
private boolean includingSubFolders;
private String storedSourcePathDepth;
/**
* Default constructor.
*/
public JobEntryZipFile( String n ) {
super( n, "" );
dateTimeFormat = null;
zipFilename = null;
ifZipFileExists = 2;
afterZip = 0;
compressionRate = 1;
wildCard = null;
excludeWildCard = null;
sourceDirectory = null;
movetoDirectory = null;
addFileToResult = false;
isFromPrevious = false;
createParentFolder = false;
addDate = false;
addTime = false;
specifyFormat = false;
createMoveToDirectory = false;
includingSubFolders = true;
storedSourcePathDepth = "1";
}
public JobEntryZipFile() {
this( "" );
}
public Object clone() {
JobEntryZipFile je = (JobEntryZipFile) super.clone();
return je;
}
public String getXML() {
StringBuffer retval = new StringBuffer( 500 );
retval.append( super.getXML() );
retval.append( " " ).append( XMLHandler.addTagValue( "zipfilename", zipFilename ) );
retval.append( " " ).append( XMLHandler.addTagValue( "compressionrate", compressionRate ) );
retval.append( " " ).append( XMLHandler.addTagValue( "ifzipfileexists", ifZipFileExists ) );
retval.append( " " ).append( XMLHandler.addTagValue( "wildcard", wildCard ) );
retval.append( " " ).append( XMLHandler.addTagValue( "wildcardexclude", excludeWildCard ) );
retval.append( " " ).append( XMLHandler.addTagValue( "sourcedirectory", sourceDirectory ) );
retval.append( " " ).append( XMLHandler.addTagValue( "movetodirectory", movetoDirectory ) );
retval.append( " " ).append( XMLHandler.addTagValue( "afterzip", afterZip ) );
retval.append( " " ).append( XMLHandler.addTagValue( "addfiletoresult", addFileToResult ) );
retval.append( " " ).append( XMLHandler.addTagValue( "isfromprevious", isFromPrevious ) );
retval.append( " " ).append( XMLHandler.addTagValue( "createparentfolder", createParentFolder ) );
retval.append( " " ).append( XMLHandler.addTagValue( "adddate", addDate ) );
retval.append( " " ).append( XMLHandler.addTagValue( "addtime", addTime ) );
retval.append( " " ).append( XMLHandler.addTagValue( "SpecifyFormat", specifyFormat ) );
retval.append( " " ).append( XMLHandler.addTagValue( "date_time_format", dateTimeFormat ) );
retval.append( " " ).append( XMLHandler.addTagValue( "createMoveToDirectory", createMoveToDirectory ) );
retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includingSubFolders ) );
retval.append( " " ).append( XMLHandler.addTagValue( "stored_source_path_depth", storedSourcePathDepth ) );
return retval.toString();
}
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
Repository rep, IMetaStore metaStore ) throws KettleXMLException {
try {
super.loadXML( entrynode, databases, slaveServers );
zipFilename = XMLHandler.getTagValue( entrynode, "zipfilename" );
compressionRate = Const.toInt( XMLHandler.getTagValue( entrynode, "compressionrate" ), -1 );
ifZipFileExists = Const.toInt( XMLHandler.getTagValue( entrynode, "ifzipfileexists" ), -1 );
afterZip = Const.toInt( XMLHandler.getTagValue( entrynode, "afterzip" ), -1 );
wildCard = XMLHandler.getTagValue( entrynode, "wildcard" );
excludeWildCard = XMLHandler.getTagValue( entrynode, "wildcardexclude" );
sourceDirectory = XMLHandler.getTagValue( entrynode, "sourcedirectory" );
movetoDirectory = XMLHandler.getTagValue( entrynode, "movetodirectory" );
addFileToResult = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "addfiletoresult" ) );
isFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "isfromprevious" ) );
createParentFolder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createparentfolder" ) );
addDate = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "adddate" ) );
addTime = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "addtime" ) );
specifyFormat = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "SpecifyFormat" ) );
dateTimeFormat = XMLHandler.getTagValue( entrynode, "date_time_format" );
createMoveToDirectory = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createMoveToDirectory" ) );
includingSubFolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "include_subfolders" ) );
storedSourcePathDepth = XMLHandler.getTagValue( entrynode, "stored_source_path_depth" );
} catch ( KettleXMLException xe ) {
throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryZipFile.UnableLoadJobEntryXML" ), xe );
}
}
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ) throws KettleException {
try {
zipFilename = rep.getJobEntryAttributeString( id_jobentry, "zipfilename" );
compressionRate = (int) rep.getJobEntryAttributeInteger( id_jobentry, "compressionrate" );
ifZipFileExists = (int) rep.getJobEntryAttributeInteger( id_jobentry, "ifzipfileexists" );
afterZip = (int) rep.getJobEntryAttributeInteger( id_jobentry, "afterzip" );
wildCard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
excludeWildCard = rep.getJobEntryAttributeString( id_jobentry, "wildcardexclude" );
sourceDirectory = rep.getJobEntryAttributeString( id_jobentry, "sourcedirectory" );
movetoDirectory = rep.getJobEntryAttributeString( id_jobentry, "movetodirectory" );
addFileToResult = rep.getJobEntryAttributeBoolean( id_jobentry, "addfiletoresult" );
isFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "isfromprevious" );
createParentFolder = rep.getJobEntryAttributeBoolean( id_jobentry, "createparentfolder" );
addDate = rep.getJobEntryAttributeBoolean( id_jobentry, "adddate" );
addTime = rep.getJobEntryAttributeBoolean( id_jobentry, "addtime" );
specifyFormat = rep.getJobEntryAttributeBoolean( id_jobentry, "SpecifyFormat" );
dateTimeFormat = rep.getJobEntryAttributeString( id_jobentry, "date_time_format" );
createMoveToDirectory = rep.getJobEntryAttributeBoolean( id_jobentry, "createMoveToDirectory" );
includingSubFolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
storedSourcePathDepth = rep.getJobEntryAttributeString( id_jobentry, "stored_source_path_depth" );
} catch ( KettleException dbe ) {
throw new KettleException( BaseMessages.getString( PKG, "JobEntryZipFile.UnableLoadJobEntryRep", ""
+ id_jobentry ), dbe );
}
}
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
try {
rep.saveJobEntryAttribute( id_job, getObjectId(), "zipfilename", zipFilename );
rep.saveJobEntryAttribute( id_job, getObjectId(), "compressionrate", compressionRate );
rep.saveJobEntryAttribute( id_job, getObjectId(), "ifzipfileexists", ifZipFileExists );
rep.saveJobEntryAttribute( id_job, getObjectId(), "afterzip", afterZip );
rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcard", wildCard );
rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcardexclude", excludeWildCard );
rep.saveJobEntryAttribute( id_job, getObjectId(), "sourcedirectory", sourceDirectory );
rep.saveJobEntryAttribute( id_job, getObjectId(), "movetodirectory", movetoDirectory );
rep.saveJobEntryAttribute( id_job, getObjectId(), "addfiletoresult", addFileToResult );
rep.saveJobEntryAttribute( id_job, getObjectId(), "isfromprevious", isFromPrevious );
rep.saveJobEntryAttribute( id_job, getObjectId(), "createparentfolder", createParentFolder );
rep.saveJobEntryAttribute( id_job, getObjectId(), "addtime", addTime );
rep.saveJobEntryAttribute( id_job, getObjectId(), "adddate", addDate );
rep.saveJobEntryAttribute( id_job, getObjectId(), "SpecifyFormat", specifyFormat );
rep.saveJobEntryAttribute( id_job, getObjectId(), "date_time_format", dateTimeFormat );
rep.saveJobEntryAttribute( id_job, getObjectId(), "createMoveToDirectory", createMoveToDirectory );
rep.saveJobEntryAttribute( id_job, getObjectId(), "include_subfolders", includingSubFolders );
rep.saveJobEntryAttribute( id_job, getObjectId(), "stored_source_path_depth", storedSourcePathDepth );
} catch ( KettleDatabaseException dbe ) {
throw new KettleException( BaseMessages
.getString( PKG, "JobEntryZipFile.UnableSaveJobEntryRep", "" + id_job ), dbe );
}
}
private boolean createParentFolder( String filename ) {
// Check for parent folder
FileObject parentfolder = null;
boolean result = false;
try {
// Get parent folder
parentfolder = KettleVFS.getFileObject( filename, this ).getParent();
if ( !parentfolder.exists() ) {
if ( log.isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntryZipFile.CanNotFindFolder", ""
+ parentfolder.getName() ) );
}
parentfolder.createFolder();
if ( log.isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntryZipFile.FolderCreated", "" + parentfolder.getName() ) );
}
} else {
if ( log.isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntryZipFile.FolderExists", "" + parentfolder.getName() ) );
}
}
result = true;
} catch ( Exception e ) {
logError(
BaseMessages.getString( PKG, "JobEntryZipFile.CanNotCreateFolder", "" + parentfolder.getName() ), e );
} finally {
if ( parentfolder != null ) {
try {
parentfolder.close();
parentfolder = null;
} catch ( Exception ex ) {
// Ignore
}
}
}
return result;
}
public boolean processRowFile( Job parentJob, Result result, String realZipfilename, String realWildcard,
String realWildcardExclude, String realSourceDirectoryOrFile, String realMovetodirectory,
boolean createparentfolder ) {
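// High-level flow: verify that the source exists, honour the if-exists policy for the target zip,
// collect the files to add (applying the include/exclude wildcards when the source is a folder),
// write the zip, and finally delete or move the zipped sources depending on the afterZip setting.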
boolean Fileexists = false;
File tempFile = null;
File fileZip = null;
boolean resultat = false;
boolean renameOk = false;
boolean orginExist = false;
// Check if target file/folder exists!
FileObject originFile = null;
ZipInputStream zin = null;
byte[] buffer = null;
OutputStream dest = null;
BufferedOutputStream buff = null;
ZipOutputStream out = null;
ZipEntry entry = null;
String localSourceFilename = realSourceDirectoryOrFile;
try {
originFile = KettleVFS.getFileObject( realSourceDirectoryOrFile, this );
localSourceFilename = KettleVFS.getFilename( originFile );
orginExist = originFile.exists();
} catch ( Exception e ) {
// Ignore errors
} finally {
if ( originFile != null ) {
try {
originFile.close();
} catch ( IOException ex ) {
logError( "Error closing file '" + originFile.toString() + "'", ex );
}
}
}
String localrealZipfilename = realZipfilename;
if ( realZipfilename != null && orginExist ) {
FileObject fileObject = null;
try {
fileObject = KettleVFS.getFileObject( localrealZipfilename, this );
localrealZipfilename = KettleVFS.getFilename( fileObject );
// Check if Zip File exists
if ( fileObject.exists() ) {
Fileexists = true;
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "JobZipFiles.Zip_FileExists1.Label" )
+ localrealZipfilename + BaseMessages.getString( PKG, "JobZipFiles.Zip_FileExists2.Label" ) );
}
}
// Let's see if we need to create parent folder of destination zip filename
if ( createparentfolder ) {
createParentFolder( localrealZipfilename );
}
// Let's start the process now
if ( ifZipFileExists == 3 && Fileexists ) {
// the zip file exists and the user wants the job entry to fail
resultat = false;
} else if ( ifZipFileExists == 2 && Fileexists ) {
// the zip file exists and the user wants to do nothing
if ( addFileToResult ) {
// Add file to result files name
ResultFile resultFile =
new ResultFile( ResultFile.FILE_TYPE_GENERAL, fileObject, parentJob.getJobname(), toString() );
result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
}
resultat = true;
} else if ( afterZip == 2 && realMovetodirectory == null ) {
// After zip the user asked to move the files, but no destination folder was given
resultat = false;
logError( BaseMessages.getString( PKG, "JobZipFiles.AfterZip_No_DestinationFolder_Defined.Label" ) );
} else {
// Let's see if we deal with a file or a folder
FileObject[] fileList = null;
FileObject sourceFileOrFolder = KettleVFS.getFileObject( localSourceFilename );
boolean isSourceDirectory = sourceFileOrFolder.getType().equals( FileType.FOLDER );
final Pattern pattern;
final Pattern patternexclude;
if ( isSourceDirectory ) {
// Let's prepare the pattern matcher for performance reasons.
// We only do this if the target is a folder !
//
if ( !Const.isEmpty( realWildcard ) ) {
pattern = Pattern.compile( realWildcard );
} else {
pattern = null;
}
if ( !Const.isEmpty( realWildcardExclude ) ) {
patternexclude = Pattern.compile( realWildcardExclude );
} else {
patternexclude = null;
}
// Target is a directory
// Get all the files in the directory...
//
if ( includingSubFolders ) {
fileList = sourceFileOrFolder.findFiles( new FileSelector() {
public boolean traverseDescendents( FileSelectInfo fileInfo ) throws Exception {
return true;
}
public boolean includeFile( FileSelectInfo fileInfo ) throws Exception {
boolean include;
// Only include files in the sub-folders...
// When we include sub-folders we match the whole filename, not just the base-name
//
if ( fileInfo.getFile().getType().equals( FileType.FILE ) ) {
include = true;
if ( pattern != null ) {
String name = fileInfo.getFile().getName().getPath();
include = pattern.matcher( name ).matches();
}
if ( include && patternexclude != null ) {
String name = fileInfo.getFile().getName().getPath();
include = !patternexclude.matcher( name ).matches();
}
} else {
include = false;
}
return include;
}
} );
} else {
fileList = sourceFileOrFolder.getChildren();
}
} else {
pattern = null;
patternexclude = null;
// Target is a file
fileList = new FileObject[] { sourceFileOrFolder };
}
if ( fileList.length == 0 ) {
resultat = false;
logError( BaseMessages.getString( PKG, "JobZipFiles.Log.FolderIsEmpty", localSourceFilename ) );
} else if ( !checkContainsFile( localSourceFilename, fileList, isSourceDirectory ) ) {
resultat = false;
logError( BaseMessages.getString( PKG, "JobZipFiles.Log.NoFilesInFolder", localSourceFilename ) );
} else {
if ( ifZipFileExists == 0 && Fileexists ) {
// the zip file exists and the user wants to create a new one with a unique name
// Format the date for the new name
// do we already have a .zip extension at the end?
if ( localrealZipfilename.toLowerCase().endsWith( ".zip" ) ) {
// strip this off
localrealZipfilename = localrealZipfilename.substring( 0, localrealZipfilename.length() - 4 );
}
localrealZipfilename += "_" + StringUtil.getFormattedDateTimeNow( true ) + ".zip";
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "JobZipFiles.Zip_FileNameChange1.Label" )
+ localrealZipfilename + BaseMessages.getString( PKG, "JobZipFiles.Zip_FileNameChange1.Label" ) );
}
} else if ( ifZipFileExists == 1 && Fileexists ) {
// the zip file exists and the user wants to append to it
// get a temp file
fileZip = getFile( localrealZipfilename );
tempFile = File.createTempFile( fileZip.getName(), null );
// delete it, otherwise we cannot rename existing zip to it.
tempFile.delete();
renameOk = fileZip.renameTo( tempFile );
if ( !renameOk ) {
logError( BaseMessages.getString( PKG, "JobZipFiles.Cant_Rename_Temp1.Label" )
+ fileZip.getAbsolutePath()
+ BaseMessages.getString( PKG, "JobZipFiles.Cant_Rename_Temp2.Label" )
+ tempFile.getAbsolutePath()
+ BaseMessages.getString( PKG, "JobZipFiles.Cant_Rename_Temp3.Label" ) );
}
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "JobZipFiles.Zip_FileAppend1.Label" )
+ localrealZipfilename + BaseMessages.getString( PKG, "JobZipFiles.Zip_FileAppend2.Label" ) );
}
}
if ( log.isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobZipFiles.Files_Found1.Label" )
+ fileList.length + BaseMessages.getString( PKG, "JobZipFiles.Files_Found2.Label" )
+ localSourceFilename + BaseMessages.getString( PKG, "JobZipFiles.Files_Found3.Label" ) );
}
// Prepare Zip File
buffer = new byte[18024];
dest = KettleVFS.getOutputStream( localrealZipfilename, false );
buff = new BufferedOutputStream( dest );
out = new ZipOutputStream( buff );
HashSet<String> fileSet = new HashSet<String>();
if ( renameOk ) {
// User wants to append files to an existing Zip file.
// The idea is to rename the existing zip file to a temporary file
// and then add all entries from the existing zip along with the new files,
// excluding the zip entries that have the same name as one of the new files.
zin = new ZipInputStream( new FileInputStream( tempFile ) );
entry = zin.getNextEntry();
while ( entry != null ) {
String name = entry.getName();
if ( !fileSet.contains( name ) ) {
// Add ZIP entry to output stream.
out.putNextEntry( new ZipEntry( name ) );
// Transfer bytes from the ZIP file to the output file
int len;
while ( ( len = zin.read( buffer ) ) > 0 ) {
out.write( buffer, 0, len );
}
fileSet.add( name );
}
entry = zin.getNextEntry();
}
// Close the streams
zin.close();
}
// Set the method
out.setMethod( ZipOutputStream.DEFLATED );
// Set the compression level
if ( compressionRate == 0 ) {
out.setLevel( Deflater.NO_COMPRESSION );
} else if ( compressionRate == 1 ) {
out.setLevel( Deflater.DEFAULT_COMPRESSION );
}
if ( compressionRate == 2 ) {
out.setLevel( Deflater.BEST_COMPRESSION );
}
if ( compressionRate == 3 ) {
out.setLevel( Deflater.BEST_SPEED );
}
// Keep track of the zipped files (afterwards we will move or delete them)
FileObject[] zippedFiles = new FileObject[fileList.length];
int fileNum = 0;
// Get the files in the list...
for ( int i = 0; i < fileList.length && !parentJob.isStopped(); i++ ) {
boolean getIt = true;
boolean getItexclude = false;
// First see if the file matches the regular expression!
// ..only if target is a folder !
if ( isSourceDirectory ) {
// If we include sub-folders, we match on the whole name, not just the basename
//
String filename;
if ( includingSubFolders ) {
filename = fileList[i].getName().getPath();
} else {
filename = fileList[i].getName().getBaseName();
}
if ( pattern != null ) {
// Matches the base name of the file (backward compatible!)
//
Matcher matcher = pattern.matcher( filename );
getIt = matcher.matches();
}
if ( patternexclude != null ) {
Matcher matcherexclude = patternexclude.matcher( filename );
getItexclude = matcherexclude.matches();
}
}
// Get processing File
String targetFilename = KettleVFS.getFilename( fileList[i] );
if ( sourceFileOrFolder.getType().equals( FileType.FILE ) ) {
targetFilename = localSourceFilename;
}
FileObject file = KettleVFS.getFileObject( targetFilename );
boolean isTargetDirectory = file.exists() && file.getType().equals( FileType.FOLDER );
if ( getIt && !getItexclude && !isTargetDirectory && !fileSet.contains( targetFilename ) ) {
// We can add the file to the Zip Archive
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "JobZipFiles.Add_FilesToZip1.Label" )
+ fileList[i] + BaseMessages.getString( PKG, "JobZipFiles.Add_FilesToZip2.Label" )
+ localSourceFilename + BaseMessages.getString( PKG, "JobZipFiles.Add_FilesToZip3.Label" ) );
}
// Associate a file input stream for the current file
InputStream in = KettleVFS.getInputStream( file );
// Add ZIP entry to output stream.
//
String relativeName;
String fullName = fileList[i].getName().getPath();
String basePath = sourceFileOrFolder.getName().getPath();
if ( isSourceDirectory ) {
if ( fullName.startsWith( basePath ) ) {
relativeName = fullName.substring( basePath.length() + 1 );
} else {
relativeName = fullName;
}
} else if ( isFromPrevious ) {
int depth = determineDepth( environmentSubstitute( storedSourcePathDepth ) );
relativeName = determineZipfilenameForDepth( fullName, depth );
} else {
relativeName = fileList[i].getName().getBaseName();
}
out.putNextEntry( new ZipEntry( relativeName ) );
int len;
while ( ( len = in.read( buffer ) ) > 0 ) {
out.write( buffer, 0, len );
}
out.flush();
out.closeEntry();
// Close the current file input stream
in.close();
// Get Zipped File
zippedFiles[fileNum] = fileList[i];
fileNum = fileNum + 1;
}
}
// Close the ZipOutPutStream
out.close();
buff.close();
dest.close();
if ( log.isBasic() ) {
logBasic( BaseMessages.getString( PKG, "JobZipFiles.Log.TotalZippedFiles", "" + zippedFiles.length ) );
}
// Delete Temp File
if ( tempFile != null ) {
tempFile.delete();
}
// -----Get the list of Zipped Files and Move or Delete Them
if ( afterZip == 1 || afterZip == 2 ) {
// iterate through the array of Zipped files
for ( int i = 0; i < zippedFiles.length; i++ ) {
if ( zippedFiles[i] != null ) {
// Delete, Move File
FileObject fileObjectd = zippedFiles[i];
if ( !isSourceDirectory ) {
fileObjectd = KettleVFS.getFileObject( localSourceFilename );
}
// Here we can move, delete files
if ( afterZip == 1 ) {
// Delete File
boolean deleted = fileObjectd.delete();
if ( !deleted ) {
resultat = false;
logError( BaseMessages.getString( PKG, "JobZipFiles.Cant_Delete_File1.Label" )
+ localSourceFilename + Const.FILE_SEPARATOR + zippedFiles[i]
+ BaseMessages.getString( PKG, "JobZipFiles.Cant_Delete_File2.Label" ) );
}
// File deleted
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "JobZipFiles.File_Deleted1.Label" )
+ localSourceFilename + Const.FILE_SEPARATOR + zippedFiles[i]
+ BaseMessages.getString( PKG, "JobZipFiles.File_Deleted2.Label" ) );
}
} else if ( afterZip == 2 ) {
// Move File
FileObject fileObjectm = null;
try {
fileObjectm =
KettleVFS.getFileObject( realMovetodirectory
+ Const.FILE_SEPARATOR + fileObjectd.getName().getBaseName() );
fileObjectd.moveTo( fileObjectm );
} catch ( IOException e ) {
logError( BaseMessages.getString( PKG, "JobZipFiles.Cant_Move_File1.Label" )
+ zippedFiles[i] + BaseMessages.getString( PKG, "JobZipFiles.Cant_Move_File2.Label" )
+ e.getMessage() );
resultat = false;
} finally {
try {
if ( fileObjectm != null ) {
fileObjectm.close();
}
} catch ( Exception e ) {
if ( fileObjectm != null ) {
logError( "Error closing file '" + fileObjectm.toString() + "'", e );
}
}
}
// File moved
if ( log.isDebug() ) {
logDebug( BaseMessages.getString( PKG, "JobZipFiles.File_Moved1.Label" )
+ zippedFiles[i] + BaseMessages.getString( PKG, "JobZipFiles.File_Moved2.Label" ) );
}
}
}
}
}
if ( addFileToResult ) {
// Add file to result files name
ResultFile resultFile =
new ResultFile( ResultFile.FILE_TYPE_GENERAL, fileObject, parentJob.getJobname(), toString() );
result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
}
resultat = true;
}
}
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobZipFiles.Cant_CreateZipFile1.Label" )
+ localrealZipfilename + BaseMessages.getString( PKG, "JobZipFiles.Cant_CreateZipFile2.Label" ), e );
resultat = false;
} finally {
if ( fileObject != null ) {
try {
fileObject.close();
fileObject = null;
} catch ( IOException ex ) {
logError( "Error closing file '" + fileObject.toString() + "'", ex );
}
}
try {
if ( out != null ) {
out.close();
}
if ( buff != null ) {
buff.close();
}
if ( dest != null ) {
dest.close();
}
if ( zin != null ) {
zin.close();
}
if ( entry != null ) {
entry = null;
}
} catch ( IOException ex ) {
logError( "Error closing zip file entry for file '" + originFile.toString() + "'", ex );
}
}
} else {
resultat = true;
if ( localrealZipfilename == null ) {
logError( BaseMessages.getString( PKG, "JobZipFiles.No_ZipFile_Defined.Label" ) );
}
if ( !orginExist ) {
logError( BaseMessages.getString( PKG, "JobZipFiles.No_FolderCible_Defined.Label", localSourceFilename ) );
}
}
// return a verifier
return resultat;
}
private int determineDepth( String depthString ) throws KettleException {
DecimalFormat df = new DecimalFormat( "0" );
ParsePosition pp = new ParsePosition( 0 );
df.setParseIntegerOnly( true );
try {
Number n = df.parse( depthString, pp );
if ( n == null ) {
return 1; // default
}
if ( pp.getErrorIndex() == 0 ) {
throw new KettleException( "Unable to convert stored depth '"
+ depthString + "' to depth at position " + pp.getErrorIndex() );
}
return n.intValue();
} catch ( Exception e ) {
throw new KettleException( "Unable to convert stored depth '" + depthString + "' to depth", e );
}
}
/**
* Get the requested part of the filename.
*
* @param filename
* the full filename (e.g. /path/to/a/file.txt)
* @param depth
* the depth to keep: 0 means the complete filename, 1 the name only (file.txt), 2 one folder (a/file.txt),
* 3 two folders (to/a/file.txt), and so on.
* @return the requested part of the file name up to the given depth
* @throws KettleException if the filename cannot be resolved
*/
private String determineZipfilenameForDepth( String filename, int depth ) throws KettleException {
try {
if ( Const.isEmpty( filename ) ) {
return null;
}
if ( depth == 0 ) {
return filename;
}
FileObject fileObject = KettleVFS.getFileObject( filename );
FileObject folder = fileObject.getParent();
String baseName = fileObject.getName().getBaseName();
if ( depth == 1 ) {
return baseName;
}
StringBuilder path = new StringBuilder( baseName );
int d = 1;
while ( d < depth && folder != null ) {
path.insert( 0, '/' );
path.insert( 0, folder.getName().getBaseName() );
folder = folder.getParent();
d++;
}
return path.toString();
} catch ( Exception e ) {
throw new KettleException( "Unable to get zip filename '" + filename + "' to depth " + depth, e );
}
}
private File getFile( final String filename ) {
try {
String uri = KettleVFS.getFileObject( environmentSubstitute( filename ) ).getName().getPath();
return new File( uri );
} catch ( KettleFileException ex ) {
logError( "Error in Fetching URI for File: " + filename, ex );
}
return new File( filename );
}
private boolean checkContainsFile( String realSourceDirectoryOrFile, FileObject[] filelist, boolean isDirectory ) throws FileSystemException {
boolean retval = false;
for ( int i = 0; i < filelist.length; i++ ) {
FileObject file = filelist[i];
if ( ( file.exists() && file.getType().equals( FileType.FILE ) ) ) {
retval = true;
}
}
return retval;
}
public Result execute( Result previousResult, int nr ) {
Result result = previousResult;
List<RowMetaAndData> rows = result.getRows();
// reset values
String realZipfilename = null;
String realWildcard = null;
String realWildcardExclude = null;
String realTargetdirectory = null;
String realMovetodirectory = environmentSubstitute( movetoDirectory );
// Sanity check
boolean SanityControlOK = true;
if ( afterZip == 2 ) {
if ( Const.isEmpty( realMovetodirectory ) ) {
SanityControlOK = false;
logError( BaseMessages.getString( PKG, "JobZipFiles.AfterZip_No_DestinationFolder_Defined.Label" ) );
} else {
FileObject moveToDirectory = null;
try {
moveToDirectory = KettleVFS.getFileObject( realMovetodirectory, this );
if ( moveToDirectory.exists() ) {
if ( moveToDirectory.getType() == FileType.FOLDER ) {
if ( log.isDetailed() ) {
logDetailed( BaseMessages
.getString( PKG, "JobZipFiles.Log.MoveToFolderExist", realMovetodirectory ) );
}
} else {
SanityControlOK = false;
logError( BaseMessages.getString( PKG, "JobZipFiles.Log.MoveToFolderNotFolder", realMovetodirectory ) );
}
} else {
if ( log.isDetailed() ) {
logDetailed( BaseMessages.getString(
PKG, "JobZipFiles.Log.MoveToFolderNotNotExist", realMovetodirectory ) );
}
if ( createMoveToDirectory ) {
moveToDirectory.createFolder();
if ( log.isDetailed() ) {
logDetailed( BaseMessages.getString(
PKG, "JobZipFiles.Log.MoveToFolderCreaterd", realMovetodirectory ) );
}
} else {
SanityControlOK = false;
logError( BaseMessages.getString(
PKG, "JobZipFiles.Log.MoveToFolderNotNotExist", realMovetodirectory ) );
}
}
} catch ( Exception e ) {
SanityControlOK = false;
logError( BaseMessages
.getString( PKG, "JobZipFiles.ErrorGettingMoveToFolder.Label", realMovetodirectory ), e );
} finally {
if ( moveToDirectory != null ) {
realMovetodirectory = KettleVFS.getFilename( moveToDirectory );
try {
moveToDirectory.close();
moveToDirectory = null;
} catch ( Exception e ) {
logError( "Error moving to directory", e );
result.setResult( false );
result.setNrErrors( 1 );
}
}
}
}
}
if ( !SanityControlOK ) {
result.setNrErrors( 1 );
result.setResult( false );
return result;
}
// arguments from previous
if ( isFromPrevious ) {
if ( log.isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobZipFiles.ArgFromPrevious.Found", ( rows != null ? rows
.size() : 0 )
+ "" ) );
}
}
if ( isFromPrevious && rows != null ) {
try {
for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
// get arguments from previous job entry
RowMetaAndData resultRow = rows.get( iteration );
// get target directory
realTargetdirectory = resultRow.getString( 0, null );
if ( !Const.isEmpty( realTargetdirectory ) ) {
// get wildcard to include
if ( !Const.isEmpty( resultRow.getString( 1, null ) ) ) {
realWildcard = resultRow.getString( 1, null );
}
// get wildcard to exclude
if ( !Const.isEmpty( resultRow.getString( 2, null ) ) ) {
realWildcardExclude = resultRow.getString( 2, null );
}
// get destination zip file
realZipfilename = resultRow.getString( 3, null );
if ( !Const.isEmpty( realZipfilename ) ) {
if ( !processRowFile(
parentJob, result, realZipfilename, realWildcard, realWildcardExclude, realTargetdirectory,
realMovetodirectory, createParentFolder ) ) {
result.setResult( false );
return result;
}
} else {
logError( "destination zip filename is empty! Ignoring row..." );
}
} else {
logError( "Target directory is empty! Ignoring row..." );
}
}
} catch ( Exception e ) {
logError( "Erreur during process!", e );
result.setResult( false );
result.setNrErrors( 1 );
}
} else if ( !isFromPrevious ) {
if ( !Const.isEmpty( sourceDirectory ) ) {
// get values from job entry
realZipfilename =
getFullFilename( environmentSubstitute( zipFilename ), addDate, addTime, specifyFormat, dateTimeFormat );
realWildcard = environmentSubstitute( wildCard );
realWildcardExclude = environmentSubstitute( excludeWildCard );
realTargetdirectory = environmentSubstitute( sourceDirectory );
result.setResult( processRowFile(
parentJob, result, realZipfilename, realWildcard, realWildcardExclude, realTargetdirectory,
realMovetodirectory, createParentFolder ) );
} else {
logError( "Source folder/file is empty! Ignoring row..." );
}
}
// End
return result;
}
public String getFullFilename( String filename, boolean add_date, boolean add_time, boolean specify_format,
String datetime_folder ) {
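// Builds the final zip file name by inserting an optional date/time stamp before the extension.
// Illustrative example (not from the original source): with add_date set and filename "archive.zip",
// the result would look like "archive_20240101.zip".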
String retval = "";
if ( Const.isEmpty( filename ) ) {
return null;
}
// Replace possible environment variables...
String realfilename = environmentSubstitute( filename );
int lenstring = realfilename.length();
int lastindexOfDot = realfilename.lastIndexOf( '.' );
if ( lastindexOfDot == -1 ) {
lastindexOfDot = lenstring;
}
retval = realfilename.substring( 0, lastindexOfDot );
final SimpleDateFormat daf = new SimpleDateFormat();
Date now = new Date();
if ( specify_format && !Const.isEmpty( datetime_folder ) ) {
daf.applyPattern( datetime_folder );
String dt = daf.format( now );
retval += dt;
} else {
if ( add_date ) {
daf.applyPattern( "yyyyMMdd" );
String d = daf.format( now );
retval += "_" + d;
}
if ( add_time ) {
daf.applyPattern( "HHmmssSSS" );
String t = daf.format( now );
retval += "_" + t;
}
}
retval += realfilename.substring( lastindexOfDot, lenstring );
return retval;
}
public boolean evaluates() {
return true;
}
public void setZipFilename( String zipFilename ) {
this.zipFilename = zipFilename;
}
public void setWildcard( String wildcard ) {
this.wildCard = wildcard;
}
public void setWildcardExclude( String wildcardexclude ) {
this.excludeWildCard = wildcardexclude;
}
public void setSourceDirectory( String sourcedirectory ) {
this.sourceDirectory = sourcedirectory;
}
public void setMoveToDirectory( String movetodirectory ) {
this.movetoDirectory = movetodirectory;
}
public String getSourceDirectory() {
return sourceDirectory;
}
public String getMoveToDirectory() {
return movetoDirectory;
}
public String getZipFilename() {
return zipFilename;
}
public boolean isCreateMoveToDirectory() {
return createMoveToDirectory;
}
public void setCreateMoveToDirectory( boolean createMoveToDirectory ) {
this.createMoveToDirectory = createMoveToDirectory;
}
public String getWildcard() {
return wildCard;
}
public String getWildcardExclude() {
return excludeWildCard;
}
public void setAddFileToResult( boolean addfiletoresultin ) {
this.addFileToResult = addfiletoresultin;
}
public boolean isAddFileToResult() {
return addFileToResult;
}
public void setcreateparentfolder( boolean createparentfolder ) {
this.createParentFolder = createparentfolder;
}
public void setDateInFilename( boolean adddate ) {
this.addDate = adddate;
}
public boolean isDateInFilename() {
return addDate;
}
public void setTimeInFilename( boolean addtime ) {
this.addTime = addtime;
}
public boolean isTimeInFilename() {
return addTime;
}
public boolean isSpecifyFormat() {
return specifyFormat;
}
public void setSpecifyFormat( boolean SpecifyFormat ) {
this.specifyFormat = SpecifyFormat;
}
public String getDateTimeFormat() {
return dateTimeFormat;
}
public void setDateTimeFormat( String date_time_format ) {
this.dateTimeFormat = date_time_format;
}
public boolean getcreateparentfolder() {
return createParentFolder;
}
public void setDatafromprevious( boolean isfromprevious ) {
this.isFromPrevious = isfromprevious;
}
public boolean getDatafromprevious() {
return isFromPrevious;
}
@Override
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space,
Repository repository, IMetaStore metaStore ) {
ValidatorContext ctx1 = new ValidatorContext();
putVariableSpace( ctx1, getVariables() );
putValidators( ctx1, notBlankValidator(), fileDoesNotExistValidator() );
if ( 3 == ifZipFileExists ) {
// execute method fails if the file already exists; we should too
putFailIfExists( ctx1, true );
}
andValidator().validate( this, "zipFilename", remarks, ctx1 );
if ( 2 == afterZip ) {
// setting says to move
andValidator().validate( this, "moveToDirectory", remarks, putValidators( notBlankValidator() ) );
}
andValidator().validate( this, "sourceDirectory", remarks, putValidators( notBlankValidator() ) );
}
/**
* @return true if the search for files to zip in a folder includes sub-folders
*/
public boolean isIncludingSubFolders() {
return includingSubFolders;
}
/**
* @param includesSubFolders
* Set to true if the search for files to zip in a folder needs to include sub-folders
*/
public void setIncludingSubFolders( boolean includesSubFolders ) {
this.includingSubFolders = includesSubFolders;
}
public String getStoredSourcePathDepth() {
return storedSourcePathDepth;
}
public void setStoredSourcePathDepth( String storedSourcePathDepth ) {
this.storedSourcePathDepth = storedSourcePathDepth;
}
}
|
|
/*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.tasks.config;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.tasks.TaskManager;
import com.intellij.tasks.TaskRepository;
import com.intellij.tasks.impl.BaseRepository;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.EditorTextField;
import com.intellij.ui.PanelWithAnchor;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.components.JBLabel;
import com.intellij.ui.components.JBTabbedPane;
import com.intellij.util.Consumer;
import com.intellij.util.net.HttpConfigurable;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
/**
* @author Dmitry Avdeev
*/
public class BaseRepositoryEditor<T extends BaseRepository> extends TaskRepositoryEditor implements PanelWithAnchor {
protected JBLabel myUrlLabel;
protected JTextField myURLText;
protected JTextField myUserNameText;
protected JBLabel myUsernameLabel;
protected JCheckBox myShareUrlCheckBox;
protected JPasswordField myPasswordText;
protected JBLabel myPasswordLabel;
protected JButton myTestButton;
private JPanel myPanel;
private JBCheckBox myUseProxy;
private JButton myProxySettingsButton;
protected JCheckBox myUseHttpAuthenticationCheckBox;
protected JPanel myCustomPanel;
private JBCheckBox myAddCommitMessage;
private JBLabel myComment;
private JPanel myEditorPanel;
protected JBCheckBox myLoginAnonymouslyJBCheckBox;
protected JBTabbedPane myTabbedPane;
private JTextPane myAdvertiser;
private boolean myApplying;
protected Project myProject;
protected final T myRepository;
private final Consumer<T> myChangeListener;
private final Document myDocument;
private final Editor myEditor;
private JComponent myAnchor;
public BaseRepositoryEditor(final Project project, final T repository, Consumer<T> changeListener) {
myProject = project;
myRepository = repository;
myChangeListener = changeListener;
myTestButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
afterTestConnection(TaskManager.getManager(project).testConnection(repository));
}
});
myProxySettingsButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
HttpConfigurable.editConfigurable(myPanel);
enableButtons();
doApply();
}
});
myURLText.setText(repository.getUrl());
myUserNameText.setText(repository.getUsername());
myPasswordText.setText(repository.getPassword());
myShareUrlCheckBox.setSelected(repository.isShared());
myUseProxy.setSelected(repository.isUseProxy());
myUseHttpAuthenticationCheckBox.setSelected(repository.isUseHttpAuthentication());
myUseHttpAuthenticationCheckBox.setVisible(repository.isSupported(TaskRepository.BASIC_HTTP_AUTHORIZATION));
myLoginAnonymouslyJBCheckBox.setVisible(repository.isSupported(TaskRepository.LOGIN_ANONYMOUSLY));
myLoginAnonymouslyJBCheckBox.setSelected(repository.isLoginAnonymously());
myLoginAnonymouslyJBCheckBox.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
loginAnonymouslyChanged(!myLoginAnonymouslyJBCheckBox.isSelected());
}
});
myAddCommitMessage.setSelected(repository.isShouldFormatCommitMessage());
myDocument = EditorFactory.getInstance().createDocument(repository.getCommitMessageFormat());
myEditor = EditorFactory.getInstance().createEditor(myDocument);
myEditor.getSettings().setCaretRowShown(false);
myEditorPanel.add(myEditor.getComponent(), BorderLayout.CENTER);
myComment.setText("Available placeholders: " + repository.getComment());
String advertiser = repository.getRepositoryType().getAdvertiser();
if (advertiser != null) {
Messages.installHyperlinkSupport(myAdvertiser);
myAdvertiser.setText(advertiser);
}
else {
myAdvertiser.setVisible(false);
}
installListener(myAddCommitMessage);
installListener(myDocument);
installListener(myURLText);
installListener(myUserNameText);
installListener(myPasswordText);
installListener(myShareUrlCheckBox);
installListener(myUseProxy);
installListener(myUseHttpAuthenticationCheckBox);
installListener(myLoginAnonymouslyJBCheckBox);
enableButtons();
enableEditor();
JComponent customPanel = createCustomPanel();
if (customPanel != null) {
myCustomPanel.add(customPanel, BorderLayout.CENTER);
}
setAnchor(myUseProxy);
loginAnonymouslyChanged(!myLoginAnonymouslyJBCheckBox.isSelected());
}
protected final void updateCustomPanel() {
myCustomPanel.removeAll();
JComponent customPanel = createCustomPanel();
if (customPanel != null) {
myCustomPanel.add(customPanel, BorderLayout.CENTER);
}
myCustomPanel.repaint();
}
private void loginAnonymouslyChanged(boolean enabled) {
myUsernameLabel.setEnabled(enabled);
myUserNameText.setEnabled(enabled);
myPasswordLabel.setEnabled(enabled);
myPasswordText.setEnabled(enabled);
myUseHttpAuthenticationCheckBox.setEnabled(enabled);
}
@Nullable
protected JComponent createCustomPanel() {
return null;
}
protected void afterTestConnection(final boolean connectionSuccessful) {
}
protected void enableButtons() {
myUseProxy.setEnabled(HttpConfigurable.getInstance().USE_HTTP_PROXY);
if (!HttpConfigurable.getInstance().USE_HTTP_PROXY) {
myUseProxy.setSelected(false);
}
}
protected void installListener(JCheckBox checkBox) {
checkBox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
doApply();
}
});
}
protected void installListener(JTextField textField) {
textField.getDocument().addDocumentListener(new DocumentAdapter() {
@Override
protected void textChanged(DocumentEvent e) {
ApplicationManager.getApplication().invokeLater(() -> doApply());
}
});
}
protected void installListener(JComboBox comboBox) {
comboBox.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(final ItemEvent e) {
if (e.getStateChange() == ItemEvent.SELECTED) {
doApply();
}
}
});
}
protected void installListener(final Document document) {
document.addDocumentListener(new com.intellij.openapi.editor.event.DocumentAdapter() {
@Override
public void documentChanged(com.intellij.openapi.editor.event.DocumentEvent e) {
doApply();
}
});
}
protected void installListener(EditorTextField editor) {
installListener(editor.getDocument());
}
protected void doApply() {
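// Guard against re-entrant calls: apply() updates the repository and notifies the change
// listener, which may in turn trigger another doApply() through the installed component listeners.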
if (!myApplying) {
try {
myApplying = true;
apply();
enableEditor();
}
finally {
myApplying = false;
}
}
}
private void enableEditor() {
boolean selected = myAddCommitMessage.isSelected();
UIUtil.setEnabled(myEditorPanel, selected, true);
((EditorEx)myEditor).setRendererMode(!selected);
}
public JComponent createComponent() {
return myPanel;
}
@Override
public JComponent getPreferredFocusedComponent() {
return myURLText;
}
@Override
public void dispose() {
EditorFactory.getInstance().releaseEditor(myEditor);
}
public void apply() {
myRepository.setUrl(myURLText.getText().trim());
myRepository.setUsername(myUserNameText.getText().trim());
//noinspection deprecation
myRepository.setPassword(myPasswordText.getText());
myRepository.setShared(myShareUrlCheckBox.isSelected());
myRepository.setUseProxy(myUseProxy.isSelected());
myRepository.setUseHttpAuthentication(myUseHttpAuthenticationCheckBox.isSelected());
myRepository.setLoginAnonymously(myLoginAnonymouslyJBCheckBox.isSelected());
myRepository.setShouldFormatCommitMessage(myAddCommitMessage.isSelected());
myRepository.setCommitMessageFormat(myDocument.getText());
myChangeListener.consume(myRepository);
}
@Override
public JComponent getAnchor() {
return myAnchor;
}
@Override
public void setAnchor(@Nullable final JComponent anchor) {
myAnchor = anchor;
myUrlLabel.setAnchor(anchor);
myUsernameLabel.setAnchor(anchor);
myPasswordLabel.setAnchor(anchor);
myUseProxy.setAnchor(anchor);
}
}
|
|
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.framework.main;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import javax.swing.JFrame;
import javax.swing.JPanel;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.XMLOutputter;
import db.buffers.DataBuffer;
import docking.*;
import docking.action.DockingAction;
import docking.action.MenuData;
import docking.help.Help;
import docking.help.HelpService;
import docking.tool.ToolConstants;
import docking.util.AnimationUtils;
import docking.util.image.ToolIconURL;
import docking.widgets.OptionDialog;
import generic.jar.ResourceFile;
import generic.util.WindowUtilities;
import ghidra.app.plugin.GenericPluginCategoryNames;
import ghidra.app.util.GenericHelpTopics;
import ghidra.framework.Application;
import ghidra.framework.LoggingInitialization;
import ghidra.framework.client.*;
import ghidra.framework.main.datatree.ChangedFilesDialog;
import ghidra.framework.main.datatree.CheckInTask;
import ghidra.framework.main.logviewer.event.FVEvent;
import ghidra.framework.main.logviewer.event.FVEvent.EventType;
import ghidra.framework.main.logviewer.event.FVEventListener;
import ghidra.framework.main.logviewer.model.ChunkModel;
import ghidra.framework.main.logviewer.model.ChunkReader;
import ghidra.framework.main.logviewer.ui.FileViewer;
import ghidra.framework.main.logviewer.ui.FileWatcher;
import ghidra.framework.model.*;
import ghidra.framework.options.*;
import ghidra.framework.plugintool.Plugin;
import ghidra.framework.plugintool.PluginTool;
import ghidra.framework.plugintool.util.*;
import ghidra.framework.preferences.Preferences;
import ghidra.framework.project.tool.GhidraTool;
import ghidra.framework.project.tool.GhidraToolTemplate;
import ghidra.util.*;
import ghidra.util.bean.GGlassPane;
import ghidra.util.classfinder.ClassSearcher;
import ghidra.util.datastruct.WeakDataStructureFactory;
import ghidra.util.datastruct.WeakSet;
import ghidra.util.exception.CancelledException;
import ghidra.util.exception.VersionException;
import ghidra.util.task.*;
import ghidra.util.xml.GenericXMLOutputter;
import ghidra.util.xml.XmlUtilities;
/**
* Tool that serves as the Ghidra Project Window. Only those plugins that
* implement the FrontEndable interface may be <i>directly</i> added to this
* tool by the user. Other plugins that are not marked as FrontEndable may get
* pulled in because the FrontEndable plugins depend on them. These plugins are
* aware of what tool they live in so that they can behave in the appropriate
* manner.
*/
public class FrontEndTool extends PluginTool implements OptionsChangeListener {
public static final String AUTOMATICALLY_SAVE_TOOLS = "Automatically Save Tools";
private static final String USE_ALERT_ANIMATION_OPTION_NAME = "Use Notification Animation";
// TODO: Experimental Option !!
private static final String ENABLE_COMPRESSED_DATABUFFER_OUTPUT =
"Use DataBuffer Output Compression";
private static final int MIN_HEIGHT = 600;
/**
* Preference name for whether to show the "What's New" help page when the
* Ghidra Project Window is displayed.
*/
private final static String GHIDRA_SHOW_WHATS_NEW = "GhidraShowWhatsNew";
/**
* Window state preference for the location of the divider for the split
* pane in the Ghidra Project Window. The divider is visible when another
* project view is opened.
*/
private final static String GHIDRA_MAIN_PANEL_DIVIDER_LOC = "GhidraMainPanelDividerLocation";
private static final String FRONT_END_TOOL_XML_NAME = "FRONTEND";
private static final String FRONT_END_FILE_NAME = "FrontEndTool.xml";
private static final String CONFIGURE_GROUP = "Configure";
private WeakSet<ProjectListener> listeners;
private FrontEndPlugin plugin;
private ComponentProvider compProvider;
private LogComponentProvider logProvider;
private WindowListener windowListener;
private DockingAction configureToolAction;
private PluginClassManager pluginClassManager;
/**
* Construct a new Ghidra Project Window.
*
* @param pm project manager
*/
public FrontEndTool(ProjectManager pm) {
super(null, pm, null, null /*tool template*/, false, false, false);
setToolName("Project Window");
listeners = WeakDataStructureFactory.createCopyOnWriteWeakSet();
addFrontEndPlugin();
createActions();
loadToolConfigurationFromDisk();
ensureSize();
windowListener = new WindowAdapter() {
@Override
public void windowOpened(WindowEvent e) {
setDividerLocation();
getToolFrame().removeWindowListener(windowListener);
}
};
JFrame toolFrame = getToolFrame();
toolFrame.addWindowListener(windowListener);
AppInfo.setFrontEndTool(this);
AppInfo.setActiveProject(getProject());
}
private void ensureSize() {
JFrame frame = getToolFrame();
Dimension size = frame.getSize();
if (size.height < MIN_HEIGHT) {
size.height = MIN_HEIGHT;
Point center = WindowUtilities.centerOnScreen(size);
frame.setBounds(center.x, center.y, size.width, size.height);
}
}
@Override
public PluginClassManager getPluginClassManager() {
if (pluginClassManager == null) {
pluginClassManager = new PluginClassManager(FrontEndable.class, null);
}
return pluginClassManager;
}
public void selectFiles(Set<DomainFile> files) {
plugin.selectFiles(files);
}
private void loadToolConfigurationFromDisk() {
File saveFile = new File(Application.getUserSettingsDirectory(), FRONT_END_FILE_NAME);
if (!saveFile.exists()) {
addFrontEndablePlugins();
return;
}
try {
InputStream is = new FileInputStream(saveFile);
SAXBuilder sax = XmlUtilities.createSecureSAXBuilder(false, false);
Element root = sax.build(is).getRootElement();
GhidraToolTemplate template = new GhidraToolTemplate(
(Element) root.getChildren().get(0), saveFile.getAbsolutePath());
refresh(template);
}
catch (JDOMException e) {
Msg.showError(this, null, "Error", "Error in XML reading front end configuration", e);
}
catch (IOException e) {
Msg.showError(this, null, "Error", "Error reading front end configuration", e);
}
}
void saveToolConfigurationToDisk() {
ToolTemplate template = saveToolToToolTemplate();
Element root = new Element(FRONT_END_TOOL_XML_NAME);
root.addContent(template.saveToXml());
File saveFile = new File(Application.getUserSettingsDirectory(), FRONT_END_FILE_NAME);
try {
OutputStream os = new FileOutputStream(saveFile);
org.jdom.Document doc = new org.jdom.Document(root);
XMLOutputter xmlOut = new GenericXMLOutputter();
xmlOut.output(doc, os);
os.close();
}
catch (IOException e) {
Msg.showError(this, null, "Error", "Error saving front end configuration", e);
}
}
private void addFrontEndPlugin() {
plugin = new FrontEndPlugin(this);
plugin.setProjectManager(getProjectManager());
try {
addPlugin(plugin);
}
catch (PluginException e) {
// should not happen
Msg.showError(this, getToolFrame(), "Can't Create Project Window", e.getMessage(), e);
}
compProvider = plugin.getFrontEndProvider();
showComponentHeader(compProvider, false);
}
private void initFrontEndOptions() {
ToolOptions options = getOptions(ToolConstants.TOOL_OPTIONS);
HelpLocation help = new HelpLocation(ToolConstants.TOOL_HELP_TOPIC, "Save_Tool");
options.registerOption(AUTOMATICALLY_SAVE_TOOLS, true, help,
"When enabled tools will be saved " + "when they are closed");
options.registerOption(USE_ALERT_ANIMATION_OPTION_NAME, true, help,
"Signals that user notifications " +
"should be animated. This makes notifications more distinguishable.");
options.registerOption(ENABLE_COMPRESSED_DATABUFFER_OUTPUT, Boolean.FALSE, help,
"When enabled data buffers sent to Ghidra Server are compressed (see server configuration for other direction)");
boolean autoSave = options.getBoolean(AUTOMATICALLY_SAVE_TOOLS, true);
GhidraTool.autoSave = autoSave;
boolean animationEnabled = options.getBoolean(USE_ALERT_ANIMATION_OPTION_NAME, true);
AnimationUtils.setAnimationEnabled(animationEnabled);
boolean compressDataBuffers =
options.getBoolean(ENABLE_COMPRESSED_DATABUFFER_OUTPUT, false);
DataBuffer.enableCompressedSerializationOutput(compressDataBuffers);
options.addOptionsChangeListener(this);
}
@Override
public void optionsChanged(ToolOptions options, String optionName, Object oldValue,
Object newValue) {
if (AUTOMATICALLY_SAVE_TOOLS.equals(optionName)) {
GhidraTool.autoSave = (Boolean) newValue;
}
else if (USE_ALERT_ANIMATION_OPTION_NAME.equals(optionName)) {
AnimationUtils.setAnimationEnabled((Boolean) newValue);
}
else if (ENABLE_COMPRESSED_DATABUFFER_OUTPUT.equals(optionName)) {
DataBuffer.enableCompressedSerializationOutput((Boolean) newValue);
}
}
@Override
public void exit() {
saveToolConfigurationToDisk();
plugin.exitGhidra();
}
@Override
public void close() {
exit();
}
/**
* Set the active project.
*
* @param project may be null if there is no active project
*/
public void setActiveProject(Project project) {
if (isDisposed) {
return;
}
ToolOptions options = getOptions(ToolConstants.TOOL_OPTIONS);
options.removeOptionsChangeListener(this);
configureToolAction.setEnabled(true);
setProject(project);
AppInfo.setActiveProject(project);
plugin.setActiveProject(project);
initFrontEndOptions();
}
/**
* Add the given project listener.
*
* @param l listener to add
*/
public void addProjectListener(ProjectListener l) {
listeners.add(l);
}
/**
* Remove the given project listener.
*
* @param l listener to remove
*/
public void removeProjectListener(ProjectListener l) {
listeners.remove(l);
}
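// A hedged sketch of how a caller might register for project events. The exact callback names
// on ProjectListener are an assumption here (the interface is not visible in this file), and
// "frontEndTool" is a placeholder for an instance of this class:
//
//   ProjectListener listener = new ProjectListener() {
//       @Override public void projectOpened(Project project) { /* react to open */ }
//       @Override public void projectClosed(Project project) { /* react to close */ }
//   };
//   frontEndTool.addProjectListener(listener);
//   // ... later, always unregister to avoid callbacks on a stale component:
//   frontEndTool.removeProjectListener(listener);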
/**
* NOTE: do not call this from a non-Swing thread
*
* @param tool the tool
* @return true if the repository is null or is connected.
*/
boolean checkRepositoryConnected(PluginTool tool) {
RepositoryAdapter repository = tool.getProject().getRepository();
if (repository != null) {
if (!repository.verifyConnection()) {
if (OptionDialog.showYesNoDialog(tool.getToolFrame(), "Lost Connection to Server",
"The connection to the Ghidra Server has been lost.\n" +
"Do you want to reconnect now?") == OptionDialog.OPTION_ONE) {
try {
repository.connect();
return true;
}
catch (NotConnectedException e) {
// message displayed by repository server adapter
return false;
}
catch (IOException e) {
ClientUtil.handleException(repository, e, "Repository Connection",
tool.getToolFrame());
return false;
}
}
return false;
}
}
return true;
}
/**
* Check in the given domain file.
*
* @param tool tool that has the domain file opened
* @param domainFile domain file to check in
*/
public void checkIn(PluginTool tool, DomainFile domainFile) {
ArrayList<DomainFile> list = new ArrayList<>();
list.add(domainFile);
checkIn(tool, list, tool.getToolFrame());
}
/**
* Check in the list of domain files.
*
* @param tool tool that has the domain files opened
* @param fileList list of DomainFile objects
* @param parent parent of dialog if an error occurs during checkin
*/
public void checkIn(PluginTool tool, List<DomainFile> fileList, Component parent) {
if (!checkRepositoryConnected(tool)) {
return;
}
ArrayList<DomainFile> changedList = new ArrayList<>();
ArrayList<DomainFile> list = new ArrayList<>();
for (int i = 0; i < fileList.size(); i++) {
DomainFile df = fileList.get(i);
if (df != null && df.canCheckin()) {
if (!canCloseDomainFile(df)) {
continue;
}
list.add(df);
if (df.isChanged()) {
changedList.add(df);
}
}
}
if (changedList.size() > 0) {
ChangedFilesDialog dialog = new ChangedFilesDialog(tool, changedList);
dialog.setCancelToolTipText("Cancel Check In");
if (!dialog.showDialog()) {// blocks until the user hits Save or Cancel
Msg.info(this, "Checkin canceled");
return;
}
for (int i = 0; i < changedList.size(); i++) {
DomainFile df = changedList.get(i);
if (df.isChanged()) {
list.remove(df);
}
}
}
if (list.size() > 0) {
tool.execute(new CheckInTask(tool, list, parent));
}
else {
Msg.showError(this, tool.getToolFrame(), "Checkin Failed", "Unable to checkin file(s)");
}
}
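// Hedged usage sketch for the check-in entry points above: a caller holding an open,
// checked-out domain file would typically use the single-file convenience overload, which
// prompts to save unsaved changes and then schedules a CheckInTask. "frontEndTool", "tool",
// "fileA" and "fileB" are placeholders, not fields of this class:
//
//   frontEndTool.checkIn(tool, file);
//   // or, for several files with an explicit parent for error dialogs:
//   frontEndTool.checkIn(tool, java.util.Arrays.asList(fileA, fileB), tool.getToolFrame());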
/**
* Merge the latest version in the repository with the given checked out
* domain file. Upon completion of the merge, the domain file appears as
* though the latest version was checked out.
*
* @param tool tool that has the domain file opened
* @param domainFile domain file where latest version will be merged into
* @param taskListener listener that is notified when the merge task
* completes
*/
public void merge(PluginTool tool, DomainFile domainFile, TaskListener taskListener) {
ArrayList<DomainFile> list = new ArrayList<>();
list.add(domainFile);
merge(tool, list, taskListener);
}
/**
* Merge the latest version (in the repository) of each checked out file in
* fileList. Upon completion of the merge, the domain file appears as though
* the latest version was checked out.
*
* @param tool tool that has the domain files opened
* @param fileList list of files that are checked out and are to be merged
* @param taskListener listener that is notified when the merge task
* completes
*/
public void merge(PluginTool tool, List<DomainFile> fileList, TaskListener taskListener) {
if (!checkRepositoryConnected(tool)) {
return;
}
ArrayList<DomainFile> list = new ArrayList<>();
ArrayList<DomainFile> changedList = new ArrayList<>();
for (int i = 0; i < fileList.size(); i++) {
DomainFile df = fileList.get(i);
if (df != null && df.canMerge()) {
if (!canCloseDomainFile(df)) {
continue;
}
list.add(df);
if (df.isChanged()) {
changedList.add(df);
}
}
}
if (changedList.size() > 0) {
ChangedFilesDialog dialog = new ChangedFilesDialog(tool, changedList);
dialog.setCancelToolTipText("Cancel Merge");
if (!dialog.showDialog()) {// blocks until the user hits Save or Cancel
Msg.info(this, "Merge canceled");
return;
}
for (int i = 0; i < changedList.size(); i++) {
DomainFile df = changedList.get(i);
if (df.isChanged()) {
list.remove(df);
}
}
}
if (list.size() > 0) {
execute(new MergeTask(tool, list, taskListener));
}
else {
Msg.showError(this, tool.getToolFrame(), "Update Failed", "Unable to update file(s)");
}
}
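// Hedged usage sketch for merge: the TaskListener callback names mirror how MergeTask notifies
// its listener further below (taskCompleted/taskCancelled); the exact parameter type is assumed.
// "frontEndTool", "tool" and "file" are placeholders:
//
//   frontEndTool.merge(tool, file, new TaskListener() {
//       @Override public void taskCompleted(Task task) { /* e.g. refresh views */ }
//       @Override public void taskCancelled(Task task) { /* file remains checked out */ }
//   });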
@Override
public void setVisible(boolean visibility) {
if (visibility) {
super.setVisible(visibility);
plugin.rebuildRecentMenus();
checkWhatsNewPreference();
}
else {
super.setVisible(visibility);
// Treat setVisible(false) as a dispose, as this is the only time we should be hidden
AppInfo.setFrontEndTool(null);
AppInfo.setActiveProject(null);
dispose();
}
}
public void setBusy(boolean busy) {
JFrame rootFrame = winMgr.getRootFrame();
Component glassPane = rootFrame.getGlassPane();
if (!(glassPane instanceof GGlassPane)) {
Msg.debug(this, "Found root frame without a GhidraGlassPane registered!");
return;
}
GGlassPane dockingGlassPane = (GGlassPane) glassPane;
dockingGlassPane.setBusy(busy);
}
private void addManageExtensionsAction() {
DockingAction installExtensionsAction = new DockingAction("Extensions", "Project Window") {
@Override
public void actionPerformed(ActionContext context) {
showExtensions();
extensionTableProvider.setHelpLocation(
new HelpLocation(GenericHelpTopics.FRONT_END, "Extensions"));
}
@Override
public boolean isEnabledForContext(ActionContext context) {
return isConfigurable();
}
};
MenuData menuData =
new MenuData(new String[] { ToolConstants.MENU_FILE, "Install Extensions..." }, null,
CONFIGURE_GROUP);
menuData.setMenuSubGroup(CONFIGURE_GROUP + 2);
installExtensionsAction.setMenuBarData(menuData);
installExtensionsAction.setHelpLocation(
new HelpLocation(GenericHelpTopics.FRONT_END, "Extensions"));
installExtensionsAction.setEnabled(true);
addAction(installExtensionsAction);
}
private void addManagePluginsAction() {
configureToolAction = new DockingAction("Configure Tool", "Project Window") {
@Override
public void actionPerformed(ActionContext context) {
showConfig(false, false);
manageDialog.setHelpLocation(
new HelpLocation(GenericHelpTopics.FRONT_END, "Configure"));
}
@Override
public boolean isEnabledForContext(ActionContext context) {
return isConfigurable();
}
};
MenuData menuData = new MenuData(new String[] { ToolConstants.MENU_FILE, "Configure..." },
null, CONFIGURE_GROUP);
menuData.setMenuSubGroup(CONFIGURE_GROUP + 1);
configureToolAction.setMenuBarData(menuData);
configureToolAction.setHelpLocation(
new HelpLocation(GenericHelpTopics.FRONT_END, "Configure"));
configureToolAction.setEnabled(true);
addAction(configureToolAction);
}
@Override
public ToolTemplate getToolTemplate(boolean includeConfigState) {
ToolTemplate toolTemplate = new FrontEndToolTemplate(getIconURL(),
saveToXml(includeConfigState), getSupportedDataTypes());
return toolTemplate;
}
//////////////////////////////////////////////////////////////////////
/**
* Get project listeners.
*
* @return the registered project listeners
*/
Iterable<ProjectListener> getListeners() {
return listeners;
}
// access for Junit tests
ComponentProvider getProvider() {
return compProvider;
}
SaveState getSaveableDisplayData() {
SaveState saveState = new SaveState();
plugin.writeDataState(saveState);
return saveState;
}
void setSaveableDisplayData(SaveState saveState) {
plugin.readDataState(saveState);
}
////////////////////////////////////////////////////////////////////
/**
* Add those plugins that implement the FrontEndable interface, have a
* RELEASED status, and are not in the Examples or Testing categories.
*/
private void addFrontEndablePlugins() {
List<String> classNames = new ArrayList<>();
for (Class<? extends Plugin> pluginClass : ClassSearcher.getClasses(Plugin.class,
c -> FrontEndable.class.isAssignableFrom(c))) {
PluginDescription pd = PluginDescription.getPluginDescription(pluginClass);
String category = pd.getCategory();
boolean isBadCategory = category.equals(GenericPluginCategoryNames.EXAMPLES) ||
category.equals(GenericPluginCategoryNames.TESTING);
if (pd.getStatus() == PluginStatus.RELEASED && !isBadCategory) {
classNames.add(pluginClass.getName());
}
}
try {
addPlugins(classNames.toArray(new String[classNames.size()]));
}
catch (PluginException e) {
Msg.showError(this, getToolFrame(), "Plugin Error", "Error restoring front-end plugins",
e);
}
}
/**
* Refresh the plugins in the Ghidra Project Window based on what is
* contained in the given tool template.
*
* @param tc tool template that contains an entry for each plugin and its
* configuration state
*/
private void refresh(ToolTemplate tc) {
listeners = WeakDataStructureFactory.createCopyOnWriteWeakSet();
List<Plugin> list = getManagedPlugins();
list.remove(plugin);
Plugin[] plugins = new Plugin[list.size()];
plugins = list.toArray(plugins);
removePlugins(plugins);
Element root = tc.saveToXml();
Element elem = root.getChild("TOOL");
restoreOptionsFromXml(elem);
try {
restorePluginsFromXml(elem);
}
catch (PluginException e) {
Msg.showError(this, getToolFrame(), "Error Restoring Front-end Plugins", e.getMessage(),
e);
}
winMgr.restoreFromXML(tc.getToolElement());
setConfigChanged(false);
}
private void createActions() {
addExitAction();
addManagePluginsAction();
addManageExtensionsAction();
addOptionsAction();
addHelpActions();
// our log file action
DockingAction action = new DockingAction("Show Log", ToolConstants.TOOL_OWNER) {
@Override
public void actionPerformed(ActionContext context) {
showGhidraUserLogFile();
}
};
action.setMenuBarData(
new MenuData(new String[] { ToolConstants.MENU_HELP, "Show Log" }, null, "BBB"));
action.setEnabled(true);
addAction(action);
}
private void setDividerLocation() {
String dividerLocStr = Preferences.getProperty(GHIDRA_MAIN_PANEL_DIVIDER_LOC);
if (dividerLocStr != null) {
int dividerLoc = parse(dividerLocStr, -1);
ProjectDataPanel pdp = plugin.getProjectDataPanel();
pdp.setDividerLocation(dividerLoc);
pdp.invalidate();
getToolFrame().validate();
}
}
/**
* Get the int value for the given string.
*
* @param value the string value to parse
* @param defaultValue value to return if a NumberFormatException is
* thrown during the parseInt() call
* @return the parsed int value, or defaultValue if parsing fails
*/
private int parse(String value, int defaultValue) {
if (value != null) {
try {
return Integer.parseInt(value);
}
catch (NumberFormatException e) {
// don't care
}
}
return defaultValue;
}
/**
* Check the "What's New" preference; if it has not been set yet, show the
* "What's New" help page and mark the preference so the page is not shown
* again on subsequent launches.
*/
private void checkWhatsNewPreference() {
if (SystemUtilities.isInDevelopmentMode() || SystemUtilities.isInTestingMode()) {
return; // don't show help in development or test mode
}
HelpService help = Help.getHelpService();
// if this is the first time Ghidra is being run, pop up
// the What's New help page
String showWhatsNewString = Preferences.getProperty(GHIDRA_SHOW_WHATS_NEW, "true");
boolean showWhatsNew = Boolean.parseBoolean(showWhatsNewString);
if (!showWhatsNew) {
return;
}
Preferences.setProperty(GHIDRA_SHOW_WHATS_NEW, "false");
Preferences.store();
ResourceFile installDir = Application.getInstallationDirectory();
ResourceFile whatsNewFile = new ResourceFile(installDir, "docs/WhatsNew.html");
try {
URL url = whatsNewFile.toURL();
help.showHelp(url);
}
catch (MalformedURLException e) {
Msg.debug(this, "Unable to show the What's New help page", e);
}
}
@Override
public boolean canCloseDomainFile(DomainFile df) {
PluginTool[] tools = getProject().getToolManager().getRunningTools();
for (PluginTool tool : tools) {
DomainFile[] files = tool.getDomainFiles();
for (DomainFile domainFile : files) {
if (df == domainFile) {
return tool.canCloseDomainFile(df);
}
}
}
return true;
}
void showGhidraUserLogFile() {
File logFile = LoggingInitialization.getApplicationLogFile();
if (logFile == null) {
return;// something odd is going on; can't find log file
}
if (logProvider == null) {
logProvider = new LogComponentProvider(this, logFile);
showDialog(logProvider);
return;
}
if (logProvider.isShowing()) {
logProvider.toFront();
}
else {
showDialog(logProvider, getToolFrame());
}
}
//==================================================================================================
// Inner Classes
//==================================================================================================
private static class LogComponentProvider extends DialogComponentProvider {
private final File logFile;
private Dimension defaultSize = new Dimension(600, 400);
private FileWatcher watcher;
LogComponentProvider(PluginTool tool, File logFile) {
super("Ghidra User Log", false, false, false, false);
this.logFile = logFile;
addWorkPanel(buildWorkPanel());
}
/**
* Need to override this method so we can stop the file watcher when the
* dialog is closed.
*/
@Override
protected void dialogClosed() {
if (watcher != null) {
watcher.stop();
}
}
/**
* Need to override this method so we can start the file watcher when the
* dialog is shown.
*/
@Override
protected void dialogShown() {
if (watcher != null) {
watcher.start();
}
}
private JPanel buildWorkPanel() {
JPanel panel = new JPanel(new BorderLayout()) {
@Override
public Dimension getPreferredSize() {
return defaultSize;
}
};
try {
FVEventListener eventListener = new FVEventListener();
ChunkModel model = new ChunkModel();
ChunkReader reader = new ChunkReader(logFile, model);
FileViewer viewer = new FileViewer(reader, model, eventListener);
panel.add(viewer);
panel.setVisible(true);
// Turn on the file watcher so events will be fired off whenever the log file
// changes.
watcher = new FileWatcher(logFile, eventListener);
watcher.start();
// Now tell subscribers that the file needs to be read-in. Have it view the bottom
// of the file on startup.
FVEvent loadEvt = new FVEvent(EventType.SCROLL_END, null);
eventListener.send(loadEvt);
}
catch (IOException e) {
Msg.error(this, "Exception reading log file", e);
}
return panel;
}
}
/**
* Task to merge latest version of a domain file into the checked out
* version.
*/
private class MergeTask extends Task {
private List<DomainFile> list;
private PluginTool tool;
private TaskListener taskListener;
private boolean wasCanceled;
/**
* Construct a new MergeTask.
*
* @param tool tool that has the domain files open
* @param list list of DomainFiles to be merged
* @param taskListener listener that is notified when this task
* completes
*/
MergeTask(PluginTool tool, List<DomainFile> list, TaskListener taskListener) {
super("Merge", true, true, true);
this.tool = tool;
this.list = list;
this.taskListener = taskListener;
}
@Override
public void run(TaskMonitor monitor) {
String currentName = null;
try {
for (int i = 0; i < list.size() && !monitor.isCancelled(); i++) {
DomainFile df = list.get(i);
currentName = df.getName();
monitor.setMessage("Initiating Merging for " + currentName);
df.merge(true, monitor);
}
}
catch (VersionException e) {
Msg.showError(this, tool.getToolFrame(), "Error During Merge Process",
"Versioned file was created with newer version of Ghidra: " + currentName);
}
catch (CancelledException e) {
wasCanceled = true;
Msg.info(this, "Merge Process was canceled");
}
catch (IOException e) {
ClientUtil.handleException(getProject().getRepository(), e, "Merge Process",
tool.getToolFrame());
}
notifyTaskListener();
}
private void notifyTaskListener() {
if (taskListener == null) {
return;
}
Swing.runNow(() -> {
if (wasCanceled) {
taskListener.taskCancelled(MergeTask.this);
}
else {
taskListener.taskCompleted(MergeTask.this);
}
});
}
}
private static class FrontEndToolTemplate extends GhidraToolTemplate {
FrontEndToolTemplate(ToolIconURL iconURL, Element element, Class<?>[] supportedDataTypes) {
super(iconURL, element, supportedDataTypes);
}
}
}
|
|
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.test;
import com.gs.fw.common.mithra.MithraList;
import com.gs.fw.common.mithra.databasetype.DatabaseType;
import com.gs.fw.common.mithra.databasetype.SybaseIqDatabaseType;
import com.gs.fw.common.mithra.databasetype.SybaseIqNativeDatabaseType;
import com.gs.fw.common.mithra.finder.Operation;
import com.gs.fw.common.mithra.test.domain.*;
import java.sql.*;
import java.util.Calendar;
import java.util.Date;
public class MithraSybaseIqTestAbstract extends MithraTestAbstract
{
private MithraTestResource mithraTestResource;
private String testDataFileName = "testdata/vendor/mithraSybaseIqTestData.txt";
public void setTestDataFileName(String testDataFileName)
{
this.testDataFileName = testDataFileName;
}
public String getTestDataFileName()
{
return testDataFileName;
}
// To test with the native driver, switch the next three methods to the Native types (a commented sketch follows them) and switch in the MithraSybaseIqTestConfig.xml as well.
protected DatabaseType getNormalDatabaseType()
{
return SybaseIqDatabaseType.getInstance();
}
protected DatabaseType getUnsharedTempDatabaseType()
{
return SybaseIqDatabaseType.getInstanceWithoutSharedTempTables();
}
protected VendorTestConnectionManager getVendorTestConnectionManager()
{
return SybaseIqTestConnectionManager.getInstance();
}
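// A sketch of the native-driver variant referred to in the comment above. Whether the
// SybaseIqNativeDatabaseType class (already imported) exposes the same factory methods, and the
// name of the matching connection manager, are assumptions here:
//
//   protected DatabaseType getNormalDatabaseType()
//   {
//       return SybaseIqNativeDatabaseType.getInstance();
//   }
//   protected DatabaseType getUnsharedTempDatabaseType()
//   {
//       return SybaseIqNativeDatabaseType.getInstanceWithoutSharedTempTables();
//   }
//   protected VendorTestConnectionManager getVendorTestConnectionManager()
//   {
//       return SybaseIqNativeTestConnectionManager.getInstance(); // hypothetical manager class
//   }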
protected void setUp()
throws Exception
{
TestInfinityTimestamp.fixForSybaseIq();
setMithraTestObjectToResultSetComparator(new AllTypesIqResultSetComparator());
mithraTestResource = new MithraTestResource("MithraSybaseIqTestConfig.xml", getNormalDatabaseType());
mithraTestResource.setRestrictedClassList(getRestrictedClassList());
VendorTestConnectionManager connectionManager = getVendorTestConnectionManager();
connectionManager.setDefaultSource("DVDB");
connectionManager.setDatabaseTimeZone(this.getDatabaseTimeZone());
mithraTestResource.createSingleDatabase(connectionManager, "DVDB", getTestDataFileName());
mithraTestResource.setTestConnectionsOnTearDown(true);
mithraTestResource.createDatabaseForStringSourceAttribute(connectionManager, "A", "testdata/vendor/mithraSybaseIqSourceATestData.txt");
mithraTestResource.setUp();
}
protected void tearDown() throws Exception
{
if (mithraTestResource != null)
{
mithraTestResource.tearDown();
}
if (!getVendorTestConnectionManager().ensureAllConnectionsReturnedToPool())
{
fail("Connections were not returned to pool");
}
}
protected DatabaseType getDatabaseType()
{
return this.mithraTestResource.getDatabaseType();
}
protected void validateMithraResult(Operation op, String sql, int minSize)
{
AllTypesIqList list = new AllTypesIqList(op);
list.forceResolve();
this.validateMithraResult(list, sql, minSize);
}
protected void validateMithraResult(MithraList list, String sql, int minSize)
{
try
{
list.setBypassCache(true);
Connection con = getVendorTestConnectionManager().getConnection();
PreparedStatement ps = con.prepareStatement(sql);
this.genericRetrievalTest(ps, list, con, minSize);
}
catch(SQLException e)
{
getLogger().error("SQLException on MithraSybaseIqTestAbstract.validateMithraResult()", e);
throw new RuntimeException("SQLException ",e);
}
}
protected void validateMithraResult(Operation op, String sql)
{
validateMithraResult(op, sql, 1);
}
protected AllTypesIqList createNewAllTypesIqList(int firstId, long count)
{
AllTypesIqList list = new AllTypesIqList();
for(int i = firstId; i < (firstId + count); i++)
{
AllTypesIq obj = this.createNewAllTypesIq(i, true);
list.add(obj);
}
return list;
}
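// Hedged usage sketch for the helper above: a subclass test would typically build a batch and
// persist it in one call before validating. insertAll(), the finder class and the table name
// below are assumptions, not taken from this file:
//
//   AllTypesIqList inserted = createNewAllTypesIqList(10000, 100);
//   inserted.insertAll();
//   validateMithraResult(AllTypesIqFinder.all(), "select * from ALL_TYPES_IQ", 100);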
protected ProductList createNewProductList(int firstId, long count)
{
ProductList list = new ProductList();
for(int i = firstId; i < (firstId + count); i++)
{
Product obj = new Product();
obj.setProductId(i);
obj.setProductCode("ABC"+i);
obj.setProductDescription("Product "+i);
obj.setManufacturerId(1);
obj.setDailyProductionRate(100.25f);
list.add(obj);
}
return list;
}
protected AllTypesIq createNewAllTypesIq(int id, boolean withNullablesNull)
{
AllTypesIq allTypesIqObj = new AllTypesIq();
long time = System.currentTimeMillis() / 10 * 10;
Calendar cal = Calendar.getInstance();
cal.set(Calendar.AM_PM, Calendar.AM);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
Date date = new Date(cal.getTimeInMillis());
Timestamp timestamp = new Timestamp(time);
byte[] newData = new byte[5];
newData[0] = toByte(0xAA);
newData[1] = toByte(0xBB);
newData[2] = toByte(0x99);
newData[3] = toByte(0x11);
newData[4] = 0;
if(withNullablesNull)
{
allTypesIqObj.setNullablePrimitiveAttributesToNull();
}
else
{
allTypesIqObj.setNullableByteValue((byte)100);
allTypesIqObj.setNullableShortValue((short) 30000);
allTypesIqObj.setNullableCharValue('a');
allTypesIqObj.setNullableIntValue(2000000000);
allTypesIqObj.setNullableLongValue(9000000000000000000L);
allTypesIqObj.setNullableFloatValue(100.99f);
allTypesIqObj.setNullableDoubleValue(100.99998888777);
allTypesIqObj.setNullableDateValue(date);
allTypesIqObj.setNullableTimestampValue(timestamp);
allTypesIqObj.setNullableStringValue("This is a test");
}
allTypesIqObj.setId(id);
allTypesIqObj.setBooleanValue(true);
allTypesIqObj.setByteValue((byte)100);
allTypesIqObj.setShortValue((short) 30000);
allTypesIqObj.setCharValue('a');
allTypesIqObj.setIntValue(2000000000);
allTypesIqObj.setLongValue(9000000000000000000L);
allTypesIqObj.setFloatValue(100.99f);
allTypesIqObj.setDoubleValue(100.99998888777);
allTypesIqObj.setDateValue(date);
allTypesIqObj.setTimestampValue(timestamp);
allTypesIqObj.setStringValue("This is a test");
return allTypesIqObj;
}
public void testLocalTempTable() throws Exception
{
Connection con = getVendorTestConnectionManager().getConnection();
Statement stm = con.createStatement();
stm.executeUpdate("create local temporary table TAAAAWMCEAHLGOOLFPCAOrderDriv (c0 integer not null) on commit preserve rows");
ResultSet rs = stm.executeQuery("select 1 from TAAAAWMCEAHLGOOLFPCAOrderDriv where 0 = 1");
ResultSetMetaData metaData = rs.getMetaData();
System.out.println("meta data");
con.close();
}
}
|
|
/*
* Copyright 2015-2016 DevCon5 GmbH, [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.tourniquet.junit.net;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import org.hamcrest.core.IsNot;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.Description;
import org.junit.runner.RunWith;
import org.junit.runners.model.Statement;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
/**
* Tests for the UDPReceiver rule.
*/
@RunWith(MockitoJUnitRunner.class)
public class UDPReceiverTest {
@Mock
private Description description;
private UDPReceiver subject;
@Before
public void setUp() throws Exception {
this.subject = new UDPReceiver();
}
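// These tests drive the rule manually via apply(...).evaluate() so they can assert on its state
// in between. In a regular test class the receiver would normally be registered as a JUnit rule
// instead, e.g. (sketch, assuming UDPReceiver implements TestRule):
//
//   @Rule
//   public UDPReceiver receiver = new UDPReceiver();
//
//   @Test
//   public void receivesDatagram() throws Exception {
//       // send a datagram to receiver.getServerPort(), then:
//       assertTrue(receiver.hasMorePackets());
//   }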
@Test
public void testHasMorePackets_noneInQueue_false() throws Exception {
assertFalse(subject.hasMorePackets());
}
@Test
public void testHasMorePackets_packetReceived_true() throws Throwable {
//prepare
Statement stmt = new Statement() {
@Override
public void evaluate() throws Throwable {
sendPacket("Test".getBytes());
}
} ;
subject.apply(stmt, description).evaluate();
//act
boolean result = subject.hasMorePackets();
//assert
assertTrue(result);
}
@Test
public void testNextPacket() throws Throwable {
//prepare
Statement stmt = new Statement() {
@Override
public void evaluate() throws Throwable {
sendPacket("Test1".getBytes());
sendPacket("Test2".getBytes());
sendPacket("Test3".getBytes());
}
} ;
subject.apply(stmt, description).evaluate();
//act
byte[] packet1 = subject.nextPacket();
byte[] packet2 = subject.nextPacket();
byte[] packet3 = subject.nextPacket();
//assert
assertEquals("Test1", new String(packet1));
assertEquals("Test2", new String(packet2));
assertEquals("Test3", new String(packet3));
}
@Test
public void testPacketCount() throws Throwable {
//prepare
Statement stmt = new Statement() {
@Override
public void evaluate() throws Throwable {
sendPacket("Test1".getBytes());
sendPacket("Test2".getBytes());
sendPacket("Test3".getBytes());
}
} ;
subject.apply(stmt, description).evaluate();
//act
int count = subject.packetCount();
//assert
assertEquals(3, count);
}
@Test
public void testSetBufferSize() throws Throwable {
//prepare
Statement stmt = new Statement() {
@Override
public void evaluate() throws Throwable {
sendPacket("Test1".getBytes());
}
} ;
//act
subject.setBufferSize(4);
//assert
subject.apply(stmt, description).evaluate();
assertTrue(subject.hasMorePackets());
byte[] packet = subject.nextPacket();
assertEquals(4, packet.length);
}
@Test
public void testSetServerPort() throws Throwable {
//prepare
final AtomicInteger actualPort = new AtomicInteger();
Statement stmt = new Statement() {
@Override
public void evaluate() throws Throwable {
actualPort.set(subject.getServerPort());
sendPacket("Test1".getBytes());
assertThat(NetworkMatchers.datagramPort(actualPort.get()), IsNot.not(NetworkMatchers.isAvailable()));
}
} ;
//act
int port = NetworkUtils.findAvailablePort();
subject.setServerPort(port);
//assert
subject.apply(stmt, description).evaluate();
assertEquals(port, actualPort.get());
}
@Test
public void testOnDatagramReceived() throws Throwable {
//prepare
Statement stmt = new Statement() {
@Override
public void evaluate() throws Throwable {
sendPacket("Test".getBytes());
}
} ;
//act
final List<byte[]> packets = new CopyOnWriteArrayList<>();
subject.onDatagramReceived(packets::add);
//assert
subject.apply(stmt, description).evaluate();
assertEquals(1, packets.size());
assertEquals("Test", new String(packets.get(0)));
}
/**
* Sends a UDP packet to the port the rule under test is listening on.
* @param data payload bytes to send
* @throws IOException if the datagram could not be sent
*/
private void sendPacket(final byte[] data) throws IOException {
final InetAddress address = InetAddress.getLocalHost();
final DatagramPacket packet = new DatagramPacket(data, data.length, address, subject.getServerPort());
try(DatagramSocket datagramSocket = new DatagramSocket()) {
datagramSocket.send(packet);
Thread.sleep(25);
} catch (InterruptedException e) {
// ignore interruption; the packet has already been sent
}
}
}
|
|
/*
* Knetik Platform API Documentation latest
* This is the spec for the Knetik API. Use this in conjunction with the documentation found at https://knetikcloud.com.
*
* OpenAPI spec version: latest
* Contact: [email protected]
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.knetikcloud.api;
import com.knetikcloud.client.ApiException;
import com.knetikcloud.model.EntitlementGrantRequest;
import com.knetikcloud.model.EntitlementItem;
import com.knetikcloud.model.InventoryStatusWrapper;
import com.knetikcloud.model.InvoiceResource;
import com.knetikcloud.model.ItemTemplateResource;
import com.knetikcloud.model.PageResourceEntitlementItem;
import com.knetikcloud.model.PageResourceItemTemplateResource;
import com.knetikcloud.model.PageResourceUserInventoryResource;
import com.knetikcloud.model.PageResourceUserItemLogResource;
import com.knetikcloud.model.Result;
import com.knetikcloud.model.UserInventoryAddRequest;
import com.knetikcloud.model.UserInventoryResource;
import org.junit.Test;
import org.junit.Ignore;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* API tests for UsersInventoryApi
*/
@Ignore
public class UsersInventoryApiTest {
private final UsersInventoryApi api = new UsersInventoryApi();
/**
* Adds an item to the user inventory
*
* The inventory is fulfilled asynchronously UNLESS the invoice is explicitly skipped. Depending on the use case, it might require the client to verify that the entitlement was added after the fact or configure a BRE rule to get a notification in real time. <br><br><b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void addItemToUserInventoryTest() throws ApiException {
Integer id = null;
UserInventoryAddRequest userInventoryAddRequest = null;
InvoiceResource response = api.addItemToUserInventory(id, userInventoryAddRequest);
// TODO: test validations
}
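// A hedged sketch of what a filled-in version of the test above could look like. The setter on
// UserInventoryAddRequest is an assumption (the model class is not shown here); only the api
// calls themselves appear in this file. Since fulfillment is asynchronous, the test would poll
// the inventory afterwards rather than assert immediately:
//
//   Integer userId = 123;
//   UserInventoryAddRequest request = new UserInventoryAddRequest();
//   // request.setItemId(456);  // assumed setter
//   InvoiceResource invoice = api.addItemToUserInventory(userId, request);
//   PageResourceUserInventoryResource inventory =
//       api.getUserInventories(userId, null, null, null, null, null, null, null, null);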
/**
* Check for access to an item without consuming
*
* Useful for a pre-check; accounts for all the various business rules. <br><br><b>Permissions Needed:</b> INVENTORY_ADMIN or owner
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void checkUserEntitlementItemTest() throws ApiException {
String userId = null;
Integer itemId = null;
String sku = null;
api.checkUserEntitlementItem(userId, itemId, sku);
// TODO: test validations
}
/**
* Create an entitlement item
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void createEntitlementItemTest() throws ApiException {
Boolean cascade = null;
EntitlementItem entitlementItem = null;
EntitlementItem response = api.createEntitlementItem(cascade, entitlementItem);
// TODO: test validations
}
/**
* Create an entitlement template
*
* Entitlement templates define a type of entitlement and the properties they have. <br><br><b>Permissions Needed:</b> TEMPLATE_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void createEntitlementTemplateTest() throws ApiException {
ItemTemplateResource template = null;
ItemTemplateResource response = api.createEntitlementTemplate(template);
// TODO: test validations
}
/**
* Delete an entitlement item
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void deleteEntitlementItemTest() throws ApiException {
Integer entitlementId = null;
api.deleteEntitlementItem(entitlementId);
// TODO: test validations
}
/**
* Delete an entitlement template
*
* If cascade = 'detach', it will force delete the template even if it's attached to other objects. <br><br><b>Permissions Needed:</b> TEMPLATE_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void deleteEntitlementTemplateTest() throws ApiException {
String id = null;
String cascade = null;
api.deleteEntitlementTemplate(id, cascade);
// TODO: test validations
}
/**
* Get a single entitlement item
*
* <b>Permissions Needed:</b> ANY
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getEntitlementItemTest() throws ApiException {
Integer entitlementId = null;
EntitlementItem response = api.getEntitlementItem(entitlementId);
// TODO: test validations
}
/**
* List and search entitlement items
*
* <b>Permissions Needed:</b> ANY
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getEntitlementItemsTest() throws ApiException {
String filterTemplate = null;
Integer size = null;
Integer page = null;
String order = null;
PageResourceEntitlementItem response = api.getEntitlementItems(filterTemplate, size, page, order);
// TODO: test validations
}
/**
* Get a single entitlement template
*
* <b>Permissions Needed:</b> TEMPLATE_ADMIN or ACHIEVEMENTS_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getEntitlementTemplateTest() throws ApiException {
String id = null;
ItemTemplateResource response = api.getEntitlementTemplate(id);
// TODO: test validations
}
/**
* List and search entitlement templates
*
* <b>Permissions Needed:</b> TEMPLATE_ADMIN or ACHIEVEMENTS_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getEntitlementTemplatesTest() throws ApiException {
Integer size = null;
Integer page = null;
String order = null;
PageResourceItemTemplateResource response = api.getEntitlementTemplates(size, page, order);
// TODO: test validations
}
/**
* List the user inventory entries for a given user
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN or owner
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getUserInventoriesTest() throws ApiException {
Integer id = null;
Boolean inactive = null;
Integer size = null;
Integer page = null;
String filterItemName = null;
Integer filterItemId = null;
String filterUsername = null;
String filterGroup = null;
String filterDate = null;
PageResourceUserInventoryResource response = api.getUserInventories(id, inactive, size, page, filterItemName, filterItemId, filterUsername, filterGroup, filterDate);
// TODO: test validations
}
/**
* Get an inventory entry
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getUserInventoryTest() throws ApiException {
String userId = null;
Integer id = null;
UserInventoryResource response = api.getUserInventory(userId, id);
// TODO: test validations
}
/**
* List the log entries for this inventory entry
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN or owner
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getUserInventoryLogTest() throws ApiException {
String userId = null;
Integer id = null;
Integer size = null;
Integer page = null;
PageResourceUserItemLogResource response = api.getUserInventoryLog(userId, id, size, page);
// TODO: test validations
}
/**
* List the user inventory entries for all users
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getUsersInventoryTest() throws ApiException {
Boolean inactive = null;
Integer size = null;
Integer page = null;
String filterItemName = null;
Integer filterItemId = null;
String filterUsername = null;
String filterGroup = null;
String filterDate = null;
PageResourceUserInventoryResource response = api.getUsersInventory(inactive, size, page, filterItemName, filterItemId, filterUsername, filterGroup, filterDate);
// TODO: test validations
}
/**
* Grant an entitlement
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void grantUserEntitlementTest() throws ApiException {
Integer userId = null;
EntitlementGrantRequest grantRequest = null;
api.grantUserEntitlement(userId, grantRequest);
// TODO: test validations
}
/**
* Update an entitlement item
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void updateEntitlementItemTest() throws ApiException {
Integer entitlementId = null;
Boolean cascade = null;
EntitlementItem entitlementItem = null;
api.updateEntitlementItem(entitlementId, cascade, entitlementItem);
// TODO: test validations
}
/**
* Update an entitlement template
*
* <b>Permissions Needed:</b> TEMPLATE_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void updateEntitlementTemplateTest() throws ApiException {
String id = null;
ItemTemplateResource template = null;
ItemTemplateResource response = api.updateEntitlementTemplate(id, template);
// TODO: test validations
}
/**
* Set the behavior data for an inventory entry
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void updateUserInventoryBehaviorDataTest() throws ApiException {
Integer userId = null;
Integer id = null;
Object data = null;
api.updateUserInventoryBehaviorData(userId, id, data);
// TODO: test validations
}
/**
* Set the expiration date
*
* Will change the current grace period for a subscription but not the bill date (possibly even ending before having the chance to re-bill). <br><br><b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void updateUserInventoryExpiresTest() throws ApiException {
Integer userId = null;
Integer id = null;
Long timestamp = null;
api.updateUserInventoryExpires(userId, id, timestamp);
// TODO: test validations
}
/**
* Set the status for an inventory entry
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void updateUserInventoryStatusTest() throws ApiException {
Integer userId = null;
Integer id = null;
InventoryStatusWrapper inventoryStatus = null;
api.updateUserInventoryStatus(userId, id, inventoryStatus);
// TODO: test validations
}
/**
* Use an item
*
* <b>Permissions Needed:</b> INVENTORY_ADMIN or owner
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void useUserEntitlementItemTest() throws ApiException {
String userId = null;
Integer itemId = null;
String sku = null;
String info = null;
api.useUserEntitlementItem(userId, itemId, sku, info);
// TODO: test validations
}
}
|
|
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.service.reflection;
import static com.dremio.service.reflection.ReflectionStatus.AVAILABILITY_STATUS.AVAILABLE;
import static com.dremio.service.reflection.ReflectionUtils.isTerminal;
import static com.dremio.service.reflection.proto.MaterializationState.DEPRECATED;
import static com.dremio.service.reflection.proto.MaterializationState.FAILED;
import static com.dremio.service.reflection.proto.ReflectionState.ACTIVE;
import static com.dremio.service.reflection.proto.ReflectionState.REFRESHING;
import static com.dremio.service.users.SystemUser.SYSTEM_USERNAME;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Future;
import com.dremio.exec.planner.acceleration.MaterializationDescriptor;
import com.dremio.exec.proto.UserBitShared.QueryProfile;
import com.dremio.exec.proto.UserBitShared.QueryResult;
import com.dremio.exec.proto.UserBitShared.ReflectionType;
import com.dremio.exec.server.MaterializationDescriptorProvider;
import com.dremio.service.job.QueryProfileRequest;
import com.dremio.service.job.proto.JobProtobuf;
import com.dremio.service.jobs.JobNotFoundException;
import com.dremio.service.jobs.JobsService;
import com.dremio.service.reflection.MaterializationCache.CacheViewer;
import com.dremio.service.reflection.proto.ExternalReflection;
import com.dremio.service.reflection.proto.Materialization;
import com.dremio.service.reflection.proto.MaterializationId;
import com.dremio.service.reflection.proto.MaterializationState;
import com.dremio.service.reflection.proto.ReflectionEntry;
import com.dremio.service.reflection.proto.ReflectionId;
import com.dremio.service.reflection.proto.ReflectionState;
import com.dremio.service.reflection.store.MaterializationStore;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
/**
* Monitors current status of reflections.
*/
public class ReflectionMonitor {
private static final boolean IS_DEBUG = java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toString().indexOf("-agentlib:jdwp") > 0;
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ReflectionMonitor.class);
private final ReflectionService reflections;
private final ReflectionStatusService statusService;
private final MaterializationDescriptorProvider materializations;
private final JobsService jobsService;
private final MaterializationStore materializationStore;
private final long delay;
private final long maxWait;
public ReflectionMonitor(ReflectionService reflections, ReflectionStatusService statusService,
MaterializationDescriptorProvider materializations, JobsService jobsService,
MaterializationStore materializationStore, long delay, long maxWait) {
this.reflections = reflections;
this.statusService = statusService;
this.materializations = materializations;
this.jobsService = jobsService;
this.materializationStore = materializationStore;
this.delay = delay;
this.maxWait = maxWait;
}
public ReflectionMonitor withWait(long maxWait) {
return new ReflectionMonitor(reflections, statusService, materializations, jobsService, materializationStore, delay, maxWait);
}
public void waitUntilRefreshed(final ReflectionId reflectionId) {
waitForState(reflectionId, REFRESHING);
waitForState(reflectionId, ACTIVE);
}
public ReflectionEntry waitForState(final ReflectionId reflectionId, final ReflectionState state) {
Optional<ReflectionEntry> reflection;
Wait w = new Wait();
while (w.loop()) {
reflection = reflections.getEntry(reflectionId);
if (reflection.isPresent()) {
logger.debug("reflection {} is {}", reflection.get().getName(), reflection.get().getState());
if(reflection.get().getState() == state) {
return reflection.get();
}
} else {
logger.debug("reflection not available");
}
}
throw new IllegalStateException();
}
public Materialization waitUntilMaterialized(ReflectionId id) {
return waitUntilMaterialized(id, null);
}
/**
* Wait until any materialization is done after a specific materialization
* @param reflectionId reflection id
* @param materialization the specific materialization
* @return the materialization which is done after the specific materialization
*/
public Materialization waitUntilMaterialized(final ReflectionId reflectionId, final Materialization materialization) {
final MaterializationId materializationId = (materialization == null) ? null : materialization.getId();
Wait w = new Wait();
while (w.loop()) {
// Get the last materialization done and return it if it's done after the specific materialization
final Materialization lastMaterializationDone = materializationStore.getLastMaterializationDone(reflectionId);
if (lastMaterializationDone != null && !Objects.equals(materializationId, lastMaterializationDone.getId())
&& (materialization == null || lastMaterializationDone.getInitRefreshSubmit() > materialization.getInitRefreshSubmit())) {
return lastMaterializationDone;
}
// Throw a materialization error if there is a failed materialization after the specific materialization
final Materialization lastMaterializationFailed = materializationStore.getLastMaterializationFailed(reflectionId);
if (lastMaterializationFailed != null && !Objects.equals(materializationId, lastMaterializationFailed.getId())
&& (materialization == null || lastMaterializationFailed.getInitRefreshSubmit() > materialization.getInitRefreshSubmit())) {
throwMaterializationError(lastMaterializationFailed);
}
}
throw new IllegalStateException();
}
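// Hedged usage sketch: chaining the two overloads above waits for two consecutive successful
// materializations of the same reflection; the second call only returns a materialization that
// completed after the first one. "monitor" is a placeholder for an instance of this class:
//
//   Materialization first = monitor.waitUntilMaterialized(reflectionId);
//   // ... trigger another refresh ...
//   Materialization second = monitor.waitUntilMaterialized(reflectionId, first);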
public void waitTillReflectionManagerHasCycled(){
ReflectionManager reflectionManager = reflections.getReflectionManager();
long last = reflectionManager.getLastWakeupTime();
Wait w = new Wait();
//We need to wait till 2 refresh cycles have completed to ensure we were not in the middle of one
boolean cycled = false;
do {
if(last < reflectionManager.getLastWakeupTime()) {
if(cycled){
return;
} else {
cycled = true;
}
} else {
reflections.wakeupManager("Testing");
}
} while(w.loop());
throw new IllegalStateException();
}
/**
* Throws a runtime exception for a failed materialization with its error message
* @param failedMaterialization failed materialization
*/
private void throwMaterializationError(final Materialization failedMaterialization) {
Preconditions.checkArgument(failedMaterialization.getState() == FAILED, "materialization did not fail");
final QueryProfileRequest request = QueryProfileRequest.newBuilder()
.setJobId(JobProtobuf.JobId.newBuilder()
.setId(failedMaterialization.getInitRefreshJobId())
.build())
.setUserName(SYSTEM_USERNAME)
.build();
try {
final QueryProfile queryProfile = jobsService.getProfile(request);
if (queryProfile.getState() == QueryResult.QueryState.FAILED) {
throw new RuntimeException("Materialization failed: " + queryProfile.getError());
} else {
throw new RuntimeException(String.format("Refresh job completed, but materialization failed with %s.", failedMaterialization.getFailure().getMessage()));
}
} catch (JobNotFoundException e) {
throw new RuntimeException("Failed to get refresh job profile after materialization failed.");
}
}
public void waitUntilCached(Materialization m) {
waitUntilCached(m.getId());
}
public void waitUntilCached(MaterializationId id) {
Wait w = new Wait();
final CacheViewer cacheViewer = reflections.getCacheViewerProvider().get();
while (w.loop()) {
if (cacheViewer.isCached(id)) {
return;
}
}
throw new IllegalStateException();
}
/**
* wait for the first materialization of a reflection to be refreshing
* @param id reflection id
* @return the running materialization
*/
public Materialization waitUntilMaterializationRunning(final ReflectionId id) {
return waitUntilMaterializationRunning(id, (MaterializationId) null);
}
/**
* wait for the next materialization of a reflection to be refreshing
* @param id reflection id
* @param m previous materialization of the reflection
* @return the running materialization
*/
public Materialization waitUntilMaterializationRunning(final ReflectionId id, Materialization m) {
return waitUntilMaterializationRunning(id, m.getId());
}
/**
* wait for the next materialization of a reflection to be refreshing
*
* **Note:** Given that RUNNING is an intermediate state, unit tests that use this need to be carefully written.
* Otherwise we may see random failures if the materialization completes before the monitor got the chance to notice
* it was in running state.
*
* @param id reflection id
* @param lastMaterializationId previous materialization id of the reflection
* @return the running materialization
*/
public Materialization waitUntilMaterializationRunning(final ReflectionId id, MaterializationId lastMaterializationId) {
Wait w = new Wait();
while (w.loop()) {
final Materialization lastMaterialization = materializationStore.getLastMaterialization(id);
if (lastMaterialization != null &&
!Objects.equals(lastMaterializationId, lastMaterialization.getId()) &&
lastMaterialization.getState() == MaterializationState.RUNNING) {
return lastMaterialization;
}
}
throw new IllegalStateException();
}
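// Because RUNNING is an intermediate state (see the note above), a typical caller pairs this
// method with waitUntilMaterializationFinished so that a fast refresh cannot slip past the
// monitor. Sketch, with "monitor" as a placeholder for an instance of this class:
//
//   Materialization running = monitor.waitUntilMaterializationRunning(reflectionId);
//   // ... cancel or inspect the in-flight refresh here ...
//   monitor.waitUntilMaterializationFinished(reflectionId, running);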
public Materialization waitUntilMaterializationFails(final ReflectionId id) {
return waitUntilMaterializationFails(id, (MaterializationId) null);
}
public Materialization waitUntilMaterializationFails(final ReflectionId id, Materialization m) {
return waitUntilMaterializationFails(id, m.getId());
}
public Materialization waitUntilMaterializationFails(final ReflectionId id, MaterializationId lastMaterializationId) {
Wait w = new Wait();
while (w.loop()) {
final Materialization lastMaterialization = materializationStore.getLastMaterialization(id);
if (lastMaterialization != null &&
!Objects.equals(lastMaterializationId, lastMaterialization.getId()) &&
lastMaterialization.getState() == MaterializationState.FAILED) {
return lastMaterialization;
}
}
throw new IllegalStateException();
}
/**
* wait for the first materialization of a reflection to be canceled
* @param id reflection id
* @return the canceled materialization
*/
public Materialization waitUntilMaterializationCanceled(final ReflectionId id) {
return waitUntilMaterializationCanceled(id, (MaterializationId) null);
}
/**
* wait for the next materialization of a reflection to be canceled
* @param id reflection id
* @param m previous materialization of the reflection
* @return the canceled materialization
*/
public Materialization waitUntilMaterializationCanceled(final ReflectionId id, Materialization m) {
return waitUntilMaterializationCanceled(id, m.getId());
}
/**
* wait for the next materialization of a reflection to be canceled
* @param id reflection id
* @param lastMaterializationId previous materialization id of the reflection
* @return the canceled materialization
*/
public Materialization waitUntilMaterializationCanceled(final ReflectionId id, MaterializationId lastMaterializationId) {
Wait w = new Wait();
while (w.loop()) {
final Materialization lastMaterialization = materializationStore.getLastMaterialization(id);
if (lastMaterialization != null &&
!Objects.equals(lastMaterializationId, lastMaterialization.getId()) &&
lastMaterialization.getState() == MaterializationState.CANCELED) {
return lastMaterialization;
}
}
throw new IllegalStateException();
}
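/**
* wait for the next materialization of a reflection to reach a terminal state
* @param id reflection id
* @param m previous materialization of the reflection, or null
* @return the finished materialization
*/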
public Materialization waitUntilMaterializationFinished(final ReflectionId id, Materialization m) {
return waitUntilMaterializationFinished(id, m != null ? m.getId() : null);
}
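/**
* wait for the next materialization of a reflection to reach a terminal state
* @param id reflection id
* @param lastMaterializationId previous materialization id of the reflection, or null
* @return the finished materialization
*/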
public Materialization waitUntilMaterializationFinished(final ReflectionId id, MaterializationId lastMaterializationId) {
Wait w = new Wait();
while (w.loop()) {
final Materialization lastMaterialization = materializationStore.getLastMaterialization(id);
if (lastMaterialization != null &&
!Objects.equals(lastMaterializationId, lastMaterialization.getId()) &&
isTerminal(lastMaterialization.getState())) {
return lastMaterialization;
}
}
throw new IllegalStateException();
}
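/**
* wait until the reflection's availability status is AVAILABLE, i.e. it can be used for acceleration
* @param reflectionId reflection id
*/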
public void waitUntilCanAccelerate(final ReflectionId reflectionId) {
Wait w = new Wait();
while (w.loop()) {
if (statusService.getReflectionStatus(reflectionId).getAvailabilityStatus() == AVAILABLE) {
return;
}
}
throw new IllegalStateException();
}
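/**
* wait until no materializations are reported anymore
*/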
public void waitUntilNoMaterializationsAvailable() {
Wait w = new Wait();
while (w.loop()) {
if (materializations.get().isEmpty()) {
return;
}
}
throw new IllegalStateException();
}
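/**
* repeatedly wake up the reflection manager and wait until the expected number of materializations is present
* and no non-external reflection is still refreshing or has its last successful refresh before the given request time
* @param requestTime time the refresh was requested
* @param numMaterializations expected number of materializations
*/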
public void waitUntilNoMoreRefreshing(long requestTime, long numMaterializations) {
Wait w = new Wait();
while (w.loop()) {
Future<?> future = reflections.wakeupManager("start refresh");
try {
future.get();
} catch (Exception e) {
Throwables.propagate(e);
}
List<MaterializationDescriptor> materializationDescriptorList = materializations.get();
if ((materializationDescriptorList.size() == numMaterializations) && materializationDescriptorList.stream()
.filter(m -> m.getReflectionType() != ReflectionType.EXTERNAL)
.noneMatch(m -> {
Optional<ReflectionEntry> e = reflections.getEntry(new ReflectionId(m.getLayoutId()));
long lastSuccessful = e.get().getLastSuccessfulRefresh();
return e.transform(r -> (r.getState() == REFRESHING || ((lastSuccessful != 0L) && (lastSuccessful < requestTime)))).or(false);
})) {
break;
}
}
}
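/**
* simple polling helper: {@link #loop()} returns true immediately on the first call; afterwards it throws
* {@link TimeoutException} once {@code maxWait} has elapsed (unless IS_DEBUG is set), and otherwise sleeps
* {@code delay} before returning true again. Typical usage, as in the methods above ({@code condition()} is a
* placeholder for whatever state is being polled):
* <pre>
* Wait w = new Wait();
* while (w.loop()) {
*     if (condition()) {
*         return;
*     }
* }
* throw new IllegalStateException();
* </pre>
*/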
private class Wait {
private final long expire = System.currentTimeMillis() + maxWait;
private int loop = 0;
public boolean loop() {
loop++;
if (loop == 1) {
return true;
}
if (System.currentTimeMillis() > expire && !IS_DEBUG) {
throw new TimeoutException();
}
try {
Thread.sleep(delay);
} catch (InterruptedException ex) {
throw Throwables.propagate(ex);
}
return true;
}
}
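/**
* wait until the external reflection with the given id is no longer present
* @param externalReflectionId external reflection id
*/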
public void waitUntilExternalReflectionsRemoved(String externalReflectionId) {
Wait wait = new Wait();
while (wait.loop()) {
final Optional<ExternalReflection> entry = reflections.getExternalReflectionById(externalReflectionId);
if (!entry.isPresent()) {
return;
}
}
}
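/**
* wait until the reflection entry is no longer present
* @param reflectionId reflection id
*/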
public void waitUntilRemoved(final ReflectionId reflectionId) {
Wait wait = new Wait();
while (wait.loop()) {
final Optional<ReflectionEntry> entry = reflections.getEntry(reflectionId);
if (!entry.isPresent()) {
return;
}
}
}
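/**
* wait until the materialization is no longer present in the materialization store
* @param materializationId materialization id
*/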
public void waitUntilDeleted(final MaterializationId materializationId) {
Wait wait = new Wait();
while (wait.loop()) {
final Materialization m = materializationStore.get(materializationId);
if (m == null) {
return;
}
}
}
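/**
* wait until the materialization reaches the DEPRECATED state
* @param m materialization
*/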
public void waitUntilDeprecated(Materialization m) {
waitUntilDeprecated(m.getId());
}
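/**
* wait until the materialization reaches the DEPRECATED state
* @param id materialization id
*/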
public void waitUntilDeprecated(MaterializationId id) {
Wait w = new Wait();
while (w.loop()) {
if (materializationStore.get(id).getState() == DEPRECATED) {
return;
}
}
throw new IllegalStateException();
}
/**
* Thrown when {@link Wait} times out
*/
public static class TimeoutException extends RuntimeException {
TimeoutException() {
super("Maximum wait for event was exceeded.");
}
}
}
|
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.searchdefinition.parser;
import com.yahoo.io.IOUtils;
import com.yahoo.io.reader.NamedReader;
import static com.yahoo.config.model.test.TestUtil.joinLines;
import java.nio.charset.StandardCharsets;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.List;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertThrows;
/**
* @author arnej
*/
public class IntermediateCollectionTestCase {
@Test
public void can_add_minimal_schema() throws Exception {
String input = joinLines
("schema foo {",
" document foo {",
" }",
"}");
var collection = new IntermediateCollection();
ParsedSchema schema = collection.addSchemaFromString(input);
assertEquals("foo", schema.name());
assertTrue(schema.hasDocument());
assertEquals("foo", schema.getDocument().name());
}
@Test
public void names_may_differ() throws Exception {
String input = joinLines
("schema foo_search {",
" document foo {",
" }",
"}");
var collection = new IntermediateCollection();
ParsedSchema schema = collection.addSchemaFromString(input);
assertEquals("foo_search", schema.name());
assertTrue(schema.hasDocument());
assertEquals("foo", schema.getDocument().name());
}
@Test
public void can_add_schema_files() throws Exception {
var collection = new IntermediateCollection();
collection.addSchemaFromFile("src/test/derived/deriver/child.sd");
collection.addSchemaFromFile("src/test/derived/deriver/grandparent.sd");
collection.addSchemaFromFile("src/test/derived/deriver/parent.sd");
var schemes = collection.getParsedSchemas();
assertEquals(schemes.size(), 3);
var schema = schemes.get("child");
assertTrue(schema != null);
assertEquals(schema.name(), "child");
schema = schemes.get("parent");
assertTrue(schema != null);
assertEquals(schema.name(), "parent");
schema = schemes.get("grandparent");
assertTrue(schema != null);
assertEquals(schema.name(), "grandparent");
}
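/** helper wrapping a file in a NamedReader, read as UTF-8 */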
NamedReader readerOf(String fileName) throws Exception {
File f = new File(fileName);
FileReader fr = new FileReader(f, StandardCharsets.UTF_8);
BufferedReader br = new BufferedReader(fr);
return new NamedReader(fileName, br);
}
@Test
public void can_add_schemas() throws Exception {
var collection = new IntermediateCollection();
collection.addSchemaFromReader(readerOf("src/test/derived/deriver/child.sd"));
collection.addSchemaFromReader(readerOf("src/test/derived/deriver/grandparent.sd"));
collection.addSchemaFromReader(readerOf("src/test/derived/deriver/parent.sd"));
var schemes = collection.getParsedSchemas();
assertEquals(schemes.size(), 3);
var schema = schemes.get("child");
assertTrue(schema != null);
assertEquals(schema.name(), "child");
schema = schemes.get("parent");
assertTrue(schema != null);
assertEquals(schema.name(), "parent");
schema = schemes.get("grandparent");
assertTrue(schema != null);
assertEquals(schema.name(), "grandparent");
}
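/** helper returning the rank profile with the given name, or null if absent */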
ParsedRankProfile get(List<ParsedRankProfile> all, String name) {
for (var rp : all) {
if (rp.name().equals(name)) return rp;
}
return null;
}
@Test
public void can_add_extra_rank_profiles() throws Exception {
var collection = new IntermediateCollection();
collection.addSchemaFromFile("src/test/derived/rankprofilemodularity/test.sd");
collection.addRankProfileFile("test", "src/test/derived/rankprofilemodularity/test/outside_schema1.profile");
collection.addRankProfileFile("test", readerOf("src/test/derived/rankprofilemodularity/test/outside_schema2.profile"));
var schemes = collection.getParsedSchemas();
assertEquals(schemes.size(), 1);
var schema = schemes.get("test");
assertTrue(schema != null);
assertEquals(schema.name(), "test");
var rankProfiles = schema.getRankProfiles();
assertEquals(rankProfiles.size(), 7);
var outside = get(rankProfiles, "outside_schema1");
assertTrue(outside != null);
assertEquals(outside.name(), "outside_schema1");
var functions = outside.getFunctions();
assertEquals(functions.size(), 1);
assertEquals(functions.get(0).name(), "fo1");
outside = get(rankProfiles, "outside_schema2");
assertTrue(outside != null);
assertEquals(outside.name(), "outside_schema2");
functions = outside.getFunctions();
assertEquals(functions.size(), 1);
assertEquals(functions.get(0).name(), "fo2");
}
@Test
public void name_mismatch_throws() throws Exception {
var collection = new IntermediateCollection();
var ex = assertThrows(IllegalArgumentException.class, () ->
collection.addSchemaFromReader(readerOf("src/test/cfg/application/sdfilenametest/schemas/notmusic.sd")));
assertEquals("The file containing schema 'music' must be named 'music.sd', was 'notmusic.sd'",
ex.getMessage());
}
@Test
public void bad_parse_throws() throws Exception {
var collection = new IntermediateCollection();
var ex = assertThrows(ParseException.class, () ->
collection.addSchemaFromFile("src/test/examples/badparse.sd"));
assertTrue(ex.getMessage().startsWith("Failed parsing schema from src/test/examples/badparse.sd: Encountered"));
ex = assertThrows(ParseException.class, () ->
collection.addSchemaFromReader(readerOf("src/test/examples/badparse.sd")));
assertTrue(ex.getMessage().startsWith("Failed parsing schema from src/test/examples/badparse.sd: Encountered"));
collection.addSchemaFromFile("src/test/derived/rankprofilemodularity/test.sd");
collection.addRankProfileFile("test", "src/test/derived/rankprofilemodularity/test/outside_schema1.profile");
ex = assertThrows(ParseException.class, () ->
collection.addRankProfileFile("test", "src/test/examples/badparse.sd"));
assertTrue(ex.getMessage().startsWith("Failed parsing rank-profile from src/test/examples/badparse.sd: Encountered"));
}
@Test
public void can_resolve_document_inheritance() throws Exception {
var collection = new IntermediateCollection();
collection.addSchemaFromFile("src/test/derived/deriver/child.sd");
collection.addSchemaFromFile("src/test/derived/deriver/grandparent.sd");
collection.addSchemaFromFile("src/test/derived/deriver/parent.sd");
collection.resolveInternalConnections();
var schemes = collection.getParsedSchemas();
assertEquals(schemes.size(), 3);
var childDoc = schemes.get("child").getDocument();
var inherits = childDoc.getResolvedInherits();
assertEquals(inherits.size(), 1);
var parentDoc = inherits.get(0);
assertEquals(parentDoc.name(), "parent");
inherits = parentDoc.getResolvedInherits();
assertEquals(inherits.size(), 1);
assertEquals(inherits.get(0).name(), "grandparent");
}
@Test
public void can_detect_schema_inheritance_cycles() throws Exception {
var collection = new IntermediateCollection();
collection.addSchemaFromString("schema foo inherits bar { document foo {} }");
collection.addSchemaFromString("schema bar inherits qux { document bar {} }");
collection.addSchemaFromString("schema qux inherits foo { document qux {} }");
assertEquals(collection.getParsedSchemas().size(), 3);
var ex = assertThrows(IllegalArgumentException.class, () ->
collection.resolveInternalConnections());
assertTrue(ex.getMessage().startsWith("Inheritance/reference cycle for schemas: "));
}
@Test
public void can_detect_document_inheritance_cycles() throws Exception {
var collection = new IntermediateCollection();
collection.addSchemaFromString("schema foo { document foo inherits bar {} }");
collection.addSchemaFromString("schema bar { document bar inherits qux {} }");
collection.addSchemaFromString("schema qux { document qux inherits foo {} }");
assertEquals(collection.getParsedSchemas().size(), 3);
var ex = assertThrows(IllegalArgumentException.class, () ->
collection.resolveInternalConnections());
System.err.println("ex: "+ex.getMessage());
assertTrue(ex.getMessage().startsWith("Inheritance/reference cycle for documents: "));
}
@Test
public void can_detect_missing_doc() throws Exception {
var collection = new IntermediateCollection();
collection.addSchemaFromString("schema foo { document foo inherits bar {} }");
collection.addSchemaFromString("schema qux { document qux inherits foo {} }");
assertEquals(collection.getParsedSchemas().size(), 2);
var ex = assertThrows(IllegalArgumentException.class, () ->
collection.resolveInternalConnections());
assertEquals("document foo inherits from unavailable document bar", ex.getMessage());
}
@Test
public void can_detect_document_reference_cycle() throws Exception {
var collection = new IntermediateCollection();
collection.addSchemaFromString("schema foo { document foo { field oneref type reference<bar> {} } }");
collection.addSchemaFromString("schema bar { document bar { field tworef type reference<foo> {} } }");
assertEquals(collection.getParsedSchemas().size(), 2);
var ex = assertThrows(IllegalArgumentException.class, () ->
collection.resolveInternalConnections());
System.err.println("ex: "+ex.getMessage());
assertTrue(ex.getMessage().startsWith("Inheritance/reference cycle for documents: "));
}
@Test
public void can_detect_cycles_with_reference() throws Exception {
var collection = new IntermediateCollection();
collection.addSchemaFromString("schema foo { document foodoc inherits bardoc {} }");
collection.addSchemaFromString("schema bar { document bardoc { field myref type reference<qux> { } } }");
collection.addSchemaFromString("schema qux inherits foo { document qux inherits foodoc {} }");
assertEquals(collection.getParsedSchemas().size(), 3);
var ex = assertThrows(IllegalArgumentException.class, () ->
collection.resolveInternalConnections());
System.err.println("ex: "+ex.getMessage());
assertTrue(ex.getMessage().startsWith("Inheritance/reference cycle for documents: "));
}
}
|
|
package epizza.order.checkout;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.javamoney.moneta.Money;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.data.domain.PageRequest;
import org.springframework.hateoas.EntityLinks;
import org.springframework.hateoas.MediaTypes;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.restdocs.JUnitRestDocumentation;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.web.context.WebApplicationContext;
import java.net.URI;
import epizza.order.DbCleanupRule;
import epizza.order.OrderApplicationTest;
import epizza.order.catalog.Pizza;
import lombok.SneakyThrows;
import static com.epages.restdocs.WireMockDocumentation.wiremockJson;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.mockito.Mockito.verify;
import static org.springframework.restdocs.mockmvc.MockMvcRestDocumentation.document;
import static org.springframework.restdocs.mockmvc.MockMvcRestDocumentation.documentationConfiguration;
import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath;
import static org.springframework.restdocs.payload.PayloadDocumentation.requestFields;
import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.test.web.servlet.setup.MockMvcBuilders.webAppContextSetup;
@RunWith(SpringRunner.class)
@OrderApplicationTest
public class OrderControllerTest {
@Rule
public final JUnitRestDocumentation restDocumentation = new JUnitRestDocumentation("build/generated-snippets");
@Rule
@Autowired
public DbCleanupRule dbCleanupRule;
@Autowired
private OrderService orderService;
@Autowired
private EntityLinks entityLinks;
@Autowired
private WebApplicationContext context;
@Autowired
private ObjectMapper objectMapper;
@MockBean
private OrderEventPublisher orderEventPublisher;
private MockMvc mockMvc;
private ResultActions ordersResultAction;
private String jsonInput;
private Order order;
@Before
public void setupContext() {
mockMvc = webAppContextSetup(context)
.apply(documentationConfiguration(this.restDocumentation).uris().withPort(80))
.build();
}
@Test
@SneakyThrows
public void should_create_order() {
givenInputData();
whenOrderCreated();
ordersResultAction
.andExpect(status().is(HttpStatus.CREATED.value()))
.andExpect(header().string(HttpHeaders.LOCATION, containsString("/orders")))
.andDo(document("order-create", //
requestFields( //
fieldWithPath("comment").description("delivery comment"), //
fieldWithPath("lineItems[].pizza").description("which pizza do you want?"), //
fieldWithPath("lineItems[].quantity").description("how many pizzas do you eat today?"), //
fieldWithPath("deliveryAddress.firstname").description("Your first name"), //
fieldWithPath("deliveryAddress.lastname").description("Your last name"), //
fieldWithPath("deliveryAddress.street").description("Your stree"), //
fieldWithPath("deliveryAddress.city").description("Your city"), //
fieldWithPath("deliveryAddress.postalCode").description("Your postal code"), //
fieldWithPath("deliveryAddress.telephone").description("Your telephone"), //
fieldWithPath("deliveryAddress.email").description("Your email address").optional() //
)
// SCHNIPP
, wiremockJson()
// SCHNAPP
))
;
verify(orderEventPublisher).sendOrderCreatedEvent(order);
}
@Test
@SneakyThrows
public void should_get_order() {
givenExistingOrder();
whenOrderRetrieved();
ordersResultAction
.andExpect(status().is(HttpStatus.OK.value()))
.andExpect(jsonPath("$.status", is(order.getStatus().name())))
.andExpect(jsonPath("$.totalPrice", notNullValue()))
.andExpect(jsonPath("$.orderItems", hasSize(order.getOrderItems().size())))
.andExpect(jsonPath("$.deliveryAddress.firstname", is(order.getDeliveryAddress().getFirstname())))
.andExpect(jsonPath("$._links.self.href",
is(entityLinks.linkForSingleResource(Order.class, order.getId()).toUri().toString())))
.andDo(document("order-get",
responseFields(
fieldWithPath("_id").description("Order identifier"),
fieldWithPath("status").description("Order status"),
fieldWithPath("orderedAt").description("Order creation timestamp"),
fieldWithPath("totalPrice").description("Total order amount"),
fieldWithPath("estimatedTimeOfBakingCompletion").description("Estimated time of baking completion"),
// SCHNIPP
fieldWithPath("estimatedTimeOfDelivery").description("Estimated time of delivery"),
fieldWithPath("deliveryBoy").description("Delivery boy"),
// SCHNAPP
fieldWithPath("comment").description("Customer's comment"),
fieldWithPath("orderItems[]._links.pizza").description("Link to ordered pizza"),
fieldWithPath("orderItems[].quantity").description("Number of pizzas"),
fieldWithPath("orderItems[].price").description("Price (Currency symbol and numeric value)"),
fieldWithPath("deliveryAddress").description("Delivery address as POSTed when <<resources-order-create,creating an Order>>"),
fieldWithPath("_links").description("<<links,Links>> to other resources")
)
// SCHNIPP
, wiremockJson()
// SCHNAPP
)) //
;
}
@Test
@SneakyThrows
public void should_get_all_orders() {
givenExistingOrder();
whenAllOrdersRetrieved();
ordersResultAction
.andExpect(status().is(HttpStatus.OK.value()))
.andDo(document("orders-list",
responseFields(
fieldWithPath("_embedded").description("Current page of <<resources-order-get,Orders>>"),
fieldWithPath("page").description("<<paging,Paging>> information"),
fieldWithPath("_links").description("<<links,Links>> to other resources")
)
// SCHNIPP
, wiremockJson()
// SCHNAPP
)) //
;
}
@SneakyThrows
private void whenAllOrdersRetrieved() {
ordersResultAction = mockMvc.perform(get("/orders").accept(MediaTypes.HAL_JSON));
}
@SneakyThrows
private void whenOrderRetrieved() {
URI orderUri = entityLinks.linkForSingleResource(Order.class, order.getId()).toUri();
ordersResultAction = mockMvc.perform(get(orderUri)
.accept(MediaTypes.HAL_JSON))
.andDo(print());
}
private void givenExistingOrder() {
Order newOrder = new Order();
newOrder.setComment("some comment");
Address address = Address.builder()
.city("Hamburg")
.firstname("Mathias")
.lastname("Dpunkt")
.postalCode("22222")
.street("Pilatuspool 2")
.telephone("+4908154711")
.build();
newOrder.setDeliveryAddress(address);
OrderItem orderItem = OrderItem.builder()
.pizza(Pizza.builder().id(1L).price(Money.parse("EUR 1.23")).build())
.quantity(2)
.build();
newOrder.addOrderItem(orderItem);
order = orderService.create(newOrder);
}
@SneakyThrows
private void whenOrderCreated() {
ordersResultAction = mockMvc.perform(post("/orders")
.contentType(MediaType.APPLICATION_JSON)
.content(jsonInput))
.andExpect(status().is2xxSuccessful());
order = orderService.getAll(new PageRequest(0, 20)).iterator().next();
}
@SneakyThrows
private void givenInputData() {
ImmutableMap<String, String> address = ImmutableMap.<String, String>builder()
.put("firstname", "Mathias")
.put("lastname", "Dpunkt")
.put("street", "Somestreet 1")
.put("city", "Hamburg")
.put("telephone", "+49404321343")
.put("postalCode", "22305") //
.put("email", "[email protected]") //
.build();
jsonInput = objectMapper.writeValueAsString(ImmutableMap.of(
"comment", "Some comment",
"deliveryAddress", address,
"lineItems", ImmutableList.of(ImmutableMap.of(
"quantity", 1,
"pizza", "http://localhost/pizzas/1"
)
)
));
}
}
|
|
package com.ajax.json;
import java.io.StringWriter;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import org.apache.log4j.Logger;
/**
* Test class. This file is not formally a member of the org.json library. It is
* just a casual test tool.
*/
public class Test {
protected static Logger logger = Logger.getLogger(Test.class);
/**
* Entry point.
*
* @param args command line arguments (not used)
*/
public static void main(String args[]) {
Iterator it;
JSONArray a;
JSONObject j;
JSONStringer jj;
Object o;
String s;
/**
* Obj is a typical class that implements JSONString. It also provides
* some beanie methods that can be used to construct a JSONObject. It
* also demonstrates constructing a JSONObject with an array of names.
*/
class Obj implements JSONString {
public String aString;
public double aNumber;
public boolean aBoolean;
public Obj(String string, double n, boolean b) {
this.aString = string;
this.aNumber = n;
this.aBoolean = b;
}
public double getNumber() {
return this.aNumber;
}
public String getString() {
return this.aString;
}
public boolean isBoolean() {
return this.aBoolean;
}
public String getBENT() {
return "All uppercase key";
}
public String getX() {
return "x";
}
public String toJSONString() {
return "{" + JSONObject.quote(this.aString) + ":" + JSONObject.doubleToString(this.aNumber) + "}";
}
public String toString() {
return this.getString() + " " + this.getNumber() + " " + this.isBoolean() + "." + this.getBENT() + " " + this.getX();
}
}
Obj obj = new Obj("A beany object", 42, true);
try {
s = "[0.1]";
a = new JSONArray(s);
System.out.println(a.toString());
System.out.println("");
j = XML.toJSONObject("<![CDATA[This is a collection of test patterns and examples for org.json.]]> Ignore the stuff past the end. ");
System.out.println(j.toString());
System.out.println("");
j = new JSONObject();
o = null;
j.put("booga", o);
j.put("wooga", JSONObject.NULL);
System.out.println(j.toString());
System.out.println("");
j = new JSONObject();
j.increment("two");
j.increment("two");
System.out.println(j.toString());
System.out.println("");
s = "<test><blank></blank><empty/></test>";
j = XML.toJSONObject(s);
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
s = "{ \"list of lists\" : [ [1, 2, 3], [4, 5, 6], ] }";
j = new JSONObject(s);
System.out.println(j.toString(4));
System.out.println(XML.toString(j));
s = "<recipe name=\"bread\" prep_time=\"5 mins\" cook_time=\"3 hours\"> <title>Basic bread</title> <ingredient amount=\"8\" unit=\"dL\">Flour</ingredient> <ingredient amount=\"10\" unit=\"grams\">Yeast</ingredient> <ingredient amount=\"4\" unit=\"dL\" state=\"warm\">Water</ingredient> <ingredient amount=\"1\" unit=\"teaspoon\">Salt</ingredient> <instructions> <step>Mix all ingredients together.</step> <step>Knead thoroughly.</step> <step>Cover with a cloth, and leave for one hour in warm room.</step> <step>Knead again.</step> <step>Place in a bread baking tin.</step> <step>Cover with a cloth, and leave for one hour in warm room.</step> <step>Bake in the oven at 180(degrees)C for 30 minutes.</step> </instructions> </recipe> ";
j = XML.toJSONObject(s);
System.out.println(j.toString(4));
System.out.println();
j = JSONML.toJSONObject(s);
System.out.println(j.toString());
System.out.println(JSONML.toString(j));
System.out.println();
a = JSONML.toJSONArray(s);
System.out.println(a.toString(4));
System.out.println(JSONML.toString(a));
System.out.println();
s = "<div id=\"demo\" class=\"JSONML\"><p>JSONML is a transformation between <b>JSON</b> and <b>XML</b> that preserves ordering of document features.</p><p>JSONML can work with JSON arrays or JSON objects.</p><p>Three<br/>little<br/>words</p></div>";
j = JSONML.toJSONObject(s);
System.out.println(j.toString(4));
System.out.println(JSONML.toString(j));
System.out.println();
a = JSONML.toJSONArray(s);
System.out.println(a.toString(4));
System.out.println(JSONML.toString(a));
System.out.println();
s = "<person created=\"2006-11-11T19:23\" modified=\"2006-12-31T23:59\">\n <firstName>Robert</firstName>\n <lastName>Smith</lastName>\n <address type=\"home\">\n <street>12345 Sixth Ave</street>\n <city>Anytown</city>\n <state>CA</state>\n <postalCode>98765-4321</postalCode>\n </address>\n </person>";
j = XML.toJSONObject(s);
System.out.println(j.toString(4));
j = new JSONObject(obj);
System.out.println(j.toString());
s = "{ \"entity\": { \"imageURL\": \"\", \"name\": \"IXXXXXXXXXXXXX\", \"id\": 12336, \"ratingCount\": null, \"averageRating\": null } }";
j = new JSONObject(s);
System.out.println(j.toString(2));
jj = new JSONStringer();
s = jj.object().key("single").value("MARIE HAA'S").key("Johnny").value("MARIE HAA\\'S").key("foo").value("bar").key("baz").array().object().key("quux").value("Thanks, Josh!").endObject().endArray().key("obj keys").value(JSONObject.getNames(obj)).endObject().toString();
System.out.println(s);
System.out.println(new JSONStringer().object().key("a").array().array().array().value("b").endArray().endArray().endArray().endObject().toString());
jj = new JSONStringer();
jj.array();
jj.value(1);
jj.array();
jj.value(null);
jj.array();
jj.object();
jj.key("empty-array").array().endArray();
jj.key("answer").value(42);
jj.key("null").value(null);
jj.key("false").value(false);
jj.key("true").value(true);
jj.key("big").value(123456789e+88);
jj.key("small").value(123456789e-88);
jj.key("empty-object").object().endObject();
jj.key("long");
jj.value(9223372036854775807L);
jj.endObject();
jj.value("two");
jj.endArray();
jj.value(true);
jj.endArray();
jj.value(98.6);
jj.value(-100.0);
jj.object();
jj.endObject();
jj.object();
jj.key("one");
jj.value(1.00);
jj.endObject();
jj.value(obj);
jj.endArray();
System.out.println(jj.toString());
System.out.println(new JSONArray(jj.toString()).toString(4));
int ar[] = { 1, 2, 3 };
JSONArray ja = new JSONArray(ar);
System.out.println(ja.toString());
String sa[] = { "aString", "aNumber", "aBoolean" };
j = new JSONObject(obj, sa);
j.put("Testing JSONString interface", obj);
System.out.println(j.toString(4));
j = new JSONObject("{slashes: '///', closetag: '</script>', backslash:'\\\\', ei: {quotes: '\"\\''},eo: {a: '\"quoted\"', b:\"don't\"}, quotes: [\"'\", '\"']}");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = new JSONObject("{foo: [true, false,9876543210, 0.0, 1.00000001, 1.000000000001, 1.00000000000000001," + " .00000000000000001, 2.00, 0.1, 2e100, -32,[],{}, \"string\"], " + " to : null, op : 'Good'," + "ten:10} postfix comment");
j.put("String", "98.6");
j.put("JSONObject", new JSONObject());
j.put("JSONArray", new JSONArray());
j.put("int", 57);
j.put("double", 123456789012345678901234567890.);
j.put("true", true);
j.put("false", false);
j.put("null", JSONObject.NULL);
j.put("bool", "true");
j.put("zero", -0.0);
j.put("\\u2028", "\u2028");
j.put("\\u2029", "\u2029");
a = j.getJSONArray("foo");
a.put(666);
a.put(2001.99);
a.put("so \"fine\".");
a.put("so <fine>.");
a.put(true);
a.put(false);
a.put(new JSONArray());
a.put(new JSONObject());
j.put("keys", JSONObject.getNames(j));
System.out.println(j.toString(4));
System.out.println(XML.toString(j));
System.out.println("String: " + j.getDouble("String"));
System.out.println(" bool: " + j.getBoolean("bool"));
System.out.println(" to: " + j.getString("to"));
System.out.println(" true: " + j.getString("true"));
System.out.println(" foo: " + j.getJSONArray("foo"));
System.out.println(" op: " + j.getString("op"));
System.out.println(" ten: " + j.getInt("ten"));
System.out.println(" oops: " + j.optBoolean("oops"));
s = "<xml one = 1 two=' \"2\" '><five></five>First \u0009<content><five></five> This is \"content\". <three> 3 </three>JSON does not preserve the sequencing of elements and contents.<three> III </three> <three> T H R E E</three><four/>Content text is an implied structure in XML. <six content=\"6\"/>JSON does not have implied structure:<seven>7</seven>everything is explicit.<![CDATA[CDATA blocks<are><supported>!]]></xml>";
j = XML.toJSONObject(s);
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
System.out.println(JSONML.toString(ja));
System.out.println("");
s = "<xml do='0'>uno<a re='1' mi='2'>dos<b fa='3'/>tres<c>true</c>quatro</a>cinqo<d>seis<e/></d></xml>";
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
System.out.println(JSONML.toString(ja));
System.out.println("");
s = "<mapping><empty/> <class name = \"Customer\"> <field name = \"ID\" type = \"string\"> <bind-xml name=\"ID\" node=\"attribute\"/> </field> <field name = \"FirstName\" type = \"FirstName\"/> <field name = \"MI\" type = \"MI\"/> <field name = \"LastName\" type = \"LastName\"/> </class> <class name = \"FirstName\"> <field name = \"text\"> <bind-xml name = \"text\" node = \"text\"/> </field> </class> <class name = \"MI\"> <field name = \"text\"> <bind-xml name = \"text\" node = \"text\"/> </field> </class> <class name = \"LastName\"> <field name = \"text\"> <bind-xml name = \"text\" node = \"text\"/> </field> </class></mapping>";
j = XML.toJSONObject(s);
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
System.out.println(JSONML.toString(ja));
System.out.println("");
j = XML.toJSONObject("<?xml version=\"1.0\" ?><Book Author=\"Anonymous\"><Title>Sample Book</Title><Chapter id=\"1\">This is chapter 1. It is not very long or interesting.</Chapter><Chapter id=\"2\">This is chapter 2. Although it is longer than chapter 1, it is not any more interesting.</Chapter></Book>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = XML.toJSONObject("<!DOCTYPE bCard 'http://www.cs.caltech.edu/~adam/schemas/bCard'><bCard><?xml default bCard firstname = '' lastname = '' company = '' email = '' homepage = ''?><bCard firstname = 'Rohit' lastname = 'Khare' company = 'MCI' email = '[email protected]' homepage = 'http://pest.w3.org/'/><bCard firstname = 'Adam' lastname = 'Rifkin' company = 'Caltech Infospheres Project' email = '[email protected]' homepage = 'http://www.cs.caltech.edu/~adam/'/></bCard>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = XML.toJSONObject("<?xml version=\"1.0\"?><customer> <firstName> <text>Fred</text> </firstName> <ID>fbs0001</ID> <lastName> <text>Scerbo</text> </lastName> <MI> <text>B</text> </MI></customer>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = XML.toJSONObject("<!ENTITY tp-address PUBLIC '-//ABC University::Special Collections Library//TEXT (titlepage: name and address)//EN' 'tpspcoll.sgm'><list type='simple'><head>Repository Address </head><item>Special Collections Library</item><item>ABC University</item><item>Main Library, 40 Circle Drive</item><item>Ourtown, Pennsylvania</item><item>17654 USA</item></list>");
System.out.println(j.toString());
System.out.println(XML.toString(j));
System.out.println("");
j = XML.toJSONObject("<test intertag status=ok><empty/>deluxe<blip sweet=true>&"toot"&toot;A</blip><x>eks</x><w>bonus</w><w>bonus2</w></test>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = HTTP.toJSONObject("GET / HTTP/1.0\nAccept: image/gif, image/x-xbitmap, image/jpeg, image/pjpeg, application/vnd.ms-powerpoint, application/vnd.ms-excel, application/msword, */*\nAccept-Language: en-us\nUser-Agent: Mozilla/4.0 (compatible; MSIE 5.5; Windows 98; Win 9x 4.90; T312461; Q312461)\nHost: www.nokko.com\nConnection: keep-alive\nAccept-encoding: gzip, deflate\n");
System.out.println(j.toString(2));
System.out.println(HTTP.toString(j));
System.out.println("");
j = HTTP.toJSONObject("HTTP/1.1 200 Oki Doki\nDate: Sun, 26 May 2002 17:38:52 GMT\nServer: Apache/1.3.23 (Unix) mod_perl/1.26\nKeep-Alive: timeout=15, max=100\nConnection: Keep-Alive\nTransfer-Encoding: chunked\nContent-Type: text/html\n");
System.out.println(j.toString(2));
System.out.println(HTTP.toString(j));
System.out.println("");
j = new JSONObject("{nix: null, nux: false, null: 'null', 'Request-URI': '/', Method: 'GET', 'HTTP-Version': 'HTTP/1.0'}");
System.out.println(j.toString(2));
System.out.println("isNull: " + j.isNull("nix"));
System.out.println(" has: " + j.has("nix"));
System.out.println(XML.toString(j));
System.out.println(HTTP.toString(j));
System.out.println("");
j = XML.toJSONObject("<?xml version='1.0' encoding='UTF-8'?>" + "\n\n" + "<SOAP-ENV:Envelope" + " xmlns:SOAP-ENV=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:xsi=\"http://www.w3.org/1999/XMLSchema-instance\"" + " xmlns:xsd=\"http://www.w3.org/1999/XMLSchema\">" + "<SOAP-ENV:Body><ns1:doGoogleSearch" + " xmlns:ns1=\"urn:GoogleSearch\"" + " SOAP-ENV:encodingStyle=\"http://schemas.xmlsoap.org/soap/encoding/\">" + "<key xsi:type=\"xsd:string\">GOOGLEKEY</key> <q" + " xsi:type=\"xsd:string\">'+search+'</q> <start" + " xsi:type=\"xsd:int\">0</start> <maxResults" + " xsi:type=\"xsd:int\">10</maxResults> <filter" + " xsi:type=\"xsd:boolean\">true</filter> <restrict" + " xsi:type=\"xsd:string\"></restrict> <safeSearch" + " xsi:type=\"xsd:boolean\">false</safeSearch> <lr" + " xsi:type=\"xsd:string\"></lr> <ie" + " xsi:type=\"xsd:string\">latin1</ie> <oe" + " xsi:type=\"xsd:string\">latin1</oe>" + "</ns1:doGoogleSearch>" + "</SOAP-ENV:Body></SOAP-ENV:Envelope>");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = new JSONObject("{Envelope: {Body: {\"ns1:doGoogleSearch\": {oe: \"latin1\", filter: true, q: \"'+search+'\", key: \"GOOGLEKEY\", maxResults: 10, \"SOAP-ENV:encodingStyle\": \"http://schemas.xmlsoap.org/soap/encoding/\", start: 0, ie: \"latin1\", safeSearch:false, \"xmlns:ns1\": \"urn:GoogleSearch\"}}}}");
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
j = CookieList.toJSONObject(" f%oo = b+l=ah ; o;n%40e = t.wo ");
System.out.println(j.toString(2));
System.out.println(CookieList.toString(j));
System.out.println("");
j = Cookie.toJSONObject("f%oo=blah; secure ;expires = April 24, 2002");
System.out.println(j.toString(2));
System.out.println(Cookie.toString(j));
System.out.println("");
j = new JSONObject("{script: 'It is not allowed in HTML to send a close script tag in a string<script>because it confuses browsers</script>so we insert a backslash before the /'}");
System.out.println(j.toString());
System.out.println("");
JSONTokener jt = new JSONTokener("{op:'test', to:'session', pre:1}{op:'test', to:'session', pre:2}");
j = new JSONObject(jt);
System.out.println(j.toString());
System.out.println("pre: " + j.optInt("pre"));
int i = jt.skipTo('{');
System.out.println(i);
j = new JSONObject(jt);
System.out.println(j.toString());
System.out.println("");
a = CDL.toJSONArray("Comma delimited list test, '\"Strip\"Quotes', 'quote, comma', No quotes, 'Single Quotes', \"Double Quotes\"\n1,'2',\"3\"\n,'It is \"good,\"', \"It works.\"\n\n");
s = CDL.toString(a);
System.out.println(s);
System.out.println("");
System.out.println(a.toString(4));
System.out.println("");
a = CDL.toJSONArray(s);
System.out.println(a.toString(4));
System.out.println("");
a = new JSONArray(" [\"<escape>\", next is an implied null , , ok,] ");
System.out.println(a.toString());
System.out.println("");
System.out.println(XML.toString(a));
System.out.println("");
j = new JSONObject("{ fun => with non-standard forms ; forgiving => This package can be used to parse formats that are similar to but not stricting conforming to JSON; why=To make it easier to migrate existing data to JSON,one = [[1.00]]; uno=[[{1=>1}]];'+':+6e66 ;pluses=+++;empty = '' , 'double':0.666,true: TRUE, false: FALSE, null=NULL;[true] = [[!,@;*]]; string=> o. k. ; \r oct=0666; hex=0x666; dec=666; o=0999; noh=0x0x}");
System.out.println(j.toString(4));
System.out.println("");
if (j.getBoolean("true") && !j.getBoolean("false")) {
System.out.println("It's all good");
}
System.out.println("");
j = new JSONObject(j, new String[] { "dec", "oct", "hex", "missing" });
System.out.println(j.toString(4));
System.out.println("");
System.out.println(new JSONStringer().array().value(a).value(j).endArray());
j = new JSONObject("{string: \"98.6\", long: 2147483648, int: 2147483647, longer: 9223372036854775807, double: 9223372036854775808}");
System.out.println(j.toString(4));
System.out.println("\ngetInt");
System.out.println("int " + j.getInt("int"));
System.out.println("long " + j.getInt("long"));
System.out.println("longer " + j.getInt("longer"));
// System.out.println("double " + j.getInt("double"));
// System.out.println("string " + j.getInt("string"));
System.out.println("\ngetLong");
System.out.println("int " + j.getLong("int"));
System.out.println("long " + j.getLong("long"));
System.out.println("longer " + j.getLong("longer"));
// System.out.println("double " + j.getLong("double"));
// System.out.println("string " + j.getLong("string"));
System.out.println("\ngetDouble");
System.out.println("int " + j.getDouble("int"));
System.out.println("long " + j.getDouble("long"));
System.out.println("longer " + j.getDouble("longer"));
System.out.println("double " + j.getDouble("double"));
System.out.println("string " + j.getDouble("string"));
j.put("good sized", 9223372036854775807L);
System.out.println(j.toString(4));
a = new JSONArray("[2147483647, 2147483648, 9223372036854775807, 9223372036854775808]");
System.out.println(a.toString(4));
System.out.println("\nKeys: ");
it = j.keys();
while (it.hasNext()) {
s = (String) it.next();
System.out.println(s + ": " + j.getString(s));
}
System.out.println("\naccumulate: ");
j = new JSONObject();
j.accumulate("stooge", "Curly");
j.accumulate("stooge", "Larry");
j.accumulate("stooge", "Moe");
a = j.getJSONArray("stooge");
a.put(5, "Shemp");
System.out.println(j.toString(4));
System.out.println("\nwrite:");
System.out.println(j.write(new StringWriter()));
s = "<xml empty><a></a><a>1</a><a>22</a><a>333</a></xml>";
j = XML.toJSONObject(s);
System.out.println(j.toString(4));
System.out.println(XML.toString(j));
s = "<book><chapter>Content of the first chapter</chapter><chapter>Content of the second chapter <chapter>Content of the first subchapter</chapter> <chapter>Content of the second subchapter</chapter></chapter><chapter>Third Chapter</chapter></book>";
j = XML.toJSONObject(s);
System.out.println(j.toString(4));
System.out.println(XML.toString(j));
a = JSONML.toJSONArray(s);
System.out.println(a.toString(4));
System.out.println(JSONML.toString(a));
Collection c = null;
Map m = null;
j = new JSONObject(m);
a = new JSONArray(c);
j.append("stooge", "Joe DeRita");
j.append("stooge", "Shemp");
j.accumulate("stooges", "Curly");
j.accumulate("stooges", "Larry");
j.accumulate("stooges", "Moe");
j.accumulate("stoogearray", j.get("stooges"));
j.put("map", m);
j.put("collection", c);
j.put("array", a);
a.put(m);
a.put(c);
System.out.println(j.toString(4));
s = "{plist=Apple; AnimalSmells = { pig = piggish; lamb = lambish; worm = wormy; }; AnimalSounds = { pig = oink; lamb = baa; worm = baa; Lisa = \"Why is the worm talking like a lamb?\" } ; AnimalColors = { pig = pink; lamb = black; worm = pink; } } ";
j = new JSONObject(s);
System.out.println(j.toString(4));
s = " (\"San Francisco\", \"New York\", \"Seoul\", \"London\", \"Seattle\", \"Shanghai\")";
a = new JSONArray(s);
System.out.println(a.toString());
s = "<a ichi='1' ni='2'><b>The content of b</b> and <c san='3'>The content of c</c><d>do</d><e></e><d>re</d><f/><d>mi</d></a>";
j = XML.toJSONObject(s);
System.out.println(j.toString(2));
System.out.println(XML.toString(j));
System.out.println("");
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
System.out.println(JSONML.toString(ja));
System.out.println("");
s = "<Root><MsgType type=\"node\"><BatchType type=\"string\">111111111111111</BatchType></MsgType></Root>";
j = JSONML.toJSONObject(s);
System.out.println(j);
ja = JSONML.toJSONArray(s);
System.out.println(ja);
System.out.println("\nTesting Exceptions: ");
System.out.print("Exception: ");
try {
a = new JSONArray("[\n\r\n\r}");
System.out.println(a.toString());
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
a = new JSONArray("<\n\r\n\r ");
System.out.println(a.toString());
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
a = new JSONArray();
a.put(Double.NEGATIVE_INFINITY);
a.put(Double.NaN);
System.out.println(a.toString());
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(j.getDouble("stooge"));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(j.getDouble("howard"));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(j.put(null, "howard"));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(a.getDouble(0));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(a.get(-1));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
System.out.println(a.put(Double.NaN));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
j = XML.toJSONObject("<a><b> ");
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
j = XML.toJSONObject("<a></b> ");
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
j = XML.toJSONObject("<a></a ");
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
ja = new JSONArray(new Object());
System.out.println(ja.toString());
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
s = "[)";
a = new JSONArray(s);
System.out.println(a.toString());
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
s = "<xml";
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
s = "<right></wrong>";
ja = JSONML.toJSONArray(s);
System.out.println(ja.toString(4));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
s = "{\"koda\": true, \"koda\": true}";
j = new JSONObject(s);
System.out.println(j.toString(4));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
System.out.print("Exception: ");
try {
jj = new JSONStringer();
s = jj.object().key("bosanda").value("MARIE HAA'S").key("bosanda").value("MARIE HAA\\'S").endObject().toString();
System.out.println(j.toString(4));
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e);
}
} catch (Exception e) {
logger.error("Exception occurred:",e);
System.out.println(e.toString());
}
}
}
|
|
/*
* Copyright (c) 1997, 2005, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.doclets.formats.html;
import java.io.*;
import java.util.*;
import com.sun.javadoc.*;
import com.sun.tools.doclets.internal.toolkit.*;
import com.sun.tools.doclets.internal.toolkit.util.*;
import com.sun.tools.doclets.formats.html.markup.*;
/**
* Class to generate file for each package contents in the right-hand
* frame. This will list all the Class Kinds in the package. A click on any
* class-kind will update the frame with the clicked class-kind page.
*
* @author Atul M Dambalkar
* @author Bhavesh Patel (Modified)
*/
public class PackageWriterImpl extends HtmlDocletWriter
implements PackageSummaryWriter {
/**
* The prev package name in the alpha-order list.
*/
protected PackageDoc prev;
/**
* The next package name in the alpha-order list.
*/
protected PackageDoc next;
/**
* The package being documented.
*/
protected PackageDoc packageDoc;
/**
* The name of the output file.
*/
private static final String OUTPUT_FILE_NAME = "package-summary.html";
/**
* Constructor to construct PackageWriter object and to generate
* "package-summary.html" file in the respective package directory.
* For example, for package "java.lang" this will generate the file
* "package-summary.html" in the "java/lang" directory. It will also
* create the "java/lang" directory in the current or the destination directory
* if it doesn't exist.
*
* @param configuration the configuration of the doclet.
* @param packageDoc PackageDoc under consideration.
* @param prev Previous package in the sorted array.
* @param next Next package in the sorted array.
*/
public PackageWriterImpl(ConfigurationImpl configuration,
PackageDoc packageDoc, PackageDoc prev, PackageDoc next)
throws IOException {
super(configuration, DirectoryManager.getDirectoryPath(packageDoc), OUTPUT_FILE_NAME,
DirectoryManager.getRelativePath(packageDoc.name()));
this.prev = prev;
this.next = next;
this.packageDoc = packageDoc;
}
/**
* Return the name of the output file.
*
* @return the name of the output file.
*/
public String getOutputFileName() {
return OUTPUT_FILE_NAME;
}
/**
* {@inheritDoc}
*/
public Content getPackageHeader(String heading) {
String pkgName = packageDoc.name();
Content bodyTree = getBody(true, getWindowTitle(pkgName));
addTop(bodyTree);
addNavLinks(true, bodyTree);
HtmlTree div = new HtmlTree(HtmlTag.DIV);
div.addStyle(HtmlStyle.header);
Content annotationContent = new HtmlTree(HtmlTag.P);
addAnnotationInfo(packageDoc, annotationContent);
div.addContent(annotationContent);
Content tHeading = HtmlTree.HEADING(HtmlConstants.TITLE_HEADING, true,
HtmlStyle.title, packageLabel);
tHeading.addContent(getSpace());
Content packageHead = new RawHtml(heading);
tHeading.addContent(packageHead);
div.addContent(tHeading);
if (packageDoc.inlineTags().length > 0 && ! configuration.nocomment) {
HtmlTree p = new HtmlTree(HtmlTag.P);
p.addStyle(HtmlStyle.subTitle);
addSummaryComment(packageDoc, p);
div.addContent(p);
Content space = getSpace();
Content descLink = getHyperLink("", "package_description",
descriptionLabel, "", "");
Content descPara = new HtmlTree(HtmlTag.P, seeLabel, space, descLink);
div.addContent(descPara);
}
bodyTree.addContent(div);
return bodyTree;
}
/**
* {@inheritDoc}
*/
public Content getContentHeader() {
HtmlTree div = new HtmlTree(HtmlTag.DIV);
div.addStyle(HtmlStyle.contentContainer);
return div;
}
/**
* {@inheritDoc}
*/
public Content getSummaryHeader() {
HtmlTree ul = new HtmlTree(HtmlTag.UL);
ul.addStyle(HtmlStyle.blockList);
return ul;
}
/**
* {@inheritDoc}
*/
public void addClassesSummary(ClassDoc[] classes, String label,
String tableSummary, String[] tableHeader, Content summaryContentTree) {
if(classes.length > 0) {
Arrays.sort(classes);
Content caption = getTableCaption(label);
Content table = HtmlTree.TABLE(HtmlStyle.packageSummary, 0, 3, 0,
tableSummary, caption);
table.addContent(getSummaryTableHeader(tableHeader, "col"));
Content tbody = new HtmlTree(HtmlTag.TBODY);
for (int i = 0; i < classes.length; i++) {
if (!Util.isCoreClass(classes[i]) ||
!configuration.isGeneratedDoc(classes[i])) {
continue;
}
Content classContent = new RawHtml(getLink(new LinkInfoImpl(
LinkInfoImpl.CONTEXT_PACKAGE, classes[i], false)));
Content tdClass = HtmlTree.TD(HtmlStyle.colFirst, classContent);
HtmlTree tr = HtmlTree.TR(tdClass);
if (i%2 == 0)
tr.addStyle(HtmlStyle.altColor);
else
tr.addStyle(HtmlStyle.rowColor);
HtmlTree tdClassDescription = new HtmlTree(HtmlTag.TD);
tdClassDescription.addStyle(HtmlStyle.colLast);
if (Util.isDeprecated(classes[i])) {
tdClassDescription.addContent(deprecatedLabel);
if (classes[i].tags("deprecated").length > 0) {
addSummaryDeprecatedComment(classes[i],
classes[i].tags("deprecated")[0], tdClassDescription);
}
}
else
addSummaryComment(classes[i], tdClassDescription);
tr.addContent(tdClassDescription);
tbody.addContent(tr);
}
table.addContent(tbody);
Content li = HtmlTree.LI(HtmlStyle.blockList, table);
summaryContentTree.addContent(li);
}
}
/**
* {@inheritDoc}
*/
public void addPackageDescription(Content packageContentTree) {
if (packageDoc.inlineTags().length > 0) {
packageContentTree.addContent(getMarkerAnchor("package_description"));
Content h2Content = new StringContent(
configuration.getText("doclet.Package_Description",
packageDoc.name()));
packageContentTree.addContent(HtmlTree.HEADING(HtmlConstants.PACKAGE_HEADING,
true, h2Content));
addInlineComment(packageDoc, packageContentTree);
}
}
/**
* {@inheritDoc}
*/
public void addPackageTags(Content packageContentTree) {
addTagsInfo(packageDoc, packageContentTree);
}
/**
* {@inheritDoc}
*/
public void addPackageFooter(Content contentTree) {
addNavLinks(false, contentTree);
addBottom(contentTree);
}
/**
* {@inheritDoc}
*/
public void printDocument(Content contentTree) {
printHtmlDocument(configuration.metakeywords.getMetaKeywords(packageDoc),
true, contentTree);
}
/**
* Get "Use" link for this pacakge in the navigation bar.
*
* @return a content tree for the class use link
*/
protected Content getNavLinkClassUse() {
Content useLink = getHyperLink("package-use.html", "",
useLabel, "", "");
Content li = HtmlTree.LI(useLink);
return li;
}
/**
* Get "PREV PACKAGE" link in the navigation bar.
*
* @return a content tree for the previous link
*/
public Content getNavLinkPrevious() {
Content li;
if (prev == null) {
li = HtmlTree.LI(prevpackageLabel);
} else {
String path = DirectoryManager.getRelativePath(packageDoc.name(),
prev.name());
li = HtmlTree.LI(getHyperLink(path + "package-summary.html", "",
prevpackageLabel, "", ""));
}
return li;
}
/**
* Get "NEXT PACKAGE" link in the navigation bar.
*
* @return a content tree for the next link
*/
public Content getNavLinkNext() {
Content li;
if (next == null) {
li = HtmlTree.LI(nextpackageLabel);
} else {
String path = DirectoryManager.getRelativePath(packageDoc.name(),
next.name());
li = HtmlTree.LI(getHyperLink(path + "package-summary.html", "",
nextpackageLabel, "", ""));
}
return li;
}
/**
* Get "Tree" link in the navigation bar. This will be link to the package
* tree file.
*
* @return a content tree for the tree link
*/
protected Content getNavLinkTree() {
Content useLink = getHyperLink("package-tree.html", "",
treeLabel, "", "");
Content li = HtmlTree.LI(useLink);
return li;
}
/**
* Highlight "Package" in the navigation bar, as this is the package page.
*
* @return a content tree for the package link
*/
protected Content getNavLinkPackage() {
Content li = HtmlTree.LI(HtmlStyle.navBarCell1Rev, packageLabel);
return li;
}
}
|
|
package com.vaadin.book;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.vaadin.Application;
import com.vaadin.book.examples.AbstractExampleItem;
import com.vaadin.book.examples.BookExample;
import com.vaadin.book.examples.BookExampleLibrary;
import com.vaadin.book.examples.CaptionedExampleItem;
import com.vaadin.book.examples.ExampleCtgr;
import com.vaadin.book.examples.RedirctItem;
import com.vaadin.book.examples.SourceFragment;
import com.vaadin.data.Property;
import com.vaadin.data.Property.ValueChangeEvent;
import com.vaadin.terminal.ExternalResource;
import com.vaadin.terminal.Sizeable;
import com.vaadin.terminal.ThemeResource;
import com.vaadin.terminal.gwt.server.HttpServletRequestListener;
import com.vaadin.terminal.gwt.server.WebApplicationContext;
import com.vaadin.ui.Alignment;
import com.vaadin.ui.CustomComponent;
import com.vaadin.ui.Embedded;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.Link;
import com.vaadin.ui.NativeSelect;
import com.vaadin.ui.Panel;
import com.vaadin.ui.Tree;
import com.vaadin.ui.UriFragmentUtility;
import com.vaadin.ui.UriFragmentUtility.FragmentChangedEvent;
import com.vaadin.ui.UriFragmentUtility.FragmentChangedListener;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.Window;
public class BookExamplesApplication extends Application implements HttpServletRequestListener {
private static final long serialVersionUID = 5548861727207728718L;
@Override
public void init() {
final Window main = new Window("Book Examples");
main.setImmediate(true);
setMainWindow(main);
setTheme("book-examples");
// Set session timeout to 10 seconds
//((WebApplicationContext)getContext()).getHttpSession().setMaxInactiveInterval(10);
System.out.println("Session timeout: " +
((WebApplicationContext)getContext()).getHttpSession().getMaxInactiveInterval() +
" seconds");
init(main);
}
public void init(final Window main) {
VerticalLayout mainLayout = new VerticalLayout();
main.setContent(mainLayout);
mainLayout.setSizeFull();
HorizontalLayout titlebar = new HorizontalLayout();
titlebar.addStyleName("titlebar");
titlebar.setWidth("100%");
Label title = new Label("Book of Vaadin Examples");
title.addStyleName("title");
titlebar.addComponent(title);
titlebar.setComponentAlignment(title, Alignment.MIDDLE_RIGHT);
Embedded logo = new Embedded(null, new ThemeResource("img/vaadin-logo.png"));
titlebar.addComponent(logo);
titlebar.setComponentAlignment(logo, Alignment.MIDDLE_RIGHT);
main.addComponent(titlebar);
HorizontalLayout hor = new HorizontalLayout();
hor.setSizeFull();
main.addComponent(hor);
mainLayout.setExpandRatio(hor, 1.0f);
final Panel menupanel = new Panel("Examples");
menupanel.addStyleName("menupanel");
menupanel.setWidth(null);
menupanel.setHeight("100%");
menupanel.getContent().setWidth(null);
//menupanel.getContent().setHeight("100%");
hor.addComponent(menupanel);
final Tree menu = new Tree();
menu.setWidth(null);
// menu.setHeight("100%");
menu.setImmediate(true);
menupanel.addComponent(menu);
final Panel viewpanel = new Panel("Selected Example");
viewpanel.addStyleName("viewpanel");
viewpanel.setSizeFull();
VerticalLayout viewlayout = new VerticalLayout();
viewlayout.addStyleName("viewlayout");
viewlayout.setSpacing(true);
viewlayout.setMargin(true);
viewpanel.setContent(viewlayout);
hor.addComponent(viewpanel);
hor.setExpandRatio(viewpanel, 1.0f);
WebApplicationContext ctx = (WebApplicationContext) getContext();
BookExampleLibrary library = BookExampleLibrary.getInstance(ctx.getBaseDirectory());
AbstractExampleItem[] examples = library.getAllExamples();
// Collect redirects here
final HashMap<String,String> redirects = new HashMap<String,String>();
// Collect examples here
final HashMap<String,CaptionedExampleItem> exampleitems = new HashMap<String,CaptionedExampleItem>();
// Build the menu and collect redirections
for (int i=0; i<examples.length; i++)
if (examples[i] instanceof BookExample || examples[i] instanceof ExampleCtgr) {
CaptionedExampleItem example = (CaptionedExampleItem) examples[i];
exampleitems.put(example.getExampleId(), example);
String itemid = example.getExampleId();
menu.addItem(itemid);
menu.setItemCaption(itemid, example.getShortName());
if (examples[i].getParentId() != null)
menu.setParent(itemid, examples[i].getParentId());
} else if (examples[i] instanceof RedirctItem) {
RedirctItem redirect = (RedirctItem) examples[i];
redirects.put(redirect.getExampleId(), redirect.redirectid);
}
// Expand the menu
for (int i=0; i<examples.length; i++) {
if (examples[i].getParentId() == null)
menu.expandItemsRecursively(examples[i].getExampleId());
if (examples[i].isCollapsed())
menu.collapseItem(examples[i].getExampleId());
if (menu.getChildren(examples[i].getExampleId()) == null)
menu.setChildrenAllowed(examples[i].getExampleId(), false);
}
// Set selected example as given in the URI fragment
final UriFragmentUtility urifu = new UriFragmentUtility();
urifu.addListener(new FragmentChangedListener() {
private static final long serialVersionUID = -6588416218607827834L;
public void fragmentChanged(FragmentChangedEvent source) {
String fragment = source.getUriFragmentUtility().getFragment();
if (fragment != null) {
// Handle redirection
while (redirects.containsKey(fragment))
fragment = redirects.get(fragment);
menu.setValue(fragment);
// Open the tree nodes leading to the example
for (Object parent = menu.getParent(fragment);
parent != null;
parent = menu.getParent(parent))
menu.expandItem(parent);
}
}
});
mainLayout.addComponent(urifu);
// Handle menu selection
menu.addListener(new Property.ValueChangeListener() {
private static final long serialVersionUID = 8236533959795019956L;
public void valueChange(ValueChangeEvent event) {
viewpanel.removeAllComponents();
String selection = (String) event.getProperty().getValue();
// Find the example
CaptionedExampleItem exampleItem = exampleitems.get(selection);
if (selection != null && exampleItem == null)
main.showNotification("Invalid item " + selection);
else if (exampleItem != null) {
if (exampleItem.getClass().isAssignableFrom(ExampleCtgr.class)) {
if (menu.hasChildren(exampleItem.getExampleId())) {
menu.select((String) menu.getChildren(exampleItem.getExampleId()).toArray()[0]);
}
} else { // A leaf
BookExample example = (BookExample) exampleItem;
// Load unless already loaded
WebApplicationContext ctx = (WebApplicationContext) getContext();
example.loadExample(ctx.getBaseDirectory());
if (example.getDescription() != null) {
Label descLabel = new Label(example.getDescription(), Label.CONTENT_XHTML);
descLabel.addStyleName("example-description");
viewpanel.addComponent(descLabel);
}
// The actual example component
viewpanel.addComponent(example.createInstance());
// Java sources on the left, CSS on the right
HorizontalLayout horizontalOrder = new HorizontalLayout();
horizontalOrder.addStyleName("sourcecontainer");
horizontalOrder.setSpacing(true);
horizontalOrder.setMargin(true);
Panel bookRefs = null;
Panel forumLinks = null;
Panel kbRefs = null;
List<SourceFragment> fragments = example.getSourceFragments();
if (fragments != null) {
// Java Sources are laid out vertically
VerticalLayout verticalListings = new VerticalLayout();
verticalListings.setSizeUndefined();
verticalListings.setSpacing(true);
horizontalOrder.addComponent(verticalListings);
// Find the widest source fragment
int widestIndex = 0;
int widestWidth = 0;
for (int fragmentNum = 0; fragmentNum < fragments.size(); fragmentNum++)
if (fragments.get(fragmentNum).getSrcWidth() > widestWidth) {
widestIndex = fragmentNum;
widestWidth = fragments.get(fragmentNum).getSrcWidth();
}
System.out.println("Widest listing: " + widestIndex + " which is " + widestWidth);
for (int fragmentNum = 0; fragmentNum < fragments.size(); fragmentNum++) {
SourceFragment fragment = fragments.get(fragmentNum);
// Have caption only in the beginning of the listings
String listingCaption = fragmentNum == 0? "Source Code" : "";
String srcurl = "http://dev.vaadin.com/browser/doc/book-examples/trunk/src" + fragment.getSrcName();
SourceListing listing = new SourceListing(listingCaption, srcurl, fragment);
verticalListings.addComponent(listing);
// Use the width of the widest listing for all listings
if (fragmentNum == widestIndex)
listing.setWidth(Sizeable.SIZE_UNDEFINED, 0);
else
listing.setWidth("100%");
if (!fragment.getBookRefs().isEmpty()) {
bookRefs = new Panel("Book References");
bookRefs.setSizeUndefined();
for (Iterator<String> iter = fragment.getBookRefs().iterator(); iter.hasNext();) {
String ref = iter.next();
int hashPos = ref.indexOf('#');
String refFragment = "";
if (hashPos != -1) {
refFragment = "#" + ref.replace('#', '.');
ref = ref.substring(0, hashPos);
}
String bookUrl = "http://vaadin.com/book/-/page/" + ref + ".html" + refFragment;
Link link = new Link(bookUrl, new ExternalResource(bookUrl));
link.setTargetName("_new");
bookRefs.addComponent(link);
}
}
if (!fragment.getForumLinks().isEmpty()) {
forumLinks = new Panel("Forum Messages");
forumLinks.setSizeUndefined();
for (Iterator<String> iter = fragment.getForumLinks().iterator(); iter.hasNext();) {
String url = iter.next();
Link link = new Link(url, new ExternalResource(url));
link.setTargetName("_new");
forumLinks.addComponent(link);
}
}
if (!fragment.getKbRefs().isEmpty()) {
kbRefs = new Panel("Pro Account Knowledge Base Articles");
kbRefs.setSizeUndefined();
for (Iterator<SourceFragment.Ref> iter = fragment.getKbRefs().iterator(); iter.hasNext();) {
SourceFragment.Ref ref = iter.next();
String url = "http://vaadin.com/knowledge-base#" + ref.ref;
Link link = new Link(ref.caption, new ExternalResource(url));
link.setTargetName("_new");
kbRefs.addComponent(link);
}
}
}
}
// Show associated CSS
if (example.getCssFragments() != null && example.getCssFragments().size() > 0) {
SourceFragment csscode = example.getCssFragments().get(0);
String srcurl = "http://dev.vaadin.com/browser/doc/book-examples/trunk/WebContent/VAADIN/themes/book-examples/styles.css";
horizontalOrder.addComponent(new SourceListing("CSS Code", srcurl, csscode));
}
if (horizontalOrder.getComponentIterator().hasNext())
viewpanel.addComponent(horizontalOrder);
if (bookRefs != null)
viewpanel.addComponent(bookRefs);
if (forumLinks != null)
viewpanel.addComponent(forumLinks);
if (kbRefs != null)
viewpanel.addComponent(kbRefs);
urifu.setFragment(example.getExampleId());
}
}
}
});
Tree.ItemStyleGenerator itemStyleGenerator = new Tree.ItemStyleGenerator() {
private static final long serialVersionUID = -3231268865512947125L;
public String getStyle(Object itemId) {
// Chapter title items do not contain a period
if (!((String)itemId).contains("."))
return "chaptertitle";
return null;
}
};
menu.setItemStyleGenerator(itemStyleGenerator);
}
/** Source code listing. */
public class SourceListing extends CustomComponent {
private static final long serialVersionUID = -1864980807288021761L;
VerticalLayout layout = new VerticalLayout();
Label srcview;
/**
* @param caption caption for the source listing box
* @param srcCode the source code
*/
public SourceListing(String caption, String url, final SourceFragment fragment) {
setSizeUndefined(); // Layout size is also set with custom setWidth()
// Source caption
HorizontalLayout titlebar = new HorizontalLayout();
titlebar.setWidth("100%");
Label captionLabel = new Label(caption);
captionLabel.addStyleName("sourcecaption");
captionLabel.setSizeUndefined();
titlebar.addComponent(captionLabel);
titlebar.setComponentAlignment(captionLabel, Alignment.BOTTOM_LEFT);
// Link to source repository
String filename = url.substring(url.lastIndexOf('/') + 1);
if (fragment.getFragmentPos() > 0)
url = url + "#L" + fragment.getFragmentPos();
Link srcLink = new Link(filename, new ExternalResource(url));
srcLink.setTargetName("_new");
srcLink.setDescription("Click the link to open the source file in the repository in a new window");
titlebar.addComponent(srcLink);
titlebar.setComponentAlignment(srcLink, Alignment.BOTTOM_RIGHT);
layout.addComponent(titlebar);
// The actual source code listing
srcview = new Label(fragment.getSrcCode(), Label.CONTENT_PREFORMATTED);
srcview.addStyleName("sourcecode");
srcview.setWidth("-1");
layout.addComponent(srcview);
final NativeSelect mode = new NativeSelect();
mode.addItem("Plain");
mode.addItem("DocBook");
mode.addItem("JavaDoc");
mode.addItem("MarkDown");
mode.setValue("Plain");
mode.setNullSelectionAllowed(false);
mode.setMultiSelect(false);
layout.addComponent(mode);
layout.setComponentAlignment(mode, Alignment.MIDDLE_RIGHT);
mode.addListener(new Property.ValueChangeListener() {
private static final long serialVersionUID = 2161991423208388790L;
public void valueChange(ValueChangeEvent event) {
String selected = (String)mode.getValue();
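// Re-render the listing in the selected markup: raw source, a DocBook
// <programlisting> CDATA block, a JavaDoc " * "-prefixed <pre> block, or
// MarkDown's four-space indented code format.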
if ("Plain".equals(selected)) {
srcview.setValue(fragment.getSrcCode());
} else if ("DocBook".equals(selected)) {
String trimmed = fragment.getSrcCode().trim();
String dbcode = "<programlisting><?pocket-size 65% ?><![CDATA[" +
trimmed + "]]></programlisting>\n";
srcview.setValue(dbcode);
} else if ("JavaDoc".equals(selected)) {
String trimmed = " * " + fragment.getSrcCode().trim().replace("\n", "\n * ");
String dbcode = " * <pre>\n" +
trimmed + "\n * </pre>\n";
srcview.setValue(dbcode);
} else if ("MarkDown".equals(selected)) {
String trimmed = " " + fragment.getSrcCode().trim().replace("\n", "\n ");
srcview.setValue(trimmed);
}
}
});
mode.setImmediate(true);
setCompositionRoot(layout);
}
/** Set width for both the component and its root layout. */
@Override
public void setWidth(String width) {
super.setWidth(width);
if (layout != null)
layout.setWidth(width);
if (srcview != null)
srcview.setWidth(width);
}
/** Set width for both the component and its root layout. */
@Override
public void setWidth(float width, int unit) {
super.setWidth(width, unit);
if (layout != null)
layout.setWidth(width, unit);
if (srcview != null)
srcview.setWidth(width, unit);
}
}
// BEGIN-EXAMPLE: advanced.applicationwindow.automatic
int windowCount = 1;
@Override
public Window getWindow(String name) {
// See if the window already exists in the application
Window window = super.getWindow(name);
// If a dynamically created window is requested, but
// it does not exist yet, create it.
if (window == null) {
// Create the new window object
window = new Window("Book Examples Window " +
++windowCount);
// As the window did not exist, the name parameter is
// an automatically generated name for a new window.
window.setName(name);
// Add it to the application as a regular
// application-level window. This must be done before
// calling open, which requires that the window
// is attached to the application.
addWindow(window);
// Open it with the proper URL that includes the
// automatically generated window name
window.open(new ExternalResource(window.getURL()));
// Fill the window with stuff
init(window);
}
return window;
}
// END-EXAMPLE: advanced.applicationwindow.automatic
// BEGIN-EXAMPLE: advanced.servletrequestlistener.introduction
// In the sending application class we define:
int clicks = 0;
public void setClicks(int clicks) {
this.clicks = clicks;
}
public int getClicks() {
return clicks;
}
// END-EXAMPLE: advanced.servletrequestlistener.introduction
// Override the default implementation
public static SystemMessages getSystemMessages() {
CustomizedSystemMessages messages = new CustomizedSystemMessages();
messages.setCommunicationErrorCaption("Comm Err");
messages.setCommunicationErrorMessage("This is really bad.");
messages.setCommunicationErrorNotificationEnabled(false);
messages.setCommunicationErrorURL("http://vaadin.com");
return messages;
}
public interface MyHttpListener extends Serializable {
void onRequestStart(HttpServletRequest request,
HttpServletResponse response);
}
ArrayList<MyHttpListener> myHttpListeners = new ArrayList<MyHttpListener>();
public void addMyHttpListener(MyHttpListener listener) {
myHttpListeners.add(listener);
}
public void removeMyHttpListener(MyHttpListener listener) {
myHttpListeners.remove(listener);
}
public void removeAllMyHttpListeners() {
myHttpListeners.clear();
}
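// Usage sketch (hypothetical, not part of the book examples): a component that wants to
// observe raw HTTP requests could register itself through addMyHttpListener(), e.g.
//
//     addMyHttpListener(new MyHttpListener() {
//         public void onRequestStart(HttpServletRequest request,
//                                    HttpServletResponse response) {
//             System.out.println("Request URI: " + request.getRequestURI());
//         }
//     });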
@Override
public void onRequestStart(HttpServletRequest request,
HttpServletResponse response) {
for (MyHttpListener l: myHttpListeners)
l.onRequestStart(request, response);
}
@Override
public void onRequestEnd(HttpServletRequest request,
HttpServletResponse response) {
}
}
|
|
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.base;
import android.os.Handler;
import android.support.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.LinkedList;
import java.util.List;
/**
* A Promise class to be used as a placeholder for a result that will be provided asynchronously.
* It must only be accessed from a single thread.
* @param <T> The type the Promise will be fulfilled with.
*/
public class Promise<T> {
// TODO(peconn): Implement rejection handlers that can recover from rejection.
@IntDef({PromiseState.UNFULFILLED, PromiseState.FULFILLED, PromiseState.REJECTED})
@Retention(RetentionPolicy.SOURCE)
private @interface PromiseState {
int UNFULFILLED = 0;
int FULFILLED = 1;
int REJECTED = 2;
}
@PromiseState
private int mState = PromiseState.UNFULFILLED;
private T mResult;
private final List<Callback<T>> mFulfillCallbacks = new LinkedList<>();
private Exception mRejectReason;
private final List<Callback<Exception>> mRejectCallbacks = new LinkedList<>();
private final Thread mThread = Thread.currentThread();
private final Handler mHandler = new Handler();
private boolean mThrowingRejectionHandler;
/**
* A function class for use when chaining Promises with {@link Promise#then(Function)}.
* @param <A> The type of the function input.
* @param <R> The type of the function output.
*/
public interface Function<A, R> {
R apply(A argument);
}
/**
* A function class for use when chaining Promises with {@link Promise#then(AsyncFunction)}.
* @param <A> The type of the function input.
* @param <R> The type of the function output.
*/
public interface AsyncFunction<A, R> {
Promise<R> apply(A argument);
}
/**
* An exception class for when a rejected Promise is not handled and cannot pass the rejection
* to a subsequent Promise.
*/
public static class UnhandledRejectionException extends RuntimeException {
public UnhandledRejectionException(String message, Throwable cause) {
super(message, cause);
}
}
/**
* Convenience method that calls {@link #then(Callback, Callback)} providing a rejection
* {@link Callback} that throws an {@link UnhandledRejectionException}. Only use this on
* Promises that do not have rejection handlers or dependent Promises.
*/
public void then(Callback<T> onFulfill) {
checkThread();
// Allow multiple single argument then(Callback)'s, but don't bother adding duplicate
// throwing rejection handlers.
if (mThrowingRejectionHandler) {
thenInner(onFulfill);
return;
}
assert mRejectCallbacks.size() == 0 : "Do not call the single argument "
+ "Promise.then(Callback) on a Promise that already has a rejection handler.";
Callback<Exception> onReject = new Callback<Exception>() {
@Override
public void onResult(Exception reason) {
throw new UnhandledRejectionException(
"Promise was rejected without a rejection handler.", reason);
}
};
then(onFulfill, onReject);
mThrowingRejectionHandler = true;
}
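// Usage sketch (hypothetical, not part of the original class): the single-argument
// then(Callback) suits terminal consumers with no rejection handling of their own, so an
// unhandled rejection surfaces as an UnhandledRejectionException. fetchName() is an assumed
// helper returning Promise<String>, and greetingView is an assumed UI object.
//
//     fetchName().then(new Callback<String>() {
//         @Override
//         public void onResult(String name) {
//             greetingView.setText("Hello, " + name);
//         }
//     });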
/**
* Queues {@link Callback}s to be run when the Promise is either fulfilled or rejected. If the
* Promise is already fulfilled or rejected, the appropriate callback will be run on the next
* iteration of the message loop.
*
* @param onFulfill The Callback to be called on fulfillment.
* @param onReject The Callback to be called on rejection. The argument to onReject
*                 may be null if the Promise was rejected manually.
*/
public void then(Callback<T> onFulfill, Callback<Exception> onReject) {
checkThread();
thenInner(onFulfill);
exceptInner(onReject);
}
/**
* Adds a rejection handler to the Promise. This handler will be called if this Promise or any
* Promise this Promise depends on is rejected or fails. The {@link Callback} will be given
* the exception that caused the rejection, or null if the rejection was manual (caused by a
* call to {@link #reject()}).
*/
public void except(Callback<Exception> onReject) {
checkThread();
exceptInner(onReject);
}
private void thenInner(Callback<T> onFulfill) {
if (mState == PromiseState.FULFILLED) {
postCallbackToLooper(onFulfill, mResult);
} else if (mState == PromiseState.UNFULFILLED) {
mFulfillCallbacks.add(onFulfill);
}
}
private void exceptInner(Callback<Exception> onReject) {
assert !mThrowingRejectionHandler : "Do not add an exception handler to a Promise you have "
+ "called the single argument Promise.then(Callback) on.";
if (mState == PromiseState.REJECTED) {
postCallbackToLooper(onReject, mRejectReason);
} else if (mState == PromiseState.UNFULFILLED) {
mRejectCallbacks.add(onReject);
}
}
/**
* Queues a {@link Promise.Function} to be run when the Promise is fulfilled. When this Promise
* is fulfilled, the function will be run and its result will be placed in the returned Promise.
*/
public <R> Promise<R> then(final Function<T, R> function) {
checkThread();
// Create a new Promise to store the result of the function.
final Promise<R> promise = new Promise<>();
// Once this Promise is fulfilled:
// - Apply the given function to the result.
// - Fulfill the new Promise.
thenInner(new Callback<T>() {
@Override
public void onResult(T result) {
try {
promise.fulfill(function.apply(result));
} catch (Exception e) {
// If function application fails, reject the next Promise.
promise.reject(e);
}
}
});
// If this Promise is rejected, reject the next Promise.
exceptInner(new Callback<Exception>() {
@Override
public void onResult(Exception reason) {
promise.reject(reason);
}
});
return promise;
}
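// Chaining sketch (hypothetical): then(Function) transforms the fulfilled value and returns a
// new Promise, and a rejection anywhere up the chain propagates to the tail. fetchName() is an
// assumed helper returning Promise<String>.
//
//     Promise<Integer> length = fetchName().then(new Function<String, Integer>() {
//         @Override
//         public Integer apply(String name) {
//             return name.length();
//         }
//     });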
/**
* Queues a {@link Promise.AsyncFunction} to be run when the Promise is fulfilled. When this
* Promise is fulfilled, the AsyncFunction will be run. When the result of the AsyncFunction is
* available, it will be placed in the returned Promise.
*/
public <R> Promise<R> then(final AsyncFunction<T, R> function) {
checkThread();
// Create a new Promise to be returned.
final Promise<R> promise = new Promise<>();
// Once this Promise is fulfilled:
// - Apply the given function to the result (giving us an inner Promise).
// - On fulfillment of this inner Promise, fulfill our return Promise.
thenInner(new Callback<T>() {
@Override
public void onResult(T result) {
try {
// When the inner Promise is fulfilled, fulfill the return Promise.
// Alternatively, if the inner Promise is rejected, reject the return Promise.
function.apply(result).then(new Callback<R>() {
@Override
public void onResult(R result1) {
promise.fulfill(result1);
}
}, new Callback<Exception>() {
@Override
public void onResult(Exception reason) {
promise.reject(reason);
}
});
} catch (Exception e) {
// If creating the inner Promise failed, reject the next Promise.
promise.reject(e);
}
}
});
// If this Promise is rejected, reject the next Promise.
exceptInner(new Callback<Exception>() {
@Override
public void onResult(Exception reason) {
promise.reject(reason);
}
});
return promise;
}
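// Async chaining sketch (hypothetical): then(AsyncFunction) flattens a nested Promise, so the
// returned Promise completes only when the inner Promise does. fetchId() and loadProfile() are
// assumed helpers returning Promise<Long> and Promise<Profile> respectively.
//
//     Promise<Profile> profile = fetchId().then(new AsyncFunction<Long, Profile>() {
//         @Override
//         public Promise<Profile> apply(Long id) {
//             return loadProfile(id);
//         }
//     });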
/**
* Fulfills the Promise with the result and, on the next iteration of the message loop, passes
* it to any {@link Callback}s previously queued.
*/
public void fulfill(final T result) {
checkThread();
assert mState == PromiseState.UNFULFILLED;
mState = PromiseState.FULFILLED;
mResult = result;
for (final Callback<T> callback : mFulfillCallbacks) {
postCallbackToLooper(callback, result);
}
mFulfillCallbacks.clear();
}
/**
* Rejects the Promise, rejecting all those Promises that rely on it.
*
* This may throw an exception if a dependent Promise fails to handle the rejection, so it is
* important to make it explicit when a Promise may be rejected, so that users of that Promise
* know to provide rejection handling.
*/
public void reject(final Exception reason) {
checkThread();
assert mState == PromiseState.UNFULFILLED;
mState = PromiseState.REJECTED;
mRejectReason = reason;
for (final Callback<Exception> callback : mRejectCallbacks) {
postCallbackToLooper(callback, reason);
}
mRejectCallbacks.clear();
}
/**
* Rejects a Promise, see {@link #reject(Exception)}.
*/
public void reject() {
reject(null);
}
/**
* Returns whether the promise is fulfilled.
*/
public boolean isFulfilled() {
checkThread();
return mState == PromiseState.FULFILLED;
}
/**
* Returns whether the promise is rejected.
*/
public boolean isRejected() {
checkThread();
return mState == PromiseState.REJECTED;
}
/**
* Must be called after the promise has been fulfilled.
*
* @return The promised result.
*/
public T getResult() {
assert isFulfilled();
return mResult;
}
/**
* Convenience method to return a Promise fulfilled with the given result.
*/
public static <T> Promise<T> fulfilled(T result) {
Promise<T> promise = new Promise<>();
promise.fulfill(result);
return promise;
}
private void checkThread() {
assert mThread == Thread.currentThread() : "Promise must only be used on a single Thread.";
}
// We use a different type parameter here so this can be used for both T and Exception results.
private <S> void postCallbackToLooper(final Callback<S> callback, final S result) {
// Post the callbacks to the Thread looper so we don't get a long chain of callbacks
// holding up the thread.
mHandler.post(new Runnable() {
@Override
public void run() {
callback.onResult(result);
}
});
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.elasticsearch;
import static org.apache.beam.sdk.io.elasticsearch.ElasticsearchIO.ConnectionConfiguration;
import static org.apache.beam.sdk.io.elasticsearch.ElasticsearchIOTestCommon.ES_TYPE;
import static org.apache.beam.sdk.io.elasticsearch.ElasticsearchIOTestCommon.UPDATE_INDEX;
import static org.apache.beam.sdk.io.elasticsearch.ElasticsearchIOTestCommon.UPDATE_TYPE;
import static org.apache.beam.sdk.io.elasticsearch.ElasticsearchIOTestCommon.getEsIndex;
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import java.io.IOException;
import java.io.Serializable;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.beam.sdk.testing.TestPipeline;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.transport.Netty4Plugin;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
/*
Cannot use @RunWith(JUnit4.class) with ESIntegTestCase
Cannot have @BeforeClass @AfterClass with ESIntegTestCase
*/
/** Tests for {@link ElasticsearchIO} version 5. */
@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
// use cluster of 1 node that has data + master roles
@ESIntegTestCase.ClusterScope(scope = SUITE, numDataNodes = 1, supportsDedicatedMasters = false)
public class ElasticsearchIOTest extends ESIntegTestCase implements Serializable {
private ElasticsearchIOTestCommon elasticsearchIOTestCommon;
private ConnectionConfiguration connectionConfiguration;
private String[] fillAddresses() {
ArrayList<String> result = new ArrayList<>();
for (InetSocketAddress address : cluster().httpAddresses()) {
result.add(String.format("http://%s:%s", address.getHostString(), address.getPort()));
}
return result.toArray(new String[result.size()]);
}
@Override
protected Settings nodeSettings(int nodeOrdinal) {
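// Presumably needed because several embedded nodes/clients share one JVM and would trip
// Netty's "available processors already set" guard; this tells ES not to set it.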
System.setProperty("es.set.netty.runtime.available.processors", "false");
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put("http.enabled", "true")
// with some JDKs the embedded ES was too slow for bulk insertion and the default queue of 50
// filled up. Not a problem with a real ES instance (cf. the testWrite integration test)
.put("thread_pool.bulk.queue_size", 400)
.build();
}
@Override
public Settings indexSettings() {
return Settings.builder()
.put(super.indexSettings())
// useful to have updated sizes for getEstimatedSize
.put("index.store.stats_refresh_interval", 0)
.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>();
plugins.add(Netty4Plugin.class);
return plugins;
}
@Before
public void setup() throws IOException {
if (connectionConfiguration == null) {
connectionConfiguration =
ConnectionConfiguration.create(fillAddresses(), getEsIndex(), ES_TYPE)
.withSocketAndRetryTimeout(120000)
.withConnectTimeout(5000);
elasticsearchIOTestCommon =
new ElasticsearchIOTestCommon(connectionConfiguration, getRestClient(), false);
}
}
@Rule public TestPipeline pipeline = TestPipeline.create();
@Test
public void testSizes() throws Exception {
// need to create the index using the helper method (not create it at first insertion)
// for the indexSettings() to be run
createIndex(getEsIndex());
elasticsearchIOTestCommon.testSizes();
}
@Test
public void testRead() throws Exception {
// need to create the index using the helper method (not create it at first insertion)
// for the indexSettings() to be run
createIndex(getEsIndex());
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testRead();
}
@Test
public void testReadWithQueryString() throws Exception {
// need to create the index using the helper method (not create it at first insertion)
// for the indexSettings() to be run
createIndex(getEsIndex());
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testReadWithQueryString();
}
@Test
public void testReadWithQueryValueProvider() throws Exception {
// need to create the index using the helper method (not create it at first insertion)
// for the indexSettings() to be run
createIndex(getEsIndex());
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testReadWithQueryValueProvider();
}
@Test
public void testWrite() throws Exception {
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testWrite();
}
@Rule public ExpectedException expectedException = ExpectedException.none();
@Test
public void testWriteWithErrors() throws Exception {
elasticsearchIOTestCommon.setExpectedException(expectedException);
elasticsearchIOTestCommon.testWriteWithErrors();
}
@Test
public void testWriteWithMaxBatchSize() throws Exception {
elasticsearchIOTestCommon.testWriteWithMaxBatchSize();
}
@Test
public void testWriteWithMaxBatchSizeBytes() throws Exception {
elasticsearchIOTestCommon.testWriteWithMaxBatchSizeBytes();
}
@Test
public void testSplit() throws Exception {
// need to create the index using the helper method (not create it at first insertion)
// for the indexSettings() to be run
createIndex(getEsIndex());
elasticsearchIOTestCommon.testSplit(2_000);
}
@Test
public void testWriteWithIdFn() throws Exception {
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testWriteWithIdFn();
}
@Test
public void testWriteWithIndexFn() throws Exception {
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testWriteWithIndexFn();
}
@Test
public void testWriteWithTypeFn() throws Exception {
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testWriteWithTypeFn2x5x();
}
@Test
public void testWriteFullAddressing() throws Exception {
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testWriteWithFullAddressing();
}
@Test
public void testWritePartialUpdate() throws Exception {
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testWritePartialUpdate();
}
@Test
public void testWritePartialUpdateWithErrors() throws Exception {
// cannot share elasticsearchIOTestCommon because tests run in parallel.
ConnectionConfiguration connectionConfiguration =
ConnectionConfiguration.create(fillAddresses(), UPDATE_INDEX, UPDATE_TYPE);
ElasticsearchIOTestCommon elasticsearchIOTestCommonWithErrors =
new ElasticsearchIOTestCommon(connectionConfiguration, getRestClient(), false);
elasticsearchIOTestCommonWithErrors.setPipeline(pipeline);
elasticsearchIOTestCommonWithErrors.testWritePartialUpdateWithErrors();
}
@Test
public void testReadWithMetadata() throws Exception {
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testReadWithMetadata();
}
@Test
public void testDefaultRetryPredicate() throws IOException {
elasticsearchIOTestCommon.testDefaultRetryPredicate(getRestClient());
}
@Test
public void testWriteRetry() throws Throwable {
elasticsearchIOTestCommon.setExpectedException(expectedException);
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testWriteRetry();
}
@Test
public void testWriteRetryValidRequest() throws Throwable {
elasticsearchIOTestCommon.setPipeline(pipeline);
elasticsearchIOTestCommon.testWriteRetryValidRequest();
}
}
|
|
package org.apache.archiva.configuration;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.redback.components.registry.RegistryException;
import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;
import javax.inject.Inject;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import static org.easymock.EasyMock.*;
import static org.junit.Assert.*;
/**
* Test the configuration store.
*/
@RunWith(ArchivaSpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" })
@DirtiesContext(classMode = ClassMode.AFTER_EACH_TEST_METHOD)
public class ArchivaConfigurationTest
{
private Logger log = LoggerFactory.getLogger( getClass() );
@Inject
protected ApplicationContext applicationContext;
@Inject
FileTypes filetypes;
public static Path getTestFile( String path )
{
return Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
}
@SuppressWarnings( "unchecked" )
protected <T> T lookup( Class<T> clazz, String hint )
{
return (T) applicationContext.getBean( "archivaConfiguration#" + hint, ArchivaConfiguration.class );
}
@Test
public void testGetConfigurationFromDefaults()
throws Exception
{
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-defaults" );
Configuration configuration = archivaConfiguration.getConfiguration();
assertConfiguration( configuration, 2, 1, 1 );
assertEquals( "check network proxies", 0, configuration.getNetworkProxies().size() );
ManagedRepositoryConfiguration repository = configuration.getManagedRepositories().get( 0 );
assertEquals( "check managed repositories", "${appserver.base}/repositories/internal",
repository.getLocation() );
assertEquals( "check managed repositories", "Archiva Managed Internal Repository", repository.getName() );
assertEquals( "check managed repositories", "internal", repository.getId() );
assertEquals( "check managed repositories", "default", repository.getLayout() );
assertTrue( "check managed repositories", repository.isScanned() );
}
@Test
public void testGetConfigurationFromRegistryWithASingleNamedConfigurationResource()
throws Exception
{
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-configuration" );
Configuration configuration = archivaConfiguration.getConfiguration();
assertConfiguration( configuration, 2, 2, 2 );
assertEquals( "check network proxies", 1, configuration.getNetworkProxies().size() );
ManagedRepositoryConfiguration repository = configuration.getManagedRepositories().get( 0 );
assertEquals( "check managed repositories", "${appserver.base}/repositories/internal",
repository.getLocation() );
assertEquals( "check managed repositories", "Archiva Managed Internal Repository", repository.getName() );
assertEquals( "check managed repositories", "internal", repository.getId() );
assertEquals( "check managed repositories", "default", repository.getLayout() );
assertTrue( "check managed repositories", repository.isScanned() );
}
/**
* Ensures that the provided configuration matches the details present in the archiva-default.xml file.
*/
private void assertConfiguration( Configuration configuration, int managedExpected, int remoteExpected,
int proxyConnectorExpected )
throws Exception
{
assertEquals( "check managed repositories: " + configuration.getManagedRepositories(), managedExpected,
configuration.getManagedRepositories().size() );
assertEquals( "check remote repositories: " + configuration.getRemoteRepositories(), remoteExpected,
configuration.getRemoteRepositories().size() );
assertEquals( "check proxy connectors:" + configuration.getProxyConnectors(), proxyConnectorExpected,
configuration.getProxyConnectors().size() );
RepositoryScanningConfiguration repoScanning = configuration.getRepositoryScanning();
assertNotNull( "check repository scanning", repoScanning );
assertEquals( "check file types", 4, repoScanning.getFileTypes().size() );
assertEquals( "check known consumers", 9, repoScanning.getKnownContentConsumers().size() );
assertEquals( "check invalid consumers", 1, repoScanning.getInvalidContentConsumers().size() );
List<String> patterns = filetypes.getFileTypePatterns( "artifacts" );
assertNotNull( "check 'artifacts' file type", patterns );
assertEquals( "check 'artifacts' patterns", 13, patterns.size() );
WebappConfiguration webapp = configuration.getWebapp();
assertNotNull( "check webapp", webapp );
UserInterfaceOptions ui = webapp.getUi();
assertNotNull( "check webapp ui", ui );
assertTrue( "check showFindArtifacts", ui.isShowFindArtifacts() );
assertTrue( "check appletFindEnabled", ui.isAppletFindEnabled() );
}
@Test
public void testGetConfigurationFromRegistryWithTwoConfigurationResources()
throws Exception
{
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-configuration-both" );
Configuration configuration = archivaConfiguration.getConfiguration();
// from base
assertEquals( "check repositories", 2, configuration.getManagedRepositories().size() );
assertEquals( "check repositories", 2, configuration.getRemoteRepositories().size() );
// from user
assertEquals( "check proxy connectors", 2, configuration.getProxyConnectors().size() );
WebappConfiguration webapp = configuration.getWebapp();
assertNotNull( "check webapp", webapp );
UserInterfaceOptions ui = webapp.getUi();
assertNotNull( "check webapp ui", ui );
// from base
assertFalse( "check showFindArtifacts", ui.isShowFindArtifacts() );
// from user
assertFalse( "check appletFindEnabled", ui.isAppletFindEnabled() );
}
@Test
public void testGetConfigurationSystemOverride()
throws Exception
{
System.setProperty( "org.apache.archiva.webapp.ui.appletFindEnabled", "false" );
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-configuration" );
archivaConfiguration.reload();
try
{
Configuration configuration = archivaConfiguration.getConfiguration();
assertFalse( "check boolean", configuration.getWebapp().getUi().isAppletFindEnabled() );
}
finally
{
System.getProperties().remove( "org.apache.archiva.webapp.ui.appletFindEnabled" );
archivaConfiguration.reload();
Configuration configuration = archivaConfiguration.getConfiguration();
assertTrue( "check boolean", configuration.getWebapp().getUi().isAppletFindEnabled() );
}
}
@Test
public void testStoreConfiguration()
throws Exception
{
Path file = getTestFile( "target/test/test-file.xml" );
Files.deleteIfExists(file);
assertFalse( Files.exists(file) );
// TODO: remove with commons-configuration 1.4
//file.getParentFile().mkdirs();
//FileUtils.writeStringToFile( file, "<configuration/>", null );
DefaultArchivaConfiguration archivaConfiguration =
(DefaultArchivaConfiguration) lookup( ArchivaConfiguration.class, "test-save" );
archivaConfiguration.reload();
Configuration configuration = new Configuration();
configuration.setVersion( "1" );
configuration.setWebapp( new WebappConfiguration() );
configuration.getWebapp().setUi( new UserInterfaceOptions() );
configuration.getWebapp().getUi().setAppletFindEnabled( false );
// add a change listener
ConfigurationListener listener = createMock( ConfigurationListener.class );
archivaConfiguration.addListener( listener );
listener.configurationEvent( new ConfigurationEvent( ConfigurationEvent.SAVED ) );
replay( listener );
archivaConfiguration.save( configuration );
verify( listener );
assertTrue( "Check file exists", Files.exists(file) );
// check it
configuration = archivaConfiguration.getConfiguration();
assertFalse( "check value", configuration.getWebapp().getUi().isAppletFindEnabled() );
// read it back
archivaConfiguration = (DefaultArchivaConfiguration) lookup( ArchivaConfiguration.class, "test-read-saved" );
archivaConfiguration.reload();
configuration = archivaConfiguration.getConfiguration();
assertFalse( "check value", configuration.getWebapp().getUi().isAppletFindEnabled() );
}
private static ConfigurationListener createConfigurationListenerMockControl()
{
return createMock( ConfigurationListener.class );// MockControl.createControl( ConfigurationListener.class );
}
@Test
public void testStoreConfigurationUser()
throws Exception
{
Path baseFile = getTestFile( "target/test/test-file.xml" );
Files.deleteIfExists( baseFile );
assertFalse( Files.exists(baseFile) );
Path userFile = getTestFile( "target/test/test-file-user.xml" );
Files.deleteIfExists( userFile );
assertFalse( Files.exists(userFile) );
Files.createDirectories(userFile.getParent());
FileUtils.writeStringToFile( userFile.toFile(), "<configuration/>", Charset.forName( "UTF-8" ) );
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-save-user" );
Configuration configuration = new Configuration();
configuration.setWebapp( new WebappConfiguration() );
configuration.getWebapp().setUi( new UserInterfaceOptions() );
configuration.getWebapp().getUi().setAppletFindEnabled( false );
archivaConfiguration.save( configuration );
assertTrue( "Check file exists", Files.exists(userFile) );
assertFalse( "Check file not created", Files.exists(baseFile) );
// check it
configuration = archivaConfiguration.getConfiguration();
assertFalse( "check value", configuration.getWebapp().getUi().isAppletFindEnabled() );
}
@Test
public void testStoreConfigurationLoadedFromDefaults()
throws Exception
{
Path baseFile = getTestFile( "target/test/test-file.xml" );
Files.delete(baseFile);
assertFalse( Files.exists(baseFile) );
Path userFile = getTestFile( "target/test/test-file-user.xml" );
Files.deleteIfExists(userFile);
assertFalse( Files.exists(userFile) );
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-save-user-defaults" );
archivaConfiguration.reload();
Configuration configuration = new Configuration();
configuration.setWebapp( new WebappConfiguration() );
configuration.getWebapp().setUi( new UserInterfaceOptions() );
configuration.getWebapp().getUi().setAppletFindEnabled( false );
// add a change listener
ConfigurationListener listener = createConfigurationListenerMockControl();
archivaConfiguration.addListener( listener );
listener.configurationEvent( new ConfigurationEvent( ConfigurationEvent.SAVED ) );
replay( listener );
archivaConfiguration.save( configuration );
verify( listener );
assertTrue( "Check file exists", Files.exists(userFile) );
assertFalse( "Check file not created", Files.exists(baseFile) );
// check it
configuration = archivaConfiguration.getConfiguration();
assertFalse( "check value", configuration.getWebapp().getUi().isAppletFindEnabled() );
}
@Test
public void testDefaultUserConfigFilename()
throws Exception
{
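// The assertions below encode the lookup order for the user configuration file:
// the USER_CONFIG_PROPERTY system property wins, then the USER_CONFIG_ENVVAR environment
// variable, and otherwise ~/.m2/archiva.xml is used.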
DefaultArchivaConfiguration archivaConfiguration =
(DefaultArchivaConfiguration) lookup( ArchivaConfiguration.class, "default" );
String expectedFile = System.getProperty( "user.home" ) + "/.m2/archiva.xml";
String systemFile = System.getProperty(ArchivaConfiguration.USER_CONFIG_PROPERTY);
if (StringUtils.isNotEmpty( systemFile )) {
expectedFile = systemFile;
} else
{
String envFile = System.getenv( ArchivaConfiguration.USER_CONFIG_ENVVAR );
if ( StringUtils.isNotEmpty( envFile ) )
expectedFile = envFile;
}
archivaConfiguration.reload();
assertEquals( expectedFile,
archivaConfiguration.getUserConfigFilename() );
assertEquals( System.getProperty( "appserver.base", "${appserver.base}" ) + "/conf/archiva.xml",
archivaConfiguration.getAltConfigFilename() );
}
@Test
public void testStoreConfigurationFallback()
throws Exception
{
Path baseFile = getTestFile( "target/test/test-file.xml" );
Files.deleteIfExists(baseFile);
assertFalse( Files.exists(baseFile) );
Path userFile = getTestFile( "target/test/test-file-user.xml" );
Files.deleteIfExists(userFile);
assertFalse( Files.exists(userFile) );
Files.createDirectories( baseFile.getParent());
FileUtils.writeStringToFile( baseFile.toFile(), "<configuration/>", Charset.forName( "UTF-8" ) );
ArchivaConfiguration archivaConfiguration =
(ArchivaConfiguration) lookup( ArchivaConfiguration.class, "test-save-user-fallback" );
archivaConfiguration.reload();
Configuration configuration = new Configuration();
configuration.setWebapp( new WebappConfiguration() );
configuration.getWebapp().setUi( new UserInterfaceOptions() );
configuration.getWebapp().getUi().setAppletFindEnabled( false );
archivaConfiguration.save( configuration );
assertTrue( "Check file exists", Files.exists(baseFile) );
assertFalse( "Check file not created", Files.exists(userFile) );
// check it
configuration = archivaConfiguration.getConfiguration();
assertFalse( "check value", configuration.getWebapp().getUi().isAppletFindEnabled() );
}
@Test
public void testStoreConfigurationFailsWhenReadFromBothLocationsNoLists()
throws Exception
{
Path baseFile = getTestFile( "target/test/test-file.xml" );
Files.deleteIfExists(baseFile);
assertFalse( Files.exists(baseFile) );
Path userFile = getTestFile( "target/test/test-file-user.xml" );
Files.deleteIfExists(userFile);
assertFalse( Files.exists(userFile) );
Files.createDirectories( baseFile.getParent() );
FileUtils.writeStringToFile( baseFile.toFile(), "<configuration/>", Charset.forName( "UTF-8" ) );
Files.createDirectories( userFile.getParent());
FileUtils.writeStringToFile( userFile.toFile(), "<configuration/>", Charset.forName( "UTF-8" ) );
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-save-user" );
archivaConfiguration.reload();
Configuration configuration = archivaConfiguration.getConfiguration();
assertTrue( "check value", configuration.getWebapp().getUi().isAppletFindEnabled() );
configuration.getWebapp().getUi().setAppletFindEnabled( false );
archivaConfiguration.save( configuration );
assertTrue( "Check file exists", Files.exists(baseFile) );
assertEquals( "Check base file is unchanged", "<configuration/>",
FileUtils.readFileToString( baseFile.toFile(), Charset.forName( "UTF-8" ) ) );
assertTrue( "Check file exists", Files.exists(userFile) );
assertFalse( "Check base file is changed",
"<configuration/>".equals( FileUtils.readFileToString( userFile.toFile(), Charset.forName( "UTF-8" ) ) ) );
// check it
configuration = archivaConfiguration.getConfiguration();
assertFalse( "check value", configuration.getWebapp().getUi().isAppletFindEnabled() );
}
@Test
public void testStoreConfigurationFailsWhenReadFromBothLocationsUserHasLists()
throws Exception
{
Path baseFile = getTestFile( "target/test/test-file.xml" );
Files.deleteIfExists(baseFile);
assertFalse( Files.exists(baseFile) );
Path userFile = getTestFile( "target/test/test-file-user.xml" );
Files.deleteIfExists(userFile);
assertFalse( Files.exists(userFile) );
Files.createDirectories( userFile.getParent() );
FileUtils.copyFile( getTestFile( "src/test/conf/conf-user.xml" ).toFile(), userFile.toFile() );
Files.createDirectories(baseFile.getParent());
FileUtils.writeStringToFile( baseFile.toFile(), "<configuration/>", Charset.forName( "UTF-8" ) );
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-save-user" );
archivaConfiguration.reload();
Configuration configuration = archivaConfiguration.getConfiguration();
assertTrue( "check value", configuration.getWebapp().getUi().isShowFindArtifacts() );
configuration.getWebapp().getUi().setShowFindArtifacts( false );
archivaConfiguration.save( configuration );
assertTrue( "Check file exists", Files.exists(baseFile) );
assertEquals( "Check base file is unchanged", "<configuration/>",
FileUtils.readFileToString( baseFile.toFile(), Charset.forName( "UTF-8" ) ) );
assertTrue( "Check file exists", Files.exists(userFile) );
assertFalse( "Check base file is changed",
"<configuration/>".equals( FileUtils.readFileToString( userFile.toFile(), Charset.forName( "UTF-8" ) ) ) );
// check it
configuration = archivaConfiguration.getConfiguration();
assertFalse( "check value", configuration.getWebapp().getUi().isShowFindArtifacts() );
}
@Test
public void testStoreConfigurationFailsWhenReadFromBothLocationsAppserverHasLists()
throws Exception
{
Path baseFile = getTestFile( "target/test/test-file.xml" );
Files.deleteIfExists(baseFile);
assertFalse( Files.exists(baseFile) );
Path userFile = getTestFile( "target/test/test-file-user.xml" );
Files.deleteIfExists(userFile);
assertFalse( Files.exists(userFile) );
Files.createDirectories(baseFile.getParent());
FileUtils.copyFile( getTestFile( "src/test/conf/conf-base.xml" ).toFile(), baseFile.toFile() );
Files.createDirectories(userFile.getParent());
FileUtils.writeStringToFile( userFile.toFile(), "<configuration/>", Charset.forName( "UTF-8" ) );
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-save-user" );
archivaConfiguration.reload();
Configuration configuration = archivaConfiguration.getConfiguration();
assertTrue( "check value", configuration.getWebapp().getUi().isAppletFindEnabled() );
configuration.getWebapp().getUi().setAppletFindEnabled( false );
try
{
archivaConfiguration.save( configuration );
fail( "Configuration saving should not succeed if it was loaded from two locations" );
}
catch ( IndeterminateConfigurationException e )
{
// check it was reverted
configuration = archivaConfiguration.getConfiguration();
assertTrue( "check value", configuration.getWebapp().getUi().isAppletFindEnabled() );
}
}
@Test
public void testLoadConfigurationFromInvalidBothLocationsOnDisk()
throws Exception
{
String propFile = System.getProperty( ArchivaConfiguration.USER_CONFIG_PROPERTY );
System.setProperty( ArchivaConfiguration.USER_CONFIG_PROPERTY, "${basedir}/target/*intentionally:invalid*/.m2/archiva-user.xml" );
ArchivaConfiguration archivaConfiguration =
lookup( ArchivaConfiguration.class, "test-not-allowed-to-write-to-both" );
Configuration config = archivaConfiguration.getConfiguration();
try
{
archivaConfiguration.save( config );
fail( "Should have thrown a RegistryException because the configuration can't be saved." );
}
catch ( RegistryException e )
{
/* expected exception */
}
if (propFile!=null)
{
System.setProperty( ArchivaConfiguration.USER_CONFIG_PROPERTY, propFile );
}
}
@Test
public void testLoadConfigurationFromInvalidUserLocationOnDisk()
throws Exception
{
Path testConfDir = getTestFile( "target/test-appserver-base/conf/" );
Files.createDirectories( testConfDir );
ArchivaConfiguration archivaConfiguration =
lookup( ArchivaConfiguration.class, "test-not-allowed-to-write-to-user" );
Configuration config = archivaConfiguration.getConfiguration();
archivaConfiguration.save( config );
// No exception == test passes.
// The expected path is that save() does not throw.
}
@Test
public void testConfigurationUpgradeFrom13()
throws Exception
{
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-upgrade-1.3" );
// we just use the defaults when upgrading from 1.3 at this point.
Configuration configuration = archivaConfiguration.getConfiguration();
assertConfiguration( configuration, 2, 2, 2 );
assertEquals( "check network proxies", 0, configuration.getNetworkProxies().size() );
ManagedRepositoryConfiguration repository = configuration.getManagedRepositories().get( 0 );
assertEquals( "check managed repositories", "${appserver.base}/data/repositories/internal",
repository.getLocation() );
assertEquals( "check managed repositories", "Archiva Managed Internal Repository", repository.getName() );
assertEquals( "check managed repositories", "internal", repository.getId() );
assertEquals( "check managed repositories", "default", repository.getLayout() );
assertTrue( "check managed repositories", repository.isScanned() );
log.info( "knowContentConsumers {}", configuration.getRepositoryScanning().getKnownContentConsumers() );
assertFalse(
configuration.getRepositoryScanning().getKnownContentConsumers().contains( "update-db-artifact" ) );
assertFalse( configuration.getRepositoryScanning().getKnownContentConsumers().contains(
"update-db-repository-metadata" ) );
assertTrue(
configuration.getRepositoryScanning().getKnownContentConsumers().contains( "create-archiva-metadata" ) );
assertTrue(
configuration.getRepositoryScanning().getKnownContentConsumers().contains( "duplicate-artifacts" ) );
}
@Test
public void testCronExpressionsWithComma()
throws Exception
{
Path baseFile = getTestFile( "target/test/test-file.xml" );
Files.deleteIfExists(baseFile);
assertFalse( Files.exists(baseFile) );
Path userFile = getTestFile( "target/test/test-file-user.xml" );
Files.deleteIfExists(userFile);
assertFalse( Files.exists(userFile) );
Files.createDirectories(baseFile.getParent());
FileUtils.copyFile( getTestFile( "src/test/conf/escape-cron-expressions.xml" ).toFile(), baseFile.toFile() );
Files.createDirectories(userFile.getParent());
FileUtils.writeStringToFile( userFile.toFile(), "<configuration/>", Charset.defaultCharset() );
final ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-cron-expressions" );
archivaConfiguration.reload();
Configuration configuration = archivaConfiguration.getConfiguration();
ManagedRepositoryConfiguration repository = configuration.getManagedRepositories().get( 0 );
assertEquals( "check cron expression", "0 0,30 * * * ?", repository.getRefreshCronExpression().trim() );
// add a test listener to confirm it doesn't see the escaped format. We don't need to test the number of calls,
// etc. as it's done in other tests
archivaConfiguration.addListener( new ConfigurationListener()
{
@Override
public void configurationEvent( ConfigurationEvent event )
{
assertEquals( ConfigurationEvent.SAVED, event.getType() );
}
} );
archivaConfiguration.save( configuration );
configuration = archivaConfiguration.getConfiguration();
// test for the escape character '\' showing up on repositories.jsp
repository.setRefreshCronExpression( "0 0,20 0 * * ?" );
archivaConfiguration.save( configuration );
repository = archivaConfiguration.getConfiguration().findManagedRepositoryById( "snapshots" );
assertEquals( "check cron expression", "0 0,20 0 * * ?", repository.getRefreshCronExpression() );
}
@Test
public void testRemoveLastElements()
throws Exception
{
Path baseFile = getTestFile( "target/test/test-file.xml" );
Files.deleteIfExists(baseFile);
assertFalse( Files.exists(baseFile) );
Path userFile = getTestFile( "target/test/test-file-user.xml" );
Files.deleteIfExists(userFile);
assertFalse( Files.exists(userFile) );
Files.createDirectories( baseFile.getParent() );
FileUtils.copyFile( getTestFile( "src/test/conf/conf-single-list-elements.xml" ).toFile(), baseFile.toFile() );
Files.createDirectories( userFile.getParent());
FileUtils.writeStringToFile( userFile.toFile(), "<configuration/>", Charset.forName( "UTF-8" ) );
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-remove-central" );
archivaConfiguration.reload();
Configuration configuration = archivaConfiguration.getConfiguration();
RepositoryGroupConfiguration repositoryGroup = configuration.getRepositoryGroups().get( 0 );
assertNotNull( repositoryGroup );
configuration.removeRepositoryGroup( repositoryGroup );
assertTrue( configuration.getRepositoryGroups().isEmpty() );
RemoteRepositoryConfiguration repository = configuration.getRemoteRepositoriesAsMap().get( "central" );
assertNotNull( repository );
configuration.removeRemoteRepository( repository );
assertTrue( configuration.getRemoteRepositories().isEmpty() );
ManagedRepositoryConfiguration managedRepository =
configuration.getManagedRepositoriesAsMap().get( "snapshots" );
assertNotNull( managedRepository );
configuration.removeManagedRepository( managedRepository );
assertTrue( configuration.getManagedRepositories().isEmpty() );
ProxyConnectorConfiguration proxyConnector = configuration.getProxyConnectors().get( 0 );
assertNotNull( proxyConnector );
configuration.removeProxyConnector( proxyConnector );
assertTrue( configuration.getProxyConnectors().isEmpty() );
NetworkProxyConfiguration networkProxy = configuration.getNetworkProxiesAsMap().get( "proxy" );
assertNotNull( networkProxy );
configuration.removeNetworkProxy( networkProxy );
assertTrue( configuration.getNetworkProxies().isEmpty() );
LegacyArtifactPath path = configuration.getLegacyArtifactPaths().get( 0 );
assertNotNull( path );
configuration.removeLegacyArtifactPath( path );
assertTrue( configuration.getLegacyArtifactPaths().isEmpty() );
RepositoryScanningConfiguration scanning = configuration.getRepositoryScanning();
String consumer = scanning.getKnownContentConsumers().get( 0 );
assertNotNull( consumer );
scanning.removeKnownContentConsumer( consumer );
// default values
assertFalse( scanning.getKnownContentConsumers().isEmpty() );
consumer = scanning.getInvalidContentConsumers().get( 0 );
assertNotNull( consumer );
scanning.removeInvalidContentConsumer( consumer );
assertTrue( scanning.getInvalidContentConsumers().isEmpty() );
archivaConfiguration.save( configuration );
archivaConfiguration = lookup( ArchivaConfiguration.class, "test-read-saved" );
configuration = archivaConfiguration.getConfiguration();
assertNull( configuration.getRemoteRepositoriesAsMap().get( "central" ) );
assertTrue( configuration.getRepositoryGroups().isEmpty() );
assertNull( configuration.getManagedRepositoriesAsMap().get( "snapshots" ) );
assertTrue( configuration.getProxyConnectors().isEmpty() );
assertNull( configuration.getNetworkProxiesAsMap().get( "proxy" ) );
assertTrue( configuration.getLegacyArtifactPaths().isEmpty() );
scanning = configuration.getRepositoryScanning();
assertFalse( scanning.getKnownContentConsumers().isEmpty() );
assertTrue( scanning.getInvalidContentConsumers().isEmpty() );
}
/**
 * [MRM-582] Remote Repositories with empty {@code <username>} and {@code <password>} fields shouldn't be created in configuration.
*/
@Test
public void testGetConfigurationFixEmptyRemoteRepoUsernamePassword()
throws Exception
{
ArchivaConfiguration archivaConfiguration = lookup( ArchivaConfiguration.class, "test-configuration" );
Configuration configuration = archivaConfiguration.getConfiguration();
assertConfiguration( configuration, 2, 2, 2 );
assertEquals( "check remote repositories", 2, configuration.getRemoteRepositories().size() );
RemoteRepositoryConfiguration repository =
configuration.getRemoteRepositoriesAsMap().get( "maven2-repository.dev.java.net" );
assertEquals( "remote repository.url", "https://maven2-repository.dev.java.net/nonav/repository",
repository.getUrl() );
assertEquals( "remote repository.name", "Java.net Repository for Maven 2", repository.getName() );
assertEquals( "remote repository.id", "maven2-repository.dev.java.net", repository.getId() );
assertEquals( "remote repository.layout", "default", repository.getLayout() );
assertNull( "remote repository.username == null", repository.getUsername() );
assertNull( "remote repository.password == null", repository.getPassword() );
}
}
|
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.ui;
import com.intellij.ide.BrowserUtil;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserFactory;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.options.ex.SingleConfigurableEditor;
import com.intellij.openapi.options.newEditor.SettingsDialog;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.*;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.HyperlinkLabel;
import com.intellij.ui.TextFieldWithHistory;
import com.intellij.ui.TextFieldWithHistoryWithBrowseButton;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.util.Consumer;
import com.intellij.util.NotNullProducer;
import com.intellij.util.ObjectUtils;
import com.intellij.util.PlatformIcons;
import com.intellij.util.containers.ComparatorUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.update.UiNotifyConnector;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableColumn;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.Document;
import javax.swing.text.Element;
import javax.swing.text.html.HTML;
import javax.swing.text.html.HTMLDocument;
import java.awt.*;
import java.awt.datatransfer.StringSelection;
import java.awt.datatransfer.Transferable;
import java.awt.event.ActionListener;
import java.util.*;
import java.util.List;
public class SwingHelper {
private static final Logger LOG = Logger.getInstance(SwingHelper.class);
private static final String DIALOG_RESIZED_TO_FIT_TEXT = "INTELLIJ_DIALOG_RESIZED_TO_FIT_TEXT";
/**
   * Creates a panel whose content consists of the given {@code children} components
   * stacked vertically, one below another, in the given order.
*
* @param childAlignmentX Component.LEFT_ALIGNMENT, Component.CENTER_ALIGNMENT or Component.RIGHT_ALIGNMENT
* @param children children components
* @return created panel
*/
@NotNull
public static JPanel newVerticalPanel(float childAlignmentX, Component... children) {
return newGenericBoxPanel(true, childAlignmentX, children);
}
@NotNull
public static JPanel newLeftAlignedVerticalPanel(Component... children) {
return newVerticalPanel(Component.LEFT_ALIGNMENT, children);
}
@NotNull
public static JPanel newLeftAlignedVerticalPanel(@NotNull Collection<Component> children) {
return newVerticalPanel(Component.LEFT_ALIGNMENT, children);
}
@NotNull
public static JPanel newVerticalPanel(float childAlignmentX, @NotNull Collection<Component> children) {
return newVerticalPanel(childAlignmentX, children.toArray(new Component[children.size()]));
}
/**
   * Creates a panel whose content consists of the given {@code children} components
   * stacked horizontally, one after another, in the given order.
*
* @param childAlignmentY Component.TOP_ALIGNMENT, Component.CENTER_ALIGNMENT or Component.BOTTOM_ALIGNMENT
* @param children children components
* @return created panel
*/
@NotNull
public static JPanel newHorizontalPanel(float childAlignmentY, Component... children) {
return newGenericBoxPanel(false, childAlignmentY, children);
}
private static JPanel newGenericBoxPanel(boolean verticalOrientation,
float childAlignment,
Component... children) {
JPanel panel = new JPanel();
int axis = verticalOrientation ? BoxLayout.Y_AXIS : BoxLayout.X_AXIS;
panel.setLayout(new BoxLayout(panel, axis));
for (Component child : children) {
panel.add(child, childAlignment);
if (child instanceof JComponent) {
JComponent jChild = (JComponent)child;
if (verticalOrientation) {
jChild.setAlignmentX(childAlignment);
}
else {
jChild.setAlignmentY(childAlignment);
}
}
}
return panel;
}
@NotNull
public static JPanel wrapWithoutStretch(@NotNull JComponent component) {
JPanel panel = new JPanel(new FlowLayout(FlowLayout.LEFT, 0, 0));
panel.add(component);
return panel;
}
@NotNull
public static JPanel wrapWithHorizontalStretch(@NotNull JComponent component) {
JPanel panel = new JPanel(new BorderLayout(0, 0));
panel.add(component, BorderLayout.NORTH);
return panel;
}
public static void setPreferredWidthToFitText(@NotNull TextFieldWithHistoryWithBrowseButton component) {
int childWidth = calcWidthToFitText(component.getChildComponent().getTextEditor(), JBUI.scale(32));
setPreferredWidthForComponentWithBrowseButton(component, childWidth);
}
public static void setPreferredWidthToFitText(@NotNull TextFieldWithBrowseButton component) {
int childWidth = calcWidthToFitText(component.getChildComponent(), JBUI.scale(20));
setPreferredWidthForComponentWithBrowseButton(component, childWidth);
}
private static <T extends JComponent> void setPreferredWidthForComponentWithBrowseButton(@NotNull ComponentWithBrowseButton<T> component,
int childPrefWidth) {
Dimension buttonPrefSize = component.getButton().getPreferredSize();
setPreferredWidth(component, childPrefWidth + buttonPrefSize.width);
}
public static void setPreferredWidthToFitText(@NotNull JTextField textField) {
setPreferredWidthToFitText(textField, JBUI.scale(15));
}
public static void setPreferredWidthToFitText(@NotNull JTextField textField, int additionalWidth) {
setPreferredSizeToFitText(textField, StringUtil.notNullize(textField.getText()), additionalWidth);
}
public static void setPreferredWidthToFitText(@NotNull JTextField textField, @NotNull String text) {
setPreferredSizeToFitText(textField, text, JBUI.scale(15));
}
private static void setPreferredSizeToFitText(@NotNull JTextField textField, @NotNull String text, int additionalWidth) {
int width = calcWidthToFitText(textField, text, additionalWidth);
setPreferredWidth(textField, width);
}
private static int calcWidthToFitText(@NotNull JTextField textField, int additionalWidth) {
return calcWidthToFitText(textField, textField.getText(), additionalWidth);
}
private static int calcWidthToFitText(@NotNull JTextField textField, @NotNull String text, int additionalWidth) {
return textField.getFontMetrics(textField.getFont()).stringWidth(text) + additionalWidth;
}
public static void adjustDialogSizeToFitPreferredSize(@NotNull DialogWrapper dialogWrapper) {
JRootPane rootPane = dialogWrapper.getRootPane();
Dimension componentSize = rootPane.getSize();
Dimension componentPreferredSize = rootPane.getPreferredSize();
if (componentPreferredSize.width <= componentSize.width && componentPreferredSize.height <= componentSize.height) {
return;
}
int dw = Math.max(0, componentPreferredSize.width - componentSize.width);
int dh = Math.max(0, componentPreferredSize.height - componentSize.height);
Dimension oldDialogSize = dialogWrapper.getSize();
Dimension newDialogSize = new Dimension(oldDialogSize.width + dw, oldDialogSize.height + dh);
dialogWrapper.setSize(newDialogSize.width, newDialogSize.height);
rootPane.revalidate();
rootPane.repaint();
LOG.info("DialogWrapper '" + dialogWrapper.getTitle() + "' has been re-sized (added width: " + dw + ", added height: " + dh + ")");
}
public static void resizeDialogToFitTextFor(@NotNull final JComponent... components) {
if (components.length == 0) return;
doWithDialogWrapper(components[0], dialogWrapper -> {
if (dialogWrapper instanceof SettingsDialog || dialogWrapper instanceof SingleConfigurableEditor) {
for (Component component : components) {
if (component instanceof TextFieldWithHistoryWithBrowseButton) {
setPreferredWidthToFitText((TextFieldWithHistoryWithBrowseButton)component);
}
else if (component instanceof TextFieldWithBrowseButton) {
setPreferredWidthToFitText((TextFieldWithBrowseButton)component);
}
else if (component instanceof JTextField) {
setPreferredWidthToFitText((JTextField)component);
}
}
ApplicationManager.getApplication().invokeLater(() -> adjustDialogSizeToFitPreferredSize(dialogWrapper), ModalityState.any());
}
});
}
private static void doWithDialogWrapper(@NotNull final JComponent component, @NotNull final Consumer<DialogWrapper> consumer) {
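    // Runs the consumer with the DialogWrapper containing the component, at most once per component
    // (guarded by the DIALOG_RESIZED_TO_FIT_TEXT client property). If the component is not yet inside a
    // dialog, the lookup is retried when the component is first shown.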
UIUtil.invokeLaterIfNeeded(() -> {
if (component.getClientProperty(DIALOG_RESIZED_TO_FIT_TEXT) != null) {
return;
}
component.putClientProperty(DIALOG_RESIZED_TO_FIT_TEXT, true);
DialogWrapper dialogWrapper = DialogWrapper.findInstance(component);
if (dialogWrapper != null) {
consumer.consume(dialogWrapper);
}
else {
UiNotifyConnector.doWhenFirstShown(component, () -> {
DialogWrapper dialogWrapper1 = DialogWrapper.findInstance(component);
if (dialogWrapper1 != null) {
consumer.consume(dialogWrapper1);
}
});
}
});
}
public static <T> void updateItems(@NotNull JComboBox<T> comboBox,
@NotNull List<T> newItems,
@Nullable T newSelectedItemIfSelectionCannotBePreserved) {
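    // Rebuild the combo box from newItems (skipped when the items are unchanged). The current selection is
    // kept if it is still present; otherwise the supplied fallback item is selected when available, and if
    // that is absent too the selection is simply not restored.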
if (!shouldUpdate(comboBox, newItems)) {
return;
}
Object itemToSelect = comboBox.getSelectedItem();
boolean preserveSelection = true;
//noinspection SuspiciousMethodCalls
if (!newItems.contains(itemToSelect)) {
if (newItems.contains(newSelectedItemIfSelectionCannotBePreserved)) {
itemToSelect = newSelectedItemIfSelectionCannotBePreserved;
}
else {
itemToSelect = null;
preserveSelection = false;
}
}
comboBox.removeAllItems();
for (T newItem : newItems) {
comboBox.addItem(newItem);
}
if (preserveSelection) {
int count = comboBox.getItemCount();
for (int i = 0; i < count; i++) {
Object item = comboBox.getItemAt(i);
if (ComparatorUtil.equalsNullable(itemToSelect, item)) {
comboBox.setSelectedIndex(i);
break;
}
}
}
}
private static <T> boolean shouldUpdate(@NotNull JComboBox<T> comboBox, @NotNull List<T> newItems) {
int count = comboBox.getItemCount();
if (newItems.size() != count) {
return true;
}
for (int i = 0; i < count; i++) {
Object oldItem = comboBox.getItemAt(i);
T newItem = newItems.get(i);
if (!ComparatorUtil.equalsNullable(oldItem, newItem)) {
return true;
}
}
return false;
}
public static void setNoBorderCellRendererFor(@NotNull TableColumn column) {
final TableCellRenderer previous = column.getCellRenderer();
column.setCellRenderer(new DefaultTableCellRenderer() {
@Override
public Component getTableCellRendererComponent(JTable table,
Object value,
boolean isSelected,
boolean hasFocus,
int row,
int column) {
Component component;
if (previous != null) {
component = previous.getTableCellRendererComponent(table, value, isSelected, false, row, column);
}
else {
component = super.getTableCellRendererComponent(table, value, isSelected, false, row, column);
}
if (component instanceof JComponent) {
((JComponent)component).setBorder(null);
}
return component;
}
});
}
public static void addHistoryOnExpansion(@NotNull final TextFieldWithHistory textFieldWithHistory,
@NotNull final NotNullProducer<List<String>> historyProvider) {
textFieldWithHistory.addPopupMenuListener(new PopupMenuListener() {
@Override
public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
List<String> history = historyProvider.produce();
setHistory(textFieldWithHistory, ContainerUtil.notNullize(history), true);
// one-time initialization
textFieldWithHistory.removePopupMenuListener(this);
}
@Override
public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
}
@Override
public void popupMenuCanceled(PopupMenuEvent e) {
}
});
}
public static void setHistory(@NotNull TextFieldWithHistory textFieldWithHistory,
@NotNull List<String> history,
boolean mergeWithPrevHistory) {
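    // Replaces the drop-down history. With mergeWithPrevHistory, old entries that are not in the new list are
    // kept ahead of it. The current text and selection are preserved where possible, and the longest entry is
    // used as the prototype display value so the popup is sized to fit.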
Set<String> newHistorySet = ContainerUtil.newHashSet(history);
List<String> prevHistory = textFieldWithHistory.getHistory();
List<String> mergedHistory = ContainerUtil.newArrayListWithCapacity(history.size());
if (mergeWithPrevHistory) {
for (String item : prevHistory) {
if (!newHistorySet.contains(item)) {
mergedHistory.add(item);
}
}
}
mergedHistory.addAll(history);
String oldText = StringUtil.notNullize(textFieldWithHistory.getText());
String oldSelectedItem = ObjectUtils.tryCast(textFieldWithHistory.getSelectedItem(), String.class);
if (!mergedHistory.contains(oldSelectedItem)) {
oldSelectedItem = null;
}
textFieldWithHistory.setHistory(mergedHistory);
setLongestAsPrototype(textFieldWithHistory, mergedHistory);
if (oldSelectedItem != null) {
textFieldWithHistory.setSelectedItem(oldSelectedItem);
}
if (!oldText.equals(oldSelectedItem)) {
textFieldWithHistory.setText(oldText);
}
}
private static void setLongestAsPrototype(@NotNull JComboBox comboBox, @NotNull List<String> variants) {
Object prototypeDisplayValue = comboBox.getPrototypeDisplayValue();
String prototypeDisplayValueStr = null;
if (prototypeDisplayValue instanceof String) {
prototypeDisplayValueStr = (String)prototypeDisplayValue;
}
else if (prototypeDisplayValue != null) {
return;
}
String longest = StringUtil.notNullize(prototypeDisplayValueStr);
boolean updated = false;
for (String s : variants) {
if (longest.length() < s.length()) {
longest = s;
updated = true;
}
}
if (updated) {
comboBox.setPrototypeDisplayValue(longest);
}
}
public static void installFileCompletionAndBrowseDialog(@Nullable Project project,
@NotNull TextFieldWithHistoryWithBrowseButton textFieldWithHistoryWithBrowseButton,
@NotNull @Nls(capitalization = Nls.Capitalization.Title) String browseDialogTitle,
@NotNull FileChooserDescriptor fileChooserDescriptor) {
doInstall(project,
textFieldWithHistoryWithBrowseButton,
textFieldWithHistoryWithBrowseButton.getChildComponent().getTextEditor(),
browseDialogTitle,
fileChooserDescriptor,
TextComponentAccessor.TEXT_FIELD_WITH_HISTORY_WHOLE_TEXT);
}
public static void installFileCompletionAndBrowseDialog(@Nullable Project project,
@NotNull TextFieldWithBrowseButton textFieldWithBrowseButton,
@NotNull @Nls(capitalization = Nls.Capitalization.Title) String browseDialogTitle,
@NotNull FileChooserDescriptor fileChooserDescriptor) {
doInstall(project,
textFieldWithBrowseButton,
textFieldWithBrowseButton.getTextField(),
browseDialogTitle,
fileChooserDescriptor,
TextComponentAccessor.TEXT_FIELD_WHOLE_TEXT);
}
private static <T extends JComponent> void doInstall(@Nullable Project project,
@NotNull ComponentWithBrowseButton<T> componentWithBrowseButton,
@NotNull JTextField textField,
@NotNull @Nls(capitalization = Nls.Capitalization.Title) String browseDialogTitle,
@NotNull FileChooserDescriptor fileChooserDescriptor,
@NotNull TextComponentAccessor<T> textComponentAccessor) {
fileChooserDescriptor = fileChooserDescriptor.withShowHiddenFiles(SystemInfo.isUnix);
componentWithBrowseButton.addBrowseFolderListener(
browseDialogTitle,
null,
project,
fileChooserDescriptor,
textComponentAccessor
);
FileChooserFactory.getInstance().installFileCompletion(
textField,
fileChooserDescriptor,
true,
project
);
}
@NotNull
public static HyperlinkLabel createWebHyperlink(@NotNull String url) {
return createWebHyperlink(url, url);
}
@NotNull
public static HyperlinkLabel createWebHyperlink(@NotNull String text, @NotNull String url) {
HyperlinkLabel hyperlink = new HyperlinkLabel(text);
hyperlink.setHyperlinkTarget(url);
DefaultActionGroup actionGroup = new DefaultActionGroup();
actionGroup.add(new OpenLinkInBrowser(url));
actionGroup.add(new CopyLinkAction(url));
hyperlink.setComponentPopupMenu(ActionManager.getInstance().createActionPopupMenu("web hyperlink", actionGroup).getComponent());
return hyperlink;
}
public static void setPreferredWidth(@NotNull Component component, int width) {
Dimension preferredSize = component.getPreferredSize();
preferredSize.width = width;
component.setPreferredSize(preferredSize);
}
public static boolean scrollToReference(JEditorPane view, String reference) {
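    // Scrolls an HTML view to the anchor named by `reference` (leading '#' is ignored), matching the name or
    // id attribute of <a> and <h1>..<h4> elements; returns true once the anchor has been scrolled into view.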
reference = StringUtil.trimStart(reference, "#");
List<String> toCheck = Arrays.asList("a", "h1", "h2", "h3", "h4");
Document document = view.getDocument();
if (document instanceof HTMLDocument) {
List<Element> list = new ArrayList<>();
for (Element root : document.getRootElements()) {
getAllElements(root, list, toCheck);
}
for (Element element : list) {
AttributeSet attributes = element.getAttributes();
String nm = (String)attributes.getAttribute(HTML.Attribute.NAME);
if (nm == null) nm = (String)attributes.getAttribute(HTML.Attribute.ID);
if ((nm != null) && nm.equals(reference)) {
try {
int pos = element.getStartOffset();
Rectangle r = view.modelToView(pos);
if (r != null) {
Rectangle vis = view.getVisibleRect();
r.y -= 5;
r.height = vis.height;
view.scrollRectToVisible(r);
return true;
}
}
catch (BadLocationException ex) {
//ignore
}
}
}
}
return false;
}
private static void getAllElements(Element root, List<Element> list, List<String> toCheck) {
if (toCheck.contains(root.getName().toLowerCase(Locale.US))) {
list.add(root);
}
for (int i = 0; i < root.getElementCount(); i++) {
getAllElements(root.getElement(i), list, toCheck);
}
}
public static class HtmlViewerBuilder {
private boolean myCarryTextOver;
private String myDisabledHtml;
private Font myFont;
private Color myBackground;
private Color myForeground;
public JEditorPane create() {
final JEditorPane textPane = new JEditorPane() {
private boolean myEnabled = true;
private String myEnabledHtml;
@Override
public Dimension getPreferredSize() {
        // This trick makes the text component carry text over to the next line
        // if the text line width exceeds the parent's width
Dimension dimension = super.getPreferredSize();
if (myCarryTextOver) {
dimension.width = 0;
}
return dimension;
}
@Override
public void setText(String t) {
if (myDisabledHtml != null) {
if (myEnabled) {
myEnabledHtml = t;
}
}
super.setText(t);
}
@Override
public void setEnabled(boolean enabled) {
if (myDisabledHtml != null) {
myEnabled = enabled;
if (myEnabled) {
setText(myEnabledHtml);
} else {
setText(myDisabledHtml);
}
super.setEnabled(true);
} else {
super.setEnabled(enabled);
}
}
};
textPane.setFont(myFont != null ? myFont : UIUtil.getLabelFont());
textPane.setContentType(UIUtil.HTML_MIME);
textPane.setEditable(false);
if (myBackground != null) {
textPane.setBackground(myBackground);
}
else {
textPane.setOpaque(false);
}
textPane.setForeground(myForeground != null ? myForeground : UIUtil.getLabelForeground());
textPane.setFocusable(false);
return textPane;
}
public HtmlViewerBuilder setCarryTextOver(boolean carryTextOver) {
myCarryTextOver = carryTextOver;
return this;
}
public HtmlViewerBuilder setDisabledHtml(String disabledHtml) {
myDisabledHtml = disabledHtml;
return this;
}
public HtmlViewerBuilder setFont(Font font) {
myFont = font;
return this;
}
public HtmlViewerBuilder setBackground(Color background) {
myBackground = background;
return this;
}
public HtmlViewerBuilder setForeground(Color foreground) {
myForeground = foreground;
return this;
}
}
@NotNull
public static JEditorPane createHtmlViewer(boolean lineWrap,
@Nullable Font font,
@Nullable Color background,
@Nullable Color foreground) {
final JEditorPane textPane;
if (lineWrap) {
textPane = new JEditorPane() {
@Override
public Dimension getPreferredSize() {
          // This trick makes the text component carry text over to the next line
          // if the text line width exceeds the parent's width
Dimension dimension = super.getPreferredSize();
dimension.width = 0;
return dimension;
}
};
}
else {
textPane = new JEditorPane();
}
textPane.setFont(font != null ? font : UIUtil.getLabelFont());
textPane.setContentType(UIUtil.HTML_MIME);
textPane.setEditable(false);
if (background != null) {
textPane.setBackground(background);
}
else {
NonOpaquePanel.setTransparent(textPane);
}
textPane.setForeground(foreground != null ? foreground : UIUtil.getLabelForeground());
textPane.setFocusable(false);
return textPane;
}
public static void setHtml(@NotNull JEditorPane editorPane,
@NotNull String bodyInnerHtml,
@Nullable Color foregroundColor) {
String html = String.format(
"<html><head>%s</head><body>%s</body></html>",
UIUtil.getCssFontDeclaration(editorPane.getFont(), foregroundColor, null, null),
bodyInnerHtml
);
editorPane.setText(html);
}
@NotNull
public static TextFieldWithHistoryWithBrowseButton createTextFieldWithHistoryWithBrowseButton(@Nullable Project project,
@NotNull String browseDialogTitle,
@NotNull FileChooserDescriptor fileChooserDescriptor,
@Nullable NotNullProducer<List<String>> historyProvider) {
TextFieldWithHistoryWithBrowseButton textFieldWithHistoryWithBrowseButton = new TextFieldWithHistoryWithBrowseButton();
TextFieldWithHistory textFieldWithHistory = textFieldWithHistoryWithBrowseButton.getChildComponent();
textFieldWithHistory.setHistorySize(-1);
textFieldWithHistory.setMinimumAndPreferredWidth(0);
if (historyProvider != null) {
addHistoryOnExpansion(textFieldWithHistory, historyProvider);
}
installFileCompletionAndBrowseDialog(
project,
textFieldWithHistoryWithBrowseButton,
browseDialogTitle,
fileChooserDescriptor
);
return textFieldWithHistoryWithBrowseButton;
}
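  // Illustrative usage sketch only; the descriptor and loadKnownInterpreterPaths() below are hypothetical
  // placeholders, not part of this class:
  //   TextFieldWithHistoryWithBrowseButton interpreterField =
  //       SwingHelper.createTextFieldWithHistoryWithBrowseButton(
  //           project,
  //           "Select Interpreter",
  //           FileChooserDescriptorFactory.createSingleFileDescriptor(),
  //           () -> loadKnownInterpreterPaths());   // history is computed lazily when the popup opens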
@NotNull
public static <C extends JComponent> ComponentWithBrowseButton<C> wrapWithInfoButton(@NotNull final C component,
@NotNull String infoButtonTooltip,
@NotNull ActionListener listener) {
ComponentWithBrowseButton<C> comp = new ComponentWithBrowseButton<>(component, listener);
FixedSizeButton uiHelpButton = comp.getButton();
uiHelpButton.setToolTipText(infoButtonTooltip);
uiHelpButton.setIcon(UIUtil.getBalloonInformationIcon());
uiHelpButton.setHorizontalAlignment(SwingConstants.CENTER);
uiHelpButton.setVerticalAlignment(SwingConstants.CENTER);
return comp;
}
private static class CopyLinkAction extends AnAction {
private final String myUrl;
private CopyLinkAction(@NotNull String url) {
super("Copy Link Address", null, PlatformIcons.COPY_ICON);
myUrl = url;
}
@Override
public void update(AnActionEvent e) {
e.getPresentation().setEnabled(true);
}
@Override
public void actionPerformed(AnActionEvent e) {
Transferable content = new StringSelection(myUrl);
CopyPasteManager.getInstance().setContents(content);
}
}
private static class OpenLinkInBrowser extends AnAction {
private final String myUrl;
private OpenLinkInBrowser(@NotNull String url) {
super("Open Link in Browser", null, PlatformIcons.WEB_ICON);
myUrl = url;
}
@Override
public void update(AnActionEvent e) {
e.getPresentation().setEnabled(true);
}
@Override
public void actionPerformed(AnActionEvent e) {
BrowserUtil.browse(myUrl);
}
}
public final static String ELLIPSIS = "...";
public static final String ERROR_STR = "www";
public static String truncateStringWithEllipsis(final String text, final int maxWidth, final FontMetrics fm) {
return truncateStringWithEllipsis(text, maxWidth, new WidthCalculator() {
@Override
public int stringWidth(String s) {
return fm.stringWidth(s);
}
@Override
public int charWidth(char c) {
return fm.charWidth(c);
}
});
}
public interface WidthCalculator {
int stringWidth(final String s);
int charWidth(final char c);
}
public static String truncateStringWithEllipsis(@NotNull final String text, final int maxWidth, final WidthCalculator fm) {
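    // Estimates the cut-off point proportionally from the average character width, then walks characters
    // backwards (if the estimate is too wide) or forwards (if too narrow) until the text plus ellipsis fits.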
final int error = fm.stringWidth(ERROR_STR);
final int wholeWidth = fm.stringWidth(text) + error;
if (wholeWidth <= maxWidth || text.isEmpty()) return text;
final int ellipsisWidth = fm.stringWidth(ELLIPSIS) + error; // plus some reserve
if (ellipsisWidth >= maxWidth) return ELLIPSIS;
final int availableWidth = maxWidth - ellipsisWidth;
int currentLen = (int)Math.floor(availableWidth / (((double) wholeWidth) / text.length()));
final String currentSubstring = text.substring(0, currentLen);
int realWidth = fm.stringWidth(currentSubstring);
if (realWidth >= availableWidth) {
int delta = 0;
for (int i = currentLen - 1; i >= 0; i--) {
if ((realWidth - delta) < availableWidth) return text.substring(0, i) + ELLIPSIS;
delta += fm.charWidth(currentSubstring.charAt(i));
}
return text.substring(0, 1) + ELLIPSIS;
} else {
int delta = 0;
for (int i = currentLen; i < text.length(); i++) {
if ((realWidth + delta) >= availableWidth) return text.substring(0, i) + ELLIPSIS;
delta += fm.charWidth(text.charAt(i));
}
return text.substring(0, currentLen) + ELLIPSIS;
}
}
public static JEditorPane createHtmlLabel(@NotNull final String innerHtml, @Nullable String disabledHtml,
@Nullable final Consumer<String> hyperlinkListener) {
disabledHtml = disabledHtml == null ? innerHtml : disabledHtml;
final Font font = UIUtil.getLabelFont();
String html = String.format(
"<html><head>%s</head><body>%s</body></html>",
UIUtil.getCssFontDeclaration(font, UIUtil.getInactiveTextColor(), null, null),
innerHtml
);
String disabled = String.format(
"<html><head>%s</head><body>%s</body></html>",
UIUtil.getCssFontDeclaration(font, UIUtil.getInactiveTextColor(), null, null),
disabledHtml
);
final JEditorPane pane = new SwingHelper.HtmlViewerBuilder()
.setCarryTextOver(false)
.setFont(UIUtil.getLabelFont())
.setDisabledHtml(disabled)
.create();
pane.setText(html);
pane.addHyperlinkListener(
new HyperlinkListener() {
public void hyperlinkUpdate(HyperlinkEvent e) {
if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
if (hyperlinkListener != null) hyperlinkListener.consume(e.getURL() == null ? "" : e.getURL().toString());
else BrowserUtil.browse(e.getURL());
}
}
}
);
return pane;
}
}
|
|
package socialeyser.bl.services.impl;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import socialeyser.bl.services.interfaces.CrisisEvaluator;
import socialeyser.model.CrisisLevels;
import socialeyser.model.Enrichment;
import socialeyser.model.Media;
import socialeyser.model.Message;
import socialeyser.model.MonitoringMetrics;
import socialeyser.model.RawMessage;
import socialeyser.model.User;
public class CrisisEvaluatorImpl
implements CrisisEvaluator
{
  private static final long CRISISINTERVALTHRESHOLD = 300000L; // minimum interval (5 minutes) before the same crisis level is reported again
private MonitoringMetrics metrics = new MonitoringMetrics();
private String currentCrisisLevel;
private static final Logger log = LoggerFactory.getLogger(CrisisEvaluatorImpl.class);
private Date lastCrisisReported;
public String getCurrentCrisisLevel()
{
return this.currentCrisisLevel;
}
public void setCurrentCrisisLevel(String currentCrisisLevel)
{
this.currentCrisisLevel = currentCrisisLevel;
}
public Date getLastCrisisReported()
{
return this.lastCrisisReported;
}
public void setLastCrisisReported(Date lastCrisisReported)
{
this.lastCrisisReported = lastCrisisReported;
}
public CrisisEvaluatorImpl()
{
setCurrentCrisisLevel(CrisisLevels.NONE.getName());
}
public MonitoringMetrics getMetrics()
{
return this.metrics;
}
public void setMetrics(MonitoringMetrics metrics)
{
this.metrics = metrics;
}
public void computeMetrics(Message message)
{
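    // Updates the monitoring metrics for one message: the occurrence buckets for the author, hashtags,
    // media and links, plus the running influence/sentiment totals and averages.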
long currentMessageCount = getMetrics().getMessageCount();
getMetrics().setMessageCount(currentMessageCount++);
User user = message.getRawMessage().getUser();
Map<Integer, List<User>> userCounts = getMetrics().getUserCounts();
Map<Integer, List<String>> hashtagCounts = getMetrics().getHashtagCounts();
Map<Integer, List<Media>> mediaCounts = getMetrics().getMediaCounts();
Map<Integer, List<String>> linkCounts = getMetrics().getLinkCounts();
updateUserCount(message, userCounts);
updateHashtags(message, hashtagCounts);
updateMediaCount(message, mediaCounts);
updateLinks(message, linkCounts);
updateAverages(message);
}
private void updateLinks(Message message, Map<Integer, List<String>> linkCounts)
{
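    // linkCounts maps an occurrence count to the links seen that many times. Each incoming link is promoted
    // from its current bucket to the (count + 1) bucket, or added to bucket 1 if it has not been seen before.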
boolean linksAdded = false;
Set<Map.Entry<Integer, List<String>>> entry = linkCounts.entrySet();
if (entry.size() == 0)
{
List<String> linksList = new LinkedList();
for (String s : message.getRawMessage().getLinks()) {
linksList.add(s);
}
linkCounts.put(new Integer(1), linksList);
}
else
{
for (String s : message.getRawMessage().getLinks())
{
Iterator<Map.Entry<Integer, List<String>>> it = entry.iterator();
while ((it.hasNext()) && (!linksAdded))
{
Map.Entry<Integer, List<String>> i = (Map.Entry)it.next();
List<String> linksList = (List)i.getValue();
if (linksList.contains(s))
{
linksList.remove(s);
if (linksList.size() == 0) {
linkCounts.remove(i.getKey());
}
linksList = (List)linkCounts.get(new Integer(((Integer)i.getKey()).intValue() + 1));
if (linksList == null)
{
linksList = new LinkedList();
linksList.add(s);
linkCounts.put(new Integer(((Integer)i.getKey()).intValue() + 1), linksList);
}
else
{
linksList.add(s);
}
linksAdded = true;
}
}
if (!linksAdded)
{
List<String> linksList = (List)linkCounts.get(new Integer(1));
if (linksList == null)
{
linksList = new LinkedList();
linksList.add(s);
linkCounts.put(new Integer(1), linksList);
}
else
{
linksList.add(s);
}
}
}
}
}
private void updateAverages(Message message)
{
getMetrics().increaseTotalInfluence(message.getEnrichment().getUserInfluence());
getMetrics().increaseTotalSentiment(new Double(message.getEnrichment().getSemanticClassification()).doubleValue());
getMetrics().increaseMessageCount(1L);
    // Assumes the "average" fields are meant to hold total / message count (the original stored the raw totals).
    getMetrics().setInfluenceAverage(getMetrics().getTotalInfluence() / getMetrics().getMessageCount());
    getMetrics().setSentimentAverage(getMetrics().getTotalSentiment() / getMetrics().getMessageCount());
}
private void updateMediaCount(Message message, Map<Integer, List<Media>> mediaCounts)
{
boolean mediaAdded = false;
Set<Map.Entry<Integer, List<Media>>> entry = mediaCounts.entrySet();
if (entry.size() == 0)
{
List<Media> mediasList = new LinkedList();
for (Media m : message.getRawMessage().getMedias()) {
mediasList.add(m);
}
mediaCounts.put(new Integer(1), mediasList);
}
else
{
for (Media m : message.getRawMessage().getMedias())
{
Iterator<Map.Entry<Integer, List<Media>>> it = entry.iterator();
while ((it.hasNext()) && (!mediaAdded))
{
Map.Entry<Integer, List<Media>> i = (Map.Entry)it.next();
List<Media> mediasList = (List)i.getValue();
if (mediasList.contains(m))
{
mediasList.remove(m);
if (mediasList.size() == 0) {
mediaCounts.remove(i.getKey());
}
mediasList = (List)mediaCounts.get(new Integer(((Integer)i.getKey()).intValue() + 1));
if (mediasList == null)
{
mediasList = new LinkedList();
mediasList.add(m);
mediaCounts.put(new Integer(((Integer)i.getKey()).intValue() + 1), mediasList);
}
else
{
mediasList.add(m);
}
mediaAdded = true;
}
}
if (!mediaAdded)
{
List<Media> mediasList = (List)mediaCounts.get(new Integer(1));
if (mediasList == null)
{
mediasList = new LinkedList();
mediasList.add(m);
mediaCounts.put(new Integer(1), mediasList);
}
else
{
mediasList.add(m);
}
}
}
}
}
private void updateHashtags(Message message, Map<Integer, List<String>> hashtagCounts)
{
boolean hashtagAdded = false;
Set<Map.Entry<Integer, List<String>>> entries = hashtagCounts.entrySet();
if (entries.size() == 0)
{
List<String> hashTagsList = new LinkedList();
for (String s : message.getRawMessage().getHashtags()) {
hashTagsList.add(s);
}
hashtagCounts.put(new Integer(1), hashTagsList);
}
else
{
for (String s : message.getRawMessage().getHashtags())
{
Iterator<Map.Entry<Integer, List<String>>> it = entries.iterator();
while ((it.hasNext()) && (!hashtagAdded))
{
Map.Entry<Integer, List<String>> entry = (Map.Entry)it.next();
List<String> hashTagsList = (List)entry.getValue();
if (hashTagsList.contains(s))
{
hashTagsList.remove(s);
if (hashTagsList.size() == 0) {
hashtagCounts.remove(entry.getKey());
}
hashTagsList = (List)hashtagCounts.get(new Integer(((Integer)entry.getKey()).intValue() + 1));
if (hashTagsList == null)
{
hashTagsList = new LinkedList();
hashTagsList.add(s);
hashtagCounts.put(new Integer(((Integer)entry.getKey()).intValue() + 1), hashTagsList);
}
else
{
hashTagsList.add(s);
}
hashtagAdded = true;
}
}
if (!hashtagAdded)
{
List<String> hashTagsList = (List)hashtagCounts.get(new Integer(1));
if (hashTagsList == null)
{
hashTagsList = new LinkedList();
hashTagsList.add(s);
hashtagCounts.put(new Integer(1), hashTagsList);
}
else
{
hashTagsList.add(s);
}
}
}
}
}
private void updateUserCount(Message message, Map<Integer, List<User>> userCounts)
{
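    // Same count-bucket promotion scheme as updateLinks/updateHashtags, but for the single author of the
    // message: move the user from its current bucket to the next one, or add it to bucket 1 if unseen.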
boolean userAdded = false;
User u = message.getRawMessage().getUser();
Set<Map.Entry<Integer, List<User>>> entrySet = userCounts.entrySet();
if (entrySet.size() == 0)
{
List<User> usersList = new LinkedList();
usersList.add(u);
userCounts.put(new Integer(1), usersList);
}
else
{
Iterator<Map.Entry<Integer, List<User>>> it = entrySet.iterator();
while ((it.hasNext()) && (!userAdded))
{
Map.Entry<Integer, List<User>> e = (Map.Entry)it.next();
List<User> usersList = (List)e.getValue();
if (usersList.contains(u))
{
usersList.remove(u);
if (usersList.size() == 0) {
userCounts.remove(e.getKey());
}
usersList = (List)userCounts.get(new Integer(((Integer)e.getKey()).intValue() + 1));
if (usersList == null)
{
usersList = new LinkedList();
usersList.add(u);
userCounts.put(new Integer(((Integer)e.getKey()).intValue() + 1), usersList);
}
          else
          {
            // the next count bucket already exists: just add the user to it
            usersList.add(u);
          }
          userAdded = true;
        }
}
if (!userAdded)
{
List<User> usersList = (List)userCounts.get(new Integer(1));
if (usersList == null)
{
usersList = new LinkedList();
usersList.add(u);
userCounts.put(new Integer(1), usersList);
}
else
{
usersList.add(u);
}
}
}
}
public String getCrisisLevel()
{
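    // When the average sentiment is negative, a crisis level (light/medium/dangerous) is chosen at random.
    // A level equal to the current one is only re-reported after CRISISINTERVALTHRESHOLD milliseconds have
    // passed since the last report; a different level is reported immediately.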
String crisisLevel = CrisisLevels.NONE.getName();
if (getMetrics().getSentimentAverage() < 0.0D)
{
double random = Math.random();
if (random <= 0.33D)
{
if (getCurrentCrisisLevel().equals(CrisisLevels.LIGHT.getName()))
{
if (new Date().getTime() - getLastCrisisReported().getTime() >= CRISISINTERVALTHRESHOLD)
{
crisisLevel = CrisisLevels.LIGHT.getName();
setLastCrisisReported(new Date());
setCurrentCrisisLevel(crisisLevel);
}
}
else
{
crisisLevel = CrisisLevels.LIGHT.getName();
setLastCrisisReported(new Date());
setCurrentCrisisLevel(crisisLevel);
}
}
else if (random <= 0.66D)
{
if (getCurrentCrisisLevel().equals(CrisisLevels.MEDIUM.getName()))
{
if (new Date().getTime() - getLastCrisisReported().getTime() >= CRISISINTERVALTHRESHOLD)
{
crisisLevel = CrisisLevels.MEDIUM.getName();
setLastCrisisReported(new Date());
setCurrentCrisisLevel(crisisLevel);
}
}
else
{
crisisLevel = CrisisLevels.MEDIUM.getName();
setLastCrisisReported(new Date());
          setCurrentCrisisLevel(crisisLevel);
}
}
else if (getCurrentCrisisLevel().equals(CrisisLevels.DANGEROUS.getName()))
{
if (new Date().getTime() - getLastCrisisReported().getTime() >= CRISISINTERVALTHRESHOLD)
{
crisisLevel = CrisisLevels.DANGEROUS.getName();
setLastCrisisReported(new Date());
setCurrentCrisisLevel(crisisLevel);
}
}
else
{
crisisLevel = CrisisLevels.DANGEROUS.getName();
setLastCrisisReported(new Date());
setCurrentCrisisLevel(crisisLevel);
}
}
return crisisLevel;
}
}
|
|
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.widgets;
import hudson.Functions;
import hudson.model.ModelObject;
import hudson.model.Run;
import jenkins.widgets.HistoryPageEntry;
import jenkins.widgets.HistoryPageFilter;
import org.apache.commons.collections.IteratorUtils;
import org.kohsuke.stapler.Header;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.annotation.CheckForNull;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* Displays the history of records (normally {@link Run}s) on the side panel.
*
* @param <O>
* Owner of the widget.
* @param <T>
 *     Type of individual record.
* @author Kohsuke Kawaguchi
*/
public class HistoryWidget<O extends ModelObject,T> extends Widget {
/**
* The given data model of records. Newer ones first.
*/
public Iterable<T> baseList;
/**
* Indicates the next build number that client ajax should fetch.
*/
private String nextBuildNumberToFetch;
/**
* URL of the {@link #owner}.
*/
public final String baseUrl;
public final O owner;
private boolean trimmed;
public final Adapter<? super T> adapter;
final Long newerThan;
final Long olderThan;
final String searchString;
/**
     * First transient build record. Everything newer than or equal to this will be discarded when an AJAX call is made.
*/
private String firstTransientBuildKey;
/**
* @param owner
* The parent model object that owns this widget.
*/
public HistoryWidget(O owner, Iterable<T> baseList, Adapter<? super T> adapter) {
StaplerRequest currentRequest = Stapler.getCurrentRequest();
this.adapter = adapter;
this.baseList = baseList;
this.baseUrl = Functions.getNearestAncestorUrl(currentRequest,owner);
this.owner = owner;
this.newerThan = getPagingParam(currentRequest, "newer-than");
this.olderThan = getPagingParam(currentRequest, "older-than");
this.searchString = currentRequest.getParameter("search");;
}
/**
* Title of the widget.
*/
public String getDisplayName() {
return Messages.BuildHistoryWidget_DisplayName();
}
@Override
public String getUrlName() {
return "buildHistory";
}
public String getFirstTransientBuildKey() {
return firstTransientBuildKey;
}
private Iterable<HistoryPageEntry<T>> updateFirstTransientBuildKey(Iterable<HistoryPageEntry<T>> source) {
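        // Entries are ordered newest first, so the last in-progress entry seen here is the oldest one that is
        // still building; its key becomes firstTransientBuildKey, and the view treats everything from that key
        // onward as transient (to be re-fetched by the next AJAX update).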
String key=null;
for (HistoryPageEntry<T> t : source) {
if(adapter.isBuilding(t.getEntry())) {
key = adapter.getKey(t.getEntry());
}
}
firstTransientBuildKey = key;
return source;
}
/**
* The records to be rendered this time.
*/
public Iterable<HistoryPageEntry<T>> getRenderList() {
if(trimmed) {
List<HistoryPageEntry<T>> pageEntries = toPageEntries(baseList);
if(pageEntries.size() > THRESHOLD) {
return updateFirstTransientBuildKey(pageEntries.subList(0,THRESHOLD));
} else {
trimmed=false;
return updateFirstTransientBuildKey(pageEntries);
}
} else {
// to prevent baseList's concrete type from getting picked up by <j:forEach> in view
return updateFirstTransientBuildKey(toPageEntries(baseList));
}
}
private List<HistoryPageEntry<T>> toPageEntries(Iterable<T> historyItemList) {
Iterator<T> iterator = historyItemList.iterator();
if (!iterator.hasNext()) {
            return Collections.emptyList();
}
List<HistoryPageEntry<T>> pageEntries = new ArrayList<HistoryPageEntry<T>>();
while (iterator.hasNext()) {
pageEntries.add(new HistoryPageEntry<T>(iterator.next()));
}
return pageEntries;
}
/**
* Get a {@link jenkins.widgets.HistoryPageFilter} for rendering a page of queue items.
*/
public HistoryPageFilter getHistoryPageFilter() {
HistoryPageFilter<T> historyPageFilter = newPageFilter();
historyPageFilter.add(IteratorUtils.toList(baseList.iterator()));
historyPageFilter.widget = this;
return historyPageFilter;
}
protected HistoryPageFilter<T> newPageFilter() {
HistoryPageFilter<T> historyPageFilter = new HistoryPageFilter<T>(THRESHOLD);
if (newerThan != null) {
historyPageFilter.setNewerThan(newerThan);
} else if (olderThan != null) {
historyPageFilter.setOlderThan(olderThan);
}
if (searchString != null) {
historyPageFilter.setSearchString(searchString);
}
return historyPageFilter;
}
public boolean isTrimmed() {
return trimmed;
}
public void setTrimmed(boolean trimmed) {
this.trimmed = trimmed;
}
/**
* Handles AJAX requests from browsers to update build history.
*
* @param n
     *     The build 'number' to fetch. This is a string because various variants
     *     use non-numbers as the build key.
*/
public void doAjax( StaplerRequest req, StaplerResponse rsp,
@Header("n") String n ) throws IOException, ServletException {
rsp.setContentType("text/html;charset=UTF-8");
// pick up builds to send back
List<T> items = new ArrayList<T>();
if (n != null) {
String nn=null; // we'll compute next n here
// list up all builds >=n.
for (T t : baseList) {
if(adapter.compare(t,n)>=0) {
items.add(t);
if(adapter.isBuilding(t))
nn = adapter.getKey(t); // the next fetch should start from youngest build in progress
} else
break;
}
if (nn==null) {
if (items.isEmpty()) {
// nothing to report back. next fetch should retry the same 'n'
nn=n;
} else {
// every record fetched this time is frozen. next fetch should start from the next build
nn=adapter.getNextKey(adapter.getKey(items.get(0)));
}
}
baseList = items;
rsp.setHeader("n",nn);
firstTransientBuildKey = nn; // all builds >= nn should be marked transient
}
HistoryPageFilter page = getHistoryPageFilter();
updateFirstTransientBuildKey(page.runs);
req.getView(page,"ajaxBuildHistory.jelly").forward(req,rsp);
}
static final int THRESHOLD = Integer.getInteger(HistoryWidget.class.getName()+".threshold",30);
public String getNextBuildNumberToFetch() {
return nextBuildNumberToFetch;
}
public void setNextBuildNumberToFetch(String nextBuildNumberToFetch) {
this.nextBuildNumberToFetch = nextBuildNumberToFetch;
}
public interface Adapter<T> {
/**
* If record is newer than the key, return a positive number.
*/
int compare(T record, String key);
String getKey(T record);
boolean isBuilding(T record);
String getNextKey(String key);
}
private Long getPagingParam(@CheckForNull StaplerRequest currentRequest, @CheckForNull String name) {
if (currentRequest == null || name == null) {
return null;
}
String paramVal = currentRequest.getParameter(name);
if (paramVal == null) {
return null;
}
try {
            return Long.valueOf(paramVal);
} catch (NumberFormatException nfe) {
return null;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.index;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
import org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode;
import org.apache.phoenix.hbase.index.table.HTableInterfaceReference;
import org.apache.phoenix.hbase.index.write.DelegateIndexFailurePolicy;
import org.apache.phoenix.hbase.index.write.KillServerOnFailurePolicy;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.PIndexState;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTable.IndexType;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.MetaDataUtil;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.QueryUtil;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.ServerUtil;
import com.google.common.collect.Multimap;
/**
*
* Handler called in the event that index updates cannot be written to their
* region server. First attempts to disable the index and failing that falls
* back to the default behavior of killing the region server.
*
*/
public class PhoenixIndexFailurePolicy extends DelegateIndexFailurePolicy {
private static final Log LOG = LogFactory.getLog(PhoenixIndexFailurePolicy.class);
public static final String DISABLE_INDEX_ON_WRITE_FAILURE = "DISABLE_INDEX_ON_WRITE_FAILURE";
public static final String REBUILD_INDEX_ON_WRITE_FAILURE = "REBUILD_INDEX_ON_WRITE_FAILURE";
public static final String BLOCK_DATA_TABLE_WRITES_ON_WRITE_FAILURE = "BLOCK_DATA_TABLE_WRITES_ON_WRITE_FAILURE";
private RegionCoprocessorEnvironment env;
private boolean blockDataTableWritesOnFailure;
private boolean disableIndexOnFailure;
private boolean rebuildIndexOnFailure;
public PhoenixIndexFailurePolicy() {
super(new KillServerOnFailurePolicy());
}
@Override
public void setup(Stoppable parent, RegionCoprocessorEnvironment env) {
super.setup(parent, env);
this.env = env;
rebuildIndexOnFailure = env.getConfiguration().getBoolean(QueryServices.INDEX_FAILURE_HANDLING_REBUILD_ATTRIB,
QueryServicesOptions.DEFAULT_INDEX_FAILURE_HANDLING_REBUILD);
HTableDescriptor htd = env.getRegion().getTableDesc();
// If rebuild index is turned off globally, no need to check the table because the background thread
// won't be running in this case
if (rebuildIndexOnFailure) {
String value = htd.getValue(REBUILD_INDEX_ON_WRITE_FAILURE);
if (value != null) {
rebuildIndexOnFailure = Boolean.parseBoolean(value);
}
}
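        // Table-level descriptor values, when present, take precedence over the cluster-wide configuration
        // for the disable-index and block-data-table-writes behaviour.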
String value = htd.getValue(DISABLE_INDEX_ON_WRITE_FAILURE);
if (value == null) {
disableIndexOnFailure = env.getConfiguration().getBoolean(QueryServices.INDEX_FAILURE_DISABLE_INDEX,
QueryServicesOptions.DEFAULT_INDEX_FAILURE_DISABLE_INDEX);
} else {
disableIndexOnFailure = Boolean.parseBoolean(value);
}
value = htd.getValue(BLOCK_DATA_TABLE_WRITES_ON_WRITE_FAILURE);
if (value == null) {
blockDataTableWritesOnFailure = env.getConfiguration().getBoolean(QueryServices.INDEX_FAILURE_BLOCK_WRITE,
QueryServicesOptions.DEFAULT_INDEX_FAILURE_BLOCK_WRITE);
} else {
blockDataTableWritesOnFailure = Boolean.parseBoolean(value);
}
}
/**
* Attempt to disable the index table when we can't write to it, preventing future updates until the index is
* brought up to date, but allowing historical reads to continue until then.
* <p>
* In the case that we cannot reach the metadata information, we will fall back to the default policy and kill
* this server, so we can attempt to replay the edits on restart.
* </p>
* @param attempted the mutations that were attempted to be written and the tables to which they were written
* @param cause root cause of the failure
*/
@Override
public void handleFailure(Multimap<HTableInterfaceReference, Mutation> attempted, Exception cause) throws IOException {
boolean throwing = true;
long timestamp = HConstants.LATEST_TIMESTAMP;
try {
timestamp = handleFailureWithExceptions(attempted, cause);
throwing = false;
} catch (Throwable t) {
LOG.warn("handleFailure failed", t);
super.handleFailure(attempted, cause);
throwing = false;
} finally {
if (!throwing) {
throw ServerUtil.wrapInDoNotRetryIOException("Unable to update the following indexes: " + attempted.keySet(), cause, timestamp);
}
}
}
private long handleFailureWithExceptions(Multimap<HTableInterfaceReference, Mutation> attempted,
Exception cause) throws Throwable {
Set<HTableInterfaceReference> refs = attempted.asMap().keySet();
Map<String, Long> indexTableNames = new HashMap<String, Long>(refs.size());
// start by looking at all the tables to which we attempted to write
long timestamp = 0;
boolean leaveIndexActive = blockDataTableWritesOnFailure || !disableIndexOnFailure;
for (HTableInterfaceReference ref : refs) {
long minTimeStamp = 0;
// get the minimum timestamp across all the mutations we attempted on that table
// FIXME: all cell timestamps should be the same
Collection<Mutation> mutations = attempted.get(ref);
if (mutations != null) {
for (Mutation m : mutations) {
for (List<Cell> kvs : m.getFamilyCellMap().values()) {
for (Cell kv : kvs) {
if (minTimeStamp == 0 || (kv.getTimestamp() >= 0 && minTimeStamp > kv.getTimestamp())) {
minTimeStamp = kv.getTimestamp();
}
}
}
}
}
timestamp = minTimeStamp;
// If the data table has local index column families then get local indexes to disable.
if (ref.getTableName().equals(env.getRegion().getTableDesc().getNameAsString())
&& MetaDataUtil.hasLocalIndexColumnFamily(env.getRegion().getTableDesc())) {
for (String tableName : getLocalIndexNames(ref, mutations)) {
indexTableNames.put(tableName, minTimeStamp);
}
} else {
indexTableNames.put(ref.getTableName(), minTimeStamp);
}
}
// Nothing to do if we're not disabling the index and not rebuilding on failure
if (!disableIndexOnFailure && !rebuildIndexOnFailure) {
return timestamp;
}
PIndexState newState = disableIndexOnFailure ? PIndexState.DISABLE : PIndexState.ACTIVE;
// for all the index tables that we've found, try to disable them and if that fails, try to
for (Map.Entry<String, Long> tableTimeElement :indexTableNames.entrySet()){
String indexTableName = tableTimeElement.getKey();
long minTimeStamp = tableTimeElement.getValue();
// We need a way of differentiating the block writes to data table case from
// the leave index active case. In either case, we need to know the time stamp
// at which writes started failing so we can rebuild from that point. If we
// keep the index active *and* have a positive INDEX_DISABLE_TIMESTAMP_BYTES,
// then writes to the data table will be blocked (this is client side logic
// and we can't change this in a minor release). So we use the sign of the
// time stamp to differentiate.
if (!disableIndexOnFailure && !blockDataTableWritesOnFailure) {
minTimeStamp *= -1;
}
// Disable the index by using the updateIndexState method of MetaDataProtocol end point coprocessor.
HTableInterface systemTable = env.getTable(SchemaUtil
.getPhysicalTableName(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES, env.getConfiguration()));
MetaDataMutationResult result = IndexUtil.setIndexDisableTimeStamp(indexTableName, minTimeStamp,
systemTable, newState);
if (result.getMutationCode() == MutationCode.TABLE_NOT_FOUND) {
LOG.info("Index " + indexTableName + " has been dropped. Ignore uncommitted mutations");
continue;
}
if (result.getMutationCode() != MutationCode.TABLE_ALREADY_EXISTS) {
if (leaveIndexActive) {
LOG.warn("Attempt to update INDEX_DISABLE_TIMESTAMP " + " failed with code = "
+ result.getMutationCode());
// If we're not disabling the index, then we don't want to throw as throwing
// will lead to the RS being shutdown.
if (blockDataTableWritesOnFailure) {
throw new DoNotRetryIOException("Attempt to update INDEX_DISABLE_TIMESTAMP failed.");
}
} else {
LOG.warn("Attempt to disable index " + indexTableName + " failed with code = "
+ result.getMutationCode() + ". Will use default failure policy instead.");
throw new DoNotRetryIOException("Attempt to disable " + indexTableName + " failed.");
}
}
if (leaveIndexActive)
LOG.info("Successfully update INDEX_DISABLE_TIMESTAMP for " + indexTableName + " due to an exception while writing updates.",
cause);
else
LOG.info("Successfully disabled index " + indexTableName + " due to an exception while writing updates.",
cause);
}
// Return the cell time stamp (note they should all be the same)
return timestamp;
}
private Collection<? extends String> getLocalIndexNames(HTableInterfaceReference ref,
Collection<Mutation> mutations) throws IOException {
Set<String> indexTableNames = new HashSet<String>(1);
PhoenixConnection conn = null;
try {
conn = QueryUtil.getConnectionOnServer(this.env.getConfiguration()).unwrap(
PhoenixConnection.class);
PTable dataTable = PhoenixRuntime.getTableNoCache(conn, ref.getTableName());
List<PTable> indexes = dataTable.getIndexes();
// local index used to get view id from index mutation row key.
PTable localIndex = null;
Map<ImmutableBytesWritable, String> localIndexNames =
new HashMap<ImmutableBytesWritable, String>();
for (PTable index : indexes) {
if (index.getIndexType() == IndexType.LOCAL
&& index.getIndexState() == PIndexState.ACTIVE) {
if (localIndex == null) localIndex = index;
localIndexNames.put(new ImmutableBytesWritable(MetaDataUtil.getViewIndexIdDataType().toBytes(
index.getViewIndexId())), index.getName().getString());
}
}
if (localIndex == null) {
return Collections.emptySet();
}
IndexMaintainer indexMaintainer = localIndex.getIndexMaintainer(dataTable, conn);
HRegionInfo regionInfo = this.env.getRegion().getRegionInfo();
int offset =
regionInfo.getStartKey().length == 0 ? regionInfo.getEndKey().length
: regionInfo.getStartKey().length;
byte[] viewId = null;
for (Mutation mutation : mutations) {
viewId =
indexMaintainer.getViewIndexIdFromIndexRowKey(
new ImmutableBytesWritable(mutation.getRow(), offset,
mutation.getRow().length - offset));
String indexTableName = localIndexNames.get(new ImmutableBytesWritable(viewId));
indexTableNames.add(indexTableName);
}
} catch (ClassNotFoundException e) {
throw new IOException(e);
} catch (SQLException e) {
throw new IOException(e);
} finally {
if (conn != null) {
try {
conn.close();
} catch (SQLException e) {
throw new IOException(e);
}
}
}
return indexTableNames;
}
}
|
|
/* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.engedu.ghost;
import android.content.res.AssetManager;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.text.method.KeyListener;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import java.io.IOException;
import java.io.InputStream;
import java.util.Random;
public class GhostActivityEditText extends AppCompatActivity {
private static final String TAG = "GhostActivity";
private static final String COMPUTER_TURN = "Computer's turn";
private static final String USER_TURN = "Your turn";
private static final String KEY_USER_TURN = "keyUserTurn";
private static final String KEY_CURRENT_WORD = "keyCurrentWord";
private static final String KEY_SAVED_STATUS = "keySavedStatus";
private GhostDictionary dictionary;
private boolean userTurn = false;
private Random random = new Random();
private String currentWord = "";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_ghost);
AssetManager assetManager = getAssets();
try {
InputStream inputStream = assetManager.open("words.txt");
dictionary = new SimpleDictionary(inputStream);
} catch (IOException e) {
Toast toast = Toast.makeText(this, "Could not load dictionary", Toast.LENGTH_LONG);
toast.show();
}
if (savedInstanceState == null) {
onStart(null);
} else {
userTurn = savedInstanceState.getBoolean(KEY_USER_TURN);
currentWord = savedInstanceState.getString(KEY_CURRENT_WORD);
String status = savedInstanceState.getString(KEY_SAVED_STATUS);
((EditText) findViewById(R.id.ghostText)).setText(currentWord);
((TextView) findViewById(R.id.gameStatus)).setText(status);
}
final EditText ghostText = ((EditText) findViewById(R.id.ghostText));
ghostText.addTextChangedListener(new TextWatcher() {
private String textBeforeChange;
private boolean userInitiated = true;
@Override
public void beforeTextChanged(CharSequence charSequence, int start, int count,
int after) {
textBeforeChange = charSequence.toString();
}
@Override
public void onTextChanged(CharSequence charSequence, int start, int before, int count) {
}
@Override
public void afterTextChanged(Editable editable) {
ghostText.setSelection(editable.length());
if (currentWord.length() == 0 && editable.length() == 0) {
// We are resetting the game, so we don't need to do any saving.
return;
}
if (!userInitiated) {
// The text watcher can get in a loop if we make changes to the editable here
// unless we check whether the user initiated the change or not.
userInitiated = true;
return;
}
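// Accept only a single character appended to the previous text; anything else
// (a deletion, a mid-text insertion, or a multi-character paste) restores the prior contents.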
if (userTurn && (editable.length() == 0 ||
!editable.subSequence(0, editable.length() - 1).toString()
.equals(textBeforeChange))) {
resetText(editable);
} else if (userTurn) {
char last = editable.charAt(editable.length() - 1);
if (('A' <= last && last <= 'Z') || ('a' <= last && last <= 'z')) {
currentWord += (last + "").toLowerCase();
computerTurn();
} else {
resetText(editable);
}
}
}
private void resetText(Editable editable) {
userInitiated = false;
editable.replace(0, editable.length(), textBeforeChange);
}
});
}
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putBoolean(KEY_USER_TURN, userTurn);
outState.putString(KEY_CURRENT_WORD, currentWord);
outState.putString(KEY_SAVED_STATUS,
((TextView) findViewById(R.id.gameStatus)).getText().toString());
super.onSaveInstanceState(outState);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_ghost, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Handler for the "Reset" button.
* Randomly determines whether the game starts with a user turn or a computer turn.
* @param view the clicked view (unused)
* @return true
*/
public boolean onStart(View view) {
userTurn = random.nextBoolean();
currentWord = "";
TextView text = (TextView) findViewById(R.id.ghostText);
text.setText(currentWord);
TextView label = (TextView) findViewById(R.id.gameStatus);
if (userTurn) {
label.setText(USER_TURN);
} else {
label.setText(COMPUTER_TURN);
computerTurn();
}
return true;
}
/**
* Handler for the "Challenge" button.
* @param unused
*/
public void challenge(View unused) {
doChallenge(/* from user */ true);
}
/**
* Challenges the current word. Returns true if the challenge was successful, false otherwise.
* @param fromUser true if the challenge was initiated by the user, false if by the computer
* @return true if the challenge was successful, false otherwise
*/
private boolean doChallenge(boolean fromUser) {
TextView status = (TextView) findViewById(R.id.gameStatus);
if (dictionary.isWord(currentWord)) {
if (!fromUser) {
// It is a word! The user loses.
status.setText(String.format("%s is a word. The computer wins!", currentWord));
return true;
} else {
// The computer loses, it has formed a word.
status.setText(String.format("%s is a word. You win!", currentWord));
return true;
}
} else if (TextUtils.isEmpty(dictionary.getAnyWordStartingWith(currentWord))) {
if (!fromUser) {
// This is not a valid word prefix. The user loses.
status.setText(String.format("%s is an invalid prefix. The computer wins!",
currentWord));
return true;
} else {
status.setText(String.format("%s is an invalid prefix. You win!", currentWord));
return true;
}
}
if (fromUser) {
// We've challenged and failed. The user loses.
status.setText(String.format("%s is a valid prefix and not a word. The computer wins!",
currentWord));
}
return false;
}
private void computerTurn() {
TextView status = (TextView) findViewById(R.id.gameStatus);
// Checks if the user's currentWord is a full word, or if it is an invalid prefix.
boolean challengeSuccessful = doChallenge(/* from the computer */ false);
if (challengeSuccessful) {
return;
}
userTurn = false;
// Do computer turn stuff then make it the user's turn again
status.setText(COMPUTER_TURN);
String next = dictionary.getGoodWordStartingWith(currentWord);
currentWord += next.charAt(currentWord.length());
// We can now update the text field with the computer's word.
((TextView) findViewById(R.id.ghostText)).setText(currentWord);
// Keep playing...
userTurn = true;
status.setText(USER_TURN);
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.stats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.cache.request.RequestCacheStats;
import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.index.fielddata.FieldDataStats;
import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.get.GetStats;
import org.elasticsearch.index.indexing.IndexingStats;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.percolator.stats.PercolateStats;
import org.elasticsearch.index.recovery.RecoveryStats;
import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats;
import org.elasticsearch.index.shard.DocsStats;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.index.suggest.stats.SuggestStats;
import org.elasticsearch.index.translog.TranslogStats;
import org.elasticsearch.index.warmer.WarmerStats;
import org.elasticsearch.search.suggest.completion.CompletionStats;
import java.io.IOException;
/**
*/
public class CommonStats implements Streamable, ToXContent {
public CommonStats() {
this(CommonStatsFlags.NONE);
}
public CommonStats(CommonStatsFlags flags) {
CommonStatsFlags.Flag[] setFlags = flags.getFlags();
for (CommonStatsFlags.Flag flag : setFlags) {
switch (flag) {
case Docs:
docs = new DocsStats();
break;
case Store:
store = new StoreStats();
break;
case Indexing:
indexing = new IndexingStats();
break;
case Get:
get = new GetStats();
break;
case Search:
search = new SearchStats();
break;
case Merge:
merge = new MergeStats();
break;
case Refresh:
refresh = new RefreshStats();
break;
case Flush:
flush = new FlushStats();
break;
case Warmer:
warmer = new WarmerStats();
break;
case QueryCache:
queryCache = new QueryCacheStats();
break;
case FieldData:
fieldData = new FieldDataStats();
break;
case Completion:
completion = new CompletionStats();
break;
case Segments:
segments = new SegmentsStats();
break;
case Percolate:
percolate = new PercolateStats();
break;
case Translog:
translog = new TranslogStats();
break;
case Suggest:
suggest = new SuggestStats();
break;
case RequestCache:
requestCache = new RequestCacheStats();
break;
case Recovery:
recoveryStats = new RecoveryStats();
break;
default:
throw new IllegalStateException("Unknown Flag: " + flag);
}
}
}
public CommonStats(IndexShard indexShard, CommonStatsFlags flags) {
CommonStatsFlags.Flag[] setFlags = flags.getFlags();
for (CommonStatsFlags.Flag flag : setFlags) {
switch (flag) {
case Docs:
docs = indexShard.docStats();
break;
case Store:
store = indexShard.storeStats();
break;
case Indexing:
indexing = indexShard.indexingStats(flags.types());
break;
case Get:
get = indexShard.getStats();
break;
case Search:
search = indexShard.searchStats(flags.groups());
break;
case Merge:
merge = indexShard.mergeStats();
break;
case Refresh:
refresh = indexShard.refreshStats();
break;
case Flush:
flush = indexShard.flushStats();
break;
case Warmer:
warmer = indexShard.warmerStats();
break;
case QueryCache:
queryCache = indexShard.queryCacheStats();
break;
case FieldData:
fieldData = indexShard.fieldDataStats(flags.fieldDataFields());
break;
case Completion:
completion = indexShard.completionStats(flags.completionDataFields());
break;
case Segments:
segments = indexShard.segmentStats();
break;
case Percolate:
percolate = indexShard.shardPercolateService().stats();
break;
case Translog:
translog = indexShard.translogStats();
break;
case Suggest:
suggest = indexShard.suggestStats();
break;
case RequestCache:
requestCache = indexShard.requestCache().stats();
break;
case Recovery:
recoveryStats = indexShard.recoveryStats();
break;
default:
throw new IllegalStateException("Unknown Flag: " + flag);
}
}
}
@Nullable
public DocsStats docs;
@Nullable
public StoreStats store;
@Nullable
public IndexingStats indexing;
@Nullable
public GetStats get;
@Nullable
public SearchStats search;
@Nullable
public MergeStats merge;
@Nullable
public RefreshStats refresh;
@Nullable
public FlushStats flush;
@Nullable
public WarmerStats warmer;
@Nullable
public QueryCacheStats queryCache;
@Nullable
public FieldDataStats fieldData;
@Nullable
public PercolateStats percolate;
@Nullable
public CompletionStats completion;
@Nullable
public SegmentsStats segments;
@Nullable
public TranslogStats translog;
@Nullable
public SuggestStats suggest;
@Nullable
public RequestCacheStats requestCache;
@Nullable
public RecoveryStats recoveryStats;
public void add(CommonStats stats) {
if (docs == null) {
if (stats.getDocs() != null) {
docs = new DocsStats();
docs.add(stats.getDocs());
}
} else {
docs.add(stats.getDocs());
}
if (store == null) {
if (stats.getStore() != null) {
store = new StoreStats();
store.add(stats.getStore());
}
} else {
store.add(stats.getStore());
}
if (indexing == null) {
if (stats.getIndexing() != null) {
indexing = new IndexingStats();
indexing.add(stats.getIndexing());
}
} else {
indexing.add(stats.getIndexing());
}
if (get == null) {
if (stats.getGet() != null) {
get = new GetStats();
get.add(stats.getGet());
}
} else {
get.add(stats.getGet());
}
if (search == null) {
if (stats.getSearch() != null) {
search = new SearchStats();
search.add(stats.getSearch());
}
} else {
search.add(stats.getSearch());
}
if (merge == null) {
if (stats.getMerge() != null) {
merge = new MergeStats();
merge.add(stats.getMerge());
}
} else {
merge.add(stats.getMerge());
}
if (refresh == null) {
if (stats.getRefresh() != null) {
refresh = new RefreshStats();
refresh.add(stats.getRefresh());
}
} else {
refresh.add(stats.getRefresh());
}
if (flush == null) {
if (stats.getFlush() != null) {
flush = new FlushStats();
flush.add(stats.getFlush());
}
} else {
flush.add(stats.getFlush());
}
if (warmer == null) {
if (stats.getWarmer() != null) {
warmer = new WarmerStats();
warmer.add(stats.getWarmer());
}
} else {
warmer.add(stats.getWarmer());
}
if (queryCache == null) {
if (stats.getQueryCache() != null) {
queryCache = new QueryCacheStats();
queryCache.add(stats.getQueryCache());
}
} else {
queryCache.add(stats.getQueryCache());
}
if (fieldData == null) {
if (stats.getFieldData() != null) {
fieldData = new FieldDataStats();
fieldData.add(stats.getFieldData());
}
} else {
fieldData.add(stats.getFieldData());
}
if (percolate == null) {
if (stats.getPercolate() != null) {
percolate = new PercolateStats();
percolate.add(stats.getPercolate());
}
} else {
percolate.add(stats.getPercolate());
}
if (completion == null) {
if (stats.getCompletion() != null) {
completion = new CompletionStats();
completion.add(stats.getCompletion());
}
} else {
completion.add(stats.getCompletion());
}
if (segments == null) {
if (stats.getSegments() != null) {
segments = new SegmentsStats();
segments.add(stats.getSegments());
}
} else {
segments.add(stats.getSegments());
}
if (translog == null) {
if (stats.getTranslog() != null) {
translog = new TranslogStats();
translog.add(stats.getTranslog());
}
} else {
translog.add(stats.getTranslog());
}
if (suggest == null) {
if (stats.getSuggest() != null) {
suggest = new SuggestStats();
suggest.add(stats.getSuggest());
}
} else {
suggest.add(stats.getSuggest());
}
if (requestCache == null) {
if (stats.getRequestCache() != null) {
requestCache = new RequestCacheStats();
requestCache.add(stats.getRequestCache());
}
} else {
requestCache.add(stats.getRequestCache());
}
if (recoveryStats == null) {
if (stats.getRecoveryStats() != null) {
recoveryStats = new RecoveryStats();
recoveryStats.add(stats.getRecoveryStats());
}
} else {
recoveryStats.add(stats.getRecoveryStats());
}
}
@Nullable
public DocsStats getDocs() {
return this.docs;
}
@Nullable
public StoreStats getStore() {
return store;
}
@Nullable
public IndexingStats getIndexing() {
return indexing;
}
@Nullable
public GetStats getGet() {
return get;
}
@Nullable
public SearchStats getSearch() {
return search;
}
@Nullable
public MergeStats getMerge() {
return merge;
}
@Nullable
public RefreshStats getRefresh() {
return refresh;
}
@Nullable
public FlushStats getFlush() {
return flush;
}
@Nullable
public WarmerStats getWarmer() {
return this.warmer;
}
@Nullable
public QueryCacheStats getQueryCache() {
return this.queryCache;
}
@Nullable
public FieldDataStats getFieldData() {
return this.fieldData;
}
@Nullable
public PercolateStats getPercolate() {
return percolate;
}
@Nullable
public CompletionStats getCompletion() {
return completion;
}
@Nullable
public SegmentsStats getSegments() {
return segments;
}
@Nullable
public TranslogStats getTranslog() {
return translog;
}
@Nullable
public SuggestStats getSuggest() {
return suggest;
}
@Nullable
public RequestCacheStats getRequestCache() {
return requestCache;
}
@Nullable
public RecoveryStats getRecoveryStats() {
return recoveryStats;
}
public static CommonStats readCommonStats(StreamInput in) throws IOException {
CommonStats stats = new CommonStats();
stats.readFrom(in);
return stats;
}
/**
* Utility method which computes total memory by adding
* FieldData, Percolate, Segments (memory, index writer, version map)
*/
public ByteSizeValue getTotalMemory() {
long size = 0;
if (this.getFieldData() != null) {
size += this.getFieldData().getMemorySizeInBytes();
}
if (this.getQueryCache() != null) {
size += this.getQueryCache().getMemorySizeInBytes();
}
if (this.getPercolate() != null) {
size += this.getPercolate().getMemorySizeInBytes();
}
if (this.getSegments() != null) {
size += this.getSegments().getMemoryInBytes() +
this.getSegments().getIndexWriterMemoryInBytes() +
this.getSegments().getVersionMapMemoryInBytes();
}
return new ByteSizeValue(size);
}
@Override
public void readFrom(StreamInput in) throws IOException {
if (in.readBoolean()) {
docs = DocsStats.readDocStats(in);
}
if (in.readBoolean()) {
store = StoreStats.readStoreStats(in);
}
if (in.readBoolean()) {
indexing = IndexingStats.readIndexingStats(in);
}
if (in.readBoolean()) {
get = GetStats.readGetStats(in);
}
if (in.readBoolean()) {
search = SearchStats.readSearchStats(in);
}
if (in.readBoolean()) {
merge = MergeStats.readMergeStats(in);
}
if (in.readBoolean()) {
refresh = RefreshStats.readRefreshStats(in);
}
if (in.readBoolean()) {
flush = FlushStats.readFlushStats(in);
}
if (in.readBoolean()) {
warmer = WarmerStats.readWarmerStats(in);
}
if (in.readBoolean()) {
queryCache = QueryCacheStats.readQueryCacheStats(in);
}
if (in.readBoolean()) {
fieldData = FieldDataStats.readFieldDataStats(in);
}
if (in.readBoolean()) {
percolate = PercolateStats.readPercolateStats(in);
}
if (in.readBoolean()) {
completion = CompletionStats.readCompletionStats(in);
}
if (in.readBoolean()) {
segments = SegmentsStats.readSegmentsStats(in);
}
translog = in.readOptionalStreamable(new TranslogStats());
suggest = in.readOptionalStreamable(new SuggestStats());
requestCache = in.readOptionalStreamable(new RequestCacheStats());
recoveryStats = in.readOptionalStreamable(new RecoveryStats());
}
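// Note: the wire format is a presence boolean followed by the stats object for each field,
// in exactly the order used by writeTo(StreamOutput) below; the trailing four fields rely on
// readOptionalStreamable/writeOptionalStreamable, which handle that presence flag internally.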
@Override
public void writeTo(StreamOutput out) throws IOException {
if (docs == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
docs.writeTo(out);
}
if (store == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
store.writeTo(out);
}
if (indexing == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
indexing.writeTo(out);
}
if (get == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
get.writeTo(out);
}
if (search == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
search.writeTo(out);
}
if (merge == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
merge.writeTo(out);
}
if (refresh == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
refresh.writeTo(out);
}
if (flush == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
flush.writeTo(out);
}
if (warmer == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
warmer.writeTo(out);
}
if (queryCache == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
queryCache.writeTo(out);
}
if (fieldData == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
fieldData.writeTo(out);
}
if (percolate == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
percolate.writeTo(out);
}
if (completion == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
completion.writeTo(out);
}
if (segments == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
segments.writeTo(out);
}
out.writeOptionalStreamable(translog);
out.writeOptionalStreamable(suggest);
out.writeOptionalStreamable(requestCache);
out.writeOptionalStreamable(recoveryStats);
}
// note, requires a wrapping object
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (docs != null) {
docs.toXContent(builder, params);
}
if (store != null) {
store.toXContent(builder, params);
}
if (indexing != null) {
indexing.toXContent(builder, params);
}
if (get != null) {
get.toXContent(builder, params);
}
if (search != null) {
search.toXContent(builder, params);
}
if (merge != null) {
merge.toXContent(builder, params);
}
if (refresh != null) {
refresh.toXContent(builder, params);
}
if (flush != null) {
flush.toXContent(builder, params);
}
if (warmer != null) {
warmer.toXContent(builder, params);
}
if (queryCache != null) {
queryCache.toXContent(builder, params);
}
if (fieldData != null) {
fieldData.toXContent(builder, params);
}
if (percolate != null) {
percolate.toXContent(builder, params);
}
if (completion != null) {
completion.toXContent(builder, params);
}
if (segments != null) {
segments.toXContent(builder, params);
}
if (translog != null) {
translog.toXContent(builder, params);
}
if (suggest != null) {
suggest.toXContent(builder, params);
}
if (requestCache != null) {
requestCache.toXContent(builder, params);
}
if (recoveryStats != null) {
recoveryStats.toXContent(builder, params);
}
return builder;
}
}
|
|
/*
* Copyright (C) 2012 uPhyca Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.uphyca.testing.support.v4;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.uphyca.testing.AndroidTestRunner;
import com.uphyca.testing.support.v4.FragmentUnitTestCaseTest.MyFragment;
@RunWith(AndroidTestRunner.class)
public class FragmentUnitTestCaseTest extends FragmentUnitTestCase<MyFragment> {
public static final class MyFragment extends Fragment {
public boolean onCreateCalled;
public boolean onAttachCalled;
public boolean onActivityCreatedCalled;
public boolean onStartCalled;
public boolean onResumeCalled;
public boolean onPauseCalled;
public boolean onStopCalled;
public boolean onDestroyCalled;
public boolean onDettachCalled;
public boolean onCreateViewCalled;
public boolean onDestroyViewCalled;
public boolean onViewCreatedCalled;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
onCreateCalled = true;
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
onAttachCalled = true;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
onActivityCreatedCalled = true;
}
@Override
public void onStart() {
super.onStart();
onStartCalled = true;
}
@Override
public void onResume() {
super.onResume();
onResumeCalled = true;
}
@Override
public void onPause() {
super.onPause();
onPauseCalled = true;
}
@Override
public void onStop() {
super.onStop();
onStopCalled = true;
}
@Override
public void onDestroy() {
super.onDestroy();
onDestroyCalled = true;
}
@Override
public void onDetach() {
super.onDetach();
onDettachCalled = true;
}
@Override
public View onCreateView(LayoutInflater inflater,
ViewGroup container,
Bundle savedInstanceState) {
try {
return null;
} finally {
onCreateViewCalled = true;
}
}
@Override
public void onViewCreated(View view,
Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
onViewCreatedCalled = true;
}
@Override
public void onDestroyView() {
super.onDestroyView();
onDestroyViewCalled = true;
}
}
public FragmentUnitTestCaseTest() {
super(MyFragment.class);
}
@Override
@After
public void tearDown() throws Exception {
MyFragment fragment = getFragment();
assertFalse(fragment.onDestroyViewCalled);
assertFalse(fragment.onDestroyCalled);
assertFalse(fragment.onDettachCalled);
super.tearDown();
assertTrue(fragment.onDestroyCalled);
assertTrue(fragment.onDettachCalled);
}
@Test
public void testPreconditions() {
startFragment(null, null, null);
assertNotNull(getFragment());
}
@Test
public void testOnCreate() {
startFragment(null, null, null);
assertTrue(getFragment().onAttachCalled);
assertTrue(getFragment().onCreateCalled);
}
@Test
public void testLifeCycles() {
// Activate fragment
startFragment(null, null, null);
assertTrue(getFragment().onAttachCalled);
assertTrue(getFragment().onCreateCalled);
assertFalse(getFragment().onCreateViewCalled);
assertFalse(getFragment().onActivityCreatedCalled);
assertFalse(getFragment().onViewCreatedCalled);
getFragmentInstrumentation().callFragmentOnActivityCreated();
assertTrue(getFragment().onCreateViewCalled);
assertTrue(getFragment().onViewCreatedCalled);
assertTrue(getFragment().onActivityCreatedCalled);
assertNull(getFragment().getView());
assertFalse(getFragment().onStartCalled);
getFragmentInstrumentation().callFragmentOnStart();
assertTrue(getFragment().onStartCalled);
assertFalse(getFragment().onResumeCalled);
getFragmentInstrumentation().callFragmentOnResume();
assertTrue(getFragment().onResumeCalled);
// Now fragment is active
// Deactivate fragment
assertFalse(getFragment().onPauseCalled);
getFragmentInstrumentation().callFragmentOnPause();
assertTrue(getFragment().onPauseCalled);
assertFalse(getFragment().onStopCalled);
getFragmentInstrumentation().callFragmentOnStop();
assertTrue(getFragment().onStopCalled);
}
@Test
public void shouldCaptureStartedActivityIntent() {
// Given
startFragment(null, null, null);
// When
Intent startIntent = new Intent(Intent.ACTION_VIEW);
getFragment().startActivity(startIntent);
// Then
Intent startedIntent = getStartedActivityIntent();
assertNotNull(startedIntent);
}
}
|
|
/*
* Copyright (C) 2012 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core;
import com.datastax.driver.core.exceptions.*;
import java.net.InetAddress;
import java.util.HashMap;
import org.apache.commons.lang.StringUtils;
import org.testng.annotations.Test;
import static com.datastax.driver.core.TestUtils.waitForDown;
import static org.testng.Assert.*;
/**
* Tests Exception classes with separate clusters per test, when applicable
*/
public class ExceptionsTest {
/**
* Tests the AlreadyExistsException.
* Create a keyspace twice and a table twice.
* Catch and test all the exception methods.
*/
@Test(groups = "short")
public void alreadyExistsException() throws Throwable {
Cluster.Builder builder = Cluster.builder();
CCMBridge.CCMCluster c = CCMBridge.buildCluster(1, builder);
try {
String keyspace = "TestKeyspace";
String table = "TestTable";
String[] cqlCommands = new String[]{
String.format(TestUtils.CREATE_KEYSPACE_SIMPLE_FORMAT, keyspace, 1),
"USE " + keyspace,
String.format(TestUtils.CREATE_TABLE_SIMPLE_FORMAT, table)
};
// Create the schema once
c.session.execute(cqlCommands[0]);
c.session.execute(cqlCommands[1]);
c.session.execute(cqlCommands[2]);
// Try creating the keyspace again
try {
c.session.execute(cqlCommands[0]);
} catch (AlreadyExistsException e) {
String expected = String.format("Keyspace %s already exists", keyspace.toLowerCase());
assertEquals(e.getMessage(), expected);
assertEquals(e.getKeyspace(), keyspace.toLowerCase());
assertEquals(e.getTable(), null);
assertEquals(e.wasTableCreation(), false);
}
c.session.execute(cqlCommands[1]);
// Try creating the table again
try {
c.session.execute(cqlCommands[2]);
} catch (AlreadyExistsException e) {
// TODO: Pending CASSANDRA-5362 this won't work. So let's re-enable this once C* 1.2.4
// is released
//assertEquals(e.getKeyspace(), keyspace.toLowerCase());
//assertEquals(e.getTable(), table.toLowerCase());
assertEquals(e.wasTableCreation(), true);
}
} catch (Throwable e) {
throw e;
} finally {
c.discard();
}
}
/**
* Placeholder test for the AuthenticationException.
* Testing pending CCM authenticated sessions integration.
*/
public void authenticationException() throws Exception {
// TODO: Modify CCM to accept authenticated sessions
}
/**
* Tests DriverInternalError.
* Tests basic message, rethrow, and copy abilities.
*/
@Test(groups = "unit")
public void driverInternalError() throws Exception {
String errorMessage = "Test Message";
try {
throw new DriverInternalError(errorMessage);
} catch (DriverInternalError e1) {
try {
throw new DriverInternalError(e1);
} catch (DriverInternalError e2) {
assertTrue(StringUtils.contains(e2.getMessage(), errorMessage));
DriverInternalError copy = (DriverInternalError) e2.copy();
assertEquals(copy.getMessage(), e2.getMessage());
}
}
}
/**
* Tests InvalidConfigurationInQueryException.
* Tests basic message abilities.
*/
@Test(groups = "unit")
public void invalidConfigurationInQueryException() throws Exception {
String errorMessage = "Test Message";
try {
throw new InvalidConfigurationInQueryException(errorMessage);
} catch (InvalidConfigurationInQueryException e) {
assertEquals(e.getMessage(), errorMessage);
}
}
/**
* Tests InvalidQueryException.
* Tests basic message and copy abilities.
*/
@Test(groups = "unit")
public void invalidQueryException() throws Exception {
String errorMessage = "Test Message";
try {
throw new InvalidQueryException(errorMessage);
} catch (InvalidQueryException e) {
assertEquals(e.getMessage(), errorMessage);
InvalidQueryException copy = (InvalidQueryException) e.copy();
assertEquals(copy.getMessage(), e.getMessage());
}
}
/**
* Tests InvalidTypeException.
* Tests basic message and copy abilities.
*/
@Test(groups = "unit")
public void invalidTypeException() throws Exception {
String errorMessage = "Test Message";
try {
throw new InvalidTypeException(errorMessage);
} catch (InvalidTypeException e) {
assertEquals(e.getMessage(), errorMessage);
InvalidTypeException copy = (InvalidTypeException) e.copy();
assertEquals(copy.getMessage(), e.getMessage());
}
}
/**
* Tests the NoHostAvailableException
* by attempting to build a cluster using the IP address "255.255.255.255"
* and testing all available exception methods.
*/
@Test(groups = "short")
public void noHostAvailableException() throws Exception {
String ipAddress = "255.255.255.255";
HashMap<InetAddress, String> errorsHashMap = new HashMap<InetAddress, String>();
errorsHashMap.put(InetAddress.getByName(ipAddress), "[/255.255.255.255] Cannot connect");
try {
Cluster.builder().addContactPoints("255.255.255.255").build();
} catch (NoHostAvailableException e) {
assertEquals(e.getErrors(), errorsHashMap);
NoHostAvailableException copy = (NoHostAvailableException) e.copy();
assertEquals(copy.getMessage(), e.getMessage());
assertEquals(copy.getErrors(), e.getErrors());
}
}
/**
* Tests the ReadTimeoutException.
* Create a 3 node cluster and write out a single key at CL.ALL.
* Then forcibly kill a single node and attempt a read of the key at CL.ALL.
* Catch and test all available exception methods.
*/
@Test(groups = "long")
public void readTimeoutException() throws Throwable {
Cluster.Builder builder = Cluster.builder();
CCMBridge.CCMCluster c = CCMBridge.buildCluster(3, builder);
try {
String keyspace = "TestKeyspace";
String table = "TestTable";
int replicationFactor = 3;
String key = "1";
c.session.execute(String.format(TestUtils.CREATE_KEYSPACE_SIMPLE_FORMAT, keyspace, replicationFactor));
c.session.execute("USE " + keyspace);
c.session.execute(String.format(TestUtils.CREATE_TABLE_SIMPLE_FORMAT, table));
c.session.execute(new SimpleStatement(String.format(TestUtils.INSERT_FORMAT, table, key, "foo", 42, 24.03f)).setConsistencyLevel(ConsistencyLevel.ALL));
c.session.execute(new SimpleStatement(String.format(TestUtils.SELECT_ALL_FORMAT, table)).setConsistencyLevel(ConsistencyLevel.ALL));
c.cassandraCluster.forceStop(2);
try{
c.session.execute(new SimpleStatement(String.format(TestUtils.SELECT_ALL_FORMAT, table)).setConsistencyLevel(ConsistencyLevel.ALL));
} catch (ReadTimeoutException e) {
assertEquals(e.getConsistencyLevel(), ConsistencyLevel.ALL);
assertEquals(e.getReceivedAcknowledgements(), 2);
assertEquals(e.getRequiredAcknowledgements(), 3);
assertEquals(e.wasDataRetrieved(), true);
ReadTimeoutException copy = (ReadTimeoutException) e.copy();
assertEquals(copy.getMessage(), e.getMessage());
assertEquals(copy.wasDataRetrieved(), e.wasDataRetrieved());
}
} catch (Throwable e) {
throw e;
} finally {
c.discard();
}
}
/**
* Tests SyntaxError.
* Tests basic message and copy abilities.
*/
@Test(groups = "unit")
public void syntaxError() throws Exception {
String errorMessage = "Test Message";
try {
throw new SyntaxError(errorMessage);
} catch (SyntaxError e) {
assertEquals(e.getMessage(), errorMessage);
SyntaxError copy = (SyntaxError) e.copy();
assertEquals(copy.getMessage(), e.getMessage());
}
}
/**
* Tests TraceRetrievalException.
* Tests basic message and copy abilities.
*/
@Test(groups = "unit")
public void traceRetrievalException() throws Exception {
String errorMessage = "Test Message";
try {
throw new TraceRetrievalException(errorMessage);
} catch (TraceRetrievalException e) {
assertEquals(e.getMessage(), errorMessage);
TraceRetrievalException copy = (TraceRetrievalException) e.copy();
assertEquals(copy.getMessage(), e.getMessage());
}
}
/**
* Tests TruncateException.
* Tests basic message and copy abilities.
*/
@Test(groups = "unit")
public void truncateException() throws Exception {
String errorMessage = "Test Message";
try {
throw new TruncateException(errorMessage);
} catch (TruncateException e) {
assertEquals(e.getMessage(), errorMessage);
TruncateException copy = (TruncateException) e.copy();
assertEquals(copy.getMessage(), e.getMessage());
}
}
/**
* Tests UnauthorizedException.
* Tests basic message and copy abilities.
*/
@Test(groups = "unit")
public void unauthorizedException() throws Exception {
String errorMessage = "Test Message";
try {
throw new UnauthorizedException(errorMessage);
} catch (UnauthorizedException e) {
assertEquals(e.getMessage(), errorMessage);
UnauthorizedException copy = (UnauthorizedException) e.copy();
assertEquals(copy.getMessage(), e.getMessage());
}
}
/**
* Tests the UnavailableException.
* Create a 3 node cluster and write out a single key at CL.ALL.
* Then kill a single node, wait for gossip to propagate the new state,
* and attempt to read and write the same key at CL.ALL.
* Catch and test all available exception methods.
*/
@Test(groups = "long")
public void unavailableException() throws Throwable {
Cluster.Builder builder = Cluster.builder();
CCMBridge.CCMCluster c = CCMBridge.buildCluster(3, builder);
try {
String keyspace = "TestKeyspace";
String table = "TestTable";
int replicationFactor = 3;
String key = "1";
c.session.execute(String.format(TestUtils.CREATE_KEYSPACE_SIMPLE_FORMAT, keyspace, replicationFactor));
c.session.execute("USE " + keyspace);
c.session.execute(String.format(TestUtils.CREATE_TABLE_SIMPLE_FORMAT, table));
c.session.execute(new SimpleStatement(String.format(TestUtils.INSERT_FORMAT, table, key, "foo", 42, 24.03f)).setConsistencyLevel(ConsistencyLevel.ALL));
c.session.execute(new SimpleStatement(String.format(TestUtils.SELECT_ALL_FORMAT, table)).setConsistencyLevel(ConsistencyLevel.ALL));
c.cassandraCluster.stop(2);
waitForDown(CCMBridge.IP_PREFIX + "2", c.cluster);
try{
c.session.execute(new SimpleStatement(String.format(TestUtils.SELECT_ALL_FORMAT, table)).setConsistencyLevel(ConsistencyLevel.ALL));
} catch (UnavailableException e) {
String expectedError = String.format("Not enough replica available for query at consistency %s (%d required but only %d alive)", "ALL", 3, 2);
assertEquals(e.getMessage(), expectedError);
assertEquals(e.getConsistency(), ConsistencyLevel.ALL);
assertEquals(e.getRequiredReplicas(), replicationFactor);
assertEquals(e.getAliveReplicas(), replicationFactor - 1);
}
try{
c.session.execute(new SimpleStatement(String.format(TestUtils.INSERT_FORMAT, table, key, "foo", 42, 24.03f)).setConsistencyLevel(ConsistencyLevel.ALL));
} catch (UnavailableException e) {
String expectedError = String.format("Not enough replica available for query at consistency %s (%d required but only %d alive)", "ALL", 3, 2);
assertEquals(e.getMessage(), expectedError);
assertEquals(e.getConsistency(), ConsistencyLevel.ALL);
assertEquals(e.getRequiredReplicas(), replicationFactor);
assertEquals(e.getAliveReplicas(), replicationFactor - 1);
}
} catch (Throwable e) {
throw e;
} finally {
c.discard();
}
}
/**
* Tests the WriteTimeoutException.
* Create a 3 node cluster and write out a single key at CL.ALL.
* Then forcibly kill a single node and attempt to write the same key at CL.ALL.
* Catch and test all available exception methods.
*/
@Test(groups = "long")
public void writeTimeoutException() throws Throwable {
Cluster.Builder builder = Cluster.builder();
CCMBridge.CCMCluster c = CCMBridge.buildCluster(3, builder);
try {
String keyspace = "TestKeyspace";
String table = "TestTable";
int replicationFactor = 3;
String key = "1";
c.session.execute(String.format(TestUtils.CREATE_KEYSPACE_SIMPLE_FORMAT, keyspace, replicationFactor));
c.session.execute("USE " + keyspace);
c.session.execute(String.format(TestUtils.CREATE_TABLE_SIMPLE_FORMAT, table));
c.session.execute(new SimpleStatement(String.format(TestUtils.INSERT_FORMAT, table, key, "foo", 42, 24.03f)).setConsistencyLevel(ConsistencyLevel.ALL));
c.session.execute(new SimpleStatement(String.format(TestUtils.SELECT_ALL_FORMAT, table)).setConsistencyLevel(ConsistencyLevel.ALL));
c.cassandraCluster.forceStop(2);
try{
c.session.execute(new SimpleStatement(String.format(TestUtils.INSERT_FORMAT, table, key, "foo", 42, 24.03f)).setConsistencyLevel(ConsistencyLevel.ALL));
} catch (WriteTimeoutException e) {
assertEquals(e.getConsistencyLevel(), ConsistencyLevel.ALL);
assertEquals(e.getReceivedAcknowledgements(), 2);
assertEquals(e.getRequiredAcknowledgements(), 3);
assertEquals(e.getWriteType(), WriteType.SIMPLE);
}
} catch (Throwable e) {
throw e;
} finally {
c.discard();
}
}
}
|
|
/*
* Gutter.java
* Copyright (C) 1999, 2000 mike dillon
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package org.gjt.sp.jedit.textarea;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.border.*;
import org.gjt.sp.jedit.*;
public class Gutter extends JComponent implements SwingConstants
{
public Gutter(View view, JEditTextArea textArea)
{
this.view = view;
this.textArea = textArea;
setDoubleBuffered(true);
MouseHandler ml = new MouseHandler();
addMouseListener(ml);
addMouseMotionListener(ml);
}
public void paintComponent(Graphics gfx)
{
if (!collapsed)
{
// fill the background
Rectangle r = gfx.getClipBounds();
gfx.setColor(getBackground());
gfx.fillRect(r.x, r.y, r.width, r.height);
// if buffer is loading, don't paint anything
if (!textArea.getBuffer().isLoaded())
return;
// paint custom highlights, if there are any
if (highlights != null) paintCustomHighlights(gfx);
// paint line numbers, if they are enabled
if (lineNumberingEnabled) paintLineNumbers(gfx);
}
}
private void paintLineNumbers(Graphics gfx)
{
FontMetrics pfm = textArea.getPainter().getFontMetrics();
int lineHeight = pfm.getHeight();
Rectangle clip = gfx.getClipBounds();
int baseline = (clip.y - clip.y % lineHeight) + (int) Math.round(
(this.baseline + lineHeight - pfm.getDescent()) / 2.0);
int firstLine = clip.y / lineHeight + textArea.getFirstLine() + 1;
int lastLine = firstLine + clip.height / lineHeight;
int caretLine = textArea.getCaretLine() + 1;
int firstValidLine = firstLine > 1 ? firstLine : 1;
int lastValidLine = (lastLine > textArea.getLineCount())
? textArea.getLineCount() : lastLine;
boolean highlightCurrentLine = currentLineHighlightEnabled
&& (textArea.getSelectionStart() == textArea.getSelectionEnd());
gfx.setFont(getFont());
Color fg = getForeground();
Color hfg = getHighlightedForeground();
Color clfg = getCurrentLineForeground();
String number;
int offset;
for (int line = firstLine; line <= lastLine;
line++, baseline += lineHeight)
{
// only print numbers for valid lines
if (line < firstValidLine || line > lastValidLine)
continue;
number = Integer.toString(line);
switch (alignment)
{
case RIGHT:
offset = gutterSize.width - collapsedSize.width
- (fm.stringWidth(number) + 1);
break;
case CENTER:
offset = ((gutterSize.width - collapsedSize.width)
- fm.stringWidth(number)) / 2;
break;
case LEFT: default:
offset = 1;
}
if (line == caretLine && highlightCurrentLine)
{
gfx.setColor(clfg);
}
else if (interval > 1 && line % interval == 0)
{
gfx.setColor(hfg);
}
else
{
gfx.setColor(fg);
}
gfx.drawString(number, ileft + offset, baseline);
}
}
private void paintCustomHighlights(Graphics gfx)
{
int lineHeight = textArea.getPainter().getFontMetrics()
.getHeight();
int firstLine = textArea.getFirstLine();
int lastLine = firstLine + (getHeight() / lineHeight);
int y = 0;
for (int line = firstLine; line < lastLine;
line++, y += lineHeight)
{
highlights.paintHighlight(gfx, line, y);
}
}
/**
* Marks a line as needing a repaint.
* @param line The line to invalidate
*/
public final void invalidateLine(int line)
{
if(collapsed)
return;
FontMetrics pfm = textArea.getPainter().getFontMetrics();
repaint(0,textArea.lineToY(line) + pfm.getDescent() + pfm.getLeading(),
getWidth(),pfm.getHeight());
}
/**
* Marks a range of lines as needing a repaint.
* @param firstLine The first line to invalidate
* @param lastLine The last line to invalidate
*/
public final void invalidateLineRange(int firstLine, int lastLine)
{
if(collapsed)
return;
FontMetrics pfm = textArea.getPainter().getFontMetrics();
repaint(0,textArea.lineToY(firstLine) + pfm.getDescent() + pfm.getLeading(),
getWidth(),(lastLine - firstLine + 1) * pfm.getHeight());
}
/**
* Adds a custom highlight painter.
* @param highlight The highlight
*/
public void addCustomHighlight(TextAreaHighlight highlight)
{
highlight.init(textArea, highlights);
highlights = highlight;
}
/**
* Convenience method for setting a default matte border on the right
* with the specified border width and color
* @param width The border width (in pixels)
* @param color1 The focused border color
* @param color2 The unfocused border color
* @param color3 The gutter/text area gap color
*/
public void setBorder(int width, Color color1, Color color2, Color color3)
{
this.borderWidth = width;
focusBorder = new CompoundBorder(new MatteBorder(0,0,0,width,color3),
new MatteBorder(0,0,0,width,color1));
noFocusBorder = new CompoundBorder(new MatteBorder(0,0,0,width,color3),
new MatteBorder(0,0,0,width,color2));
updateBorder();
}
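// Example (illustrative values): setBorder(2, Color.BLACK, Color.GRAY, gapColor) produces a
// 4-pixel right-hand border: a 2-pixel strip of the gutter/text area gap color plus a 2-pixel
// strip that updateBorder() switches between BLACK (focused) and GRAY (unfocused).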
/**
* Sets the border differently if the text area has focus or not.
*/
public void updateBorder()
{
// because we are called from the text area's focus handler,
// we do an invokeLater() so that the view's focus handler
// has a chance to execute and set the edit pane properly
SwingUtilities.invokeLater(new Runnable()
{
public void run()
{
if(view.getEditPane() == null)
return;
if(view.getEditPane().getTextArea() == textArea)
setBorder(focusBorder);
else
setBorder(noFocusBorder);
}
});
}
/*
* JComponent.setBorder(Border) is overridden here to cache the left
* inset of the border (if any) to avoid having to fetch it during every
* repaint.
*/
public void setBorder(Border border)
{
super.setBorder(border);
if (border == null)
{
ileft = 0;
collapsedSize.width = 0;
collapsedSize.height = 0;
}
else
{
Insets insets = border.getBorderInsets(this);
ileft = insets.left;
collapsedSize.width = insets.left + insets.right;
collapsedSize.height = insets.top + insets.bottom;
}
}
/*
* JComponent.setFont(Font) is overridden here to cache the baseline for
* the font. This avoids having to get the font metrics during every
* repaint.
*/
public void setFont(Font font)
{
super.setFont(font);
fm = getFontMetrics(font);
baseline = fm.getAscent();
}
/**
* Get the foreground color for highlighted line numbers
* @return The highlight color
*/
public Color getHighlightedForeground()
{
return intervalHighlight;
}
public void setHighlightedForeground(Color highlight)
{
intervalHighlight = highlight;
}
public Color getCurrentLineForeground()
{
return currentLineHighlight;
}
public void setCurrentLineForeground(Color highlight)
{
currentLineHighlight = highlight;
}
/**
* Set the width of the expanded gutter
* @param width The gutter width
*/
public void setGutterWidth(int width)
{
if (width < collapsedSize.width) width = collapsedSize.width;
gutterSize.width = width;
// if the gutter is expanded, ask the text area to revalidate
// the layout to resize the gutter
if (!collapsed) textArea.revalidate();
}
/**
* Get the width of the expanded gutter
* @return The gutter width
*/
public int getGutterWidth()
{
return gutterSize.width;
}
/*
* Component.getPreferredSize() is overridden here to support the
* collapsing behavior.
*/
public Dimension getPreferredSize()
{
if (collapsed)
{
return collapsedSize;
}
else
{
return gutterSize;
}
}
public Dimension getMinimumSize()
{
return getPreferredSize();
}
public String getToolTipText(MouseEvent evt)
{
return (highlights == null) ? null :
highlights.getToolTipText(evt);
}
/**
* Identifies whether or not the line numbers are drawn in the gutter
* @return true if the line numbers are drawn, false otherwise
*/
public boolean isLineNumberingEnabled()
{
return lineNumberingEnabled;
}
/**
* Turns the line numbering on or off and causes the gutter to be
* repainted.
* @param enabled true if line numbers are drawn, false otherwise
*/
public void setLineNumberingEnabled(boolean enabled)
{
if (lineNumberingEnabled == enabled) return;
lineNumberingEnabled = enabled;
repaint();
}
/**
* Toggles line numbering.
* @param enabled true if line numbers are drawn, false otherwise
*/
public void toggleLineNumberingEnabled()
{
setLineNumberingEnabled(!lineNumberingEnabled);
}
/**
* Identifies the horizontal alignment of the line numbers.
* @return Gutter.RIGHT, Gutter.CENTER, Gutter.LEFT
*/
public int getLineNumberAlignment()
{
return alignment;
}
/**
* Sets the horizontal alignment of the line numbers.
* @param alignment Gutter.RIGHT, Gutter.CENTER, Gutter.LEFT
*/
public void setLineNumberAlignment(int alignment)
{
if (this.alignment == alignment) return;
this.alignment = alignment;
repaint();
}
/**
* Identifies whether the gutter is collapsed or expanded.
* @return true if the gutter is collapsed, false if it is expanded
*/
public boolean isCollapsed()
{
return collapsed;
}
/**
* Sets whether the gutter is collapsed or expanded and force the text
* area to update its layout if there is a change.
* @param collapsed true if the gutter is collapsed,
* false if it is expanded
*/
public void setCollapsed(boolean collapsed)
{
if (this.collapsed == collapsed) return;
this.collapsed = collapsed;
textArea.revalidate();
}
/**
* Toggles whether the gutter is collapsed or expanded.
*/
public void toggleCollapsed()
{
setCollapsed(!collapsed);
}
/**
* Makes the gutter's current size the default for future sessions.
* @since jEdit 2.7pre2
*/
public void saveGutterSize()
{
jEdit.setProperty("view.gutter.width", Integer.toString(
gutterSize.width));
}
/**
* Gets the number of lines between highlighted line numbers.
* @return The number of lines between highlighted line numbers or
* zero if highlighting is disabled
*/
public int getHighlightInterval()
{
return interval;
}
/**
* Sets the number of lines between highlighted line numbers. Any value
* less than or equal to one will result in highlighting being disabled.
* @param interval The number of lines between highlighted line numbers
*/
public void setHighlightInterval(int interval)
{
if (interval <= 1) interval = 0;
this.interval = interval;
repaint();
}
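// Example: setHighlightInterval(5) makes paintLineNumbers() draw every fifth line number
// (5, 10, 15, ...) in the highlighted foreground color; an interval of 1 or less is clamped
// to 0 above, which disables interval highlighting entirely.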
public boolean isCurrentLineHighlightEnabled()
{
return currentLineHighlightEnabled;
}
public void setCurrentLineHighlightEnabled(boolean enabled)
{
if (currentLineHighlightEnabled == enabled) return;
currentLineHighlightEnabled = enabled;
repaint();
}
public JPopupMenu getContextMenu()
{
return context;
}
public void setContextMenu(JPopupMenu context)
{
this.context = context;
}
// private members
private View view;
private JEditTextArea textArea;
private JPopupMenu context;
private TextAreaHighlight highlights;
private int baseline = 0;
private int ileft = 0;
private Dimension gutterSize = new Dimension(0,0);
private Dimension collapsedSize = new Dimension(0,0);
private Color intervalHighlight;
private Color currentLineHighlight;
private FontMetrics fm;
private int alignment;
private int interval = 0;
private boolean lineNumberingEnabled = true;
private boolean currentLineHighlightEnabled = false;
private boolean collapsed = false;
private int borderWidth;
private Border focusBorder, noFocusBorder;
class MouseHandler extends MouseAdapter implements MouseMotionListener
{
public void mousePressed(MouseEvent e)
{
if(e.getX() >= getWidth() - borderWidth)
{
e.translatePoint(-getWidth(),0);
textArea.mouseHandler.mousePressed(e);
//return;
}
else if(context != null && (e.getModifiers()
& InputEvent.BUTTON3_MASK) != 0)
{
if(context.isVisible())
context.setVisible(false);
else
{
//XXX this is a hack to make sure the
//XXX actions get the right text area
textArea.requestFocus();
context.show(Gutter.this,
e.getX()+1, e.getY()+1);
}
}
else if(e.getClickCount() == 2)
toggleCollapsed();
else
{
dragStart = e.getPoint();
startWidth = gutterSize.width;
}
}
public void mouseDragged(MouseEvent e)
{
if ((e.getModifiers() & InputEvent.BUTTON3_MASK) != 0)
return;
if(collapsed || e.getX() >= getWidth() - borderWidth)
{
e.translatePoint(-getWidth(),0);
textArea.mouseHandler.mouseDragged(e);
return;
}
if (dragStart == null) return;
gutterSize.width = startWidth + e.getX() - dragStart.x;
if (gutterSize.width < collapsedSize.width)
gutterSize.width = startWidth;
textArea.revalidate();
}
/* public void mouseExited(MouseEvent e)
{
if (dragStart != null && dragStart.x > e.getPoint().x)
{
setCollapsed(true);
gutterSize.width = startWidth;
textArea.revalidate();
}
//dragStart = null;
} */
public void mouseMoved(MouseEvent e) {}
public void mouseReleased(MouseEvent e)
{
if ((e.getModifiers() & InputEvent.BUTTON3_MASK) != 0)
return;
if(collapsed || e.getX() >= getWidth() - borderWidth)
{
e.translatePoint(-getWidth(),0);
textArea.mouseHandler.mouseReleased(e);
return;
}
dragStart = null;
}
private Point dragStart = null;
private int startWidth = 0;
}
}
/*
* ChangeLog:
* $ Log$
*/
|
|
/*
*
* Copyright 2004 The Ant-Contrib project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.antcontrib.cpptasks.trolltech;
import java.io.File;
import java.util.Vector;
import net.sf.antcontrib.cpptasks.CCTask;
import net.sf.antcontrib.cpptasks.OptimizationEnum;
import net.sf.antcontrib.cpptasks.compiler.CommandLineCompiler;
import net.sf.antcontrib.cpptasks.compiler.CommandLineCompilerConfiguration;
import net.sf.antcontrib.cpptasks.compiler.LinkType;
import net.sf.antcontrib.cpptasks.compiler.Linker;
import net.sf.antcontrib.cpptasks.compiler.Processor;
import net.sf.antcontrib.cpptasks.compiler.ProgressMonitor;
import net.sf.antcontrib.cpptasks.gcc.LdLinker;
import net.sf.antcontrib.cpptasks.parser.Parser;
import net.sf.antcontrib.cpptasks.VersionInfo;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.types.Environment;
/**
* Adapter for the Trolltech Qt UIC Compiler.
*
* @author Curt Arnold
*/
public final class UserInterfaceCompiler
extends CommandLineCompiler {
/**
* Singleton instance.
*/
private static final UserInterfaceCompiler INSTANCE = new
UserInterfaceCompiler(
false, null);
/**
* Gets singleton instance of compiler.
* @return UserInterfaceCompiler singleton instance
*/
public static UserInterfaceCompiler getInstance() {
return INSTANCE;
}
/**
* Constructor.
* @param newEnvironment boolean establish a new environment.
* @param env Environment environment.
*/
private UserInterfaceCompiler(final boolean newEnvironment,
final Environment env) {
super("uic", "-version", new String[] {".ui"}
, new String[0], ".h", false, null, newEnvironment, env);
}
/**
* Add arguments for debug, etc.
* @param args Vector command argument list
* @param debug boolean build for debug if true
* @param multithreaded boolean build for multithreading if true
* @param exceptions boolean enable exceptions if true
* @param linkType LinkType output and runtime type
* @param rtti Boolean enable run-time type identification if true
* @param optimization OptimizationEnum optimization
*/
protected void addImpliedArgs(final Vector args,
final boolean debug,
final boolean multithreaded,
final boolean exceptions,
final LinkType linkType,
final Boolean rtti,
final OptimizationEnum optimization) {
}
/**
* Add arguments for specified warning level.
* @param args Vector command line arguments
* @param level int warning level value
*/
protected void addWarningSwitch(final Vector args, final int level) {
}
/**
* Change environment (deprecated).
* @param newEnvironment boolean use new environment.
* @param env Environment environment
* @return Processor modified processor
*/
public Processor changeEnvironment(final boolean newEnvironment,
final Environment env) {
return this;
}
/**
* The include parser for C will work just fine, but we didn't want to
* inherit from CommandLineCCompiler.
* @param source source file to be parsed
* @return parser
*/
protected Parser createParser(final File source) {
return new UserInterfaceParser();
}
/**
* Gets number of command line arguments per input file.
* @return int number of command line arguments per input file.
*/
protected int getArgumentCountPerInputFile() {
return 3;
}
/**
* Gets output file names.
* @param inputFile String input file name
* @param versionInfo version info, not used by this compiler.
* @return String[] output file names
*/
public String[] getOutputFileNames(final String inputFile,
final VersionInfo versionInfo) {
//
// if a recognized input file
//
String baseName = getBaseOutputName(inputFile);
return new String[] {
baseName + ".h",
baseName + ".cpp",
"moc_" + baseName + ".cpp"};
}
/**
* Gets input file arguments.
* @param outputDir File output directory
* @param filename String input file name.
* @param index int argument index,
* 0 to getNumberOfArgumentsPerInputFile() -1
* @return String input file argument
*/
protected String getInputFileArgument(final File outputDir,
final String filename,
final int index) {
switch (index) {
case 0:
return "-o";
case 1:
String outputFileName = getOutputFileNames(filename, null)[0];
return new File(outputDir, outputFileName)
.toString();
case 2:
return filename;
default:
return null;
}
}
/**
* Gets maximum length of command line.
* @return int maximum length of command line
*/
public int getMaximumCommandLength() {
return 1024;
}
/**
* Gets maximum number of input files processed per command.
* @return int maximum number of input files processed per command.
*/
protected int getMaximumInputFilesPerCommand() {
return 1;
}
/**
* Gets include directory switch.
* @param includeDir String include directory
* @return String command switch to add specified directory to search path
*/
protected String getIncludeDirSwitch(final String includeDir) {
return "";
}
/**
* Gets switch to define preprocessor macro.
* @param buffer StringBuffer command line argument
* @param define String macro name
* @param value String macro value, may be null.
*/
protected void getDefineSwitch(final StringBuffer buffer,
final String define,
final String value) {
}
/**
* Gets switch to undefine preprocessor macro.
* @param buffer StringBuffer command line argument
* @param define String macro name
*/
protected void getUndefineSwitch(final StringBuffer buffer,
final String define) {
}
/**
* Gets standard include paths.
* @return File[] standard include paths
*/
protected File[] getEnvironmentIncludePath() {
return new File[0];
}
/**
* Gets linker associated with this type.
     * @param type LinkType link type (ignored; ld is always returned).
* @return Linker
*/
public Linker getLinker(final LinkType type) {
return LdLinker.getInstance();
}
/**
* Compiles an .ui file into the corresponding .h, .cpp and moc_*.cpp files.
* @param task current cc task
* @param outputDir output directory
* @param sourceFiles source files
* @param args command line arguments that appear before input files
* @param endArgs command line arguments that appear after input files
* @param relentless if true, do not stop at first compilation error
* @param config compiler configuration
* @param monitor progress monitor
*/
public void compile(final CCTask task,
final File outputDir,
final String[] sourceFiles,
final String[] args,
final String[] endArgs,
final boolean relentless,
final CommandLineCompilerConfiguration config,
final ProgressMonitor monitor) {
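        //
        // For each .ui source file this builds and runs three commands:
        //   uic -o <base>.h <file>.ui
        //   uic -o <base>.cpp -impl <base>.h <file>.ui
        //   moc -o moc_<base>.cpp <base>.h
        // Any leading args are placed right after the executable name and any
        // endArgs are appended after the per-file arguments.
        //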
BuildException exc = null;
String[] thisSource = new String[1];
String[] uicCommand = new String[args.length + endArgs.length + 4];
uicCommand[0] = "uic";
String[] uicImplCommand = new String[args.length + endArgs.length + 6];
uicImplCommand[0] = "uic";
String[] mocCommand = new String[args.length + endArgs.length + 4];
mocCommand[0] = "moc";
for (int i = 0; i < args.length; i++) {
uicCommand[i + 1] = args[i];
uicImplCommand[i + 1] = args[i];
            mocCommand[i + 1] = args[i];
}
uicCommand[args.length + 1] = "-o";
uicImplCommand[args.length + 1] = "-o";
mocCommand[args.length + 1] = "-o";
int uicIndex = args.length + 4;
int uicImplIndex = args.length + 6;
int mocIndex = args.length + 4;
for (int i = 0; i < endArgs.length; i++) {
uicCommand[uicIndex++] = endArgs[i];
uicImplCommand[uicImplIndex++] = endArgs[i];
mocCommand[mocIndex++] = endArgs[i];
}
for (int j = 0; j < sourceFiles.length; j++) {
uicIndex = args.length + 2;
uicImplIndex = args.length + 2;
mocIndex = args.length + 2;
String[] outputFileNames = getOutputFileNames(sourceFiles[j], null);
uicCommand[uicIndex++] = outputFileNames[0];
uicCommand[uicIndex++] = sourceFiles[j];
uicImplCommand[uicImplIndex++] = outputFileNames[1];
uicImplCommand[uicImplIndex++] = "-impl";
uicImplCommand[uicImplIndex++] = outputFileNames[0];
uicImplCommand[uicImplIndex++] = sourceFiles[j];
mocCommand[mocIndex++] = outputFileNames[2];
mocCommand[mocIndex++] = outputFileNames[0];
int retval = runCommand(task, outputDir, uicCommand);
if (retval == 0) {
retval = runCommand(task, outputDir, uicImplCommand);
if (retval == 0) {
retval = runCommand(task, outputDir, mocCommand);
}
}
if (monitor != null) {
thisSource[0] = sourceFiles[j];
monitor.progress(thisSource);
}
//
// if the process returned a failure code and
// we aren't holding an exception from an earlier
            // iteration
if (retval != 0 && exc == null) {
//
// construct the exception
//
exc = new BuildException(this.getCommand()
+ " failed with return code " + retval, task
.getLocation());
//
// and throw it now unless we are relentless
//
if (!relentless) {
throw exc;
}
}
}
//
// if the compiler returned a failure value earlier
// then throw an exception
if (exc != null) {
throw exc;
}
}
/**
* Get total command line length due to the input file.
* @param outputDir File output directory
* @param inputFile String input file
* @return int characters added to command line for the input file.
*/
protected int getTotalArgumentLengthForInputFile(
final File outputDir,
final String inputFile) {
String arg1 = getInputFileArgument(outputDir, inputFile, 1);
String arg2 = getInputFileArgument(outputDir, inputFile, 2);
return arg1.length() + arg2.length() + 4;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.sjms.SjmsComponent;
/**
* The sjms component (simple jms) allows messages to be sent to (or consumed
* from) a JMS Queue or Topic (uses JMS 1.x API).
*
* Generated by camel-package-maven-plugin - do not edit this file!
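 *
 * A minimal usage sketch (illustrative only; the option values and the
 * {@code camelContext} variable are assumptions, not taken from this file):
 *
 * <pre>
 * SjmsComponentBuilderFactory.sjms()
 *         .connectionCount(5)
 *         .reconnectOnError(true)
 *         .register(camelContext, "sjms");
 * </pre>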
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface SjmsComponentBuilderFactory {
/**
* Simple JMS (camel-sjms)
* The sjms component (simple jms) allows messages to be sent to (or
* consumed from) a JMS Queue or Topic (uses JMS 1.x API).
*
* Category: messaging
* Since: 2.11
* Maven coordinates: org.apache.camel:camel-sjms
*/
static SjmsComponentBuilder sjms() {
return new SjmsComponentBuilderImpl();
}
/**
* Builder for the Simple JMS component.
*/
interface SjmsComponentBuilder extends ComponentBuilder<SjmsComponent> {
/**
* The maximum number of connections available to endpoints started
* under this component.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 1
* Group: common
*/
default SjmsComponentBuilder connectionCount(
java.lang.Integer connectionCount) {
doSetProperty("connectionCount", connectionCount);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
         * which means any exceptions that occur while the consumer is trying
         * to pick up incoming messages, or the likes, will now be processed as
         * a message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, which will be logged at WARN or ERROR level and
         * ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default SjmsComponentBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Backoff in millis on consumer pool reconnection attempts.
*
* The option is a: <code>long</code> type.
*
* Default: 5000
* Group: consumer
*/
default SjmsComponentBuilder reconnectBackOff(long reconnectBackOff) {
doSetProperty("reconnectBackOff", reconnectBackOff);
return this;
}
/**
* Try to apply reconnection logic on consumer pool.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*/
default SjmsComponentBuilder reconnectOnError(boolean reconnectOnError) {
doSetProperty("reconnectOnError", reconnectOnError);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that
         * when the first message is processed, creating and starting the
         * producer may take a little time and prolong the total processing
         * time of that first message.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*/
default SjmsComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the component should use basic property binding (Camel 2.x)
* or the newer property binding with additional capabilities.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default SjmsComponentBuilder basicPropertyBinding(
boolean basicPropertyBinding) {
doSetProperty("basicPropertyBinding", basicPropertyBinding);
return this;
}
/**
* The client ID to use when creating javax.jms.Connection when using
* the default
* org.apache.camel.component.sjms.jms.ConnectionFactoryResource.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*/
default SjmsComponentBuilder connectionClientId(
java.lang.String connectionClientId) {
doSetProperty("connectionClientId", connectionClientId);
return this;
}
/**
* A ConnectionFactory is required to enable the SjmsComponent. It can
         * be set directly or set as part of a ConnectionResource.
*
* The option is a: <code>javax.jms.ConnectionFactory</code> type.
*
* Group: advanced
*/
default SjmsComponentBuilder connectionFactory(
javax.jms.ConnectionFactory connectionFactory) {
doSetProperty("connectionFactory", connectionFactory);
return this;
}
/**
         * The max wait time in millis to block and wait on a free connection when
* the pool is exhausted when using the default
* org.apache.camel.component.sjms.jms.ConnectionFactoryResource.
*
* The option is a: <code>long</code> type.
*
* Default: 5000
* Group: advanced
*/
default SjmsComponentBuilder connectionMaxWait(long connectionMaxWait) {
doSetProperty("connectionMaxWait", connectionMaxWait);
return this;
}
/**
* A ConnectionResource is an interface that allows for customization
         * and container control of the ConnectionFactory. See Pluggable
* Connection Resource Management for further details.
*
* The option is a:
* <code>org.apache.camel.component.sjms.jms.ConnectionResource</code>
* type.
*
* Group: advanced
*/
default SjmsComponentBuilder connectionResource(
org.apache.camel.component.sjms.jms.ConnectionResource connectionResource) {
doSetProperty("connectionResource", connectionResource);
return this;
}
/**
         * When using the default
         * org.apache.camel.component.sjms.jms.ConnectionFactoryResource,
         * whether each javax.jms.Connection should be tested (by calling
         * start) before being returned from the pool.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*/
default SjmsComponentBuilder connectionTestOnBorrow(
boolean connectionTestOnBorrow) {
doSetProperty("connectionTestOnBorrow", connectionTestOnBorrow);
return this;
}
/**
* To use a custom DestinationCreationStrategy.
*
* The option is a:
* <code>org.apache.camel.component.sjms.jms.DestinationCreationStrategy</code> type.
*
* Group: advanced
*/
default SjmsComponentBuilder destinationCreationStrategy(
org.apache.camel.component.sjms.jms.DestinationCreationStrategy destinationCreationStrategy) {
doSetProperty("destinationCreationStrategy", destinationCreationStrategy);
return this;
}
/**
* Pluggable strategy for encoding and decoding JMS keys so they can be
* compliant with the JMS specification. Camel provides one
* implementation out of the box: default. The default strategy will
* safely marshal dots and hyphens (. and -). Can be used for JMS
* brokers which do not care whether JMS header keys contain illegal
* characters. You can provide your own implementation of the
* org.apache.camel.component.jms.JmsKeyFormatStrategy and refer to it
* using the # notation.
*
* The option is a:
* <code>org.apache.camel.component.sjms.jms.JmsKeyFormatStrategy</code>
* type.
*
* Group: advanced
*/
default SjmsComponentBuilder jmsKeyFormatStrategy(
org.apache.camel.component.sjms.jms.JmsKeyFormatStrategy jmsKeyFormatStrategy) {
doSetProperty("jmsKeyFormatStrategy", jmsKeyFormatStrategy);
return this;
}
/**
         * To use the given MessageCreatedStrategy which is invoked when Camel
* creates new instances of javax.jms.Message objects when Camel is
* sending a JMS message.
*
* The option is a:
* <code>org.apache.camel.component.sjms.jms.MessageCreatedStrategy</code> type.
*
* Group: advanced
*/
default SjmsComponentBuilder messageCreatedStrategy(
org.apache.camel.component.sjms.jms.MessageCreatedStrategy messageCreatedStrategy) {
doSetProperty("messageCreatedStrategy", messageCreatedStrategy);
return this;
}
/**
* To use a custom TimedTaskManager.
*
* The option is a:
* <code>org.apache.camel.component.sjms.taskmanager.TimedTaskManager</code> type.
*
* Group: advanced
*/
default SjmsComponentBuilder timedTaskManager(
org.apache.camel.component.sjms.taskmanager.TimedTaskManager timedTaskManager) {
doSetProperty("timedTaskManager", timedTaskManager);
return this;
}
/**
* To use a custom org.apache.camel.spi.HeaderFilterStrategy to filter
         * headers to and from the Camel message.
*
* The option is a:
* <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
*
* Group: filter
*/
default SjmsComponentBuilder headerFilterStrategy(
org.apache.camel.spi.HeaderFilterStrategy headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
/**
* The password to use when creating javax.jms.Connection when using the
* default
* org.apache.camel.component.sjms.jms.ConnectionFactoryResource.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default SjmsComponentBuilder connectionPassword(
java.lang.String connectionPassword) {
doSetProperty("connectionPassword", connectionPassword);
return this;
}
/**
* The username to use when creating javax.jms.Connection when using the
* default
* org.apache.camel.component.sjms.jms.ConnectionFactoryResource.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default SjmsComponentBuilder connectionUsername(
java.lang.String connectionUsername) {
doSetProperty("connectionUsername", connectionUsername);
return this;
}
/**
* To configure which kind of commit strategy to use. Camel provides two
* implementations out of the box, default and batch.
*
* The option is a:
* <code>org.apache.camel.component.sjms.TransactionCommitStrategy</code> type.
*
* Group: transaction
*/
default SjmsComponentBuilder transactionCommitStrategy(
org.apache.camel.component.sjms.TransactionCommitStrategy transactionCommitStrategy) {
doSetProperty("transactionCommitStrategy", transactionCommitStrategy);
return this;
}
}
class SjmsComponentBuilderImpl
extends
AbstractComponentBuilder<SjmsComponent>
implements
SjmsComponentBuilder {
@Override
protected SjmsComponent buildConcreteComponent() {
return new SjmsComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "connectionCount": ((SjmsComponent) component).setConnectionCount((java.lang.Integer) value); return true;
case "bridgeErrorHandler": ((SjmsComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "reconnectBackOff": ((SjmsComponent) component).setReconnectBackOff((long) value); return true;
case "reconnectOnError": ((SjmsComponent) component).setReconnectOnError((boolean) value); return true;
case "lazyStartProducer": ((SjmsComponent) component).setLazyStartProducer((boolean) value); return true;
case "basicPropertyBinding": ((SjmsComponent) component).setBasicPropertyBinding((boolean) value); return true;
case "connectionClientId": ((SjmsComponent) component).setConnectionClientId((java.lang.String) value); return true;
case "connectionFactory": ((SjmsComponent) component).setConnectionFactory((javax.jms.ConnectionFactory) value); return true;
case "connectionMaxWait": ((SjmsComponent) component).setConnectionMaxWait((long) value); return true;
case "connectionResource": ((SjmsComponent) component).setConnectionResource((org.apache.camel.component.sjms.jms.ConnectionResource) value); return true;
case "connectionTestOnBorrow": ((SjmsComponent) component).setConnectionTestOnBorrow((boolean) value); return true;
case "destinationCreationStrategy": ((SjmsComponent) component).setDestinationCreationStrategy((org.apache.camel.component.sjms.jms.DestinationCreationStrategy) value); return true;
case "jmsKeyFormatStrategy": ((SjmsComponent) component).setJmsKeyFormatStrategy((org.apache.camel.component.sjms.jms.JmsKeyFormatStrategy) value); return true;
case "messageCreatedStrategy": ((SjmsComponent) component).setMessageCreatedStrategy((org.apache.camel.component.sjms.jms.MessageCreatedStrategy) value); return true;
case "timedTaskManager": ((SjmsComponent) component).setTimedTaskManager((org.apache.camel.component.sjms.taskmanager.TimedTaskManager) value); return true;
case "headerFilterStrategy": ((SjmsComponent) component).setHeaderFilterStrategy((org.apache.camel.spi.HeaderFilterStrategy) value); return true;
case "connectionPassword": ((SjmsComponent) component).setConnectionPassword((java.lang.String) value); return true;
case "connectionUsername": ((SjmsComponent) component).setConnectionUsername((java.lang.String) value); return true;
case "transactionCommitStrategy": ((SjmsComponent) component).setTransactionCommitStrategy((org.apache.camel.component.sjms.TransactionCommitStrategy) value); return true;
default: return false;
}
}
}
}
|
|
package com.cmendenhall.tests;
import com.cmendenhall.views.swing.*;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import javax.swing.*;
import java.awt.*;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
@RunWith(JUnit4.class)
public class SwingViewTest {
private SwingView swingView;
@Before
public void setUp() {
swingView = new SwingView();
}
@Test
public void swingViewHasCorrectSize() {
assertEquals(350, swingView.getWidth());
assertEquals(400, swingView.getHeight());
}
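    // Looks up a direct child of the given container by its component name;
    // returns null when no direct child has that name.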
private Component getComponent(Container container, String name) {
for (Component component : container.getComponents()) {
if (name.equals(component.getName())) {
return component;
}
}
return null;
}
@Test
public void swingViewHasMessagePanel() {
MessagePanel messagePanel =
(MessagePanel)getComponent(swingView.getContentPane(), "messagePanel");
JLabel label =
(JLabel)getComponent(messagePanel, "messagePanelLabel");
assertTrue(label.isVisible());
assertEquals("Welcome to Tic-Tac-Toe", label.getText());
}
@Test
public void swingViewHasBoardPanel() {
BoardPanel boardPanel =
(BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
assertTrue(boardPanel.isVisible());
}
@Test
public void boardPanelContainsJTable() {
BoardPanel boardPanel =
(BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
JTable boardTable =
(JTable)getComponent(boardPanel, "boardTable");
assertTrue(boardTable.isVisible());
}
@Test
public void boardPanelCorrectlyDisplaysBoard() {
BoardPanel boardPanel =
(BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
boardPanel.loadBoard(TicTacToeTestHelper.noWins);
}
@Test
public void swingViewHasConfigPanel() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
assertTrue(configPanel.isVisible());
}
@Test
public void configPanelHasGameActionPanel() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
GameActionPanel gameActionPanel =
                (GameActionPanel)getComponent(configPanel, "gameActionPanel");
        assertTrue(gameActionPanel.isVisible());
    }
@Test
public void gameActionPanelHasNewGameButton() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
GameActionPanel gameActionPanel =
(GameActionPanel)getComponent(configPanel, "gameActionPanel");
JButton newGameButton =
(JButton)getComponent(gameActionPanel, "newGameButton");
assertTrue(newGameButton.isVisible());
}
@Test
public void newGameButtonHasCorrectLabel() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
GameActionPanel gameActionPanel =
(GameActionPanel)getComponent(configPanel, "gameActionPanel");
JButton newGameButton =
(JButton)getComponent(gameActionPanel, "newGameButton");
assertEquals("New game", newGameButton.getText());
}
@Test
public void configPanelHasPlayerOneConfigPanel() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
PlayerConfigPanel playerOneConfigPanel =
(PlayerConfigPanel)getComponent(configPanel, "playerOneConfigPanel");
assertTrue(playerOneConfigPanel.isVisible());
}
@Test
public void configPanelHasPlayerTwoConfigPanel() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
PlayerConfigPanel playerTwoConfigPanel =
(PlayerConfigPanel)getComponent(configPanel, "playerTwoConfigPanel");
assertTrue(playerTwoConfigPanel.isVisible());
}
@Test
public void playerConfigPanelHasTwoRadioButtons() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
PlayerConfigPanel playerTwoConfigPanel =
(PlayerConfigPanel)getComponent(configPanel, "playerTwoConfigPanel");
JRadioButton humanButton =
(JRadioButton)getComponent(playerTwoConfigPanel, "humanButton");
JRadioButton computerButton =
(JRadioButton)getComponent(playerTwoConfigPanel, "computerButton");
assertTrue(humanButton.isVisible());
assertTrue(computerButton.isVisible());
}
@Test
public void playerConfigPanelHasCorrectLabel() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
PlayerConfigPanel playerTwoConfigPanel =
(PlayerConfigPanel)getComponent(configPanel, "playerTwoConfigPanel");
JLabel playerTwo =
(JLabel)getComponent(playerTwoConfigPanel, "playerLabel");
assertEquals("Player O", playerTwo.getText());
}
@Test
public void playerConfigPanelStoresPlayerConfigState() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
PlayerConfigPanel playerTwoConfigPanel =
(PlayerConfigPanel)getComponent(configPanel, "playerTwoConfigPanel");
assertTrue(playerTwoConfigPanel.humanSelected());
}
@Test
public void radioButtonsHaveCorrectLabels() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
PlayerConfigPanel playerTwoConfigPanel =
(PlayerConfigPanel)getComponent(configPanel, "playerTwoConfigPanel");
JRadioButton humanButton =
(JRadioButton)getComponent(playerTwoConfigPanel, "humanButton");
JRadioButton computerButton =
(JRadioButton)getComponent(playerTwoConfigPanel, "computerButton");
assertEquals("Human", humanButton.getText());
assertEquals("Computer", computerButton.getText());
}
@Test
public void configPanelHasBoardConfigPanel() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
BoardConfigPanel boardConfigPanel =
(BoardConfigPanel)getComponent(configPanel, "boardConfigPanel");
assertTrue(boardConfigPanel.isVisible());
}
@Test
public void boardConfigPanelHasBoardSizeSpinner() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
BoardConfigPanel boardConfigPanel =
(BoardConfigPanel)getComponent(configPanel, "boardConfigPanel");
JSpinner boardSizeSpinner =
(JSpinner)getComponent(boardConfigPanel, "boardSizeSpinner");
assertTrue(boardSizeSpinner.isVisible());
}
@Test
public void boardConfigPanelSpinnerStoresSpinnerState() {
ConfigPanel configPanel =
(ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
BoardConfigPanel boardConfigPanel =
(BoardConfigPanel)getComponent(configPanel, "boardConfigPanel");
assertEquals("3", boardConfigPanel.boardSize());
}
@Test
public void messagePanelShouldDisplayMessage() {
swingView.displayMessage("Shall we play a game?");
MessagePanel messagePanel = (MessagePanel)getComponent(swingView.getContentPane(), "messagePanel");
JLabel message = (JLabel)getComponent(messagePanel, "messagePanelLabel");
assertEquals("Shall we play a game?", message.getText());
}
@Test
public void boardSizeSpinnerCanBeEnabled() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
BoardConfigPanel boardConfigPanel = (BoardConfigPanel)getComponent(configPanel, "boardConfigPanel");
JSpinner spinner = (JSpinner)getComponent(boardConfigPanel, "boardSizeSpinner");
boardConfigPanel.enableSpinner();
assertTrue(spinner.isEnabled());
}
@Test
public void boardSizeSpinnerCanBeDisabled() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
BoardConfigPanel boardConfigPanel = (BoardConfigPanel)getComponent(configPanel, "boardConfigPanel");
JSpinner spinner = (JSpinner)getComponent(boardConfigPanel, "boardSizeSpinner");
boardConfigPanel.disableSpinner();
assertFalse(spinner.isEnabled());
}
@Test
public void newGameButtonCanBeDisabled() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
GameActionPanel gameActionPanel = (GameActionPanel)getComponent(configPanel, "gameActionPanel");
JButton newGameButton = (JButton)getComponent(gameActionPanel, "newGameButton");
gameActionPanel.disableNewGameButton();
assertFalse(newGameButton.isEnabled());
}
@Test
public void newGameButtonCanBeEnabled() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
GameActionPanel gameActionPanel = (GameActionPanel)getComponent(configPanel, "gameActionPanel");
JButton newGameButton = (JButton)getComponent(gameActionPanel, "newGameButton");
gameActionPanel.disableNewGameButton();
gameActionPanel.enableNewGameButton();
assertTrue(newGameButton.isEnabled());
}
@Test
public void configButtonsCanBeEnabled() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
BoardConfigPanel boardConfigPanel = (BoardConfigPanel)getComponent(configPanel, "boardConfigPanel");
JSpinner spinner = (JSpinner)getComponent(boardConfigPanel, "boardSizeSpinner");
PlayerConfigPanel playerOneConfigPanel = (PlayerConfigPanel)getComponent(configPanel, "playerOneConfigPanel");
JRadioButton humanButtonOne = (JRadioButton)getComponent(playerOneConfigPanel, "humanButton");
JRadioButton computerButtonOne = (JRadioButton)getComponent(playerOneConfigPanel, "computerButton");
PlayerConfigPanel playerTwoConfigPanel = (PlayerConfigPanel)getComponent(configPanel, "playerTwoConfigPanel");
JRadioButton humanButtonTwo = (JRadioButton)getComponent(playerTwoConfigPanel, "humanButton");
JRadioButton computerButtonTwo = (JRadioButton)getComponent(playerTwoConfigPanel, "computerButton");
GameActionPanel gameActionPanel = (GameActionPanel)getComponent(configPanel, "gameActionPanel");
JButton newGameButton = (JButton)getComponent(gameActionPanel, "newGameButton");
configPanel.addNewGameListener();
newGameButton.doClick();
configPanel.enableConfigButtons();
assertTrue(spinner.isEnabled());
assertTrue(humanButtonOne.isEnabled());
assertTrue(humanButtonTwo.isEnabled());
assertTrue(computerButtonOne.isEnabled());
assertTrue(computerButtonTwo.isEnabled());
assertTrue(newGameButton.isEnabled());
}
@Test
public void boardCanBeDisabled() {
BoardPanel boardPanel = (BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
boardPanel.disableBoard();
JTable boardTable = (JTable)getComponent(boardPanel, "boardTable");
assertFalse(boardTable.isEnabled());
}
@Test
public void boardCanBeEnabled() {
BoardPanel boardPanel = (BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
boardPanel.disableBoard();
boardPanel.enableBoard();
JTable boardTable = (JTable)getComponent(boardPanel, "boardTable");
assertTrue(boardTable.isEnabled());
}
@Test
public void boardCanBeEnabledByView() {
BoardPanel boardPanel = (BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
boardPanel.disableBoard();
swingView.enableBoard();
JTable boardTable = (JTable)getComponent(boardPanel, "boardTable");
assertTrue(boardTable.isEnabled());
}
@Test
public void enabledBoardHasWhiteBackground() {
BoardPanel boardPanel = (BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
boardPanel.enableBoard();
JTable boardTable = (JTable)getComponent(boardPanel, "boardTable");
Color backgroundColor = boardTable.getBackground();
assertEquals(backgroundColor, Color.WHITE);
}
@Test
public void viewDisplaysCorrectBoards() {
swingView.displayBoard(TicTacToeTestHelper.noWins);
BoardPanel boardPanel = (BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
JTable board = (JTable)getComponent(boardPanel, "boardTable");
/*
String topLeft = (String)board.getValueAt(0, 0);
String topMiddle = (String)board.getValueAt(0, 1);
String topRight = (String)board.getValueAt(0, 2);
String middleLeft = (String)board.getValueAt(1, 0);
String middleCenter = (String)board.getValueAt(1, 1);
String middleRight = (String)board.getValueAt(1, 2);
String lowerLeft = (String)board.getValueAt(2, 0);
String lowerCenter = (String)board.getValueAt(2, 1);
String lowerRight = (String)board.getValueAt(2, 2);
assertEquals("O", topLeft);
assertEquals("O", topMiddle);
assertEquals("X", topRight);
assertEquals("X", middleLeft);
assertEquals("X", middleCenter);
assertEquals("O", middleRight);
assertEquals("O", lowerLeft);
assertEquals("X", lowerCenter);
assertEquals("X", lowerRight);*/
}
@Test
public void viewCanResizeWindow() {
swingView.resizeWindow(500, 500);
Dimension viewSize = swingView.getSize();
assertEquals(500, viewSize.getHeight(), 0);
assertEquals(500, viewSize.getWidth(), 0);
}
@Test
public void configPanelReturnsCorrectView() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
SwingView view = configPanel.getView();
assertEquals(swingView, view);
}
@Test
public void configPanelEnqueuesCorrectInput() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
configPanel.sendConfigInput();
assertEquals("3", swingView.getInput());
assertEquals("h", swingView.getInput());
assertEquals("h", swingView.getInput());
}
@Test
public void viewClearsInputOnReload() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
configPanel.sendConfigInput();
swingView.reload();
assertEquals("", swingView.getInput());
}
@Test
public void viewDisablesBoardOnReload() {
swingView.reload();
BoardPanel boardPanel = (BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
JTable board = (JTable)getComponent(boardPanel, "boardTable");
assertFalse(board.isEnabled());
}
@Test
public void viewEnablesConfigButtonsOnReload() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
BoardConfigPanel boardConfigPanel = (BoardConfigPanel)getComponent(configPanel, "boardConfigPanel");
JSpinner spinner = (JSpinner)getComponent(boardConfigPanel, "boardSizeSpinner");
PlayerConfigPanel playerOneConfigPanel = (PlayerConfigPanel)getComponent(configPanel, "playerOneConfigPanel");
JRadioButton humanButtonOne = (JRadioButton)getComponent(playerOneConfigPanel, "humanButton");
JRadioButton computerButtonOne = (JRadioButton)getComponent(playerOneConfigPanel, "computerButton");
PlayerConfigPanel playerTwoConfigPanel = (PlayerConfigPanel)getComponent(configPanel, "playerTwoConfigPanel");
JRadioButton humanButtonTwo = (JRadioButton)getComponent(playerTwoConfigPanel, "humanButton");
JRadioButton computerButtonTwo = (JRadioButton)getComponent(playerTwoConfigPanel, "computerButton");
GameActionPanel gameActionPanel = (GameActionPanel)getComponent(configPanel, "gameActionPanel");
JButton newGameButton = (JButton)getComponent(gameActionPanel, "newGameButton");
swingView.reload();
assertTrue(spinner.isEnabled());
assertTrue(humanButtonOne.isEnabled());
assertTrue(humanButtonTwo.isEnabled());
assertTrue(computerButtonOne.isEnabled());
assertTrue(computerButtonTwo.isEnabled());
assertTrue(newGameButton.isEnabled());
}
@Test
public void boardTableStylerAppliesCorrectStyle() {
BoardPanel boardPanel = (BoardPanel)getComponent(swingView.getContentPane(), "boardPanel");
JTable board = (JTable)getComponent(boardPanel, "boardTable");
BoardTableStyler.applyStyle(board);
assertEquals("boardTable", board.getName());
assertTrue(board.getShowVerticalLines());
assertTrue(board.getShowHorizontalLines());
assertFalse(board.getColumnSelectionAllowed());
assertFalse(board.getRowSelectionAllowed());
}
@Test
public void windowSizeChangesWhenBoardIsResized() {
ConfigPanel configPanel = (ConfigPanel)getComponent(swingView.getContentPane(), "configPanel");
BoardConfigPanel boardConfigPanel = (BoardConfigPanel)getComponent(configPanel, "boardConfigPanel");
JSpinner spinner = (JSpinner)getComponent(boardConfigPanel, "boardSizeSpinner");
GameActionPanel gameActionPanel = (GameActionPanel)getComponent(configPanel, "gameActionPanel");
JButton newGameButton = (JButton)getComponent(gameActionPanel, "newGameButton");
spinner.setValue(6);
configPanel.addNewGameListener();
newGameButton.doClick();
Dimension viewSize = swingView.getSize();
assertEquals(350, viewSize.getWidth(), 0);
assertEquals(550, viewSize.getHeight(), 0);
}
}
|
|
package org.deeplearning4j.nn.transferlearning;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.*;
import org.deeplearning4j.nn.conf.distribution.Distribution;
import org.deeplearning4j.nn.conf.layers.*;
import org.deeplearning4j.nn.conf.stepfunctions.StepFunction;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.activations.IActivation;
import org.nd4j.linalg.learning.config.IUpdater;
import org.nd4j.shade.jackson.annotation.JsonInclude;
import org.nd4j.shade.jackson.annotation.JsonTypeInfo;
import org.nd4j.shade.jackson.core.JsonProcessingException;
import java.io.IOException;
import java.util.Map;
/**
* Created by Alex on 21/02/2017.
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "type")
@JsonInclude(JsonInclude.Include.NON_NULL)
@NoArgsConstructor
@AllArgsConstructor
@Data
@Builder(builderClassName = "Builder")
public class FineTuneConfiguration {
protected IActivation activationFn;
protected WeightInit weightInit;
protected Double biasInit;
protected Distribution dist;
protected Double learningRate;
protected Double biasLearningRate;
protected Map<Integer, Double> learningRateSchedule;
protected Double lrScoreBasedDecay;
protected Double l1;
protected Double l2;
protected Double l1Bias;
protected Double l2Bias;
protected Double dropOut;
@Deprecated
protected Updater updater;
protected IUpdater iUpdater;
@Deprecated
protected Double momentum;
@Deprecated
protected Map<Integer, Double> momentumSchedule;
@Deprecated
protected Double epsilon;
@Deprecated
protected Double rho;
@Deprecated
protected Double rmsDecay;
@Deprecated
protected Double adamMeanDecay;
@Deprecated
protected Double adamVarDecay;
protected Boolean miniBatch;
protected Integer numIterations;
protected Integer maxNumLineSearchIterations;
protected Long seed;
protected Boolean useRegularization;
protected OptimizationAlgorithm optimizationAlgo;
protected StepFunction stepFunction;
protected Boolean useDropConnect;
protected Boolean minimize;
protected GradientNormalization gradientNormalization;
protected Double gradientNormalizationThreshold;
protected LearningRatePolicy learningRatePolicy;
protected Double lrPolicyDecayRate;
protected Double lrPolicySteps;
protected Double lrPolicyPower;
protected ConvolutionMode convolutionMode;
protected Boolean pretrain;
protected Boolean backprop;
protected BackpropType backpropType;
protected Integer tbpttFwdLength;
protected Integer tbpttBackLength;
//Lombok builder. Note that the code below ADDS OR OVERRIDES the lombok implementation; the final builder class
// is the composite of the lombok parts and the parts defined here
//partial implementation to allow public no-arg constructor (lombok default is package private)
//Plus some implementations to match NeuralNetConfiguration builder methods
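    // Illustrative usage sketch (the values below are placeholders, not defaults):
    //   FineTuneConfiguration ftc = new FineTuneConfiguration.Builder()
    //           .seed(123)
    //           .activation(Activation.RELU)
    //           .learningRate(0.01)
    //           .build();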
public static class Builder {
public Builder() {}
public Builder seed(int seed) {
this.seed = (long) seed;
return this;
}
public Builder seed(long seed) {
this.seed = seed;
return this;
}
public Builder regularization(boolean regularization) {
this.useRegularization = regularization;
return this;
}
public Builder iterations(int iterations) {
this.numIterations = iterations;
return this;
}
public Builder activation(Activation activation) {
this.activationFn = activation.getActivationFunction();
return this;
}
public Builder updater(IUpdater updater) {
return iUpdater(updater);
}
public Builder updater(Updater updater) {
this.updater = updater;
return updater(updater.getIUpdaterWithDefaultConfig());
}
}
public NeuralNetConfiguration appliedNeuralNetConfiguration(NeuralNetConfiguration nnc) {
applyToNeuralNetConfiguration(nnc);
nnc = new NeuralNetConfiguration.Builder(nnc.clone()).build();
return nnc;
}
public void applyToNeuralNetConfiguration(NeuralNetConfiguration nnc) {
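        // Overwrite only the non-null fields of this configuration onto the given
        // layer/net configuration; updater and regularization validation is re-run
        // at the end so defaults for a newly selected updater are filled in.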
Layer l = nnc.getLayer();
Updater originalUpdater = null;
WeightInit origWeightInit = null;
if (l != null) {
if (dropOut != null)
l.setDropOut(dropOut);
}
if (l != null && l instanceof BaseLayer) {
BaseLayer bl = (BaseLayer) l;
originalUpdater = bl.getUpdater();
origWeightInit = bl.getWeightInit();
if (activationFn != null)
bl.setActivationFn(activationFn);
if (weightInit != null)
bl.setWeightInit(weightInit);
if (biasInit != null)
bl.setBiasInit(biasInit);
if (dist != null)
bl.setDist(dist);
if (learningRate != null) {
//usually the same learning rate is applied to both bias and weights
//so always overwrite the learning rate to both?
bl.setLearningRate(learningRate);
bl.setBiasLearningRate(learningRate);
}
if (biasLearningRate != null)
bl.setBiasLearningRate(biasLearningRate);
if (learningRateSchedule != null)
bl.setLearningRateSchedule(learningRateSchedule);
// if(lrScoreBasedDecay != null)
if (l1 != null)
bl.setL1(l1);
if (l2 != null)
bl.setL2(l2);
if (l1Bias != null)
bl.setL1Bias(l1Bias);
if (l2Bias != null)
bl.setL2Bias(l2Bias);
if (updater != null)
bl.setUpdater(updater);
if (iUpdater != null)
bl.setIUpdater(iUpdater);
if (momentum != null)
bl.setMomentum(momentum);
            if (momentumSchedule != null)
                bl.setMomentumSchedule(momentumSchedule);
if (epsilon != null)
bl.setEpsilon(epsilon);
if (rho != null)
bl.setRho(rho);
if (rmsDecay != null)
bl.setRmsDecay(rmsDecay);
if (adamMeanDecay != null)
bl.setAdamMeanDecay(adamMeanDecay);
if (adamVarDecay != null)
bl.setAdamVarDecay(adamVarDecay);
if (gradientNormalization != null)
bl.setGradientNormalization(gradientNormalization);
if (gradientNormalizationThreshold != null)
bl.setGradientNormalizationThreshold(gradientNormalizationThreshold);
}
if (miniBatch != null)
nnc.setMiniBatch(miniBatch);
if (numIterations != null)
nnc.setNumIterations(numIterations);
if (maxNumLineSearchIterations != null)
nnc.setMaxNumLineSearchIterations(maxNumLineSearchIterations);
if (seed != null)
nnc.setSeed(seed);
if (useRegularization != null)
nnc.setUseRegularization(useRegularization);
if (optimizationAlgo != null)
nnc.setOptimizationAlgo(optimizationAlgo);
if (stepFunction != null)
nnc.setStepFunction(stepFunction);
if (useDropConnect != null)
nnc.setUseDropConnect(useDropConnect);
if (minimize != null)
nnc.setMinimize(minimize);
if (learningRatePolicy != null)
nnc.setLearningRatePolicy(learningRatePolicy);
if (lrPolicySteps != null)
nnc.setLrPolicySteps(lrPolicySteps);
if (lrPolicyPower != null)
nnc.setLrPolicyPower(lrPolicyPower);
if (convolutionMode != null && l instanceof ConvolutionLayer) {
((ConvolutionLayer) l).setConvolutionMode(convolutionMode);
}
if (convolutionMode != null && l instanceof SubsamplingLayer) {
((SubsamplingLayer) l).setConvolutionMode(convolutionMode);
}
//Check the updater config. If we change updaters, we want to remove the old config to avoid warnings
if (l != null && l instanceof BaseLayer && updater != null && originalUpdater != null
&& updater != originalUpdater) {
BaseLayer bl = (BaseLayer) l;
switch (originalUpdater) {
case ADAM:
case ADAMAX:
if (adamMeanDecay == null)
bl.setAdamMeanDecay(Double.NaN);
if (adamVarDecay == null)
bl.setAdamVarDecay(Double.NaN);
break;
case ADADELTA:
if (rho == null)
bl.setRho(Double.NaN);
if (epsilon == null)
bl.setEpsilon(Double.NaN);
break;
case NESTEROVS:
if (momentum == null)
bl.setMomentum(Double.NaN);
if (momentumSchedule == null)
bl.setMomentumSchedule(null);
if (epsilon == null)
bl.setEpsilon(Double.NaN);
break;
case ADAGRAD:
if (epsilon == null)
bl.setEpsilon(Double.NaN);
break;
case RMSPROP:
if (rmsDecay == null)
bl.setRmsDecay(Double.NaN);
if (epsilon == null)
bl.setEpsilon(Double.NaN);
break;
//Other cases: no changes required
}
}
//Check weight init. Remove dist if originally was DISTRIBUTION, and isn't now -> remove no longer needed distribution
if (l != null && l instanceof BaseLayer && origWeightInit == WeightInit.DISTRIBUTION && weightInit != null
&& weightInit != WeightInit.DISTRIBUTION) {
((BaseLayer) l).setDist(null);
}
//Perform validation. This also sets the defaults for updaters. For example, Updater.RMSProp -> set rmsDecay
if (l != null) {
LayerValidation.updaterValidation(l.getLayerName(), l, learningRate, momentum, momentumSchedule,
adamMeanDecay, adamVarDecay, rho, rmsDecay, epsilon);
boolean useDropCon = (useDropConnect == null ? nnc.isUseDropConnect() : useDropConnect);
LayerValidation.generalValidation(l.getLayerName(), l, nnc.isUseRegularization(), useDropCon, dropOut, l2,
l2Bias, l1, l1Bias, dist);
}
//Also: update the LR, L1 and L2 maps, based on current config (which might be different to original config)
if (nnc.variables(false) != null) {
for (String s : nnc.variables(false)) {
nnc.setLayerParamLR(s);
}
}
}
public void applyToMultiLayerConfiguration(MultiLayerConfiguration conf) {
if (pretrain != null)
conf.setPretrain(pretrain);
if (backprop != null)
conf.setBackprop(backprop);
if (backpropType != null)
conf.setBackpropType(backpropType);
if (tbpttFwdLength != null)
conf.setTbpttFwdLength(tbpttFwdLength);
if (tbpttBackLength != null)
conf.setTbpttBackLength(tbpttBackLength);
}
public void applyToComputationGraphConfiguration(ComputationGraphConfiguration conf) {
if (pretrain != null)
conf.setPretrain(pretrain);
if (backprop != null)
conf.setBackprop(backprop);
if (backpropType != null)
conf.setBackpropType(backpropType);
if (tbpttFwdLength != null)
conf.setTbpttFwdLength(tbpttFwdLength);
if (tbpttBackLength != null)
conf.setTbpttBackLength(tbpttBackLength);
}
public NeuralNetConfiguration.Builder appliedNeuralNetConfigurationBuilder() {
NeuralNetConfiguration.Builder confBuilder = new NeuralNetConfiguration.Builder();
if (activationFn != null)
confBuilder.setActivationFn(activationFn);
if (weightInit != null)
confBuilder.setWeightInit(weightInit);
if (biasInit != null)
confBuilder.setBiasInit(biasInit);
if (dist != null)
confBuilder.setDist(dist);
if (learningRate != null) {
//usually the same learning rate is applied to both bias and weights
//HOWEVER: this is set elsewhere. in the NNC, we only want to override the normal LR
confBuilder.setLearningRate(learningRate);
}
if (biasLearningRate != null)
confBuilder.setBiasLearningRate(biasLearningRate);
if (learningRateSchedule != null)
confBuilder.setLearningRateSchedule(learningRateSchedule);
// if(lrScoreBasedDecay != null)
if (l1 != null)
confBuilder.setL1(l1);
if (l2 != null)
confBuilder.setL2(l2);
if (l1Bias != null)
confBuilder.setL1Bias(l1Bias);
if (l2Bias != null)
confBuilder.setL2Bias(l2Bias);
if (dropOut != null)
confBuilder.setDropOut(dropOut);
if (iUpdater != null)
confBuilder.updater(iUpdater);
if (updater != null)
confBuilder.setUpdater(updater);
if (momentum != null)
confBuilder.setMomentum(momentum);
        if (momentumSchedule != null)
            confBuilder.setMomentumSchedule(momentumSchedule);
if (epsilon != null)
confBuilder.setEpsilon(epsilon);
if (rho != null)
confBuilder.setRho(rho);
if (rmsDecay != null)
confBuilder.setRmsDecay(rmsDecay);
if (adamMeanDecay != null)
confBuilder.setAdamMeanDecay(adamMeanDecay);
if (adamVarDecay != null)
confBuilder.setAdamVarDecay(adamVarDecay);
if (miniBatch != null)
confBuilder.setMiniBatch(miniBatch);
if (numIterations != null)
confBuilder.setNumIterations(numIterations);
if (maxNumLineSearchIterations != null)
confBuilder.setMaxNumLineSearchIterations(maxNumLineSearchIterations);
if (seed != null)
confBuilder.setSeed(seed);
if (useRegularization != null)
confBuilder.setUseRegularization(useRegularization);
if (optimizationAlgo != null)
confBuilder.setOptimizationAlgo(optimizationAlgo);
if (stepFunction != null)
confBuilder.setStepFunction(stepFunction);
if (useDropConnect != null)
confBuilder.setUseDropConnect(useDropConnect);
if (minimize != null)
confBuilder.setMinimize(minimize);
if (gradientNormalization != null)
confBuilder.setGradientNormalization(gradientNormalization);
if (gradientNormalizationThreshold != null)
confBuilder.setGradientNormalizationThreshold(gradientNormalizationThreshold);
if (learningRatePolicy != null)
confBuilder.setLearningRatePolicy(learningRatePolicy);
if (lrPolicySteps != null)
confBuilder.setLrPolicySteps(lrPolicySteps);
if (lrPolicyPower != null)
confBuilder.setLrPolicyPower(lrPolicyPower);
return confBuilder;
}
public String toJson() {
try {
return NeuralNetConfiguration.mapper().writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
public String toYaml() {
try {
return NeuralNetConfiguration.mapperYaml().writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
public static FineTuneConfiguration fromJson(String json) {
try {
return NeuralNetConfiguration.mapper().readValue(json, FineTuneConfiguration.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static FineTuneConfiguration fromYaml(String yaml) {
try {
return NeuralNetConfiguration.mapperYaml().readValue(yaml, FineTuneConfiguration.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
|
|
package org.roaringbitmap.buffer;
import static org.junit.Assert.assertEquals;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import org.junit.Test;
public class TestAdversarialInputs {
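    // Feeds valid and deliberately corrupted serialized bitmaps to the mutable and
    // memory-mapped immutable implementations and checks that the corrupted inputs
    // fail with an exception rather than crashing.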
// copy to a temporary file
private File copy(String resourceName) throws IOException {
// old-school Java, could be improved
File tmpfile = File.createTempFile("RoaringBitmapTestAdversarialInputs", "bin");
tmpfile.deleteOnExit();
OutputStream resStreamOut = null;
InputStream stream = null;
try {
ClassLoader classLoader = getClass().getClassLoader();
stream = classLoader.getResourceAsStream(resourceName);
if(stream == null) {
throw new IOException("Cannot get resource \"" + resourceName + "\".");
}
int readBytes;
byte[] buffer = new byte[4096];
resStreamOut = new FileOutputStream(tmpfile);
while ((readBytes = stream.read(buffer)) > 0) {
resStreamOut.write(buffer, 0, readBytes);
}
} finally {
if(stream != null) stream.close();
if(resStreamOut != null) resStreamOut.close();
}
return tmpfile;
}
public ByteBuffer memoryMap(String resourceName) throws IOException {
File tmpfile = copy(resourceName);
long totalcount = tmpfile.length();
RandomAccessFile memoryMappedFile = new RandomAccessFile(tmpfile, "r");
    ByteBuffer bb = memoryMappedFile.getChannel().map(FileChannel.MapMode.READ_ONLY, 0, totalcount); // map the whole file once; memory maps are expensive
memoryMappedFile.close(); // we can safely close
bb.position(0);
return bb;
}
@Test
public void testInputGoodFile1() throws IOException {
File file = copy("testdata/bitmapwithruns.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not throw an exception
rb.deserialize(new DataInputStream(new FileInputStream(file)));
assertEquals(rb.getCardinality(), 200100);
file.delete();
}
@Test
public void testInputGoodFile1Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/bitmapwithruns.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
assertEquals(rb.getCardinality(), 200100);
}
@Test
public void testInputGoodFile2() throws IOException {
File file = copy("testdata/bitmapwithoutruns.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not throw an exception
rb.deserialize(new DataInputStream(new FileInputStream(file)));
assertEquals(rb.getCardinality(), 200100);
file.delete();
}
@Test
public void testInputGoodFile2Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/bitmapwithoutruns.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
assertEquals(rb.getCardinality(), 200100);
}
@Test(expected = IOException.class)
public void testInputBadFile1() throws IOException {
File file = copy("testdata/crashproneinput1.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not work
rb.deserialize(new DataInputStream(new FileInputStream(file)));
file.delete();
}
@Test(expected = IndexOutOfBoundsException.class)
public void testInputBadFile1Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/crashproneinput1.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
System.out.println(rb.getCardinality()); // won't get here
}
@Test(expected = IOException.class)
public void testInputBadFile2() throws IOException {
File file = copy("testdata/crashproneinput2.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not work
rb.deserialize(new DataInputStream(new FileInputStream(file)));
file.delete();
}
@Test(expected = IndexOutOfBoundsException.class)
public void testInputBadFile2Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/crashproneinput2.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
System.out.println(rb.getCardinality()); // won't get here
}
@Test(expected = IOException.class)
public void testInputBadFile3() throws IOException {
File file = copy("testdata/crashproneinput3.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not work
rb.deserialize(new DataInputStream(new FileInputStream(file)));
file.delete();
}
@Test(expected = IndexOutOfBoundsException.class)
public void testInputBadFile3Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/crashproneinput3.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
System.out.println(rb.getCardinality()); // won't get here
}
@Test(expected = IOException.class)
public void testInputBadFile4() throws IOException {
File file = copy("testdata/crashproneinput4.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not work
rb.deserialize(new DataInputStream(new FileInputStream(file)));
file.delete();
}
@Test(expected = IndexOutOfBoundsException.class)
public void testInputBadFile4Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/crashproneinput4.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
System.out.println(rb.getCardinality()); // won't get here
}
@Test(expected = IOException.class)
public void testInputBadFile5() throws IOException {
File file = copy("testdata/crashproneinput5.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not work
rb.deserialize(new DataInputStream(new FileInputStream(file)));
file.delete();
}
@Test(expected = IndexOutOfBoundsException.class)
public void testInputBadFile5Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/crashproneinput5.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
System.out.println(rb.getCardinality()); // won't get here
}
@Test(expected = IOException.class)
public void testInputBadFile6() throws IOException {
File file = copy("testdata/crashproneinput6.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not work
rb.deserialize(new DataInputStream(new FileInputStream(file)));
file.delete();
}
@Test(expected = IndexOutOfBoundsException.class)
public void testInputBadFile6Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/crashproneinput6.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
System.out.println(rb.getCardinality()); // won't get here
}
@Test(expected = IOException.class)
public void testInputBadFile7() throws IOException {
File file = copy("testdata/crashproneinput7.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not work
rb.deserialize(new DataInputStream(new FileInputStream(file)));
file.delete();
}
@Test(expected = IllegalArgumentException.class)
public void testInputBadFile7Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/crashproneinput7.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
System.out.println(rb.getCardinality()); // won't get here
}
@Test(expected = IOException.class)
public void testInputBadFile8() throws IOException {
File file = copy("testdata/crashproneinput8.bin");
MutableRoaringBitmap rb = new MutableRoaringBitmap();
// should not work
rb.deserialize(new DataInputStream(new FileInputStream(file)));
file.delete();
}
@Test(expected = IndexOutOfBoundsException.class)
public void testInputBadFile8Mapped() throws IOException {
ByteBuffer bb = memoryMap("testdata/crashproneinput8.bin");
ImmutableRoaringBitmap rb = new ImmutableRoaringBitmap(bb);
System.out.println(rb.getCardinality()); // won't get here
}
}
|
|
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.internal.cache.locks;
import java.sql.Connection;
import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.cache.ConflictException;
import com.gemstone.gemfire.cache.IsolationLevel;
import com.gemstone.gemfire.cache.LockTimeoutException;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.internal.cache.AbstractOperationMessage;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.RegionEntry;
import com.gemstone.gemfire.internal.cache.TXManagerImpl;
import com.gemstone.gemfire.internal.cache.TXStateProxy;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.util.ArrayUtils;
/**
 * This enumeration defines how an entry or object should be locked, when the
 * lock should be released, the exceptions to be thrown when lock acquisition
 * fails, retries for locks, etc., where each lock is obtained locally on each node.
*
* <p>
*
* Multiple modes of operation are provided:
*
* a) fail-fast (default): The operation fails immediately with a
* {@link ConflictException} if the lock could not be obtained on any of the
* nodes (eager conflict detection for transactions).
*
 * NOTE: the schemes mentioned below are not yet implemented except for
 * "b) waiting", which is now provided via a system property that applies to
 * each transaction ({@link ExclusiveSharedSynchronizer#WRITE_LOCK_TIMEOUT_PROP}).
*
* b) waiting: Wait for the lock to be acquired on all the nodes subject to a
* maximum timeout. There is no special distributed deadlock detection so those
* cases will simply timeout after the given limit is reached.
*
* c) retry (future work): This is with optional random sleep between the
* retries and subject to a maximum timeout. Again there is no special
* distributed livelock detection so those cases will timeout after the given
* limit is reached. Using the random sleep should minimize such cases.
*
* d) retry with increased priority (future work): This is a variation of the
* retry scheme where each retry is with a higher priority in addition to the
* random sleep. This requires preemptable locks that will cause the other
* operation currently holding the lock to fail if its priority is lower than
* that of this operation.
*
* e) wait with elder election for deadlocks (future work): In case of deadlock
* detection on a single object (i.e. lock successful on some nodes while
* failure on others), a distributed barrier will be created using the locks
* with one of the members driving the operations and applying the operations on
* the good side of the barrier to completion.
*
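 * A minimal usage sketch of the fail-fast mode; {@code entry}, {@code txId}
 * and {@code region} are hypothetical placeholders for the lock object, lock
 * owner and locking context:
 * <pre>{@code
 * LockingPolicy policy = LockingPolicy.FAIL_FAST_TX;
 * policy.acquireLock(entry, policy.getWriteLockMode(), 0, txId, region, null);
 * try {
 *   // mutate the entry while the EX_SH write lock is held
 * } finally {
 *   policy.releaseLock(entry, policy.getWriteLockMode(), txId, false, region);
 * }
 * }</pre>
 *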
* @author swale
* @since 7.0
*/
public enum LockingPolicy {
/**
* Defines a dummy no locking policy.
*/
NONE {
@Override
public LockMode getReadLockMode() {
return null;
}
@Override
public LockMode getWriteLockMode() {
return null;
}
@Override
public LockMode getReadOnlyLockMode() {
return null;
}
@Override
public final void acquireLock(ExclusiveSharedLockObject lockObj,
LockMode mode, int flags, Object lockOwner, Object context,
AbstractOperationMessage msg) throws ConflictException,
LockTimeoutException {
// nothing to be done
}
@Override
public final long getTimeout(Object lockObj, LockMode newMode,
LockMode currentMode, int flags, final long msecs) {
return msecs;
}
@Override
public final IsolationLevel getIsolationLevel() {
return IsolationLevel.NONE;
}
@Override
public final boolean isFailFast() {
return false;
}
@Override
public final Object lockForRead(final ExclusiveSharedLockObject lockObj,
LockMode mode, Object lockOwner, final Object context,
final int iContext, AbstractOperationMessage msg,
boolean allowTombstones, ReadEntryUnderLock reader) {
// no locking to be done
return reader.readEntry(lockObj, context, iContext, allowTombstones);
}
@Override
public final boolean lockedForWrite(ExclusiveSharedLockObject lockObj,
Object owner, Object context) {
return false; // not used
}
},
/**
* The default fail-fast mode to be used by
* {@link Connection#TRANSACTION_READ_COMMITTED} transactions that will fail
* with a {@link ConflictException} if write lock cannot be obtained
* immediately. However when a {@link LockMode#SH} lock is to be obtained with
* an existing {@link LockMode#EX} lock, then it will be blocking. The
* rationale being that the {@link LockMode#EX} mode will be used for
* transactional operations only during commit when atomicity of the commit is
* required, so there is no actual conflict. In addition the read locks are
* always acquired for zero duration only ({@link #lockForRead} releases the
* lock immediately) so that reads will wait for any pending commits to
 * complete but will themselves not block any commits since we just want to
 * read the latest committed value.
 */
FAIL_FAST_TX {
@Override
public LockMode getReadLockMode() {
return LockMode.SH;
}
@Override
public LockMode getWriteLockMode() {
return LockMode.EX_SH;
}
@Override
public final void acquireLock(final ExclusiveSharedLockObject lockObj,
final LockMode mode, final int flags, final Object lockOwner,
final Object context, final AbstractOperationMessage msg)
throws ConflictException, LockTimeoutException {
acquireLockFailFast(lockObj, mode, flags, lockOwner, context, msg);
}
@Override
public final long getTimeout(final Object lockObj, final LockMode newMode,
final LockMode currentMode, final int flags, final long msecs) {
return getTimeoutFailFast(lockObj, newMode, currentMode, flags);
}
@Override
public final IsolationLevel getIsolationLevel() {
return IsolationLevel.READ_COMMITTED;
}
@Override
public final boolean readOnlyCanStartTX() {
// now RC will also start TX for READ_ONLY (#49371)
return true;
}
@Override
public final boolean isFailFast() {
return true;
}
@Override
public final Object lockForRead(final ExclusiveSharedLockObject lockObj,
final LockMode mode, final Object lockOwner, final Object context,
final int iContext, final AbstractOperationMessage msg,
boolean allowTombstones, ReadEntryUnderLock reader) {
// no locking is done now for SH locks since we cannot guarantee
// distributed commit atomicity for READ_COMMITTED in any case without
// resorting to 2-phase commit with EX waiting for existing SH
// indefinitely
if (mode == LockMode.SH) {
return reader.readEntry(lockObj, context, iContext, allowTombstones);
}
assert mode == LockMode.READ_ONLY: mode;
// need to hold READ_ONLY lock so that any conflicts can be correctly
// detected with batching (which is the default behaviour, #49371)
acquireLockFailFast(lockObj, mode, 0, lockOwner, context, msg);
return Locked;
/*
// read lock is zero duration
acquireLockFailFast(lockObj, mode, 0, lockOwner, context, msg);
try {
// unlock immediately since we do not intend to hold the lock,
// but its possible that entry disappeared after lock acquisition
// (TX commit was in progress), so check for that case
return reader.readEntry(lockObj, context, iContext, allowTombstones);
} finally {
releaseLock(lockObj, mode, lockOwner, false, context);
}
*/
}
},
/**
* The default fail-fast mode to be used by
* {@link Connection#TRANSACTION_REPEATABLE_READ} transactions that will fail
* with a {@link ConflictException} if write lock cannot be obtained
* immediately. However when a read lock ({@link LockMode#SH}) is to be
* obtained with an existing {@link LockMode#EX} lock, then it will be
* blocking. The rationale being that the {@link LockMode#EX} mode will be
* used for transactional operations only during commit so those will be
* blocked until the read lock is released. To provide repeatable-read
* semantics, this policy will hold the read locks till the end of transaction
* so no changes to the locked rows can be committed (i.e. underlying rows in
* the region cannot be changed).
*/
FAIL_FAST_RR_TX {
@Override
public LockMode getReadLockMode() {
return LockMode.SH;
}
@Override
public LockMode getWriteLockMode() {
return LockMode.EX_SH;
}
@Override
public final void acquireLock(final ExclusiveSharedLockObject lockObj,
final LockMode mode, final int flags, final Object lockOwner,
final Object context, final AbstractOperationMessage msg)
throws ConflictException, LockTimeoutException {
// We allow for READ_ONLY locks to co-exist with EX_SH locks to minimize
// conflicts. It also helps us to enforce the policy that reads never get
// a conflict. So in this case the commit becomes two-phase and the lock
// upgrade to EX mode is done in pre-commit phase that throws a conflict
// exception.
// Removed the ALLOW_READ_ONLY_WITH_EX_SH flag due to the following
// possible scenario:
// 1) child starts transactional insert but still not reached insert
// 2) parent delete ReferencedKeyChecker searches and finds nothing,
// not even a transactional entry so does nothing
// 3) parent delete acquires write lock on parent entry
// 4) child insert acquires write lock on child entry and READ_ONLY
// on parent entry (which does not get a conflict due to this flag)
// 5) child insert finishes and commits successfully releasing READ_ONLY
// 6) parent delete then finishes successfully
acquireLockFailFast(lockObj, mode, flags, lockOwner, context, msg);
}
@Override
public final long getTimeout(final Object lockObj,
final LockMode requestedMode, final LockMode currentMode,
final int flags, final long msecs) {
if (TXStateProxy.LOG_FINEST) {
final LogWriter logger = GemFireCacheImpl.getExisting().getLogger();
if (logger.infoEnabled()) {
logger.info("LockingPolicy." + name() + ": found existing lockMode "
+ currentMode + " requested " + requestedMode + " on object: "
+ lockObj);
}
}
// For RR the policy is that all locks wait for EX lock indefinitely since
// it is assumed to be taken only for short duration, while other
// read-write and write-write combinations will have short timeout i.e.
// will throw a conflict immediately. The difference from other policies
// is that EX lock will conflict with READ_ONLY/SH locks, so that
// read-write conflicts are only thrown in pre-commit phase (combined with
// allowReadOnlyWithEXSH to allow for EX_SH and READ_ONLY to co-exist).
switch (currentMode) {
case EX:
// assuming EX_SH is always upgraded to EX so this can never happen
assert requestedMode != LockMode.EX: "unexpected requestedMode EX "
+ "with currentMode EX in getTimeout for " + lockObj;
// check for the special flag that will cause SH/EX_SH to conflict
// with EX -- see comments in
// ExclusiveSharedSynchronizer.CONFLICT_WITH_EX
final long exReadTimeout = Math.max(
ExclusiveSharedSynchronizer.DEFAULT_READ_TIMEOUT,
ExclusiveSharedSynchronizer.READ_LOCK_TIMEOUT * 2);
if ((flags & ExclusiveSharedSynchronizer.CONFLICT_WITH_EX) == 0) {
// wait for EX to be released during commit when acquiring SH lock
// but don't wait a whole lot for update
return (requestedMode == LockMode.SH
&& ((flags & ExclusiveSharedSynchronizer.FOR_UPDATE) != 0)
? exReadTimeout : Math.min(exReadTimeout * 10,
ExclusiveSharedSynchronizer.LOCK_MAX_TIMEOUT));
}
else {
// wait for some time before throwing a conflict for EX
return exReadTimeout;
}
case EX_SH:
if (requestedMode == LockMode.SH) {
// SH should always be allowed with EX_SH so keep retrying
return ExclusiveSharedSynchronizer.LOCK_MAX_TIMEOUT;
}
else if (requestedMode == LockMode.READ_ONLY) {
// wait a bit for READ_ONLY
return ExclusiveSharedSynchronizer.READ_LOCK_TIMEOUT;
}
else {
// conflict immediately for EX_SH
return ExclusiveSharedSynchronizer.WRITE_LOCK_TIMEOUT;
}
case SH:
if (requestedMode == LockMode.EX) {
// wait for some time before throwing a conflict for EX
return ExclusiveSharedSynchronizer.READ_LOCK_TIMEOUT;
}
else if (requestedMode == LockMode.EX_SH) {
// wait for SH => EX_SH upgrade for sometime and then fail, else it
// can get stuck indefinitely if two or more threads are trying to
// do the same (#49341, #46121 etc)
return ExclusiveSharedSynchronizer.READ_LOCK_TIMEOUT;
}
else {
// all other locks are allowed with SH so keep retrying
return ExclusiveSharedSynchronizer.LOCK_MAX_TIMEOUT;
}
default:
assert currentMode == LockMode.READ_ONLY: "unexpected currentMode="
+ currentMode + " in getTimeout for " + lockObj;
switch (requestedMode) {
case EX:
// wait for some time before throwing a conflict for EX
return ExclusiveSharedSynchronizer.READ_LOCK_TIMEOUT;
case EX_SH:
// this should fail immediately assuming that they are from
// different txns (TXState level will handle for same TX case)
return ExclusiveSharedSynchronizer.WRITE_LOCK_TIMEOUT;
default:
// SH and READ_ONLY are allowed with READ_ONLY so keep retrying
return ExclusiveSharedSynchronizer.LOCK_MAX_TIMEOUT;
}
}
}
@Override
public final IsolationLevel getIsolationLevel() {
return IsolationLevel.REPEATABLE_READ;
}
@Override
public final boolean zeroDurationReadLocks() {
return false;
}
@Override
public final boolean readCanStartTX() {
return true;
}
@Override
public final boolean readOnlyCanStartTX() {
return true;
}
@Override
public final boolean isFailFast() {
return true;
}
@Override
public final Object lockForRead(final ExclusiveSharedLockObject lockObj,
final LockMode mode, final Object lockOwner, final Object context,
final int iContext, final AbstractOperationMessage msg,
boolean allowTombstones, ReadEntryUnderLock reader) {
// currently only CONFLICT_WITH_EX flag is honoured; if more flags are
// added then ensure that none overlap with those in ReadEntryUnderLock
acquireLockFailFast(lockObj, mode,
(iContext & ExclusiveSharedSynchronizer.CONFLICT_WITH_EX), lockOwner,
context, msg);
return Locked;
}
@Override
public final boolean requiresTwoPhaseCommit(final TXStateProxy proxy) {
// if there has been a write operation, then there is potential for
// conflict at lock upgrade time, so need to use 2-phase commit
return proxy.isDirty();
}
},
/**
* Like {@link #FAIL_FAST_TX} mode, except that for non-transactional
* operations this will wait for other non-transactional operations instead of
 * failing. It will still fail eagerly when a conflict with a transactional
 * operation is detected.
*/
FAIL_FAST_NO_TX {
@Override
public LockMode getReadLockMode() {
return null;
}
@Override
public LockMode getWriteLockMode() {
return LockMode.EX;
}
@Override
public final void acquireLock(final ExclusiveSharedLockObject lockObj,
final LockMode mode, final int flags, final Object lockOwner,
final Object context, final AbstractOperationMessage msg)
throws ConflictException, LockTimeoutException {
acquireLockFailFast(lockObj, mode, flags, lockOwner, context, msg);
}
@Override
public final long getTimeout(final Object lockObj, final LockMode newMode,
final LockMode currentMode, final int flags, final long msecs) {
return getTimeoutFailFast(lockObj, newMode, currentMode, flags);
}
@Override
public final IsolationLevel getIsolationLevel() {
return IsolationLevel.NONE;
}
@Override
public final boolean isFailFast() {
return true;
}
@Override
public final Object lockForRead(final ExclusiveSharedLockObject lockObj,
LockMode mode, Object lockOwner, final Object context,
final int iContext, AbstractOperationMessage msg,
boolean allowTombstones, final ReadEntryUnderLock reader) {
// no locking to be done
return reader.readEntry(lockObj, context, iContext, allowTombstones);
}
@Override
public final boolean lockedForWrite(ExclusiveSharedLockObject lockObj,
Object owner, Object context) {
return lockObj.hasExclusiveLock(owner, context);
}
},
/**
 * Defines a snapshot locking policy, i.e. no locking. A write lock can be
 * added later to detect write-write conflicts. A read can start a
 * transaction. This is the default lock policy for transaction isolation
 * level NONE.
*/
SNAPSHOT {
@Override
public LockMode getReadLockMode() {
return null;
}
@Override
public LockMode getWriteLockMode() {
return null;
}
@Override
public LockMode getReadOnlyLockMode() {
return null;
}
@Override
public boolean readCanStartTX() {
return true;
}
@Override
public boolean readOnlyCanStartTX() {
return true;
}
@Override
public final void acquireLock(ExclusiveSharedLockObject lockObj,
LockMode mode, int flags, Object lockOwner, Object context,
AbstractOperationMessage msg) throws ConflictException,
LockTimeoutException {
// TODO: Suranjan Ideally no request should come in this mode.
// put an assert here!
// acquireLockFailFast(lockObj, mode, flags, lockOwner, context, msg);
}
@Override
public final long getTimeout(Object lockObj, LockMode newMode,
LockMode currentMode, int flags, final long msecs) {
return msecs;
}
@Override
public final IsolationLevel getIsolationLevel() {
return IsolationLevel.SNAPSHOT;
}
@Override
public final boolean isFailFast() {
return true;
}
@Override
public final Object lockForRead(final ExclusiveSharedLockObject lockObj,
LockMode mode, Object lockOwner, final Object context,
final int iContext, AbstractOperationMessage msg,
boolean allowTombstones, ReadEntryUnderLock reader) {
// TODO: Suranjan try to see if we can add versioning information here and read
return reader.readEntry(lockObj, context, iContext, allowTombstones);
}
@Override
public void releaseLock(ExclusiveSharedLockObject lockObj,
LockMode mode, Object lockOwner, boolean releaseAll, Object context) {
// no-op
}
},
;
/**
* Interface to be implemented by callers of {@link LockingPolicy#lockForRead}
* for reading the region entry under the read lock.
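 * A minimal implementation sketch; the value returned below is a placeholder,
 * a real reader would extract the entry value from the locked object:
 * <pre>{@code
 * ReadEntryUnderLock reader = new ReadEntryUnderLock() {
 *   public Object readEntry(ExclusiveSharedLockObject lockObj, Object context,
 *       int iContext, boolean allowTombstones) {
 *     // invoked while the read lock is held by lockForRead
 *     return lockObj;
 *   }
 * };
 * }</pre>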
*/
public interface ReadEntryUnderLock {
// all possible flags for iContext are below
public static final int DO_NOT_LOCK_ENTRY = 0x1;
public static final int DESER_UPDATE_STATS = 0x2;
public static final int DESER_DISABLE_COPY_ON_READ = 0x4;
public static final int DESER_PREFER_CD = 0x8;
/** always point below to the last in the list above */
public static final int LAST_FLAG = DESER_PREFER_CD;
/**
* Read the entry while the read lock is held by
* {@link LockingPolicy#lockForRead} and return the value.
*/
Object readEntry(ExclusiveSharedLockObject lockObj, Object context,
int iContext, boolean allowTombstones);
}
@SuppressWarnings("unused")
private static final class NullReader implements ReadEntryUnderLock {
static {
// check that iContext flags should not overlap with CONFLICT_WITH_EX
if (LAST_FLAG >= ExclusiveSharedSynchronizer.CONFLICT_WITH_EX) {
Assert.fail("unexpected LAST_FLAG=" + LAST_FLAG
+ ", CONFLICT_WITH_EX="
+ ExclusiveSharedSynchronizer.CONFLICT_WITH_EX);
}
}
/**
* @see ReadEntryUnderLock#readEntry(ExclusiveSharedLockObject, Object, int,
* boolean)
*/
public final Object readEntry(ExclusiveSharedLockObject lockObj,
Object context, int iContext, boolean allowTombstones) {
return null;
}
}
public static final ReadEntryUnderLock NULL_READER = new NullReader();
/**
* Indicates that the lock on object has been granted and is being held as the
* result of {@link #lockForRead}.
*/
public static final Object Locked = new Object();
/**
* Get the default {@link LockMode} to be used for a read operation.
*
* @return the default {@link LockMode} to be used for acquiring the lock
*/
public abstract LockMode getReadLockMode();
/**
* Get the default {@link LockMode} to be used for a write operation.
*
* @return the default {@link LockMode} to be used for acquiring the lock
*/
public abstract LockMode getWriteLockMode();
/**
* Get the default {@link LockMode} to be used for a read operation that will
 * disallow concurrent writers in every case, e.g. for GFXD foreign key checks.
*
* @return the default {@link LockMode} to be used for acquiring the lock
*/
public LockMode getReadOnlyLockMode() {
return LockMode.READ_ONLY;
}
/**
* Acquire the lock in given mode for the given object. The method can throw
* different exceptions on lock failure including {@link ConflictException} to
* indicate conflict detection mode, {@link LockTimeoutException} to indicate
* a timeout or deadlock.
*
* @param lockObj
* the object to be locked
* @param mode
* the <code>LockMode</code> to acquire the lock
* @param flags
* any additional flags to pass during locking
* @param lockOwner
* the owner of the lock; can be null
* @param context
* any context required to be passed to the
* {@link ExclusiveSharedLockObject#attemptLock} method that can be
* used by the particular locking implementation
* @param msg
* the {@link AbstractOperationMessage} invoking this method; can be
* null
*
* @throws ConflictException
* implementations can choose to throw a {@link ConflictException}
* to indicate a locking policy that fails eagerly detecting
* conflicts using the locks
* @throws LockTimeoutException
* if the lock acquisition has timed out
*/
public abstract void acquireLock(ExclusiveSharedLockObject lockObj,
LockMode mode, int flags, Object lockOwner, Object context,
AbstractOperationMessage msg) throws ConflictException,
LockTimeoutException;
/**
* Release the lock acquired in given mode by a previous call to
* {@link #acquireLock} for the given object. Implementations of
* {@link ExclusiveSharedLockObject} will typically throw an
* {@link IllegalMonitorStateException} if no lock was acquired previously or
* if the owner does not match.
*
* @param lockObj
* the object that is locked
* @param mode
* the <code>LockMode</code> to release the lock
* @param lockOwner
* the owner of the lock; can be null
* @param releaseAll
* release all the read/write locks on the object acquired by the
* <code>lockOwner</code>
* @param context
* any context required to be passed to the
* {@link ExclusiveSharedLockObject#releaseLock} method that can be
* used by the particular locking implementation
*/
public void releaseLock(ExclusiveSharedLockObject lockObj,
LockMode mode, Object lockOwner, boolean releaseAll, Object context)
throws IllegalMonitorStateException {
if (mode != null) {
LogWriter logger = null;
if (ExclusiveSharedSynchronizer.TRACE_LOCK_COMPACT) {
logger = GemFireCacheImpl.getExisting().getLogger();
if (TXStateProxy.LOG_FINEST) {
logger.info("LockingPolicy." + name() + ": releasing lock in mode "
+ mode + " on object: " + lockObj);
}
}
lockObj.releaseLock(mode, releaseAll, lockOwner, context);
if (logger != null) {
logger.info("LockingPolicy." + name()
+ ": released lock in mode " + mode + " on object: "
+ (TXStateProxy.LOG_FINEST ? lockObj : ArrayUtils.objectRefString(
lockObj) + "[lockState=0x" + Integer.toHexString(
lockObj.getState()) + ']'));
}
}
}
/**
* Get the timeout in millis that should be used for waiting in case lock is
* not immediately available.
*
* @param lockObj
* the object to be locked
* @param newMode
* the new mode for which the lock has been requested
* @param currentMode
* the current mode in which the lock is currently held
* @param flags
* any additional flags being sent to alter the locking behaviour
* @param msecs
* the base msecs value provided as lock timeout
*
* @return the lock timeout in millis; a negative value indicates infinite
* wait
*/
public abstract long getTimeout(Object lockObj, LockMode newMode,
LockMode currentMode, int flags, long msecs);
/**
* Returns the transaction's {@link IsolationLevel} corresponding to this
* {@link LockingPolicy}.
*/
public abstract IsolationLevel getIsolationLevel();
/**
 * Returns true if the read locks have to be held only momentarily (or not at
 * all) during the read and not for the entire duration of the transaction.
*/
public boolean zeroDurationReadLocks() {
return true;
}
/**
 * Returns true if the locking policy will require starting a TXState on the
 * remote node for a read operation requiring SH locks (typically when the
 * {@link IsolationLevel} for this policy is REPEATABLE_READ or higher).
*
* This would normally be !zeroDurationReadLocks() though that is strictly not
* a requirement.
*/
public boolean readCanStartTX() {
return false;
}
/**
 * Returns true if the locking policy will require starting a TXState on the
 * remote node for a read operation requiring READ_ONLY locks (i.e. FK checks),
 * typically when the {@link IsolationLevel} for this policy is REPEATABLE_READ
 * or higher.
*/
public boolean readOnlyCanStartTX() {
return false;
}
/**
* Returns true if the policy requires failing immediately with conflict.
*/
public abstract boolean isFailFast();
/**
* Lock the given object/entry for reading with this {@link LockingPolicy}.
*
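 * A sketch of a typical caller handling both possible results; {@code policy},
 * {@code entry}, {@code txId} and {@code region} are hypothetical placeholders:
 * <pre>{@code
 * Object result = policy.lockForRead(entry, policy.getReadLockMode(), txId,
 *     region, 0, null, false, LockingPolicy.NULL_READER);
 * if (result == LockingPolicy.Locked) {
 *   // the read lock is still held and must be released explicitly later
 *   policy.releaseLock(entry, policy.getReadLockMode(), txId, false, region);
 * }
 * }</pre>
 *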
 * @return {@link #Locked} if the read lock on the object was successfully
 *         acquired and is being held, or the result of
 *         {@link ReadEntryUnderLock#readEntry} if the read lock was
 *         successfully acquired and then immediately released after the
 *         object value was read while the lock was held.
*
* @throws ConflictException
* implementations can choose to throw a {@link ConflictException}
* to indicate a locking policy that fails eagerly detecting
* conflicts using the locks
* @throws LockTimeoutException
* if the lock acquisition has timed out
*/
public abstract Object lockForRead(ExclusiveSharedLockObject lockObj,
LockMode mode, Object lockOwner, Object context, int iContext,
AbstractOperationMessage msg, boolean allowTombstones,
ReadEntryUnderLock reader);
/**
* Returns true if the given lock object currently has a write lock (as
* returned by {@link #getWriteLockMode()}) held on it.
*/
public boolean lockedForWrite(final ExclusiveSharedLockObject lockObj,
final Object owner, final Object context) {
return lockObj.hasExclusiveSharedLock(owner, context);
}
/**
* Returns whether the commit processing requires two-phase commit or not.
*/
public boolean requiresTwoPhaseCommit(TXStateProxy proxy) {
// if we have events that need to be published, then need 2-phase commit
return proxy.isDirty() && proxy.getToBePublishedEvents() != null;
}
/**
* Common lock acquisition routine for the fail-fast modes.
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "IMSE_DONT_CATCH_IMSE",
justification = "LockingPolicy changes IMSE to ConflictException")
protected final void acquireLockFailFast(
final ExclusiveSharedLockObject lockObj, final LockMode mode,
final int flags, final Object lockOwner, final Object context,
final AbstractOperationMessage msg) throws ConflictException,
LockTimeoutException {
if (mode != null) {
LogWriter logger = null;
try {
if (ExclusiveSharedSynchronizer.TRACE_LOCK_COMPACT) {
logger = GemFireCacheImpl.getExisting().getLogger();
if (ExclusiveSharedSynchronizer.TRACE_LOCK) {
logger.info("LockingPolicy." + name() + ": acquiring lock in mode "
+ mode + " with flags=0x" + Integer.toHexString(flags)
+ " on object: " + (TXStateProxy.LOG_FINEST ? lockObj
: ArrayUtils.objectRefString(lockObj) + "[lockState=0x"
+ Integer.toHexString(lockObj.getState()) + ']'));
}
}
if (lockObj.attemptLock(mode, flags, this, 0, lockOwner, context)) {
if (logger != null) {
if (ExclusiveSharedSynchronizer.TRACE_LOCK) {
logger.info("LockingPolicy." + name()
+ ": acquired lock in mode " + mode
+ " with flags=0x" + Integer.toHexString(flags)
+ " on object: " + (TXStateProxy.LOG_FINEST ? lockObj
: ArrayUtils.objectRefString(lockObj) + "[lockState=0x"
+ Integer.toHexString(lockObj.getState()) + ']'));
}
else {
logger.info("LockingPolicy." + name()
+ ": acquired lock in mode " + mode + " on object: "
+ ArrayUtils.objectRefString(lockObj) + "[lockState=0x"
+ Integer.toHexString(lockObj.getState()) + ']');
}
}
return;
}
if (logger != null) {
logger.info("LockingPolicy." + name()
+ ": throwing ConflictException for lock in mode " + mode
+ " with flags=0x" + Integer.toHexString(flags) + " on object: "
+ lockObj);
}
throw new ConflictException(
LocalizedStrings.TX_CONFLICT_ON_OBJECT
.toLocalizedString(getLockObjectString(msg, lockObj, mode,
context, lockOwner), mode));
} catch (IllegalMonitorStateException imse) {
if (logger != null) {
logger.info("LockingPolicy." + name()
+ ": throwing ConflictException for IllegalMonitorStateException"
+ " for lock in mode " + mode + " with flags=0x"
+ Integer.toHexString(flags) + " on object: " + lockObj);
}
throw new ConflictException(
LocalizedStrings.TX_CONFLICT_LOCK_ILLEGAL.toLocalizedString(
getLockObjectString(msg, lockObj, mode, context, lockOwner),
mode.toString(), imse.getLocalizedMessage()), imse);
}
}
}
/**
* Common lock timeout routine for the fail-fast modes.
*/
protected final long getTimeoutFailFast(final Object lockObj,
final LockMode requestedMode, final LockMode currentMode,
final int flags) {
if (TXStateProxy.LOG_FINEST) {
final LogWriter logger = GemFireCacheImpl.getExisting().getLogger();
if (logger.infoEnabled()) {
logger.info("LockingPolicy." + name() + ": found existing lockMode "
+ currentMode + " on object: " + lockObj);
}
}
// Here the policy is that all locks wait for EX lock indefinitely since it
// is assumed to be taken only for short duration, while other read-write
// and write-write combinations will have timeout of zero i.e. will throw a
// conflict immediately. Similarly EX lock will wait for SH lock
// indefinitely since SH locks are assumed to be acquired for very short
// durations only.
switch (currentMode) {
case EX:
// assuming EX_SH is always upgraded to EX so this can never happen
assert requestedMode != LockMode.EX: "unexpected requestedMode EX "
+ "with currentMode EX in getTimeout for " + lockObj;
// wait before failing since EX is assumed to be released after sometime
final long exReadTimeout = Math.max(
ExclusiveSharedSynchronizer.DEFAULT_READ_TIMEOUT,
ExclusiveSharedSynchronizer.READ_LOCK_TIMEOUT * 2);
// wait for EX to be released during commit when acquiring SH lock
// but don't wait a whole lot for update
return (requestedMode == LockMode.SH
&& ((flags & ExclusiveSharedSynchronizer.FOR_UPDATE) != 0)
? exReadTimeout : Math.min(exReadTimeout * 10,
ExclusiveSharedSynchronizer.LOCK_MAX_TIMEOUT));
case EX_SH:
if (requestedMode == LockMode.SH) {
// SH should always be allowed with EX_SH so keep retrying
return ExclusiveSharedSynchronizer.LOCK_MAX_TIMEOUT;
}
else if (requestedMode == LockMode.READ_ONLY) {
// wait a bit for READ_ONLY
return ExclusiveSharedSynchronizer.READ_LOCK_TIMEOUT;
}
else {
// conflict immediately for EX_SH
return ExclusiveSharedSynchronizer.WRITE_LOCK_TIMEOUT;
}
case SH:
if (requestedMode == LockMode.EX) {
// wait indefinitely for EX locks since commit processing is expected
// to be short and bounded; similarly for SH locks for EX lock request
// for SH lock requesting with existing EX lock now conflicting after
// some period otherwise it can deadlock now with multiple SH locks
// being acquired by bulk table scan (t1 => SH on k1, SH on k2;
// t2 => EX on k2, EX on k1)
// [sumedh] cannot fail with conflict here since it is single-phase
// commit for RC which cannot fail during commit
//return currentMode == LockMode.EX && requestedMode == LockMode.SH
// ? ExclusiveSharedSynchronizer.READ_LOCK_TIMEOUT : -1;
return -1;
}
else if (requestedMode == LockMode.EX_SH) {
// wait for SH => EX_SH upgrade for sometime and then fail, else it
// can get stuck indefinitely if two or more threads are trying to
// do the same (#49341, #46121 etc)
return ExclusiveSharedSynchronizer.READ_LOCK_TIMEOUT;
}
else {
// all other locks are allowed with SH so keep retrying
return ExclusiveSharedSynchronizer.LOCK_MAX_TIMEOUT;
}
default:
assert currentMode == LockMode.READ_ONLY: "unexpected currentMode="
+ currentMode + " in getTimeout for " + lockObj;
switch (requestedMode) {
case EX:
// [sumedh] cannot fail with conflict here since it is single-phase
// commit for RC which cannot fail during commit
return -1;
case EX_SH:
// this should fail immediately assuming that they are from
// different txns (TXState level will handle for same TX case)
return ExclusiveSharedSynchronizer.WRITE_LOCK_TIMEOUT;
default:
// SH and READ_ONLY are allowed with READ_ONLY so keep retrying
return ExclusiveSharedSynchronizer.LOCK_MAX_TIMEOUT;
}
}
}
/**
* Get a string representation of the locking object with context for logging.
*/
protected final String getLockObjectString(
final AbstractOperationMessage msg,
final ExclusiveSharedLockObject lockObj, final LockMode lockMode,
final Object context, final Object forOwner) {
if (msg == null) {
return lockObj + "; owner: "
+ getLockOwnerForConflicts(lockObj, lockMode, context, forOwner)
+ "; forOwner: " + forOwner + "; context: " + context;
}
return msg.getConflictObjectString(lockObj, lockMode, context, forOwner)
+ "] while processing message [" + msg.toString();
}
/**
* This will return the lock owner by doing a possibly expensive search among
* all active transactions etc. Should only be used for logging or exception
* strings and never in regular code.
*/
public static final Object getLockOwnerForConflicts(
final ExclusiveSharedLockObject lockObj, final LockMode lockMode,
final Object context, final Object forOwner) {
final Object owner = lockObj.getOwnerId(context);
if (owner == null && lockObj instanceof RegionEntry) {
return TXManagerImpl.searchLockOwner((RegionEntry)lockObj, lockMode,
context, forOwner);
}
return owner;
}
/**
* Cache the array of all enumeration values for this enum.
*/
static final LockingPolicy[] values = values();
/**
* Mapping of {@link IsolationLevel} ordinal to corresponding fail-fast
* transaction {@link LockingPolicy}.
*/
private static final LockingPolicy[] failFastPolicies;
/**
* Mapping of {@link IsolationLevel} ordinal to corresponding waiting mode
* transaction {@link LockingPolicy}.
*/
private static final LockingPolicy[] waitingModePolicies;
static {
int maxFF = -1, maxWM = -1;
for (final LockingPolicy policy : values) {
final int isolationOrdinal = policy.getIsolationLevel().ordinal();
if (policy.isFailFast()) {
if (isolationOrdinal > maxFF) {
maxFF = isolationOrdinal;
}
}
else {
if (isolationOrdinal > maxWM) {
maxWM = isolationOrdinal;
}
}
}
failFastPolicies = new LockingPolicy[maxFF + 1];
waitingModePolicies = new LockingPolicy[maxWM + 1];
for (final LockingPolicy policy : values) {
final int isolationOrdinal = policy.getIsolationLevel().ordinal();
if (policy.isFailFast()) {
failFastPolicies[isolationOrdinal] = policy;
}
else {
waitingModePolicies[isolationOrdinal] = policy;
}
}
}
/**
* Get the {@link LockingPolicy} to use given the {@link IsolationLevel} and a
* boolean indicating whether waiting mode is to be used, or default fail-fast
* mode has to be used.
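 * For example, based on the isolation levels declared by the policies above:
 * <pre>{@code
 * // fail-fast conflict detection for READ_COMMITTED maps to FAIL_FAST_TX
 * LockingPolicy policy =
 *     LockingPolicy.fromIsolationLevel(IsolationLevel.READ_COMMITTED, false);
 * }</pre>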
*/
public static final LockingPolicy fromIsolationLevel(
final IsolationLevel isolationLevel, final boolean waitMode) {
final int isolationOrdinal = isolationLevel.ordinal();
final LockingPolicy policy;
if (waitMode) {
if (isolationOrdinal < waitingModePolicies.length
&& (policy = waitingModePolicies[isolationOrdinal]) != null) {
return policy;
}
throw new UnsupportedOperationException("Unimplemented transaction "
+ "isolation level for waiting mode: " + isolationLevel);
}
else {
if (isolationOrdinal < failFastPolicies.length
&& (policy = failFastPolicies[isolationOrdinal]) != null) {
return policy;
}
throw new UnsupportedOperationException("Unimplemented transaction "
+ "isolation level for fail-fast conflict detection mode: "
+ isolationLevel);
}
}
/**
* Get a {@link LockingPolicy} for given ordinal value ranging from 0 to
* (number of enum values - 1).
*/
public static final LockingPolicy fromOrdinal(final int ordinal) {
return values[ordinal];
}
/**
* Get the number of enumeration values defined for this enum.
*/
public static final int size() {
return values.length;
}
}
|
|
package net.sf.jabref.model.metadata;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import net.sf.jabref.model.bibtexkeypattern.AbstractBibtexKeyPattern;
import net.sf.jabref.model.bibtexkeypattern.DatabaseBibtexKeyPattern;
import net.sf.jabref.model.bibtexkeypattern.GlobalBibtexKeyPattern;
import net.sf.jabref.model.database.BibDatabaseMode;
import net.sf.jabref.model.entry.FieldName;
import net.sf.jabref.model.groups.GroupTreeNode;
import net.sf.jabref.model.groups.event.GroupUpdatedEvent;
import net.sf.jabref.model.metadata.event.MetaDataChangedEvent;
import com.google.common.eventbus.EventBus;
public class MetaData implements Iterable<String> {
public static final String META_FLAG = "jabref-meta: ";
private static final String SAVE_ORDER_CONFIG = "saveOrderConfig";
public static final String SAVE_ACTIONS = "saveActions";
private static final String PREFIX_KEYPATTERN = "keypattern_";
private static final String KEYPATTERNDEFAULT = "keypatterndefault";
private static final String DATABASE_TYPE = "databaseType";
public static final String GROUPSTREE = "groupstree";
private static final String FILE_DIRECTORY = FieldName.FILE + FileDirectoryPreferences.DIR_SUFFIX;
private static final String PROTECTED_FLAG_META = "protectedFlag";
public static final char ESCAPE_CHARACTER = '\\';
public static final char SEPARATOR_CHARACTER = ';';
public static final String SEPARATOR_STRING = String.valueOf(SEPARATOR_CHARACTER);
private final Map<String, List<String>> metaData = new HashMap<>();
private GroupTreeNode groupsRoot;
private final EventBus eventBus = new EventBus();
private AbstractBibtexKeyPattern bibtexKeyPattern;
private Charset encoding;
/**
 * The MetaData object stores all meta data sets as lists of strings keyed by
 * name. To ensure that the data is written correctly to string, the user of a
 * meta data list must simply make sure the appropriate changes are reflected
 * in the list it has been passed.
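 * A minimal usage sketch (the key and value below are illustrative only):
 * <pre>{@code
 * MetaData metaData = new MetaData();
 * metaData.putData("exampleKey", Collections.singletonList("exampleValue"));
 * List<String> stored = metaData.getData("exampleKey"); // ["exampleValue"]
 * }</pre>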
*/
public MetaData(Map<String, List<String>> parsedData) {
Objects.requireNonNull(parsedData);
clearMetaData();
metaData.putAll(parsedData);
}
/**
* The MetaData object can be constructed with no data in it.
*/
public MetaData() {
// Do nothing
}
public MetaData(Charset encoding) {
this.encoding = encoding;
}
public void setParsedData(Map<String, List<String>> parsedMetaData) {
clearMetaData();
metaData.putAll(parsedMetaData);
}
public Optional<SaveOrderConfig> getSaveOrderConfig() {
List<String> storedSaveOrderConfig = getData(SAVE_ORDER_CONFIG);
if (storedSaveOrderConfig != null) {
return Optional.of(SaveOrderConfig.parse(storedSaveOrderConfig));
}
return Optional.empty();
}
/**
* @return Iterator on all keys stored in the metadata
*/
@Override
public Iterator<String> iterator() {
return metaData.keySet().iterator();
}
/**
* Retrieves the stored meta data.
*
* @param key the key to look up
* @return null if no data is found
*/
public List<String> getData(String key) {
return metaData.get(key);
}
/**
* Removes the given key from metadata.
* Nothing is done if key is not found.
*
* @param key the key to remove
*/
public void remove(String key) {
if (metaData.containsKey(key)) { //otherwise redundant and disturbing events are going to be posted
metaData.remove(key);
postChange();
}
}
/**
* Stores the specified data in this object, using the specified key. For
* certain keys (e.g. "groupstree"), the objects in orderedData are
* reconstructed from their textual (String) representation if they are of
* type String, and stored as an actual instance.
*/
public void putData(String key, List<String> orderedData) {
metaData.put(key, orderedData);
postChange();
}
public Optional<GroupTreeNode> getGroups() {
return Optional.ofNullable(groupsRoot);
}
/**
* Sets a new group root node. <b>WARNING </b>: This invalidates everything
* returned by getGroups() so far!!!
*/
public void setGroups(GroupTreeNode root) {
groupsRoot = root;
eventBus.post(new GroupUpdatedEvent(this));
}
/**
 * @return the stored key patterns
*/
public AbstractBibtexKeyPattern getBibtexKeyPattern(GlobalBibtexKeyPattern globalPattern) {
if (bibtexKeyPattern != null) {
return bibtexKeyPattern;
}
bibtexKeyPattern = new DatabaseBibtexKeyPattern(globalPattern);
// read the data from the metadata and store it into the bibtexKeyPattern
for (String key : this) {
if (key.startsWith(PREFIX_KEYPATTERN)) {
List<String> value = getData(key);
String type = key.substring(PREFIX_KEYPATTERN.length());
bibtexKeyPattern.addBibtexKeyPattern(type, value.get(0));
}
}
List<String> defaultPattern = getData(KEYPATTERNDEFAULT);
if (defaultPattern != null) {
bibtexKeyPattern.setDefaultValue(defaultPattern.get(0));
}
return bibtexKeyPattern;
}
/**
* Updates the stored key patterns to the given key patterns.
*
 * @param bibtexKeyPattern the key patterns to update to. <br />
 * A reference to this object is stored internally and is returned by getBibtexKeyPattern().
*/
public void setBibtexKeyPattern(AbstractBibtexKeyPattern bibtexKeyPattern) {
// remove all keypatterns from metadata
Iterator<String> iterator = this.iterator();
while (iterator.hasNext()) {
String key = iterator.next();
if (key.startsWith(PREFIX_KEYPATTERN)) {
iterator.remove();
}
}
// set new value if it is not a default value
Set<String> allKeys = bibtexKeyPattern.getAllKeys();
for (String key : allKeys) {
if (!bibtexKeyPattern.isDefaultValue(key)) {
List<String> data = new ArrayList<>();
data.add(bibtexKeyPattern.getValue(key).get(0));
String metaDataKey = PREFIX_KEYPATTERN + key;
this.putData(metaDataKey, data);
}
}
// store default pattern
if (bibtexKeyPattern.getDefaultValue() == null) {
this.remove(KEYPATTERNDEFAULT);
} else {
List<String> data = new ArrayList<>();
data.add(bibtexKeyPattern.getDefaultValue().get(0));
this.putData(KEYPATTERNDEFAULT, data);
}
this.bibtexKeyPattern = bibtexKeyPattern;
}
public Optional<List<String>> getSaveActions() {
return Optional.ofNullable(getData(SAVE_ACTIONS));
}
public Optional<BibDatabaseMode> getMode() {
List<String> data = getData(DATABASE_TYPE);
if ((data == null) || data.isEmpty()) {
return Optional.empty();
}
return Optional.of(BibDatabaseMode.parse(data.get(0)));
}
public boolean isProtected() {
List<String> data = getData(PROTECTED_FLAG_META);
if ((data == null) || data.isEmpty()) {
return false;
} else {
return Boolean.parseBoolean(data.get(0));
}
}
public Optional<String> getDefaultFileDirectory() {
List<String> fileDirectory = getData(FILE_DIRECTORY);
if ((fileDirectory == null) || fileDirectory.isEmpty()) {
return Optional.empty();
} else {
return Optional.of(fileDirectory.get(0).trim());
}
}
public Optional<String> getUserFileDirectory(String user) {
List<String> fileDirectory = getData(FILE_DIRECTORY + '-' + user);
if ((fileDirectory == null) || fileDirectory.isEmpty()) {
return Optional.empty();
} else {
return Optional.of(fileDirectory.get(0).trim());
}
}
public Map<String, List<String>> getMetaData() {
return new HashMap<>(metaData);
}
public void setSaveActions(List<String> actionsSerialized) {
putData(SAVE_ACTIONS, actionsSerialized);
}
public void setSaveOrderConfig(SaveOrderConfig saveOrderConfig) {
List<String> serialized = saveOrderConfig.getAsStringList();
putData(SAVE_ORDER_CONFIG, serialized);
}
public void setMode(BibDatabaseMode mode) {
putData(DATABASE_TYPE, Collections.singletonList(mode.getAsString()));
}
public void markAsProtected() {
putData(PROTECTED_FLAG_META, Collections.singletonList("true"));
}
public void setDefaultFileDirectory(String path) {
putData(FILE_DIRECTORY, Collections.singletonList(path));
}
public void clearDefaultFileDirectory() {
remove(FILE_DIRECTORY);
}
public void setUserFileDirectory(String user, String path) {
putData(FILE_DIRECTORY + '-' + user, Collections.singletonList(path.trim()));
}
public void clearUserFileDirectory(String user) {
remove(FILE_DIRECTORY + '-' + user);
}
public void markAsNotProtected() {
remove(PROTECTED_FLAG_META);
}
public void clearSaveActions() {
remove(SAVE_ACTIONS);
}
public void clearSaveOrderConfig() {
remove(SAVE_ORDER_CONFIG);
}
/**
* Posts a new {@link MetaDataChangedEvent} on the {@link EventBus}.
*/
public void postChange() {
eventBus.post(new MetaDataChangedEvent(this));
}
public void postGroupChange() {
eventBus.post(new MetaDataChangedEvent(this));
}
/**
* Returns the encoding used during parsing.
*/
public Optional<Charset> getEncoding() {
return Optional.ofNullable(encoding);
}
public void setEncoding(Charset encoding) {
this.encoding = Objects.requireNonNull(encoding);
}
public void clearMetaData() {
metaData.clear();
}
public void registerListener(Object listener) {
this.eventBus.register(listener);
}
public void unregisterListener(Object listener) {
this.eventBus.unregister(listener);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
MetaData strings = (MetaData) o;
return Objects.equals(metaData, strings.metaData) && Objects.equals(groupsRoot, strings.groupsRoot) && Objects
.equals(bibtexKeyPattern, strings.bibtexKeyPattern) && Objects.equals(encoding, strings.encoding);
}
@Override
public int hashCode() {
return Objects.hash(metaData, groupsRoot, bibtexKeyPattern, encoding);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.nifi.processors.solr;
import com.google.gson.stream.JsonWriter;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.AttributeExpression;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.schema.access.SchemaNotFoundException;
import org.apache.nifi.serialization.RecordSetWriter;
import org.apache.nifi.serialization.RecordSetWriterFactory;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.serialization.record.RecordSet;
import org.apache.nifi.util.StopWatch;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.FieldStatsInfo;
import org.apache.solr.client.solrj.response.IntervalFacet;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.RangeFacet;
import org.apache.solr.client.solrj.response.RangeFacet.Count;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.MultiMapSolrParams;
import org.apache.solr.common.params.StatsParams;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import static org.apache.nifi.processors.solr.SolrUtils.KERBEROS_CREDENTIALS_SERVICE;
import static org.apache.nifi.processors.solr.SolrUtils.KERBEROS_PASSWORD;
import static org.apache.nifi.processors.solr.SolrUtils.KERBEROS_PRINCIPAL;
import static org.apache.nifi.processors.solr.SolrUtils.KERBEROS_USER_SERVICE;
import static org.apache.nifi.processors.solr.SolrUtils.SOLR_TYPE;
import static org.apache.nifi.processors.solr.SolrUtils.COLLECTION;
import static org.apache.nifi.processors.solr.SolrUtils.SOLR_TYPE_CLOUD;
import static org.apache.nifi.processors.solr.SolrUtils.SSL_CONTEXT_SERVICE;
import static org.apache.nifi.processors.solr.SolrUtils.SOLR_SOCKET_TIMEOUT;
import static org.apache.nifi.processors.solr.SolrUtils.SOLR_CONNECTION_TIMEOUT;
import static org.apache.nifi.processors.solr.SolrUtils.SOLR_MAX_CONNECTIONS;
import static org.apache.nifi.processors.solr.SolrUtils.SOLR_MAX_CONNECTIONS_PER_HOST;
import static org.apache.nifi.processors.solr.SolrUtils.ZK_CLIENT_TIMEOUT;
import static org.apache.nifi.processors.solr.SolrUtils.ZK_CONNECTION_TIMEOUT;
import static org.apache.nifi.processors.solr.SolrUtils.SOLR_LOCATION;
import static org.apache.nifi.processors.solr.SolrUtils.BASIC_USERNAME;
import static org.apache.nifi.processors.solr.SolrUtils.BASIC_PASSWORD;
import static org.apache.nifi.processors.solr.SolrUtils.RECORD_WRITER;
@Tags({"Apache", "Solr", "Get", "Query", "Records"})
@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED)
@CapabilityDescription("Queries Solr and outputs the results as a FlowFile in the format of XML or using a Record Writer")
@WritesAttributes({
@WritesAttribute(attribute = "solr.connect", description = "Solr connect string"),
@WritesAttribute(attribute = "solr.collection", description = "Solr collection"),
@WritesAttribute(attribute = "solr.query", description = "Query string sent to Solr"),
@WritesAttribute(attribute = "solr.cursor.mark", description = "Cursor mark can be used for scrolling Solr"),
@WritesAttribute(attribute = "solr.status.code", description = "Status code of Solr request. A status code of 0 indicates that the request was successfully processed"),
@WritesAttribute(attribute = "solr.query.time", description = "The elapsed time to process the query (in ms)"),
@WritesAttribute(attribute = "solr.start", description = "Solr start parameter (result offset) for the query"),
@WritesAttribute(attribute = "solr.rows", description = "Number of Solr documents to be returned for the query"),
@WritesAttribute(attribute = "solr.number.results", description = "Number of Solr documents that match the query"),
@WritesAttribute(attribute = "mime.type", description = "The mime type of the data format"),
@WritesAttribute(attribute = "querysolr.exeption.class", description = "The Java exception class raised when the processor fails"),
@WritesAttribute(attribute = "querysolr.exeption.message", description = "The Java exception message raised when the processor fails")
})
public class QuerySolr extends SolrProcessor {
public static final AllowableValue MODE_XML = new AllowableValue("XML");
public static final AllowableValue MODE_REC = new AllowableValue("Records");
public static final AllowableValue RETURN_TOP_RESULTS = new AllowableValue("return_only_top_results", "Only top results");
public static final AllowableValue RETURN_ALL_RESULTS = new AllowableValue("return_all_results", "Entire results");
public static final String MIME_TYPE_JSON = "application/json";
public static final String MIME_TYPE_XML = "application/xml";
public static final String ATTRIBUTE_SOLR_CONNECT = "solr.connect";
public static final String ATTRIBUTE_SOLR_COLLECTION = "solr.collection";
public static final String ATTRIBUTE_SOLR_QUERY = "solr.query";
public static final String ATTRIBUTE_CURSOR_MARK = "solr.cursor.mark";
public static final String ATTRIBUTE_SOLR_STATUS = "solr.status.code";
public static final String ATTRIBUTE_SOLR_START = "solr.start";
public static final String ATTRIBUTE_SOLR_ROWS = "solr.rows";
public static final String ATTRIBUTE_SOLR_NUMBER_RESULTS = "solr.number.results";
public static final String ATTRIBUTE_QUERY_TIME = "solr.query.time";
public static final String EXCEPTION = "querysolr.exeption";
public static final String EXCEPTION_MESSAGE = "querysolr.exeption.message";
public static final Integer UPPER_LIMIT_START_PARAM = 10000;
public static final PropertyDescriptor RETURN_TYPE = new PropertyDescriptor
.Builder().name("return_type")
.displayName("Return Type")
.description("Output format of Solr results. Write Solr documents to FlowFiles as XML or using a Record Writer")
.required(true)
.allowableValues(MODE_XML, MODE_REC)
.defaultValue(MODE_XML.getValue())
.build();
public static final PropertyDescriptor SOLR_PARAM_QUERY = new PropertyDescriptor
.Builder().name("solr_param_query")
.displayName("Solr Query")
.description("Solr Query, e. g. field:value")
.required(true)
.addValidator(StandardValidators.createAttributeExpressionLanguageValidator(AttributeExpression.ResultType.STRING))
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.defaultValue("*:*")
.build();
public static final PropertyDescriptor SOLR_PARAM_REQUEST_HANDLER = new PropertyDescriptor
.Builder().name("solr_param_request_handler")
.displayName("Request Handler")
.description("Define a request handler here, e. g. /query")
.required(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.defaultValue("/select")
.build();
public static final PropertyDescriptor SOLR_PARAM_FIELD_LIST = new PropertyDescriptor
.Builder().name("solr_param_field_list")
.displayName("Field List")
.description("Comma separated list of fields to be included into results, e. g. field1,field2")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();
public static final PropertyDescriptor SOLR_PARAM_SORT = new PropertyDescriptor
.Builder().name("solr_param_sort")
.displayName("Sorting of result list")
.description("Comma separated sort clauses to define the sorting of results, e. g. field1 asc, field2 desc")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();
public static final PropertyDescriptor SOLR_PARAM_START = new PropertyDescriptor
.Builder().name("solr_param_start")
.displayName("Start of results")
.description("Offset of result set")
.required(false)
.addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();
public static final PropertyDescriptor SOLR_PARAM_ROWS = new PropertyDescriptor
.Builder().name("solr_param_rows")
.displayName("Rows")
.description("Number of results to be returned for a single request")
.required(false)
.addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();
public static final PropertyDescriptor AMOUNT_DOCUMENTS_TO_RETURN = new PropertyDescriptor
.Builder().name("amount_documents_to_return")
.displayName("Total amount of returned results")
.description("Total amount of Solr documents to be returned. If this property is set to \"Only top results\", " +
"only single requests will be sent to Solr and the results will be written into single FlowFiles. If it is set to " +
"\"Entire results\", all results matching to the query are retrieved via multiple Solr requests and " +
"returned in multiple FlowFiles. For both options, the number of Solr documents to be returned in a FlowFile depends on " +
"the configuration of the \"Rows\" property")
.required(true)
.allowableValues(RETURN_ALL_RESULTS, RETURN_TOP_RESULTS)
.defaultValue(RETURN_TOP_RESULTS.getValue())
.build();
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
return new PropertyDescriptor.Builder()
.description("Specifies the value to send for the '" + propertyDescriptorName + "' Solr parameter")
.name(propertyDescriptorName)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.dynamic(true)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();
}
public static final Relationship RESULTS = new Relationship.Builder().name("results")
.description("Results of Solr queries").build();
public static final Relationship FACETS = new Relationship.Builder().name("facets")
.description("Results of faceted search").build();
public static final Relationship STATS = new Relationship.Builder().name("stats")
.description("Stats about Solr index").build();
public static final Relationship ORIGINAL = new Relationship.Builder().name("original")
.description("Original flowfile").build();
public static final Relationship FAILURE = new Relationship.Builder().name("failure")
.description("Failure relationship").build();
private Set<Relationship> relationships;
private List<PropertyDescriptor> descriptors;
@Override
public Set<Relationship> getRelationships() {
return this.relationships;
}
@Override
public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return this.descriptors;
}
@Override
protected void init(final ProcessorInitializationContext context) {
super.init(context);
final List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(SOLR_TYPE);
descriptors.add(SOLR_LOCATION);
descriptors.add(COLLECTION);
descriptors.add(RETURN_TYPE);
descriptors.add(RECORD_WRITER);
descriptors.add(SOLR_PARAM_QUERY);
descriptors.add(SOLR_PARAM_REQUEST_HANDLER);
descriptors.add(SOLR_PARAM_FIELD_LIST);
descriptors.add(SOLR_PARAM_SORT);
descriptors.add(SOLR_PARAM_START);
descriptors.add(SOLR_PARAM_ROWS);
descriptors.add(AMOUNT_DOCUMENTS_TO_RETURN);
descriptors.add(KERBEROS_CREDENTIALS_SERVICE);
descriptors.add(KERBEROS_USER_SERVICE);
descriptors.add(KERBEROS_PRINCIPAL);
descriptors.add(KERBEROS_PASSWORD);
descriptors.add(BASIC_USERNAME);
descriptors.add(BASIC_PASSWORD);
descriptors.add(SSL_CONTEXT_SERVICE);
descriptors.add(SOLR_SOCKET_TIMEOUT);
descriptors.add(SOLR_CONNECTION_TIMEOUT);
descriptors.add(SOLR_MAX_CONNECTIONS);
descriptors.add(SOLR_MAX_CONNECTIONS_PER_HOST);
descriptors.add(ZK_CLIENT_TIMEOUT);
descriptors.add(ZK_CONNECTION_TIMEOUT);
this.descriptors = Collections.unmodifiableList(descriptors);
final Set<Relationship> relationships = new HashSet<>();
relationships.add(FAILURE);
relationships.add(RESULTS);
relationships.add(FACETS);
relationships.add(STATS);
relationships.add(ORIGINAL);
this.relationships = Collections.unmodifiableSet(relationships);
}
public static final Set<String> SUPPORTED_SEARCH_COMPONENTS = new HashSet<>();
static {
SUPPORTED_SEARCH_COMPONENTS.addAll(Arrays.asList(StatsParams.STATS, FacetParams.FACET));
}
public static final Set<String> SEARCH_COMPONENTS_ON = new HashSet<>();
static {
SEARCH_COMPONENTS_ON.addAll(Arrays.asList("true", "on", "yes"));
}
@Override
protected final Collection<ValidationResult> additionalCustomValidation(ValidationContext context) {
final Collection<ValidationResult> problems = new ArrayList<>();
if (context.getProperty(RETURN_TYPE).evaluateAttributeExpressions().getValue().equals(MODE_REC.getValue())
&& !context.getProperty(RECORD_WRITER).isSet()) {
problems.add(new ValidationResult.Builder()
.explanation("for writing records a record writer has to be configured")
.valid(false)
.subject("Record writer check")
.build());
}
return problems;
}
@Override
public void doOnTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
final ComponentLog logger = getLogger();
FlowFile flowFileOriginal = session.get();
FlowFile flowFileResponse;
if (flowFileOriginal == null) {
if (context.hasNonLoopConnection()) {
return;
}
flowFileResponse = session.create();
} else {
flowFileResponse = session.create(flowFileOriginal);
}
final SolrQuery solrQuery = new SolrQuery();
final boolean isSolrCloud = SOLR_TYPE_CLOUD.equals(context.getProperty(SOLR_TYPE).getValue());
final String collection = context.getProperty(COLLECTION).evaluateAttributeExpressions(flowFileResponse).getValue();
final StringBuilder transitUri = new StringBuilder("solr://");
transitUri.append(getSolrLocation());
if (isSolrCloud) {
transitUri.append(":").append(collection);
}
final StopWatch timer = new StopWatch(false);
try {
solrQuery.setQuery(context.getProperty(SOLR_PARAM_QUERY).evaluateAttributeExpressions(flowFileResponse).getValue());
solrQuery.setRequestHandler(context.getProperty(SOLR_PARAM_REQUEST_HANDLER).evaluateAttributeExpressions(flowFileResponse).getValue());
if (context.getProperty(SOLR_PARAM_FIELD_LIST).isSet()) {
for (final String field : context.getProperty(SOLR_PARAM_FIELD_LIST).evaluateAttributeExpressions(flowFileResponse).getValue()
.split(",")) {
solrQuery.addField(field.trim());
}
}
// Avoid ArrayIndexOutOfBoundsException due to incorrectly configured sorting
try {
if (context.getProperty(SOLR_PARAM_SORT).isSet()) {
final List<SolrQuery.SortClause> sortings = new ArrayList<>();
for (final String sorting : context.getProperty(SOLR_PARAM_SORT).evaluateAttributeExpressions(flowFileResponse).getValue()
.split(",")) {
final String[] sortEntry = sorting.trim().split(" ");
sortings.add(new SolrQuery.SortClause(sortEntry[0], sortEntry[1]));
}
solrQuery.setSorts(sortings);
}
} catch (Exception e) {
throw new ProcessException("Error while parsing the sort clauses for the Solr query");
}
final Integer startParam = context.getProperty(SOLR_PARAM_START).isSet() ? Integer.parseInt(
context.getProperty(SOLR_PARAM_START).evaluateAttributeExpressions(flowFileResponse).getValue()) : CommonParams.START_DEFAULT;
solrQuery.setStart(startParam);
final Integer rowParam = context.getProperty(SOLR_PARAM_ROWS).isSet() ? Integer.parseInt(
context.getProperty(SOLR_PARAM_ROWS).evaluateAttributeExpressions(flowFileResponse).getValue()) : CommonParams.ROWS_DEFAULT;
solrQuery.setRows(rowParam);
final Map<String,String[]> additionalSolrParams = SolrUtils.getRequestParams(context, flowFileResponse);
final Set<String> searchComponents = extractSearchComponents(additionalSolrParams);
solrQuery.add(new MultiMapSolrParams(additionalSolrParams));
final Map<String,String> attributes = new HashMap<>();
attributes.put(ATTRIBUTE_SOLR_CONNECT, getSolrLocation());
if (isSolrCloud) {
attributes.put(ATTRIBUTE_SOLR_COLLECTION, collection);
}
attributes.put(ATTRIBUTE_SOLR_QUERY, solrQuery.toString());
if (flowFileOriginal != null) {
flowFileOriginal = session.putAllAttributes(flowFileOriginal, attributes);
}
flowFileResponse = session.putAllAttributes(flowFileResponse, attributes);
final boolean getEntireResults = RETURN_ALL_RESULTS.equals(context.getProperty(AMOUNT_DOCUMENTS_TO_RETURN).getValue());
boolean processFacetsAndStats = true;
boolean continuePaging = true;
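// Issue the Solr request and, if "Entire results" is configured, keep paging until all matching documents have been retrieved.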
while (continuePaging){
timer.start();
Map<String,String> responseAttributes = new HashMap<>();
responseAttributes.put(ATTRIBUTE_SOLR_START, solrQuery.getStart().toString());
responseAttributes.put(ATTRIBUTE_SOLR_ROWS, solrQuery.getRows().toString());
if (solrQuery.getStart() > UPPER_LIMIT_START_PARAM) {
logger.warn("The start parameter of Solr query {} exceeded the upper limit of {}. The query will not be processed " +
"to avoid performance or memory issues on the part of Solr.", new Object[]{solrQuery.toString(), UPPER_LIMIT_START_PARAM});
flowFileResponse = session.putAllAttributes(flowFileResponse, responseAttributes);
timer.stop();
break;
}
final QueryRequest req = new QueryRequest(solrQuery);
if (isBasicAuthEnabled()) {
req.setBasicAuthCredentials(getUsername(), getPassword());
}
final QueryResponse response = req.process(getSolrClient());
timer.stop();
final Long totalNumberOfResults = response.getResults().getNumFound();
responseAttributes.put(ATTRIBUTE_SOLR_NUMBER_RESULTS, totalNumberOfResults.toString());
responseAttributes.put(ATTRIBUTE_CURSOR_MARK, response.getNextCursorMark());
responseAttributes.put(ATTRIBUTE_SOLR_STATUS, String.valueOf(response.getStatus()));
responseAttributes.put(ATTRIBUTE_QUERY_TIME, String.valueOf(response.getQTime()));
flowFileResponse = session.putAllAttributes(flowFileResponse, responseAttributes);
if (response.getResults().size() > 0) {
if (context.getProperty(RETURN_TYPE).getValue().equals(MODE_XML.getValue())){
flowFileResponse = session.write(flowFileResponse, SolrUtils.getOutputStreamCallbackToTransformSolrResponseToXml(response));
flowFileResponse = session.putAttribute(flowFileResponse, CoreAttributes.MIME_TYPE.key(), MIME_TYPE_XML);
} else {
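// Record-oriented output: convert the Solr documents into a RecordSet and serialize it with the configured Record Writer.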
final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).evaluateAttributeExpressions(flowFileResponse)
.asControllerService(RecordSetWriterFactory.class);
final RecordSchema schema = writerFactory.getSchema(flowFileResponse.getAttributes(), null);
final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(response.getResults(), schema);
final StringBuffer mimeType = new StringBuffer();
final FlowFile flowFileResponseRef = flowFileResponse;
flowFileResponse = session.write(flowFileResponse, out -> {
try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out, flowFileResponseRef)) {
writer.write(recordSet);
writer.flush();
mimeType.append(writer.getMimeType());
} catch (SchemaNotFoundException e) {
throw new ProcessException("Could not parse Solr response", e);
}
});
flowFileResponse = session.putAttribute(flowFileResponse, CoreAttributes.MIME_TYPE.key(), mimeType.toString());
}
if (processFacetsAndStats) {
if (searchComponents.contains(FacetParams.FACET)) {
FlowFile flowFileFacets = session.create(flowFileResponse);
flowFileFacets = session.write(flowFileFacets, out -> {
try (
final OutputStreamWriter osw = new OutputStreamWriter(out);
final JsonWriter writer = new JsonWriter(osw)
) {
addFacetsFromSolrResponseToJsonWriter(response, writer);
}
});
flowFileFacets = session.putAttribute(flowFileFacets, CoreAttributes.MIME_TYPE.key(), MIME_TYPE_JSON);
session.getProvenanceReporter().receive(flowFileFacets, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS));
session.transfer(flowFileFacets, FACETS);
}
if (searchComponents.contains(StatsParams.STATS)) {
FlowFile flowFileStats = session.create(flowFileResponse);
flowFileStats = session.write(flowFileStats, out -> {
try (
final OutputStreamWriter osw = new OutputStreamWriter(out);
final JsonWriter writer = new JsonWriter(osw)
) {
addStatsFromSolrResponseToJsonWriter(response, writer);
}
});
flowFileStats = session.putAttribute(flowFileStats, CoreAttributes.MIME_TYPE.key(), MIME_TYPE_JSON);
session.getProvenanceReporter().receive(flowFileStats, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS));
session.transfer(flowFileStats, STATS);
}
processFacetsAndStats = false;
}
}
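// When "Entire results" is requested, emit the current page and advance the start parameter for the next request.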
if (getEntireResults) {
final Integer totalDocumentsReturned = solrQuery.getStart() + solrQuery.getRows();
if (totalDocumentsReturned < totalNumberOfResults) {
solrQuery.setStart(totalDocumentsReturned);
session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS));
session.transfer(flowFileResponse, RESULTS);
flowFileResponse = session.create(flowFileResponse);
} else {
continuePaging = false;
}
} else {
continuePaging = false;
}
}
} catch (Exception e) {
flowFileResponse = session.penalize(flowFileResponse);
flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION, e.getClass().getName());
flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION_MESSAGE, e.getMessage());
session.transfer(flowFileResponse, FAILURE);
logger.error("Failed to execute query {} due to {}. FlowFile will be routed to relationship failure", new Object[]{solrQuery.toString(), e}, e);
if (flowFileOriginal != null) {
flowFileOriginal = session.penalize(flowFileOriginal);
}
}
if (!flowFileResponse.isPenalized()) {
session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(), timer.getDuration(TimeUnit.MILLISECONDS));
session.transfer(flowFileResponse, RESULTS);
}
if (flowFileOriginal != null) {
if (!flowFileOriginal.isPenalized()) {
session.transfer(flowFileOriginal, ORIGINAL);
} else {
session.remove(flowFileOriginal);
}
}
}
private Set<String> extractSearchComponents(Map<String,String[]> solrParams) {
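// A search component (facets or stats) is considered active only if its request parameter is set to an "on" value.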
final Set<String> searchComponentsTemp = new HashSet<>();
for (final String searchComponent : SUPPORTED_SEARCH_COMPONENTS) {
if (solrParams.containsKey(searchComponent) && SEARCH_COMPONENTS_ON.contains(solrParams.get(searchComponent)[0])) {
searchComponentsTemp.add(searchComponent);
}
}
return Collections.unmodifiableSet(searchComponentsTemp);
}
private static void addStatsFromSolrResponseToJsonWriter(final QueryResponse response, final JsonWriter writer) throws IOException {
writer.beginObject();
writer.name("stats_fields");
writer.beginObject();
for (Map.Entry<String,FieldStatsInfo> entry: response.getFieldStatsInfo().entrySet()) {
FieldStatsInfo fsi = entry.getValue();
writer.name(entry.getKey());
writer.beginObject();
writer.name("min").value(fsi.getMin().toString());
writer.name("max").value(fsi.getMax().toString());
writer.name("count").value(fsi.getCount());
writer.name("missing").value(fsi.getMissing());
writer.name("sum").value(fsi.getSum().toString());
writer.name("mean").value(fsi.getMean().toString());
writer.name("sumOfSquares").value(fsi.getSumOfSquares());
writer.name("stddev").value(fsi.getStddev());
writer.endObject();
}
writer.endObject();
writer.endObject();
}
private static void addFacetsFromSolrResponseToJsonWriter(final QueryResponse response, final JsonWriter writer) throws IOException {
writer.beginObject();
writer.name("facet_queries");
writer.beginArray();
for (final Map.Entry<String,Integer> facetQuery : response.getFacetQuery().entrySet()){
writer.beginObject();
writer.name("facet").value(facetQuery.getKey());
writer.name("count").value(facetQuery.getValue());
writer.endObject();
}
writer.endArray();
writer.name("facet_fields");
writer.beginObject();
for (final FacetField facetField : response.getFacetFields()){
writer.name(facetField.getName());
writer.beginArray();
for (final FacetField.Count count : facetField.getValues()) {
writer.beginObject();
writer.name("facet").value(count.getName());
writer.name("count").value(count.getCount());
writer.endObject();
}
writer.endArray();
}
writer.endObject();
writer.name("facet_ranges");
writer.beginObject();
for (final RangeFacet rangeFacet : response.getFacetRanges()) {
writer.name(rangeFacet.getName());
writer.beginArray();
final List<Count> list = rangeFacet.getCounts();
for (final Count count : list) {
writer.beginObject();
writer.name("facet").value(count.getValue());
writer.name("count").value(count.getCount());
writer.endObject();
}
writer.endArray();
}
writer.endObject();
writer.name("facet_intervals");
writer.beginObject();
for (final IntervalFacet intervalFacet : response.getIntervalFacets()) {
writer.name(intervalFacet.getField());
writer.beginArray();
for (final IntervalFacet.Count count : intervalFacet.getIntervals()) {
writer.beginObject();
writer.name("facet").value(count.getKey());
writer.name("count").value(count.getCount());
writer.endObject();
}
writer.endArray();
}
writer.endObject();
writer.endObject();
}
}
|
|
package org.apache.archiva.security;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.archiva.redback.authentication.AuthenticationException;
import org.apache.archiva.redback.authentication.AuthenticationResult;
import org.apache.archiva.redback.authorization.UnauthorizedException;
import org.apache.archiva.redback.system.DefaultSecuritySession;
import org.apache.archiva.redback.system.SecuritySession;
import org.apache.archiva.redback.users.User;
import org.apache.archiva.redback.users.UserManager;
import org.apache.archiva.security.common.ArchivaRoleConstants;
import org.easymock.EasyMock;
import org.easymock.IMocksControl;
import org.junit.Before;
import org.junit.Test;
import javax.inject.Inject;
import javax.inject.Named;
import javax.servlet.http.HttpServletRequest;
/**
* ArchivaServletAuthenticatorTest
*/
public class ArchivaServletAuthenticatorTest
extends AbstractSecurityTest
{
@Inject
@Named( value = "servletAuthenticator#test" )
private ServletAuthenticator servletAuth;
private IMocksControl httpServletRequestControl;
private HttpServletRequest request;
@Before
@Override
public void setUp()
throws Exception
{
super.setUp();
httpServletRequestControl = EasyMock.createControl( );
request = httpServletRequestControl.createMock( HttpServletRequest.class );
setupRepository( "corporate" );
}
protected void assignRepositoryManagerRole( String principal, String repoId )
throws Exception
{
roleManager.assignTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, repoId, principal );
}
@Test
public void testIsAuthenticatedUserExists()
throws Exception
{
AuthenticationResult result = new AuthenticationResult( true, "user", null );
boolean isAuthenticated = servletAuth.isAuthenticated( request, result );
assertTrue( isAuthenticated );
}
@Test
public void testIsAuthenticatedUserDoesNotExist()
throws Exception
{
AuthenticationResult result = new AuthenticationResult( false, "non-existing-user", null );
try
{
servletAuth.isAuthenticated( request, result );
fail( "Authentication exception should have been thrown." );
}
catch ( AuthenticationException e )
{
assertEquals( "User Credentials Invalid", e.getMessage() );
}
}
@Test
public void testIsAuthorizedUserHasWriteAccess()
throws Exception
{
createUser( USER_ALPACA, "Al 'Archiva' Paca" );
assignRepositoryManagerRole( USER_ALPACA, "corporate" );
UserManager userManager = securitySystem.getUserManager();
User user = userManager.findUser( USER_ALPACA );
AuthenticationResult result = new AuthenticationResult( true, USER_ALPACA, null );
SecuritySession session = new DefaultSecuritySession( result, user );
boolean isAuthorized =
servletAuth.isAuthorized( request, session, "corporate", ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD );
assertTrue( isAuthorized );
restoreGuestInitialValues( USER_ALPACA );
}
@Test
public void testIsAuthorizedUserHasNoWriteAccess()
throws Exception
{
createUser( USER_ALPACA, "Al 'Archiva' Paca" );
assignRepositoryObserverRole( USER_ALPACA, "corporate" );
//httpServletRequestControl.expectAndReturn( request.getRemoteAddr(), "192.168.111.111" );
EasyMock.expect( request.getRemoteAddr() ).andReturn( "192.168.111.111" );
UserManager userManager = securitySystem.getUserManager();
User user = userManager.findUser( USER_ALPACA );
AuthenticationResult result = new AuthenticationResult( true, USER_ALPACA, null );
SecuritySession session = new DefaultSecuritySession( result, user );
httpServletRequestControl.replay();
try
{
servletAuth.isAuthorized( request, session, "corporate", ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD );
fail( "UnauthorizedException should have been thrown." );
}
catch ( UnauthorizedException e )
{
assertEquals( "Access denied for repository corporate", e.getMessage() );
}
httpServletRequestControl.verify();
restoreGuestInitialValues( USER_ALPACA );
}
@Test
public void testIsAuthorizedUserHasReadAccess()
throws Exception
{
createUser( USER_ALPACA, "Al 'Archiva' Paca" );
assignRepositoryObserverRole( USER_ALPACA, "corporate" );
UserManager userManager = securitySystem.getUserManager();
User user = userManager.findUser( USER_ALPACA );
AuthenticationResult result = new AuthenticationResult( true, USER_ALPACA, null );
SecuritySession session = new DefaultSecuritySession( result, user );
boolean isAuthorized =
servletAuth.isAuthorized( request, session, "corporate", ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS );
assertTrue( isAuthorized );
restoreGuestInitialValues( USER_ALPACA );
}
@Test
public void testIsAuthorizedUserHasNoReadAccess()
throws Exception
{
createUser( USER_ALPACA, "Al 'Archiva' Paca" );
UserManager userManager = securitySystem.getUserManager();
User user = userManager.findUser( USER_ALPACA );
AuthenticationResult result = new AuthenticationResult( true, USER_ALPACA, null );
SecuritySession session = new DefaultSecuritySession( result, user );
try
{
servletAuth.isAuthorized( request, session, "corporate", ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS );
fail( "UnauthorizedException should have been thrown." );
}
catch ( UnauthorizedException e )
{
assertEquals( "Access denied for repository corporate", e.getMessage() );
}
restoreGuestInitialValues( USER_ALPACA );
}
@Test
public void testIsAuthorizedGuestUserHasWriteAccess()
throws Exception
{
assignRepositoryManagerRole( USER_GUEST, "corporate" );
boolean isAuthorized =
servletAuth.isAuthorized( USER_GUEST, "corporate", ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD );
assertTrue( isAuthorized );
// cleanup previously added karma
restoreGuestInitialValues(USER_GUEST);
}
@Test
public void testIsAuthorizedGuestUserHasNoWriteAccess()
throws Exception
{
assignRepositoryObserverRole( USER_GUEST, "corporate" );
boolean isAuthorized =
servletAuth.isAuthorized( USER_GUEST, "corporate", ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD );
assertFalse( isAuthorized );
// cleanup previously added karma
restoreGuestInitialValues(USER_GUEST);
}
@Test
public void testIsAuthorizedGuestUserHasReadAccess()
throws Exception
{
assignRepositoryObserverRole( USER_GUEST, "corporate" );
boolean isAuthorized =
servletAuth.isAuthorized( USER_GUEST, "corporate", ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS );
assertTrue( isAuthorized );
// cleanup previously added karma
restoreGuestInitialValues(USER_GUEST);
}
@Test
public void testIsAuthorizedGuestUserHasNoReadAccess()
throws Exception
{
boolean isAuthorized =
servletAuth.isAuthorized( USER_GUEST, "corporate", ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS );
assertFalse( isAuthorized );
}
}
|
|
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.lang;
import com.intellij.openapi.diagnostic.LoggerRt;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.security.ProtectionDomain;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiConsumer;
import java.util.function.Predicate;
@ApiStatus.Internal
public final class ClassPath {
static final String CLASS_EXTENSION = ".class";
public static final String CLASSPATH_JAR_FILE_NAME_PREFIX = "classpath";
static final boolean recordLoadingInfo = Boolean.getBoolean("idea.record.classpath.info");
static final boolean recordLoadingTime = recordLoadingInfo || Boolean.getBoolean("idea.record.classloading.stats");
static final boolean logLoadingInfo = Boolean.getBoolean("idea.log.classpath.info");
private static final Collection<Map.Entry<String, Path>> loadedClasses;
private static final Measurer classLoading = new Measurer();
private static final Measurer resourceLoading = new Measurer();
private static final AtomicLong classDefineTotalTime = new AtomicLong();
private final List<Path> files;
private final @Nullable ResourceFileFactory resourceFileFactory;
final boolean mimicJarUrlConnection;
private final List<Loader> loaders = new ArrayList<>();
private volatile boolean allUrlsWereProcessed;
private final AtomicInteger lastLoaderProcessed = new AtomicInteger();
private final Map<Path, Loader> loaderMap = new HashMap<>();
private final ClasspathCache cache = new ClasspathCache();
private final Set<Path> filesWithProtectionDomain;
// true implies that the .jar file will not be modified in the lifetime of the JarLoader
final boolean lockJars;
private final boolean useCache;
final boolean isClassPathIndexEnabled;
private final @Nullable CachePoolImpl cachePool;
private final @Nullable Predicate<? super Path> cachingCondition;
final boolean errorOnMissingJar;
private final @NotNull ClassPath.ClassDataConsumer classDataConsumer;
static {
// insertion order must be preserved
loadedClasses = recordLoadingInfo ? new ConcurrentLinkedQueue<>() : null;
if (logLoadingInfo) {
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
//noinspection UseOfSystemOutOrSystemErr
System.out.println("Classloading requests: " + ClassPath.class.getClassLoader() +
", class=" + classLoading + ", resource=" + resourceLoading);
}, "Shutdown hook for tracing classloading information"));
}
}
interface ClassDataConsumer {
boolean isByteBufferSupported(String name, @Nullable ProtectionDomain protectionDomain);
Class<?> consumeClassData(String name, byte[] data, Loader loader, @Nullable ProtectionDomain protectionDomain) throws IOException;
Class<?> consumeClassData(String name, ByteBuffer data, Loader loader, @Nullable ProtectionDomain protectionDomain) throws IOException;
}
public @Nullable ResourceFileFactory getResourceFileFactory() {
return resourceFileFactory;
}
ClassPath(@NotNull List<Path> files,
@NotNull Set<Path> filesWithProtectionDomain,
@NotNull UrlClassLoader.Builder configuration,
@Nullable ResourceFileFactory resourceFileFactory,
@NotNull ClassPath.ClassDataConsumer classDataConsumer,
boolean mimicJarUrlConnection) {
lockJars = configuration.lockJars;
useCache = configuration.useCache;
cachePool = configuration.cachePool;
cachingCondition = configuration.cachingCondition;
isClassPathIndexEnabled = configuration.isClassPathIndexEnabled;
errorOnMissingJar = configuration.errorOnMissingJar;
this.filesWithProtectionDomain = filesWithProtectionDomain;
this.mimicJarUrlConnection = mimicJarUrlConnection;
this.classDataConsumer = recordLoadingTime ? new MeasuringClassDataConsumer(classDataConsumer) : classDataConsumer;
this.files = new ArrayList<>(files.size());
this.resourceFileFactory = resourceFileFactory;
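// Files are stored in reverse order because getLoaderSlowPath() pops entries from the end of the list, restoring the original classpath order.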
if (!files.isEmpty()) {
for (int i = files.size() - 1; i >= 0; i--) {
this.files.add(files.get(i));
}
}
}
public interface ResourceFileFactory {
ResourceFile create(Path file) throws IOException;
}
public synchronized void reset(@NotNull List<? extends Path> paths) {
lastLoaderProcessed.set(0);
allUrlsWereProcessed = false;
loaders.clear();
loaderMap.clear();
cache.clearCache();
addFiles(paths);
}
public static @NotNull Collection<Map.Entry<String, Path>> getLoadedClasses() {
if (loadedClasses == null) {
return Collections.emptyList();
}
return new ArrayList<>(loadedClasses);
}
// in nanoseconds
public static @NotNull Map<String, Long> getLoadingStats() {
Map<String, Long> result = new HashMap<>(5);
result.put("classLoadingTime", classLoading.timeCounter.get());
result.put("classDefineTime", classDefineTotalTime.get());
result.put("classRequests", (long)classLoading.requestCounter.get());
result.put("resourceLoadingTime", resourceLoading.timeCounter.get());
result.put("resourceRequests", (long)resourceLoading.requestCounter.get());
result.put("identity", (long)ClassPath.class.hashCode());
return result;
}
/** Adding URLs to classpath at runtime could lead to hard-to-debug errors */
@ApiStatus.Internal
synchronized void addFiles(@NotNull List<? extends Path> files) {
for (int i = files.size() - 1; i >= 0; i--) {
this.files.add(files.get(i));
}
allUrlsWereProcessed = false;
}
// think twice before use
public synchronized void appendFiles(@NotNull List<? extends Path> newList) {
Set<Path> existing = new HashSet<>(files);
for (int i = newList.size() - 1; i >= 0; i--) {
Path file = newList.get(i);
if (!existing.contains(file)) {
files.add(file);
}
}
allUrlsWereProcessed = false;
}
public @Nullable Class<?> findClass(@NotNull String className) throws IOException {
long start = classLoading.startTiming();
try {
String fileName = className.replace('.', '/') + CLASS_EXTENSION;
int i;
if (useCache) {
Loader[] loaders = cache.getClassLoadersByName(fileName);
if (loaders != null) {
for (Loader loader : loaders) {
if (loader.containsName(fileName)) {
Class<?> result = findClassInLoader(fileName, className, classDataConsumer, loader);
if (result != null) {
return result;
}
}
}
}
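// Every classpath entry has already been indexed in the cache; if the class was not found there, it is not on this classpath.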
if (allUrlsWereProcessed) {
return null;
}
i = lastLoaderProcessed.get();
}
else {
i = 0;
}
Loader loader;
while ((loader = getLoader(i++)) != null) {
if (useCache && !loader.containsName(fileName)) {
continue;
}
Class<?> result = findClassInLoader(fileName, className, classDataConsumer, loader);
if (result != null) {
return result;
}
}
}
finally {
classLoading.record(start, className);
}
return null;
}
private static @Nullable Class<?> findClassInLoader(@NotNull String fileName,
@NotNull String className,
@NotNull ClassDataConsumer classConsumer,
@NotNull Loader loader) throws IOException {
Class<?> result = loader.findClass(fileName, className, classConsumer);
if (result == null) {
return null;
}
if (loadedClasses != null) {
loadedClasses.add(new AbstractMap.SimpleImmutableEntry<>(fileName, loader.path));
}
return result;
}
public @Nullable Resource findResource(@NotNull String resourceName) {
long start = resourceLoading.startTiming();
try {
int i;
if (useCache) {
Loader[] loaders = cache.getLoadersByName(resourceName);
if (loaders != null) {
for (Loader loader : loaders) {
if (loader.containsName(resourceName)) {
Resource resource = loader.getResource(resourceName);
if (resource != null) {
if (loadedClasses != null) {
loadedClasses.add(new AbstractMap.SimpleImmutableEntry<>(resourceName, loader.path));
}
return resource;
}
}
}
}
if (allUrlsWereProcessed) {
return null;
}
i = lastLoaderProcessed.get();
}
else {
i = 0;
}
Loader loader;
while ((loader = getLoader(i++)) != null) {
if (useCache && !loader.containsName(resourceName)) {
continue;
}
Resource resource = loader.getResource(resourceName);
if (resource != null) {
if (loadedClasses != null) {
loadedClasses.add(new AbstractMap.SimpleImmutableEntry<>(resourceName, loader.path));
}
return resource;
}
}
}
finally {
resourceLoading.record(start, resourceName);
}
return null;
}
public @NotNull Enumeration<URL> getResources(@NotNull String name) {
if (name.endsWith("/")) {
name = name.substring(0, name.length() - 1);
}
if (useCache && allUrlsWereProcessed) {
Loader[] loaders = cache.getLoadersByName(name);
return loaders == null || loaders.length == 0 ? Collections.emptyEnumeration() : new ResourceEnumeration(name, loaders);
}
else {
return new UncachedResourceEnumeration(name, this);
}
}
void processResources(@NotNull String dir,
@NotNull Predicate<? super String> fileNameFilter,
@NotNull BiConsumer<? super String, ? super InputStream> consumer) throws IOException {
if (useCache && allUrlsWereProcessed) {
// getLoadersByName computes the package name from the resource name, so add a trailing slash
Loader[] loaders = cache.getLoadersByName(dir + '/');
if (loaders != null) {
for (Loader loader : loaders) {
loader.processResources(dir, fileNameFilter, consumer);
}
}
}
else {
int index = 0;
Loader loader;
while ((loader = getLoader(index++)) != null) {
loader.processResources(dir, fileNameFilter, consumer);
}
}
}
private @Nullable Loader getLoader(int i) {
// volatile read
return i < lastLoaderProcessed.get() ? loaders.get(i) : getLoaderSlowPath(i);
}
private synchronized @Nullable Loader getLoaderSlowPath(int i) {
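// Lazily create loaders for the remaining classpath entries until the requested index is available.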
while (loaders.size() < i + 1) {
int size = files.size();
if (size == 0) {
if (useCache) {
allUrlsWereProcessed = true;
}
return null;
}
Path path = files.remove(size - 1);
if (loaderMap.containsKey(path)) {
continue;
}
try {
Loader loader = createLoader(path);
if (loader != null) {
if (useCache) {
initLoaderCache(path, loader);
}
loaders.add(loader);
// volatile write
loaderMap.put(path, loader);
lastLoaderProcessed.incrementAndGet();
}
}
catch (IOException e) {
LoggerRt.getInstance(ClassPath.class).info("path: " + path, e);
}
}
return loaders.get(i);
}
public @NotNull List<Path> getBaseUrls() {
List<Path> result = new ArrayList<>();
for (Loader loader : loaders) {
result.add(loader.path);
}
return result;
}
private @Nullable Loader createLoader(@NotNull Path file) throws IOException {
BasicFileAttributes fileAttributes;
try {
fileAttributes = Files.readAttributes(file, BasicFileAttributes.class);
}
catch (NoSuchFileException ignore) {
return null;
}
if (fileAttributes.isDirectory()) {
return new FileLoader(file, isClassPathIndexEnabled);
}
else if (!fileAttributes.isRegularFile()) {
return null;
}
JarLoader loader;
if (filesWithProtectionDomain.contains(file)) {
loader = new SecureJarLoader(file, this);
}
else {
ResourceFile zipFile;
if (resourceFileFactory == null) {
zipFile = new JdkZipResourceFile(file, lockJars, false);
}
else {
zipFile = resourceFileFactory.create(file);
}
loader = new JarLoader(file, this, zipFile);
}
String filePath = file.toString();
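// A jar whose name starts with "classpath" may reference further jars through its manifest Class-Path attribute; add them to the search path as well.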
if (filePath.startsWith(CLASSPATH_JAR_FILE_NAME_PREFIX, filePath.lastIndexOf(File.separatorChar) + 1)) {
String[] referencedJars = loadManifestClasspath(loader);
if (referencedJars != null) {
long startReferenced = logLoadingInfo ? System.nanoTime() : 0;
List<Path> urls = new ArrayList<>(referencedJars.length);
for (String referencedJar : referencedJars) {
try {
urls.add(Paths.get(UrlClassLoader.urlToFilePath(referencedJar)));
}
catch (Exception e) {
LoggerRt.getInstance(ClassPath.class).warn("file: " + file + " / " + referencedJar, e);
}
}
addFiles(urls);
if (logLoadingInfo) {
//noinspection UseOfSystemOutOrSystemErr
System.out.println("Loaded all " + referencedJars.length + " files " + (System.nanoTime() - startReferenced) / 1000000 + "ms");
}
}
}
return loader;
}
private void initLoaderCache(@NotNull Path file, @NotNull Loader loader) throws IOException {
ClasspathCache.IndexRegistrar data = cachePool == null ? null : cachePool.loaderIndexCache.get(file);
if (data == null) {
data = loader.buildData();
if (cachePool != null && cachingCondition != null && cachingCondition.test(file)) {
ClasspathCache.LoaderData loaderData =
data instanceof ClasspathCache.LoaderData ? (ClasspathCache.LoaderData)data : ((ClasspathCache.LoaderDataBuilder)data).build();
cachePool.loaderIndexCache.put(file, loaderData);
data = loaderData;
}
}
cache.applyLoaderData(data, loader);
if (files.isEmpty()) {
allUrlsWereProcessed = true;
}
}
Map<Loader.Attribute, String> getManifestData(@NotNull Path file) {
return useCache && cachePool != null ? cachePool.getManifestData(file) : null;
}
void cacheManifestData(@NotNull Path file, @NotNull Map<Loader.Attribute, String> manifestAttributes) {
if (useCache && cachePool != null && cachingCondition != null && cachingCondition.test(file)) {
cachePool.cacheManifestData(file, manifestAttributes);
}
}
private static final class ResourceEnumeration implements Enumeration<URL> {
private int index;
private Resource resource;
private final String name;
private final Loader[] loaders;
ResourceEnumeration(@NotNull String name, Loader[] loaders) {
this.name = name;
this.loaders = loaders;
}
private boolean next() {
if (resource != null) {
return true;
}
long start = resourceLoading.startTiming();
try {
Loader loader;
while (index < loaders.length) {
loader = loaders[index++];
if (!loader.containsName(name)) {
resource = null;
continue;
}
resource = loader.getResource(name);
if (resource != null) {
return true;
}
}
}
finally {
resourceLoading.record(start, name);
}
return false;
}
@Override
public boolean hasMoreElements() {
return next();
}
@Override
public URL nextElement() {
if (!next()) {
throw new NoSuchElementException();
}
Resource resource = this.resource;
this.resource = null;
return resource.getURL();
}
}
private static final class UncachedResourceEnumeration implements Enumeration<URL> {
private int index;
private Resource resource;
private final String name;
private final ClassPath classPath;
UncachedResourceEnumeration(@NotNull String name, @NotNull ClassPath classPath) {
this.name = name;
this.classPath = classPath;
}
private boolean next() {
if (resource != null) {
return true;
}
long start = resourceLoading.startTiming();
try {
Loader loader;
while ((loader = classPath.getLoader(index++)) != null) {
if (classPath.useCache && !loader.containsName(name)) {
continue;
}
resource = loader.getResource(name);
if (resource != null) {
return true;
}
}
}
finally {
resourceLoading.record(start, name);
}
return false;
}
@Override
public boolean hasMoreElements() {
return next();
}
@Override
public URL nextElement() {
if (!next()) {
throw new NoSuchElementException();
}
Resource resource = this.resource;
this.resource = null;
return resource.getURL();
}
}
private static String @Nullable [] loadManifestClasspath(@NotNull JarLoader loader) {
try {
String classPath = loader.getClassPathManifestAttribute();
if (classPath != null) {
String[] urls = classPath.split(" ");
if (urls.length > 0 && urls[0].startsWith("file:")) {
return urls;
}
}
}
catch (Exception ignore) {
}
return null;
}
private static final class MeasuringClassDataConsumer implements ClassDataConsumer {
private static final ThreadLocal<Boolean> doingClassDefineTiming = new ThreadLocal<>();
private final ClassDataConsumer classDataConsumer;
MeasuringClassDataConsumer(ClassDataConsumer classDataConsumer) {
this.classDataConsumer = classDataConsumer;
}
@Override
public boolean isByteBufferSupported(String name, @Nullable ProtectionDomain protectionDomain) {
return classDataConsumer.isByteBufferSupported(name, protectionDomain);
}
@Override
public Class<?> consumeClassData(String name,
byte[] data,
Loader loader,
@Nullable ProtectionDomain protectionDomain) throws IOException {
long start = startTiming();
try {
return classDataConsumer.consumeClassData(name, data, loader, protectionDomain);
}
finally {
record(start);
}
}
@Override
public Class<?> consumeClassData(String name,
ByteBuffer data,
Loader loader,
@Nullable ProtectionDomain protectionDomain) throws IOException {
long start = startTiming();
try {
return classDataConsumer.consumeClassData(name, data, loader, protectionDomain);
}
finally {
record(start);
}
}
private static long startTiming() {
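// Class definitions triggered while another definition is already being timed on this thread are not timed again, to avoid double counting.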
if (doingClassDefineTiming.get() != null) {
return -1;
}
else {
doingClassDefineTiming.set(Boolean.TRUE);
return System.nanoTime();
}
}
private static void record(long start) {
if (start != -1) {
doingClassDefineTiming.set(null);
classDefineTotalTime.addAndGet(System.nanoTime() - start);
}
}
}
private static final class Measurer {
private final AtomicLong timeCounter = new AtomicLong();
private final AtomicInteger requestCounter = new AtomicInteger();
private final ThreadLocal<Boolean> doingTiming = new ThreadLocal<>();
long startTiming() {
if (!recordLoadingTime || doingTiming.get() != null) {
return -1;
}
else {
doingTiming.set(Boolean.TRUE);
return System.nanoTime();
}
}
@SuppressWarnings("UseOfSystemOutOrSystemErr")
void record(long start, String resourceName) {
if (start == -1) {
return;
}
doingTiming.set(null);
long time = System.nanoTime() - start;
long totalTime = timeCounter.addAndGet(time);
int totalRequests = requestCounter.incrementAndGet();
if (logLoadingInfo) {
if (time > 3000000L) {
System.out.println(TimeUnit.NANOSECONDS.toMillis(time) + " ms for " + resourceName);
}
if (totalRequests % 10000 == 0) {
System.out.println(ClassPath.class.getClassLoader() + ", requests: " + totalRequests +
", time:" + TimeUnit.NANOSECONDS.toMillis(totalTime) + "ms");
}
}
}
@Override
public String toString() {
return "Measurer(time=" + TimeUnit.NANOSECONDS.toMillis(timeCounter.get()) + "ms, requests=" + requestCounter + ')';
}
}
}
|
|
/*
* Copyright 2015 Marc Prengemann
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the License.
*/
package de.mprengemann.intellij.plugin.androidicons.forms;
import com.intellij.ide.BrowserUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.vfs.VirtualFile;
import de.mprengemann.intellij.plugin.androidicons.images.IconPack;
import de.mprengemann.intellij.plugin.androidicons.images.ImageInformation;
import de.mprengemann.intellij.plugin.androidicons.images.ImageUtils;
import de.mprengemann.intellij.plugin.androidicons.images.RefactoringTask;
import de.mprengemann.intellij.plugin.androidicons.images.Resolution;
import de.mprengemann.intellij.plugin.androidicons.settings.PluginSettings;
import de.mprengemann.intellij.plugin.androidicons.settings.SettingsHelper;
import de.mprengemann.intellij.plugin.androidicons.util.AndroidResourcesHelper;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class MaterialIconsImporter extends DialogWrapper {
private static final String DEFAULT_RESOLUTION = "drawable-xhdpi";
private VirtualFile assetRoot;
private Project project;
private JLabel imageContainer;
private TextFieldWithBrowseButton resRoot;
private JComboBox assetSpinner;
private JComboBox colorSpinner;
private JComboBox categorySpinner;
private JComboBox sizeSpinner;
private JTextField resExportName;
private JCheckBox MDPICheckBox;
private JCheckBox HDPICheckBox;
private JCheckBox XHDPICheckBox;
private JCheckBox XXHDPICheckBox;
private JCheckBox XXXHDPICheckBox;
private JPanel container;
private boolean exportNameChanged = false;
private final Comparator<File> alphabeticalComparator = new Comparator<File>() {
@Override
public int compare(File file1, File file2) {
if (file1 != null && file2 != null) {
return file1.getName().compareTo(file2.getName());
}
return 0;
}
};
private String lastSelectedColor = null;
private String lastSelectedSize = null;
public MaterialIconsImporter(@Nullable final Project project, Module module) {
super(project, true);
this.project = project;
setTitle("Material Icons Importer");
setResizable(false);
AndroidResourcesHelper.initResourceBrowser(project, module, "Select res root", this.resRoot);
assetRoot = SettingsHelper.getAssetPath(IconPack.MATERIAL_ICONS);
getHelpAction().setEnabled(true);
fillCategories();
fillAssets();
fillSizes();
fillColors();
categorySpinner.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent actionEvent) {
fillAssets();
updateImage();
}
});
AssetSpinnerRenderer renderer = new AssetSpinnerRenderer();
//noinspection GtkPreferredJComboBoxRenderer
assetSpinner.setRenderer(renderer);
assetSpinner.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
fillSizes();
updateImage();
}
});
sizeSpinner.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent actionEvent) {
String size = (String) sizeSpinner.getSelectedItem();
if (size != null) {
lastSelectedSize = size;
}
fillColors();
updateImage();
}
});
colorSpinner.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
String color = (String) colorSpinner.getSelectedItem();
if (color != null) {
lastSelectedColor = color;
}
updateImage();
}
});
resExportName.addKeyListener(new KeyAdapter() {
@Override
public void keyTyped(KeyEvent keyEvent) {
super.keyTyped(keyEvent);
if (!exportNameChanged && keyEvent != null && keyEvent.getKeyCode() > -1) {
exportNameChanged = true;
}
}
@Override
public void keyPressed(KeyEvent keyEvent) {
super.keyPressed(keyEvent);
}
@Override
public void keyReleased(KeyEvent keyEvent) {
super.keyReleased(keyEvent);
}
});
imageContainer.addComponentListener(new ComponentAdapter() {
@Override
public void componentResized(ComponentEvent e) {
super.componentResized(e);
updateImage();
}
});
init();
}
@NotNull
@Override
public Action[] createActions() {
return SystemInfo.isMac ? new Action[] {this.getHelpAction(), this.getCancelAction(), this.getOKAction()}
: new Action[] {this.getOKAction(), this.getCancelAction(), this.getHelpAction()};
}
private void updateImage() {
if (imageContainer == null) {
return;
}
String assetColor = (String) colorSpinner.getSelectedItem();
String assetName = (String) assetSpinner.getSelectedItem();
String assetCategory = (String) categorySpinner.getSelectedItem();
String assetSize = (String) sizeSpinner.getSelectedItem();
if (assetColor == null ||
assetName == null ||
assetCategory == null ||
assetSize == null) {
return;
}
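// Material icon assets follow the naming scheme <category>/<resolution>/ic_<asset>_<color>_<size>.png.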
String path = assetCategory + "/" + DEFAULT_RESOLUTION + "/ic_" + assetName + "_" + assetColor + "_" + assetSize + ".png";
File imageFile = new File(assetRoot.getCanonicalPath(), path);
ImageUtils.updateImage(imageContainer, imageFile);
if (!exportNameChanged) {
resExportName.setText("ic_action_" + assetName);
}
}
private void fillCategories() {
categorySpinner.removeAllItems();
if (this.assetRoot.getCanonicalPath() == null) {
return;
}
File assetRoot = new File(this.assetRoot.getCanonicalPath());
final FilenameFilter folderFileNameFiler = new FilenameFilter() {
@Override
public boolean accept(File file, String s) {
return !s.startsWith(".") &&
new File(file, s).isDirectory() &&
!PluginSettings.BLACKLISTED_MATERIAL_ICONS_FOLDER
.contains(FilenameUtils.removeExtension(s));
}
};
File[] categories = assetRoot.listFiles(folderFileNameFiler);
if (categories == null) {
return;
}
Arrays.sort(categories, alphabeticalComparator);
for (File file : categories) {
categorySpinner.addItem(file.getName());
}
}
private void fillAssets() {
assetSpinner.removeAllItems();
if (this.assetRoot.getCanonicalPath() == null) {
return;
}
File assetRoot = new File(this.assetRoot.getCanonicalPath());
assetRoot = new File(assetRoot, (String) categorySpinner.getSelectedItem());
assetRoot = new File(assetRoot, DEFAULT_RESOLUTION);
final FilenameFilter drawableFileNameFiler = new FilenameFilter() {
@Override
public boolean accept(File file, String s) {
if (!FilenameUtils.isExtension(s, "png")) {
return false;
}
String filename = FilenameUtils.removeExtension(s);
return filename.startsWith("ic_") &&
filename.endsWith("_black_48dp");
}
};
File[] assets = assetRoot.listFiles(drawableFileNameFiler);
if (assets == null) {
return;
}
for (File asset : assets) {
String assetName = FilenameUtils.removeExtension(asset.getName());
assetName = assetName.replace("_black_48dp", "");
assetName = assetName.replace("ic_", "");
assetSpinner.addItem(assetName);
}
}
private void fillSizes() {
final String lastSelectedSize = this.lastSelectedSize;
sizeSpinner.removeAllItems();
if (this.assetRoot.getCanonicalPath() == null) {
return;
}
File assetRoot = new File(this.assetRoot.getCanonicalPath());
assetRoot = new File(assetRoot, (String) categorySpinner.getSelectedItem());
assetRoot = new File(assetRoot, DEFAULT_RESOLUTION);
final String assetName = (String) assetSpinner.getSelectedItem();
final FilenameFilter drawableFileNameFiler = new FilenameFilter() {
@Override
public boolean accept(File file, String s) {
if (!FilenameUtils.isExtension(s, "png")) {
return false;
}
String filename = FilenameUtils.removeExtension(s);
return filename.startsWith("ic_" + assetName + "_");
}
};
File[] assets = assetRoot.listFiles(drawableFileNameFiler);
if (assets == null) {
return;
}
Set<String> sizes = new HashSet<String>();
for (File asset : assets) {
String drawableName = FilenameUtils.removeExtension(asset.getName());
String[] numbers = drawableName.replaceAll("[^-?0-9]+", " ").trim().split(" ");
drawableName = numbers[numbers.length - 1].trim() + "dp";
sizes.add(drawableName);
}
List<String> list = new ArrayList<String>();
list.addAll(sizes);
Collections.sort(list);
for (String size : list) {
sizeSpinner.addItem(size);
}
if (list.contains(lastSelectedSize)) {
sizeSpinner.setSelectedIndex(list.indexOf(lastSelectedSize));
}
}
private void fillColors() {
final String lastSelectedColor = this.lastSelectedColor;
colorSpinner.removeAllItems();
if (this.assetRoot.getCanonicalPath() == null) {
return;
}
File assetRoot = new File(this.assetRoot.getCanonicalPath());
assetRoot = new File(assetRoot, (String) categorySpinner.getSelectedItem());
assetRoot = new File(assetRoot, DEFAULT_RESOLUTION);
final String assetName = (String) assetSpinner.getSelectedItem();
final String assetSize = (String) sizeSpinner.getSelectedItem();
final FilenameFilter drawableFileNameFiler = new FilenameFilter() {
@Override
public boolean accept(File file, String s) {
if (!FilenameUtils.isExtension(s, "png")) {
return false;
}
String filename = FilenameUtils.removeExtension(s);
return filename.startsWith("ic_" + assetName + "_") &&
filename.endsWith("_" + assetSize);
}
};
File[] assets = assetRoot.listFiles(drawableFileNameFiler);
if (assets == null) {
return;
}
Set<String> colors = new HashSet<String>();
for (File asset : assets) {
String drawableName = FilenameUtils.removeExtension(asset.getName());
String[] color = drawableName.split("_");
drawableName = color[color.length - 2].trim();
colors.add(drawableName);
}
List<String> list = new ArrayList<String>();
list.addAll(colors);
Collections.sort(list);
for (String size : list) {
colorSpinner.addItem(size);
}
if (list.contains(lastSelectedColor)) {
colorSpinner.setSelectedIndex(list.indexOf(lastSelectedColor));
}
}
@Override
protected void doHelpAction() {
try {
BrowserUtil.browse("file://" + new File(assetRoot.getCanonicalPath(), "index.html").getCanonicalPath());
} catch (IOException ignored) {
}
}
@Override
protected void doOKAction() {
importIcons();
super.doOKAction();
}
private void importIcons() {
RefactoringTask task = new RefactoringTask(project);
ImageInformation baseInformation = ImageInformation.newBuilder()
.setExportName(resExportName.getText())
.setExportPath(resRoot.getText())
.build(project);
task.addImage(getImageInformation(baseInformation, Resolution.MDPI, MDPICheckBox));
task.addImage(getImageInformation(baseInformation, Resolution.HDPI, HDPICheckBox));
task.addImage(getImageInformation(baseInformation, Resolution.XHDPI, XHDPICheckBox));
task.addImage(getImageInformation(baseInformation, Resolution.XXHDPI, XXHDPICheckBox));
task.addImage(getImageInformation(baseInformation, Resolution.XXXHDPI, XXXHDPICheckBox));
ProgressManager.getInstance().run(task);
}
private ImageInformation getImageInformation(ImageInformation baseInformation,
Resolution resolution,
JCheckBox checkBox) {
if (!checkBox.isSelected()) {
return null;
}
String assetCategory = (String) categorySpinner.getSelectedItem();
String assetName = (String) assetSpinner.getSelectedItem();
String assetColor = (String) colorSpinner.getSelectedItem();
String assetSize = (String) sizeSpinner.getSelectedItem();
String fromName = "ic_" + assetName + "_" + assetColor + "_" + assetSize + ".png";
File source = new File(assetRoot.getCanonicalPath(),
assetCategory + "/drawable-" + resolution.toString() + "/" + fromName);
return ImageInformation.newBuilder(baseInformation)
.setImageFile(source)
.setResolution(resolution)
.build(project);
}
@Nullable
@Override
protected ValidationInfo doValidate() {
if (StringUtils.isEmpty(resRoot.getText().trim())) {
return new ValidationInfo("Please select the resources root.", resRoot);
}
if (StringUtils.isEmpty(resExportName.getText().trim())) {
return new ValidationInfo("Please select a name for the drawable.", resExportName);
} else if (!resExportName.getText().matches("[a-z0-9_.]*")) {
return new ValidationInfo(
"Please select a valid name for the drawable. Only \"[a-z0-9_.]\" characters are allowed.",
resExportName);
}
return null;
}
@Nullable
@Override
protected JComponent createCenterPanel() {
return container;
}
private class AssetSpinnerRenderer extends DefaultListCellRenderer {
@Override
public Component getListCellRendererComponent(JList jList, Object o, int i, boolean b, boolean b2) {
JLabel label = (JLabel) super.getListCellRendererComponent(jList, o, i, b, b2);
if (label != null) {
String name = (String) assetSpinner.getItemAt(i);
String path = categorySpinner.getSelectedItem() + "/" + DEFAULT_RESOLUTION + "/ic_" + name + "_black_24dp.png";
File imageFile = new File(assetRoot.getCanonicalPath(), path);
if (imageFile.exists()) {
label.setIcon(new ImageIcon(imageFile.getAbsolutePath()));
}
}
return label;
}
}
}
|
|
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.tvm.custom;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.simpledb.AmazonSimpleDBClient;
import com.amazonaws.services.simpledb.model.Attribute;
import com.amazonaws.services.simpledb.model.CreateDomainRequest;
import com.amazonaws.services.simpledb.model.DeleteAttributesRequest;
import com.amazonaws.services.simpledb.model.GetAttributesRequest;
import com.amazonaws.services.simpledb.model.Item;
import com.amazonaws.services.simpledb.model.ListDomainsRequest;
import com.amazonaws.services.simpledb.model.ListDomainsResult;
import com.amazonaws.services.simpledb.model.PutAttributesRequest;
import com.amazonaws.services.simpledb.model.ReplaceableAttribute;
import com.amazonaws.services.simpledb.model.SelectRequest;
import com.amazonaws.services.simpledb.model.SelectResult;
import com.amazonaws.tvm.Configuration;
import com.amazonaws.tvm.TokenVendingMachineLogger;
import com.amazonaws.tvm.Utilities;
/**
 * This class is used to store and authenticate users. All users and their username/password information are stored in a SimpleDB domain.
*/
public class UserAuthentication {
protected static final Logger log = TokenVendingMachineLogger.getLogger();
private final AmazonSimpleDBClient sdb;
/**
* Constant for the Domain name used to store the identities.
*/
private final static String IDENTITY_DOMAIN = Configuration.USERS_DOMAIN;
/** Constant for the userid attribute */
private final static String USER_ID = "userid";
/** Constant for the hash of password attribute */
private final static String HASH_SALTED_PASSWORD = "hash_salted_password";
/** Constant for the enabled attribute */
private final static String IS_ENABLED = "enabled";
/** Constant select expression used to list all the identities stored in the Domain. */
private final static String SELECT_USERS_EXPRESSION = "select * from `" + IDENTITY_DOMAIN + "`";
/**
 * Looks up the identity domain and creates it if it does not exist.
*/
public UserAuthentication() {
this.sdb = new AmazonSimpleDBClient( new BasicAWSCredentials( Configuration.AWS_ACCESS_KEY_ID, Configuration.AWS_SECRET_KEY ) );
this.sdb.setEndpoint( Configuration.SIMPLEDB_ENDPOINT );
if ( !this.doesDomainExist( IDENTITY_DOMAIN ) ) {
this.createIdentityDomain();
}
}
/**
* Returns the list of usernames stored in the identity domain.
*
* @return list of existing usernames in SimpleDB domain
*/
public List<String> listUsers() {
List<String> users = new ArrayList<String>( 1000 );
SelectResult result = null;
String nextToken = null;
do {
// pass the pagination token from the previous page, otherwise the first page would be fetched repeatedly
SelectRequest sr = new SelectRequest( SELECT_USERS_EXPRESSION, Boolean.TRUE ).withNextToken( nextToken );
result = this.sdb.select( sr );
for ( Item item : result.getItems() ) {
users.add( item.getName() );
}
nextToken = result.getNextToken();
}
while ( nextToken != null );
return users;
}
/**
 * Attempts to register the username, password combination after checking that the username does not already exist. Returns true if successful, false otherwise.
*
* @param username
* Unique user identifier
* @param password
* user password
* @param uri
* endpoint URI
* @return true if successful, false otherwise.
*/
public boolean registerUser( String username, String password, String uri ) {
try {
if ( this.checkUsernameExists( username ) )
return false;
this.storeUser( username, password, uri );
return this.authenticateUser( username, password, uri );
}
catch ( Exception exception ) {
log.log( Level.WARNING, "Exception during registerUser", exception );
return false;
}
}
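/*
 * Minimal usage sketch (illustrative only; the username, password and endpoint URI below are assumed example values,
 * and authenticateUser declares throws Exception):
 *
 *   UserAuthentication auth = new UserAuthentication();
 *   boolean registered = auth.registerUser( "alice", "secret-password", "https://tvm.example.com/" );
 *   boolean authenticated = auth.authenticateUser( "alice", "secret-password", "https://tvm.example.com/" );
 */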
/**
* Deletes the specified username from the identity domain.
*/
public void deleteUser( String username ) {
DeleteAttributesRequest dar = new DeleteAttributesRequest( IDENTITY_DOMAIN, username );
this.sdb.deleteAttributes( dar );
}
/**
* Authenticates the given username, password combination. Hash of password is matched against the hash value stored for password field
*
* @param username
* Unique user identifier
* @param password
* user password
* @param uri
* endpoint URI
* @return true if authentication was successful, false otherwise
* @throws Exception
*/
public boolean authenticateUser( String username, String password, String uri ) throws Exception {
if ( null == username || null == password ) {
return false;
}
GetAttributesRequest gar = new GetAttributesRequest( IDENTITY_DOMAIN, username ).withConsistentRead( Boolean.TRUE );
String hashedSaltedPassword = Utilities.getSaltedPassword( username, uri, password );
List<Attribute> data = this.sdb.getAttributes( gar ).getAttributes();
if ( data != null && !data.isEmpty() ) {
            Attribute passwordAttribute = this.findAttributeInList( HASH_SALTED_PASSWORD, data );
            return passwordAttribute != null && passwordAttribute.getValue().equals( hashedSaltedPassword );
}
else {
return false;
}
}
/**
     * Authenticates the given username, signature combination. A signature is computed from the stored hash-salted password and matched against the
     * given signature. If they match, the userid associated with the username is returned.
*
* @param username
* Unique user identifier
* @param timestamp
* Timestamp of the request
* @param signature
* Signature of the request
     * @return the userid if authentication was successful, null otherwise
*/
public String authenticateUserSignature( String username, String timestamp, String signature ) throws Exception {
String hashSaltedPassword = this.getHashSaltedPassword( username );
String computedSignature = Utilities.sign( timestamp, hashSaltedPassword );
if ( Utilities.slowStringComparison(signature, computedSignature) )
return this.getUserid( username );
return null;
}
/**
     * Stores the username, password combination in the identity domain. The username is used as the item name, and the item holds the
     * userid, hash of the salted password, and enabled attributes.
*
* @param username
* Unique user identifier
* @param password
* user password
* @param uri
* endpoint URI
*/
protected void storeUser( String username, String password, String uri ) throws Exception {
if ( null == username || null == password ) {
return;
}
String hashedSaltedPassword = Utilities.getSaltedPassword( username, uri, password );
String userId = Utilities.generateRandomString();
ReplaceableAttribute userIdAttr = new ReplaceableAttribute( USER_ID, userId, Boolean.TRUE );
ReplaceableAttribute passwordAttr = new ReplaceableAttribute( HASH_SALTED_PASSWORD, hashedSaltedPassword, Boolean.TRUE );
ReplaceableAttribute enableAttr = new ReplaceableAttribute( IS_ENABLED, "true", Boolean.TRUE );
List<ReplaceableAttribute> attributes = new ArrayList<ReplaceableAttribute>( 3 );
attributes.add( userIdAttr );
attributes.add( passwordAttr );
attributes.add( enableAttr );
try {
PutAttributesRequest par = new PutAttributesRequest( IDENTITY_DOMAIN, username, attributes );
this.sdb.putAttributes( par );
}
catch ( Exception exception ) {
log.log( Level.WARNING, "Exception during storeUser", exception );
}
}
/**
* Find and return the attribute in the attribute list
*
* @param attributeName
* attribute to search for in the list
* @param attributes
* list of attributes
     * @return the attribute if found, null if no such attribute exists
*/
protected Attribute findAttributeInList( String attributeName, List<Attribute> attributes ) {
for ( Attribute attribute : attributes ) {
if ( attribute.getName().equals( attributeName ) ) {
return attribute;
}
}
return null;
}
/**
* Used to create the Identity Domain. This function only needs to be called once.
*/
protected void createIdentityDomain() {
this.sdb.createDomain( new CreateDomainRequest( IDENTITY_DOMAIN ) );
}
/**
* Fetch list of all the domains in SimpleDB
*
* @return list of domain names
*/
protected List<String> getAllDomains() {
List<String> domains = new ArrayList<String>();
String nextToken = null;
do {
ListDomainsRequest ldr = new ListDomainsRequest();
ldr.setNextToken( nextToken );
ListDomainsResult result = this.sdb.listDomains( ldr );
domains.addAll( result.getDomainNames() );
nextToken = result.getNextToken();
}
while ( nextToken != null );
return domains;
}
/**
     * Checks whether the given domainName exists.
*
* @param domainName
* The domain name to check
     * @return true if domainName exists, false otherwise
*/
protected boolean doesDomainExist( String domainName ) {
try {
List<String> domains = this.getAllDomains();
return ( domains.contains( domainName ) );
}
catch ( Exception exception ) {
log.log( Level.WARNING, "Exception during doesDomainExist", exception );
return false;
}
}
/**
* Get hash of salted password associated with the username
*
* @param username
* Unique user identifier
* @return hash of salted password for the username
* @throws Exception
*/
public String getHashSaltedPassword( String username ) throws Exception {
return this.getAttribute( username, HASH_SALTED_PASSWORD );
}
/**
* Get userid associated with the username
*
* @param username
* Unique user identifier
* @return userid for the username
* @throws Exception
*/
public String getUserid( String username ) throws Exception {
return this.getAttribute( username, USER_ID );
}
/**
* Get specific attribute for the username
*
* @param username
* Unique user identifier
* @param attribute
* The user attribute name
* @return Value for the attribute, null otherwise
* @throws Exception
*/
private String getAttribute( String username, String attribute ) throws Exception {
GetAttributesRequest gar = new GetAttributesRequest( IDENTITY_DOMAIN, username ).withConsistentRead( Boolean.TRUE );
List<Attribute> data = this.sdb.getAttributes( gar ).getAttributes();
if ( data != null && !data.isEmpty() ) {
            Attribute keyAttribute = this.findAttributeInList( attribute, data );
            return keyAttribute != null ? keyAttribute.getValue() : null;
}
else {
return null;
}
}
/**
     * Checks whether the username already exists in the user domain.
*
* @param username
* Unique user identifier
     * @return true if the username already exists, false otherwise
*/
private boolean checkUsernameExists( String username ) {
GetAttributesRequest gar = new GetAttributesRequest( IDENTITY_DOMAIN, username ).withConsistentRead( Boolean.TRUE );
List<Attribute> data = this.sdb.getAttributes( gar ).getAttributes();
return ( data != null && !data.isEmpty() );
}
/**
* This method returns the username associated with the given uid.
*
* @param uid
     *            The unique identifier stored in the userid attribute
     * @return the username linked to the given uid, null if no match is found.
*/
public static String getUsernameFromUID( String uid ) {
AmazonSimpleDBClient sdbClient = new AmazonSimpleDBClient( new BasicAWSCredentials( Configuration.AWS_ACCESS_KEY_ID, Configuration.AWS_SECRET_KEY ) );
sdbClient.setEndpoint( Configuration.SIMPLEDB_ENDPOINT );
SelectRequest sr = new SelectRequest( "select * from `" + IDENTITY_DOMAIN + "` WHERE " + USER_ID + " = '" + uid + "'", Boolean.TRUE );
SelectResult result = sdbClient.select( sr );
if ( result.getItems().size() == 0 ) {
log.log( Level.SEVERE, "No username matched for UID [" + uid + "]" );
return null;
}
else {
if ( result.getItems().size() != 1 ) {
log.log( Level.SEVERE, "More than one username matched." );
for ( Item item : result.getItems() ) {
log.log( Level.WARNING, "\tMatched: " + item.getName() );
}
}
return result.getItems().get( 0 ).getName();
}
}
}
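// Illustrative usage sketch, not part of the original source: it assumes the same Configuration,
// Utilities and AWS SimpleDB setup used by UserAuthentication above, and the username, password
// and endpoint URI values are made up for the example.
class UserAuthenticationUsageExample {
    public static void main( String[] args ) throws Exception {
        UserAuthentication auth = new UserAuthentication();
        String endpointUri = "https://tvm.example.com"; // hypothetical endpoint URI used as part of the salt
        // Register a new user; registerUser() also verifies the stored credentials.
        boolean registered = auth.registerUser( "alice", "s3cret-password", endpointUri );
        System.out.println( "registered: " + registered );
        // Subsequent requests authenticate with a timestamp + signature instead of the raw password.
        String timestamp = String.valueOf( System.currentTimeMillis() );
        String signature = Utilities.sign( timestamp, auth.getHashSaltedPassword( "alice" ) );
        System.out.println( "userid: " + auth.authenticateUserSignature( "alice", timestamp, signature ) );
    }
}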
|
|
package module6;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import de.fhpotsdam.unfolding.UnfoldingMap;
import de.fhpotsdam.unfolding.data.Feature;
import de.fhpotsdam.unfolding.data.GeoJSONReader;
import de.fhpotsdam.unfolding.data.PointFeature;
import de.fhpotsdam.unfolding.geo.Location;
import de.fhpotsdam.unfolding.marker.AbstractShapeMarker;
import de.fhpotsdam.unfolding.marker.Marker;
import de.fhpotsdam.unfolding.marker.MultiMarker;
import de.fhpotsdam.unfolding.providers.Google;
import de.fhpotsdam.unfolding.providers.MBTilesMapProvider;
import de.fhpotsdam.unfolding.utils.MapUtils;
import parsing.ParseFeed;
import processing.core.PApplet;
/** EarthquakeCityMap
* An application with an interactive map displaying earthquake data.
* Author: UC San Diego Intermediate Software Development MOOC team
* @author Your name here
* Date: July 17, 2015
* */
public class EarthquakeCityMap extends PApplet {
	// We use member variables, instead of local variables, to store the data
	// that the setup and draw methods (as well as other methods) need to access.
// You can ignore this. It's to get rid of eclipse warnings
private static final long serialVersionUID = 1L;
	// IF YOU ARE WORKING OFFLINE, change the value of this variable to true
private static final boolean offline = false;
/** This is where to find the local tiles, for working without an Internet connection */
public static String mbTilesString = "blankLight-1-3.mbtiles";
//feed with magnitude 2.5+ Earthquakes
private String earthquakesURL = "http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_week.atom";
// The files containing city names and info and country names and info
private String cityFile = "city-data.json";
private String countryFile = "countries.geo.json";
// The map
private UnfoldingMap map;
// Markers for each city
private List<Marker> cityMarkers;
// Markers for each earthquake
private List<Marker> quakeMarkers;
// A List of country markers
private List<Marker> countryMarkers;
// NEW IN MODULE 5
private CommonMarker lastSelected;
private CommonMarker lastClicked;
public void setup() {
// (1) Initializing canvas and map tiles
size(900, 700, OPENGL);
if (offline) {
map = new UnfoldingMap(this, 200, 50, 650, 600, new MBTilesMapProvider(mbTilesString));
earthquakesURL = "2.5_week.atom"; // The same feed, but saved August 7, 2015
}
else {
map = new UnfoldingMap(this, 200, 50, 650, 600, new Google.GoogleMapProvider());
// IF YOU WANT TO TEST WITH A LOCAL FILE, uncomment the next line
//earthquakesURL = "2.5_week.atom";
}
MapUtils.createDefaultEventDispatcher(this, map);
// FOR TESTING: Set earthquakesURL to be one of the testing files by uncommenting
// one of the lines below. This will work whether you are online or offline
//earthquakesURL = "test1.atom";
//earthquakesURL = "test2.atom";
// Uncomment this line to take the quiz
//earthquakesURL = "quiz2.atom";
// (2) Reading in earthquake data and geometric properties
// STEP 1: load country features and markers
List<Feature> countries = GeoJSONReader.loadData(this, countryFile);
countryMarkers = MapUtils.createSimpleMarkers(countries);
// STEP 2: read in city data
List<Feature> cities = GeoJSONReader.loadData(this, cityFile);
cityMarkers = new ArrayList<Marker>();
for(Feature city : cities) {
cityMarkers.add(new CityMarker(city));
}
// STEP 3: read in earthquake RSS feed
List<PointFeature> earthquakes = ParseFeed.parseEarthquake(this, earthquakesURL);
quakeMarkers = new ArrayList<Marker>();
for(PointFeature feature : earthquakes) {
//check if LandQuake
if(isLand(feature)) {
quakeMarkers.add(new LandQuakeMarker(feature));
}
// OceanQuakes
else {
quakeMarkers.add(new OceanQuakeMarker(feature));
}
}
// could be used for debugging
printQuakes();
sortAndPrint(1000);
// (3) Add markers to map
// NOTE: Country markers are not added to the map. They are used
// for their geometric properties
map.addMarkers(quakeMarkers);
map.addMarkers(cityMarkers);
} // End setup
public void draw() {
background(0);
map.draw();
addKey();
}
	// sortAndPrint(int) is implemented at the bottom of this class and is called from setup().
/** Event handler that gets called automatically when the
* mouse moves.
*/
@Override
public void mouseMoved()
{
// clear the last selection
if (lastSelected != null) {
lastSelected.setSelected(false);
lastSelected = null;
}
selectMarkerIfHover(quakeMarkers);
selectMarkerIfHover(cityMarkers);
//loop();
}
// If there is a marker selected
private void selectMarkerIfHover(List<Marker> markers)
{
// Abort if there's already a marker selected
if (lastSelected != null) {
return;
}
for (Marker m : markers)
{
CommonMarker marker = (CommonMarker)m;
if (marker.isInside(map, mouseX, mouseY)) {
lastSelected = marker;
marker.setSelected(true);
return;
}
}
}
/** The event handler for mouse clicks
* It will display an earthquake and its threat circle of cities
* Or if a city is clicked, it will display all the earthquakes
* where the city is in the threat circle
*/
@Override
public void mouseClicked()
{
if (lastClicked != null) {
unhideMarkers();
lastClicked = null;
}
else if (lastClicked == null)
{
checkEarthquakesForClick();
if (lastClicked == null) {
checkCitiesForClick();
}
}
}
// Helper method that will check if a city marker was clicked on
// and respond appropriately
private void checkCitiesForClick()
{
if (lastClicked != null) return;
		// Loop over the city markers to see if one of them was clicked
for (Marker marker : cityMarkers) {
if (!marker.isHidden() && marker.isInside(map, mouseX, mouseY)) {
lastClicked = (CommonMarker)marker;
				// Hide all the other cities and any earthquakes whose threat circle does not contain this city
for (Marker mhide : cityMarkers) {
if (mhide != lastClicked) {
mhide.setHidden(true);
}
}
for (Marker mhide : quakeMarkers) {
EarthquakeMarker quakeMarker = (EarthquakeMarker)mhide;
if (quakeMarker.getDistanceTo(marker.getLocation())
> quakeMarker.threatCircle()) {
quakeMarker.setHidden(true);
}
}
return;
}
}
}
// Helper method that will check if an earthquake marker was clicked on
// and respond appropriately
private void checkEarthquakesForClick()
{
if (lastClicked != null) return;
// Loop over the earthquake markers to see if one of them is selected
for (Marker m : quakeMarkers) {
EarthquakeMarker marker = (EarthquakeMarker)m;
if (!marker.isHidden() && marker.isInside(map, mouseX, mouseY)) {
lastClicked = marker;
				// Hide all the other earthquakes and any cities outside this quake's threat circle
for (Marker mhide : quakeMarkers) {
if (mhide != lastClicked) {
mhide.setHidden(true);
}
}
for (Marker mhide : cityMarkers) {
if (mhide.getDistanceTo(marker.getLocation())
> marker.threatCircle()) {
mhide.setHidden(true);
}
}
return;
}
}
}
// loop over and unhide all markers
private void unhideMarkers() {
for(Marker marker : quakeMarkers) {
marker.setHidden(false);
}
for(Marker marker : cityMarkers) {
marker.setHidden(false);
}
}
// helper method to draw key in GUI
private void addKey() {
// Remember you can use Processing's graphics methods here
fill(255, 250, 240);
int xbase = 25;
int ybase = 50;
rect(xbase, ybase, 150, 250);
fill(0);
textAlign(LEFT, CENTER);
textSize(12);
text("Earthquake Key", xbase+25, ybase+25);
fill(150, 30, 30);
int tri_xbase = xbase + 35;
int tri_ybase = ybase + 50;
triangle(tri_xbase, tri_ybase-CityMarker.TRI_SIZE, tri_xbase-CityMarker.TRI_SIZE,
tri_ybase+CityMarker.TRI_SIZE, tri_xbase+CityMarker.TRI_SIZE,
tri_ybase+CityMarker.TRI_SIZE);
fill(0, 0, 0);
textAlign(LEFT, CENTER);
text("City Marker", tri_xbase + 15, tri_ybase);
text("Land Quake", xbase+50, ybase+70);
text("Ocean Quake", xbase+50, ybase+90);
text("Size ~ Magnitude", xbase+25, ybase+110);
fill(255, 255, 255);
ellipse(xbase+35,
ybase+70,
10,
10);
rect(xbase+35-5, ybase+90-5, 10, 10);
fill(color(255, 255, 0));
ellipse(xbase+35, ybase+140, 12, 12);
fill(color(0, 0, 255));
ellipse(xbase+35, ybase+160, 12, 12);
fill(color(255, 0, 0));
ellipse(xbase+35, ybase+180, 12, 12);
textAlign(LEFT, CENTER);
fill(0, 0, 0);
text("Shallow", xbase+50, ybase+140);
text("Intermediate", xbase+50, ybase+160);
text("Deep", xbase+50, ybase+180);
text("Past hour", xbase+50, ybase+200);
fill(255, 255, 255);
int centerx = xbase+35;
int centery = ybase+200;
ellipse(centerx, centery, 12, 12);
strokeWeight(2);
line(centerx-8, centery-8, centerx+8, centery+8);
line(centerx-8, centery+8, centerx+8, centery-8);
}
// Checks whether this quake occurred on land. If it did, it sets the
// "country" property of its PointFeature to the country where it occurred
// and returns true. Notice that the helper method isInCountry will
// set this "country" property already. Otherwise it returns false.
private boolean isLand(PointFeature earthquake) {
		// Loop over all countries to check whether the quake's location falls inside any of them.
for (Marker country : countryMarkers) {
if (isInCountry(earthquake, country)) {
return true;
}
}
// not inside any country
return false;
}
	// Prints each country together with the number of earthquakes that occurred in it,
	// followed by the count of ocean quakes. Country markers carry a "name" property and
	// LandQuakeMarkers carry a "country" property, which is how quakes are matched to countries.
private void printQuakes() {
int totalWaterQuakes = quakeMarkers.size();
for (Marker country : countryMarkers) {
String countryName = country.getStringProperty("name");
int numQuakes = 0;
for (Marker marker : quakeMarkers)
{
EarthquakeMarker eqMarker = (EarthquakeMarker)marker;
if (eqMarker.isOnLand()) {
if (countryName.equals(eqMarker.getStringProperty("country"))) {
numQuakes++;
}
}
}
if (numQuakes > 0) {
totalWaterQuakes -= numQuakes;
System.out.println(countryName + ": " + numQuakes);
}
}
System.out.println("OCEAN QUAKES: " + totalWaterQuakes);
}
// helper method to test whether a given earthquake is in a given country
// This will also add the country property to the properties of the earthquake feature if
// it's in one of the countries.
// You should not have to modify this code
private boolean isInCountry(PointFeature earthquake, Marker country) {
// getting location of feature
Location checkLoc = earthquake.getLocation();
// some countries represented it as MultiMarker
// looping over SimplePolygonMarkers which make them up to use isInsideByLoc
if(country.getClass() == MultiMarker.class) {
// looping over markers making up MultiMarker
for(Marker marker : ((MultiMarker)country).getMarkers()) {
// checking if inside
if(((AbstractShapeMarker)marker).isInsideByLocation(checkLoc)) {
earthquake.addProperty("country", country.getProperty("name"));
// return if is inside one
return true;
}
}
}
// check if inside country represented by SimplePolygonMarker
else if(((AbstractShapeMarker)country).isInsideByLocation(checkLoc)) {
earthquake.addProperty("country", country.getProperty("name"));
return true;
}
return false;
}
private void sortAndPrint(int numToPrint){
Object[] markers = quakeMarkers.toArray();
Arrays.sort(markers);
		// Print at most numToPrint markers; the previous "markers.length - 1" skipped the last quake.
		int endOfLoop = Math.min(markers.length, numToPrint);
for(int i=0; i < endOfLoop; i++){
System.out.println(markers[i]);
}
}
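	// Illustrative launcher, not part of the original assignment code: allows the sketch to be
	// started from a plain JVM via Processing's standard PApplet entry point. The string must
	// match this class's fully qualified name.
	public static void main(String[] args) {
		PApplet.main(new String[] { "module6.EarthquakeCityMap" });
	}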
}
|
|
package trips.tdp.fi.uba.ar.tripsandroid.model;
import android.util.Log;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
import trips.tdp.fi.uba.ar.tripsandroid.BackEndClient;
import trips.tdp.fi.uba.ar.tripsandroid.model.media.Audioguide;
import trips.tdp.fi.uba.ar.tripsandroid.model.media.Image;
/**
* Created by mbosco on 3/22/17.
*/
public class Attraction extends Stop{
private int id;
private float latitude;
private float longitude;
@SerializedName("schedule")
private String scheduleTime;
private float cost = -1;
private int averageTime = -1;
private Classification classification;
private City city;
private String address;
@SerializedName("audioGuides")
private ArrayList<Audioguide> audioguides;
private ArrayList<PointOfInterest> pointsOfInterest;
private ArrayList<Image> maps;
public String getMapImage(){
if (maps != null && maps.size() > 0){
return BackEndClient.getAttractionMapFullUrl(maps.get(0).getPath());
}else{
return this.getFullImageUrl(0);
}
}
public ArrayList<Image> getMaps() {
return maps;
}
public void setMaps(ArrayList<Image> maps) {
this.maps = maps;
}
public String getTelephone() {
return telephone;
}
public void setTelephone(String telephone) {
this.telephone = telephone;
}
private String telephone;
private ArrayList<Image> images;
public Attraction(int id, String name, String description) {
super(name, description);
this.id = id;
}
public ArrayList<Image> getImages(){
return this.images;
}
public float getLatitude(){
return latitude;
}
public void setLatitude(float latitude) {
this.latitude = latitude;
}
public float getLongitude() {
return longitude;
}
public void setLongitude(float longitude) {
this.longitude = longitude;
}
public String getSchedule() {
return scheduleTime;
}
public void setScheduleTime(String scheduleTime) {
this.scheduleTime = scheduleTime;
}
public float getCost() {
return cost;
}
public void setCost(float cost) {
this.cost = cost;
}
public int getAverageTime() {
return averageTime;
}
public void setAverageTime(int averageTime) {
this.averageTime = averageTime;
}
public Image getImage(int i){
try{
return images.get(i);
}catch (Exception e){
return null;
}
}
    public String getFullImageUrl(int i){
        Image image = getImage(i);
        // getImage() returns null when the index is out of range or no images were loaded.
        if (image == null){
            return null;
        }
        return BackEndClient.getAttractionImageUrl(image.getPath());
    }
public ArrayList<String> getImagesFullPath(){
ArrayList<String> a = new ArrayList<>();
for (Image i: images){
String url = BackEndClient.getAttractionImageUrl(i.getPath());
a.add(url);
}
return a;
}
public int getId() {
return id;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public City getCity() {
return city;
}
public void setCity(City city) {
this.city = city;
}
public Classification getClassification() {
return classification;
}
public void setClassification(Classification classification) {
this.classification = classification;
}
public ArrayList<Audioguide> getAudioguides() {
return audioguides;
}
public void setAudioguides(ArrayList<Audioguide> audioguides){
this.audioguides = audioguides;
}
public boolean hasAudioguide(){
return this.getAudioguides() != null && this.getAudioguides().size()>=1;
}
public boolean hasSchedule() {
        return (this.scheduleTime != null && !this.scheduleTime.isEmpty());
}
public boolean hasAverageTime() {
return (this.averageTime > 0);
}
public boolean hasCost() {
return this.cost > 0;
}
public boolean hasPhoneNumber() {
        return (this.telephone != null && !this.telephone.isEmpty());
}
public boolean isVisitable() {
        Log.d("NULL", Boolean.toString(pointsOfInterest != null));
        // Guard the size log against a null list to avoid a NullPointerException.
        Log.d("SIZE", Integer.toString(pointsOfInterest != null ? pointsOfInterest.size() : 0));
        return pointsOfInterest != null && pointsOfInterest.size() > 0;
}
public ArrayList<PointOfInterest> getPointsOfInterest() {
return pointsOfInterest;
}
public void setPointsOfInterest(ArrayList<PointOfInterest> pointsOfInterest) {
this.pointsOfInterest = pointsOfInterest;
}
}
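// Illustrative usage sketch, not part of the original source: shows how the optional fields of a
// fetched Attraction are typically checked before rendering. The id, name and schedule values
// below are made up for the example.
class AttractionUsageExample {
    static String scheduleLabel(Attraction attraction) {
        // hasSchedule() guards against both a null and an empty schedule string.
        return attraction.hasSchedule() ? attraction.getSchedule() : "Schedule not available";
    }
    public static void main(String[] args) {
        Attraction attraction = new Attraction(1, "Obelisco", "Iconic monument in Buenos Aires");
        attraction.setScheduleTime("09:00 - 18:00");
        System.out.println(scheduleLabel(attraction));
    }
}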
|
|
package mnm.hdfontgen.pack;
import mnm.hdfontgen.pack.generator.FontProviderFontGenerator;
import mnm.hdfontgen.pack.generator.LegacyFontGenerator;
import mnm.hdfontgen.pack.provider.FontProvider;
import mnm.hdfontgen.pack.provider.FontProvidersJson;
import mnm.hdfontgen.pack.provider.StandardFontProviders;
import java.awt.*;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Predicate;
import java.util.function.UnaryOperator;
public class PackSettings {
public final PackFormat format;
public final String description;
public final Map<ResourcePath, FontTexture> fonts;
private PackSettings(PackFormat format, String description, Map<ResourcePath, FontTexture> fonts) {
this.format = format;
this.description = description;
this.fonts = Map.copyOf(fonts);
}
public PackJson getPackJson() {
return new PackJson(format.getFormat(), description);
}
public PackGenerator createGenerator() {
if (!format.supportsFontProviders()) {
return new LegacyFontGenerator(this);
}
return new FontProviderFontGenerator(this);
}
public abstract static class FontTexture {
public final ResourcePath name;
protected FontTexture(ResourcePath name) {
this.name = name;
}
public abstract List<FontProvider> getProviders(PackFormat format);
}
public static class Bitmap extends FontTexture {
public final Font font;
public final TextureSize size;
public final boolean unicode;
private Bitmap(ResourcePath name, Font font, TextureSize size, boolean unicode) {
super(name);
this.font = font;
this.size = size;
this.unicode = unicode;
}
public HDFont getFont() {
return new HDFont(font, size);
}
@Override
public List<FontProvider> getProviders(PackFormat format) {
List<FontProvider> providers = new ArrayList<>(4);
providers.add(StandardFontProviders.ascii(this));
if (format.supportsFontProviders()) {
providers.add(StandardFontProviders.nonLatinEuropean(this));
providers.add(StandardFontProviders.accented(this));
}
if (this.unicode) {
providers.add(StandardFontProviders.unicodePages(this));
}
return providers;
}
}
public static class TrueType extends FontTexture {
public final Path font;
public final float oversample;
private TrueType(ResourcePath name, Path font, float oversample) {
super(name);
this.font = font;
this.oversample = oversample;
}
@Override
public List<FontProvider> getProviders(PackFormat format) {
return List.of(
StandardFontProviders.trueType(this)
);
}
}
public static class Builder implements BuilderBase<PackSettings> {
private final PackFormat format;
private String description;
private Map<ResourcePath, FontTexture> fonts = new HashMap<>();
public Builder(PackFormat format) {
this.format = Objects.requireNonNull(format);
}
public Builder withDescription(String desc) {
this.description = Objects.requireNonNull(desc);
return this;
}
public Builder bitmap(ResourcePath name, UnaryOperator<BitmapBuilder> func) {
return addFontProvider(name, new BitmapBuilder(name), func);
}
public Builder trueType(ResourcePath name, UnaryOperator<TrueTypeBuilder> func) {
if (!this.format.supportsTrueTypeFonts()) {
throw new UnsupportedOperationException("Pack format " + this.format + " does not support true type fonts.");
}
return addFontProvider(name, new TrueTypeBuilder(name), func);
}
protected <T extends FontTexture, B extends BuilderBase<T>>
Builder addFontProvider(ResourcePath name, B builder, UnaryOperator<B> func) {
// make sure this name hasn't been registered already
if (this.fonts.containsKey(name)) {
throw new UnsupportedOperationException("Provider named " + name + " was already created.");
}
// named fonts are only supported in V6 and up
if (!this.format.supportsNamedFonts() && !FontProvidersJson.DEFAULT_NAME.equals(name)) {
throw new UnsupportedOperationException("Pack format " + this.format + " does not support named fonts. Only " + FontProvidersJson.DEFAULT_NAME + " is supported.");
}
this.fonts.put(name, func.apply(builder).build());
return this;
}
@Override
public PackSettings build() {
checkNotNull(this.description, "description");
check(this.fonts, Predicate.not(Map::isEmpty), "fonts is empty");
return new PackSettings(format, description, fonts);
}
public class BitmapBuilder implements BuilderBase<Bitmap> {
private final ResourcePath name;
private Font font;
private TextureSize size;
private boolean unicode;
BitmapBuilder(ResourcePath name) {
this.name = name;
}
private String makeDescription() {
var fontName = font.getFontName();
var withUnicode = unicode ? " with unicode" : "";
var versions = format.getVersionRange();
return String.format("%s %s%s for Minecraft %s", fontName, size, withUnicode, versions);
}
public BitmapBuilder withFont(Font font) {
this.font = Objects.requireNonNull(font);
return this;
}
public BitmapBuilder withSize(TextureSize size) {
this.size = Objects.requireNonNull(size);
return this;
}
public BitmapBuilder withUnicode(boolean unicode) {
this.unicode = unicode;
return this;
}
@Override
public Bitmap build() {
checkNotNull(font, "font");
checkNotNull(size, "size");
if (description == null) {
description = makeDescription();
}
return new Bitmap(name, font, size, unicode);
}
}
public class TrueTypeBuilder implements BuilderBase<TrueType> {
private ResourcePath name;
private Path font;
private float oversample = 1f;
TrueTypeBuilder(ResourcePath name) {
this.name = name;
}
private String makeDescription() {
var fontName = font.getFileName().toString();
var versions = format.getVersionRange();
return String.format("%s x%.01f for Minecraft %s", fontName, oversample, versions);
}
public TrueTypeBuilder withFont(Path fontFile) {
this.font = Objects.requireNonNull(fontFile);
return this;
}
public TrueTypeBuilder withOversample(float oversample) {
this.oversample = oversample;
return this;
}
@Override
public TrueType build() {
checkNotNull(font, "font");
if (description == null) {
description = makeDescription();
}
return new TrueType(name, font, oversample);
}
}
}
private static void checkNotNull(Object obj, String message) {
check(obj, Objects::nonNull, message);
}
private static <T> T check(T obj, Predicate<T> condition, String message) {
if (!condition.test(obj)) {
throw new IllegalStateException(message);
}
return obj;
}
private interface BuilderBase<T> {
T build();
}
}
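// Hypothetical usage sketch, not part of the original source: builds settings for a single bitmap
// font with the fluent Builder API above. The concrete PackFormat, ResourcePath, Font and
// TextureSize values are passed in by the caller because their constants are not shown here, and
// the pack description is an arbitrary example string.
class PackSettingsUsageExample {
    static PackSettings bitmapPack(PackFormat format, ResourcePath name, Font font, TextureSize size) {
        return new PackSettings.Builder(format)
                .withDescription("Example HD font pack")
                .bitmap(name, b -> b
                        .withFont(font)
                        .withSize(size)
                        .withUnicode(true))
                .build();
    }
}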
|
|
// Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.cpp;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact;
import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact;
import com.google.devtools.build.lib.actions.ArtifactRoot;
import com.google.devtools.build.lib.actions.ArtifactRoot.RootType;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.FeatureConfiguration;
import com.google.devtools.build.lib.rules.cpp.CcToolchainVariables.LibraryToLinkValue;
import com.google.devtools.build.lib.rules.cpp.CcToolchainVariables.SequenceBuilder;
import com.google.devtools.build.lib.rules.cpp.CppActionConfigs.CppPlatform;
import com.google.devtools.build.lib.rules.cpp.Link.LinkTargetType;
import com.google.devtools.build.lib.rules.cpp.Link.LinkingMode;
import com.google.devtools.build.lib.testutil.TestUtils;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.view.config.crosstool.CrosstoolConfig.CToolchain;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Tests for {@link LinkCommandLine}. In particular, tests the command line emitted subject to the
* presence of certain build variables.
*/
@RunWith(JUnit4.class)
public final class LinkCommandLineTest extends BuildViewTestCase {
private Artifact scratchArtifact(String s) {
Path execRoot = outputBase.getRelative("exec");
String outSegment = "root";
Path outputRoot = execRoot.getRelative(outSegment);
ArtifactRoot root = ArtifactRoot.asDerivedRoot(execRoot, RootType.Output, outSegment);
try {
return ActionsTestUtil.createArtifact(
root, scratch.overwriteFile(outputRoot.getRelative(s).toString()));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private CcToolchainVariables.Builder getMockBuildVariables() {
return getMockBuildVariables(ImmutableList.<String>of());
}
private static CcToolchainVariables.Builder getMockBuildVariables(
ImmutableList<String> linkstampOutputs) {
CcToolchainVariables.Builder result = CcToolchainVariables.builder();
result.addStringVariable(LinkBuildVariables.GENERATE_INTERFACE_LIBRARY.getVariableName(), "no");
result.addStringVariable(
LinkBuildVariables.INTERFACE_LIBRARY_INPUT.getVariableName(), "ignored");
result.addStringVariable(
LinkBuildVariables.INTERFACE_LIBRARY_OUTPUT.getVariableName(), "ignored");
result.addStringVariable(
LinkBuildVariables.INTERFACE_LIBRARY_BUILDER.getVariableName(), "ignored");
result.addStringSequenceVariable(
LinkBuildVariables.LINKSTAMP_PATHS.getVariableName(), linkstampOutputs);
return result;
}
private FeatureConfiguration getMockFeatureConfiguration() throws Exception {
ImmutableList<CToolchain.Feature> features =
new ImmutableList.Builder<CToolchain.Feature>()
.addAll(
CppActionConfigs.getLegacyFeatures(
CppPlatform.LINUX,
ImmutableSet.of(),
"MOCK_LINKER_TOOL",
/* supportsEmbeddedRuntimes= */ true,
/* supportsInterfaceSharedLibraries= */ false,
/* doNotSplitLinkingCmdline= */ true))
.addAll(
CppActionConfigs.getFeaturesToAppearLastInFeaturesList(
ImmutableSet.of(), /* doNotSplitLinkingCmdline= */ true))
.build();
ImmutableList<CToolchain.ActionConfig> actionConfigs =
CppActionConfigs.getLegacyActionConfigs(
CppPlatform.LINUX,
"MOCK_GCC_TOOL",
"MOCK_AR_TOOL",
"MOCK_STRIP_TOOL",
/* supportsInterfaceSharedLibraries= */ false,
/* existingActionConfigNames= */ ImmutableSet.of());
return CcToolchainTestHelper.buildFeatures(features, actionConfigs)
.getFeatureConfiguration(
ImmutableSet.of(
Link.LinkTargetType.EXECUTABLE.getActionName(),
Link.LinkTargetType.NODEPS_DYNAMIC_LIBRARY.getActionName(),
Link.LinkTargetType.STATIC_LIBRARY.getActionName(),
CppActionNames.CPP_COMPILE,
CppActionNames.LINKSTAMP_COMPILE,
CppRuleClasses.INCLUDES,
CppRuleClasses.PREPROCESSOR_DEFINES,
CppRuleClasses.INCLUDE_PATHS,
CppRuleClasses.PIC));
}
private LinkCommandLine.Builder minimalConfiguration(CcToolchainVariables.Builder variables)
throws Exception {
return new LinkCommandLine.Builder()
.setBuildVariables(variables.build())
.setFeatureConfiguration(getMockFeatureConfiguration());
}
private LinkCommandLine.Builder minimalConfiguration() throws Exception {
return minimalConfiguration(getMockBuildVariables());
}
private void assertError(String expectedSubstring, LinkCommandLine.Builder builder) {
RuntimeException e = assertThrows(RuntimeException.class, () -> builder.build());
assertThat(e).hasMessageThat().contains(expectedSubstring);
}
@Test
public void testStaticLinkWithBuildInfoHeadersIsError() throws Exception {
assertError(
"build info headers may only be present",
minimalConfiguration()
.setLinkTargetType(LinkTargetType.STATIC_LIBRARY)
.setLinkingMode(LinkingMode.STATIC)
.setBuildInfoHeaderArtifacts(
ImmutableList.of(scratchArtifact("FakeBuildInfoHeaderArtifact1"))));
}
/**
* Tests that when linking without linkstamps, the exec command is the same as the link command.
*/
@Test
public void testLinkCommandIsExecCommandWhenNoLinkstamps() throws Exception {
LinkCommandLine linkConfig =
minimalConfiguration()
.setActionName(LinkTargetType.EXECUTABLE.getActionName())
.setLinkTargetType(LinkTargetType.EXECUTABLE)
.build();
List<String> rawLinkArgv = linkConfig.getRawLinkArgv();
assertThat(linkConfig.arguments()).isEqualTo(rawLinkArgv);
}
/** Tests that symbol count output does not appear in argv when it should not. */
@Test
public void testSymbolCountsDisabled() throws Exception {
LinkCommandLine linkConfig =
minimalConfiguration()
.forceToolPath("foo/bar/gcc")
.setLinkTargetType(LinkTargetType.NODEPS_DYNAMIC_LIBRARY)
.setLinkingMode(LinkingMode.STATIC)
.build();
List<String> argv = linkConfig.getRawLinkArgv();
for (String arg : argv) {
assertThat(arg).doesNotContain("print-symbol-counts");
}
}
@Test
public void testLibrariesToLink() throws Exception {
CcToolchainVariables.Builder variables =
getMockBuildVariables()
.addCustomBuiltVariable(
LinkBuildVariables.LIBRARIES_TO_LINK.getVariableName(),
new SequenceBuilder()
.addValue(LibraryToLinkValue.forStaticLibrary("foo", false))
.addValue(LibraryToLinkValue.forStaticLibrary("bar", true)));
LinkCommandLine linkConfig =
minimalConfiguration(variables)
.forceToolPath("foo/bar/gcc")
.setActionName(LinkTargetType.NODEPS_DYNAMIC_LIBRARY.getActionName())
.setLinkTargetType(LinkTargetType.NODEPS_DYNAMIC_LIBRARY)
.setLinkingMode(LinkingMode.STATIC)
.build();
String commandLine = Joiner.on(" ").join(linkConfig.getRawLinkArgv());
assertThat(commandLine).matches(".*foo -Wl,-whole-archive bar -Wl,-no-whole-archive.*");
}
@Test
public void testLibrarySearchDirectories() throws Exception {
CcToolchainVariables.Builder variables =
getMockBuildVariables()
.addStringSequenceVariable(
LinkBuildVariables.LIBRARY_SEARCH_DIRECTORIES.getVariableName(),
ImmutableList.of("foo", "bar"));
LinkCommandLine linkConfig =
minimalConfiguration(variables)
.setActionName(LinkTargetType.NODEPS_DYNAMIC_LIBRARY.getActionName())
.setLinkTargetType(LinkTargetType.NODEPS_DYNAMIC_LIBRARY)
.setLinkingMode(LinkingMode.STATIC)
.build();
assertThat(linkConfig.getRawLinkArgv()).containsAtLeast("-Lfoo", "-Lbar").inOrder();
}
@Test
public void testLinkerParamFileForStaticLibrary() throws Exception {
CcToolchainVariables.Builder variables =
getMockBuildVariables()
.addStringVariable(
LinkBuildVariables.LINKER_PARAM_FILE.getVariableName(), "foo/bar.param");
LinkCommandLine linkConfig =
minimalConfiguration(variables)
.setActionName(LinkTargetType.STATIC_LIBRARY.getActionName())
.setLinkTargetType(LinkTargetType.STATIC_LIBRARY)
.setLinkingMode(Link.LinkingMode.STATIC)
.build();
assertThat(linkConfig.getRawLinkArgv()).contains("@foo/bar.param");
}
@Test
public void testLinkerParamFileForDynamicLibrary() throws Exception {
CcToolchainVariables.Builder variables =
getMockBuildVariables()
.addStringVariable(
LinkBuildVariables.LINKER_PARAM_FILE.getVariableName(), "foo/bar.param");
LinkCommandLine linkConfig =
minimalConfiguration(variables)
.setActionName(LinkTargetType.NODEPS_DYNAMIC_LIBRARY.getActionName())
.setLinkTargetType(LinkTargetType.NODEPS_DYNAMIC_LIBRARY)
.setLinkingMode(Link.LinkingMode.STATIC)
.doNotSplitLinkingCmdLine()
.build();
assertThat(linkConfig.getRawLinkArgv()).contains("@foo/bar.param");
}
private List<String> basicArgv(LinkTargetType targetType) throws Exception {
return basicArgv(targetType, getMockBuildVariables());
}
private List<String> basicArgv(LinkTargetType targetType, CcToolchainVariables.Builder variables)
throws Exception {
LinkCommandLine linkConfig =
minimalConfiguration(variables)
.setActionName(targetType.getActionName())
.setLinkTargetType(targetType)
.setLinkingMode(LinkingMode.STATIC)
.build();
return linkConfig.arguments();
}
/** Tests that a "--force_pic" configuration applies "-pie" to executable links. */
@Test
public void testPicMode() throws Exception {
String pieArg = "-pie";
// Disabled:
assertThat(basicArgv(LinkTargetType.EXECUTABLE)).doesNotContain(pieArg);
assertThat(basicArgv(LinkTargetType.NODEPS_DYNAMIC_LIBRARY)).doesNotContain(pieArg);
assertThat(basicArgv(LinkTargetType.STATIC_LIBRARY)).doesNotContain(pieArg);
assertThat(basicArgv(LinkTargetType.PIC_STATIC_LIBRARY)).doesNotContain(pieArg);
assertThat(basicArgv(LinkTargetType.ALWAYS_LINK_STATIC_LIBRARY)).doesNotContain(pieArg);
assertThat(basicArgv(LinkTargetType.ALWAYS_LINK_PIC_STATIC_LIBRARY)).doesNotContain(pieArg);
CcToolchainVariables.Builder picVariables =
getMockBuildVariables()
.addStringVariable(LinkBuildVariables.FORCE_PIC.getVariableName(), "");
// Enabled:
useConfiguration("--force_pic");
assertThat(basicArgv(LinkTargetType.EXECUTABLE, picVariables)).contains(pieArg);
assertThat(basicArgv(LinkTargetType.NODEPS_DYNAMIC_LIBRARY, picVariables))
.doesNotContain(pieArg);
assertThat(basicArgv(LinkTargetType.STATIC_LIBRARY, picVariables)).doesNotContain(pieArg);
assertThat(basicArgv(LinkTargetType.PIC_STATIC_LIBRARY, picVariables)).doesNotContain(pieArg);
assertThat(basicArgv(LinkTargetType.ALWAYS_LINK_STATIC_LIBRARY, picVariables))
.doesNotContain(pieArg);
assertThat(basicArgv(LinkTargetType.ALWAYS_LINK_PIC_STATIC_LIBRARY, picVariables))
.doesNotContain(pieArg);
}
@Test
public void testSplitStaticLinkCommand() throws Exception {
useConfiguration("--nostart_end_lib");
Artifact paramFile = scratchArtifact("some/file.params");
LinkCommandLine linkConfig =
minimalConfiguration(
getMockBuildVariables()
.addStringVariable(
LinkBuildVariables.OUTPUT_EXECPATH.getVariableName(), "a/FakeOutput")
.addStringVariable(
LinkBuildVariables.LINKER_PARAM_FILE.getVariableName(), "some/file.params"))
.setActionName(LinkTargetType.STATIC_LIBRARY.getActionName())
.setLinkTargetType(LinkTargetType.STATIC_LIBRARY)
.forceToolPath("foo/bar/ar")
.setParamFile(paramFile)
.build();
Pair<List<String>, List<String>> result = linkConfig.splitCommandline();
assertThat(result.first).isEqualTo(Arrays.asList("foo/bar/ar", "@some/file.params"));
assertThat(result.second).isEqualTo(Arrays.asList("rcsD", "a/FakeOutput"));
}
@Test
public void testSplitDynamicLinkCommand() throws Exception {
useConfiguration("--nostart_end_lib");
Artifact paramFile = scratchArtifact("some/file.params");
LinkCommandLine linkConfig =
minimalConfiguration(
getMockBuildVariables()
.addStringVariable(
LinkBuildVariables.OUTPUT_EXECPATH.getVariableName(), "a/FakeOutput")
.addStringVariable(
LinkBuildVariables.LINKER_PARAM_FILE.getVariableName(), "some/file.params")
.addStringSequenceVariable(
LinkBuildVariables.USER_LINK_FLAGS.getVariableName(), ImmutableList.of("")))
.setActionName(LinkTargetType.DYNAMIC_LIBRARY.getActionName())
.setLinkTargetType(LinkTargetType.DYNAMIC_LIBRARY)
.forceToolPath("foo/bar/linker")
.setParamFile(paramFile)
.doNotSplitLinkingCmdLine()
.build();
Pair<List<String>, List<String>> result = linkConfig.splitCommandline();
assertThat(result.first).containsExactly("foo/bar/linker", "@some/file.params").inOrder();
assertThat(result.second).containsExactly("-shared", "-o", "a/FakeOutput", "").inOrder();
}
@Test
public void testStaticLinkCommand() throws Exception {
useConfiguration("--nostart_end_lib");
LinkCommandLine linkConfig =
minimalConfiguration(
getMockBuildVariables()
.addStringVariable(
LinkBuildVariables.OUTPUT_EXECPATH.getVariableName(), "a/FakeOutput"))
.forceToolPath("foo/bar/ar")
.setActionName(LinkTargetType.STATIC_LIBRARY.getActionName())
.setLinkTargetType(LinkTargetType.STATIC_LIBRARY)
.build();
List<String> result = linkConfig.getRawLinkArgv();
assertThat(result).isEqualTo(Arrays.asList("foo/bar/ar", "rcsD", "a/FakeOutput"));
}
@Test
public void testSplitAlwaysLinkLinkCommand() throws Exception {
CcToolchainVariables.Builder variables =
CcToolchainVariables.builder()
.addStringVariable(CcCommon.SYSROOT_VARIABLE_NAME, "/usr/grte/v1")
.addStringVariable(LinkBuildVariables.OUTPUT_EXECPATH.getVariableName(), "a/FakeOutput")
.addStringVariable(
LinkBuildVariables.LINKER_PARAM_FILE.getVariableName(), "some/file.params")
.addCustomBuiltVariable(
LinkBuildVariables.LIBRARIES_TO_LINK.getVariableName(),
new CcToolchainVariables.SequenceBuilder()
.addValue(LibraryToLinkValue.forObjectFile("foo.o", false))
.addValue(LibraryToLinkValue.forObjectFile("bar.o", false)));
Artifact paramFile = scratchArtifact("some/file.params");
LinkCommandLine linkConfig =
minimalConfiguration(variables)
.setActionName(LinkTargetType.ALWAYS_LINK_STATIC_LIBRARY.getActionName())
.setLinkTargetType(LinkTargetType.ALWAYS_LINK_STATIC_LIBRARY)
.forceToolPath("foo/bar/ar")
.setParamFile(paramFile)
.build();
Pair<List<String>, List<String>> result = linkConfig.splitCommandline();
assertThat(result.first).isEqualTo(Arrays.asList("foo/bar/ar", "@some/file.params"));
assertThat(result.second).isEqualTo(Arrays.asList("rcsD", "a/FakeOutput", "foo.o", "bar.o"));
}
private SpecialArtifact createTreeArtifact(String name) {
FileSystem fs = scratch.getFileSystem();
Path execRoot = fs.getPath(TestUtils.tmpDir());
PathFragment execPath = PathFragment.create("out").getRelative(name);
return ActionsTestUtil.createTreeArtifactWithGeneratingAction(
ArtifactRoot.asDerivedRoot(execRoot, RootType.Output, "out"), execPath);
}
private void verifyArguments(
Iterable<String> arguments,
Iterable<String> allowedArguments,
Iterable<String> disallowedArguments) {
assertThat(arguments).containsAtLeastElementsIn(allowedArguments);
assertThat(arguments).containsNoneIn(disallowedArguments);
}
@Test
public void testTreeArtifactLink() throws Exception {
SpecialArtifact testTreeArtifact = createTreeArtifact("library_directory");
TreeFileArtifact library0 = TreeFileArtifact.createTreeOutput(testTreeArtifact, "library0.o");
TreeFileArtifact library1 = TreeFileArtifact.createTreeOutput(testTreeArtifact, "library1.o");
ArtifactExpander expander =
new ArtifactExpander() {
@Override
public void expand(Artifact artifact, Collection<? super Artifact> output) {
if (artifact.equals(testTreeArtifact)) {
output.add(library0);
output.add(library1);
}
          }
};
Iterable<String> treeArtifactsPaths = ImmutableList.of(testTreeArtifact.getExecPathString());
Iterable<String> treeFileArtifactsPaths =
ImmutableList.of(library0.getExecPathString(), library1.getExecPathString());
Artifact paramFile = scratchArtifact("some/file.params");
LinkCommandLine linkConfig =
minimalConfiguration(
getMockBuildVariables()
.addStringVariable(
LinkBuildVariables.LINKER_PARAM_FILE.getVariableName(), "some/file.params")
.addCustomBuiltVariable(
LinkBuildVariables.LIBRARIES_TO_LINK.getVariableName(),
new CcToolchainVariables.SequenceBuilder()
.addValue(
LibraryToLinkValue.forObjectFileGroup(
ImmutableList.of(testTreeArtifact), false))))
.forceToolPath("foo/bar/gcc")
.setActionName(LinkTargetType.STATIC_LIBRARY.getActionName())
.setLinkTargetType(LinkTargetType.STATIC_LIBRARY)
.setLinkingMode(Link.LinkingMode.STATIC)
.setParamFile(paramFile)
.build();
// Should only reference the tree artifact.
verifyArguments(linkConfig.arguments(null), treeArtifactsPaths, treeFileArtifactsPaths);
verifyArguments(linkConfig.getRawLinkArgv(null), treeArtifactsPaths, treeFileArtifactsPaths);
verifyArguments(
linkConfig.paramCmdLine().arguments(null), treeArtifactsPaths, treeFileArtifactsPaths);
// Should only reference tree file artifacts.
verifyArguments(linkConfig.arguments(expander), treeFileArtifactsPaths, treeArtifactsPaths);
verifyArguments(
linkConfig.getRawLinkArgv(expander), treeFileArtifactsPaths, treeArtifactsPaths);
verifyArguments(
linkConfig.paramCmdLine().arguments(expander), treeFileArtifactsPaths, treeArtifactsPaths);
}
}
|
|
package org.se.lab.web;
import org.apache.log4j.Logger;
import org.se.lab.db.data.Post;
import org.se.lab.db.data.User;
import org.se.lab.service.ActivityStreamService;
import org.se.lab.service.PostService;
import org.se.lab.service.ServiceException;
import org.se.lab.service.UserService;
import org.se.lab.utils.ArgumentChecker;
import org.se.lab.web.helper.RedirectHelper;
import org.se.lab.web.helper.Session;
import javax.annotation.PostConstruct;
import javax.enterprise.context.RequestScoped;
import javax.faces.context.ExternalContext;
import javax.faces.context.FacesContext;
import javax.faces.context.Flash;
import javax.inject.Inject;
import javax.inject.Named;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@Named
@RequestScoped
public class ActivityStreamBean implements Serializable {
private static final long serialVersionUID = 1L;
private static final int INVALID_STATE = -1;
private final static Logger LOG = Logger.getLogger(ActivityStreamBean.class);
@Inject
private ActivityStreamService service;
@Inject
private UserService uservice;
@Inject
private PostService pservice;
@Inject
private Session session;
private Flash flash;
private ExternalContext context;
private String inputText;
private String inputTextChild;
private List<Integer> contactIds;
private List<User> userContactList;
private List<Post> posts;
private Post post;
private List<Post> postChildren;
private int id = 0;
private User loggedInUser;
private String errorMsg;
public String getErrorMsg() {
return errorMsg;
}
public void setErrorMsg(String errorMsg) {
this.errorMsg = errorMsg;
}
@PostConstruct
public void init() {
int userId = session.getUserId();
context = FacesContext.getCurrentInstance().getExternalContext();
if (userId > INVALID_STATE) {
id = userId;
flash = context.getFlash();
flash.put("uid", id);
setLoggedInUser(loadLoggedInUser());
userContactList = uservice.getContactsOfUser(getLoggedInUser());
if (userContactList.size() > 0) {
contactIds = new ArrayList<>();
contactIds.add(getLoggedInUser().getId());
for (User c : userContactList) {
contactIds.add(c.getId());
}
loadPostsForUserAndContacts();
} else {
loadPostsForUser();
}
} else {
RedirectHelper.redirect("/pse/index.xhtml");
}
}
public List<Post> getChildPosts(Post post) {
postChildren = post.getChildPosts();
return postChildren;
}
public void addLike(Post post) {
try {
if (!post.getLikes().contains(getLoggedInUser())) {
post.addLike(getLoggedInUser());
pservice.updatePost(post);
} else {
post.removeLike(getLoggedInUser());
pservice.updatePost(post);
}
} catch (IllegalArgumentException e) {
String msg = "Post was empty";
LOG.error(msg, e);
setErrorMsg(msg);
} catch (ServiceException e) {
String msg = "Couldn't update post";
LOG.error(msg, e);
setErrorMsg(msg);
}
}
    public String getLikes(Post p) {
        if (p.getLikeCount() == 0)
            return "";
        StringBuilder sb = new StringBuilder();
        sb.append("liked by ");
        for (User u : p.getLikes())
            sb.append(" ").append(u.getUsername());
        return sb.toString();
    }
public void newPost(Post parentPost) {
if (parentPost == null) {
flash.put("inputText", inputText);
try {
post = pservice.createRootPost(getLoggedInUser(), inputText, new Date());
} catch (ServiceException e) {
String msg = "Couldn't create root post";
LOG.error(msg, e);
setErrorMsg(msg);
}
} else {
flash.put("inputText", inputTextChild);
LOG.info("appending comment to post: " + inputTextChild);
try {
post = pservice.createChildPost(parentPost, parentPost.getCommunity(), getLoggedInUser(), inputTextChild,
new Date());
} catch (ServiceException e) {
String msg = "Couldn't create child post";
LOG.error(msg, e);
setErrorMsg(msg);
}
}
flash.put("post", post);
LOG.info("Flash: " + flash.toString());
refreshPage();
}
public void deletePost(Post p) {
ArgumentChecker.assertNotNull(p, "post");
service.delete(p, getLoggedInUser());
refreshPage();
}
public boolean showDeleteButton(Post p) {
return p != null && p.getCommunity() != null
&& p.getCommunity().getPortaladminId() == getLoggedInUser().getId();
}
private void refreshPage() {
RedirectHelper.redirect("/pse/activityStream.xhtml");
}
public User loadLoggedInUser() {
try {
return uservice.findById(id);
} catch (ServiceException e) {
String msg = "Unable to find User with ID = " + id;
LOG.error(msg, e);
setErrorMsg(msg);
}
return null;
}
public void loadPostsForUser() {
try {
List<Post> uposts = service.getPostsForUser(getLoggedInUser());
setPosts(uposts);
} catch (ServiceException e) {
            String msg = "Unable to get posts of user";
LOG.error(msg, e);
setErrorMsg(msg);
}
}
public void loadPostsForUserAndContacts() {
try {
List<Post> uposts = service.getPostsForUserAndContacts(getLoggedInUser(), contactIds);
setPosts(uposts);
} catch (ServiceException e) {
            String msg = "Unable to get posts of user and contacts";
LOG.error(msg, e);
setErrorMsg(msg);
}
}
/**
* Getter & Setter for Properties
**/
public List<Post> getPosts() {
return posts;
}
public void setPosts(List<Post> posts) {
this.posts = posts;
}
public String getInputText() {
return inputText;
}
public void setInputText(String inputText) {
this.inputText = inputText;
}
public String getInputTextChild() {
return inputTextChild;
}
public void setInputTextChild(String inputTextChild) {
this.inputTextChild = inputTextChild;
}
public void setLoggedInUser(User loggedInUser) {
this.loggedInUser = loggedInUser;
}
public User getLoggedInUser() {
return loggedInUser;
}
private void writeObject(ObjectOutputStream stream) throws IOException {
stream.defaultWriteObject();
}
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
stream.defaultReadObject();
}
}
|
|
/*******************************************************************************
* PathVisio, a tool for data visualization and analysis using biological pathways
* Copyright 2006-2019 BiGCaT Bioinformatics
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package org.pathvisio.gui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.DecimalFormat;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.swing.Action;
import javax.swing.ActionMap;
import javax.swing.InputMap;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTable;
import javax.swing.JToolBar;
import javax.swing.KeyStroke;
import javax.swing.event.HyperlinkEvent;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import org.pathvisio.core.ApplicationEvent;
import org.pathvisio.core.Engine.ApplicationEventListener;
import org.pathvisio.core.debug.Logger;
import org.pathvisio.core.model.PathwayElement;
import org.pathvisio.core.util.Utils;
import org.pathvisio.core.view.Graphics;
import org.pathvisio.core.view.Handle;
import org.pathvisio.core.view.Label;
import org.pathvisio.core.view.SelectionBox;
import org.pathvisio.core.view.VPathway;
import org.pathvisio.core.view.VPathwayElement;
import org.pathvisio.core.view.VPathwayEvent;
import org.pathvisio.core.view.VPathwayListener;
import org.pathvisio.gui.BackpageTextProvider.BackpageAttributes;
import org.pathvisio.gui.BackpageTextProvider.BackpageXrefs;
import org.pathvisio.gui.CommonActions.ZoomAction;
import org.pathvisio.gui.dnd.PathwayImportHandler;
import org.pathvisio.gui.handler.PathwayTableModel;
import com.mammothsoftware.frwk.ddb.DropDownButton;
/**
 * This is the content of the main window in the WikiPathways applet,
* and contains the editor window, side panels, toolbar and menu.
*
* For the standalone application, the derived class MainPanelStandalone is used.
*/
public class MainPanel extends JPanel implements VPathwayListener, ApplicationEventListener {
private JSplitPane splitPane;
protected JToolBar toolBar;
private JScrollPane pathwayScrollPane;
private JScrollPane propertiesScrollPane;
protected JTabbedPane sidebarTabbedPane;
protected JMenuBar menuBar;
private GraphicsChoiceButton itemsDropDown;
private ObjectsPane objectsPane;
private JTable propertyTable;
private JComboBox zoomCombo;
protected BackpagePane backpagePane;
protected BackpageTextProvider bpt;
protected DataPaneTextProvider dpt;
protected CommonActions actions;
private final PathwayTableModel model;
Set<Action> hideActions;
protected SwingEngine swingEngine;
private final PathwayElementMenuListener pathwayElementMenuListener;
public PathwayTableModel getModel(){
return model;
}
public PathwayElementMenuListener getPathwayElementMenuListener()
{
return pathwayElementMenuListener;
}
private boolean mayAddAction(Action a) {
return hideActions == null || !hideActions.contains(a);
}
protected void addMenuActions(JMenuBar mb) {
JMenu fileMenu = new JMenu("File");
addToMenu(actions.saveAction, fileMenu);
addToMenu(actions.saveAsAction, fileMenu);
fileMenu.addSeparator();
addToMenu(actions.importAction, fileMenu);
addToMenu(actions.exportAction, fileMenu);
fileMenu.addSeparator();
addToMenu(actions.exitAction, fileMenu);
JMenu editMenu = new JMenu("Edit");
addToMenu(actions.undoAction, editMenu);
addToMenu(actions.copyAction, editMenu);
addToMenu(actions.pasteAction, editMenu);
editMenu.addSeparator();
JMenu selectionMenu = new JMenu("Selection");
for(Action a : actions.layoutActions) addToMenu(a, selectionMenu);
editMenu.add (selectionMenu);
JMenu viewMenu = new JMenu("View");
JMenu zoomMenu = new JMenu("Zoom");
viewMenu.add(zoomMenu);
for(Action a : actions.zoomActions) addToMenu(a, zoomMenu);
JMenu helpMenu = new JMenu("Help");
mb.add(fileMenu);
mb.add(editMenu);
mb.add(viewMenu);
mb.add(helpMenu);
}
/**
* Constructor for this class. Creates the main panel of this application, containing
* the main GUI elements (menubar, toolbar, sidepanel, drawing pane). Actions that should
* not be added to the menubar and toolbar should be specified in the hideActions parameter
* @param hideActions The {@link Action}s that should not be added to the toolbar and menubar
*/
public MainPanel(SwingEngine swingEngine, Set<Action> hideActions)
{
this.hideActions = hideActions;
this.swingEngine = swingEngine;
pathwayElementMenuListener = new PathwayElementMenuListener(swingEngine);
model = new PathwayTableModel(swingEngine);
}
public void createAndShowGUI()
{
setLayout(new BorderLayout());
setTransferHandler(new PathwayImportHandler());
swingEngine.getEngine().addApplicationEventListener(this);
actions = swingEngine.getActions();
toolBar = new JToolBar();
toolBar.setFloatable(false); // disable floatable toolbar, aka Abomination of interaction design.
addToolBarActions(swingEngine, toolBar);
add(toolBar, BorderLayout.PAGE_START);
// menuBar will be added by container (JFrame or JApplet)
pathwayScrollPane = new JScrollPane();
// set background color when no VPathway is loaded, override l&f because it is usually white.
pathwayScrollPane.getViewport().setBackground(Color.LIGHT_GRAY);
objectsPane = new ObjectsPane(swingEngine);
int numItemsPerRow = 10;
objectsPane.addButtons(actions.newDatanodeActions, "Data Nodes", numItemsPerRow);
objectsPane.addButtons(actions.newInteractionActions, "Basic interactions", numItemsPerRow);
//objectsPane.addButtons(actions.newRLInteractionActions, "Receptor/ligand", numItemsPerRow);
objectsPane.addButtons(actions.newMIMInteractionActions, "MIM interactions", numItemsPerRow);
//objectsPane.addButtons(actions.newAnnotationActions, "Annotations", numItemsPerRow);
objectsPane.addButtons(actions.newShapeActions, "Graphical elements", numItemsPerRow);
objectsPane.addButtons(actions.newCellularComponentActions, "Cellular compartments", numItemsPerRow);
objectsPane.addButtons(actions.newTemplateActions, "Templates", numItemsPerRow);
propertyTable = new JTable(model) {
public TableCellRenderer getCellRenderer(int row, int column) {
TableCellRenderer r = model.getCellRenderer(row, column);
return r == null ? super.getCellRenderer(row, column) : r;
}
public TableCellEditor getCellEditor(int row, int column) {
TableCellEditor e = model.getCellEditor(row, column);
return e == null ? super.getCellEditor(row, column) : e;
}
};
//TODO: make this prettier; it's not ideal for the table model to hold
//a reference to the table. Quick fix to prevent the TableCellEditor
//from remaining open when a new PathwayElement is selected
model.setTable(propertyTable);
propertiesScrollPane = new JScrollPane(propertyTable);
bpt = new BackpageTextProvider ();
bpt.addBackpageHook(new BackpageAttributes(swingEngine.getGdbManager().getCurrentGdb()));
bpt.addBackpageHook(new BackpageXrefs(swingEngine.getGdbManager().getCurrentGdb()));
backpagePane = new BackpagePane(bpt, swingEngine.getEngine());
backpagePane.addHyperlinkListener(swingEngine);
sidebarTabbedPane = new JTabbedPane();
sidebarTabbedPane.addTab("Objects", objectsPane);
sidebarTabbedPane.addTab( "Properties", propertiesScrollPane );
sidebarTabbedPane.addTab( "Backpage", new JScrollPane(backpagePane) );
splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT,
pathwayScrollPane, sidebarTabbedPane);
// set a small minimum size, to make sure we can keep resizing the side panel
sidebarTabbedPane.setMinimumSize(new Dimension(50,50));
splitPane.setResizeWeight(1);
splitPane.setOneTouchExpandable(true);
add(splitPane, BorderLayout.CENTER);
Action[] keyStrokeActions = new Action[] {
actions.copyAction,
actions.pasteAction,
};
InputMap im = getInputMap();
ActionMap am = getActionMap();
// define shortcuts
for(Action a : keyStrokeActions) {
im.put((KeyStroke)a.getValue(Action.ACCELERATOR_KEY), a.getValue(Action.NAME));
am.put(a.getValue(Action.NAME), a);
}
menuBar = new JMenuBar();
addMenuActions(menuBar);
}
/**
* Constructor for this class. Creates the main panel of this application, containing
* the main GUI elements (menubar, toolbar, sidepanel, drawing pane).
*/
public MainPanel(SwingEngine swingEngine)
{
this(swingEngine, null);
}
/** update the value in the zoom combo to the actual zoom percentage of the active pathway */
public void updateZoomCombo()
{
VPathway vpwy = swingEngine.getEngine().getActiveVPathway();
if (vpwy != null)
{
DecimalFormat df = new DecimalFormat("###.#");
zoomCombo.setSelectedItem(df.format(vpwy.getPctZoom())+"%");
}
}
/**
* {@link ActionListener} for the Zoom combobox on the toolbar. The user can select one
* of the predefined ZoomActions (50%, 100%, 200%, Zoom to fit, etc.),
* or enter a number or percentage manually.
*/
protected class ZoomComboListener implements ActionListener {
public void actionPerformed(ActionEvent e){
JComboBox combo = (JComboBox) e.getSource();
Object s = combo.getSelectedItem();
if (s instanceof Action) {
((Action) s).actionPerformed(e);
// after the selection of "fit to window" the new calculated zoom
// percentage is displayed
if(s instanceof CommonActions.ZoomToFitAction) {
updateZoomCombo();
}
} else if (s instanceof String) {
String zs = (String) s;
zs=zs.replace("%","");
try {
double zf = Double.parseDouble(zs);
if(zf > 0){ // ignore zero and negative input
ZoomAction za = new ZoomAction(swingEngine.getEngine(), zf);
za.setEnabled(true);
za.actionPerformed(e);
}
} catch (Exception ex) {
// Ignore bad input
}
}
}
}
protected void addCommonToolbarActions(final SwingEngine swingEngine, JToolBar tb)
{
// copy, paste and undo buttons
tb.addSeparator();
addToToolbar(actions.copyAction);
addToToolbar(actions.pasteAction);
tb.addSeparator();
addToToolbar(actions.undoAction);
tb.addSeparator();
// zoom drop-down
addToToolbar(new JLabel("Zoom:", JLabel.LEFT));
zoomCombo = new JComboBox(actions.zoomActions);
zoomCombo.setMaximumSize(zoomCombo.getPreferredSize());
zoomCombo.setEditable(true);
zoomCombo.setSelectedIndex(5); // 100%
zoomCombo.addActionListener(new ZoomComboListener());
addToToolbar(zoomCombo, TB_GROUP_SHOW_IF_VPATHWAY);
tb.addSeparator();
// define the drop-down menu for data nodes
GraphicsChoiceButton datanodeButton = new GraphicsChoiceButton();
datanodeButton.setToolTipText("Select a data node to draw");
datanodeButton.addButtons("Data Nodes", actions.newDatanodeActions);
// datanodeButton.addButtons("Annotations", actions.newAnnotationActions);
addToToolbar(datanodeButton, TB_GROUP_SHOW_IF_EDITMODE);
tb.addSeparator(new Dimension(2,0));
// define the drop-down menu for shapes
GraphicsChoiceButton shapeButton = new GraphicsChoiceButton();
shapeButton.setToolTipText("Select a shape to draw");
itemsDropDown = shapeButton;
shapeButton.addButtons("Basic shapes", actions.newShapeActions);
shapeButton.addButtons("Cellular components", actions.newCellularComponentActions);
addToToolbar(shapeButton, TB_GROUP_SHOW_IF_EDITMODE);
tb.addSeparator(new Dimension(2,0));
// define the drop-down menu for interactions
GraphicsChoiceButton lineButton = new GraphicsChoiceButton();
lineButton.setToolTipText("Select an interaction to draw");
lineButton.addButtons("Basic interactions", actions.newInteractionActions);
lineButton.addButtons("MIM interactions", actions.newMIMInteractionActions);
addToToolbar(lineButton, TB_GROUP_SHOW_IF_EDITMODE);
tb.addSeparator(new Dimension(2,0));
// define the drop-down menu for templates
GraphicsChoiceButton templateButton = new GraphicsChoiceButton();
templateButton.setToolTipText("Select a template to draw");
templateButton.addButtons("Templates", actions.newTemplateActions);
addToToolbar(templateButton, TB_GROUP_SHOW_IF_EDITMODE);
tb.addSeparator();
// layout actions
addToToolbar(actions.layoutActions);
}
protected void addToolBarActions(final SwingEngine swingEngine, JToolBar tb)
{
tb.setLayout(new WrapLayout(1, 1));
addToToolbar(actions.importAction);
addToToolbar(actions.exportAction);
addCommonToolbarActions(swingEngine, tb);
}
public static final String TB_GROUP_SHOW_IF_EDITMODE = "edit";
public static final String TB_GROUP_SHOW_IF_VPATHWAY = "vpathway";
private Map<String, Set<Component>> toolbarGroups = new HashMap<String, Set<Component>>();
public void addToToolbar(Component c, String group) {
JToolBar tb = getToolBar();
if(tb == null) {
Logger.log.warn("Trying to register toolbar action while no toolbar is available " +
"(running in headless mode?)");
return;
}
tb.add(c);
addToToolbarGroup(c, group);
}
public void addToToolbar(Component c) {
addToToolbar(c, null);
}
public void addToToolbar(Action[] actions) {
for(Action a : actions) {
addToToolbar(a);
}
}
public JButton addToToolbar(Action a, String group) {
if(mayAddAction(a)) {
JButton b = getToolBar().add(a);
b.setFocusable(false);
addToToolbarGroup(b, group);
return b;
}
return null;
}
public JButton addToToolbar(Action a) {
return addToToolbar(a, null);
}
private void addToToolbarGroup(Component c, String group) {
Utils.multimapPut(toolbarGroups, group, c);
}
public void addToMenu(Action a, JMenu parent) {
if(mayAddAction(a)) {
parent.add(a);
}
}
public Set<Component> getToolbarGroup(String group) {
Set<Component> tbg = toolbarGroups.get(group);
if(tbg == null) tbg = new HashSet<Component>();
return tbg;
}
public JToolBar getToolBar() {
return toolBar;
}
public JScrollPane getScrollPane() {
return pathwayScrollPane;
}
public JSplitPane getSplitPane() {
return splitPane;
}
public BackpagePane getBackpagePane() {
return backpagePane;
}
public void vPathwayEvent(VPathwayEvent e) {
VPathway vp = (VPathway)e.getSource();
switch(e.getType()) {
case ELEMENT_DOUBLE_CLICKED:
VPathwayElement pwe = e.getAffectedElement();
if(pwe instanceof Handle)
{
pwe = ((Handle)pwe).getParent();
}
if(pwe instanceof Graphics &&
!(pwe instanceof SelectionBox)) {
PathwayElement p = ((Graphics)pwe).getPathwayElement();
if(p != null) {
swingEngine.getPopupDialogHandler().getInstance(p, !vp.isEditMode(), null, this).setVisible(true);
}
}
break;
case EDIT_MODE_ON:
for(Component b : getToolbarGroup(TB_GROUP_SHOW_IF_EDITMODE)) {
b.setEnabled(true);
}
break;
case EDIT_MODE_OFF:
for(Component b : getToolbarGroup(TB_GROUP_SHOW_IF_EDITMODE)) {
b.setEnabled(false);
}
break;
case HREF_ACTIVATED:
if(e.getAffectedElement() instanceof Label) {
try {
hyperlinkUpdate(new HyperlinkEvent(e.getSource(), HyperlinkEvent.EventType.ACTIVATED, new URL(((Label)e.getAffectedElement()).getPathwayElement().getHref())));
} catch (MalformedURLException e1) {
swingEngine.getEngine().getActiveVPathway().selectObject(e.getAffectedElement());
swingEngine.handleMalformedURLException("The specified link address is not valid.", this, e1);
}
}
}
}
public void hyperlinkUpdate(HyperlinkEvent e) {
swingEngine.hyperlinkUpdate(e);
}
public void applicationEvent(ApplicationEvent e) {
switch(e.getType()) {
case VPATHWAY_CREATED:
{
VPathway vp = (VPathway)e.getSource();
vp.addVPathwayListener(this);
vp.addVPathwayListener(pathwayElementMenuListener);
for(Component b : getToolbarGroup(TB_GROUP_SHOW_IF_VPATHWAY)) {
b.setEnabled(true);
}
}
break;
case VPATHWAY_DISPOSED:
{
VPathway vp = (VPathway)e.getSource();
vp.removeVPathwayListener(this);
vp.removeVPathwayListener(pathwayElementMenuListener);
}
break;
}
}
public JMenuBar getMenuBar() {
return menuBar;
}
public JTabbedPane getSideBarTabbedPane()
{
return sidebarTabbedPane;
}
public void dispose()
{
backpagePane.dispose();
}
/**
* Gives access to the objects tab in the side panel.
*/
public ObjectsPane getObjectsPane()
{
return objectsPane;
}
/**
* Gives access to the shapes drop-down button on the toolbar.
*/
public GraphicsChoiceButton getItemsDropDown()
{
return itemsDropDown;
}
/**
* Adds menu items (with their text labels) to the given drop-down button (e.g. "-&gt; arrow").
*/
public void addMenuItems(Action [] aa, DropDownButton lineButton)
{
for(Action a : aa) {
lineButton.addComponent(new JMenuItem(a));
}
}
}
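// --- Hedged usage sketch (editor addition, not part of the original source) ---
// Shows how an embedding application might wire up MainPanel and register an extra toolbar
// component in the TB_GROUP_SHOW_IF_EDITMODE group, which vPathwayEvent() enables and disables
// on EDIT_MODE_ON / EDIT_MODE_OFF. The SwingEngine instance and the frame title are assumed;
// how the engine is created is up to the host application.
class MainPanelUsageSketch {
static javax.swing.JFrame open(SwingEngine swingEngine) {
MainPanel mainPanel = new MainPanel(swingEngine, null); // null: do not hide any actions
mainPanel.createAndShowGUI();
// components registered in this group are toggled together with edit mode
mainPanel.addToToolbar(new JLabel("edit-mode only"), MainPanel.TB_GROUP_SHOW_IF_EDITMODE);
javax.swing.JFrame frame = new javax.swing.JFrame("PathVisio sketch");
frame.setJMenuBar(mainPanel.getMenuBar()); // the menu bar is added by the container
frame.setContentPane(mainPanel);
frame.pack();
frame.setVisible(true);
return frame;
}
}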
|
|
package com.davidbracewell;
import com.davidbracewell.string.StringUtils;
import lombok.NonNull;
import lombok.Value;
import java.io.Serializable;
import java.util.regex.Pattern;
import static com.davidbracewell.Re.re;
/**
* <p>Fluent style regular expression builder.</p>
*
* @author David B. Bracewell
*/
@Value
public final class Regex implements Serializable {
private static final long serialVersionUID = 1L;
private final String pattern;
/**
* Instantiates a new Regex.
*
* @param pattern the pattern
* @param quote whether to quote the pattern as a literal via {@link Pattern#quote(String)}
*/
Regex(String pattern, boolean quote) {
if (pattern != null) {
if (quote) {
this.pattern = Pattern.quote(pattern);
} else {
this.pattern = pattern;
}
} else {
this.pattern = StringUtils.EMPTY;
}
}
/**
* Strips the enclosing brackets from a character class pattern, e.g. {@code [abc]} becomes {@code abc}.
*
* @param p the pattern
* @return the pattern without the enclosing character-class brackets
*/
static String toChars(String p) {
if (p.length() >= 3 && p.charAt(0) == '[' && p.charAt(p.length() - 1) == ']') {
return p.substring(1, p.length() - 1);
}
return p;
}
/**
* Concatenates the given regex with this one.
*
* @param regex the regex to concatenate with this one
* @return the regex
*/
public Regex then(Regex regex) {
if (regex == null) {
return this;
}
return re(this.pattern + regex.pattern);
}
/**
* Appends a dollar sign to the end of the regex signifying that the pattern must match to the end of line.
*
* @return the regex
*/
public Regex endLine() {
return re(this.pattern + "$");
}
/**
* Ors together this regex with the supplied other regular expressions
*
* @param others the other regular expressions to be ored with this one
* @return the regex
*/
public Regex or(@NonNull Regex... others) {
if (others == null) {
return this;
}
return Re.or(this, others);
}
/**
* Ands together this regex with the supplied regular expression
*
* @param other the other regular expression to be anded to this one
* @return the regex
*/
public Regex and(@NonNull Regex other) {
if (other.pattern.length() > 0) {
return re(this.pattern + "&&" + other.pattern);
}
return this;
}
/**
* Converts the regex into a group. If the supplied name is not null or blank, the group will be named.
*
* @param name the name of the group
* @return the regex
*/
public Regex group(String name) {
return re("(" + (StringUtils.isNotNullOrBlank(name) ? "?<" + name + ">" : StringUtils.EMPTY) + pattern + ")");
}
/**
* Converts the regex into a non-capturing group
*
* @return the regex
*/
public Regex nmGroup() {
return re("(?:" + pattern + ")");
}
/**
* Converts the regex into a group
*
* @return the regex
*/
public Regex group() {
return re("(" + this.pattern + ")");
}
/**
* Negates the regex
*
* @return the negated regex
*/
public Regex not() {
if (this.pattern.length() > 0) {
if (this.pattern.charAt(0) == '[' && this.pattern.length() > 1) {
return re("[^" + this.pattern.substring(1));
}
return re("^" + this.pattern);
}
return this;
}
/**
* Specifies the number of times for this regex to repeat.
*
* @param n the number of times the pattern should repeat
* @return the regex
*/
public Regex nTimes(int n) {
return re(this.pattern + "{" + Integer.toString(n) + "}");
}
/**
* Specifies the minimum and maximum times for this regex to repeat.
*
* @param min the minimum times the pattern should repeat
* @param max the maximum times the pattern should repeat
* @return the regex
*/
public Regex range(int min, int max) {
return re(this.pattern + "{" + Integer.toString(min) + "," + Integer.toString(max) + "}");
}
/**
* Appends a plus sign to the end of the regex. Typically this is used to designate a match of one or more.
*
* @return the regex
*/
public Regex plus() {
return re(this.pattern + "+");
}
/**
* Appends an asterisk to the end of the regex. Typically this is used to designate a match of zero or more.
*
* @return the regex
*/
public Regex star() {
return re(this.pattern + "*");
}
/**
* Appends a question mark to the end of the regex. Typically this is used to designate a match of zero or one.
*
* @return the regex
*/
public Regex question() {
return re(this.pattern + "?");
}
/**
* Converts the regex object to a Java pattern with default flags.
*
* @return the Java regular expression pattern
*/
public Pattern toPattern() {
return Pattern.compile(pattern);
}
/**
* Converts the regex object to a Java pattern with the specified flags.
*
* @param flags the flags for the pattern creation see {@link Pattern}
* @return the Java regular expression pattern
*/
public Pattern toPattern(int flags) {
return Pattern.compile(pattern, flags);
}
@Override
public String toString() {
return pattern;
}
/**
* Converts this regex to match an entire line.
*
* @return the regex that matches entire lines.
*/
public Regex matchLine() {
return re("^" + this.pattern + "$");
}
/**
* Converts this regex into a character class.
*
* @return the character class regex
*/
public Regex chars() {
return Re.chars(this);
}
}// END OF Regex
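// --- Hedged usage sketch (editor addition, not part of the original source) ---
// Builds an ISO-date matcher with the fluent API defined above. Re.re(String) is assumed to
// wrap a raw pattern string, as suggested by the static import and its use throughout this
// class; all other calls are methods declared in Regex itself.
class RegexUsageSketch {
static Pattern isoDatePattern() {
Regex digit = Re.re("[0-9]");
Regex iso = digit.nTimes(4).group("year")
.then(Re.re("-"))
.then(digit.nTimes(2).group("month"))
.then(Re.re("-"))
.then(digit.nTimes(2).group("day"))
.matchLine();
// resulting pattern: ^(?<year>[0-9]{4})-(?<month>[0-9]{2})-(?<day>[0-9]{2})$
return iso.toPattern();
}
}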
|
|
/**
* Copyright 2015-2018 Maven Source Dependencies
* Plugin contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.srcdeps.core.config;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;
import org.junit.Assert;
import org.junit.Test;
import org.srcdeps.core.BuildRequest.Verbosity;
import org.srcdeps.core.config.scalar.CharStreamSource;
import org.srcdeps.core.config.tree.walk.OverrideVisitor;
public class OverrideTest {
@Test
public void appendScmRepositoryUrl() {
Configuration.Builder config = Configuration.builder() //
.repository( //
ScmRepository.builder() //
.id("org.repo1") //
.include("org.example") //
.url("file:///whereever") //
);
ScmRepository.Builder nonOverlayedRepo = config.repositories.getChildren().get("org.repo1");
Assert.assertEquals("org.repo1", nonOverlayedRepo.getName());
Assert.assertEquals(Collections.singletonList("org.example"), nonOverlayedRepo.includes.asListOfValues());
Assert.assertEquals(Collections.singletonList("file:///whereever"), nonOverlayedRepo.urls.asListOfValues());
Properties props = new Properties();
props.put("srcdeps.repositories[org.repo1].urls[1]", "file:///here");
config.accept(new OverrideVisitor(props));
ScmRepository.Builder overlayedRepo = config.repositories.getChildren().get("org.repo1");
Assert.assertSame(nonOverlayedRepo, overlayedRepo);
Assert.assertEquals("org.repo1", nonOverlayedRepo.getName());
Assert.assertEquals(Collections.singletonList("org.example"), nonOverlayedRepo.includes.asListOfValues());
Assert.assertEquals(Arrays.asList("file:///whereever", "file:///here"), nonOverlayedRepo.urls.asListOfValues());
}
@Test
public void modifyListOfStrings() {
Configuration.Builder config = Configuration.builder() //
.forwardProperty("fwd1") //
.forwardProperty("fwd2") //
.maven( //
Maven.builder() //
.versionsMavenPluginVersion("0.1") //
.failWith( //
MavenAssertions.failWithBuilder() //
.addDefaults(false) //
.goal("g1") //
.profile("p1") //
.property("prop1") //
) //
);
Assert.assertEquals(Arrays.asList("fwd1", "fwd2"), config.forwardProperties.asListOfValues());
MavenAssertions.FailWithBuilder failWith = config.maven.failWith;
Assert.assertEquals(false, failWith.addDefaults.getValue());
Assert.assertEquals(Collections.singleton("g1"), failWith.goals.asSetOfValues());
Assert.assertEquals(Collections.singleton("p1"), failWith.profiles.asSetOfValues());
Assert.assertEquals(Collections.singleton("prop1"), failWith.properties.asSetOfValues());
Properties props = new Properties();
props.put("srcdeps.forwardProperties", ""); // replace by empty
props.put("srcdeps.maven.failWith.goals[1]", "g2"); // append
props.put("srcdeps.maven.failWith.profiles", "pro1,pro2"); // replace
props.put("srcdeps.maven.failWith.properties[-1]", "prop0"); // prepend
config.accept(new OverrideVisitor(props));
MavenAssertions.FailWithBuilder overlayedFailWith = config.maven.failWith;
Assert.assertSame(failWith, overlayedFailWith);
Assert.assertEquals(Collections.emptyList(), config.forwardProperties.asListOfValues());
Assert.assertEquals(false, failWith.addDefaults.getValue());
Assert.assertEquals(Arrays.asList("g1", "g2"), failWith.goals.asListOfValues());
Assert.assertEquals(Arrays.asList("pro1", "pro2"), failWith.profiles.asListOfValues());
Assert.assertEquals(Arrays.asList("prop0", "prop1"), failWith.properties.asListOfValues());
}
@Test
public void overrideBoolean() {
Configuration.Builder config = Configuration.builder().skip(true);
Assert.assertEquals(true, config.skip.getValue());
Properties props = new Properties();
props.put("srcdeps.skip", "false");
config.accept(new OverrideVisitor(props));
Assert.assertEquals(false, config.skip.getValue());
}
@Test
public void overridePath() {
Path myDir = Paths.get("/my/dir");
Configuration.Builder config = Configuration.builder().sourcesDirectory(myDir);
Assert.assertEquals(myDir, config.sourcesDirectory.getValue());
Properties props = new Properties();
props.put("srcdeps.sourcesDirectory", "/your/dir");
config.accept(new OverrideVisitor(props));
Assert.assertEquals(Paths.get("/your/dir"), config.sourcesDirectory.getValue());
}
@Test
public void overrideScmRepositoryMaven() {
Configuration.Builder config = Configuration.builder().repository(ScmRepository.builder() //
.id("org.repo1") //
.include("org.example") //
.url("file:///whereever") //
.maven( //
ScmRepositoryMaven.builder() //
.versionsMavenPluginVersion("1.2")) //
);
ScmRepository.Builder nonOverlayedRepo = config.repositories.getChildren().get("org.repo1");
Assert.assertEquals("org.repo1", nonOverlayedRepo.getName());
Assert.assertEquals(Collections.singletonList("org.example"), nonOverlayedRepo.includes.asListOfValues());
Assert.assertEquals(Collections.singletonList("file:///whereever"), nonOverlayedRepo.urls.asListOfValues());
Assert.assertEquals("1.2", nonOverlayedRepo.maven.versionsMavenPluginVersion.getValue());
Properties props = new Properties();
props.put("srcdeps.repositories[org.repo1].maven.versionsMavenPluginVersion", "1.3");
props.put("srcdeps.repositories[org.repo1].gradle.modelTransformer", "literal: foo");
config.accept(new OverrideVisitor(props));
ScmRepository.Builder overlayedRepo = config.repositories.getChildren().get("org.repo1");
Assert.assertSame(nonOverlayedRepo, overlayedRepo);
Assert.assertEquals("org.repo1", nonOverlayedRepo.getName());
Assert.assertEquals(Collections.singletonList("org.example"), nonOverlayedRepo.includes.asListOfValues());
Assert.assertEquals(Collections.singletonList("file:///whereever"), nonOverlayedRepo.urls.asListOfValues());
Assert.assertEquals("1.3", nonOverlayedRepo.maven.versionsMavenPluginVersion.getValue());
Assert.assertEquals(CharStreamSource.of("literal: foo"), nonOverlayedRepo.gradle.modelTransformer.getValue());
}
@Test
public void overrideValuelessBooleanProperty() {
Configuration.Builder config = Configuration.builder().skip(true);
Assert.assertEquals(true, config.skip.getValue());
Properties props = new Properties();
props.put("srcdeps.skip", "");
config.accept(new OverrideVisitor(props));
Assert.assertEquals(true, config.skip.getValue());
}
@Test
public void overrideVerbosity() {
Configuration.Builder config = Configuration.builder().verbosity(Verbosity.trace);
Assert.assertEquals(Verbosity.trace, config.verbosity.getValue());
Properties props = new Properties();
props.put("srcdeps.verbosity", "debug");
config.accept(new OverrideVisitor(props));
Assert.assertEquals(Verbosity.debug, config.verbosity.getValue());
}
@Test
public void replaceScmRepositoryUrl() {
Configuration.Builder config = Configuration.builder() //
.skip(false) //
.repository( //
ScmRepository.builder() //
.id("repo1") //
.include("org.example") //
.url("file:///whereever") //
);
ScmRepository.Builder nonOverlayedRepo = config.repositories.getChildren().get("repo1");
Assert.assertEquals("repo1", nonOverlayedRepo.getName());
Assert.assertEquals(Collections.singletonList("org.example"), nonOverlayedRepo.includes.asListOfValues());
Assert.assertEquals(Collections.singletonList("file:///whereever"), nonOverlayedRepo.urls.asListOfValues());
Properties props = new Properties();
props.put("srcdeps.repositories[repo1].urls[0]", "file:///here");
config.accept(new OverrideVisitor(props));
ScmRepository.Builder overlayedRepo = config.repositories.getChildren().get("repo1");
Assert.assertSame(nonOverlayedRepo, overlayedRepo);
Assert.assertEquals("repo1", nonOverlayedRepo.getName());
Assert.assertEquals(Collections.singletonList("org.example"), nonOverlayedRepo.includes.asListOfValues());
Assert.assertEquals(Collections.singletonList("file:///here"), nonOverlayedRepo.urls.asListOfValues());
}
}
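// --- Hedged summary sketch (editor addition, not part of the original test) ---
// Collects the property-key conventions exercised by the tests above in one place: plain keys
// override scalars, a bare list key replaces the whole list, [i] replaces or appends at an
// index, and [-1] prepends. The values are illustrative only.
class OverridePropertyConventionsSketch {
static Properties exampleOverrides() {
Properties props = new Properties();
props.put("srcdeps.skip", "false"); // scalar (boolean) override
props.put("srcdeps.verbosity", "debug"); // enum override
props.put("srcdeps.forwardProperties", ""); // replace a list with an empty one
props.put("srcdeps.maven.failWith.profiles", "pro1,pro2"); // replace the whole list
props.put("srcdeps.maven.failWith.goals[1]", "g2"); // append after the existing element
props.put("srcdeps.maven.failWith.properties[-1]", "prop0"); // prepend
props.put("srcdeps.repositories[repo1].urls[0]", "file:///here"); // replace element 0
return props; // would be applied via config.accept(new OverrideVisitor(props))
}
}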
|
|
package com.gatech.beatouch.screens;
import com.badlogic.gdx.Game;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.InputMultiplexer;
import com.badlogic.gdx.InputProcessor;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Image;
import com.badlogic.gdx.scenes.scene2d.ui.List;
import com.badlogic.gdx.scenes.scene2d.ui.ScrollPane;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.scenes.scene2d.ui.TextButton;
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;
import com.badlogic.gdx.scenes.scene2d.utils.ClickListener;
import com.badlogic.gdx.utils.viewport.ExtendViewport;
import com.gatech.beatouch.assets.Assets;
import com.gatech.beatouch.configuration.GlobalVariables;
import com.gatech.beatouch.entities.SongMap;
import com.gatech.beatouch.entities.SongMapGroup;
@SuppressWarnings("unchecked")
public class SongSelectScreen implements Screen, InputProcessor {
private Stage stage = new Stage(new ExtendViewport(Gdx.graphics.getWidth(), Gdx.graphics.getHeight()));
private List<SongMapGroup> availableSongList = new List<>(Assets.menuSkin, "diff_list");
private ScrollPane SongListPanel = new ScrollPane(null, Assets.menuSkin);
private List<SongMap> songList = new List<>(Assets.menuSkin, "diff_list");
private Table table = new Table();
private TextButton nextButton = new TextButton("Next", Assets.menuSkin, "item1");
private TextButton backButton = new TextButton("Back", Assets.menuSkin, "item1");
private Image backgroundImage = new Image(Assets.mainMenuBackgroundTexture);
@Override
public void show() {
float scaleFactor = stage.getHeight() / GlobalVariables.BASE_HEIGHT;
backgroundImage.setSize(stage.getWidth(), stage.getHeight());
stage.addActor(backgroundImage);
Assets.songGroup.sort();
availableSongList.setItems(Assets.songGroup);
if (Assets.selectedGroup != null) {
availableSongList.setSelected(Assets.selectedGroup);
songList.setItems(Assets.selectedGroup.beatmaps);
} else {
if (availableSongList.getItems().size != 0)
{
Assets.selectedGroup = availableSongList.getItems().get(0);
songList.setItems(Assets.selectedGroup.beatmaps);
}
}
availableSongList.addListener(new ChangeListener() {
@Override
public void changed(ChangeEvent event, Actor actor) {
SongMapGroup previousGroup = Assets.selectedGroup;
SongMapGroup newSelected = (SongMapGroup) ((List) actor).getSelected();
if (previousGroup == newSelected) {
// if the same group was selected we ignore it
return;
}
Assets.selectedGroup = newSelected;
songList.setItems(newSelected.beatmaps);
}
});
if (Assets.selectedSongMap != null) {
songList.setSelected(Assets.selectedSongMap);
} else {
songList.setSelected(songList.getItems().size == 0 ? null : songList.getItems().first());
}
nextButton.getLabel().setFontScale(scaleFactor);
backButton.getLabel().setFontScale(scaleFactor);
SongListPanel.setWidget(availableSongList);
SongListPanel.setWidth(stage.getWidth());
table.add(SongListPanel).colspan(3).size(stage.getWidth() * 0.87f, stage.getHeight() * 0.49f).padBottom(stage.getHeight() * 0.01f).row();
//table.add(diffListPane).colspan(3).size(stage.getWidth() * 0.87f, stage.getHeight() * 0.23f).padBottom(stage.getHeight() * 0.01f).padTop(stage.getHeight() * 0.01f).row();
table.setWidth(stage.getWidth());
table.setHeight(stage.getHeight());
backButton.addListener((new ClickListener() {
@Override
public void clicked(InputEvent event, float x, float y) {
Assets.selectedGroup = availableSongList.getSelected();
Assets.selectedSongMap = songList.getSelected();
((Game) Gdx.app.getApplicationListener()).setScreen(new MainMenuScreen());
}
}));
nextButton.addListener((new ClickListener() {
@Override
public void clicked(InputEvent event, float x, float y) {
if (songList.getSelected() == null) {
return;
}
Assets.selectedSongMap = songList.getSelected();
((Game) Gdx.app.getApplicationListener()).setScreen(new SongScreen());
}
}));
table.add(backButton).size(stage.getWidth() * 0.87f / 2, stage.getHeight() * 0.12f);
table.add(nextButton).size(stage.getWidth() * 0.87f / 2, stage.getHeight() * 0.12f);
stage.addActor(table);
InputMultiplexer impx = new InputMultiplexer();
impx.addProcessor(this);
impx.addProcessor(stage);
Gdx.input.setInputProcessor(impx);
Gdx.input.setCatchBackKey(true);
}
@Override
public void render(float delta) {
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
SongListPanel.act(delta);
stage.act();
stage.draw();
}
@Override
public void resize(int width, int height) {
}
@Override
public void pause() {
}
@Override
public void resume() {
}
@Override
public void hide() {
dispose();
}
@Override
public void dispose() {
stage.dispose();
}
@Override
public boolean keyDown(int keycode) {
return false;
}
@Override
public boolean keyUp(int keycode) {
if (keycode == Input.Keys.BACK || keycode == Input.Keys.ESCAPE) {
Assets.selectedSongMap = songList.getSelected();
Assets.selectedGroup = availableSongList.getSelected();
((Game) Gdx.app.getApplicationListener()).setScreen(new MainMenuScreen());
// consume the key event
return true;
}
return false;
}
@Override
public boolean keyTyped(char character) {
return false;
}
@Override
public boolean touchDown(int screenX, int screenY, int pointer, int button) {
return false;
}
@Override
public boolean touchUp(int screenX, int screenY, int pointer, int button) {
return false;
}
@Override
public boolean touchDragged(int screenX, int screenY, int pointer) {
return false;
}
@Override
public boolean mouseMoved(int screenX, int screenY) {
return false;
}
@Override
public boolean scrolled(int amount) {
return false;
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.irc;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
import org.apache.camel.util.UnsafeUriCharactersEncoder;
import org.apache.camel.util.jsse.SSLContextParameters;
import org.schwering.irc.lib.ssl.SSLDefaultTrustManager;
import org.schwering.irc.lib.ssl.SSLTrustManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@UriParams
public class IrcConfiguration implements Cloneable {
private static final Logger LOG = LoggerFactory.getLogger(IrcConfiguration.class);
private List<IrcChannel> channels = new ArrayList<IrcChannel>();
@UriPath @Metadata(required = "true")
private String hostname;
@UriPath
private int port;
private int[] ports = {6667, 6668, 6669};
@UriParam
private String password;
@UriParam
private String nickname;
@UriParam
private String realname;
@UriParam
private String username;
private SSLTrustManager trustManager = new SSLDefaultTrustManager();
private boolean usingSSL;
@UriParam(defaultValue = "true")
private boolean persistent = true;
@UriParam(defaultValue = "true")
private boolean colors = true;
@UriParam(defaultValue = "true")
private boolean onNick = true;
@UriParam(defaultValue = "true")
private boolean onQuit = true;
@UriParam(defaultValue = "true")
private boolean onJoin = true;
@UriParam(defaultValue = "true")
private boolean onKick = true;
@UriParam(defaultValue = "true")
private boolean onMode = true;
@UriParam(defaultValue = "true")
private boolean onPart = true;
@UriParam(defaultValue = "false")
private boolean onReply;
@UriParam(defaultValue = "true")
private boolean onTopic = true;
@UriParam(defaultValue = "true")
private boolean onPrivmsg = true;
@UriParam(defaultValue = "true")
private boolean autoRejoin = true;
private SSLContextParameters sslContextParameters;
public IrcConfiguration() {
}
public IrcConfiguration(String hostname, String nickname, String displayname, List<IrcChannel> channels) {
this(hostname, null, null, nickname, displayname, channels);
}
public IrcConfiguration(String hostname, String username, String password, String nickname, String displayname, List<IrcChannel> channels) {
this.channels = channels;
this.hostname = hostname;
this.username = username;
this.password = password;
this.nickname = nickname;
this.realname = displayname;
}
public IrcConfiguration copy() {
try {
return (IrcConfiguration) clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeCamelException(e);
}
}
public String getCacheKey() {
return hostname + ":" + nickname;
}
/*
* Return a space-separated list of channel names, without channel keys
*/
public String getListOfChannels() {
String retval = "";
for (IrcChannel channel : channels) {
retval += (retval.isEmpty() ? "" : " ") + channel.getName();
}
return retval;
}
public void configure(String uriStr) throws URISyntaxException, UnsupportedEncodingException {
// fix provided URI and handle that we can use # to indicate the IRC room
if (uriStr.startsWith("ircs")) {
setUsingSSL(true);
if (!uriStr.startsWith("ircs://")) {
uriStr = uriStr.replace("ircs:", "ircs://");
}
} else if (!uriStr.startsWith("irc://")) {
uriStr = uriStr.replace("irc:", "irc://");
}
if (uriStr.contains("?")) {
uriStr = ObjectHelper.before(uriStr, "?");
}
URI uri = new URI(uriStr);
// Because we can receive a "sanitized" URI, we need to handle the case where the user info
// contains the username and password together; otherwise we would end up with a mangled
// username that includes the user's secret and send it to the server.
String userInfo = uri.getUserInfo();
String username = null;
String password = null;
if (userInfo != null) {
int colonIndex = userInfo.indexOf(":");
if (colonIndex != -1) {
username = userInfo.substring(0, colonIndex);
password = userInfo.substring(colonIndex + 1);
} else {
username = userInfo;
}
}
if (uri.getPort() != -1) {
setPorts(new int[] {uri.getPort()});
setPort(uri.getPort());
}
setNickname(username);
setUsername(username);
setRealname(username);
setPassword(password);
setHostname(uri.getHost());
String path = uri.getPath();
if (path != null && !path.isEmpty()) {
LOG.warn("Channel {} should not be specified in the URI path. Use an @channel query parameter instead.", path);
}
}
public void setChannel(String channel) {
channels.add(createChannel(channel));
}
public void setChannel(List<String> channels) {
for (String ci : channels) {
this.channels.add(createChannel(ci));
}
}
public List<IrcChannel> getChannels() {
return channels;
}
public IrcChannel findChannel(String name) {
for (IrcChannel channel : channels) {
if (channel.getName().equals(name)) {
return channel;
}
}
return null;
}
public void setTrustManager(SSLTrustManager trustManager) {
this.trustManager = trustManager;
}
public SSLTrustManager getTrustManager() {
return trustManager;
}
public boolean getUsingSSL() {
return usingSSL;
}
private void setUsingSSL(boolean usingSSL) {
this.usingSSL = usingSSL;
}
public String getHostname() {
return hostname;
}
public void setHostname(String hostname) {
this.hostname = hostname;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getNickname() {
return nickname;
}
public void setNickname(String nickname) {
this.nickname = nickname;
}
public String getRealname() {
return realname;
}
public void setRealname(String realname) {
this.realname = realname;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public int[] getPorts() {
return ports;
}
public void setPorts(int[] ports) {
this.ports = ports;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
public boolean isPersistent() {
return persistent;
}
public void setPersistent(boolean persistent) {
this.persistent = persistent;
}
public boolean isColors() {
return colors;
}
public void setColors(boolean colors) {
this.colors = colors;
}
public boolean isOnNick() {
return onNick;
}
public void setOnNick(boolean onNick) {
this.onNick = onNick;
}
public boolean isOnQuit() {
return onQuit;
}
public void setOnQuit(boolean onQuit) {
this.onQuit = onQuit;
}
public boolean isOnJoin() {
return onJoin;
}
public void setOnJoin(boolean onJoin) {
this.onJoin = onJoin;
}
public boolean isOnKick() {
return onKick;
}
public void setOnKick(boolean onKick) {
this.onKick = onKick;
}
public boolean isOnMode() {
return onMode;
}
public void setOnMode(boolean onMode) {
this.onMode = onMode;
}
public boolean isOnPart() {
return onPart;
}
public void setOnPart(boolean onPart) {
this.onPart = onPart;
}
public boolean isOnReply() {
return onReply;
}
public void setOnReply(boolean onReply) {
this.onReply = onReply;
}
public boolean isOnTopic() {
return onTopic;
}
public void setOnTopic(boolean onTopic) {
this.onTopic = onTopic;
}
public boolean isOnPrivmsg() {
return onPrivmsg;
}
public void setOnPrivmsg(boolean onPrivmsg) {
this.onPrivmsg = onPrivmsg;
}
public boolean isAutoRejoin() {
return autoRejoin;
}
public void setAutoRejoin(boolean autoRejoin) {
this.autoRejoin = autoRejoin;
}
public SSLContextParameters getSslContextParameters() {
return sslContextParameters;
}
public void setSslContextParameters(SSLContextParameters sslContextParameters) {
this.sslContextParameters = sslContextParameters;
}
public String toString() {
return "IrcConfiguration[hostname: " + hostname + ", ports=" + Arrays.toString(ports) + ", username=" + username + "]";
}
private static IrcChannel createChannel(String channelInfo) {
String[] pair = channelInfo.split("!");
return new IrcChannel(pair[0], pair.length > 1 ? pair[1] : null);
}
@Deprecated
public static String sanitize(String uri) {
// may be removed in camel-3.0.0
// make sure it's an URL first
int colon = uri.indexOf(':');
if (colon != -1 && uri.indexOf("://") != colon) {
uri = uri.substring(0, colon) + "://" + uri.substring(colon + 1);
}
try {
URI u = new URI(UnsafeUriCharactersEncoder.encode(uri));
String[] userInfo = u.getUserInfo() != null ? u.getUserInfo().split(":") : null;
String username = userInfo != null ? userInfo[0] : null;
String password = userInfo != null && userInfo.length > 1 ? userInfo[1] : null;
String path = URLDecoder.decode(u.getPath() != null ? u.getPath() : "", "UTF-8");
if (path.startsWith("/")) {
path = path.substring(1);
}
if (path.startsWith("#") && !path.startsWith("##")) {
path = path.substring(1);
}
Map<String, Object> parameters = URISupport.parseParameters(u);
String user = (String)parameters.get("username");
String nick = (String)parameters.get("nickname");
// not specified in authority
if (user != null) {
if (username == null) {
username = user;
} else if (!username.equals(user)) {
LOG.warn("Username specified twice in endpoint URI with different values. "
+ "The userInfo value ('{}') will be used, paramter ('{}') ignored", username, user);
}
parameters.remove("username");
}
if (nick != null) {
if (username == null) {
username = nick;
}
if (username.equals(nick)) {
parameters.remove("nickname"); // redundant
}
}
if (username == null) {
throw new RuntimeCamelException("IrcEndpoint URI with no user/nick specified is invalid");
}
String pwd = (String)parameters.get("password");
if (pwd != null) {
password = pwd;
parameters.remove("password");
}
// Remove unneeded '#' channel prefixes per convention
// and replace ',' separators and merge channel and key using convention "channel!key"
List<String> cl = new ArrayList<String>();
String channels = (String)parameters.get("channels");
String keys = (String)parameters.get("keys");
keys = keys == null ? keys : keys + " "; // if @keys ends with a ',' it will miss the last empty key after split(",")
if (channels != null) {
String[] chs = channels.split(",");
String[] ks = keys != null ? keys.split(",") : null;
parameters.remove("channels");
int count = chs.length;
if (ks != null) {
parameters.remove("keys");
if (!path.isEmpty()) {
LOG.warn("Specifying a channel '{}' in the URI path is ambiguous"
+ " when @channels and @keys are provided and will be ignored", path);
path = "";
}
if (ks.length != chs.length) {
count = count < ks.length ? count : ks.length;
LOG.warn("Different count of @channels and @keys. Only the first {} are used.", count);
}
}
for (int i = 0; i < count; i++) {
String channel = chs[i].trim();
String key = ks != null ? ks[i].trim() : null;
if (channel.startsWith("#") && !channel.startsWith("##")) {
channel = channel.substring(1);
}
if (key != null && !key.isEmpty()) {
channel += "!" + key;
}
cl.add(channel);
}
} else {
if (path.isEmpty()) {
LOG.warn("No channel specified for the irc endpoint");
}
cl.add(path);
}
parameters.put("channel", cl);
StringBuilder sb = new StringBuilder();
sb.append(u.getScheme());
sb.append("://");
sb.append(username);
sb.append(password == null ? "" : ":" + password);
sb.append("@");
sb.append(u.getHost());
sb.append(u.getPort() == -1 ? "" : ":" + u.getPort());
// ignore the path we have it as a @channel now
String query = formatQuery(parameters);
if (!query.isEmpty()) {
sb.append("?");
sb.append(query);
}
// make things a bit more predictable
return sb.toString();
} catch (Exception e) {
throw new RuntimeCamelException(e);
}
}
private static String formatQuery(Map<String, Object> params) {
if (params == null || params.size() == 0) {
return "";
}
StringBuilder result = new StringBuilder();
for (Map.Entry<String, Object> pair : params.entrySet()) {
Object value = pair.getValue();
// the value may be a list since the same key has multiple values
if (value instanceof List) {
List<?> list = (List<?>)value;
for (Object s : list) {
addQueryParameter(result, pair.getKey(), s);
}
} else {
addQueryParameter(result, pair.getKey(), value);
}
}
return result.toString();
}
private static void addQueryParameter(StringBuilder sb, String key, Object value) {
sb.append(sb.length() == 0 ? "" : "&");
sb.append(key);
if (value != null) {
String s = value.toString();
sb.append(s.isEmpty() ? "" : "=" + UnsafeUriCharactersEncoder.encode(s));
}
}
}
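// --- Hedged usage sketch (editor addition, not part of the original source) ---
// Shows what configure(String) extracts from an endpoint URI: the user info is split into
// username/password, the nick/user/real names default to the username, and an "ircs" scheme
// turns on SSL. The host, credentials and channel below are made up for illustration.
class IrcConfigurationUsageSketch {
static IrcConfiguration fromUri() throws Exception {
IrcConfiguration config = new IrcConfiguration();
config.configure("ircs://camelbot:secret@irc.example.org:6697");
// now: nickname/username/realname = "camelbot", password = "secret",
// hostname = "irc.example.org", port and ports = {6697}, usingSSL = true
config.setChannel("camel!roomkey"); // "channel!key" convention handled by createChannel()
return config;
}
}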
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver.wal;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.ChunkCreator;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
/**
* Tests for WAL write durability
*/
@RunWith(Parameterized.class)
@Category({ RegionServerTests.class, MediumTests.class })
public class TestDurability {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestDurability.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static FileSystem FS;
private static MiniDFSCluster CLUSTER;
private static Configuration CONF;
private static Path DIR;
private static byte[] FAMILY = Bytes.toBytes("family");
private static byte[] ROW = Bytes.toBytes("row");
private static byte[] COL = Bytes.toBytes("col");
@Parameter
public String walProvider;
@Rule
public TestName name = new TestName();
@Parameters(name = "{index}: provider={0}")
public static Iterable<Object[]> data() {
return Arrays.asList(new Object[] { "defaultProvider" }, new Object[] { "asyncfs" });
}
@BeforeClass
public static void setUpBeforeClass() throws Exception {
CONF = TEST_UTIL.getConfiguration();
TEST_UTIL.startMiniDFSCluster(1);
CLUSTER = TEST_UTIL.getDFSCluster();
FS = CLUSTER.getFileSystem();
DIR = TEST_UTIL.getDataTestDirOnTestFS("TestDurability");
CommonFSUtils.setRootDir(CONF, DIR);
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
@Before
public void setUp() {
CONF.set(WALFactory.WAL_PROVIDER, walProvider);
}
@After
public void tearDown() throws IOException {
FS.delete(DIR, true);
}
@Test
public void testDurability() throws Exception {
WALFactory wals = new WALFactory(CONF,
ServerName.valueOf("TestDurability", 16010, System.currentTimeMillis()).toString());
HRegion region = createHRegion(wals, Durability.USE_DEFAULT);
WAL wal = region.getWAL();
HRegion deferredRegion = createHRegion(region.getTableDescriptor(), region.getRegionInfo(),
"deferredRegion", wal, Durability.ASYNC_WAL);
region.put(newPut(null));
verifyWALCount(wals, wal, 1);
// a put through the deferred table does not write to the wal immediately,
// though it may already have been successfully sync-ed by the underlying AsyncWriter +
// AsyncFlusher thread
deferredRegion.put(newPut(null));
// but will after we sync the wal
wal.sync();
verifyWALCount(wals, wal, 2);
// a put through the deferred table is only persisted together with the next explicit WAL sync
deferredRegion.put(newPut(null));
wal.sync();
verifyWALCount(wals, wal, 3);
region.put(newPut(null));
verifyWALCount(wals, wal, 4);
// a put through the deferred table is only persisted together with the next explicit WAL sync
deferredRegion.put(newPut(Durability.USE_DEFAULT));
wal.sync();
verifyWALCount(wals, wal, 5);
region.put(newPut(Durability.USE_DEFAULT));
verifyWALCount(wals, wal, 6);
// SKIP_WAL never writes to the wal
region.put(newPut(Durability.SKIP_WAL));
deferredRegion.put(newPut(Durability.SKIP_WAL));
verifyWALCount(wals, wal, 6);
wal.sync();
verifyWALCount(wals, wal, 6);
// Async overrides sync table default
region.put(newPut(Durability.ASYNC_WAL));
deferredRegion.put(newPut(Durability.ASYNC_WAL));
wal.sync();
verifyWALCount(wals, wal, 8);
// sync overrides async table default
region.put(newPut(Durability.SYNC_WAL));
deferredRegion.put(newPut(Durability.SYNC_WAL));
verifyWALCount(wals, wal, 10);
// fsync behaves like sync
region.put(newPut(Durability.FSYNC_WAL));
deferredRegion.put(newPut(Durability.FSYNC_WAL));
verifyWALCount(wals, wal, 12);
}
@Test
public void testIncrement() throws Exception {
byte[] row1 = Bytes.toBytes("row1");
byte[] col1 = Bytes.toBytes("col1");
byte[] col2 = Bytes.toBytes("col2");
byte[] col3 = Bytes.toBytes("col3");
// Setting up region
WALFactory wals = new WALFactory(CONF,
ServerName.valueOf("TestIncrement", 16010, System.currentTimeMillis()).toString());
HRegion region = createHRegion(wals, Durability.USE_DEFAULT);
WAL wal = region.getWAL();
// col1: amount = 0, 1 write back to WAL
Increment inc1 = new Increment(row1);
inc1.addColumn(FAMILY, col1, 0);
Result res = region.increment(inc1);
assertEquals(1, res.size());
assertEquals(0, Bytes.toLong(res.getValue(FAMILY, col1)));
verifyWALCount(wals, wal, 1);
// col1: amount = 1, 1 write back to WAL
inc1 = new Increment(row1);
inc1.addColumn(FAMILY, col1, 1);
res = region.increment(inc1);
assertEquals(1, res.size());
assertEquals(1, Bytes.toLong(res.getValue(FAMILY, col1)));
verifyWALCount(wals, wal, 2);
// col1: amount = 0, 1 write back to WAL
inc1 = new Increment(row1);
inc1.addColumn(FAMILY, col1, 0);
res = region.increment(inc1);
assertEquals(1, res.size());
assertEquals(1, Bytes.toLong(res.getValue(FAMILY, col1)));
verifyWALCount(wals, wal, 3);
// col1: amount = 0, col2: amount = 0, col3: amount = 0
// 1 write back to WAL
inc1 = new Increment(row1);
inc1.addColumn(FAMILY, col1, 0);
inc1.addColumn(FAMILY, col2, 0);
inc1.addColumn(FAMILY, col3, 0);
res = region.increment(inc1);
assertEquals(3, res.size());
assertEquals(1, Bytes.toLong(res.getValue(FAMILY, col1)));
assertEquals(0, Bytes.toLong(res.getValue(FAMILY, col2)));
assertEquals(0, Bytes.toLong(res.getValue(FAMILY, col3)));
verifyWALCount(wals, wal, 4);
// col1: amount = 5, col2: amount = 4, col3: amount = 3
// 1 write back to WAL
inc1 = new Increment(row1);
inc1.addColumn(FAMILY, col1, 5);
inc1.addColumn(FAMILY, col2, 4);
inc1.addColumn(FAMILY, col3, 3);
res = region.increment(inc1);
assertEquals(3, res.size());
assertEquals(6, Bytes.toLong(res.getValue(FAMILY, col1)));
assertEquals(4, Bytes.toLong(res.getValue(FAMILY, col2)));
assertEquals(3, Bytes.toLong(res.getValue(FAMILY, col3)));
verifyWALCount(wals, wal, 5);
}
/**
* Test that when returnResults is set to false on an Increment, the incremented value is not
* returned and the Result is empty.
*/
@Test
public void testIncrementWithReturnResultsSetToFalse() throws Exception {
byte[] row1 = Bytes.toBytes("row1");
byte[] col1 = Bytes.toBytes("col1");
// Setting up region
WALFactory wals = new WALFactory(CONF,
ServerName
.valueOf("testIncrementWithReturnResultsSetToFalse", 16010, System.currentTimeMillis())
.toString());
HRegion region = createHRegion(wals, Durability.USE_DEFAULT);
Increment inc1 = new Increment(row1);
inc1.setReturnResults(false);
inc1.addColumn(FAMILY, col1, 1);
Result res = region.increment(inc1);
assertTrue(res.isEmpty());
}
private Put newPut(Durability durability) {
Put p = new Put(ROW);
p.addColumn(FAMILY, COL, COL);
if (durability != null) {
p.setDurability(durability);
}
return p;
}
private void verifyWALCount(WALFactory wals, WAL log, int expected) throws Exception {
Path walPath = AbstractFSWALProvider.getCurrentFileName(log);
WAL.Reader reader = wals.createReader(FS, walPath);
int count = 0;
WAL.Entry entry = new WAL.Entry();
while (reader.next(entry) != null) {
count++;
}
reader.close();
assertEquals(expected, count);
}
// lifted from TestAtomicOperation
private HRegion createHRegion(WALFactory wals, Durability durability) throws IOException {
TableName tableName = TableName.valueOf(name.getMethodName().replaceAll("[^A-Za-z0-9-_]", "_"));
TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName)
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build();
RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build();
Path path = new Path(DIR, tableName.getNameAsString());
if (FS.exists(path)) {
if (!FS.delete(path, true)) {
throw new IOException("Failed delete of " + path);
}
}
ChunkCreator.initialize(MemStoreLAB.CHUNK_SIZE_DEFAULT, false, 0, 0,
0, null, MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT);
return HRegion.createHRegion(info, path, CONF, htd, wals.getWAL(info));
}
private HRegion createHRegion(TableDescriptor td, RegionInfo info, String dir, WAL wal,
Durability durability) throws IOException {
Path path = new Path(DIR, dir);
if (FS.exists(path)) {
if (!FS.delete(path, true)) {
throw new IOException("Failed delete of " + path);
}
}
ChunkCreator.initialize(MemStoreLAB.CHUNK_SIZE_DEFAULT, false, 0, 0,
0, null, MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT);
return HRegion.createHRegion(info, path, CONF, td, wal);
}
}
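// --- Hedged usage sketch (editor addition, not part of the original test) ---
// Per-mutation durability from client code, which is what the test above drives through
// region.put(): SKIP_WAL never writes to the WAL, ASYNC_WAL is only persisted on the next WAL
// sync, and SYNC_WAL/FSYNC_WAL are synced immediately. Row/column values are illustrative.
class DurabilityUsageSketch {
static Put asyncPut() {
Put p = new Put(Bytes.toBytes("row"));
p.addColumn(Bytes.toBytes("family"), Bytes.toBytes("col"), Bytes.toBytes("value"));
p.setDurability(Durability.ASYNC_WAL); // persisted on the next WAL sync
return p;
}
}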
|
|
package com.pungwe.cms.core.theme.services;
import com.pungwe.cms.core.annotations.stereotypes.Theme;
import com.pungwe.cms.core.annotations.system.ModuleDependency;
import com.pungwe.cms.core.module.services.ModuleManagementService;
import com.pungwe.cms.core.system.element.templates.PageElement;
import com.pungwe.cms.core.theme.ThemeConfig;
import com.pungwe.cms.core.utils.services.HookService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.stereotype.Service;
import org.springframework.util.ResourceUtils;
import org.springframework.util.StringUtils;
import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
import java.util.stream.Collectors;
import static com.lyncode.jtwig.util.LocalThreadHolder.getServletRequest;
/**
* Created by ian on 29/01/2016.
*/
@Service
public class ThemeManagementService {
private static final Logger LOG = LoggerFactory.getLogger(ThemeManagementService.class);
@Autowired
private ThemeConfigService<? extends ThemeConfig> themeConfigService;
@Autowired
private HookService hookService;
@Autowired
private ModuleManagementService moduleManagementService;
@Autowired
private ApplicationContext rootContext;
private Map<String, AnnotationConfigApplicationContext> themeContexts = new TreeMap<>();
public ApplicationContext getThemeContext(String name) {
return themeContexts.get(name);
}
public boolean enable(String theme) {
if (StringUtils.isEmpty(theme)) {
return false;
}
// Fetch the theme config
ThemeConfig config = themeConfigService.getTheme(theme);
try {
Class<?> c = Class.forName(config.getEntryPoint());
Theme info = c.getAnnotation(Theme.class);
String parent = info.parent();
if (!StringUtils.isEmpty(parent) && !themeConfigService.isEnabled(parent) && !enable(parent)) {
LOG.error("Could not enabled parent theme: " + parent + " for theme: " + theme);
return false;
}
// Enable dependencies
moduleManagementService.enable(Arrays.asList(info.dependencies()).stream().map(moduleDependency -> {
return moduleDependency.value();
}).collect(Collectors.toList()));
themeConfigService.setThemeEnabled(theme, true);
return true;
} catch (ClassNotFoundException ex) {
LOG.error("Could not enable theme: " + theme, ex);
themeConfigService.removeThemes(theme);
return false;
}
}
public void setDefaultTheme(String theme) {
themeConfigService.setDefaultTheme(theme);
}
public void setDefaultAdminTheme(String theme) {
themeConfigService.setDefaultAdminTheme(theme);
}
public boolean disable(String theme) {
themeConfigService.setThemeEnabled(theme, false);
AnnotationConfigApplicationContext ctx = themeContexts.remove(theme);
if (ctx == null) {
return true;
}
ctx.close();
// Closing succeeded if the context is no longer active
return !ctx.isActive();
public void startEnabledThemes() {
removeMissingThemes();
// Get a list of enabled themes
Set<ThemeConfig> enabled = (Set<ThemeConfig>) themeConfigService.listEnabledThemes();
// Create application contexts for the enabled themes. This is different from the module
// context: whereas all modules share a single application context, themes need to be isolated
// from each other in order to function correctly...
enabled.stream().sorted((t1, t2) -> {
try {
// First theme class
Class<?> c1 = Class.forName(t1.getEntryPoint());
Theme i1 = c1.getAnnotation(Theme.class);
// Second theme class
Class<?> c2 = Class.forName(t2.getEntryPoint());
Theme i2 = c2.getAnnotation(Theme.class);
// If t1 parent is blank and t2 is not, then t1 should be before t2.
if (StringUtils.isEmpty(i1.parent()) && !StringUtils.isEmpty(i2.parent())) {
return -1;
// If t1 has a parent and t2 does not, then it should be after t2
} else if (!StringUtils.isEmpty(i1.parent()) && StringUtils.isEmpty(i2.parent())) {
return 1;
}
// Ensure that there is not a circular reference before ordering by parentage
if (i1.name().equalsIgnoreCase(i2.parent()) && i2.name().equalsIgnoreCase(i1.parent())) {
throw new IllegalArgumentException("Circular reference in theme parents");
}
// Check if t1 is the parent of t2. If it is, then t1 should be first
if (i1.name().equalsIgnoreCase(i2.parent())) {
return -1;
// otherwise t2 should be first if it's the parent of t1
} else if (i2.name().equalsIgnoreCase(i1.parent())) {
return 1;
}
// If none of the above applies, fall back to sorting by name
return t1.getName().compareTo(t2.getName());
} catch (ClassNotFoundException ex) {
return -1;
}
}).forEachOrdered(theme -> {
try {
Class<?> c = Class.forName(theme.getEntryPoint());
// Check for an existing application context
AnnotationConfigApplicationContext ctx = (AnnotationConfigApplicationContext) getThemeContext(theme.getName());
if (ctx != null && ctx.isActive()) {
ctx.close();
}
// Create a new application context for the theme
ctx = new AnnotationConfigApplicationContext();
// Fetch the theme info
Theme themeInfo = c.getAnnotation(Theme.class);
ctx.setId("theme-application-context-" + themeInfo.name());
// Find the parent application context for the theme and set it
ApplicationContext parent = getThemeContext(themeInfo.parent());
ctx.setParent(parent == null ? moduleManagementService.getModuleContext() : parent);
// Register the theme entry point class
ctx.register(c);
// Refresh the context
ctx.refresh();
// Overwrite the existing theme application context
themeContexts.put(theme.getName(), ctx);
// If the theme has not been installed yet, execute its install hook and mark it as installed.
if (!theme.isInstalled()) {
hookService.executeHook(ctx, c, "install");
themeConfigService.setInstalled(theme.getName(), true);
}
} catch (ClassNotFoundException ex) {
LOG.error("Could not start theme: " + theme.getName(), ex);
} catch (IllegalAccessException ex) {
LOG.error("Could not install theme: " + theme.getName(), ex);
} catch (InvocationTargetException ex) {
LOG.error("Could not install theme: " + theme.getName(), ex);
}
});
}
public void scan() {
// Remove the themes missing from the classpath
removeMissingThemes();
String defaultTheme = rootContext.getEnvironment().getProperty("themes.default", "");
String defaultAdminTheme = rootContext.getEnvironment().getProperty("themes.defaultAdmin", defaultTheme);
ThemeConfig defaultThemeConfig = themeConfigService.getDefaultTheme();
ThemeConfig defaultAdminThemeConfig = themeConfigService.getDefaultAdminTheme();
ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(false);
scanner.addIncludeFilter(new AnnotationTypeFilter(Theme.class));
Set<BeanDefinition> modules = scanner.findCandidateComponents("*");
modules.forEach(b -> {
try {
Class c = Class.forName(b.getBeanClassName());
themeConfigService.registerTheme(c, c.getProtectionDomain().getCodeSource().getLocation());
} catch (ClassNotFoundException e) {
LOG.error("Could not load a module found on the class path, due to it's class not being found. This should never happen and usually means something is wrong with the environment", e);
}
});
if ((defaultThemeConfig == null || !themeClassExists(defaultThemeConfig))) {
enable(defaultTheme);
setDefaultTheme(defaultTheme);
}
if ((defaultAdminThemeConfig == null || !themeClassExists(defaultAdminThemeConfig))) {
enable(defaultAdminTheme);
setDefaultAdminTheme(defaultAdminTheme);
}
}
private boolean themeClassExists(ThemeConfig config) {
try {
// Just running this will check that the class exists
Class.forName(config.getEntryPoint());
return true;
} catch (ClassNotFoundException ex) {
return false;
}
}
public ApplicationContext getDefaultThemeContext() {
HttpServletRequest request = getServletRequest();
String currentPath = request.getRequestURI().substring(request.getContextPath().length());
// If the current path starts with /admin, then load the admin theme.
ThemeConfig themeConfig = null;
if (currentPath.startsWith("/admin")) {
themeConfig = themeConfigService.getDefaultAdminTheme();
} else {
themeConfig = themeConfigService.getDefaultTheme();
}
if (themeConfig == null) {
return null;
}
return getThemeContext(themeConfig.getName());
}
protected void removeMissingThemes() {
Set<String> missing = themeConfigService.listAllThemes().stream().filter(t -> {
try {
Class<?> c = Class.forName(t.getEntryPoint());
return !c.isAnnotationPresent(Theme.class);
} catch (Exception ex) {
return true;
}
}).map(t -> t.getName()).collect(Collectors.toSet());
themeConfigService.removeThemes(missing);
}
public List<String> resolveViewPath(HttpServletRequest request, final String prefix, final String viewName, final String suffix) {
List<String> urls = new ArrayList<>();
urls.add(prefix + viewName + suffix);
urls.addAll(getThemeTemplateURLSearchPath(prefix.replace(ResourceUtils.CLASSPATH_URL_PREFIX, ""), viewName, suffix));
// Get the request path... A substring of it, excluding the context path, determines whether this is an admin request or not.
try {
hookService.executeHook("theme", (c, o) -> {
if (o instanceof Map && ((Map) o).containsKey(viewName)) {
URL hookLocation = c.getProtectionDomain().getCodeSource().getLocation();
String prefixPath = prefix.replace(ResourceUtils.CLASSPATH_URL_PREFIX, "");
if (ResourceUtils.isJarFileURL(hookLocation)) {
String url = hookLocation.toExternalForm() + ResourceUtils.JAR_URL_SEPARATOR + prefixPath + ((Map) o).get(viewName) + suffix;
if (!urls.contains(url)) {
urls.add(url);
}
// Should default to standard prefix + file
} else {
String url = prefix + ((Map) o).get(viewName) + suffix;
if (!urls.contains(url)) {
urls.add(url);
}
}
}
});
// FIXME: Add custom exception here
// Shouldn't ever happen... But you never know
} catch (InvocationTargetException e) {
LOG.error("Could not execute hook theme", e);
} catch (IllegalAccessException e) {
LOG.error("Could not execute hook theme", e);
}
// Reverse the collection
Collections.reverse(urls);
return urls;
}
private List<String> getThemeTemplateURLSearchPath(String prefix, String viewName, String suffix) {
ThemeConfig config = getDefaultThemeConfigForRequest();
if (config == null) {
return new LinkedList<>();
}
URL url = null;
try {
if (ResourceUtils.isJarFileURL(new URL(config.getThemeLocation()))) {
url = new URL(config.getThemeLocation() + ResourceUtils.JAR_URL_SEPARATOR + prefix.replaceAll("^/", "").replaceAll("/$", "") + "/" + config.getName() + "/" + viewName + suffix);
} else {
url = new URL(config.getThemeLocation() + "/" + prefix.replaceAll("^/", "").replaceAll("/$", "") + "/" + config.getName() + "/" + viewName + suffix);
}
} catch (MalformedURLException ex) {
// do nothing
}
List<String> themePaths = new ArrayList<>(1);
if (url != null) {
themePaths.add(url.toExternalForm());
}
return themePaths;
}
protected ThemeConfig getDefaultThemeConfigForRequest() {
// FIXME: Move to a method as this is done more than once...
HttpServletRequest request = getServletRequest();
String currentPath = request.getRequestURI().substring(request.getContextPath().length());
// Fetch the default theme config
ThemeConfig themeConfig = null;
if (currentPath.startsWith("/admin")) {
themeConfig = themeConfigService.getDefaultAdminTheme();
} else {
themeConfig = themeConfigService.getDefaultTheme();
}
return themeConfig;
}
public Map<String, String> getRegionsForDefaultThemeByRequest() {
ThemeConfig themeConfig = getDefaultThemeConfigForRequest();
return getThemeRegions(themeConfig);
}
public Map<String, String> getThemeRegions(String theme) {
ThemeConfig themeConfig = themeConfigService.getTheme(theme);
return getThemeRegions(themeConfig);
}
/**
* Returns the regions for the default theme, independent of the current request.
*
* @return a map of region machine names to their human-readable labels
*/
public Map<String, String> getRegionsForDefaultTheme() {
ThemeConfig config = themeConfigService.getDefaultTheme();
return getThemeRegions(config);
}
protected Map<String, String> getThemeRegions(ThemeConfig themeConfig) {
if (themeConfig != null) {
try {
Class<?> clazz = Class.forName(themeConfig.getEntryPoint());
final Map<String, String> regions = new LinkedHashMap<>();
Arrays.asList(clazz.getAnnotation(Theme.class).regions()).forEach(themeRegion -> {
regions.put(themeRegion.name(), themeRegion.label());
});
if (regions.isEmpty()) {
regions.putAll(PageElement.DEFAULT_REGIONS);
}
return regions;
} catch (ClassNotFoundException e) {
LOG.warn("Could not find default theme class!");
}
}
return PageElement.DEFAULT_REGIONS;
}
public String getDefaultThemeName() {
ThemeConfig config = themeConfigService.getDefaultTheme();
if (config != null) {
return config.getName();
}
return null;
}
public String getCurrentThemeNameForRequest() {
ThemeConfig config = getDefaultThemeConfigForRequest();
if (config == null) {
return null;
}
return config.getName();
}
public List<ApplicationContext> getThemeContextsAsList() {
if (this.themeContexts == null) {
return new LinkedList<>();
}
return themeContexts.values().stream().collect(Collectors.toList());
}
}
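// --- Hedged usage sketch (not part of the original source) -------------------
// Illustrates the intended call order at application startup: scan the classpath
// for @Theme classes, enable and set the default theme, then start the enabled
// themes and look up the resulting context. The class name ThemeBootstrapExample
// and the defaultTheme argument are hypothetical; only types already imported by
// this file are used.
class ThemeBootstrapExample {
static ApplicationContext bootstrap(ThemeManagementService themes, String defaultTheme) {
themes.scan(); // register @Theme classes found on the classpath
if (themes.enable(defaultTheme)) { // also enables parent themes and module dependencies
themes.setDefaultTheme(defaultTheme);
}
themes.startEnabledThemes(); // one isolated AnnotationConfigApplicationContext per enabled theme
return themes.getThemeContext(defaultTheme); // null if the theme could not be started
}
}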
|
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.opsworks;
import com.amazonaws.services.opsworks.model.*;
import com.amazonaws.*;
/**
* Abstract implementation of {@code AWSOpsWorks}. Convenient method forms pass
* through to the corresponding overload that takes a request object, which
* throws an {@code UnsupportedOperationException}.
*/
public class AbstractAWSOpsWorks implements AWSOpsWorks {
protected AbstractAWSOpsWorks() {
}
@Override
public void setEndpoint(String endpoint) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void setRegion(com.amazonaws.regions.Region region) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void assignInstance(AssignInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void assignVolume(AssignVolumeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void associateElasticIp(AssociateElasticIpRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void attachElasticLoadBalancer(
AttachElasticLoadBalancerRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CloneStackResult cloneStack(CloneStackRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateAppResult createApp(CreateAppRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateDeploymentResult createDeployment(
CreateDeploymentRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateInstanceResult createInstance(CreateInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateLayerResult createLayer(CreateLayerRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateStackResult createStack(CreateStackRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateUserProfileResult createUserProfile(
CreateUserProfileRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deleteApp(DeleteAppRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deleteInstance(DeleteInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deleteLayer(DeleteLayerRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deleteStack(DeleteStackRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deleteUserProfile(DeleteUserProfileRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deregisterEcsCluster(DeregisterEcsClusterRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deregisterElasticIp(DeregisterElasticIpRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deregisterInstance(DeregisterInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deregisterRdsDbInstance(DeregisterRdsDbInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void deregisterVolume(DeregisterVolumeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeAgentVersionsResult describeAgentVersions(
DescribeAgentVersionsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeAppsResult describeApps(DescribeAppsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeCommandsResult describeCommands(
DescribeCommandsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeDeploymentsResult describeDeployments(
DescribeDeploymentsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeEcsClustersResult describeEcsClusters(
DescribeEcsClustersRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeElasticIpsResult describeElasticIps(
DescribeElasticIpsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeElasticLoadBalancersResult describeElasticLoadBalancers(
DescribeElasticLoadBalancersRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeInstancesResult describeInstances(
DescribeInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeLayersResult describeLayers(DescribeLayersRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeLoadBasedAutoScalingResult describeLoadBasedAutoScaling(
DescribeLoadBasedAutoScalingRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeMyUserProfileResult describeMyUserProfile(
DescribeMyUserProfileRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribePermissionsResult describePermissions(
DescribePermissionsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeRaidArraysResult describeRaidArrays(
DescribeRaidArraysRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeRdsDbInstancesResult describeRdsDbInstances(
DescribeRdsDbInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeServiceErrorsResult describeServiceErrors(
DescribeServiceErrorsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeStackProvisioningParametersResult describeStackProvisioningParameters(
DescribeStackProvisioningParametersRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeStackSummaryResult describeStackSummary(
DescribeStackSummaryRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeStacksResult describeStacks(DescribeStacksRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeTimeBasedAutoScalingResult describeTimeBasedAutoScaling(
DescribeTimeBasedAutoScalingRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeUserProfilesResult describeUserProfiles(
DescribeUserProfilesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeVolumesResult describeVolumes(DescribeVolumesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void detachElasticLoadBalancer(
DetachElasticLoadBalancerRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void disassociateElasticIp(DisassociateElasticIpRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public GetHostnameSuggestionResult getHostnameSuggestion(
GetHostnameSuggestionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public GrantAccessResult grantAccess(GrantAccessRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void rebootInstance(RebootInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RegisterEcsClusterResult registerEcsCluster(
RegisterEcsClusterRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RegisterElasticIpResult registerElasticIp(
RegisterElasticIpRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RegisterInstanceResult registerInstance(
RegisterInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void registerRdsDbInstance(RegisterRdsDbInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RegisterVolumeResult registerVolume(RegisterVolumeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void setLoadBasedAutoScaling(SetLoadBasedAutoScalingRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void setPermission(SetPermissionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void setTimeBasedAutoScaling(SetTimeBasedAutoScalingRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void startInstance(StartInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void startStack(StartStackRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void stopInstance(StopInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void stopStack(StopStackRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void unassignInstance(UnassignInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void unassignVolume(UnassignVolumeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void updateApp(UpdateAppRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void updateElasticIp(UpdateElasticIpRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void updateInstance(UpdateInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void updateLayer(UpdateLayerRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void updateMyUserProfile(UpdateMyUserProfileRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void updateRdsDbInstance(UpdateRdsDbInstanceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void updateStack(UpdateStackRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void updateUserProfile(UpdateUserProfileRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void updateVolume(UpdateVolumeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void shutdown() {
throw new java.lang.UnsupportedOperationException();
}
@Override
public com.amazonaws.ResponseMetadata getCachedResponseMetadata(
com.amazonaws.AmazonWebServiceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
}
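// --- Hedged usage sketch (not part of the original source) -------------------
// Shows the intended use of this adapter: extend it and override only the
// operations you actually need, letting every other call fail fast with
// UnsupportedOperationException. The class name StubOpsWorks is hypothetical,
// e.g. a stand-in client for unit tests.
class StubOpsWorks extends AbstractAWSOpsWorks {
@Override
public DescribeStacksResult describeStacks(DescribeStacksRequest request) {
// Return an empty result instead of calling the real OpsWorks endpoint.
return new DescribeStacksResult();
}
}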
|
|
package com.viesis.viescraft.client.tileentity.model;
import net.minecraft.client.model.ModelBase;
import net.minecraft.client.model.ModelRenderer;
import net.minecraft.entity.Entity;
public class ModelAirshipWorkbench extends ModelBase {
private float bladespin;
private long lastframe;
//fields
ModelRenderer Base1a;
ModelRenderer Base1b;
ModelRenderer Base1c;
ModelRenderer Top1a;
ModelRenderer Top1b;
ModelRenderer TopGrid1a;
ModelRenderer TopGrid1b;
ModelRenderer TopGrid1c;
ModelRenderer TopGrid1d;
ModelRenderer TopGrid2a;
ModelRenderer TopGrid2b;
ModelRenderer TopGrid2c;
ModelRenderer TopGrid2d;
ModelRenderer TopGrid2e;
ModelRenderer TopGrid2f;
ModelRenderer TopGrid2g;
ModelRenderer TopGrid2h;
ModelRenderer Middle1a;
ModelRenderer InnerGear1a;
ModelRenderer InnerGear1b;
ModelRenderer InnerGear1c;
ModelRenderer InnerGear1d;
ModelRenderer InnerGear1e;
ModelRenderer OuterGear1a;
ModelRenderer OuterGear1b;
ModelRenderer OuterGear1c;
ModelRenderer OuterGear1d;
ModelRenderer OuterGear1e;
ModelRenderer OuterGear1f;
ModelRenderer OuterGear1g;
ModelRenderer OuterGear1h;
ModelRenderer Shaft1a;
ModelRenderer InnerGear2a;
ModelRenderer InnerGear2b;
ModelRenderer InnerGear2c;
ModelRenderer InnerGear2d;
ModelRenderer InnerGear2e;
ModelRenderer OuterGear2a;
ModelRenderer OuterGear2b;
ModelRenderer OuterGear2c;
ModelRenderer OuterGear2d;
ModelRenderer OuterGear2e;
ModelRenderer OuterGear2f;
ModelRenderer OuterGear2g;
ModelRenderer OuterGear2h;
public ModelAirshipWorkbench()
{
textureWidth = 256;
textureHeight = 128;
Base1a = new ModelRenderer(this, 0, 111);
Base1a.addBox(-8F, 0F, -8F, 16, 1, 16);
Base1a.setRotationPoint(0F, 23F, 0F);
Base1a.setTextureSize(256, 128);
Base1a.mirror = true;
setRotation(Base1a, 0F, 0F, 0F);
Base1b = new ModelRenderer(this, 0, 96);
Base1b.addBox(-7F, 0F, -7F, 14, 1, 14);
Base1b.setRotationPoint(0F, 22F, 0F);
Base1b.setTextureSize(256, 128);
Base1b.mirror = true;
setRotation(Base1b, 0F, 0F, 0F);
Base1c = new ModelRenderer(this, 0, 87);
Base1c.addBox(-4F, 0F, -4F, 8, 1, 8);
Base1c.setRotationPoint(0F, 21F, 0F);
Base1c.setTextureSize(256, 128);
Base1c.mirror = true;
setRotation(Base1c, 0F, 0.7853982F, 0F);
Top1a = new ModelRenderer(this, 0, 72);
Top1a.addBox(-7F, -1F, -7F, 14, 1, 14);
Top1a.setRotationPoint(0F, 10F, 0F);
Top1a.setTextureSize(256, 128);
Top1a.mirror = true;
setRotation(Top1a, 0F, 0F, 0F);
Top1b = new ModelRenderer(this, 0, 63);
Top1b.addBox(-4F, 0F, -4F, 8, 1, 8);
Top1b.setRotationPoint(0F, 10F, 0F);
Top1b.setTextureSize(256, 128);
Top1b.mirror = true;
setRotation(Top1b, 0F, 0.7853982F, 0F);
TopGrid1a = new ModelRenderer(this, 0, 34);
TopGrid1a.addBox(-8F, -1F, -8F, 16, 1, 1);
TopGrid1a.setRotationPoint(0F, 9F, 0F);
TopGrid1a.setTextureSize(256, 128);
TopGrid1a.mirror = true;
setRotation(TopGrid1a, 0F, 0F, 0F);
TopGrid1b = new ModelRenderer(this, 0, 34);
TopGrid1b.addBox(-8F, -1F, 7F, 16, 1, 1);
TopGrid1b.setRotationPoint(0F, 9F, 0F);
TopGrid1b.setTextureSize(256, 128);
TopGrid1b.mirror = true;
setRotation(TopGrid1b, 0F, 0F, 0F);
TopGrid1c = new ModelRenderer(this, 0, 36);
TopGrid1c.addBox(-8F, -1F, -7F, 1, 1, 14);
TopGrid1c.setRotationPoint(0F, 9F, 0F);
TopGrid1c.setTextureSize(256, 128);
TopGrid1c.mirror = true;
setRotation(TopGrid1c, 0F, 0F, 0F);
TopGrid1d = new ModelRenderer(this, 0, 36);
TopGrid1d.addBox(7F, -1F, -7F, 1, 1, 14);
TopGrid1d.setRotationPoint(0F, 9F, 0F);
TopGrid1d.setTextureSize(256, 128);
TopGrid1d.mirror = true;
setRotation(TopGrid1d, 0F, 0F, 0F);
TopGrid2a = new ModelRenderer(this, 0, 36);
TopGrid2a.addBox(-3F, -1F, -7F, 1, 1, 14);
TopGrid2a.setRotationPoint(0F, 9F, 0F);
TopGrid2a.setTextureSize(256, 128);
TopGrid2a.mirror = true;
setRotation(TopGrid2a, 0F, 0F, 0F);
TopGrid2b = new ModelRenderer(this, 0, 36);
TopGrid2b.addBox(2F, -1F, -7F, 1, 1, 14);
TopGrid2b.setRotationPoint(0F, 9F, 0F);
TopGrid2b.setTextureSize(256, 128);
TopGrid2b.mirror = true;
setRotation(TopGrid2b, 0F, 0F, 0F);
TopGrid2c = new ModelRenderer(this, 0, 32);
TopGrid2c.addBox(-7F, -1F, -3F, 4, 1, 1);
TopGrid2c.setRotationPoint(0F, 9F, 0F);
TopGrid2c.setTextureSize(256, 128);
TopGrid2c.mirror = true;
setRotation(TopGrid2c, 0F, 0F, 0F);
TopGrid2d = new ModelRenderer(this, 0, 32);
TopGrid2d.addBox(-2F, -1F, -3F, 4, 1, 1);
TopGrid2d.setRotationPoint(0F, 9F, 0F);
TopGrid2d.setTextureSize(256, 128);
TopGrid2d.mirror = true;
setRotation(TopGrid2d, 0F, 0F, 0F);
TopGrid2e = new ModelRenderer(this, 0, 32);
TopGrid2e.addBox(3F, -1F, -3F, 4, 1, 1);
TopGrid2e.setRotationPoint(0F, 9F, 0F);
TopGrid2e.setTextureSize(256, 128);
TopGrid2e.mirror = true;
setRotation(TopGrid2e, 0F, 0F, 0F);
TopGrid2f = new ModelRenderer(this, 0, 32);
TopGrid2f.addBox(-7F, -1F, 2F, 4, 1, 1);
TopGrid2f.setRotationPoint(0F, 9F, 0F);
TopGrid2f.setTextureSize(256, 128);
TopGrid2f.mirror = true;
setRotation(TopGrid2f, 0F, 0F, 0F);
TopGrid2g = new ModelRenderer(this, 0, 32);
TopGrid2g.addBox(-2F, -1F, 2F, 4, 1, 1);
TopGrid2g.setRotationPoint(0F, 9F, 0F);
TopGrid2g.setTextureSize(256, 128);
TopGrid2g.mirror = true;
setRotation(TopGrid2g, 0F, 0F, 0F);
TopGrid2h = new ModelRenderer(this, 0, 32);
TopGrid2h.addBox(3F, -1F, 2F, 4, 1, 1);
TopGrid2h.setRotationPoint(0F, 9F, 0F);
TopGrid2h.setTextureSize(256, 128);
TopGrid2h.mirror = true;
setRotation(TopGrid2h, 0F, 0F, 0F);
Middle1a = new ModelRenderer(this, 0, 51);
Middle1a.addBox(-4F, 0F, -4F, 8, 4, 8);
Middle1a.setRotationPoint(0F, 14F, 0F);
Middle1a.setTextureSize(256, 128);
Middle1a.mirror = true;
setRotation(Middle1a, 0F, 0F, 0F);
InnerGear1a = new ModelRenderer(this, 0, 30);
InnerGear1a.addBox(-8F, 0F, -0.5F, 16, 1, 1);
InnerGear1a.setRotationPoint(0F, 19F, 0F);
InnerGear1a.setTextureSize(256, 128);
InnerGear1a.mirror = true;
setRotation(InnerGear1a, 0F, 0F, 0F);
InnerGear1b = new ModelRenderer(this, 0, 30);
InnerGear1b.addBox(-8F, 0F, -0.5F, 16, 1, 1);
InnerGear1b.setRotationPoint(0F, 19F, 0F);
InnerGear1b.setTextureSize(256, 128);
InnerGear1b.mirror = true;
setRotation(InnerGear1b, 0F, 1.570796F, 0F);
InnerGear1c = new ModelRenderer(this, 0, 30);
InnerGear1c.addBox(-8F, 0F, -0.5F, 16, 1, 1);
InnerGear1c.setRotationPoint(0F, 19F, 0F);
InnerGear1c.setTextureSize(256, 128);
InnerGear1c.mirror = true;
setRotation(InnerGear1c, 0F, 0.7853982F, 0F);
InnerGear1d = new ModelRenderer(this, 0, 30);
InnerGear1d.addBox(-8F, 0F, -0.5F, 16, 1, 1);
InnerGear1d.setRotationPoint(0F, 19F, 0F);
InnerGear1d.setTextureSize(256, 128);
InnerGear1d.mirror = true;
setRotation(InnerGear1d, 0F, 2.356194F, 0F);
InnerGear1e = new ModelRenderer(this, 0, 24);
InnerGear1e.addBox(-2F, -0.5F, -2F, 4, 2, 4);
InnerGear1e.setRotationPoint(0F, 19F, 0F);
InnerGear1e.setTextureSize(256, 128);
InnerGear1e.mirror = true;
setRotation(InnerGear1e, 0F, 0.3839724F, 0F);
OuterGear1a = new ModelRenderer(this, 34, 19);
OuterGear1a.addBox(-2.5F, -0.5F, 5.5F, 5, 2, 1);
OuterGear1a.setRotationPoint(0F, 19F, 0F);
OuterGear1a.setTextureSize(256, 128);
OuterGear1a.mirror = true;
setRotation(OuterGear1a, 0F, 1.169371F, 0F);
OuterGear1b = new ModelRenderer(this, 34, 19);
OuterGear1b.addBox(-2.5F, -0.5F, -6.5F, 5, 2, 1);
OuterGear1b.setRotationPoint(0F, 19F, 0F);
OuterGear1b.setTextureSize(256, 128);
OuterGear1b.mirror = true;
setRotation(OuterGear1b, 0F, 1.169371F, 0F);
OuterGear1c = new ModelRenderer(this, 34, 22);
OuterGear1c.addBox(5.5F, -0.5F, -2.5F, 1, 2, 5);
OuterGear1c.setRotationPoint(0F, 19F, 0F);
OuterGear1c.setTextureSize(256, 128);
OuterGear1c.mirror = true;
setRotation(OuterGear1c, 0F, 1.169371F, 0F);
OuterGear1d = new ModelRenderer(this, 34, 22);
OuterGear1d.addBox(-6.5F, -0.5F, -2.5F, 1, 2, 5);
OuterGear1d.setRotationPoint(0F, 19F, 0F);
OuterGear1d.setTextureSize(256, 128);
OuterGear1d.mirror = true;
setRotation(OuterGear1d, 0F, 1.169371F, 0F);
OuterGear1e = new ModelRenderer(this, 34, 29);
OuterGear1e.addBox(4.5F, -0.5F, -2F, 1, 2, 4);
OuterGear1e.setRotationPoint(0F, 19F, 0F);
OuterGear1e.setTextureSize(256, 128);
OuterGear1e.mirror = true;
setRotation(OuterGear1e, 0F, 0.3839724F, 0F);
OuterGear1f = new ModelRenderer(this, 34, 29);
OuterGear1f.addBox(-5.5F, -0.5F, -2F, 1, 2, 4);
OuterGear1f.setRotationPoint(0F, 19F, 0F);
OuterGear1f.setTextureSize(256, 128);
OuterGear1f.mirror = true;
setRotation(OuterGear1f, 0F, 0.3839724F, 0F);
OuterGear1g = new ModelRenderer(this, 34, 16);
OuterGear1g.addBox(-2F, -0.5F, 4.5F, 4, 2, 1);
OuterGear1g.setRotationPoint(0F, 19F, 0F);
OuterGear1g.setTextureSize(256, 128);
OuterGear1g.mirror = true;
setRotation(OuterGear1g, 0F, 0.3839724F, 0F);
OuterGear1h = new ModelRenderer(this, 34, 16);
OuterGear1h.addBox(-2F, -0.5F, -5.5F, 4, 2, 1);
OuterGear1h.setRotationPoint(0F, 19F, 0F);
OuterGear1h.setTextureSize(256, 128);
OuterGear1h.mirror = true;
setRotation(OuterGear1h, 0F, 0.3839724F, 0F);
Shaft1a = new ModelRenderer(this, 32, 51);
Shaft1a.addBox(-0.5F, -7F, -0.5F, 1, 13, 1);
Shaft1a.setRotationPoint(0F, 17F, 0F);
Shaft1a.setTextureSize(256, 128);
Shaft1a.mirror = true;
setRotation(Shaft1a, 0F, 0F, 0F);
InnerGear2a = new ModelRenderer(this, 0, 22);
InnerGear2a.addBox(-8F, 0F, -0.5F, 16, 1, 1);
InnerGear2a.setRotationPoint(0F, 12F, 0F);
InnerGear2a.setTextureSize(256, 128);
InnerGear2a.mirror = true;
setRotation(InnerGear2a, 0F, 0F, 0F);
InnerGear2b = new ModelRenderer(this, 0, 22);
InnerGear2b.addBox(-8F, 0F, -0.5F, 16, 1, 1);
InnerGear2b.setRotationPoint(0F, 12F, 0F);
InnerGear2b.setTextureSize(256, 128);
InnerGear2b.mirror = true;
setRotation(InnerGear2b, 0F, 1.570796F, 0F);
InnerGear2c = new ModelRenderer(this, 0, 22);
InnerGear2c.addBox(-8F, 0F, -0.5F, 16, 1, 1);
InnerGear2c.setRotationPoint(0F, 12F, 0F);
InnerGear2c.setTextureSize(256, 128);
InnerGear2c.mirror = true;
setRotation(InnerGear2c, 0F, 0.7853982F, 0F);
InnerGear2d = new ModelRenderer(this, 0, 22);
InnerGear2d.addBox(-8F, 0F, -0.5F, 16, 1, 1);
InnerGear2d.setRotationPoint(0F, 12F, 0F);
InnerGear2d.setTextureSize(256, 128);
InnerGear2d.mirror = true;
setRotation(InnerGear2d, 0F, 2.356194F, 0F);
InnerGear2e = new ModelRenderer(this, 0, 16);
InnerGear2e.addBox(-2F, -0.5F, -2F, 4, 2, 4);
InnerGear2e.setRotationPoint(0F, 12F, 0F);
InnerGear2e.setTextureSize(256, 128);
InnerGear2e.mirror = true;
setRotation(InnerGear2e, 0F, 0.3839724F, 0F);
OuterGear2a = new ModelRenderer(this, 46, 19);
OuterGear2a.addBox(-2.5F, -0.5F, 5.5F, 5, 2, 1);
OuterGear2a.setRotationPoint(0F, 12F, 0F);
OuterGear2a.setTextureSize(256, 128);
OuterGear2a.mirror = true;
setRotation(OuterGear2a, 0F, 1.169371F, 0F);
OuterGear2b = new ModelRenderer(this, 46, 19);
OuterGear2b.addBox(-2.5F, -0.5F, -6.5F, 5, 2, 1);
OuterGear2b.setRotationPoint(0F, 12F, 0F);
OuterGear2b.setTextureSize(256, 128);
OuterGear2b.mirror = true;
setRotation(OuterGear2b, 0F, 1.169371F, 0F);
OuterGear2c = new ModelRenderer(this, 46, 22);
OuterGear2c.addBox(5.5F, -0.5F, -2.5F, 1, 2, 5);
OuterGear2c.setRotationPoint(0F, 12F, 0F);
OuterGear2c.setTextureSize(256, 128);
OuterGear2c.mirror = true;
setRotation(OuterGear2c, 0F, 1.169371F, 0F);
OuterGear2d = new ModelRenderer(this, 46, 22);
OuterGear2d.addBox(-6.5F, -0.5F, -2.5F, 1, 2, 5);
OuterGear2d.setRotationPoint(0F, 12F, 0F);
OuterGear2d.setTextureSize(256, 128);
OuterGear2d.mirror = true;
setRotation(OuterGear2d, 0F, 1.169371F, 0F);
OuterGear2e = new ModelRenderer(this, 46, 29);
OuterGear2e.addBox(4.5F, -0.5F, -2F, 1, 2, 4);
OuterGear2e.setRotationPoint(0F, 12F, 0F);
OuterGear2e.setTextureSize(256, 128);
OuterGear2e.mirror = true;
setRotation(OuterGear2e, 0F, 0.3839724F, 0F);
OuterGear2f = new ModelRenderer(this, 46, 29);
OuterGear2f.addBox(-5.5F, -0.5F, -2F, 1, 2, 4);
OuterGear2f.setRotationPoint(0F, 12F, 0F);
OuterGear2f.setTextureSize(256, 128);
OuterGear2f.mirror = true;
setRotation(OuterGear2f, 0F, 0.3839724F, 0F);
OuterGear2g = new ModelRenderer(this, 46, 16);
OuterGear2g.addBox(-2F, -0.5F, 4.5F, 4, 2, 1);
OuterGear2g.setRotationPoint(0F, 12F, 0F);
OuterGear2g.setTextureSize(256, 128);
OuterGear2g.mirror = true;
setRotation(OuterGear2g, 0F, 0.3839724F, 0F);
OuterGear2h = new ModelRenderer(this, 46, 16);
OuterGear2h.addBox(-2F, -0.5F, -5.5F, 4, 2, 1);
OuterGear2h.setRotationPoint(0F, 12F, 0F);
OuterGear2h.setTextureSize(256, 128);
OuterGear2h.mirror = true;
setRotation(OuterGear2h, 0F, 0.3839724F, 0F);
}
public void render(Entity entity, float f, float f1, float f2, float f3, float f4, float f5)
{
super.render(entity, f, f1, f2, f3, f4, f5);
setRotationAngles(f, f1, f2, f3, f4, f5, entity);
Base1a.render(f5);
Base1b.render(f5);
Base1c.render(f5);
Top1a.render(f5);
Top1b.render(f5);
TopGrid1a.render(f5);
TopGrid1b.render(f5);
TopGrid1c.render(f5);
TopGrid1d.render(f5);
TopGrid2a.render(f5);
TopGrid2b.render(f5);
TopGrid2c.render(f5);
TopGrid2d.render(f5);
TopGrid2e.render(f5);
TopGrid2f.render(f5);
TopGrid2g.render(f5);
TopGrid2h.render(f5);
Middle1a.render(f5);
InnerGear1a.render(f5);
InnerGear1b.render(f5);
InnerGear1c.render(f5);
InnerGear1d.render(f5);
InnerGear1e.render(f5);
OuterGear1a.render(f5);
OuterGear1b.render(f5);
OuterGear1c.render(f5);
OuterGear1d.render(f5);
OuterGear1e.render(f5);
OuterGear1f.render(f5);
OuterGear1g.render(f5);
OuterGear1h.render(f5);
Shaft1a.render(f5);
InnerGear2a.render(f5);
InnerGear2b.render(f5);
InnerGear2c.render(f5);
InnerGear2d.render(f5);
InnerGear2e.render(f5);
OuterGear2a.render(f5);
OuterGear2b.render(f5);
OuterGear2c.render(f5);
OuterGear2d.render(f5);
OuterGear2e.render(f5);
OuterGear2f.render(f5);
OuterGear2g.render(f5);
OuterGear2h.render(f5);
}
public void renderModel(float f5)
{
Base1a.render(f5);
Base1b.render(f5);
Base1c.render(f5);
Top1a.render(f5);
Top1b.render(f5);
TopGrid1a.render(f5);
TopGrid1b.render(f5);
TopGrid1c.render(f5);
TopGrid1d.render(f5);
//TopGrid2a.render(f5);
//TopGrid2b.render(f5);
//TopGrid2c.render(f5);
//TopGrid2d.render(f5);
//TopGrid2e.render(f5);
//TopGrid2f.render(f5);
//TopGrid2g.render(f5);
//TopGrid2h.render(f5);
Middle1a.render(f5);
InnerGear1a.render(f5);
InnerGear1b.render(f5);
InnerGear1c.render(f5);
InnerGear1d.render(f5);
InnerGear1e.render(f5);
OuterGear1a.render(f5);
OuterGear1b.render(f5);
OuterGear1c.render(f5);
OuterGear1d.render(f5);
OuterGear1e.render(f5);
OuterGear1f.render(f5);
OuterGear1g.render(f5);
OuterGear1h.render(f5);
Shaft1a.render(f5);
InnerGear2a.render(f5);
InnerGear2b.render(f5);
InnerGear2c.render(f5);
InnerGear2d.render(f5);
InnerGear2e.render(f5);
OuterGear2a.render(f5);
OuterGear2b.render(f5);
OuterGear2c.render(f5);
OuterGear2d.render(f5);
OuterGear2e.render(f5);
OuterGear2f.render(f5);
OuterGear2g.render(f5);
OuterGear2h.render(f5);
long now = System.nanoTime();
// Convert the elapsed nanoseconds to milliseconds (1,000,000 ns = 1 ms)
int elapsed = (int) ((now - lastframe) / 1000000);
// Dividing the elapsed milliseconds by 300 scales the gear rotation speed
bladespin = (float) elapsed / 300.0f;
lastframe = now;
this.InnerGear1a.rotateAngleY += (bladespin * 0.25);
this.InnerGear1b.rotateAngleY += (bladespin * 0.25);
this.InnerGear1c.rotateAngleY += (bladespin * 0.25);
this.InnerGear1d.rotateAngleY += (bladespin * 0.25);
this.InnerGear1e.rotateAngleY += (bladespin * 0.25);
this.OuterGear1a.rotateAngleY += (bladespin * 0.25);
this.OuterGear1b.rotateAngleY += (bladespin * 0.25);
this.OuterGear1c.rotateAngleY += (bladespin * 0.25);
this.OuterGear1d.rotateAngleY += (bladespin * 0.25);
this.OuterGear1e.rotateAngleY += (bladespin * 0.25);
this.OuterGear1f.rotateAngleY += (bladespin * 0.25);
this.OuterGear1g.rotateAngleY += (bladespin * 0.25);
this.OuterGear1h.rotateAngleY += (bladespin * 0.25);
this.InnerGear2a.rotateAngleY -= (bladespin * 0.5);
this.InnerGear2b.rotateAngleY -= (bladespin * 0.5);
this.InnerGear2c.rotateAngleY -= (bladespin * 0.5);
this.InnerGear2d.rotateAngleY -= (bladespin * 0.5);
this.InnerGear2e.rotateAngleY -= (bladespin * 0.5);
this.OuterGear2a.rotateAngleY -= (bladespin * 0.5);
this.OuterGear2b.rotateAngleY -= (bladespin * 0.5);
this.OuterGear2c.rotateAngleY -= (bladespin * 0.5);
this.OuterGear2d.rotateAngleY -= (bladespin * 0.5);
this.OuterGear2e.rotateAngleY -= (bladespin * 0.5);
this.OuterGear2f.rotateAngleY -= (bladespin * 0.5);
this.OuterGear2g.rotateAngleY -= (bladespin * 0.5);
this.OuterGear2h.rotateAngleY -= (bladespin * 0.5);
}
private void setRotation(ModelRenderer model, float x, float y, float z)
{
model.rotateAngleX = x;
model.rotateAngleY = y;
model.rotateAngleZ = z;
}
public void setRotationAngles(float f, float f1, float f2, float f3, float f4, float f5, Entity entity)
{
super.setRotationAngles(f, f1, f2, f3, f4, f5, entity);
long now = System.nanoTime();
// Convert the elapsed nanoseconds to milliseconds (1,000,000 ns = 1 ms)
int elapsed = (int) ((now - lastframe) / 1000000);
// Dividing the elapsed milliseconds by 300 scales the gear rotation speed
bladespin = (float) elapsed / 300.0f;
lastframe = now;
this.InnerGear1a.rotateAngleY += (bladespin * 2);
this.InnerGear1b.rotateAngleY += (bladespin * 2);
this.InnerGear1c.rotateAngleY += (bladespin * 2);
this.InnerGear1d.rotateAngleY += (bladespin * 2);
this.InnerGear1e.rotateAngleY += (bladespin * 2);
this.OuterGear1a.rotateAngleY += (bladespin * 2);
this.OuterGear1b.rotateAngleY += (bladespin * 2);
this.OuterGear1c.rotateAngleY += (bladespin * 2);
this.OuterGear1d.rotateAngleY += (bladespin * 2);
this.OuterGear1e.rotateAngleY += (bladespin * 2);
this.OuterGear1f.rotateAngleY += (bladespin * 2);
this.OuterGear1g.rotateAngleY += (bladespin * 2);
this.OuterGear1h.rotateAngleY += (bladespin * 2);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.riot.out ;
import java.net.MalformedURLException ;
import org.apache.jena.atlas.io.AWriter ;
import org.apache.jena.atlas.lib.Pair ;
import org.apache.jena.datatypes.xsd.XSDDatatype ;
import org.apache.jena.graph.Node ;
import org.apache.jena.iri.IRI ;
import org.apache.jena.iri.IRIFactory ;
import org.apache.jena.iri.IRIRelativize ;
import org.apache.jena.riot.out.NodeToLabel ;
import org.apache.jena.riot.system.PrefixMap ;
import org.apache.jena.riot.system.PrefixMapFactory ;
import org.apache.jena.riot.system.RiotChars ;
public class NodeFormatterTTL extends NodeFormatterNT
{
private final NodeToLabel nodeToLabel ;
private final PrefixMap prefixMap ;
private final String baseIRI ;
private final IRI iriResolver ;
public NodeFormatterTTL(String baseIRI, PrefixMap prefixMap) {
this(baseIRI, prefixMap, NodeToLabel.createBNodeByLabelEncoded()) ;
}
public NodeFormatterTTL(String baseIRI, PrefixMap prefixMap, NodeToLabel nodeToLabel) {
super(CharSpace.UTF8) ;
this.nodeToLabel = nodeToLabel ;
if ( prefixMap == null )
prefixMap = PrefixMapFactory.create() ;
this.prefixMap = prefixMap ;
this.baseIRI = baseIRI ;
this.iriResolver =
baseIRI != null ? IRIFactory.jenaImplementation().construct(baseIRI) : null ;
}
@Override
public void formatURI(AWriter w, String uriStr) {
Pair<String, String> pName = prefixMap.abbrev(uriStr) ;
// Check if legal
if ( pName != null ) {
// Need to check that both parts are legal prefixed-name components, not merely "not obviously illegal".
String pref = pName.getLeft() ;
String ln = pName.getRight() ;
if ( safeForPrefix(pref) && safeForPrefixLocalname(ln) ) {
w.print(pName.getLeft()) ;
w.print(':') ;
w.print(pName.getRight()) ;
return ;
}
}
// Attempt base abbreviation.
if ( iriResolver != null ) {
String x = abbrevByBase(uriStr) ;
if ( x != null ) {
w.print('<') ;
w.print(x) ;
w.print('>') ;
return ;
}
}
// else
super.formatURI(w, uriStr) ;
}
static private int relFlags = IRIRelativize.SAMEDOCUMENT | IRIRelativize.CHILD ;
/** Abbreviate the URI */
private String abbrevByBase(String uri) {
IRI rel = iriResolver.relativize(uri, relFlags) ;
String r = null ;
try {
r = rel.toASCIIString() ;
} catch (MalformedURLException ex) {
r = rel.toString() ;
}
return r ;
}
/* private-testing */
static boolean safeForPrefix(String str) {
int N = str.length() ;
if ( N == 0 )
return true ;
int idx = 0 ;
idx = skip1_PN_CHARS_BASE(str, idx) ;
if ( idx == -1 )
return false ;
idx = skipAny_PN_CHARS_or_DOT(str, idx, N - 1) ;
if ( idx == -1 )
return false ;
// Final char
idx = skip1_PN_CHARS(str, idx) ;
if ( idx == -1 )
return false ;
return (idx == N) ;
}
// @Override
// public void formatVar(WriterI w, String name)
// @Override
// public void formatBNode(WriterI w, String label)
@Override
public void formatBNode(AWriter w, Node n) {
String x = nodeToLabel.get(null, n) ;
w.print(x) ;
}
// @Override
// public void formatLitString(WriterI w, String lex)
// @Override
// public void formatLitLang(WriterI w, String lex, String langTag)
/* private-testing */static boolean safeForPrefixLocalname(String str) {
int N = str.length() ;
if ( N == 0 )
return true ;
int idx = 0 ;
idx = skip1_PN_CHARS_U_or_digit(str, idx) ;
if ( idx == -1 )
return false ;
idx = skipAny_PN_CHARS_or_DOT(str, idx, N - 1) ;
if ( idx == -1 )
return false ;
idx = skip1_PN_CHARS(str, idx) ;
// Final char
return (idx == N) ;
}
private static boolean is_PN_CHARS_BASE(int ch) {
return RiotChars.isAlpha(ch) ;
}
private static boolean is_PN_CHARS_U(int ch) {
return is_PN_CHARS_BASE(ch) || ch == '_' ;
}
private static boolean is_PN_CHARS(int ch) {
return is_PN_CHARS_U(ch) || ch == '-' || RiotChars.isDigit(ch) || isCharsExtra(ch) ;
}
private static boolean isCharsExtra(int ch) {
return ch == '\u00B7' || RiotChars.range(ch, '\u0300', '\u036F') || RiotChars.range(ch, '\u203F', '\u2040') ;
}
private static int skip1_PN_CHARS_U_or_digit(String str, int idx) {
char ch = str.charAt(idx) ;
if ( is_PN_CHARS_U(ch) )
return idx + 1 ;
if ( RiotChars.isDigit(ch) )
return idx + 1 ;
return -1 ;
}
private static int skip1_PN_CHARS_BASE(String str, int idx) {
char ch = str.charAt(idx) ;
if ( is_PN_CHARS_BASE(ch) )
return idx + 1 ;
return -1 ;
}
private static int skipAny_PN_CHARS_or_DOT(String str, int idx, int max) {
for (int i = idx; i < max; i++) {
char ch = str.charAt(i) ;
if ( !is_PN_CHARS(ch) && ch != '.' )
return i ;
}
return max ;
}
private static int skip1_PN_CHARS(String str, int idx) {
char ch = str.charAt(idx) ;
if ( is_PN_CHARS(ch) )
return idx + 1 ;
return -1 ;
}
private static final String dtDecimal = XSDDatatype.XSDdecimal.getURI() ;
private static final String dtInteger = XSDDatatype.XSDinteger.getURI() ;
private static final String dtDouble = XSDDatatype.XSDdouble.getURI() ;
private static final String dtBoolean = XSDDatatype.XSDboolean.getURI() ;
@Override
public void formatLitDT(AWriter w, String lex, String datatypeURI) {
if ( dtDecimal.equals(datatypeURI) ) {
if ( validDecimal(lex) ) {
w.print(lex) ;
return ;
}
} else if ( dtInteger.equals(datatypeURI) ) {
if ( validInteger(lex) ) {
w.print(lex) ;
return ;
}
}
if ( dtDouble.equals(datatypeURI) ) {
if ( validDouble(lex) ) {
w.print(lex) ;
return ;
}
}
// Boolean
if ( dtBoolean.equals(datatypeURI) ) {
// We leave "0" and "1" as-is assumign that if written like that,
// there was a reason.
if ( lex.equals("true") || lex.equals("false") ) {
w.print(lex) ;
return ;
}
}
// else.
super.formatLitDT(w, lex, datatypeURI) ;
}
private static boolean validInteger(String lex) {
int N = lex.length() ;
if ( N == 0 )
return false ;
int idx = 0 ;
idx = skipSign(lex, idx) ;
idx = skipDigits(lex, idx) ;
return (idx == N) ;
}
private static boolean validDecimal(String lex) {
// Case: in N3, "." is illegal, as are "+." and "-.", but they are legal in Turtle.
int N = lex.length() ;
if ( N <= 1 )
return false ;
int idx = 0 ;
idx = skipSign(lex, idx) ;
idx = skipDigits(lex, idx) ; // Maybe none.
// DOT required.
if ( idx >= N )
return false ;
char ch = lex.charAt(idx) ;
if ( ch != '.' )
return false ;
idx++ ;
// Digit required.
if ( idx >= N )
return false ;
idx = skipDigits(lex, idx) ;
return (idx == N) ;
}
private static boolean validDouble(String lex) {
int N = lex.length() ;
if ( N == 0 )
return false ;
int idx = 0 ;
// Decimal part (except 12. is legal)
idx = skipSign(lex, idx) ;
int idx2 = skipDigits(lex, idx) ;
boolean initialDigits = (idx != idx2) ;
idx = idx2 ;
// Exponent required.
if ( idx >= N )
return false ;
char ch = lex.charAt(idx) ;
if ( ch == '.' ) {
idx++ ;
if ( idx >= N )
return false ;
idx2 = skipDigits(lex, idx) ;
boolean trailingDigits = (idx != idx2) ;
idx = idx2 ;
if ( idx >= N )
return false ;
if ( !initialDigits && !trailingDigits )
return false ;
}
// "e" or "E"
ch = lex.charAt(idx) ;
if ( ch != 'e' && ch != 'E' )
return false ;
idx++ ;
if ( idx >= N )
return false ;
idx = skipSign(lex, idx) ;
if ( idx >= N )
return false ; // At least one digit.
idx = skipDigits(lex, idx) ;
return (idx == N) ;
}
/**
* Skip digits [0-9] and return the index just after the digits, which may
* be beyond the length of the string. May skip zero.
*/
private static int skipDigits(String str, int start) {
int N = str.length() ;
for (int i = start; i < N; i++) {
char ch = str.charAt(i) ;
if ( !RiotChars.isDigit(ch) )
return i ;
}
return N ;
}
/** Skip any plus or minus */
private static int skipSign(String str, int idx) {
int N = str.length() ;
char ch = str.charAt(idx) ;
if ( ch == '+' || ch == '-' )
return idx + 1 ;
return idx ;
}
}
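// --- Hedged usage sketch (not part of the original source) -------------------
// Exercises the package-visible checks that formatURI relies on: a prefixed name
// is only emitted when both the prefix and the local name pass these tests.
// The class name NodeFormatterTTLExample is hypothetical.
class NodeFormatterTTLExample {
public static void main(String[] args) {
System.out.println(NodeFormatterTTL.safeForPrefix("ex")) ;              // true
System.out.println(NodeFormatterTTL.safeForPrefixLocalname("item1")) ;  // true
System.out.println(NodeFormatterTTL.safeForPrefixLocalname("a b")) ;    // false: space is not a PN_CHARS
}
}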
|
|
package crystal.util;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import java.util.Vector;
import org.apache.log4j.Logger;
import crystal.Constants;
import crystal.client.ClientPreferences;
/**
* RunIt executes system commands, captures their outputs, performs file system operations, and finds paths.
*
* @author rtholmes
* @author brun
*
*/
public class RunIt {
// the logger
public static Logger _log = Logger.getLogger(ClientPreferences.class);
/*
* Represents the output of a run command. Consists of:
* two strings: output and error,
* one int : exit status.
*/
public static class Output {
String _output;
String _error;
int _status;
Output(String output, String error, int status) {
_output = output;
_error = error;
_status = status;
}
public String getOutput() {
return _output;
}
public String getError() {
return _error;
}
public int getStatus() {
return _status;
}
@Override
public String toString() {
String answer = "Exit status: " + _status + "\n";
if (_error.length() > 0) {
answer += "*****-START-ERROR-*****\n";
answer += _error;
answer += "*****-END-ERROR-*****\n";
}
answer += "*****-START-OUTPUT-*****\n";
answer += _output;
answer += "*****-END-OUTPUT-*****\n";
return answer;
}
}
/**
* Runs a command twice. A not-nice hack for those times when executions don't seem to be coming out consistently.
*
* @param command
* @param args
* @param path
* @return
* @throws IOException
*/
public static Output executeTwice(String command, String[] args, String path, boolean getStatus) throws IOException {
execute(command, args, path, false);
Output result = execute(command, args, path, getStatus);
return result;
}
/**
* Executes a command.
*
* @param command
* @param args
* @param path
* @return
* @throws IOException
*/
public static Output execute(String command, String[] args, String path, boolean getStatus) throws IOException {
long start = System.currentTimeMillis();
ProcessBuilder builder = new ProcessBuilder();
File directory = new File(path);
assert directory.exists();
assert directory.isDirectory();
// Assert.assertTrue(directory.exists(), "Directory does not exist: " + path);
// Assert.assertTrue(directory.isDirectory(), "This is not a directory: " + path);
builder.directory(new File(path));
if (args == null || args.length == 0) {
builder.command(command);
} else {
List<String> cmd = new Vector<String>();
cmd.add(command);
for (String arg : args)
cmd.add(arg);
builder.command(cmd);
}
_log.info("\tRunIt::execute(..) - command: " + builder.command().toString() + "; in path: " + builder.directory());
if (Constants.DEBUG_RUNIT) {
System.out.println("\tRunIt::execute(..) - command: " + builder.command().toString() + "; in path: " + builder.directory());
}
Process proc = builder.start();
// configure the streams
BufferedInputStream err = new BufferedInputStream(proc.getErrorStream());
BufferedInputStream out = new BufferedInputStream(proc.getInputStream());
StreamCatcher outCatcher = new StreamCatcher(out);
Thread outCatcherThread = new Thread(outCatcher);
outCatcherThread.start();
StreamCatcher errCatcher = new StreamCatcher(err);
Thread errCatcherThread = new Thread(errCatcher);
errCatcherThread.start();
try {
errCatcherThread.join();
outCatcherThread.join();
_log.info("RunIt::execute(..) - Threads joined peacefully after: " + TimeUtility.msToHumanReadableDelta(start));
if (Constants.DEBUG_RUNIT) {
System.out.println("\t\tRunIt::execute(..) - Threads joined peacefully after: " + TimeUtility.msToHumanReadableDelta(start));
}
} catch (InterruptedException e) {
e.printStackTrace();
}
String goodOutput = outCatcher.getOutput();
String errOutput = errCatcher.getOutput();
int exitStatus;
if (getStatus) {
_log.info("Waiting for exit status of " + builder.command().toString() + "; in path: " + builder.directory());
try {
exitStatus = proc.waitFor();
} catch (InterruptedException e) {
_log.error("Encountered an interrupt exception while executing " + builder.command().toString() + "; in path: " + builder.directory());
exitStatus = -1;
}
} else
exitStatus = 0;
// String output = "";
//
// if (errOutput.length() > 0) {
// output += "*****-START-ERROR-*****\n";
// output += errOutput;
// output += "*****-END-ERROR-*****\n";
// }
//
// output += "*****-START-OUTPUT-*****\n";
// output += goodOutput;
// output += "*****-END-OUTPUT-*****\n";
// System.out.println("\t\tRunIt::execute(..) - output: " + output);
return new Output(goodOutput, errOutput, exitStatus);
}
/**
* Deletes the File pointed to by path.
* @param path: the File to delete
* @return true iff path is successfully deleted
*/
static public boolean deleteDirectory(File path) {
_log.trace("RunIt::deleteDirectory(..) - deleting " + path);
if (path.exists()) {
File[] files = path.listFiles();
for (int i = 0; i < files.length; i++) {
if (files[i].isDirectory()) {
deleteDirectory(files[i]);
} else {
files[i].delete();
}
}
}
boolean answer = path.delete();
if (answer)
_log.trace("RunIt::deleteDirectory(..) - " + path + " deleted successfully");
else
_log.warn("RunIt::deleteDirectory(..) - deleting " + path + " failed");
return answer;
}
/**
* Parses out the executable and the arguments from the command and executes the command.
* @param command: the command to run
* @param path: the path in which to run the command
* @return: the Output of the command's execution
* @throws IOException
*/
public static Output tryCommand(String command, String path) throws IOException {
StringTokenizer tokens = new StringTokenizer(command);
String executable;
List<String> argumentsList = new ArrayList<String>();
executable = tokens.nextToken();
while (tokens.hasMoreTokens()) {
argumentsList.add(tokens.nextToken());
}
return execute(executable, argumentsList.toArray(new String[0]), path, true);
}
/**
* @requires the executable is the first part of the executable parameter
* and executable has no spaces in it
* and arguments have no spaces in them
* @param executable: a String that can be run from the command line
* @return a String that runs the same command as executable, but that has an absolute path to the executable.
* If there is no such executable in the system PATH, returns null
*/
public static String getExecutable(String executable) {
if (executable == null)
return null;
if ((new File(executable)).exists())
return executable;
StringTokenizer args = new StringTokenizer(executable);
if (!(args.hasMoreTokens()))
return null;
String execPart = args.nextToken();
String arguments = "";
while (args.hasMoreTokens())
arguments += " " + args.nextToken();
if ((new File(execPart)).exists())
return executable;
String path = System.getenv("PATH");
StringTokenizer pathTokens = new StringTokenizer(path, File.pathSeparator);
while (pathTokens.hasMoreTokens()) {
String token = pathTokens.nextToken();
if ((new File(token + File.separator + execPart)).exists()) {
if (!(token.endsWith(File.separator)))
token += File.separator;
return token + executable;
}
if ((new File(token + File.separator + execPart + ".exe")).exists()) {
if (!(token.endsWith(File.separator)))
token += File.separator;
return token + execPart + ".exe" + arguments;
}
if ((token.endsWith("cmd")) || (token.endsWith("cmd" + File.separator))) {
token = token.replace("cmd","bin");
if ((new File(token + File.separator + execPart + ".exe")).exists()) {
if (!(token.endsWith(File.separator)))
token += File.separator;
return token + execPart + ".exe" + arguments;
}
}
}
// Could not find any executable
return null;
}
/**
*
* @param minimumVersion: the minimum version of hg required
* @param hg: the path to the hg executable
* @param tempPath: any valid path in which hg can be executed
* @return true iff hg points to a same or newer version of hg than the minimumVersion
* @throws IOException if hg cannot be run in tempPath
*
*/
public static boolean validHG(double minimumVersion, String hg, String tempPath) throws IOException {
String[] versionArgs = new String[1];
versionArgs[0] = "--version";
String output = execute(hg, versionArgs, tempPath, false).getOutput();
String versionStr = output.substring(output.indexOf("version") + 8, output.indexOf(")"));
double version = Double.parseDouble(versionStr.substring(0, versionStr.indexOf(".", versionStr.indexOf(".") + 1)));
boolean answer = (minimumVersion <= version);
if (answer)
_log.info("The version of hg checks out. (You are running " + versionStr + ", which is newer than the minimum required "+ minimumVersion + ".)");
else
_log.error("You are using an outdated hg. (You are running " + versionStr + ", which is older than the minimum required "+ minimumVersion + ".)");
return answer;
}
//TODO implement this method
public static boolean validGit(double minimumVersion, String git, String tempPath) throws Exception {
throw new Exception("not implemented yet");
}
}
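// --- Hedged usage sketch (not part of the original source) -------------------
// Demonstrates the two entry points of RunIt: tryCommand parses a full command
// line, while execute takes the executable and its arguments separately. The
// command and working directory used here are illustrative only.
class RunItExample {
public static void main(String[] args) throws IOException {
RunIt.Output version = RunIt.tryCommand("hg --version", ".");
System.out.println(version.getOutput());
RunIt.Output log = RunIt.execute("hg", new String[] { "log", "-l", "5" }, ".", true);
if (log.getStatus() != 0) {
System.err.println(log.getError());
}
}
}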
|
|
/* Copyright 2004-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.context.annotation;
import grails.util.BuildSettings;
import grails.util.BuildSettingsHolder;
import grails.util.Metadata;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.groovy.grails.plugins.GrailsPluginManager;
import org.codehaus.groovy.grails.plugins.PluginManagerHolder;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.beans.factory.xml.XmlReaderContext;
import org.springframework.context.annotation.ClassPathBeanDefinitionScanner;
import org.springframework.context.annotation.ComponentScanBeanDefinitionParser;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.type.filter.TypeFilter;
import org.springframework.util.AntPathMatcher;
import org.springframework.util.ReflectionUtils;
import org.w3c.dom.Element;
/**
* Extends Spring's default <context:component-scan/> element to ignore Groovy's
* generated closure classes.
*
* @author Graeme Rocher
* @author Lari Hotari
* @since 1.2
*/
public class ClosureClassIgnoringComponentScanBeanDefinitionParser extends ComponentScanBeanDefinitionParser{
private static final Log LOG = LogFactory.getLog(ClosureClassIgnoringComponentScanBeanDefinitionParser.class);
@Override
protected ClassPathBeanDefinitionScanner createScanner(XmlReaderContext readerContext, boolean useDefaultFilters) {
final ClassPathBeanDefinitionScanner scanner = super.createScanner(readerContext, useDefaultFilters);
GrailsPluginManager pluginManager = PluginManagerHolder.getPluginManager();
if (pluginManager != null) {
List<TypeFilter> typeFilters = pluginManager.getTypeFilters();
for (TypeFilter typeFilter : typeFilters) {
scanner.addIncludeFilter(typeFilter);
}
}
return scanner;
}
/**
* This ClassLoader is used to restrict getResources & getResource methods only to the
* parent ClassLoader. getResources/getResource usually search all parent level classloaders.
* (look at details in source code of java.lang.ClassLoader.getResources)
*
* @author Lari Hotari
*/
private static final class ParentOnlyGetResourcesClassLoader extends ClassLoader {
private final Method findResourcesMethod=ReflectionUtils.findMethod(ClassLoader.class, "findResources", String.class);
private final Method findResourceMethod=ReflectionUtils.findMethod(ClassLoader.class, "findResource", String.class);
private ClassLoader rootLoader;
public ParentOnlyGetResourcesClassLoader(ClassLoader parent) {
super(parent);
this.rootLoader = DefaultGroovyMethods.getRootLoader(parent);
ReflectionUtils.makeAccessible(findResourceMethod);
ReflectionUtils.makeAccessible(findResourcesMethod);
}
@Override
public Enumeration<URL> getResources(String name) throws IOException {
if (rootLoader != null) {
// search all parents up to rootLoader
Collection<URL> urls=new LinkedHashSet<URL>();
findResourcesRecursive(getParent(), name, urls);
return Collections.enumeration(urls);
}
return invokeFindResources(getParent(), name);
}
private void findResourcesRecursive(ClassLoader parent, String name, Collection<URL> urls) {
Enumeration<URL> result = invokeFindResources(parent, name);
while (result.hasMoreElements()) {
urls.add(result.nextElement());
}
if (parent != rootLoader) {
findResourcesRecursive(parent.getParent(), name, urls);
}
}
@SuppressWarnings("unchecked")
private Enumeration<URL> invokeFindResources(ClassLoader parent, String name) {
return (Enumeration<URL>)ReflectionUtils.invokeMethod(findResourcesMethod, parent, name);
}
@Override
public URL getResource(String name) {
if (rootLoader != null) {
return findResourceRecursive(getParent(), name);
}
return invokeFindResource(getParent(), name);
}
private URL findResourceRecursive(ClassLoader parent, String name) {
URL url = invokeFindResource(parent, name);
if (url != null) {
return url;
}
if (parent != rootLoader) {
return findResourceRecursive(parent.getParent(), name);
}
return null;
}
private URL invokeFindResource(ClassLoader parent, String name) {
return (URL)ReflectionUtils.invokeMethod(findResourceMethod, parent, name);
}
}
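// Illustrative behaviour sketch (the resource name is hypothetical):
//   ClassLoader restricted = new ParentOnlyGetResourcesClassLoader(someLoader);
//   Enumeration<URL> urls = restricted.getResources("spring/resources.groovy");
// collects only the URLs that findResources() yields on someLoader and its ancestors up to the
// Grails root loader, instead of the full delegation that ClassLoader.getResources would perform.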
@Override
protected ClassPathBeanDefinitionScanner configureScanner(ParserContext parserContext, Element element) {
final ClassPathBeanDefinitionScanner scanner = super.configureScanner(parserContext, element);
final ResourceLoader originalResourceLoader = parserContext.getReaderContext().getResourceLoader();
if (LOG.isDebugEnabled()) {
LOG.debug("Scanning only this classloader:" + originalResourceLoader.getClassLoader());
}
ResourceLoader parentOnlyResourceLoader;
try {
parentOnlyResourceLoader = new ResourceLoader() {
ClassLoader parentOnlyGetResourcesClassLoader = new ParentOnlyGetResourcesClassLoader(originalResourceLoader.getClassLoader());
public Resource getResource(String location) {
return originalResourceLoader.getResource(location);
}
public ClassLoader getClassLoader() {
return parentOnlyGetResourcesClassLoader;
}
};
}
catch (Throwable t) {
// restrictive classloading environment, use the original
parentOnlyResourceLoader = originalResourceLoader;
}
final PathMatchingResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver(parentOnlyResourceLoader) {
@Override
protected Resource[] findAllClassPathResources(String location) throws IOException {
Set<Resource> result = new LinkedHashSet<Resource>(16);
@SuppressWarnings("unused")
URL classesDir = null;
final boolean warDeployed = Metadata.getCurrent().isWarDeployed();
if (!warDeployed) {
BuildSettings buildSettings = BuildSettingsHolder.getSettings();
if (buildSettings != null && buildSettings.getClassesDir()!=null) {
classesDir = buildSettings.getClassesDir().toURI().toURL();
}
}
// only scan classes from project classes directory
String path = location;
if (path.startsWith("/")) {
path = path.substring(1);
}
Enumeration<URL> resourceUrls = getClassLoader().getResources(path);
while (resourceUrls.hasMoreElements()) {
URL url = resourceUrls.nextElement();
if (LOG.isDebugEnabled()) {
LOG.debug("Scanning URL " + url.toExternalForm() + " while searching for '" + location + "'");
}
/*
if (!warDeployed && classesDir!= null && url.equals(classesDir)) {
result.add(convertClassLoaderURL(url));
}
else if (warDeployed){
result.add(convertClassLoaderURL(url));
}
*/
result.add(convertClassLoaderURL(url));
}
return result.toArray(new Resource[result.size()]);
}
};
resourceResolver.setPathMatcher(new AntPathMatcher(){
@Override
public boolean match(String pattern, String path) {
if (path.endsWith(".class")) {
String filename = FilenameUtils.getBaseName(path);
if (filename.indexOf("$")>-1) return false;
}
return super.match(pattern, path);
}
});
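// Example of what the matcher above filters out (hypothetical class name): a Groovy closure
// compiled as BookController$_closure1.class has a '$' in its base name and is skipped, while
// BookController.class itself remains eligible for component scanning.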
scanner.setResourceLoader(resourceResolver);
return scanner;
}
}
|
|
/*
* This file is part of the Heritrix web crawler (crawler.archive.org).
*
* Licensed to the Internet Archive (IA) by one or more individual
* contributors.
*
* The IA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.archive.io.warc;
import it.unimi.dsi.fastutil.io.RepositionableStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpParser;
import org.archive.io.ArchiveRecord;
import org.archive.io.ArchiveRecordHeader;
import org.archive.util.LaxHttpParser;
/**
* A WARC file Record.
*
* @author stack
*/
public class WARCRecord extends ArchiveRecord implements WARCConstants {
private Pattern WHITESPACE = Pattern.compile("\\s");
/**
* Constructor.
*
* @param in Stream cue'd up to be at the start of the record this instance
* is to represent.
* @throws IOException
*/
public WARCRecord(InputStream in, final String identifier,
final long offset)
throws IOException {
this(in, identifier, offset, true, false);
}
/**
* Constructor.
* @param in Stream cue'd up just past Header Line and Named Fields.
* @param headers Header Line and ANVL Named fields.
* @throws IOException
*/
public WARCRecord(InputStream in, ArchiveRecordHeader headers)
throws IOException {
super(in, headers, 0, true, false);
}
/**
* Constructor.
*
* @param in Stream cue'd up to be at the start of the record this instance
* is to represent or, if <code>headers</code> is not null, just past the
* Header Line and Named Fields.
* @param identifier Identifier for this the hosting Reader.
* @param offset Current offset into <code>in</code> (Used to keep
* <code>position</code> properly aligned). Usually 0.
* @param digest True if we're to calculate digest for this record. Not
* digesting saves roughly 15% of cpu during parse.
* @param strict Be strict parsing (parsing stops if the file is improperly
* formatted).
* @throws IOException
*/
public WARCRecord(final InputStream in, final String identifier,
final long offset, boolean digest, boolean strict)
throws IOException {
super(in, null, 0, digest, strict);
setHeader(parseHeaders(in, identifier, offset, strict));
}
/**
* Parse WARC Header Line and Named Fields.
* @param in Stream to read.
* @param identifier Identifier for the hosting Reader.
* @param offset Absolute offset into Reader.
* @param strict Whether to be loose parsing or not.
* @return An ArchiveRecordHeader.
* @throws IOException
*/
protected ArchiveRecordHeader parseHeaders(final InputStream in,
final String identifier, final long offset, final boolean strict)
throws IOException {
final Map<String, Object> m = new HashMap<String, Object>();
m.put(ABSOLUTE_OFFSET_KEY, new Long(offset));
m.put(READER_IDENTIFIER_FIELD_KEY, identifier);
long startPosition = -1;
if (in instanceof RepositionableStream) {
startPosition = ((RepositionableStream)in).position();
}
String firstLine =
new String(LaxHttpParser.readLine(in, WARC_HEADER_ENCODING));
if (firstLine == null || firstLine.length() <=0) {
throw new IOException("Failed to read WARC_MAGIC");
}
if (!firstLine.startsWith(WARC_MAGIC)) {
throw new IOException("Failed to find WARC MAGIC: " + firstLine);
}
// Here we start reading off the inputstream but we're reading the
// stream direct rather than going via WARCRecord#read. The latter will
// keep count of bytes read, digest and fail properly if EOR too soon...
// We don't want digesting while reading Headers.
//
Header [] h = LaxHttpParser.parseHeaders(in, WARC_HEADER_ENCODING);
for (int i = 0; i < h.length; i++) {
m.put(h[i].getName(), h[i].getValue());
}
int headerLength = -1;
if (in instanceof RepositionableStream) {
headerLength =
(int)(((RepositionableStream)in).position() - startPosition);
}
final int contentOffset = headerLength;
incrementPosition(contentOffset);
return new ArchiveRecordHeader() {
private Map<String, Object> headers = m;
private int contentBegin = contentOffset;
public String getDate() {
return (String)this.headers.get(HEADER_KEY_DATE);
}
public String getDigest() {
return null;
// TODO: perhaps return block-digest?
// superclass def implies this is calculated ("only after
// read in totality"), not pulled from header, so
// below prior implementation was misleading
// return (String)this.headers.get(HEADER_KEY_CHECKSUM);
}
public String getReaderIdentifier() {
return (String)this.headers.get(READER_IDENTIFIER_FIELD_KEY);
}
public Set<String> getHeaderFieldKeys() {
return this.headers.keySet();
}
public Map<String,Object> getHeaderFields() {
return this.headers;
}
public Object getHeaderValue(String key) {
return this.headers.get(key);
}
// Returns just the Content-Length of the warc record
public long getContentLength() {
Object o = this.headers.get(CONTENT_LENGTH);
if (o == null) {
return -1;
}
long contentLength = (o instanceof Long)?
((Long)o).longValue(): Long.parseLong((String)o);
return contentLength;
}
// Returns the full record length
public long getLength()
{
return getContentLength() + contentOffset;
}
public String getMimetype() {
return (String)this.headers.get(CONTENT_TYPE);
}
public long getOffset() {
Object o = this.headers.get(ABSOLUTE_OFFSET_KEY);
if (o == null) {
return -1;
}
return (o instanceof Long)?
((Long)o).longValue(): Long.parseLong((String)o);
}
public String getRecordIdentifier() {
return (String)this.headers.get(RECORD_IDENTIFIER_FIELD_KEY);
}
public String getUrl() {
return (String)this.headers.get(HEADER_KEY_URI);
}
public String getVersion() {
return (String)this.headers.get(VERSION_FIELD_KEY);
}
public int getContentBegin() {
return this.contentBegin;
}
@Override
public String toString() {
return this.headers.toString();
}
};
}
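// Illustrative record header (all values hypothetical) of the kind parseHeaders() consumes:
//   WARC/1.0
//   WARC-Type: response
//   WARC-Date: 2009-05-01T12:00:00Z
//   WARC-Record-ID: <urn:uuid:00000000-0000-0000-0000-000000000000>
//   Content-Type: application/http; msgtype=response
//   Content-Length: 4321
// i.e. the WARC_MAGIC version line followed by named fields, after which the record content
// of Content-Length bytes begins.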
@Override
protected String getMimetype4Cdx(ArchiveRecordHeader h) {
final String m = super.getMimetype4Cdx(h);
// Mimetypes can have spaces in WARCs. Emitting for CDX, just
// squash them for now. Later, quote them since squashing spaces won't
// work for params that have quoted-string values.
Matcher matcher = WHITESPACE.matcher(m);
return matcher.replaceAll("");
}
}
|
|
package se.cygni.snake.player.bot.snakey;
import se.cygni.snake.api.model.SnakeDirection;
import se.cygni.snake.client.MapCoordinate;
import java.util.*;
/**
* Created by trivo on 2017-03-29.
*/
public class SnakeState {
private int mapHeight;
private int mapWidth;
private Snake self;
private HashSet<Snake> foeSet;
private HashSet<MapCoordinate> obstacleSet;
private HashSet<MapCoordinate> visitedTiles;
private boolean isKilledFoeState;
//only for updateState
public SnakeState(int height, int width, Snake self, ArrayList<Snake> foes, MapCoordinate[] obstacles){
this(height, width, self, foes, new HashSet<>(), false);
for(MapCoordinate obstacle : obstacles){
obstacleSet.add(obstacle);
}
}
// for future state
public SnakeState(int height, int width, Snake self, ArrayList<Snake> foes, HashSet<MapCoordinate> obstacles, boolean killed){
this.mapHeight = height;
this.mapWidth = width;
this.self = self;
this.isKilledFoeState = killed;
obstacleSet = new HashSet<>();
foeSet = new HashSet<>();
for(Iterator<Snake> snakeIt = foes.iterator(); snakeIt.hasNext(); ){
Snake foe = snakeIt.next();
foeSet.add(foe);
}
for(MapCoordinate obstacle : obstacles){
obstacleSet.add(obstacle);
}
}
public void updateSnakeState(MapCoordinate selfNewHead, HashSet<Snake> foes){
self.updatePos(selfNewHead);
foeSet = foes;
}
//Creates a new, possible state from a given state and a direction of movement
public SnakeState createFutureState(SnakeDirection dir){
Snake newSelf = new Snake(self);
newSelf.setDir(dir);
newSelf.stepOnePos();
boolean killed = false;
//TODO: Predict behaviour of foes here - target face
ArrayList<Snake> newFoes = new ArrayList<>();
for(Snake foe : foeSet) {
Snake newFoe = new Snake(foe);
estimateNewHead(newFoe); // speculative: remove this call and related handling if it causes problems
if(newFoe.getLength() != 0){
newFoes.add(newFoe);
} else {
killed = true;
}
}
return new SnakeState(mapHeight, mapWidth, newSelf, newFoes, obstacleSet, killed);
}
private void estimateNewHead(Snake snake){
if(canSnakeMoveInDirection(snake, snake.getDir())){
snake.stepOnePos();
} else {
SnakeDirection possibleDir = estimateFoeDirection(snake);
if(possibleDir == null){
snake.kill();
} else {
snake.setDir(possibleDir);
snake.stepOnePos();
}
}
}
public boolean getIsKilledFoeState(){
return isKilledFoeState;
}
private SnakeDirection estimateFoeDirection(Snake foe){
ArrayList<SnakeDirection> possibleDirections = new ArrayList<>();
for(SnakeDirection dir : SnakeDirection.values()){
if(canSnakeMoveInDirection(foe, dir)){
possibleDirections.add(dir);
}
}
int possibleDirectionsNbr = possibleDirections.size();
if (possibleDirectionsNbr > 1){
return findTargetDirection(foe);
} else if (possibleDirectionsNbr == 1){
return possibleDirections.get(0);
}
return null;
}
private SnakeDirection findTargetDirection(Snake foe){
MapCoordinate playerHead = self.getHead();
MapCoordinate foeHead = foe.getHead();
SnakeDirection foeDir = foe.getDir();
if(foeDir.equals(SnakeDirection.DOWN) ||foeDir.equals(SnakeDirection.UP)){
if(playerHead.x <= foeHead.x){
return SnakeDirection.LEFT;
} else {
return SnakeDirection.RIGHT;
}
} else {
if(playerHead.y <= foeHead.y){
return SnakeDirection.UP;
} else {
return SnakeDirection.DOWN;
}
}
}
public boolean canIMoveInDirection(SnakeDirection dir){
return canSnakeMoveInDirection(self, dir);
}
private boolean isMoveOutOfBounds(Snake snake, SnakeDirection dir){
MapCoordinate head = snake.getHead();
switch (dir){
case LEFT:
return (head.x - 1) < 0;
case RIGHT:
return (head.x + 2) > mapWidth;
case DOWN:
return (head.y + 2) > mapHeight;
case UP:
return (head.y - 1) < 0;
default:
return false;
}
}
private boolean canSnakeMoveInDirection(Snake snake, SnakeDirection dir){
MapCoordinate snakeHead = snake.getHead();
if(isMoveOutOfBounds(snake, dir)){
return false;
}
HashSet<MapCoordinate>totalObstacleSet = getTotalSet();
switch(dir){
case LEFT:
return !totalObstacleSet.contains(snakeHead.translateBy(-1, 0));
case RIGHT:
return !totalObstacleSet.contains(snakeHead.translateBy(1, 0));
case DOWN:
return !totalObstacleSet.contains(snakeHead.translateBy(0, 1));
case UP:
return !totalObstacleSet.contains(snakeHead.translateBy(0, -1));
default:
return false;
}
}
//TODO: Generalize this for any snake?
public int getOpenSpacesinDir(SnakeDirection dir){
visitedTiles = new HashSet<>();
SnakeState futureState = createFutureState(dir);
HashSet<MapCoordinate> obstacles = getTotalSet();
// mark the new head as visited so the flood fill below does not count it as open space
visitedTiles.add(futureState.getSelf().getHead());
return getOpenSpacesRec(futureState, futureState.getSelf().getHead(), obstacles);
}
private int getOpenSpacesRec(SnakeState state, MapCoordinate tile, HashSet<MapCoordinate> obstacleSet){
MapCoordinate leftTile = tile.translateBy(-1, 0);
MapCoordinate rightTile = tile.translateBy(1, 0);
MapCoordinate upTile = tile.translateBy(0, -1);
MapCoordinate downTile = tile.translateBy(0, 1);
if(!isCoordinateOutOfBounds(leftTile) && !obstacleSet.contains(leftTile) && visitedTiles.add(leftTile)){
getOpenSpacesRec(state, leftTile, obstacleSet);
}
if(!isCoordinateOutOfBounds(rightTile) && !obstacleSet.contains(rightTile) && visitedTiles.add(rightTile)){
getOpenSpacesRec(state, rightTile, obstacleSet);
}
if(!isCoordinateOutOfBounds(upTile) && !obstacleSet.contains(upTile) && visitedTiles.add(upTile)){
getOpenSpacesRec(state, upTile, obstacleSet);
}
if(!isCoordinateOutOfBounds(downTile) && !obstacleSet.contains(downTile) && visitedTiles.add(downTile)){
getOpenSpacesRec(state, downTile, obstacleSet);
}
return visitedTiles.size();
}
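// A hedged alternative sketch (not wired into the bot): the recursive flood fill above can get
// deep on large open maps, so an equivalent iterative breadth-first version is shown here for
// illustration. It reuses the visitedTiles field and helpers of this class; the method name is
// an assumption.
private int getOpenSpacesIterative(MapCoordinate start, HashSet<MapCoordinate> obstacles) {
    ArrayDeque<MapCoordinate> frontier = new ArrayDeque<>();
    frontier.add(start);
    while (!frontier.isEmpty()) {
        MapCoordinate tile = frontier.poll();
        MapCoordinate[] neighbours = {
                tile.translateBy(-1, 0), tile.translateBy(1, 0),
                tile.translateBy(0, -1), tile.translateBy(0, 1)
        };
        for (MapCoordinate next : neighbours) {
            // count a tile once: in bounds, not blocked, and not seen before
            if (!isCoordinateOutOfBounds(next) && !obstacles.contains(next) && visitedTiles.add(next)) {
                frontier.add(next);
            }
        }
    }
    return visitedTiles.size();
}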
public boolean isCoordinateOutOfBounds(MapCoordinate coordinate) {
return coordinate.x < 0 || coordinate.x >= mapWidth || coordinate.y < 0 || coordinate.y >= mapHeight;
}
public HashSet<MapCoordinate> getTotalSet(){
HashSet<MapCoordinate> totalSet = new HashSet<>();
totalSet.addAll(obstacleSet);
totalSet.addAll(self.getBodySet());
Iterator<Snake> foeIT = foeSet.iterator();
while(foeIT.hasNext()){
Snake foe = foeIT.next();
totalSet.addAll(foe.getBodySet());
}
return totalSet;
}
public int getMapHeight(){
return mapHeight;
}
public int getMapWidth(){
return mapWidth;
}
public Snake getSelf(){
return self;
}
public Collection<Snake> getFoes() {
return foeSet;
}
}
|
|
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.sessions.infinispan.remotestore;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
import org.infinispan.client.hotrod.event.ClientCacheEntryCreatedEvent;
import org.infinispan.client.hotrod.event.ClientCacheEntryModifiedEvent;
import org.infinispan.client.hotrod.event.ClientCacheEntryRemovedEvent;
import org.infinispan.client.hotrod.event.ClientEvent;
import org.jboss.logging.Logger;
import org.keycloak.common.util.MultivaluedHashMap;
import org.keycloak.common.util.Time;
import static org.infinispan.client.hotrod.event.ClientEvent.Type.CLIENT_CACHE_ENTRY_CREATED;
import static org.infinispan.client.hotrod.event.ClientEvent.Type.CLIENT_CACHE_ENTRY_REMOVED;
/**
* @author <a href="mailto:[email protected]">Marek Posolda</a>
*/
public class ClientListenerExecutorDecorator<K> {
private static final Logger logger = Logger.getLogger(ClientListenerExecutorDecorator.class);
private final Object lock = new Object();
private final ExecutorService decorated;
// Both "eventsInProgress" and "eventsQueue" maps are guarded by the "lock", so doesn't need to be concurrency safe
// Events currently submitted to the ExecutorService
private Map<K, MyClientEvent> eventsInProgress = new HashMap<>();
// Queue of the events waiting to process. We don't want events of same key to be processed concurrently
private MultivaluedHashMap<K, MyClientEventContext> eventsQueue = new MultivaluedHashMap<>();
public ClientListenerExecutorDecorator(ExecutorService decorated) {
this.decorated = decorated;
}
// Explicitly use 3 submit methods to ensure that different type of ClientEvent is not used
public void submit(ClientCacheEntryCreatedEvent<K> cacheEntryCreatedEvent, Runnable r) {
MyClientEvent event = convertIspnClientEvent(cacheEntryCreatedEvent);
submit(event, r);
}
public void submit(ClientCacheEntryModifiedEvent<K> cacheEntryModifiedEvent, Runnable r) {
MyClientEvent event = convertIspnClientEvent(cacheEntryModifiedEvent);
submit(event, r);
}
public void submit(ClientCacheEntryRemovedEvent<K> cacheEntryRemovedEvent, Runnable r) {
MyClientEvent event = convertIspnClientEvent(cacheEntryRemovedEvent);
submit(event, r);
}
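// Hedged usage sketch (listener and helper names are hypothetical): a Hot Rod @ClientListener
// would typically hand each raw event plus its processing Runnable to this decorator, e.g.
//   @ClientCacheEntryCreated
//   public void created(ClientCacheEntryCreatedEvent<K> event) {
//       executorDecorator.submit(event, () -> processCreated(event.getKey()));
//   }
// so that events for the same key are executed one at a time and in a sensible order.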
// IMPL
private void submit(MyClientEvent event, Runnable r) {
K key = event.key;
synchronized (lock) {
if (!eventsInProgress.containsKey(key)) {
submitImpl(key, event, r);
} else {
putEventToTheQueue(key, event, r);
}
}
}
// Assume it's called from the synchronized block
private void submitImpl(K key, MyClientEvent event, Runnable r) {
logger.debugf("Submitting event to the executor: %s . eventsInProgress size: %d, eventsQueue size: %d", event.toString(), eventsInProgress.size(), eventsQueue.size());
eventsInProgress.put(key, event);
Runnable decoratedRunnable = () -> {
Long start = null;
try {
if (logger.isDebugEnabled()) {
start = Time.currentTimeMillis();
}
r.run();
} finally {
synchronized (lock) {
eventsInProgress.remove(key);
if (logger.isDebugEnabled()) {
long took = Time.currentTimeMillis() - start;
logger.debugf("Finished processing event by the executor: %s, took: %d ms. EventsInProgress size: %d", event.toString(), took, eventsInProgress.size());
}
pollQueue(key);
}
}
};
try {
decorated.submit(decoratedRunnable);
} catch (RejectedExecutionException ree) {
eventsInProgress.remove(key);
logger.errorf("Rejected execution of task for the event '%s' . Try to increase the pool size. Pool is '%s'", event.toString(), decorated.toString());
throw ree;
}
}
// Assume it's called from the synchronized block
private void pollQueue(K key) {
if (eventsQueue.containsKey(key)) {
List<MyClientEventContext> events = eventsQueue.get(key);
if (events.size() > 0) {
MyClientEventContext nextEvent = events.remove(0);
// Was last event in the queue for that key
if (events.size() == 0) {
eventsQueue.remove(key);
}
submitImpl(key, nextEvent.event, nextEvent.r);
} else {
// Shouldn't happen
throw new IllegalStateException("Illegal state. Size was 0 for key " + key);
}
}
}
// Assume it's called from the synchronized block
private void putEventToTheQueue(K key, MyClientEvent event, Runnable r) {
logger.debugf("Calling putEventToTheQueue: %s", event.toString());
if (!eventsQueue.containsKey(key)) {
eventsQueue.putSingle(key, new MyClientEventContext(event, r));
} else {
List<MyClientEventContext> existingEvents = eventsQueue.get(key);
MyClientEventContext myNewEvent = new MyClientEventContext(event, r);
// Try to optimize queue (EG. in case we have REMOVE event, we can ignore the previous CREATE or MODIFIED events)
switch (event.type) {
case CLIENT_CACHE_ENTRY_CREATED:
boolean add = true;
for (MyClientEventContext ctx : existingEvents) {
if (ctx.event.type == CLIENT_CACHE_ENTRY_REMOVED) {
// Ignore. TODO: Log me?
add = false;
break;
} else if (ctx.event.type == CLIENT_CACHE_ENTRY_CREATED) {
// Ignore. Already on the list
add = false;
break;
}
}
// Add to the beginning before the MODIFIED events
if (add) {
existingEvents.add(0, myNewEvent);
}
break;
case CLIENT_CACHE_ENTRY_MODIFIED:
    boolean addd = true;
    for (int i = 0; i < existingEvents.size(); i++) {
        MyClientEventContext ctx = existingEvents.get(i);
        if (ctx.event.type == CLIENT_CACHE_ENTRY_REMOVED) {
            // Ignore.
            addd = false;
            break;
        } else if (ctx.event.type == CLIENT_CACHE_ENTRY_CREATED) {
            // Shift to the next element. CREATE events go first.
        } else {
            // Can ignore the previous MODIFY event if we have a newer version
            if (ctx.event.version < myNewEvent.event.version) {
                existingEvents.remove(i);
            } else {
                addd = false;
            }
        }
    }
    // Append only after the whole queue for this key has been examined
    if (addd) {
        // Add to the end
        existingEvents.add(myNewEvent);
    }
    break;
case CLIENT_CACHE_ENTRY_REMOVED:
// Can just ignore the other events in the queue in case of REMOVE
eventsQueue.putSingle(key, new MyClientEventContext(event, r));
break;
default:
throw new IllegalStateException("Unsupported event type: " + event.type);
}
}
logger.debugf("Event queued. Current events for the key '%s': %s", key.toString(), eventsQueue.getList(key));
}
public MyClientEvent convertIspnClientEvent(ClientEvent ispnClientEvent) {
if (ispnClientEvent instanceof ClientCacheEntryCreatedEvent) {
ClientCacheEntryCreatedEvent<K> ev = (ClientCacheEntryCreatedEvent<K>) ispnClientEvent;
return new MyClientEvent(ev.getKey(), ev.getVersion(), ev.getType());
} else if (ispnClientEvent instanceof ClientCacheEntryModifiedEvent) {
ClientCacheEntryModifiedEvent<K> ev = (ClientCacheEntryModifiedEvent<K>) ispnClientEvent;
return new MyClientEvent(ev.getKey(), ev.getVersion(), ev.getType());
} else if (ispnClientEvent instanceof ClientCacheEntryRemovedEvent) {
ClientCacheEntryRemovedEvent<K> ev = (ClientCacheEntryRemovedEvent<K>) ispnClientEvent;
return new MyClientEvent(ev.getKey(), -1l, ev.getType());
} else {
throw new IllegalStateException("Unsupported event type: " + ispnClientEvent.getType());
}
}
private class MyClientEventContext {
private final MyClientEvent event;
private final Runnable r;
private MyClientEventContext(MyClientEvent event, Runnable r) {
this.event = event;
this.r = r;
}
@Override
public String toString() {
return event.toString();
}
}
// Using separate class as ISPN ClientEvent type doesn't provide access to key and version :/
private class MyClientEvent {
private final K key;
private final long version;
private final ClientEvent.Type type;
private MyClientEvent(K key, long version, ClientEvent.Type type) {
this.key = key;
this.version = version;
this.type = type;
}
@Override
public String toString() {
return String.format("ClientEvent [ type=%s, key=%s, version=%d ]", type, key, version);
}
}
}
|
|
package org.wso2.developerstudio.datamapper.diagram.edit.parts;
import java.util.Collections;
import java.util.List;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.transaction.RunnableWithResult;
import org.eclipse.gef.AccessibleEditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.requests.DirectEditRequest;
import org.eclipse.gef.tools.DirectEditManager;
import org.eclipse.gmf.runtime.common.ui.services.parser.IParser;
import org.eclipse.gmf.runtime.common.ui.services.parser.IParserEditStatus;
import org.eclipse.gmf.runtime.common.ui.services.parser.ParserEditStatus;
import org.eclipse.gmf.runtime.common.ui.services.parser.ParserOptions;
import org.eclipse.gmf.runtime.diagram.ui.editparts.CompartmentEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.ITextAwareEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.LabelDirectEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.l10n.DiagramColorRegistry;
import org.eclipse.gmf.runtime.diagram.ui.label.ILabelDelegate;
import org.eclipse.gmf.runtime.diagram.ui.label.WrappingLabelDelegate;
import org.eclipse.gmf.runtime.diagram.ui.requests.RequestConstants;
import org.eclipse.gmf.runtime.diagram.ui.tools.TextDirectEditManager;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter;
import org.eclipse.gmf.runtime.emf.ui.services.parser.ISemanticParser;
import org.eclipse.gmf.runtime.notation.FontStyle;
import org.eclipse.gmf.runtime.notation.NotationPackage;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.gmf.tooling.runtime.directedit.TextDirectEditManager2;
import org.eclipse.gmf.tooling.runtime.draw2d.labels.SimpleLabelDelegate;
import org.eclipse.gmf.tooling.runtime.edit.policies.DefaultNodeLabelDragPolicy;
import org.eclipse.gmf.tooling.runtime.edit.policies.labels.IRefreshableFeedbackEditPolicy;
import org.eclipse.jface.text.contentassist.IContentAssistProcessor;
import org.eclipse.jface.viewers.ICellEditorValidator;
import org.eclipse.swt.SWT;
import org.eclipse.swt.accessibility.AccessibleEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.Image;
import org.wso2.developerstudio.datamapper.diagram.edit.policies.DataMapperTextSelectionEditPolicy;
import org.wso2.developerstudio.datamapper.diagram.part.DataMapperVisualIDRegistry;
import org.wso2.developerstudio.datamapper.diagram.providers.DataMapperElementTypes;
import org.wso2.developerstudio.datamapper.diagram.providers.DataMapperParserProvider;
/**
* @generated
*/
public class ElementNameEditPart extends CompartmentEditPart implements ITextAwareEditPart {
/**
* @generated
*/
public static final int VISUAL_ID = 5004;
/**
* @generated
*/
private DirectEditManager manager;
/**
* @generated
*/
private IParser parser;
/**
* @generated
*/
private List<?> parserElements;
/**
* @generated
*/
private String defaultText;
/**
* @generated
*/
private ILabelDelegate labelDelegate;
/**
* @generated
*/
public ElementNameEditPart(View view) {
super(view);
}
/**
* @generated
*/
protected void createDefaultEditPolicies() {
super.createDefaultEditPolicies();
installEditPolicy(EditPolicy.SELECTION_FEEDBACK_ROLE,
new DataMapperTextSelectionEditPolicy());
installEditPolicy(EditPolicy.DIRECT_EDIT_ROLE, new LabelDirectEditPolicy());
installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE, new DefaultNodeLabelDragPolicy());
}
/**
* @generated
*/
protected String getLabelTextHelper(IFigure figure) {
if (figure instanceof WrappingLabel) {
return ((WrappingLabel) figure).getText();
} else if (figure instanceof Label) {
return ((Label) figure).getText();
} else {
return getLabelDelegate().getText();
}
}
/**
* @generated
*/
protected void setLabelTextHelper(IFigure figure, String text) {
if (figure instanceof WrappingLabel) {
((WrappingLabel) figure).setText(text);
} else if (figure instanceof Label) {
((Label) figure).setText(text);
} else {
getLabelDelegate().setText(text);
}
}
/**
* @generated
*/
protected Image getLabelIconHelper(IFigure figure) {
if (figure instanceof WrappingLabel) {
return ((WrappingLabel) figure).getIcon();
} else if (figure instanceof Label) {
return ((Label) figure).getIcon();
} else {
return getLabelDelegate().getIcon(0);
}
}
/**
* @generated
*/
protected void setLabelIconHelper(IFigure figure, Image icon) {
if (figure instanceof WrappingLabel) {
((WrappingLabel) figure).setIcon(icon);
return;
} else if (figure instanceof Label) {
((Label) figure).setIcon(icon);
return;
} else {
getLabelDelegate().setIcon(icon, 0);
}
}
/**
* @generated NOT
*/
public void setLabel(WrappingLabel figure) {
unregisterVisuals();
setFigure(figure);
defaultText = getLabelTextHelper(figure);
registerVisuals();
refreshVisuals();
figure.setFont(new Font(null, "Arial", 10, SWT.BOLD));
}
/**
* @generated
*/
@SuppressWarnings("rawtypes")
protected List getModelChildren() {
return Collections.EMPTY_LIST;
}
/**
* @generated
*/
public IGraphicalEditPart getChildBySemanticHint(String semanticHint) {
return null;
}
/**
* @generated
*/
protected EObject getParserElement() {
return resolveSemanticElement();
}
/**
* @generated
*/
protected Image getLabelIcon() {
return null;
}
/**
* @generated
*/
protected String getLabelText() {
String text = null;
EObject parserElement = getParserElement();
if (parserElement != null && getParser() != null) {
text = getParser().getPrintString(new EObjectAdapter(parserElement),
getParserOptions().intValue());
}
if (text == null || text.length() == 0) {
text = defaultText;
}
return text;
}
/**
* @generated
*/
public void setLabelText(String text) {
setLabelTextHelper(getFigure(), text);
refreshSelectionFeedback();
}
/**
* @generated
*/
public String getEditText() {
if (getParserElement() == null || getParser() == null) {
return ""; //$NON-NLS-1$
}
return getParser().getEditString(new EObjectAdapter(getParserElement()),
getParserOptions().intValue());
}
/**
* @generated
*/
protected boolean isEditable() {
return getParser() != null;
}
/**
* @generated
*/
public ICellEditorValidator getEditTextValidator() {
return new ICellEditorValidator() {
public String isValid(final Object value) {
if (value instanceof String) {
final EObject element = getParserElement();
final IParser parser = getParser();
try {
IParserEditStatus valid = (IParserEditStatus) getEditingDomain()
.runExclusive(new RunnableWithResult.Impl<IParserEditStatus>() {
public void run() {
setResult(parser.isValidEditString(new EObjectAdapter(
element), (String) value));
}
});
return valid.getCode() == ParserEditStatus.EDITABLE ? null : valid
.getMessage();
} catch (InterruptedException ie) {
ie.printStackTrace();
}
}
// shouldn't get here
return null;
}
};
}
/**
* @generated
*/
public IContentAssistProcessor getCompletionProcessor() {
if (getParserElement() == null || getParser() == null) {
return null;
}
return getParser().getCompletionProcessor(new EObjectAdapter(getParserElement()));
}
/**
* @generated
*/
public ParserOptions getParserOptions() {
return ParserOptions.NONE;
}
/**
* @generated
*/
public IParser getParser() {
if (parser == null) {
parser = DataMapperParserProvider
.getParser(
DataMapperElementTypes.Element_3007,
getParserElement(),
DataMapperVisualIDRegistry
.getType(org.wso2.developerstudio.datamapper.diagram.edit.parts.ElementNameEditPart.VISUAL_ID));
}
return parser;
}
/**
* @generated
*/
protected DirectEditManager getManager() {
if (manager == null) {
setManager(new TextDirectEditManager2(this, null,
DataMapperEditPartFactory.getTextCellEditorLocator(this)));
}
return manager;
}
/**
* @generated
*/
protected void setManager(DirectEditManager manager) {
this.manager = manager;
}
/**
* @generated
*/
protected void performDirectEdit() {
getManager().show();
}
/**
* @generated
*/
protected void performDirectEdit(Point eventLocation) {
if (getManager().getClass() == TextDirectEditManager2.class) {
((TextDirectEditManager2) getManager()).show(eventLocation.getSWTPoint());
}
}
/**
* @generated
*/
private void performDirectEdit(char initialCharacter) {
if (getManager() instanceof TextDirectEditManager) {
((TextDirectEditManager) getManager()).show(initialCharacter);
} else //
if (getManager() instanceof TextDirectEditManager2) {
((TextDirectEditManager2) getManager()).show(initialCharacter);
} else //
{
performDirectEdit();
}
}
/**
* @generated
*/
protected void performDirectEditRequest(Request request) {
final Request theRequest = request;
try {
getEditingDomain().runExclusive(new Runnable() {
public void run() {
if (isActive() && isEditable()) {
if (theRequest.getExtendedData().get(
RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR) instanceof Character) {
Character initialChar = (Character) theRequest.getExtendedData().get(
RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR);
performDirectEdit(initialChar.charValue());
} else if ((theRequest instanceof DirectEditRequest)
&& (getEditText().equals(getLabelText()))) {
DirectEditRequest editRequest = (DirectEditRequest) theRequest;
performDirectEdit(editRequest.getLocation());
} else {
performDirectEdit();
}
}
}
});
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* @generated
*/
protected void refreshVisuals() {
super.refreshVisuals();
refreshLabel();
refreshFont();
refreshFontColor();
refreshUnderline();
refreshStrikeThrough();
}
/**
* @generated
*/
protected void refreshLabel() {
setLabelTextHelper(getFigure(), getLabelText());
setLabelIconHelper(getFigure(), getLabelIcon());
refreshSelectionFeedback();
}
/**
* @generated
*/
protected void refreshUnderline() {
FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(
NotationPackage.eINSTANCE.getFontStyle());
if (style != null && getFigure() instanceof WrappingLabel) {
((WrappingLabel) getFigure()).setTextUnderline(style.isUnderline());
}
}
/**
* @generated
*/
protected void refreshStrikeThrough() {
FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(
NotationPackage.eINSTANCE.getFontStyle());
if (style != null && getFigure() instanceof WrappingLabel) {
((WrappingLabel) getFigure()).setTextStrikeThrough(style.isStrikeThrough());
}
}
/**
* @generated
*/
protected void refreshFont() {
FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(
NotationPackage.eINSTANCE.getFontStyle());
if (style != null) {
FontData fontData = new FontData(style.getFontName(), style.getFontHeight(),
(style.isBold() ? SWT.BOLD : SWT.NORMAL)
| (style.isItalic() ? SWT.ITALIC : SWT.NORMAL));
setFont(fontData);
}
}
/**
* @generated
*/
private void refreshSelectionFeedback() {
requestEditPolicyFeedbackRefresh(EditPolicy.PRIMARY_DRAG_ROLE);
requestEditPolicyFeedbackRefresh(EditPolicy.SELECTION_FEEDBACK_ROLE);
}
/**
* @generated
*/
private void requestEditPolicyFeedbackRefresh(String editPolicyKey) {
Object editPolicy = getEditPolicy(editPolicyKey);
if (editPolicy instanceof IRefreshableFeedbackEditPolicy) {
((IRefreshableFeedbackEditPolicy) editPolicy).refreshFeedback();
}
}
/**
* @generated
*/
protected void setFontColor(Color color) {
getFigure().setForegroundColor(color);
}
/**
* @generated
*/
protected void addSemanticListeners() {
if (getParser() instanceof ISemanticParser) {
EObject element = resolveSemanticElement();
parserElements = ((ISemanticParser) getParser())
.getSemanticElementsBeingParsed(element);
for (int i = 0; i < parserElements.size(); i++) {
addListenerFilter("SemanticModel" + i, this, (EObject) parserElements.get(i)); //$NON-NLS-1$
}
} else {
super.addSemanticListeners();
}
}
/**
* @generated
*/
protected void removeSemanticListeners() {
if (parserElements != null) {
for (int i = 0; i < parserElements.size(); i++) {
removeListenerFilter("SemanticModel" + i); //$NON-NLS-1$
}
} else {
super.removeSemanticListeners();
}
}
/**
* @generated
*/
protected AccessibleEditPart getAccessibleEditPart() {
if (accessibleEP == null) {
accessibleEP = new AccessibleGraphicalEditPart() {
public void getName(AccessibleEvent e) {
e.result = getLabelTextHelper(getFigure());
}
};
}
return accessibleEP;
}
/**
* @generated
*/
private View getFontStyleOwnerView() {
return getPrimaryView();
}
/**
* @generated
*/
private ILabelDelegate getLabelDelegate() {
if (labelDelegate == null) {
IFigure label = getFigure();
if (label instanceof WrappingLabel) {
labelDelegate = new WrappingLabelDelegate((WrappingLabel) label);
} else {
labelDelegate = new SimpleLabelDelegate((Label) label);
}
}
return labelDelegate;
}
/**
* @generated
*/
@Override
public Object getAdapter(Class key) {
if (ILabelDelegate.class.equals(key)) {
return getLabelDelegate();
}
return super.getAdapter(key);
}
/**
* @generated
*/
protected void addNotationalListeners() {
super.addNotationalListeners();
addListenerFilter("PrimaryView", this, getPrimaryView()); //$NON-NLS-1$
}
/**
* @generated
*/
protected void removeNotationalListeners() {
super.removeNotationalListeners();
removeListenerFilter("PrimaryView"); //$NON-NLS-1$
}
/**
* @generated
*/
protected void handleNotificationEvent(Notification event) {
Object feature = event.getFeature();
if (NotationPackage.eINSTANCE.getFontStyle_FontColor().equals(feature)) {
Integer c = (Integer) event.getNewValue();
setFontColor(DiagramColorRegistry.getInstance().getColor(c));
} else if (NotationPackage.eINSTANCE.getFontStyle_Underline().equals(feature)) {
refreshUnderline();
} else if (NotationPackage.eINSTANCE.getFontStyle_StrikeThrough().equals(feature)) {
refreshStrikeThrough();
} else if (NotationPackage.eINSTANCE.getFontStyle_FontHeight().equals(feature)
|| NotationPackage.eINSTANCE.getFontStyle_FontName().equals(feature)
|| NotationPackage.eINSTANCE.getFontStyle_Bold().equals(feature)
|| NotationPackage.eINSTANCE.getFontStyle_Italic().equals(feature)) {
refreshFont();
} else {
if (getParser() != null
&& getParser().isAffectingEvent(event, getParserOptions().intValue())) {
refreshLabel();
}
if (getParser() instanceof ISemanticParser) {
ISemanticParser modelParser = (ISemanticParser) getParser();
if (modelParser.areSemanticElementsAffected(null, event)) {
removeSemanticListeners();
if (resolveSemanticElement() != null) {
addSemanticListeners();
}
refreshLabel();
}
}
}
super.handleNotificationEvent(event);
}
/**
* @generated
*/
protected IFigure createFigure() {
// Parent should assign one using setLabel() method
return null;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.api.core;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
/**
* Defines all {@link ActiveMQException} types and their codes.
*/
public enum ActiveMQExceptionType {
// Error codes -------------------------------------------------
INTERNAL_ERROR(000) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQInternalErrorException(msg);
}
},
UNSUPPORTED_PACKET(001) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQUnsupportedPacketException(msg);
}
},
NOT_CONNECTED(002) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQNotConnectedException(msg);
}
},
CONNECTION_TIMEDOUT(003) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQConnectionTimedOutException(msg);
}
},
DISCONNECTED(004) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQDisconnectedException(msg);
}
},
UNBLOCKED(005) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQUnBlockedException(msg);
}
},
IO_ERROR(006) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQIOErrorException(msg);
}
},
QUEUE_DOES_NOT_EXIST(100) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQNonExistentQueueException(msg);
}
},
QUEUE_EXISTS(101) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQQueueExistsException(msg);
}
},
OBJECT_CLOSED(102) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQObjectClosedException(msg);
}
},
INVALID_FILTER_EXPRESSION(103) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQInvalidFilterExpressionException(msg);
}
},
ILLEGAL_STATE(104) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQIllegalStateException(msg);
}
},
SECURITY_EXCEPTION(105) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQSecurityException(msg);
}
},
ADDRESS_DOES_NOT_EXIST(106) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQAddressDoesNotExistException(msg);
}
},
ADDRESS_EXISTS(107) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQAddressExistsException(msg);
}
},
INCOMPATIBLE_CLIENT_SERVER_VERSIONS(108) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQIncompatibleClientServerException(msg);
}
},
LARGE_MESSAGE_ERROR_BODY(110) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQLargeMessageException(msg);
}
},
TRANSACTION_ROLLED_BACK(111) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQTransactionRolledBackException(msg);
}
},
SESSION_CREATION_REJECTED(112) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQSessionCreationException(msg);
}
},
DUPLICATE_ID_REJECTED(113) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQDuplicateIdException(msg);
}
},
DUPLICATE_METADATA(114) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQDuplicateMetaDataException(msg);
}
},
TRANSACTION_OUTCOME_UNKNOWN(115) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQTransactionOutcomeUnknownException(msg);
}
},
ALREADY_REPLICATING(116) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQAlreadyReplicatingException(msg);
}
},
INTERCEPTOR_REJECTED_PACKET(117) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQInterceptorRejectedPacketException(msg);
}
},
INVALID_TRANSIENT_QUEUE_USE(118) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQInvalidTransientQueueUseException(msg);
}
},
REMOTE_DISCONNECT(119) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQRemoteDisconnectException(msg);
}
},
TRANSACTION_TIMEOUT(120) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQTransactionTimeoutException(msg);
}
},
GENERIC_EXCEPTION(999),
NATIVE_ERROR_INTERNAL(200),
NATIVE_ERROR_INVALID_BUFFER(201),
NATIVE_ERROR_NOT_ALIGNED(202),
NATIVE_ERROR_CANT_INITIALIZE_AIO(203),
NATIVE_ERROR_CANT_RELEASE_AIO(204),
NATIVE_ERROR_CANT_OPEN_CLOSE_FILE(205),
NATIVE_ERROR_CANT_ALLOCATE_QUEUE(206),
NATIVE_ERROR_PREALLOCATE_FILE(208),
NATIVE_ERROR_ALLOCATE_MEMORY(209),
ADDRESS_FULL(210) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQAddressFullException(msg);
}
},
LARGE_MESSAGE_INTERRUPTED(211) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQLargeMessageInterruptedException(msg);
}
},
CLUSTER_SECURITY_EXCEPTION(212) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQClusterSecurityException(msg);
}
},
NOT_IMPLEMTNED_EXCEPTION(213),
MAX_CONSUMER_LIMIT_EXCEEDED(214) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQQueueMaxConsumerLimitReached(msg);
}
},
UNEXPECTED_ROUTING_TYPE_FOR_ADDRESS(215) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQUnexpectedRoutingTypeForAddress(msg);
}
},
INVALID_QUEUE_CONFIGURATION(216) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQInvalidQueueConfiguration(msg);
}
},
DELETE_ADDRESS_ERROR(217) {
@Override
public ActiveMQException createException(String msg) {
return new ActiveMQDeleteAddressException(msg);
}
};
private static final Map<Integer, ActiveMQExceptionType> TYPE_MAP;
static {
HashMap<Integer, ActiveMQExceptionType> map = new HashMap<>();
for (ActiveMQExceptionType type : EnumSet.allOf(ActiveMQExceptionType.class)) {
map.put(type.getCode(), type);
}
TYPE_MAP = Collections.unmodifiableMap(map);
}
private final int code;
ActiveMQExceptionType(int code) {
this.code = code;
}
public int getCode() {
return code;
}
public ActiveMQException createException(String msg) {
return new ActiveMQException(msg + ", code:" + this);
}
public static ActiveMQException createException(int code, String msg) {
return getType(code).createException(msg);
}
public static ActiveMQExceptionType getType(int code) {
ActiveMQExceptionType type = TYPE_MAP.get(code);
if (type != null)
return type;
return ActiveMQExceptionType.GENERIC_EXCEPTION;
}
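// Illustrative usage (the message text is made up): a raw wire code can be mapped back to its
// typed exception through the static factory, e.g.
//   ActiveMQException ex = ActiveMQExceptionType.createException(101, "queue already exists");
// yields an ActiveMQQueueExistsException, while an unknown code falls back to GENERIC_EXCEPTION.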
}
|
|
/*
* Copyright 2012 Sebastian Annies, Hamburg
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.coremedia.iso;
import com.coremedia.iso.boxes.Box;
import com.coremedia.iso.boxes.Container;
import com.googlecode.mp4parser.AbstractBox;
import com.googlecode.mp4parser.DataSource;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.util.Enumeration;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* A Property file based BoxFactory
*/
public class PropertyBoxParserImpl extends AbstractBoxParser {
Properties mapping;
Pattern constuctorPattern = Pattern.compile("(.*)\\((.*?)\\)");
private final boolean parseDetails;
public PropertyBoxParserImpl(boolean parseDetails, String... customProperties) {
this.parseDetails = parseDetails;
InputStream is = getClass().getResourceAsStream("/isoparser-default.properties");
try {
mapping = new Properties();
try {
mapping.load(is);
ClassLoader cl = Thread.currentThread().getContextClassLoader();
if (cl == null) {
cl = ClassLoader.getSystemClassLoader();
}
Enumeration<URL> enumeration = cl.getResources("isoparser-custom.properties");
while (enumeration.hasMoreElements()) {
URL url = enumeration.nextElement();
InputStream customIS = url.openStream();
try {
mapping.load(customIS);
} finally {
customIS.close();
}
}
for (String customProperty : customProperties) {
mapping.load(getClass().getResourceAsStream(customProperty));
}
} catch (IOException e) {
throw new RuntimeException(e);
}
} finally {
try {
is.close();
} catch (IOException e) {
e.printStackTrace();
// ignore - I can't help
}
}
}
public PropertyBoxParserImpl(String... customProperties) {
this(true, customProperties);
}
public PropertyBoxParserImpl(boolean parseDetails, Properties mapping) {
this.parseDetails = parseDetails;
this.mapping = mapping;
}
public PropertyBoxParserImpl(Properties mapping) {
this(true, mapping);
}
@Override
public Box createBox(String type, byte[] userType, String parent) {
invoke(type, userType, parent);
String[] param = this.param.get();
try {
Class<Box> clazz = (Class<Box>) Class.forName(clazzName.get());
if (param.length > 0) {
Class[] constructorArgsClazz = new Class[param.length];
Object[] constructorArgs = new Object[param.length];
for (int i = 0; i < param.length; i++) {
if ("userType".equals(param[i])) {
constructorArgs[i] = userType;
constructorArgsClazz[i] = byte[].class;
} else if ("type".equals(param[i])) {
constructorArgs[i] = type;
constructorArgsClazz[i] = String.class;
} else if ("parent".equals(param[i])) {
constructorArgs[i] = parent;
constructorArgsClazz[i] = String.class;
} else {
throw new InternalError("No such param: " + param[i]);
}
}
Constructor<Box> constructorObject = clazz.getConstructor(constructorArgsClazz);
return constructorObject.newInstance(constructorArgs);
} else {
return clazz.newInstance();
}
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (InvocationTargetException e) {
throw new RuntimeException(e);
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
StringBuilder buildLookupStrings = new StringBuilder();
ThreadLocal<String> clazzName = new ThreadLocal<String>();
ThreadLocal<String[]> param = new ThreadLocal<String[]>();
static String[] EMPTY_STRING_ARRAY = new String[0];
public void invoke(String type, byte[] userType, String parent) {
String constructor;
if (userType != null) {
if (!"uuid".equals((type))) {
throw new RuntimeException("we have a userType but no uuid box type. Something's wrong");
}
constructor = mapping.getProperty("uuid[" + Hex.encodeHex(userType).toUpperCase() + "]");
if (constructor == null) {
constructor = mapping.getProperty((parent) + "-uuid[" + Hex.encodeHex(userType).toUpperCase() + "]");
}
if (constructor == null) {
constructor = mapping.getProperty("uuid");
}
} else {
constructor = mapping.getProperty((type));
if (constructor == null) {
String lookup = buildLookupStrings.append(parent).append('-').append(type).toString();
buildLookupStrings.setLength(0);
constructor = mapping.getProperty(lookup);
}
}
if (constructor == null) {
constructor = mapping.getProperty("default");
}
if (constructor == null) {
throw new RuntimeException("No box object found for " + type);
}
if (!constructor.endsWith(")")) {
param.set( EMPTY_STRING_ARRAY);
clazzName.set(constructor);
} else {
Matcher m = constuctorPattern.matcher(constructor);
boolean matches = m.matches();
if (!matches) {
throw new RuntimeException("Cannot work with that constructor: " + constructor);
}
clazzName.set( m.group(1));
if (m.group(2).length() == 0) {
    param.set(EMPTY_STRING_ARRAY);
} else {
    // the length check is already satisfied here, so just split the comma-separated argument list
    param.set(m.group(2).split(","));
}
}
}
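// Illustrative mapping entries (the exact class names are examples and may differ from the
// bundled isoparser-default.properties): keys are box types, optionally prefixed with
// "parent-", and values name the box class plus the constructor arguments to inject
// ("type", "userType" or "parent"), e.g.
//   ftyp=com.coremedia.iso.boxes.FileTypeBox()
//   uuid=com.googlecode.mp4parser.boxes.UserBox(userType)
//   default=com.coremedia.iso.boxes.UnknownBox(type)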
@Override
public Box parseBox(DataSource byteChannel, Container parent) throws IOException {
Box box = super.parseBox(byteChannel, parent);
if (parseDetails && box instanceof AbstractBox) {
AbstractBox abstractBox = (AbstractBox)box;
if (!abstractBox.isParsed()) {
//System.err.println(String.format("parsed detail %s", box.getClass().getSimpleName()));
abstractBox.parseDetails();
}
}
return box;
}
}
|
|
package com.apigee.sdk.apm.android.crashlogging;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.UUID;
import android.content.Context;
import android.util.Log;
import com.apigee.sdk.AppIdentification;
import com.apigee.sdk.apm.android.AndroidLog;
import com.apigee.sdk.apm.android.MonitoringClient;
import com.apigee.sdk.apm.android.crashlogging.internal.ExceptionHandler;
import com.apigee.sdk.apm.android.model.ClientLog;
import com.apigee.sdk.apm.android.util.StringUtils;
/**
* <h4>Description</h4>
*
* The crash manager sets an exception handler to catch all unhandled
* exceptions. The handler writes the stack trace and additional metadata to
* a file. If it finds one or more of these files at the next start, it shows
* an alert dialog asking the user whether they want to send the crash data to
* HockeyApp.
*
* <h4>License</h4>
*
* <pre>
* Copyright (c) 2012 Codenauts UG
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
* </pre>
*
* @author Thomas Dohmke
**/
public class CrashManager {
public static String CRASH_LOG_TAG = "CRASH";
protected static String CRASH_LOG_KEY_STRING_FORMAT = "%s/crashlog/%s/%s";
public static AppIdentification appIdentification = null;
public static String appUniqueIdentifier = null;
protected static AndroidLog logger;
/**
* Registers new crash manager and handles existing crash logs.
*
 * @param context The context to use. Usually your Activity object.
 * @param appIdentification Identification of the app that is being monitored.
 * @param monitoringClient Client used to upload crash reports to the server.
*/
public static void register(Context context, AppIdentification appIdentification, MonitoringClient monitoringClient) {
register(context, appIdentification, null, monitoringClient);
}
/**
* Registers new crash manager and handles existing crash logs.
*
 * @param context The context to use. Usually your Activity object.
 * @param appIdentification Identification of the app that is being monitored.
 * @param listener Listener implementing the callback functions.
 * @param monitoringClient Client used to upload crash reports to the server.
*/
public static void register(Context context, AppIdentification appIdentification, CrashManagerListener listener, MonitoringClient monitoringClient) {
initialize(context, appIdentification, listener, false);
execute(context, listener, monitoringClient);
}
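// Illustrative sketch, not part of the original SDK: a typical call site. The parameter values
// (the Activity context and already-constructed AppIdentification/MonitoringClient instances)
// are assumptions about the caller, not something this class provides.
static void exampleRegistration(Context activityContext, AppIdentification ident, MonitoringClient client) {
// Installs the uncaught-exception handler; if crash files from a previous run exist,
// they are uploaded through the monitoring client before the handler is registered.
register(activityContext, ident, client);
}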
/**
* Initializes the crash manager, but does not handle crash log. Use this
* method only if you want to split the process into two parts, i.e. when
* your app has multiple entry points. You need to call the method 'execute'
* at some point after this method.
*
 * @param context The context to use. Usually your Activity object.
 * @param appIdentification Identification of the app that is being monitored.
 * @param listener Listener implementing the callback functions.
*/
public static void initialize(Context context, AppIdentification appIdentification, CrashManagerListener listener) {
initialize(context, appIdentification, listener, true);
}
/**
* Executes the crash manager. You need to call this method if you have used
* the method 'initialize' before.
*
* @param context The context to use. Usually your Activity object.
 * @param listener Listener implementing the callback functions.
 * @param monitoringClient Client used to upload crash reports to the server.
*/
public static void execute(Context context, CrashManagerListener listener, MonitoringClient monitoringClient) {
Boolean ignoreDefaultHandler = (listener != null) && (listener.ignoreDefaultHandler());
if( hasStackTraces() ) {
if (listener != null) {
listener.onCrashesFound();
}
sendCrashes(context, listener, ignoreDefaultHandler, monitoringClient);
} else {
registerHandler(context, listener, ignoreDefaultHandler);
}
}
protected static String getCrashFilesDirectory() {
return Constants.FILES_PATH + "/";
}
/**
 * Checks if there are any saved stack traces in the files dir.
 *
 * @return true if at least one stack trace file was found, false otherwise
*/
public static boolean hasStackTraces() {
String[] filenames = searchForStackTraces();
if( (filenames != null) && (filenames.length > 0) ) {
return true;
}
return false;
}
/**
* Submits all stack traces in the files dir to server.
*
* @param context The context to use. Usually your Activity object.
 * @param listener Listener implementing the callback functions.
 * @param monitoringClient Client used to upload crash reports to the server.
*/
public static void submitStackTraces(Context context, CrashManagerListener listener, MonitoringClient monitoringClient) {
Log.d(ClientLog.TAG_MONITORING_CLIENT, "Looking for exceptions in: " + Constants.FILES_PATH);
String[] list = searchForStackTraces();
Boolean successful = false;
if ((list != null) && (list.length > 0)) {
Log.d(ClientLog.TAG_MONITORING_CLIENT, "Found " + list.length + " stacktrace(s).");
for (int index = 0; index < list.length; index++) {
try {
// Read contents of stack trace
String filename = list[index];
Log.v(ClientLog.TAG_MONITORING_CLIENT, "crash file found: '" + filename + "'");
String stacktrace = contentsOfFile(context, filename);
if ( (stacktrace != null) && (stacktrace.length() > 0) ) {
Log.d(ClientLog.TAG_MONITORING_CLIENT, "Transmitting crash data: \n" + stacktrace);
submitStackTrace(context, filename, monitoringClient);
successful = true;
}
}
catch (Exception e) {
e.printStackTrace();
}
finally {
if (successful) {
deleteStackTrace(context, list[index]);
if (listener != null) {
listener.onCrashesSent();
}
}
else {
if (listener != null) {
listener.onCrashesNotSent();
}
}
}
}
}
}
/**
* Deletes all stack traces and meta files from files dir.
*
* @param context The context to use. Usually your Activity object.
*/
public static void deleteStackTraces(Context context) {
Log.d(ClientLog.TAG_MONITORING_CLIENT, "Looking for exceptions in: " + Constants.FILES_PATH);
String[] list = searchForStackTraces();
if ((list != null) && (list.length > 0)) {
Log.d(ClientLog.TAG_MONITORING_CLIENT, "Found " + list.length + " stacktrace(s).");
for (int index = 0; index < list.length; index++) {
String fileName = list[index];
try {
Log.d(ClientLog.TAG_MONITORING_CLIENT, "Delete stacktrace " + fileName + ".");
deleteStackTrace(context, list[index]);
context.deleteFile(list[index]);
}
catch (Exception e) {
e.printStackTrace();
}
}
}
}
/**
* Private method to initialize the crash manager. This method has an
* additional parameter to decide whether to register the exception handler
* at the end or not.
*/
private static void initialize(Context context, AppIdentification appIdentification, CrashManagerListener listener, boolean registerHandler) {
CrashManager.appIdentification = appIdentification;
Constants.loadFromContext(context);
if (CrashManager.appIdentification == null) {
CrashManager.appUniqueIdentifier = Constants.APP_PACKAGE;
}
if (registerHandler) {
Boolean ignoreDefaultHandler = (listener != null) && (listener.ignoreDefaultHandler());
registerHandler(context, listener, ignoreDefaultHandler);
}
}
/**
* Starts thread to send crashes to HockeyApp, then registers the exception
* handler.
*/
private static void sendCrashes(final Context context, final CrashManagerListener listener, final boolean ignoreDefaultHandler, final MonitoringClient monitoringClient) {
new Thread() {
@Override
public void run() {
submitStackTraces(context, listener, monitoringClient);
registerHandler(context, listener, ignoreDefaultHandler);
}
}.start();
}
/**
* Registers the exception handler.
*/
private static void registerHandler(Context context, CrashManagerListener listener, boolean ignoreDefaultHandler) {
if ((Constants.APP_VERSION != null) && (Constants.APP_PACKAGE != null)) {
// Get current handler
UncaughtExceptionHandler currentHandler = Thread.getDefaultUncaughtExceptionHandler();
if (currentHandler != null) {
Log.w(ClientLog.TAG_MONITORING_CLIENT, "Multiple crash reporters detected");
Log.d(ClientLog.TAG_MONITORING_CLIENT, "Current handler class = " + currentHandler.getClass().getName());
// Register if not already registered
if (!(currentHandler instanceof ExceptionHandler)) {
Log.w(ClientLog.TAG_MONITORING_CLIENT, "Replacing existing crash reporter");
Thread.setDefaultUncaughtExceptionHandler(new ExceptionHandler(currentHandler, listener, ignoreDefaultHandler));
}
}
else {
Thread.setDefaultUncaughtExceptionHandler(new ExceptionHandler(currentHandler, listener, ignoreDefaultHandler));
}
}
else {
Log.d(ClientLog.TAG_MONITORING_CLIENT, "Exception handler not set because version or package is null.");
}
}
/**
 * Deletes the given file and all corresponding files (same name,
* different extension).
*/
protected static void deleteStackTrace(Context context, String filename) {
context.deleteFile(filename);
String user = filename.replace(".stacktrace", ".user");
context.deleteFile(user);
String contact = filename.replace(".stacktrace", ".contact");
context.deleteFile(contact);
String description = filename.replace(".stacktrace", ".description");
context.deleteFile(description);
}
/**
* Returns the content of a file as a string.
*/
protected static String contentsOfFile(Context context, String filename) {
StringBuilder contents = new StringBuilder();
BufferedReader reader = null;
try {
reader = new BufferedReader(new InputStreamReader(context.openFileInput(filename)));
String line = null;
String lineSeparator = System.getProperty("line.separator");
while ((line = reader.readLine()) != null) {
contents.append(line);
contents.append(lineSeparator);
}
}
catch (FileNotFoundException e) {
}
catch (IOException e) {
e.printStackTrace();
}
finally {
if (reader != null) {
try {
reader.close();
}
catch (IOException ignored) {
}
}
}
return contents.toString();
}
/**
 * Searches for .stacktrace files and returns them as an array.
*/
protected static String[] searchForStackTraces() {
// Try to create the files folder if it doesn't exist
File dir = new File(getCrashFilesDirectory());
dir.mkdir();
// Filter for ".stacktrace" files
FilenameFilter filter = new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.endsWith(".stacktrace");
}
};
return dir.list(filter);
}
protected static void submitStackTrace(Context context, String fileNameOnDevice, MonitoringClient monitoringClient) throws IOException
{
UUID uuid = UUID.randomUUID();
String uuidAsString = uuid.toString();
String fileNameForServer = uuidAsString + ".stacktrace";
if(logger != null)
{
logger.wtf(CRASH_LOG_TAG, fileNameForServer);
}
String crashFilePath = getCrashFilesDirectory() + fileNameOnDevice;
String crashFileContents = StringUtils.fileToString(crashFilePath);
if( (crashFileContents != null) && (crashFileContents.length() > 0) ) {
String postURL = monitoringClient.getCrashReportUploadURL(fileNameForServer);
monitoringClient.onCrashReportUpload(crashFileContents);
if( monitoringClient.putString(crashFileContents, postURL, "text/plain") != null ) {
Log.i(ClientLog.TAG_MONITORING_CLIENT,"Sent crash file to server '" + fileNameForServer + "'");
} else {
Log.e(ClientLog.TAG_MONITORING_CLIENT,"There was an error with the request to upload the crash report");
}
} else {
// can't read crash file
Log.e(ClientLog.TAG_MONITORING_CLIENT,"Error: unable to read crash file on device '" + fileNameOnDevice + "'");
}
}
//Apigee specific logger
public static void register(Context context, AndroidLog log, AppIdentification appIdentification, MonitoringClient monitoringClient) {
logger = log;
register(context, appIdentification, new CrashManagerListener() {
@Override
public Boolean onCrashesFound() {
logger.wtf(ClientLog.TAG_MONITORING_CLIENT, "1 or more crashes occurred");
return true; // auto-send (don't ask the user)
}
@Override
public void onCrashesSent() {
logger.i(ClientLog.TAG_MONITORING_CLIENT, "Sent Crashlogs to Server");
super.onCrashesSent();
}
@Override
public void onCrashesNotSent() {
logger.w(ClientLog.TAG_MONITORING_CLIENT, "Unable to send crashlogs to server");
super.onCrashesNotSent();
}
}, monitoringClient);
}
}
|
|
package org.praisenter.ui.slide;
import java.util.ArrayList;
import java.util.List;
import org.praisenter.data.slide.Slide;
import org.praisenter.data.slide.SlideComponent;
import org.praisenter.ui.document.DocumentContext;
import javafx.beans.binding.Bindings;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.DoubleProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleDoubleProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Cursor;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.Border;
import javafx.scene.layout.BorderStroke;
import javafx.scene.layout.BorderStrokeStyle;
import javafx.scene.layout.BorderWidths;
import javafx.scene.layout.Region;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.scene.shape.Rectangle;
import javafx.scene.shape.StrokeLineCap;
import javafx.scene.shape.StrokeLineJoin;
import javafx.scene.shape.StrokeType;
final class EditNode extends StackPane {
private static final double THUMB_SIZE = 10;
private static final double BORDER_OFFSET = 6;
private static final double BORDER_SIZE = 1;
private static final double DASH_SIZE = 4;
private final DocumentContext<Slide> document;
private final ObjectProperty<SlideComponent> component;
private final DoubleProperty scale;
private final BooleanProperty selected;
// for selection, move, and resize
private Cursor cursor;
/** The start x value of the mouse gesture */
private double sx;
/** The start y value of the mouse gesture */
private double sy;
/** The current x coordinate of the region */
private double x;
/** The current y coordinate of the region */
private double y;
/** The current width of the region */
private double w;
/** The current height of the region */
private double h;
// private final ContextMenu contextMenu;
public EditNode(
DocumentContext<Slide> document,
SlideComponent component) {
this.document = document;
this.component = new SimpleObjectProperty<>(component);
this.scale = new SimpleDoubleProperty(1);
this.selected = new SimpleBooleanProperty(false);
// this.contextMenu = new ContextMenu();
// this.contextMenu.getItems().addAll(
//// this.createMenuItem(Action.NEW_BOOK),
//// this.createMenuItem(Action.NEW_CHAPTER),
//// this.createMenuItem(Action.NEW_VERSE),
//// new SeparatorMenuItem(),
// this.createMenuItem(Action.COPY),
// this.createMenuItem(Action.CUT),
// this.createMenuItem(Action.PASTE),
//// new SeparatorMenuItem(),
//// this.createMenuItem(Action.REORDER),
//// this.createMenuItem(Action.RENUMBER),
// new SeparatorMenuItem(),
// this.createMenuItem(Action.DELETE)
// );
// this.contextMenu.setAutoHide(true);
// this.setOnContextMenuRequested(e -> this.contextMenu.show(this, e.getScreenX(), e.getScreenY()));
List<Double> halfdashes = new ArrayList<Double>();
halfdashes.add(DASH_SIZE / 2);
halfdashes.add(DASH_SIZE / 2);
List<Double> dashes = new ArrayList<Double>();
dashes.add(DASH_SIZE);
dashes.add(DASH_SIZE);
Border hoverBorder = new Border(
new BorderStroke(Color.BLACK, new BorderStrokeStyle(StrokeType.OUTSIDE, StrokeLineJoin.MITER, StrokeLineCap.BUTT, BORDER_SIZE * 2, 0.0, halfdashes), null, new BorderWidths(BORDER_SIZE)),
new BorderStroke(Color.WHITE, new BorderStrokeStyle(StrokeType.OUTSIDE, StrokeLineJoin.MITER, StrokeLineCap.BUTT, BORDER_SIZE * 2, DASH_SIZE / 2, halfdashes), null, new BorderWidths(BORDER_SIZE))
);
Border selectBorder = new Border(
new BorderStroke(Color.BLACK, new BorderStrokeStyle(StrokeType.OUTSIDE, StrokeLineJoin.MITER, StrokeLineCap.BUTT, BORDER_SIZE * 2, 0.0, dashes), null, new BorderWidths(BORDER_SIZE)),
new BorderStroke(Color.WHITE, new BorderStrokeStyle(StrokeType.OUTSIDE, StrokeLineJoin.MITER, StrokeLineCap.BUTT, BORDER_SIZE * 2, DASH_SIZE, dashes), null, new BorderWidths(BORDER_SIZE))
);
Region editBorder = new Region();
editBorder.setSnapToPixel(true);
// border.setBackground(new Background(new BackgroundFill(Color.WHITE, null, null)));
StackPane editBorderContainer = new StackPane(editBorder);
editBorderContainer.setPadding(new Insets(BORDER_OFFSET));
editBorderContainer.setSnapToPixel(true);
// resize knobs
Rectangle tlThumb = new Rectangle(THUMB_SIZE, THUMB_SIZE);
Rectangle tThumb = new Rectangle(THUMB_SIZE, THUMB_SIZE);
Rectangle trThumb = new Rectangle(THUMB_SIZE, THUMB_SIZE);
Rectangle rThumb = new Rectangle(THUMB_SIZE, THUMB_SIZE);
Rectangle brThumb = new Rectangle(THUMB_SIZE, THUMB_SIZE);
Rectangle bThumb = new Rectangle(THUMB_SIZE, THUMB_SIZE);
Rectangle blThumb = new Rectangle(THUMB_SIZE, THUMB_SIZE);
Rectangle lThumb = new Rectangle(THUMB_SIZE, THUMB_SIZE);
StackPane.setAlignment(tlThumb, Pos.TOP_LEFT);
StackPane.setAlignment(tThumb, Pos.TOP_CENTER);
StackPane.setAlignment(trThumb, Pos.TOP_RIGHT);
StackPane.setAlignment(rThumb, Pos.CENTER_RIGHT);
StackPane.setAlignment(brThumb, Pos.BOTTOM_RIGHT);
StackPane.setAlignment(bThumb, Pos.BOTTOM_CENTER);
StackPane.setAlignment(blThumb, Pos.BOTTOM_LEFT);
StackPane.setAlignment(lThumb, Pos.CENTER_LEFT);
tlThumb.setCursor(Cursor.NW_RESIZE);
tThumb.setCursor(Cursor.N_RESIZE);
trThumb.setCursor(Cursor.NE_RESIZE);
rThumb.setCursor(Cursor.E_RESIZE);
brThumb.setCursor(Cursor.SE_RESIZE);
bThumb.setCursor(Cursor.S_RESIZE);
blThumb.setCursor(Cursor.SW_RESIZE);
lThumb.setCursor(Cursor.W_RESIZE);
Rectangle[] thumbs = new Rectangle[] {
tlThumb,
tThumb,
trThumb,
rThumb,
brThumb,
bThumb,
blThumb,
lThumb
};
for (Rectangle r : thumbs) {
r.setFill(Color.WHITE);
r.setStroke(Color.BLACK);
r.setStrokeWidth(BORDER_SIZE);
r.setStrokeType(StrokeType.CENTERED);
r.setStrokeLineCap(StrokeLineCap.BUTT);
r.setStrokeLineJoin(StrokeLineJoin.MITER);
r.setStrokeMiterLimit(BORDER_SIZE);
r.addEventHandler(MouseEvent.ANY, e -> {
if (e.getEventType() == MouseEvent.MOUSE_PRESSED ||
e.getEventType() == MouseEvent.MOUSE_CLICKED) {
pressed(e, r.getCursor());
} else if (e.getEventType() == MouseEvent.MOUSE_DRAGGED) {
dragged(e);
} else if (e.getEventType() == MouseEvent.MOUSE_RELEASED) {
apply(e);
}
e.consume();
});
}
this.getChildren().addAll(editBorderContainer, tlThumb, tThumb, trThumb, rThumb, brThumb, bThumb, blThumb, lThumb);
this.prefWidthProperty().bind(Bindings.createDoubleBinding(() -> {
return Math.ceil(component.getWidth() * this.scale.get()) + BORDER_OFFSET * 2;
}, component.widthProperty(), this.scale));
this.prefHeightProperty().bind(Bindings.createDoubleBinding(() -> {
return Math.ceil(component.getHeight() * this.scale.get()) + BORDER_OFFSET * 2;
}, component.heightProperty(), this.scale));
this.layoutXProperty().bind(Bindings.createDoubleBinding(() -> {
return Math.floor(component.getX() * this.scale.get()) - BORDER_OFFSET;
}, component.xProperty(), this.scale));
this.layoutYProperty().bind(Bindings.createDoubleBinding(() -> {
return Math.floor(component.getY() * this.scale.get()) - BORDER_OFFSET;
}, component.yProperty(), this.scale));
for (Rectangle r : thumbs) {
r.visibleProperty().bind(this.selected);
}
this.addEventHandler(MouseEvent.MOUSE_PRESSED, e -> this.selected.set(true));
this.addEventHandler(MouseEvent.ANY, e -> {
if (e.getEventType() == MouseEvent.MOUSE_PRESSED ||
e.getEventType() == MouseEvent.MOUSE_CLICKED) {
pressed(e, Cursor.MOVE);
} else if (e.getEventType() == MouseEvent.MOUSE_DRAGGED) {
dragged(e);
} else if (e.getEventType() == MouseEvent.MOUSE_RELEASED) {
apply(e);
} else if (e.getEventType() == MouseEvent.MOUSE_ENTERED) {
if (!this.isSelected()) {
editBorder.setBorder(hoverBorder);
}
} else if (e.getEventType() == MouseEvent.MOUSE_EXITED) {
if (!this.isSelected()) {
editBorder.setBorder(null);
}
}
});
this.selected.addListener((obs, ov, nv) -> {
if (!nv) {
editBorder.setBorder(null);
} else {
editBorder.setBorder(selectBorder);
}
});
this.setCursor(Cursor.MOVE);
this.setSnapToPixel(true);
// this.setBackground(new Background(new BackgroundFill(random(), null, null)));
}
// private Color random() {
// return Color.color(Math.random(), Math.random(), Math.random());
// }
/**
* Called when a mouse button has been pressed on the component.
* @param event the event
*/
private void pressed(MouseEvent event, Cursor cursor) {
this.cursor = cursor;
// record the scene coordinates of the start
sx = event.getSceneX();
sy = event.getSceneY();
// record the original x,y coordinates of the slide component
x = component.get().getX();
y = component.get().getY();
w = component.get().getWidth();
h = component.get().getHeight();
}
/**
* Called when a mouse drag gesture involves the component.
* @param event the event
*/
private void dragged(MouseEvent event) {
// only components can be moved or resized
// compute the integer difference in position
// of the mouse from the start and scale it
// by the scale factor
double nx = event.getSceneX();
double ny = event.getSceneY();
double dx = nx - sx;
double dy = ny - sy;
// convert the screen-space delta into slide coordinates using the inverse scale
double sf = 1.0 / this.scale.get();
double dxi = dx * sf;
double dyi = dy * sf;
// are we moving the node?
if (cursor == Cursor.MOVE) {
// we SET the x/y for accuracy
this.positionChanged(x + dxi, y + dyi);
}
if (cursor == Cursor.E_RESIZE) {
dxi = clamp(w, dxi);
this.sizeChanged(x, y, w + dxi, h);
} else if (cursor == Cursor.S_RESIZE) {
dyi = clamp(h, dyi);
this.sizeChanged(x, y, w, h + dyi);
} else if (cursor == Cursor.N_RESIZE) {
dyi = -clamp(h, -dyi);
this.sizeChanged(x, y + dyi, w, h - dyi);
} else if (cursor == Cursor.W_RESIZE) {
dxi = -clamp(w, -dxi);
this.sizeChanged(x + dxi, y, w - dxi, h);
} else if (cursor == Cursor.SE_RESIZE) {
dxi = clamp(w, dxi);
dyi = clamp(h, dyi);
this.sizeChanged(x, y, w + dxi, h + dyi);
} else if (cursor == Cursor.SW_RESIZE) {
dxi = -clamp(w, -dxi);
dyi = clamp(h, dyi);
this.sizeChanged(x + dxi, y, w - dxi, h + dyi);
} else if (cursor == Cursor.NE_RESIZE) {
dxi = clamp(w, dxi);
dyi = -clamp(h, -dyi);
this.sizeChanged(x, y + dyi, w + dxi, h - dyi);
} else if (cursor == Cursor.NW_RESIZE) {
dxi = -clamp(w, -dxi);
dyi = -clamp(h, -dyi);
this.sizeChanged(x + dxi, y + dyi, w - dxi, h - dyi);
}
}
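// Worked example (illustrative, not part of the original source): with scale = 0.5 the slide is
// rendered at half size, so sf = 1.0 / 0.5 = 2.0 and a 10px mouse drag to the right translates to
// dxi = 10 * 2.0 = 20 units in slide coordinates.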
/**
 * Clamps the given resize delta so that dimension + value never drops below the 20px minimum.
 * @param dimension the current dimension
 * @param value the requested delta
 * @return the clamped delta
*/
private static double clamp(double dimension, double value) {
if (value < 0) {
if (dimension + value < 20) {
return -Math.floor(dimension - 20);
}
}
return Math.floor(value);
}
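// Worked examples (illustrative, not part of the original source):
//   clamp(100, -15) returns -15 (100 - 15 = 85 is still >= the 20px minimum, so the delta passes through)
//   clamp(30, -15)  returns -10 (30 - 15 = 15 < 20, so the delta is limited to -(30 - 20) = -10)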
/**
* Calls the position changed handler.
* @param x the new x coordinate
* @param y the new y coordinate
*/
private void positionChanged(double x, double y) {
this.component.get().setX(x);
this.component.get().setY(y);
}
/**
* Calls the size changed handler.
* @param x the new x coordinate
* @param y the new y coordinate
* @param w the new width
* @param h the new height
*/
private void sizeChanged(double x, double y, double w, double h) {
SlideComponent component = this.component.get();
component.setX(x);
component.setY(y);
component.setWidth(w);
component.setHeight(h);
}
/**
* Records the action performed to the undo manager for easy undo/redo.
* @param e the mouse event
*/
private void apply(MouseEvent e) {
SlideComponent component = this.component.get();
double sx = this.x;
double sy = this.y;
double sw = this.w;
double sh = this.h;
double nx = component.getX();
double ny = component.getY();
double nw = component.getWidth();
double nh = component.getHeight();
if (sx != nx ||
sy != ny ||
sw != nw ||
sh != nh) {
SlideComponentBoundsEdit be = new SlideComponentBoundsEdit(
component.xProperty(),
component.yProperty(),
component.widthProperty(),
component.heightProperty(),
sx, sy, sw, sh,
nx, ny, nw, nh);
// record the change with the undo manager so it can be undone/redone
this.document.getUndoManager().addEdit(be);
}
}
public ReadOnlyObjectProperty<SlideComponent> componentProperty() {
return this.component;
}
public SlideComponent getComponent() {
return this.component.get();
}
public DoubleProperty scaleProperty() {
return this.scale;
}
public double getScale() {
return this.scale.get();
}
public void setScale(double scale) {
this.scale.set(scale);
}
public BooleanProperty selectedProperty() {
return this.selected;
}
public boolean isSelected() {
return this.selected.get();
}
public void setSelected(boolean flag) {
this.selected.set(flag);
}
}
|
|
package org.lnu.is.web.rest.controller.publicactivity.award;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.nio.file.AccessDeniedException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.lnu.is.facade.facade.Facade;
import org.lnu.is.pagination.OrderBy;
import org.lnu.is.resource.message.MessageResource;
import org.lnu.is.resource.message.MessageType;
import org.lnu.is.resource.publicactivity.award.PublicActivityAwardResource;
import org.lnu.is.resource.search.PagedRequest;
import org.lnu.is.resource.search.PagedResultResource;
import org.lnu.is.web.rest.controller.AbstractControllerTest;
import org.lnu.is.web.rest.controller.BaseController;
import org.mockito.InjectMocks;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.http.MediaType;
@RunWith(MockitoJUnitRunner.class)
public class PublicActivityAwardControllerTest extends AbstractControllerTest {
@Mock
private Facade<PublicActivityAwardResource, PublicActivityAwardResource, Long> facade;
@InjectMocks
private PublicActivityAwardController unit;
@Override
protected BaseController getUnit() {
return unit;
}
@Test
public void testCreate() throws Exception {
// Given
Long publicActivityId = 1L;
PublicActivityAwardResource resource = new PublicActivityAwardResource();
resource.setPublicActivityId(publicActivityId);
resource.setBegDate(new Date());
// When
String request = getJson(resource, true);
String response = getJson(resource, false);
when(facade.createResource(any(PublicActivityAwardResource.class)))
.thenReturn(resource);
// Then
mockMvc.perform(
post("/publicactivities/{publicActivityId}/awards",
publicActivityId).contentType(
MediaType.APPLICATION_JSON).content(request))
.andExpect(status().isCreated())
.andExpect(content().string(response));
verify(facade).createResource(resource);
}
@Test
public void testUpdate() throws Exception {
// Given
Long id = 1L;
Long publicActivityId = 1L;
PublicActivityAwardResource resource = new PublicActivityAwardResource();
resource.setPublicActivityId(publicActivityId);
resource.setBegDate(new Date());
resource.setId(id);
MessageResource responseResource = new MessageResource(MessageType.INFO);
// When
String request = getJson(resource, true);
String response = getJson(responseResource, false);
// Then
mockMvc.perform(
put("/publicactivities/{publicActivityId}/awards/{id}",
publicActivityId, id).contentType(
MediaType.APPLICATION_JSON).content(request))
.andExpect(status().isOk())
.andExpect(content().string(response));
verify(facade).updateResource(id, resource);
}
@Test
public void testGetResource() throws Exception {
// Given
Long id = 1L;
Long publicActivityId = 1L;
PublicActivityAwardResource resource = new PublicActivityAwardResource();
resource.setPublicActivityId(publicActivityId);
resource.setBegDate(new Date());
resource.setId(id);
// When
String response = getJson(resource, false);
when(facade.getResource(anyLong())).thenReturn(resource);
// Then
mockMvc.perform(
get("/publicactivities/{publicActivityId}/awards/{id}",
publicActivityId, id)).andExpect(status().isOk())
.andExpect(content().string(response));
verify(facade).getResource(id);
}
@Test
public void testDelete() throws Exception {
// Given
Long id = 1L;
Long publicActivityId = 2L;
// When
// Then
mockMvc.perform(
delete("/publicactivities/{publicActivityId}/awards/{id}",
publicActivityId, id)).andExpect(status().is(204));
verify(facade).removeResource(id);
}
@Test
public void testGetResources() throws Exception {
// Given
Long id = 1L;
Long publicActivityId = 1L;
PublicActivityAwardResource resource = new PublicActivityAwardResource();
resource.setPublicActivityId(publicActivityId);
resource.setBegDate(new Date());
resource.setId(id);
long count = 100;
int limit = 25;
Integer offset = 10;
String uri = "/publicactivities";
List<PublicActivityAwardResource> entities = Arrays.asList(resource);
PagedResultResource<PublicActivityAwardResource> expectedResource = new PagedResultResource<>();
expectedResource.setCount(count);
expectedResource.setLimit(limit);
expectedResource.setOffset(offset);
expectedResource.setUri(uri);
expectedResource.setResources(entities);
PublicActivityAwardResource parameters = new PublicActivityAwardResource();
parameters.setPublicActivityId(publicActivityId);
PagedRequest<PublicActivityAwardResource> pagedRequest = new PagedRequest<PublicActivityAwardResource>(
parameters, offset, limit, Collections.<OrderBy> emptyList());
// When
when(
facade.getResources(Matchers
.<PagedRequest<PublicActivityAwardResource>> any()))
.thenReturn(expectedResource);
String response = getJson(expectedResource, false);
// Then
mockMvc.perform(
get("/publicactivities/{publicActivityId}/awards",
publicActivityId).param("offset",
String.valueOf(offset)).param("limit",
String.valueOf(limit))).andExpect(status().isOk())
.andExpect(content().string(response));
verify(facade).getResources(pagedRequest);
}
@Test(expected = AccessDeniedException.class)
public void testGetResourceWithAccessDeniedException() throws Exception {
// Given
Long id = 1L;
Long publicActivityId = 2L;
// When
doThrow(AccessDeniedException.class).when(facade)
.getResource(anyLong());
// Then
mockMvc.perform(get("/publicactivities/{publicActivityId}/awards/{id}",
publicActivityId, id));
verify(facade).getResource(id);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.apache.geode.internal.InternalStatisticsDisabledException;
import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
// macros whose definition changes this class:
// disk: DISK
// lru: LRU
// stats: 1
// versioned: VERSIONED
// offheap: OFFHEAP
// One of the following key macros must be defined:
// key object: KEY_OBJECT
// key int: KEY_INT
// key long: KEY_LONG
// key uuid: KEY_UUID
// key string1: KEY_STRING1
// key string2: 1
/**
* Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
* ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
*/
public class VMStatsRegionEntryHeapStringKey2 extends VMStatsRegionEntryHeap {
public VMStatsRegionEntryHeapStringKey2(RegionEntryContext context, String key,
Object value
, boolean byteEncode
) {
super(context,
value
);
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// caller has already confirmed that key.length <= MAX_INLINE_STRING_KEY
long tmpBits1 = 0L;
long tmpBits2 = 0L;
if (byteEncode) {
for (int i = key.length() - 1; i >= 0; i--) {
// Note: we know each byte is <= 0x7f so the "& 0xff" is not needed. But I added it in to
// keep findbugs happy.
if (i < 7) {
tmpBits1 |= (byte) key.charAt(i) & 0xff;
tmpBits1 <<= 8;
} else {
tmpBits2 <<= 8;
tmpBits2 |= (byte) key.charAt(i) & 0xff;
}
}
tmpBits1 |= 1 << 6;
} else {
for (int i = key.length() - 1; i >= 0; i--) {
if (i < 3) {
tmpBits1 |= key.charAt(i);
tmpBits1 <<= 16;
} else {
tmpBits2 <<= 16;
tmpBits2 |= key.charAt(i);
}
}
}
tmpBits1 |= key.length();
this.bits1 = tmpBits1;
this.bits2 = tmpBits2;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// common code
protected int hash;
private HashEntry<Object, Object> next;
@SuppressWarnings("unused")
private volatile long lastModified;
private static final AtomicLongFieldUpdater<VMStatsRegionEntryHeapStringKey2> lastModifiedUpdater =
AtomicLongFieldUpdater.newUpdater(VMStatsRegionEntryHeapStringKey2.class, "lastModified");
private volatile Object value;
@Override
protected Object getValueField() {
return this.value;
}
@Override
protected void setValueField(Object v) {
this.value = v;
}
protected long getLastModifiedField() {
return lastModifiedUpdater.get(this);
}
protected boolean compareAndSetLastModifiedField(long expectedValue, long newValue) {
return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
}
/**
* @see HashEntry#getEntryHash()
*/
public int getEntryHash() {
return this.hash;
}
protected void setEntryHash(int v) {
this.hash = v;
}
/**
* @see HashEntry#getNextEntry()
*/
public HashEntry<Object, Object> getNextEntry() {
return this.next;
}
/**
* @see HashEntry#setNextEntry
*/
public void setNextEntry(final HashEntry<Object, Object> n) {
this.next = n;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// stats code
@Override
public void updateStatsForGet(boolean hit, long time) {
setLastAccessed(time);
if (hit) {
incrementHitCount();
} else {
incrementMissCount();
}
}
@Override
protected void setLastModifiedAndAccessedTimes(long lastModified, long lastAccessed) {
_setLastModified(lastModified);
if (!DISABLE_ACCESS_TIME_UPDATE_ON_PUT) {
setLastAccessed(lastAccessed);
}
}
private volatile long lastAccessed;
private volatile int hitCount;
private volatile int missCount;
private static final AtomicIntegerFieldUpdater<VMStatsRegionEntryHeapStringKey2> hitCountUpdater =
AtomicIntegerFieldUpdater.newUpdater(VMStatsRegionEntryHeapStringKey2.class, "hitCount");
private static final AtomicIntegerFieldUpdater<VMStatsRegionEntryHeapStringKey2> missCountUpdater =
AtomicIntegerFieldUpdater.newUpdater(VMStatsRegionEntryHeapStringKey2.class, "missCount");
@Override
public long getLastAccessed() throws InternalStatisticsDisabledException {
return this.lastAccessed;
}
private void setLastAccessed(long lastAccessed) {
this.lastAccessed = lastAccessed;
}
@Override
public long getHitCount() throws InternalStatisticsDisabledException {
return this.hitCount & 0xFFFFFFFFL;
}
@Override
public long getMissCount() throws InternalStatisticsDisabledException {
return this.missCount & 0xFFFFFFFFL;
}
private void incrementHitCount() {
hitCountUpdater.incrementAndGet(this);
}
private void incrementMissCount() {
missCountUpdater.incrementAndGet(this);
}
@Override
public void resetCounts() throws InternalStatisticsDisabledException {
hitCountUpdater.set(this, 0);
missCountUpdater.set(this, 0);
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public void txDidDestroy(long currTime) {
setLastModified(currTime);
setLastAccessed(currTime);
this.hitCount = 0;
this.missCount = 0;
}
@Override
public boolean hasStats() {
return true;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// key code
// strlen is encoded in lowest 6 bits (max strlen is 63)
// character encoding info is in bits 7 and 8
// The other bits are used to encode character data.
private final long bits1;
// bits2 encodes character data
private final long bits2;
private int getKeyLength() {
return (int) (this.bits1 & 0x003fL);
}
private int getEncoding() {
// 0 means encoded as char
// 1 means encoded as bytes that are all <= 0x7f;
return (int) (this.bits1 >> 6) & 0x03;
}
@Override
public Object getKey() {
int keylen = getKeyLength();
char[] chars = new char[keylen];
long tmpBits1 = this.bits1;
long tmpBits2 = this.bits2;
if (getEncoding() == 1) {
for (int i = 0; i < keylen; i++) {
if (i < 7) {
tmpBits1 >>= 8;
chars[i] = (char) (tmpBits1 & 0x00ff);
} else {
chars[i] = (char) (tmpBits2 & 0x00ff);
tmpBits2 >>= 8;
}
}
} else {
for (int i = 0; i < keylen; i++) {
if (i < 3) {
tmpBits1 >>= 16;
chars[i] = (char) (tmpBits1 & 0x00FFff);
} else {
chars[i] = (char) (tmpBits2 & 0x00FFff);
tmpBits2 >>= 16;
}
}
}
return new String(chars);
}
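// Worked example (illustrative comment, not generated code): for the key "a" with byteEncode == true
// the constructor yields bits1 = 0x6141: the byte 0x61 ('a') in the data bits, the byte-encoding
// flag 1 << 6 = 0x40, and the string length 1 in the low 6 bits. getKeyLength() then returns 1,
// getEncoding() returns 1, and getKey() shifts 0x61 back out to rebuild "a".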
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public boolean isKeyEqual(Object k) {
if (k instanceof String) {
String str = (String) k;
int keylen = getKeyLength();
if (str.length() == keylen) {
long tmpBits1 = this.bits1;
long tmpBits2 = this.bits2;
if (getEncoding() == 1) {
for (int i = 0; i < keylen; i++) {
char c;
if (i < 7) {
tmpBits1 >>= 8;
c = (char) (tmpBits1 & 0x00ff);
} else {
c = (char) (tmpBits2 & 0x00ff);
tmpBits2 >>= 8;
}
if (str.charAt(i) != c) {
return false;
}
}
} else {
for (int i = 0; i < keylen; i++) {
char c;
if (i < 3) {
tmpBits1 >>= 16;
c = (char) (tmpBits1 & 0x00FFff);
} else {
c = (char) (tmpBits2 & 0x00FFff);
tmpBits2 >>= 16;
}
if (str.charAt(i) != c) {
return false;
}
}
}
return true;
}
}
return false;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
|
|
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver11;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFPortStatsEntryVer11 implements OFPortStatsEntry {
private static final Logger logger = LoggerFactory.getLogger(OFPortStatsEntryVer11.class);
// version: 1.1
final static byte WIRE_VERSION = 2;
final static int LENGTH = 104;
private final static OFPort DEFAULT_PORT_NO = OFPort.ANY;
private final static U64 DEFAULT_RX_PACKETS = U64.ZERO;
private final static U64 DEFAULT_TX_PACKETS = U64.ZERO;
private final static U64 DEFAULT_RX_BYTES = U64.ZERO;
private final static U64 DEFAULT_TX_BYTES = U64.ZERO;
private final static U64 DEFAULT_RX_DROPPED = U64.ZERO;
private final static U64 DEFAULT_TX_DROPPED = U64.ZERO;
private final static U64 DEFAULT_RX_ERRORS = U64.ZERO;
private final static U64 DEFAULT_TX_ERRORS = U64.ZERO;
private final static U64 DEFAULT_RX_FRAME_ERR = U64.ZERO;
private final static U64 DEFAULT_RX_OVER_ERR = U64.ZERO;
private final static U64 DEFAULT_RX_CRC_ERR = U64.ZERO;
private final static U64 DEFAULT_COLLISIONS = U64.ZERO;
// OF message fields
private final OFPort portNo;
private final U64 rxPackets;
private final U64 txPackets;
private final U64 rxBytes;
private final U64 txBytes;
private final U64 rxDropped;
private final U64 txDropped;
private final U64 rxErrors;
private final U64 txErrors;
private final U64 rxFrameErr;
private final U64 rxOverErr;
private final U64 rxCrcErr;
private final U64 collisions;
//
// Immutable default instance
final static OFPortStatsEntryVer11 DEFAULT = new OFPortStatsEntryVer11(
DEFAULT_PORT_NO, DEFAULT_RX_PACKETS, DEFAULT_TX_PACKETS, DEFAULT_RX_BYTES, DEFAULT_TX_BYTES, DEFAULT_RX_DROPPED, DEFAULT_TX_DROPPED, DEFAULT_RX_ERRORS, DEFAULT_TX_ERRORS, DEFAULT_RX_FRAME_ERR, DEFAULT_RX_OVER_ERR, DEFAULT_RX_CRC_ERR, DEFAULT_COLLISIONS
);
// package private constructor - used by readers, builders, and factory
OFPortStatsEntryVer11(OFPort portNo, U64 rxPackets, U64 txPackets, U64 rxBytes, U64 txBytes, U64 rxDropped, U64 txDropped, U64 rxErrors, U64 txErrors, U64 rxFrameErr, U64 rxOverErr, U64 rxCrcErr, U64 collisions) {
if(portNo == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property portNo cannot be null");
}
if(rxPackets == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property rxPackets cannot be null");
}
if(txPackets == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property txPackets cannot be null");
}
if(rxBytes == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property rxBytes cannot be null");
}
if(txBytes == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property txBytes cannot be null");
}
if(rxDropped == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property rxDropped cannot be null");
}
if(txDropped == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property txDropped cannot be null");
}
if(rxErrors == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property rxErrors cannot be null");
}
if(txErrors == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property txErrors cannot be null");
}
if(rxFrameErr == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property rxFrameErr cannot be null");
}
if(rxOverErr == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property rxOverErr cannot be null");
}
if(rxCrcErr == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property rxCrcErr cannot be null");
}
if(collisions == null) {
throw new NullPointerException("OFPortStatsEntryVer11: property collisions cannot be null");
}
this.portNo = portNo;
this.rxPackets = rxPackets;
this.txPackets = txPackets;
this.rxBytes = rxBytes;
this.txBytes = txBytes;
this.rxDropped = rxDropped;
this.txDropped = txDropped;
this.rxErrors = rxErrors;
this.txErrors = txErrors;
this.rxFrameErr = rxFrameErr;
this.rxOverErr = rxOverErr;
this.rxCrcErr = rxCrcErr;
this.collisions = collisions;
}
// Accessors for OF message fields
@Override
public OFPort getPortNo() {
return portNo;
}
@Override
public U64 getRxPackets() {
return rxPackets;
}
@Override
public U64 getTxPackets() {
return txPackets;
}
@Override
public U64 getRxBytes() {
return rxBytes;
}
@Override
public U64 getTxBytes() {
return txBytes;
}
@Override
public U64 getRxDropped() {
return rxDropped;
}
@Override
public U64 getTxDropped() {
return txDropped;
}
@Override
public U64 getRxErrors() {
return rxErrors;
}
@Override
public U64 getTxErrors() {
return txErrors;
}
@Override
public U64 getRxFrameErr() {
return rxFrameErr;
}
@Override
public U64 getRxOverErr() {
return rxOverErr;
}
@Override
public U64 getRxCrcErr() {
return rxCrcErr;
}
@Override
public U64 getCollisions() {
return collisions;
}
@Override
public long getDurationSec()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationSec not supported in version 1.1");
}
@Override
public long getDurationNsec()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationNsec not supported in version 1.1");
}
@Override
public List<OFPortStatsProp> getProperties()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property properties not supported in version 1.1");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_11;
}
public OFPortStatsEntry.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFPortStatsEntry.Builder {
final OFPortStatsEntryVer11 parentMessage;
// OF message fields
private boolean portNoSet;
private OFPort portNo;
private boolean rxPacketsSet;
private U64 rxPackets;
private boolean txPacketsSet;
private U64 txPackets;
private boolean rxBytesSet;
private U64 rxBytes;
private boolean txBytesSet;
private U64 txBytes;
private boolean rxDroppedSet;
private U64 rxDropped;
private boolean txDroppedSet;
private U64 txDropped;
private boolean rxErrorsSet;
private U64 rxErrors;
private boolean txErrorsSet;
private U64 txErrors;
private boolean rxFrameErrSet;
private U64 rxFrameErr;
private boolean rxOverErrSet;
private U64 rxOverErr;
private boolean rxCrcErrSet;
private U64 rxCrcErr;
private boolean collisionsSet;
private U64 collisions;
BuilderWithParent(OFPortStatsEntryVer11 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFPort getPortNo() {
return portNo;
}
@Override
public OFPortStatsEntry.Builder setPortNo(OFPort portNo) {
this.portNo = portNo;
this.portNoSet = true;
return this;
}
@Override
public U64 getRxPackets() {
return rxPackets;
}
@Override
public OFPortStatsEntry.Builder setRxPackets(U64 rxPackets) {
this.rxPackets = rxPackets;
this.rxPacketsSet = true;
return this;
}
@Override
public U64 getTxPackets() {
return txPackets;
}
@Override
public OFPortStatsEntry.Builder setTxPackets(U64 txPackets) {
this.txPackets = txPackets;
this.txPacketsSet = true;
return this;
}
@Override
public U64 getRxBytes() {
return rxBytes;
}
@Override
public OFPortStatsEntry.Builder setRxBytes(U64 rxBytes) {
this.rxBytes = rxBytes;
this.rxBytesSet = true;
return this;
}
@Override
public U64 getTxBytes() {
return txBytes;
}
@Override
public OFPortStatsEntry.Builder setTxBytes(U64 txBytes) {
this.txBytes = txBytes;
this.txBytesSet = true;
return this;
}
@Override
public U64 getRxDropped() {
return rxDropped;
}
@Override
public OFPortStatsEntry.Builder setRxDropped(U64 rxDropped) {
this.rxDropped = rxDropped;
this.rxDroppedSet = true;
return this;
}
@Override
public U64 getTxDropped() {
return txDropped;
}
@Override
public OFPortStatsEntry.Builder setTxDropped(U64 txDropped) {
this.txDropped = txDropped;
this.txDroppedSet = true;
return this;
}
@Override
public U64 getRxErrors() {
return rxErrors;
}
@Override
public OFPortStatsEntry.Builder setRxErrors(U64 rxErrors) {
this.rxErrors = rxErrors;
this.rxErrorsSet = true;
return this;
}
@Override
public U64 getTxErrors() {
return txErrors;
}
@Override
public OFPortStatsEntry.Builder setTxErrors(U64 txErrors) {
this.txErrors = txErrors;
this.txErrorsSet = true;
return this;
}
@Override
public U64 getRxFrameErr() {
return rxFrameErr;
}
@Override
public OFPortStatsEntry.Builder setRxFrameErr(U64 rxFrameErr) {
this.rxFrameErr = rxFrameErr;
this.rxFrameErrSet = true;
return this;
}
@Override
public U64 getRxOverErr() {
return rxOverErr;
}
@Override
public OFPortStatsEntry.Builder setRxOverErr(U64 rxOverErr) {
this.rxOverErr = rxOverErr;
this.rxOverErrSet = true;
return this;
}
@Override
public U64 getRxCrcErr() {
return rxCrcErr;
}
@Override
public OFPortStatsEntry.Builder setRxCrcErr(U64 rxCrcErr) {
this.rxCrcErr = rxCrcErr;
this.rxCrcErrSet = true;
return this;
}
@Override
public U64 getCollisions() {
return collisions;
}
@Override
public OFPortStatsEntry.Builder setCollisions(U64 collisions) {
this.collisions = collisions;
this.collisionsSet = true;
return this;
}
@Override
public long getDurationSec()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationSec not supported in version 1.1");
}
@Override
public OFPortStatsEntry.Builder setDurationSec(long durationSec) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationSec not supported in version 1.1");
}
@Override
public long getDurationNsec()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationNsec not supported in version 1.1");
}
@Override
public OFPortStatsEntry.Builder setDurationNsec(long durationNsec) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationNsec not supported in version 1.1");
}
@Override
public List<OFPortStatsProp> getProperties()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property properties not supported in version 1.1");
}
@Override
public OFPortStatsEntry.Builder setProperties(List<OFPortStatsProp> properties) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property properties not supported in version 1.1");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_11;
}
@Override
public OFPortStatsEntry build() {
OFPort portNo = this.portNoSet ? this.portNo : parentMessage.portNo;
if(portNo == null)
throw new NullPointerException("Property portNo must not be null");
U64 rxPackets = this.rxPacketsSet ? this.rxPackets : parentMessage.rxPackets;
if(rxPackets == null)
throw new NullPointerException("Property rxPackets must not be null");
U64 txPackets = this.txPacketsSet ? this.txPackets : parentMessage.txPackets;
if(txPackets == null)
throw new NullPointerException("Property txPackets must not be null");
U64 rxBytes = this.rxBytesSet ? this.rxBytes : parentMessage.rxBytes;
if(rxBytes == null)
throw new NullPointerException("Property rxBytes must not be null");
U64 txBytes = this.txBytesSet ? this.txBytes : parentMessage.txBytes;
if(txBytes == null)
throw new NullPointerException("Property txBytes must not be null");
U64 rxDropped = this.rxDroppedSet ? this.rxDropped : parentMessage.rxDropped;
if(rxDropped == null)
throw new NullPointerException("Property rxDropped must not be null");
U64 txDropped = this.txDroppedSet ? this.txDropped : parentMessage.txDropped;
if(txDropped == null)
throw new NullPointerException("Property txDropped must not be null");
U64 rxErrors = this.rxErrorsSet ? this.rxErrors : parentMessage.rxErrors;
if(rxErrors == null)
throw new NullPointerException("Property rxErrors must not be null");
U64 txErrors = this.txErrorsSet ? this.txErrors : parentMessage.txErrors;
if(txErrors == null)
throw new NullPointerException("Property txErrors must not be null");
U64 rxFrameErr = this.rxFrameErrSet ? this.rxFrameErr : parentMessage.rxFrameErr;
if(rxFrameErr == null)
throw new NullPointerException("Property rxFrameErr must not be null");
U64 rxOverErr = this.rxOverErrSet ? this.rxOverErr : parentMessage.rxOverErr;
if(rxOverErr == null)
throw new NullPointerException("Property rxOverErr must not be null");
U64 rxCrcErr = this.rxCrcErrSet ? this.rxCrcErr : parentMessage.rxCrcErr;
if(rxCrcErr == null)
throw new NullPointerException("Property rxCrcErr must not be null");
U64 collisions = this.collisionsSet ? this.collisions : parentMessage.collisions;
if(collisions == null)
throw new NullPointerException("Property collisions must not be null");
//
return new OFPortStatsEntryVer11(
portNo,
rxPackets,
txPackets,
rxBytes,
txBytes,
rxDropped,
txDropped,
rxErrors,
txErrors,
rxFrameErr,
rxOverErr,
rxCrcErr,
collisions
);
}
}
static class Builder implements OFPortStatsEntry.Builder {
// OF message fields
private boolean portNoSet;
private OFPort portNo;
private boolean rxPacketsSet;
private U64 rxPackets;
private boolean txPacketsSet;
private U64 txPackets;
private boolean rxBytesSet;
private U64 rxBytes;
private boolean txBytesSet;
private U64 txBytes;
private boolean rxDroppedSet;
private U64 rxDropped;
private boolean txDroppedSet;
private U64 txDropped;
private boolean rxErrorsSet;
private U64 rxErrors;
private boolean txErrorsSet;
private U64 txErrors;
private boolean rxFrameErrSet;
private U64 rxFrameErr;
private boolean rxOverErrSet;
private U64 rxOverErr;
private boolean rxCrcErrSet;
private U64 rxCrcErr;
private boolean collisionsSet;
private U64 collisions;
@Override
public OFPort getPortNo() {
return portNo;
}
@Override
public OFPortStatsEntry.Builder setPortNo(OFPort portNo) {
this.portNo = portNo;
this.portNoSet = true;
return this;
}
@Override
public U64 getRxPackets() {
return rxPackets;
}
@Override
public OFPortStatsEntry.Builder setRxPackets(U64 rxPackets) {
this.rxPackets = rxPackets;
this.rxPacketsSet = true;
return this;
}
@Override
public U64 getTxPackets() {
return txPackets;
}
@Override
public OFPortStatsEntry.Builder setTxPackets(U64 txPackets) {
this.txPackets = txPackets;
this.txPacketsSet = true;
return this;
}
@Override
public U64 getRxBytes() {
return rxBytes;
}
@Override
public OFPortStatsEntry.Builder setRxBytes(U64 rxBytes) {
this.rxBytes = rxBytes;
this.rxBytesSet = true;
return this;
}
@Override
public U64 getTxBytes() {
return txBytes;
}
@Override
public OFPortStatsEntry.Builder setTxBytes(U64 txBytes) {
this.txBytes = txBytes;
this.txBytesSet = true;
return this;
}
@Override
public U64 getRxDropped() {
return rxDropped;
}
@Override
public OFPortStatsEntry.Builder setRxDropped(U64 rxDropped) {
this.rxDropped = rxDropped;
this.rxDroppedSet = true;
return this;
}
@Override
public U64 getTxDropped() {
return txDropped;
}
@Override
public OFPortStatsEntry.Builder setTxDropped(U64 txDropped) {
this.txDropped = txDropped;
this.txDroppedSet = true;
return this;
}
@Override
public U64 getRxErrors() {
return rxErrors;
}
@Override
public OFPortStatsEntry.Builder setRxErrors(U64 rxErrors) {
this.rxErrors = rxErrors;
this.rxErrorsSet = true;
return this;
}
@Override
public U64 getTxErrors() {
return txErrors;
}
@Override
public OFPortStatsEntry.Builder setTxErrors(U64 txErrors) {
this.txErrors = txErrors;
this.txErrorsSet = true;
return this;
}
@Override
public U64 getRxFrameErr() {
return rxFrameErr;
}
@Override
public OFPortStatsEntry.Builder setRxFrameErr(U64 rxFrameErr) {
this.rxFrameErr = rxFrameErr;
this.rxFrameErrSet = true;
return this;
}
@Override
public U64 getRxOverErr() {
return rxOverErr;
}
@Override
public OFPortStatsEntry.Builder setRxOverErr(U64 rxOverErr) {
this.rxOverErr = rxOverErr;
this.rxOverErrSet = true;
return this;
}
@Override
public U64 getRxCrcErr() {
return rxCrcErr;
}
@Override
public OFPortStatsEntry.Builder setRxCrcErr(U64 rxCrcErr) {
this.rxCrcErr = rxCrcErr;
this.rxCrcErrSet = true;
return this;
}
@Override
public U64 getCollisions() {
return collisions;
}
@Override
public OFPortStatsEntry.Builder setCollisions(U64 collisions) {
this.collisions = collisions;
this.collisionsSet = true;
return this;
}
@Override
public long getDurationSec()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationSec not supported in version 1.1");
}
@Override
public OFPortStatsEntry.Builder setDurationSec(long durationSec) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationSec not supported in version 1.1");
}
@Override
public long getDurationNsec()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationNsec not supported in version 1.1");
}
@Override
public OFPortStatsEntry.Builder setDurationNsec(long durationNsec) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property durationNsec not supported in version 1.1");
}
@Override
public List<OFPortStatsProp> getProperties()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property properties not supported in version 1.1");
}
@Override
public OFPortStatsEntry.Builder setProperties(List<OFPortStatsProp> properties) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property properties not supported in version 1.1");
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_11;
}
//
@Override
public OFPortStatsEntry build() {
OFPort portNo = this.portNoSet ? this.portNo : DEFAULT_PORT_NO;
if(portNo == null)
throw new NullPointerException("Property portNo must not be null");
U64 rxPackets = this.rxPacketsSet ? this.rxPackets : DEFAULT_RX_PACKETS;
if(rxPackets == null)
throw new NullPointerException("Property rxPackets must not be null");
U64 txPackets = this.txPacketsSet ? this.txPackets : DEFAULT_TX_PACKETS;
if(txPackets == null)
throw new NullPointerException("Property txPackets must not be null");
U64 rxBytes = this.rxBytesSet ? this.rxBytes : DEFAULT_RX_BYTES;
if(rxBytes == null)
throw new NullPointerException("Property rxBytes must not be null");
U64 txBytes = this.txBytesSet ? this.txBytes : DEFAULT_TX_BYTES;
if(txBytes == null)
throw new NullPointerException("Property txBytes must not be null");
U64 rxDropped = this.rxDroppedSet ? this.rxDropped : DEFAULT_RX_DROPPED;
if(rxDropped == null)
throw new NullPointerException("Property rxDropped must not be null");
U64 txDropped = this.txDroppedSet ? this.txDropped : DEFAULT_TX_DROPPED;
if(txDropped == null)
throw new NullPointerException("Property txDropped must not be null");
U64 rxErrors = this.rxErrorsSet ? this.rxErrors : DEFAULT_RX_ERRORS;
if(rxErrors == null)
throw new NullPointerException("Property rxErrors must not be null");
U64 txErrors = this.txErrorsSet ? this.txErrors : DEFAULT_TX_ERRORS;
if(txErrors == null)
throw new NullPointerException("Property txErrors must not be null");
U64 rxFrameErr = this.rxFrameErrSet ? this.rxFrameErr : DEFAULT_RX_FRAME_ERR;
if(rxFrameErr == null)
throw new NullPointerException("Property rxFrameErr must not be null");
U64 rxOverErr = this.rxOverErrSet ? this.rxOverErr : DEFAULT_RX_OVER_ERR;
if(rxOverErr == null)
throw new NullPointerException("Property rxOverErr must not be null");
U64 rxCrcErr = this.rxCrcErrSet ? this.rxCrcErr : DEFAULT_RX_CRC_ERR;
if(rxCrcErr == null)
throw new NullPointerException("Property rxCrcErr must not be null");
U64 collisions = this.collisionsSet ? this.collisions : DEFAULT_COLLISIONS;
if(collisions == null)
throw new NullPointerException("Property collisions must not be null");
return new OFPortStatsEntryVer11(
portNo,
rxPackets,
txPackets,
rxBytes,
txBytes,
rxDropped,
txDropped,
rxErrors,
txErrors,
rxFrameErr,
rxOverErr,
rxCrcErr,
collisions
);
}
}
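// Hedged usage sketch: callers would normally obtain this builder from the version-specific
// factory rather than instantiating it directly. The OFFactories/buildPortStatsEntry entry
// point shown here is assumed from the surrounding library and is for illustration only:
//
//   OFPortStatsEntry entry = OFFactories.getFactory(OFVersion.OF_11)
//       .buildPortStatsEntry()
//       .setPortNo(OFPort.of(1))
//       .setRxBytes(U64.of(1024))
//       .build();
//
// Unset counters fall back to the DEFAULT_* constants inside build().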
final static Reader READER = new Reader();
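// Reads one fixed-size OF 1.1 port-stats entry from the buffer: a 4-byte port number,
// 4 bytes of padding, then twelve 64-bit counters (104 bytes in total).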
static class Reader implements OFMessageReader<OFPortStatsEntry> {
@Override
public OFPortStatsEntry readFrom(ByteBuf bb) throws OFParseError {
OFPort portNo = OFPort.read4Bytes(bb);
// pad: 4 bytes
bb.skipBytes(4);
U64 rxPackets = U64.ofRaw(bb.readLong());
U64 txPackets = U64.ofRaw(bb.readLong());
U64 rxBytes = U64.ofRaw(bb.readLong());
U64 txBytes = U64.ofRaw(bb.readLong());
U64 rxDropped = U64.ofRaw(bb.readLong());
U64 txDropped = U64.ofRaw(bb.readLong());
U64 rxErrors = U64.ofRaw(bb.readLong());
U64 txErrors = U64.ofRaw(bb.readLong());
U64 rxFrameErr = U64.ofRaw(bb.readLong());
U64 rxOverErr = U64.ofRaw(bb.readLong());
U64 rxCrcErr = U64.ofRaw(bb.readLong());
U64 collisions = U64.ofRaw(bb.readLong());
OFPortStatsEntryVer11 portStatsEntryVer11 = new OFPortStatsEntryVer11(
portNo,
rxPackets,
txPackets,
rxBytes,
txBytes,
rxDropped,
txDropped,
rxErrors,
txErrors,
rxFrameErr,
rxOverErr,
rxCrcErr,
collisions
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", portStatsEntryVer11);
return portStatsEntryVer11;
}
}
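// Feeds every field of this entry into the supplied PrimitiveSink so hash functions built
// on the Funnel contract see a stable representation (the padding bytes are skipped).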
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFPortStatsEntryVer11Funnel FUNNEL = new OFPortStatsEntryVer11Funnel();
static class OFPortStatsEntryVer11Funnel implements Funnel<OFPortStatsEntryVer11> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFPortStatsEntryVer11 message, PrimitiveSink sink) {
message.portNo.putTo(sink);
// skip pad (4 bytes)
message.rxPackets.putTo(sink);
message.txPackets.putTo(sink);
message.rxBytes.putTo(sink);
message.txBytes.putTo(sink);
message.rxDropped.putTo(sink);
message.txDropped.putTo(sink);
message.rxErrors.putTo(sink);
message.txErrors.putTo(sink);
message.rxFrameErr.putTo(sink);
message.rxOverErr.putTo(sink);
message.rxCrcErr.putTo(sink);
message.collisions.putTo(sink);
}
}
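// Serializes this entry back to the buffer in the same layout the Reader consumes:
// 4-byte port number, 4 zero padding bytes, then the twelve 64-bit counters.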
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFPortStatsEntryVer11> {
@Override
public void write(ByteBuf bb, OFPortStatsEntryVer11 message) {
message.portNo.write4Bytes(bb);
// pad: 4 bytes
bb.writeZero(4);
bb.writeLong(message.rxPackets.getValue());
bb.writeLong(message.txPackets.getValue());
bb.writeLong(message.rxBytes.getValue());
bb.writeLong(message.txBytes.getValue());
bb.writeLong(message.rxDropped.getValue());
bb.writeLong(message.txDropped.getValue());
bb.writeLong(message.rxErrors.getValue());
bb.writeLong(message.txErrors.getValue());
bb.writeLong(message.rxFrameErr.getValue());
bb.writeLong(message.rxOverErr.getValue());
bb.writeLong(message.rxCrcErr.getValue());
bb.writeLong(message.collisions.getValue());
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFPortStatsEntryVer11(");
b.append("portNo=").append(portNo);
b.append(", ");
b.append("rxPackets=").append(rxPackets);
b.append(", ");
b.append("txPackets=").append(txPackets);
b.append(", ");
b.append("rxBytes=").append(rxBytes);
b.append(", ");
b.append("txBytes=").append(txBytes);
b.append(", ");
b.append("rxDropped=").append(rxDropped);
b.append(", ");
b.append("txDropped=").append(txDropped);
b.append(", ");
b.append("rxErrors=").append(rxErrors);
b.append(", ");
b.append("txErrors=").append(txErrors);
b.append(", ");
b.append("rxFrameErr=").append(rxFrameErr);
b.append(", ");
b.append("rxOverErr=").append(rxOverErr);
b.append(", ");
b.append("rxCrcErr=").append(rxCrcErr);
b.append(", ");
b.append("collisions=").append(collisions);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFPortStatsEntryVer11 other = (OFPortStatsEntryVer11) obj;
if (portNo == null) {
if (other.portNo != null)
return false;
} else if (!portNo.equals(other.portNo))
return false;
if (rxPackets == null) {
if (other.rxPackets != null)
return false;
} else if (!rxPackets.equals(other.rxPackets))
return false;
if (txPackets == null) {
if (other.txPackets != null)
return false;
} else if (!txPackets.equals(other.txPackets))
return false;
if (rxBytes == null) {
if (other.rxBytes != null)
return false;
} else if (!rxBytes.equals(other.rxBytes))
return false;
if (txBytes == null) {
if (other.txBytes != null)
return false;
} else if (!txBytes.equals(other.txBytes))
return false;
if (rxDropped == null) {
if (other.rxDropped != null)
return false;
} else if (!rxDropped.equals(other.rxDropped))
return false;
if (txDropped == null) {
if (other.txDropped != null)
return false;
} else if (!txDropped.equals(other.txDropped))
return false;
if (rxErrors == null) {
if (other.rxErrors != null)
return false;
} else if (!rxErrors.equals(other.rxErrors))
return false;
if (txErrors == null) {
if (other.txErrors != null)
return false;
} else if (!txErrors.equals(other.txErrors))
return false;
if (rxFrameErr == null) {
if (other.rxFrameErr != null)
return false;
} else if (!rxFrameErr.equals(other.rxFrameErr))
return false;
if (rxOverErr == null) {
if (other.rxOverErr != null)
return false;
} else if (!rxOverErr.equals(other.rxOverErr))
return false;
if (rxCrcErr == null) {
if (other.rxCrcErr != null)
return false;
} else if (!rxCrcErr.equals(other.rxCrcErr))
return false;
if (collisions == null) {
if (other.collisions != null)
return false;
} else if (!collisions.equals(other.collisions))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((portNo == null) ? 0 : portNo.hashCode());
result = prime * result + ((rxPackets == null) ? 0 : rxPackets.hashCode());
result = prime * result + ((txPackets == null) ? 0 : txPackets.hashCode());
result = prime * result + ((rxBytes == null) ? 0 : rxBytes.hashCode());
result = prime * result + ((txBytes == null) ? 0 : txBytes.hashCode());
result = prime * result + ((rxDropped == null) ? 0 : rxDropped.hashCode());
result = prime * result + ((txDropped == null) ? 0 : txDropped.hashCode());
result = prime * result + ((rxErrors == null) ? 0 : rxErrors.hashCode());
result = prime * result + ((txErrors == null) ? 0 : txErrors.hashCode());
result = prime * result + ((rxFrameErr == null) ? 0 : rxFrameErr.hashCode());
result = prime * result + ((rxOverErr == null) ? 0 : rxOverErr.hashCode());
result = prime * result + ((rxCrcErr == null) ? 0 : rxCrcErr.hashCode());
result = prime * result + ((collisions == null) ? 0 : collisions.hashCode());
return result;
}
}