gt
stringclasses 1
value | context
stringlengths 2.05k
161k
|
---|---|
package com.linkedin.thirdeye.client;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheBuilderSpec;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.linkedin.thirdeye.api.DimensionKey;
import com.linkedin.thirdeye.api.MetricTimeSeries;
import com.linkedin.thirdeye.api.MetricType;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.joda.time.DateTime;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
/**
 * HTTP-based ThirdEye client that executes generated SQL against a ThirdEye
 * server's {@code /query} resource and caches both query results and
 * per-collection metric schemas.
 */
public class DefaultThirdEyeClient implements ThirdEyeClient {
  private static final Logger LOG = LoggerFactory.getLogger(DefaultThirdEyeClient.class);
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  private final HttpHost httpHost;
  private final CloseableHttpClient httpClient;
  // Query results keyed by (collection, sql); expiry configured via DefaultThirdEyeClientConfig.
  private final LoadingCache<QuerySpec, Map<DimensionKey, MetricTimeSeries>> resultCache;
  // metric name -> type per collection; effectively never expires.
  private final LoadingCache<String, Map<String, MetricType>> schemaCache;

  public DefaultThirdEyeClient(String hostname, int port) {
    this(hostname, port, new DefaultThirdEyeClientConfig());
  }

  /**
   * Creates a client for the given host/port.
   *
   * @param config controls result-cache expiry (after access vs. after write)
   */
  public DefaultThirdEyeClient(String hostname, int port, DefaultThirdEyeClientConfig config) {
    this.httpHost = new HttpHost(hostname, port);
    this.httpClient = HttpClients.createDefault();
    // Typed builder: avoids the raw-type unchecked build() call (and the
    // class-wide @SuppressWarnings the raw CacheBuilder previously required).
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    if (config.isExpireAfterAccess()) {
      builder.expireAfterAccess(config.getExpirationTime(), config.getExpirationUnit());
    } else {
      builder.expireAfterWrite(config.getExpirationTime(), config.getExpirationUnit());
    }
    this.resultCache = builder.build(new ResultCacheLoader());
    this.schemaCache = CacheBuilder.newBuilder()
        .expireAfterWrite(Long.MAX_VALUE, TimeUnit.MILLISECONDS) // never
        .build(new SchemaCacheLoader());
    LOG.info("Created DefaultThirdEyeClient to {}", httpHost);
  }

  /**
   * Executes the request, returning (and caching) the parsed time series keyed
   * by dimension combination.
   */
  @Override
  public Map<DimensionKey, MetricTimeSeries> execute(ThirdEyeRequest request) throws Exception {
    String sql = request.toSql(); // generate once; reused for both cache key and log
    LOG.debug("Generated SQL {}", sql);
    return resultCache.get(new QuerySpec(request.getCollection(), sql));
  }

  @Override
  public void close() throws Exception {
    httpClient.close();
  }

  /**
   * Executes SQL statements against the /query resource.
   */
  private class ResultCacheLoader extends CacheLoader<QuerySpec, Map<DimensionKey, MetricTimeSeries>> {
    @Override
    public Map<DimensionKey, MetricTimeSeries> load(QuerySpec querySpec) throws Exception {
      HttpGet req = new HttpGet("/query/" + URLEncoder.encode(querySpec.getSql(), "UTF-8"));
      CloseableHttpResponse res = httpClient.execute(httpHost, req);
      try {
        if (res.getStatusLine().getStatusCode() != 200) {
          throw new IllegalStateException(res.getStatusLine().toString());
        }
        // Parse response
        InputStream content = res.getEntity().getContent();
        ThirdEyeRawResponse rawResponse = OBJECT_MAPPER.readValue(content, ThirdEyeRawResponse.class);
        // Figure out the metric types of the projection
        Map<String, MetricType> metricTypes = schemaCache.get(querySpec.getCollection());
        List<MetricType> projectionTypes = new ArrayList<>();
        for (String metricName : rawResponse.getMetrics()) {
          MetricType metricType = metricTypes.get(metricName);
          if (metricType == null) { // could be derived
            metricType = MetricType.DOUBLE;
          }
          // Bug fix: previously added metricTypes.get(metricName) here, which
          // bypassed the DOUBLE fallback above and could insert null.
          projectionTypes.add(metricType);
        }
        return rawResponse.convert(projectionTypes);
      } finally {
        // Consume the entity so the connection can be reused, then close.
        if (res.getEntity() != null) {
          EntityUtils.consume(res.getEntity());
        }
        res.close();
      }
    }
  }

  /**
   * Loads the metric-name-to-type schema for a collection from /collections.
   */
  private class SchemaCacheLoader extends CacheLoader<String, Map<String, MetricType>> {
    @Override
    public Map<String, MetricType> load(String collection) throws Exception {
      HttpGet req = new HttpGet("/collections/" + URLEncoder.encode(collection, "UTF-8"));
      CloseableHttpResponse res = httpClient.execute(httpHost, req);
      try {
        if (res.getStatusLine().getStatusCode() != 200) {
          throw new IllegalStateException(res.getStatusLine().toString());
        }
        InputStream content = res.getEntity().getContent();
        JsonNode json = OBJECT_MAPPER.readTree(content);
        Map<String, MetricType> metricTypes = new HashMap<>();
        for (JsonNode metricSpec : json.get("metrics")) {
          String metricName = metricSpec.get("name").asText();
          MetricType metricType = MetricType.valueOf(metricSpec.get("type").asText());
          metricTypes.put(metricName, metricType);
        }
        LOG.info("Cached metric types for {}: {}", collection, metricTypes);
        return metricTypes;
      } finally {
        // Consume the entity so the connection can be reused, then close.
        if (res.getEntity() != null) {
          EntityUtils.consume(res.getEntity());
        }
        res.close();
      }
    }
  }

  /** Immutable cache key: a (collection, sql) pair. */
  private static class QuerySpec {
    private final String collection;
    private final String sql;

    QuerySpec(String collection, String sql) {
      this.collection = collection;
      this.sql = sql;
    }

    public String getCollection() {
      return collection;
    }

    public String getSql() {
      return sql;
    }

    @Override
    public boolean equals(Object o) {
      if (!(o instanceof QuerySpec)) {
        return false;
      }
      QuerySpec s = (QuerySpec) o;
      return Objects.equals(sql, s.getSql()) && Objects.equals(collection, s.getCollection());
    }

    @Override
    public int hashCode() {
      return Objects.hash(sql, collection);
    }
  }

  /** Simple CLI for ad-hoc queries: host port collection metricFunction startTime endTime [groupBy]. */
  public static void main(String[] args) throws Exception {
    if (args.length != 6 && args.length != 7) {
      throw new IllegalArgumentException("usage: host port collection metricFunction startTime endTime [groupBy]");
    }
    String host = args[0];
    int port = Integer.parseInt(args[1]); // parseInt avoids needless boxing
    String collection = args[2];
    String metricFunction = args[3];
    DateTime startTime = ISODateTimeFormat.dateTimeParser().parseDateTime(args[4]);
    DateTime endTime = ISODateTimeFormat.dateTimeParser().parseDateTime(args[5]);
    ThirdEyeRequest request = new ThirdEyeRequest()
        .setCollection(collection)
        .setStartTime(startTime)
        .setEndTime(endTime)
        .setMetricFunction(metricFunction);
    if (args.length == 7) {
      request.setGroupBy(args[6]);
    }
    ThirdEyeClient client = new DefaultThirdEyeClient(host, port);
    try {
      Map<DimensionKey, MetricTimeSeries> result = client.execute(request);
      for (Map.Entry<DimensionKey, MetricTimeSeries> entry : result.entrySet()) {
        System.out.println(entry.getKey() + " #=> " + entry.getValue());
      }
    } finally {
      client.close();
    }
  }
}
|
|
/*
* Copyright (c) 2016 JFoenix
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.jfoenix.controls;
import com.jfoenix.animation.alert.JFXAlertAnimation;
import com.jfoenix.assets.JFoenixResources;
import com.jfoenix.effects.JFXDepthManager;
import com.sun.javafx.event.EventHandlerManager;
import javafx.animation.Animation;
import javafx.application.Platform;
import javafx.beans.InvalidationListener;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.event.Event;
import javafx.event.EventDispatchChain;
import javafx.geometry.HPos;
import javafx.geometry.Insets;
import javafx.geometry.VPos;
import javafx.scene.Node;
import javafx.scene.control.ButtonType;
import javafx.scene.control.Dialog;
import javafx.scene.control.DialogEvent;
import javafx.scene.control.DialogPane;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.stage.StageStyle;
import javafx.stage.Window;
import java.util.List;
/**
* JFXAlert is similar to {@link JFXDialog} control, however it extends JavaFX {@link Dialog}
* control, thus it support modality options and doesn't require a parent to be specified
* unlike {@link JFXDialog}
*
* @author Shadi Shaheen
* @version 1.0
* @since 2017-05-26
*/
public class JFXAlert<R> extends Dialog<R> {

    // Container that holds the user-supplied content (see setContent).
    private final StackPane contentContainer;

    // Listeners that keep the dialog bounds in sync with the owner window.
    // Only initialized when an owner window is passed to the constructor;
    // addLayoutListeners() relies on that to reject late initOwner() calls.
    private InvalidationListener widthListener;
    private InvalidationListener heightListener;
    private InvalidationListener xListener;
    private InvalidationListener yListener;

    // True while a close request should be intercepted to play the hiding
    // animation first; hideWithAnimation() clears it before calling hide().
    private boolean animateClosing = true;

    /** Creates an alert with no owner window. */
    public JFXAlert() {
        this(null);
    }

    /**
     * Creates an alert owned by the given window; the alert overlay is sized
     * and positioned to cover the owner.
     *
     * @param window owner window, may be null
     */
    public JFXAlert(Window window) {
        // create content
        contentContainer = new StackPane();
        contentContainer.getStyleClass().add("jfx-alert-content-container");
        // add depth effect
        final Node materialNode = JFXDepthManager.createMaterialNode(contentContainer, 2);
        materialNode.setPickOnBounds(false);
        // consume clicks on the content so they don't reach the overlay-close
        // handler registered on the dialog pane below
        materialNode.addEventHandler(MouseEvent.MOUSE_CLICKED, Event::consume);
        // create custom dialog pane (will layout children in center)
        final DialogPane dialogPane = new DialogPane() {
            private boolean performingLayout = false;

            {
                // a hidden CLOSE button is added so Dialog can be closed
                // programmatically (Dialog requires at least one button type)
                getButtonTypes().add(ButtonType.CLOSE);
                Node closeButton = this.lookupButton(ButtonType.CLOSE);
                closeButton.managedProperty().bind(closeButton.visibleProperty());
                closeButton.setVisible(false);
            }

            // Prefer the owner window's size so the overlay covers it fully.
            @Override
            protected double computePrefHeight(double width) {
                Window owner = getOwner();
                if (owner != null) {
                    return owner.getHeight();
                } else {
                    return super.computePrefHeight(width);
                }
            }

            @Override
            protected double computePrefWidth(double height) {
                Window owner = getOwner();
                if (owner != null) {
                    return owner.getWidth();
                } else {
                    return super.computePrefWidth(height);
                }
            }

            @Override
            public void requestLayout() {
                // ignore layout requests triggered while laying out children
                if (performingLayout) {
                    return;
                }
                super.requestLayout();
            }

            // Centers every managed child within the pane's content area.
            @Override
            protected void layoutChildren() {
                performingLayout = true;
                List<Node> managed = getManagedChildren();
                final double width = getWidth();
                double height = getHeight();
                double top = getInsets().getTop();
                double right = getInsets().getRight();
                double left = getInsets().getLeft();
                double bottom = getInsets().getBottom();
                double contentWidth = width - left - right;
                double contentHeight = height - top - bottom;
                for (Node child : managed) {
                    layoutInArea(child, left, top, contentWidth, contentHeight,
                        0, Insets.EMPTY, HPos.CENTER, VPos.CENTER);
                }
                performingLayout = false;
            }

            // NOTE(review): declared without @Override — presumably matches the
            // targeted JavaFX version's Region.getUserAgentStylesheet signature;
            // confirm against the JavaFX version in use.
            public String getUserAgentStylesheet() {
                return JFoenixResources.load("css/controls/jfx-alert.css").toExternalForm();
            }

            // No button bar: the alert renders only its content node.
            @Override
            protected Node createButtonBar() {
                return null;
            }
        };
        dialogPane.getStyleClass().add("jfx-alert-overlay");
        dialogPane.setContent(materialNode);
        setDialogPane(dialogPane);
        dialogPane.getScene().setFill(Color.TRANSPARENT);

        if (window != null) {
            // set the window to transparent
            initStyle(StageStyle.TRANSPARENT);
            initOwner(window);
            // init style for overlay
            dialogPane.addEventHandler(MouseEvent.MOUSE_CLICKED, event -> {
                if (this.isOverlayClose()) {
                    hide();
                }
            });
            // bind dialog position to window position
            widthListener = observable -> updateWidth();
            heightListener = observable -> updateHeight();
            xListener = observable -> updateX();
            yListener = observable -> updateY();
        }

        // handle animation / owner window layout changes
        eventHandlerManager.addEventHandler(DialogEvent.DIALOG_SHOWING, event -> {
            addLayoutListeners();
            JFXAlertAnimation currentAnimation = getCurrentAnimation();
            currentAnimation.initAnimation(contentContainer.getParent(), dialogPane);
        });
        eventHandlerManager.addEventHandler(DialogEvent.DIALOG_SHOWN, event -> {
            if (getOwner() != null) {
                updateLayout();
            }
            // re-arm animated closing for this showing cycle
            animateClosing = true;
            JFXAlertAnimation currentAnimation = getCurrentAnimation();
            Animation animation = currentAnimation.createShowingAnimation(dialogPane.getContent(), dialogPane);
            if (animation != null) {
                animation.play();
            }
        });
        eventHandlerManager.addEventHandler(DialogEvent.DIALOG_CLOSE_REQUEST, event -> {
            // intercept the first close request so the hiding animation can run;
            // hideWithAnimation() clears animateClosing and calls hide() again
            if (animateClosing) {
                event.consume();
                hideWithAnimation();
            }
        });
        eventHandlerManager.addEventHandler(DialogEvent.DIALOG_HIDDEN, event -> removeLayoutListeners());

        // swallow ESC key presses when hideOnEscape is disabled
        getDialogPane().getScene().getWindow().addEventFilter(KeyEvent.KEY_PRESSED, keyEvent -> {
            if (keyEvent.getCode() == KeyCode.ESCAPE) {
                if (!isHideOnEscape()) {
                    keyEvent.consume();
                }
            }
        });
    }

    // this method ensure not null value for current animation
    private JFXAlertAnimation getCurrentAnimation() {
        JFXAlertAnimation usedAnimation = getAnimation();
        usedAnimation = usedAnimation == null ? JFXAlertAnimation.NO_ANIMATION : usedAnimation;
        return usedAnimation;
    }

    // Detaches the owner-window layout listeners (called when the dialog hides).
    private void removeLayoutListeners() {
        Window stage = getOwner();
        if (stage != null) {
            stage.getScene().widthProperty().removeListener(widthListener);
            stage.getScene().heightProperty().removeListener(heightListener);
            stage.xProperty().removeListener(xListener);
            stage.yProperty().removeListener(yListener);
        }
    }

    // Attaches the owner-window layout listeners (called when the dialog shows).
    private void addLayoutListeners() {
        Window stage = getOwner();
        if (stage != null) {
            // listeners are only created in the constructor, so a non-null owner
            // with null listeners means initOwner() was called externally
            if (widthListener == null) {
                throw new RuntimeException("Owner can only be set using the constructor");
            }
            stage.getScene().widthProperty().addListener(widthListener);
            stage.getScene().heightProperty().addListener(heightListener);
            stage.xProperty().addListener(xListener);
            stage.yProperty().addListener(yListener);
        }
    }

    // Syncs position and size to the owner window in one shot.
    private void updateLayout() {
        updateX();
        updateY();
        updateWidth();
        updateHeight();
    }

    // The update* methods below assume a non-null owner; callers guard for that.
    private void updateHeight() {
        Window stage = getOwner();
        setHeight(stage.getScene().getHeight());
    }

    private void updateWidth() {
        Window stage = getOwner();
        setWidth(stage.getScene().getWidth());
    }

    private void updateY() {
        Window stage = getOwner();
        setY(stage.getY() + stage.getScene().getY());
    }

    private void updateX() {
        Window stage = getOwner();
        setX(stage.getX() + stage.getScene().getX());
    }

    // Currently running hide animation; null when none is in flight.
    private Animation transition = null;

    /**
     * play the hide animation for the dialog, as the java hide method is set to final
     * so it can not be overridden
     */
    public void hideWithAnimation() {
        // no-op while a hide animation is already running
        if (transition == null || transition.getStatus().equals(Animation.Status.STOPPED)) {
            JFXAlertAnimation currentAnimation = getCurrentAnimation();
            Animation animation = currentAnimation.createHidingAnimation(getDialogPane().getContent(), getDialogPane());
            if (animation != null) {
                transition = animation;
                animation.setOnFinished(finish -> {
                    // clear the flag so the DIALOG_CLOSE_REQUEST handler lets hide() proceed
                    animateClosing = false;
                    hide();
                    transition = null;
                });
                animation.play();
            } else {
                animateClosing = false;
                transition = null;
                Platform.runLater(this::hide);
            }
        }
    }

    // Manager that dispatches this dialog's DialogEvents ahead of the default chain.
    private final EventHandlerManager eventHandlerManager = new EventHandlerManager(this);

    /**
     * {@inheritDoc}
     */
    @Override
    public EventDispatchChain buildEventDispatchChain(EventDispatchChain tail) {
        return super.buildEventDispatchChain(tail).prepend(eventHandlerManager);
    }

    /** Replaces the alert's content nodes. */
    public void setContent(Node... content) {
        contentContainer.getChildren().setAll(content);
    }

    /**
     * indicates whether the dialog will close when clicking on the overlay or not
     */
    private BooleanProperty overlayClose = new SimpleBooleanProperty(true);

    public boolean isOverlayClose() {
        return overlayClose.get();
    }

    public BooleanProperty overlayCloseProperty() {
        return overlayClose;
    }

    public void setOverlayClose(boolean overlayClose) {
        this.overlayClose.set(overlayClose);
    }

    /**
     * specify the animation when showing / hiding the dialog
     * by default it's set to {@link JFXAlertAnimation#CENTER_ANIMATION}
     */
    private ObjectProperty<JFXAlertAnimation> animation = new SimpleObjectProperty<>
        (JFXAlertAnimation.CENTER_ANIMATION);

    public JFXAlertAnimation getAnimation() {
        return animation.get();
    }

    public ObjectProperty<JFXAlertAnimation> animationProperty() {
        return animation;
    }

    public void setAnimation(JFXAlertAnimation animation) {
        this.animation.set(animation);
    }

    /** Sets the preferred size of the content container (not the whole overlay). */
    public void setSize(double prefWidth, double prefHeight) {
        contentContainer.setPrefSize(prefWidth, prefHeight);
    }

    // Whether pressing ESC hides the dialog (default true).
    private BooleanProperty hideOnEscape = new SimpleBooleanProperty(this, "hideOnEscape", true);

    public final void setHideOnEscape(boolean value) {
        hideOnEscape.set(value);
    }

    public final boolean isHideOnEscape() {
        return hideOnEscape.get();
    }

    public final BooleanProperty hideOnEscapeProperty() {
        return hideOnEscape;
    }
}
|
|
/*
* Copyright 2015, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.grpc.benchmarks;
import static java.util.concurrent.ForkJoinPool.defaultForkJoinWorkerThreadFactory;
import com.google.common.util.concurrent.UncaughtExceptionHandlers;
import com.google.protobuf.ByteString;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.Status;
import io.grpc.benchmarks.proto.Messages;
import io.grpc.benchmarks.proto.Messages.Payload;
import io.grpc.benchmarks.proto.Messages.SimpleRequest;
import io.grpc.benchmarks.proto.Messages.SimpleResponse;
import io.grpc.internal.GrpcUtil;
import io.grpc.netty.GrpcSslContexts;
import io.grpc.netty.NegotiationType;
import io.grpc.netty.NettyChannelBuilder;
import io.grpc.okhttp.OkHttpChannelBuilder;
import io.grpc.okhttp.internal.Platform;
import io.grpc.testing.TestUtils;
import io.netty.channel.epoll.EpollDomainSocketChannel;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.epoll.EpollSocketChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.channel.unix.DomainSocketAddress;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.SslProvider;
import io.netty.util.concurrent.DefaultThreadFactory;
import org.HdrHistogram.Histogram;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.SocketAddress;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinPool.ForkJoinWorkerThreadFactory;
import java.util.concurrent.ForkJoinWorkerThread;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nullable;
import javax.net.ssl.SSLSocketFactory;
/**
* Utility methods to support benchmarking classes.
*/
public final class Utils {
private static final String UNIX_DOMAIN_SOCKET_PREFIX = "unix://";
// The histogram can record values between 1 microsecond and 1 min.
public static final long HISTOGRAM_MAX_VALUE = 60000000L;
// Value quantization will be no more than 1%. See the README of HdrHistogram for more details.
public static final int HISTOGRAM_PRECISION = 2;
public static final int DEFAULT_FLOW_CONTROL_WINDOW =
NettyChannelBuilder.DEFAULT_FLOW_CONTROL_WINDOW;
private Utils() {
}
public static boolean parseBoolean(String value) {
return value.isEmpty() || Boolean.parseBoolean(value);
}
/**
* Parse a {@link SocketAddress} from the given string.
*/
public static SocketAddress parseSocketAddress(String value) {
if (value.startsWith(UNIX_DOMAIN_SOCKET_PREFIX)) {
// Unix Domain Socket address.
// Create the underlying file for the Unix Domain Socket.
String filePath = value.substring(UNIX_DOMAIN_SOCKET_PREFIX.length());
File file = new File(filePath);
if (!file.isAbsolute()) {
throw new IllegalArgumentException("File path must be absolute: " + filePath);
}
try {
if (file.createNewFile()) {
// If this application created the file, delete it when the application exits.
file.deleteOnExit();
}
} catch (IOException ex) {
throw new RuntimeException(ex);
}
// Create the SocketAddress referencing the file.
return new DomainSocketAddress(file);
} else {
// Standard TCP/IP address.
String[] parts = value.split(":", 2);
if (parts.length < 2) {
throw new IllegalArgumentException(
"Address must be a unix:// path or be in the form host:port. Got: " + value);
}
String host = parts[0];
int port = Integer.parseInt(parts[1]);
return new InetSocketAddress(host, port);
}
}
private static OkHttpChannelBuilder newOkhttpClientChannel(
SocketAddress address, boolean tls, boolean testca, @Nullable String authorityOverride) {
InetSocketAddress addr = (InetSocketAddress) address;
OkHttpChannelBuilder builder =
OkHttpChannelBuilder.forAddress(addr.getHostName(), addr.getPort());
if (tls) {
builder.negotiationType(io.grpc.okhttp.NegotiationType.TLS);
SSLSocketFactory factory;
if (testca) {
builder.overrideAuthority(
GrpcUtil.authorityFromHostAndPort(authorityOverride, addr.getPort()));
try {
factory = TestUtils.newSslSocketFactoryForCa(
Platform.get().getProvider(),
TestUtils.loadCert("ca.pem"));
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
factory = (SSLSocketFactory) SSLSocketFactory.getDefault();
}
builder.sslSocketFactory(factory);
} else {
builder.negotiationType(io.grpc.okhttp.NegotiationType.PLAINTEXT);
}
return builder;
}
private static NettyChannelBuilder newNettyClientChannel(Transport transport,
SocketAddress address, boolean tls, boolean testca, int flowControlWindow,
boolean useDefaultCiphers) throws IOException {
NettyChannelBuilder builder =
NettyChannelBuilder.forAddress(address).flowControlWindow(flowControlWindow);
if (tls) {
builder.negotiationType(NegotiationType.TLS);
SslContext sslContext = null;
if (testca) {
File cert = TestUtils.loadCert("ca.pem");
SslContextBuilder sslContextBuilder = GrpcSslContexts.forClient().trustManager(cert);
if (transport == Transport.NETTY_NIO) {
sslContextBuilder = GrpcSslContexts.configure(sslContextBuilder, SslProvider.JDK);
} else {
// Native transport with OpenSSL
sslContextBuilder = GrpcSslContexts.configure(sslContextBuilder, SslProvider.OPENSSL);
}
if (useDefaultCiphers) {
sslContextBuilder.ciphers(null);
}
sslContext = sslContextBuilder.build();
}
builder.sslContext(sslContext);
} else {
builder.negotiationType(NegotiationType.PLAINTEXT);
}
DefaultThreadFactory tf = new DefaultThreadFactory("client-elg-", true /*daemon */);
switch (transport) {
case NETTY_NIO:
builder
.eventLoopGroup(new NioEventLoopGroup(0, tf))
.channelType(NioSocketChannel.class);
break;
case NETTY_EPOLL:
// These classes only work on Linux.
builder
.eventLoopGroup(new EpollEventLoopGroup(0, tf))
.channelType(EpollSocketChannel.class);
break;
case NETTY_UNIX_DOMAIN_SOCKET:
// These classes only work on Linux.
builder
.eventLoopGroup(new EpollEventLoopGroup(0, tf))
.channelType(EpollDomainSocketChannel.class);
break;
default:
// Should never get here.
throw new IllegalArgumentException("Unsupported transport: " + transport);
}
return builder;
}
private static ExecutorService clientExecutor;
private static synchronized ExecutorService getExecutor() {
if (clientExecutor == null) {
clientExecutor = new ForkJoinPool(
Runtime.getRuntime().availableProcessors(),
new ForkJoinWorkerThreadFactory() {
final AtomicInteger num = new AtomicInteger();
@Override
public ForkJoinWorkerThread newThread(ForkJoinPool pool) {
ForkJoinWorkerThread thread = defaultForkJoinWorkerThreadFactory.newThread(pool);
thread.setDaemon(true);
thread.setName("grpc-client-app-" + "-" + num.getAndIncrement());
return thread;
}
}, UncaughtExceptionHandlers.systemExit(), true /* async */);
}
return clientExecutor;
}
/**
* Create a {@link ManagedChannel} for the given parameters.
*/
public static ManagedChannel newClientChannel(Transport transport, SocketAddress address,
boolean tls, boolean testca, @Nullable String authorityOverride, boolean useDefaultCiphers,
int flowControlWindow, boolean directExecutor) {
ManagedChannelBuilder<?> builder;
if (transport == Transport.OK_HTTP) {
builder = newOkhttpClientChannel(address, tls, testca, authorityOverride);
} else {
try {
builder = newNettyClientChannel(
transport, address, tls, testca, flowControlWindow, useDefaultCiphers);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
if (authorityOverride != null) {
builder.overrideAuthority(authorityOverride);
}
if (directExecutor) {
builder.directExecutor();
} else {
// TODO(carl-mastrangelo): This should not be necessary. I don't know where this should be
// put. Move it somewhere else, or remove it if no longer necessary.
// See: https://github.com/grpc/grpc-java/issues/2119
builder.executor(getExecutor());
}
return builder.build();
}
/**
* Save a {@link Histogram} to a file.
*/
public static void saveHistogram(Histogram histogram, String filename) throws IOException {
File file;
PrintStream log = null;
try {
file = new File(filename);
if (file.exists() && !file.delete()) {
System.err.println("Failed deleting previous histogram file: " + file.getAbsolutePath());
}
log = new PrintStream(new FileOutputStream(file), false);
histogram.outputPercentileDistribution(log, 1.0);
} finally {
if (log != null) {
log.close();
}
}
}
/**
* Construct a {@link SimpleResponse} for the given request.
*/
public static SimpleResponse makeResponse(SimpleRequest request) {
if (request.getResponseSize() > 0) {
if (!Messages.PayloadType.COMPRESSABLE.equals(request.getResponseType())) {
throw Status.INTERNAL.augmentDescription("Error creating payload.").asRuntimeException();
}
ByteString body = ByteString.copyFrom(new byte[request.getResponseSize()]);
Messages.PayloadType type = request.getResponseType();
Payload payload = Payload.newBuilder().setType(type).setBody(body).build();
return SimpleResponse.newBuilder().setPayload(payload).build();
}
return SimpleResponse.getDefaultInstance();
}
/**
* Construct a {@link SimpleRequest} with the specified dimensions.
*/
public static SimpleRequest makeRequest(Messages.PayloadType payloadType, int reqLength,
int respLength) {
ByteString body = ByteString.copyFrom(new byte[reqLength]);
Payload payload = Payload.newBuilder()
.setType(payloadType)
.setBody(body)
.build();
return SimpleRequest.newBuilder()
.setResponseType(payloadType)
.setResponseSize(respLength)
.setPayload(payload)
.build();
}
/**
* Picks a port that is not used right at this moment.
* Warning: Not thread safe. May see "BindException: Address already in use: bind" if using the
* returned port to create a new server socket when other threads/processes are concurrently
* creating new sockets without a specific port.
*/
public static int pickUnusedPort() {
try {
ServerSocket serverSocket = new ServerSocket(0);
int port = serverSocket.getLocalPort();
serverSocket.close();
return port;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
|
|
/*
* Copyright (C) 2019 Hudhaifa Shatnawi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hudhaifa.sortframework.app;
import com.hudhaifa.sortframework.core.AbstractSort;
import com.hudhaifa.sortframework.core.SortCanvas;
import com.hudhaifa.sortframework.core.Sorter;
import com.hudhaifa.sortframework.parallel.ParallelIMergeSort;
import com.hudhaifa.sortframework.parallel.ParallelMergeSort;
import com.hudhaifa.sortframework.sort.BubbleSort;
import com.hudhaifa.sortframework.sort.IMergeSort;
import com.hudhaifa.sortframework.sort.InsertionSort;
import com.hudhaifa.sortframework.sort.MergeSort;
import com.hudhaifa.sortframework.sort.ShuffleSort;
import com.hudhaifa.sortframework.util.ArrayUtil;
import java.awt.HeadlessException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
/**
* Visualize sorting algorithms.
*
* @author Hudhaifa Shatnawi <[email protected]>
* @version 1.0, Jul 16, 2019
* @since sort-framework v1.1
*/
public class SortApp
extends javax.swing.JFrame {
/**
 * Application entry point: installs the "Windows" look and feel when available,
 * then shows the main frame on the EDT.
 *
 * @param args the command line arguments (unused)
 */
public static void main(String args[]) {
    /* Use the "Windows" look and feel when installed; otherwise stay with the
     * default look and feel. (The original comment mentioned Nimbus, but the
     * code below actually selects "Windows".)
     * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
     */
    try {
        for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Windows".equals(info.getName())) {
                javax.swing.UIManager.setLookAndFeel(info.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | javax.swing.UnsupportedLookAndFeelException ex) {
        java.util.logging.Logger.getLogger(SortApp.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    }
    /* Create and display the form */
    java.awt.EventQueue.invokeLater(() -> {
        new SortApp().setVisible(true);
    });
}
/**
 * Creates new form SortApp: builds the generated UI, instantiates the sorter
 * algorithms, and then creates the per-sorter canvas components.
 */
public SortApp() {
    // the sorters list must exist before initSorters()/initSortersComponents() populate/use it
    this.sorters = new ArrayList<>();
    initComponents();
    initSorters();
    initSortersComponents();
}
/**
 * This method is called from within the constructor to initialize the form.
 * WARNING: Do NOT modify this code. The content of this method is always
 * regenerated by the Form Editor.
 */
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
    sortersPanel = new javax.swing.JPanel();
    menuBar = new javax.swing.JMenuBar();
    fileMenu = new javax.swing.JMenu();
    resetMenuItem = new javax.swing.JMenuItem();
    startMenuItem = new javax.swing.JMenuItem();
    setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
    setTitle("Sort Framework");
    // two-column grid; rows grow as sorter canvases are added
    sortersPanel.setLayout(new java.awt.GridLayout(0, 2));
    fileMenu.setText("File");
    // NOTE(review): InputEvent.CTRL_MASK is deprecated in newer JDKs in favor of
    // CTRL_DOWN_MASK; regenerate via the Form Editor rather than editing by hand.
    resetMenuItem.setAccelerator(javax.swing.KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_R, java.awt.event.InputEvent.CTRL_MASK));
    resetMenuItem.setText("Reset");
    resetMenuItem.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            resetMenuItemActionPerformed(evt);
        }
    });
    fileMenu.add(resetMenuItem);
    startMenuItem.setAccelerator(javax.swing.KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_S, java.awt.event.InputEvent.CTRL_MASK));
    startMenuItem.setText("Start");
    startMenuItem.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            startMenuItemActionPerformed(evt);
        }
    });
    fileMenu.add(startMenuItem);
    menuBar.add(fileMenu);
    setJMenuBar(menuBar);
    javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
    getContentPane().setLayout(layout);
    layout.setHorizontalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addContainerGap()
            .addComponent(sortersPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 836, Short.MAX_VALUE)
            .addContainerGap())
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addContainerGap()
            .addComponent(sortersPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 533, Short.MAX_VALUE)
            .addContainerGap())
    );
    pack();
}// </editor-fold>//GEN-END:initComponents
/**
* Initialize sort algorithms instances
*/
private void initSorters() {
for (String sorterClass : linearSorters) {
try {
AbstractSort sorter = (AbstractSort) Class.forName(sorterClass).newInstance();
SortCanvas canvas = new HColorCanvas();
sorters.add(new Sorter(sorter, canvas));
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException ex) {
Logger.getLogger(SortApp.class.getName()).log(Level.SEVERE, null, ex);
}
}
for (String sorterClass : parallelSorters) {
try {
AbstractSort sorter = (AbstractSort) Class.forName(sorterClass).newInstance();
SortCanvas canvas = new HColorCanvas();
sorters.add(new Sorter(sorter, canvas));
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException ex) {
Logger.getLogger(SortApp.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
/**
* Appends sort algorithms components to the screen
*/
private void initSortersComponents() {
for (int i = 0; i < sorters.size(); i++) {
Sorter sorter = sorters.get(i);
sortersPanel.add(sorter);
}
}
private void resetMenuItemActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_resetMenuItemActionPerformed
int size = 0;
try {
String sizeValue = JOptionPane.showInputDialog(this, "Enter the array size:");
size = Integer.parseInt(sizeValue);
} catch (HeadlessException | NumberFormatException e) {
}
if (size <= 0) {
return;
}
int[] arr = fill(ArrayUtil.FILL_RANDOM, size);
sorters.forEach((sorter) -> {
sorter.init(Arrays.copyOf(arr, arr.length));
});
}//GEN-LAST:event_resetMenuItemActionPerformed
private void startMenuItemActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_startMenuItemActionPerformed
sorters.forEach((sorter) -> {
sorter.startSort();
});
}//GEN-LAST:event_startMenuItemActionPerformed
/**
* Fills the array in on of the following orders:
* <ul>
* <li>Reverse order (non-repeated elements)</li>
* <li>Nearly sorted (non-repeated elements)</li>
* <li>Randomized (non-repeated elements)</li>
* <li>Randomized (repeated elements)</li>
* </ul>
*
* @param strategy fill strategy
* @param size array length
* @return filled array
* @see ArrayUtil
*/
protected int[] fill(int strategy, int size) {
switch (strategy) {
case ArrayUtil.FILL_REVERSE:
return ArrayUtil.reversed(size);
case ArrayUtil.FILL_NEARLY_SORTED:
return ArrayUtil.nearlySorted(size, 3);
case ArrayUtil.FILL_REPEATED:
return ArrayUtil.repeated(size, 15);
case ArrayUtil.FILL_RANDOM:
default:
return ArrayUtil.random(size);
}
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JMenu fileMenu;
private javax.swing.JMenuBar menuBar;
private javax.swing.JMenuItem resetMenuItem;
private javax.swing.JPanel sortersPanel;
private javax.swing.JMenuItem startMenuItem;
// End of variables declaration//GEN-END:variables
/**
* List of all supported sort algorithms
*/
private final List<Sorter> sorters;
/**
* List of supported linear sort algorithms
*/
private final String[] linearSorters = new String[]{
BubbleSort.class.getName(),
IMergeSort.class.getName(),
InsertionSort.class.getName(),
MergeSort.class.getName(),
ShuffleSort.class.getName()
};
/**
* List of supported parallel sort algorithms
*/
private final String[] parallelSorters = new String[]{
ParallelIMergeSort.class.getName(),
ParallelMergeSort.class.getName()
};
}
|
|
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.ui.impl;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableSet;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.util.AbstractAccumulator;
import org.onlab.util.Accumulator;
import org.onosproject.cluster.ClusterEvent;
import org.onosproject.cluster.ClusterEventListener;
import org.onosproject.cluster.ControllerNode;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.core.DefaultApplicationId;
import org.onosproject.event.Event;
import org.onosproject.mastership.MastershipEvent;
import org.onosproject.mastership.MastershipListener;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Host;
import org.onosproject.net.HostId;
import org.onosproject.net.HostLocation;
import org.onosproject.net.Link;
import org.onosproject.net.device.DeviceEvent;
import org.onosproject.net.device.DeviceListener;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.FlowRuleEvent;
import org.onosproject.net.flow.FlowRuleListener;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.host.HostEvent;
import org.onosproject.net.host.HostListener;
import org.onosproject.net.intent.HostToHostIntent;
import org.onosproject.net.intent.Intent;
import org.onosproject.net.intent.IntentEvent;
import org.onosproject.net.intent.IntentListener;
import org.onosproject.net.intent.IntentService;
import org.onosproject.net.intent.IntentState;
import org.onosproject.net.intent.Key;
import org.onosproject.net.intent.MultiPointToSinglePointIntent;
import org.onosproject.net.link.LinkEvent;
import org.onosproject.net.link.LinkListener;
import org.onosproject.ui.JsonUtils;
import org.onosproject.ui.RequestHandler;
import org.onosproject.ui.UiConnection;
import org.onosproject.ui.impl.TrafficMonitorBase.Mode;
import org.onosproject.ui.topo.Highlights;
import org.onosproject.ui.topo.NodeSelection;
import org.onosproject.ui.topo.PropertyPanel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ExecutorService;
import static java.util.concurrent.Executors.newSingleThreadExecutor;
import static org.onlab.util.Tools.groupedThreads;
import static org.onosproject.cluster.ClusterEvent.Type.INSTANCE_ADDED;
import static org.onosproject.net.ConnectPoint.deviceConnectPoint;
import static org.onosproject.net.DeviceId.deviceId;
import static org.onosproject.net.HostId.hostId;
import static org.onosproject.net.device.DeviceEvent.Type.DEVICE_ADDED;
import static org.onosproject.net.device.DeviceEvent.Type.DEVICE_UPDATED;
import static org.onosproject.net.device.DeviceEvent.Type.PORT_STATS_UPDATED;
import static org.onosproject.net.host.HostEvent.Type.HOST_ADDED;
import static org.onosproject.net.link.LinkEvent.Type.LINK_ADDED;
import static org.onosproject.ui.JsonUtils.envelope;
import static org.onosproject.ui.JsonUtils.string;
import static org.onosproject.ui.topo.TopoJson.highlightsMessage;
import static org.onosproject.ui.topo.TopoJson.json;
/**
* Web socket capable of interacting with the GUI topology view.
*/
public class TopologyViewMessageHandler extends TopologyViewMessageHandlerBase {
// incoming event types (client -> server)
private static final String REQ_DETAILS = "requestDetails";
private static final String UPDATE_META = "updateMeta";
private static final String ADD_HOST_INTENT = "addHostIntent";
private static final String REMOVE_INTENT = "removeIntent";
private static final String REMOVE_INTENTS = "removeIntents";
private static final String RESUBMIT_INTENT = "resubmitIntent";
private static final String ADD_MULTI_SRC_INTENT = "addMultiSourceIntent";
private static final String REQ_RELATED_INTENTS = "requestRelatedIntents";
private static final String REQ_NEXT_INTENT = "requestNextRelatedIntent";
private static final String REQ_PREV_INTENT = "requestPrevRelatedIntent";
private static final String REQ_SEL_INTENT_TRAFFIC = "requestSelectedIntentTraffic";
private static final String SEL_INTENT = "selectIntent";
private static final String REQ_ALL_TRAFFIC = "requestAllTraffic";
private static final String REQ_DEV_LINK_FLOWS = "requestDeviceLinkFlows";
private static final String CANCEL_TRAFFIC = "cancelTraffic";
private static final String REQ_SUMMARY = "requestSummary";
private static final String CANCEL_SUMMARY = "cancelSummary";
private static final String EQ_MASTERS = "equalizeMasters";
private static final String SPRITE_LIST_REQ = "spriteListRequest";
private static final String SPRITE_DATA_REQ = "spriteDataRequest";
private static final String TOPO_START = "topoStart";
private static final String TOPO_SELECT_OVERLAY = "topoSelectOverlay";
private static final String TOPO_STOP = "topoStop";
private static final String SEL_PROTECTED_INTENT = "selectProtectedIntent";
private static final String CANCEL_PROTECTED_INTENT_HIGHLIGHT = "cancelProtectedIntentHighlight";

// outgoing event types (server -> client)
private static final String SHOW_SUMMARY = "showSummary";
private static final String SHOW_DETAILS = "showDetails";
private static final String SPRITE_LIST_RESPONSE = "spriteListResponse";
private static final String SPRITE_DATA_RESPONSE = "spriteDataResponse";
private static final String UPDATE_INSTANCE = "updateInstance";
private static final String TOPO_START_DONE = "topoStartDone";

// JSON field names used in message payloads
private static final String PAYLOAD = "payload";
private static final String EXTRA = "extra";
private static final String ID = "id";
private static final String KEY = "key";
private static final String IS_EDGE_LINK = "isEdgeLink";
private static final String SOURCE_ID = "sourceId";
private static final String SOURCE_PORT = "sourcePort";
private static final String TARGET_ID = "targetId";
private static final String TARGET_PORT = "targetPort";
private static final String APP_ID = "appId";
private static final String APP_NAME = "appName";
private static final String DEVICE = "device";
private static final String HOST = "host";
private static final String LINK = "link";
private static final String CLASS = "class";
private static final String UNKNOWN = "unknown";
private static final String ONE = "one";
private static final String TWO = "two";
private static final String SRC = "src";
private static final String DST = "dst";
private static final String DATA = "data";
private static final String NAME = "name";
private static final String NAMES = "names";
private static final String ACTIVATE = "activate";
private static final String DEACTIVATE = "deactivate";
private static final String PURGE = "purge";
private static final String TRAFFIC_TYPE = "trafficType";

// field values (traffic-type selectors sent by the client)
private static final String FLOW_STATS_BYTES = "flowStatsBytes";
private static final String PORT_STATS_BIT_SEC = "portStatsBitSec";
private static final String PORT_STATS_PKT_SEC = "portStatsPktSec";

private static final String MY_APP_ID = "org.onosproject.gui";

private static final String SLASH = "/";

// monitor refresh periods, in milliseconds
private static final long TRAFFIC_PERIOD = 5000;
private static final long SUMMARY_PERIOD = 30000;

// orders controller nodes by id for a stable client-side instance listing
private static final Comparator<? super ControllerNode> NODE_COMPARATOR =
        Comparator.comparing(o -> o.id().toString());

// drives the periodic summary task (see SummaryMonitor below)
private final Timer timer = new Timer("onos-topology-view");

// batching parameters for the event accumulator
private static final int MAX_EVENTS = 1000;
private static final int MAX_BATCH_MS = 5000;
private static final int MAX_IDLE_MS = 1000;

// application id under which GUI-created intents are submitted; set in init()
private ApplicationId appId;

// listeners that relay core events onto the web socket (attached on topoStart)
private final ClusterEventListener clusterListener = new InternalClusterListener();
private final MastershipListener mastershipListener = new InternalMastershipListener();
private final DeviceListener deviceListener = new InternalDeviceListener();
private final LinkListener linkListener = new InternalLinkListener();
private final HostListener hostListener = new InternalHostListener();
private final IntentListener intentListener = new InternalIntentListener();
private final FlowRuleListener flowListener = new InternalFlowListener();

private final Accumulator<Event> eventAccummulator = new InternalEventAccummulator();

// serializes all outbound messages onto a single worker thread
private final ExecutorService msgSender =
        newSingleThreadExecutor(groupedThreads("onos/gui", "msg-sender", log));

private TopoOverlayCache overlayCache;
private TrafficMonitor traffic;
private ProtectedIntentMonitor protectedIntentMonitor;

// summary monitoring state; guarded by the synchronized methods below
private TimerTask summaryTask = null;
private boolean summaryRunning = false;

private volatile boolean listenersRemoved = false;
@Override
public void init(UiConnection connection, ServiceDirectory directory) {
    super.init(connection, directory);
    // Register (or look up) the GUI application id, and create the monitors
    // that periodically push traffic/protected-intent highlights to the client.
    appId = directory.get(CoreService.class).registerApplication(MY_APP_ID);
    traffic = new TrafficMonitor(TRAFFIC_PERIOD, services, this);
    protectedIntentMonitor = new ProtectedIntentMonitor(TRAFFIC_PERIOD, services, this);
}

@Override
public void destroy() {
    // Stop periodic work and detach core-event listeners before tearing down.
    cancelAllRequests();
    removeListeners();
    super.destroy();
}
@Override
protected Collection<RequestHandler> createRequestHandlers() {
    // One handler instance per incoming event type; the base class
    // dispatches incoming messages to these by event name.
    return ImmutableSet.of(
            new TopoStart(),
            new TopoSelectOverlay(),
            new TopoStop(),
            new ReqSummary(),
            new CancelSummary(),
            new SpriteListReq(),
            new SpriteDataReq(),
            new RequestDetails(),
            new UpdateMeta(),
            new EqMasters(),

            // TODO: migrate traffic related to separate app
            new AddHostIntent(),
            new AddMultiSourceIntent(),
            new RemoveIntent(),
            new ResubmitIntent(),
            new RemoveIntents(),
            new ReqAllTraffic(),
            new ReqDevLinkFlows(),
            new ReqRelatedIntents(),
            new ReqNextIntent(),
            new ReqPrevIntent(),
            new ReqSelectedIntentTraffic(),
            new SelIntent(),
            new SelProtectedIntent(),
            new CancelTraffic(),
            new CancelProtectedIntentHighlight()
    );
}

/**
 * Injects the topology overlay cache.
 *
 * @param overlayCache injected cache
 */
void setOverlayCache(TopoOverlayCache overlayCache) {
    this.overlayCache = overlayCache;
}
// ==================================================================

// Handles the client's topoStart event by attaching listeners and pushing
// the full current topology (instances, devices, links, hosts) to the view.
private final class TopoStart extends RequestHandler {
    private TopoStart() {
        super(TOPO_START);
    }

    @Override
    public void process(ObjectNode payload) {
        addListeners();
        sendAllInstances(null);
        sendAllDevices();
        sendAllLinks();
        sendAllHosts();
        sendTopoStartDone();
    }
}

// Switches the active topology overlay.
private final class TopoSelectOverlay extends RequestHandler {
    private TopoSelectOverlay() {
        super(TOPO_SELECT_OVERLAY);
    }

    @Override
    public void process(ObjectNode payload) {
        String deact = string(payload, DEACTIVATE);
        String act = string(payload, ACTIVATE);
        overlayCache.switchOverlay(deact, act);
    }
}

// Tears down listeners and monitors when the client leaves the topology view.
private final class TopoStop extends RequestHandler {
    private TopoStop() {
        super(TOPO_STOP);
    }

    @Override
    public void process(ObjectNode payload) {
        removeListeners();
        stopSummaryMonitoring();
        traffic.stopMonitoring();
    }
}

// Sends the current summary immediately and starts periodic refreshes.
private final class ReqSummary extends RequestHandler {
    private ReqSummary() {
        super(REQ_SUMMARY);
    }

    @Override
    public void process(ObjectNode payload) {
        requestSummary();
        startSummaryMonitoring();
    }
}

// Stops the periodic summary refresh.
private final class CancelSummary extends RequestHandler {
    private CancelSummary() {
        super(CANCEL_SUMMARY);
    }

    @Override
    public void process(ObjectNode payload) {
        stopSummaryMonitoring();
    }
}
// Replies with the names of all installed sprite definitions.
private final class SpriteListReq extends RequestHandler {
    private SpriteListReq() {
        super(SPRITE_LIST_REQ);
    }

    @Override
    public void process(ObjectNode payload) {
        ObjectNode root = objectNode();
        ArrayNode names = arrayNode();
        get(SpriteService.class).getNames().forEach(names::add);
        root.set(NAMES, names);
        sendMessage(SPRITE_LIST_RESPONSE, root);
    }
}

// Replies with the data for one named sprite definition.
private final class SpriteDataReq extends RequestHandler {
    private SpriteDataReq() {
        super(SPRITE_DATA_REQ);
    }

    @Override
    public void process(ObjectNode payload) {
        String name = string(payload, NAME);
        ObjectNode root = objectNode();
        root.set(DATA, get(SpriteService.class).get(name));
        sendMessage(SPRITE_DATA_RESPONSE, root);
    }
}
// Builds and sends the details panel for the selected node — a device,
// host, or (edge/infrastructure) link — giving the active overlay a
// chance to amend the panel before it is sent.
private final class RequestDetails extends RequestHandler {
    private RequestDetails() {
        super(REQ_DETAILS);
    }

    @Override
    public void process(ObjectNode payload) {
        String nodeType = string(payload, CLASS, UNKNOWN);
        String nodeId = string(payload, ID, "");
        PropertyPanel panel = null;

        switch (nodeType) {
            case DEVICE: {
                DeviceId did = deviceId(nodeId);
                panel = deviceDetails(did);
                overlayCache.currentOverlay().modifyDeviceDetails(panel, did);
                break;
            }
            case HOST: {
                HostId hid = hostId(nodeId);
                panel = hostDetails(hid);
                overlayCache.currentOverlay().modifyHostDetails(panel, hid);
                break;
            }
            case LINK: {
                String srcId = string(payload, SOURCE_ID);
                String tgtId = string(payload, TARGET_ID);
                if (bool(payload, IS_EDGE_LINK)) {
                    // Edge link: source is a host, target is a device port.
                    HostId hid = hostId(srcId);
                    ConnectPoint cp =
                            deviceConnectPoint(tgtId + SLASH + string(payload, TARGET_PORT));
                    panel = edgeLinkDetails(hid, cp);
                    overlayCache.currentOverlay().modifyEdgeLinkDetails(panel, hid, cp);
                } else {
                    // Infrastructure link: both endpoints are device ports.
                    ConnectPoint cpA =
                            deviceConnectPoint(srcId + SLASH + string(payload, SOURCE_PORT));
                    ConnectPoint cpB =
                            deviceConnectPoint(tgtId + SLASH + string(payload, TARGET_PORT));
                    panel = infraLinkDetails(cpA, cpB);
                    overlayCache.currentOverlay().modifyInfraLinkDetails(panel, cpA, cpB);
                }
                break;
            }
            default:
                break;
        }

        if (panel == null) {
            log.warn("Unable to process details request: {}", payload);
        } else {
            sendMessage(envelope(SHOW_DETAILS, json(panel)));
        }
    }
}
// Persists client-side presentation metadata (e.g. pinned node positions).
private final class UpdateMeta extends RequestHandler {
    private UpdateMeta() {
        super(UPDATE_META);
    }

    @Override
    public void process(ObjectNode payload) {
        updateMetaUi(payload);
    }
}

// Rebalances device mastership evenly across the cluster.
private final class EqMasters extends RequestHandler {
    private EqMasters() {
        super(EQ_MASTERS);
    }

    @Override
    public void process(ObjectNode payload) {
        services.mastershipAdmin().balanceRoles();
    }
}
// ========= -----------------------------------------------------------------
// === TODO: move traffic related classes to traffic app

// Creates and submits a host-to-host intent between the two selected hosts.
private final class AddHostIntent extends RequestHandler {
    private AddHostIntent() {
        super(ADD_HOST_INTENT);
    }

    @Override
    public void process(ObjectNode payload) {
        // TODO: add protection against device ids and non-existent hosts.
        HostId one = hostId(string(payload, ONE));
        HostId two = hostId(string(payload, TWO));

        HostToHostIntent intent = HostToHostIntent.builder()
                .appId(appId)
                .one(one)
                .two(two)
                .build();

        services.intent().submit(intent);
        // Immediately visualize the new intent if the traffic overlay is up.
        if (overlayCache.isActive(TrafficOverlay.TRAFFIC_ID)) {
            traffic.monitor(intent);
        }
    }
}
/**
 * Looks up the intent referenced by the payload's appId/appName/key fields.
 * A numeric-looking key is tried as a long key first, then as a plain
 * string key; a non-numeric key is tried as a string key only.
 * Returns null when no matching intent is found.
 */
private Intent findIntentByPayload(ObjectNode payload) {
    int numericAppId = Integer.parseInt(string(payload, APP_ID));
    String appName = string(payload, APP_NAME);
    ApplicationId owner = new DefaultApplicationId(numericAppId, appName);
    String stringKey = string(payload, KEY);

    // FIXME: If apps use different string keys that contain the same
    // numeric value (e.g. "020", "0x10", "16", "#10") and one intent
    // uses a long key (e.g. 16L), this lookup might return the wrong intent.
    List<Key> candidates = new ArrayList<>();
    try {
        candidates.add(Key.of(Long.decode(stringKey), owner));
    } catch (NumberFormatException ignored) {
        // Key is not numeric; fall through to the plain string key.
    }
    candidates.add(Key.of(stringKey, owner));

    Intent intent = null;
    Key key = null;
    for (Key candidate : candidates) {
        key = candidate;
        intent = services.intent().getIntent(key);
        if (intent != null) {
            break;
        }
    }
    log.debug("Attempting to select intent by key={}", key);
    return intent;
}
// Withdraws (or purges, when requested) the intent identified in the payload.
private final class RemoveIntent extends RequestHandler {
    private RemoveIntent() {
        super(REMOVE_INTENT);
    }

    // True if the client asked for a hard purge rather than a withdraw.
    private boolean isIntentToBePurged(ObjectNode payload) {
        return bool(payload, PURGE);
    }

    @Override
    public void process(ObjectNode payload) {
        Intent intent = findIntentByPayload(payload);
        if (intent == null) {
            log.warn("Unable to find intent from payload {}", payload);
        } else {
            log.debug("Withdrawing / Purging intent {}", intent.key());
            if (isIntentToBePurged(payload)) {
                services.intent().purge(intent);
            } else {
                services.intent().withdraw(intent);
            }
        }
    }
}

// Resubmits the intent identified in the payload.
private final class ResubmitIntent extends RequestHandler {
    private ResubmitIntent() {
        super(RESUBMIT_INTENT);
    }

    @Override
    public void process(ObjectNode payload) {
        Intent intent = findIntentByPayload(payload);
        if (intent == null) {
            log.warn("Unable to find intent from payload {}", payload);
        } else {
            log.debug("Resubmitting intent {}", intent.key());
            services.intent().submit(intent);
        }
    }
}
// Creates and submits a multi-source to single-destination intent that
// steers traffic from all selected source hosts to one destination host.
private final class AddMultiSourceIntent extends RequestHandler {
    private AddMultiSourceIntent() {
        super(ADD_MULTI_SRC_INTENT);
    }

    @Override
    public void process(ObjectNode payload) {
        // TODO: add protection against device ids and non-existent hosts.
        Set<HostId> src = getHostIds((ArrayNode) payload.path(SRC));
        HostId dst = hostId(string(payload, DST));
        Host dstHost = services.host().getHost(dst);

        Set<ConnectPoint> ingressPoints = getHostLocations(src);

        // FIXME: clearly, this is not enough
        TrafficSelector selector = DefaultTrafficSelector.builder()
                .matchEthDst(dstHost.mac()).build();
        TrafficTreatment treatment = DefaultTrafficTreatment.emptyTreatment();

        MultiPointToSinglePointIntent intent =
                MultiPointToSinglePointIntent.builder()
                        .appId(appId)
                        .selector(selector)
                        .treatment(treatment)
                        .ingressPoints(ingressPoints)
                        .egressPoint(dstHost.location())
                        .build();

        services.intent().submit(intent);
        // Immediately visualize the new intent if the traffic overlay is up.
        if (overlayCache.isActive(TrafficOverlay.TRAFFIC_ID)) {
            traffic.monitor(intent);
        }
    }
}
/**
 * Purges every intent that is currently in the WITHDRAWN state.
 */
private final class RemoveIntents extends RequestHandler {
    private RemoveIntents() {
        super(REMOVE_INTENTS);
    }

    @Override
    public void process(ObjectNode payload) {
        // Use the shared services facade, consistent with every other
        // handler in this class, instead of a fresh directory lookup.
        IntentService intentService = services.intent();
        for (Intent intent : intentService.getIntents()) {
            if (intentService.getIntentState(intent.key()) == IntentState.WITHDRAWN) {
                intentService.purge(intent);
            }
        }
    }
}
// ========= -----------------------------------------------------------------

// Starts monitoring all traffic in the mode selected by the client.
private final class ReqAllTraffic extends RequestHandler {
    private ReqAllTraffic() {
        super(REQ_ALL_TRAFFIC);
    }

    @Override
    public void process(ObjectNode payload) {
        // Defaults to byte-based flow stats when no type is specified.
        String trafficType = string(payload, TRAFFIC_TYPE, FLOW_STATS_BYTES);

        switch (trafficType) {
            case FLOW_STATS_BYTES:
                traffic.monitor(Mode.ALL_FLOW_TRAFFIC_BYTES);
                break;
            case PORT_STATS_BIT_SEC:
                traffic.monitor(Mode.ALL_PORT_TRAFFIC_BIT_PS);
                break;
            case PORT_STATS_PKT_SEC:
                traffic.monitor(Mode.ALL_PORT_TRAFFIC_PKT_PS);
                break;
            default:
                // Unrecognized traffic type: ignore the request.
                break;
        }
    }
}

// Builds a node selection from the payload, resolved against current services.
private NodeSelection makeNodeSelection(ObjectNode payload) {
    return new NodeSelection(payload, services.device(), services.host(),
            services.link());
}
// Monitors flows on the links of the selected devices.
private final class ReqDevLinkFlows extends RequestHandler {
    private ReqDevLinkFlows() {
        super(REQ_DEV_LINK_FLOWS);
    }

    @Override
    public void process(ObjectNode payload) {
        traffic.monitor(Mode.DEV_LINK_FLOWS, makeNodeSelection(payload));
    }
}

// Highlights intents related to the selected nodes.
private final class ReqRelatedIntents extends RequestHandler {
    private ReqRelatedIntents() {
        super(REQ_RELATED_INTENTS);
    }

    @Override
    public void process(ObjectNode payload) {
        traffic.monitor(Mode.RELATED_INTENTS, makeNodeSelection(payload));
    }
}

// Steps forward through the set of related intents.
private final class ReqNextIntent extends RequestHandler {
    private ReqNextIntent() {
        super(REQ_NEXT_INTENT);
    }

    @Override
    public void process(ObjectNode payload) {
        traffic.selectNextIntent();
    }
}

// Steps backward through the set of related intents.
private final class ReqPrevIntent extends RequestHandler {
    private ReqPrevIntent() {
        super(REQ_PREV_INTENT);
    }

    @Override
    public void process(ObjectNode payload) {
        traffic.selectPreviousIntent();
    }
}

// Monitors traffic on the currently selected intent.
private final class ReqSelectedIntentTraffic extends RequestHandler {
    private ReqSelectedIntentTraffic() {
        super(REQ_SEL_INTENT_TRAFFIC);
    }

    @Override
    public void process(ObjectNode payload) {
        traffic.monitor(Mode.SELECTED_INTENT);
    }
}
// Selects and starts monitoring the intent identified in the payload.
private final class SelIntent extends RequestHandler {
    private SelIntent() {
        super(SEL_INTENT);
    }

    @Override
    public void process(ObjectNode payload) {
        Intent intent = findIntentByPayload(payload);
        if (intent == null) {
            log.warn("Unable to find intent from payload {}", payload);
        } else {
            log.debug("starting to monitor intent {}", intent.key());
            traffic.monitor(intent);
        }
    }
}

// Selects and starts monitoring the protected intent from the payload.
private final class SelProtectedIntent extends RequestHandler {
    private SelProtectedIntent() {
        super(SEL_PROTECTED_INTENT);
    }

    @Override
    public void process(ObjectNode payload) {
        Intent intent = findIntentByPayload(payload);
        if (intent == null) {
            log.warn("Unable to find protected intent from payload {}", payload);
        } else {
            log.debug("starting to monitor protected intent {}", intent.key());
            protectedIntentMonitor.monitor(intent);
        }
    }
}

// Stops all traffic monitoring.
private final class CancelTraffic extends RequestHandler {
    private CancelTraffic() {
        super(CANCEL_TRAFFIC);
    }

    @Override
    public void process(ObjectNode payload) {
        traffic.stopMonitoring();
    }
}

// Clears protected-intent highlighting.
private final class CancelProtectedIntentHighlight extends RequestHandler {
    private CancelProtectedIntentHighlight() {
        super(CANCEL_PROTECTED_INTENT_HIGHLIGHT);
    }

    @Override
    public void process(ObjectNode payload) {
        protectedIntentMonitor.stopMonitoring();
    }
}
//=======================================================================

// Converts highlights to JSON format and sends the message to the client.
void sendHighlights(Highlights highlights) {
    sendMessage(highlightsMessage(highlights));
}

// Sends the summary panel content, letting the active overlay amend it first.
private synchronized void requestSummary() {
    PropertyPanel pp = summmaryMessage();
    overlayCache.currentOverlay().modifySummary(pp);
    sendMessage(envelope(SHOW_SUMMARY, json(pp)));
}

// Stops all periodic monitors (summary and traffic).
private void cancelAllRequests() {
    stopSummaryMonitoring();
    traffic.stopMonitoring();
}
// Sends every controller node to the client as a node-added message,
// ordered by node id for a stable listing.
private void sendAllInstances(String messageType) {
    services.cluster().getNodes().stream()
            .sorted(NODE_COMPARATOR)
            .forEach(node -> sendMessage(
                    instanceMessage(new ClusterEvent(INSTANCE_ADDED, node),
                                    messageType)));
}
// Sends all devices to the client as device-added messages. Optical
// devices (ROADM/OTN) are sent first so they render on the lower layer;
// all remaining devices follow in their original order.
private void sendAllDevices() {
    List<Device> nonOptical = new ArrayList<>();
    for (Device device : services.device().getDevices()) {
        boolean optical = device.type() == Device.Type.ROADM
                || device.type() == Device.Type.OTN;
        if (optical) {
            sendMessage(deviceMessage(new DeviceEvent(DEVICE_ADDED, device)));
        } else {
            nonOptical.add(device);
        }
    }
    nonOptical.forEach(d ->
            sendMessage(deviceMessage(new DeviceEvent(DEVICE_ADDED, d))));
}
// Sends all links to the client as link-added messages. Optical links are
// sent first so they render on the lower layer; the rest follow in their
// original order.
private void sendAllLinks() {
    List<Link> nonOptical = new ArrayList<>();
    for (Link link : services.link().getLinks()) {
        if (link.type() == Link.Type.OPTICAL) {
            sendMessage(composeLinkMessage(new LinkEvent(LINK_ADDED, link)));
        } else {
            nonOptical.add(link);
        }
    }
    nonOptical.forEach(l ->
            sendMessage(composeLinkMessage(new LinkEvent(LINK_ADDED, l))));
}
// Temporary mechanism to support topology overlays adding their own
// properties to the link events.
private ObjectNode composeLinkMessage(LinkEvent event) {
    // start with base message
    ObjectNode msg = linkMessage(event);
    Map<String, String> additional =
            overlayCache.currentOverlay().additionalLinkData(event);

    if (additional != null) {
        // attach additional key-value pairs as extra data structure
        ObjectNode payload = (ObjectNode) msg.get(PAYLOAD);
        payload.set(EXTRA, createExtra(additional));
    }
    return msg;
}

// Copies the given key-value pairs into a new JSON object node.
private ObjectNode createExtra(Map<String, String> additional) {
    ObjectNode extra = objectNode();
    for (Map.Entry<String, String> entry : additional.entrySet()) {
        extra.put(entry.getKey(), entry.getValue());
    }
    return extra;
}
// Sends every known host to the client as a host-added message.
private void sendAllHosts() {
    services.host().getHosts().forEach(host ->
            sendMessage(hostMessage(new HostEvent(HOST_ADDED, host))));
}

// Resolves each host id to the connect point where that host is attached.
private Set<ConnectPoint> getHostLocations(Set<HostId> hostIds) {
    Set<ConnectPoint> points = new HashSet<>();
    hostIds.forEach(id -> points.add(getHostLocation(id)));
    return points;
}

// Looks up a single host's current location.
private HostLocation getHostLocation(HostId hostId) {
    return services.host().getHost(hostId).location();
}

// Produces a set of host ids from the specified JSON array.
private Set<HostId> getHostIds(ArrayNode ids) {
    Set<HostId> hostIds = new HashSet<>();
    ids.forEach(id -> hostIds.add(hostId(id.asText())));
    return hostIds;
}
// Tells the client that the initial topology dump is complete.
private void sendTopoStartDone() {
    sendMessage(JsonUtils.envelope(TOPO_START_DONE, objectNode()));
}

// (Re)starts the periodic summary refresh task.
private synchronized void startSummaryMonitoring() {
    // Cancel any previous task so at most one is ever scheduled.
    stopSummaryMonitoring();
    summaryTask = new SummaryMonitor();
    timer.schedule(summaryTask, SUMMARY_PERIOD, SUMMARY_PERIOD);
    summaryRunning = true;
}

// Stops the periodic summary refresh task, if one is running.
private synchronized void stopSummaryMonitoring() {
    if (summaryTask != null) {
        summaryTask.cancel();
        summaryTask = null;
    }
    summaryRunning = false;
}
// Adds all internal listeners.
private synchronized void addListeners() {
    listenersRemoved = false;
    services.cluster().addListener(clusterListener);
    services.mastership().addListener(mastershipListener);
    services.device().addListener(deviceListener);
    services.link().addListener(linkListener);
    services.host().addListener(hostListener);
    services.intent().addListener(intentListener);
    services.flow().addListener(flowListener);
}

// Removes all internal listeners; idempotent via the listenersRemoved flag
// (can be triggered by both topoStop and destroy).
private synchronized void removeListeners() {
    if (!listenersRemoved) {
        listenersRemoved = true;
        services.cluster().removeListener(clusterListener);
        services.mastership().removeListener(mastershipListener);
        services.device().removeListener(deviceListener);
        services.link().removeListener(linkListener);
        services.host().removeListener(hostListener);
        services.intent().removeListener(intentListener);
        services.flow().removeListener(flowListener);
    }
}
// Cluster event listener.
// TODO: Superceded by UiSharedTopologyModel.ModelEventListener
@Deprecated
private class InternalClusterListener implements ClusterEventListener {
@Override
public void event(ClusterEvent event) {
msgSender.execute(() -> sendMessage(instanceMessage(event, null)));
}
}
// Mastership change listener
// TODO: Superceded by UiSharedTopologyModel.ModelEventListener
@Deprecated
private class InternalMastershipListener implements MastershipListener {
@Override
public void event(MastershipEvent event) {
msgSender.execute(() -> {
sendAllInstances(UPDATE_INSTANCE);
Device device = services.device().getDevice(event.subject());
if (device != null) {
sendMessage(deviceMessage(new DeviceEvent(DEVICE_UPDATED, device)));
}
});
}
}
// Device event listener.
// TODO: Superceded by UiSharedTopologyModel.ModelEventListener
@Deprecated
private class InternalDeviceListener implements DeviceListener {
@Override
public void event(DeviceEvent event) {
if (event.type() != PORT_STATS_UPDATED) {
msgSender.execute(() -> sendMessage(deviceMessage(event)));
msgSender.execute(traffic::pokeIntent);
eventAccummulator.add(event);
}
}
}
// Link event listener: forwards link events to the UI, pokes the traffic
// monitor, and accumulates the event for the summary pane.
// TODO: Superceded by UiSharedTopologyModel.ModelEventListener
@Deprecated
private class InternalLinkListener implements LinkListener {
    @Override
    public void event(LinkEvent event) {
        msgSender.execute(() -> sendMessage(composeLinkMessage(event)));
        msgSender.execute(traffic::pokeIntent);
        eventAccummulator.add(event);
    }
}
// Host event listener: forwards host events to the UI, pokes the traffic
// monitor, and accumulates the event for the summary pane.
// TODO: Superceded by UiSharedTopologyModel.ModelEventListener
@Deprecated
private class InternalHostListener implements HostListener {
    @Override
    public void event(HostEvent event) {
        msgSender.execute(() -> sendMessage(hostMessage(event)));
        msgSender.execute(traffic::pokeIntent);
        eventAccummulator.add(event);
    }
}
// Intent event listener: pokes the traffic monitor (intent changes may
// alter highlighted paths) and accumulates the event for the summary pane.
// No message is sent directly to the UI.
// TODO: Superceded by UiSharedTopologyModel.ModelEventListener
@Deprecated
private class InternalIntentListener implements IntentListener {
    @Override
    public void event(IntentEvent event) {
        msgSender.execute(traffic::pokeIntent);
        eventAccummulator.add(event);
    }
}
// Flow rule event listener.
// Accumulates flow rule events for the summary pane only; nothing is
// forwarded to the UI directly.
// TODO: Superceded by UiSharedTopologyModel.ModelEventListener
@Deprecated
private class InternalFlowListener implements FlowRuleListener {
    @Override
    public void event(FlowRuleEvent event) {
        eventAccummulator.add(event);
    }
}
// === SUMMARY MONITORING

// Periodic update of the summary information. Scheduled as a TimerTask;
// only requests a summary while the summary pane is active.
private class SummaryMonitor extends TimerTask {
    @Override
    public void run() {
        try {
            if (summaryRunning) {
                msgSender.execute(() -> requestSummary());
            }
        } catch (Exception e) {
            log.warn("Unable to handle summary request due to {}", e.getMessage());
            // NOTE(review): logged at warn here but at debug in
            // InternalEventAccummulator.processItems — confirm the level
            // difference is intentional.
            log.warn("Boom!", e);
        }
    }
}
// Accumulates events to drive methodic update of the summary pane.
// Batches up to MAX_EVENTS events (or MAX_BATCH_MS / MAX_IDLE_MS timeouts)
// before triggering a single summary refresh.
private class InternalEventAccummulator extends AbstractAccumulator<Event> {
    protected InternalEventAccummulator() {
        super(new Timer("topo-summary"), MAX_EVENTS, MAX_BATCH_MS, MAX_IDLE_MS);
    }

    @Override
    public void processItems(List<Event> items) {
        // Start-of-Debugging -- Keep in until ONOS-2572 is fixed for reals
        long now = System.currentTimeMillis();
        String me = this.toString();
        // shorten "pkg.Class@hash" to "me@hash" for readable debug output
        String miniMe = me.replaceAll("^.*@", "me@");
        log.debug("Time: {}; this: {}, processing items ({} events)",
                  now, miniMe, items.size());
        // End-of-Debugging

        try {
            if (summaryRunning) {
                msgSender.execute(() -> requestSummary());
            }
        } catch (Exception e) {
            log.warn("Unable to handle summary request due to {}", e.getMessage());
            log.debug("Boom!", e);
        }
    }
}
}
|
|
package us.kbase.kidl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import us.kbase.common.service.test.Tuple2;
/**
 * Represents a single function definition ({@code funcdef}) from a KIDL
 * spec-file: its name, async flag, authentication requirement, doc comment,
 * input parameters and return values. Instances are either built directly
 * or populated from a parse-tree map via {@link #loadFromMap}.
 */
public class KbFuncdef implements KbModuleComp {
    private String name;                    // function name as declared in the spec
    private boolean async;                  // true if declared as an async function
    private String authentication;          // e.g. "required"/"optional"; may be null
    private String comment;                 // documentation comment attached to the funcdef
    private List<KbParameter> parameters;   // input arguments, in declaration order
    private List<KbParameter> returnType;   // returned values, in declaration order
    private Map<?,?> data = null;           // raw parse-tree map this funcdef was loaded from

    public KbFuncdef() {}

    public KbFuncdef(String name, String comment) {
        this(name, comment, false);
    }

    public KbFuncdef(String name, String comment, boolean async) {
        this.name = name;
        this.async = async;
        // normalize a missing comment to the empty string
        this.comment = comment == null ? "" : comment;
        parameters = new ArrayList<KbParameter>();
        returnType = new ArrayList<KbParameter>();
    }

    /**
     * Populates this funcdef from a parse-tree map produced by the KIDL parser.
     * @param data raw map holding "name", "async", "authentication", "comment",
     *        "parameters" and "return_type" entries
     * @param defaultAuth unused here; see inline note
     * @return this instance, for chaining
     * @throws KidlParseException if a required property is missing or malformed
     */
    public KbFuncdef loadFromMap(Map<?,?> data, String defaultAuth) throws KidlParseException {
        name = Utils.prop(data, "name");
        async = (0 != Utils.intPropFromString(data, "async"));
        authentication = Utils.prop(data, "authentication"); // defaultAuth was already involved on kidl stage
        comment = Utils.prop(data, "comment");
        parameters = loadParameters(Utils.propList(data, "parameters"), false);
        returnType = loadParameters(Utils.propList(data, "return_type"), true);
        this.data = data;
        return this;
    }

    // Builds an unmodifiable list of KbParameter from raw parse-tree maps;
    // each parameter is given its 1-based position within the list.
    private static List<KbParameter> loadParameters(List<?> inputList, boolean isReturn) throws KidlParseException {
        List<KbParameter> ret = new ArrayList<KbParameter>();
        for (Map<?,?> data : Utils.repareTypingMap(inputList)) {
            ret.add(new KbParameter().loadFromMap(data, isReturn, ret.size() + 1));
        }
        return Collections.unmodifiableList(ret);
    }

    public String getName() {
        return name;
    }

    public boolean isAsync() {
        return async;
    }

    public String getAuthentication() {
        return authentication;
    }

    /** @return true if authentication is declared as required. */
    public boolean isAuthenticationRequired() {
        return KbAuthdef.REQUIRED.equals(authentication);
    }

    /** @return true if authentication is declared as optional. */
    public boolean isAuthenticationOptional() {
        return KbAuthdef.OPTIONAL.equals(authentication);
    }

    public void setAuthentication(String authentication) {
        this.authentication = authentication;
    }

    public String getComment() {
        return comment;
    }

    public List<KbParameter> getParameters() {
        return parameters;
    }

    public List<KbParameter> getReturnType() {
        return returnType;
    }

    /** @return the raw parse-tree map this funcdef was loaded from, or null. */
    public Map<?, ?> getData() {
        return data;
    }

    // Converts a list of parameters into their JSON representations.
    private List<Object> toJson(List<KbParameter> list) {
        List<Object> ret = new ArrayList<Object>();
        for (KbParameter param : list)
            ret.add(param.toJson());
        return ret;
    }

    /**
     * Serializes this funcdef into the map structure used by the KIDL JSON
     * representation (the "!" entry carries the Perl KBT type tag).
     */
    @Override
    public Object toJson() {
        Map<String, Object> ret = new TreeMap<String, Object>();
        ret.put("!", "Bio::KBase::KIDL::KBT::Funcdef");
        ret.put("annotations", new KbAnnotations().toJson(false));
        ret.put("async", async ? "1" : "0");
        ret.put("authentication", authentication);
        ret.put("comment", comment);
        ret.put("name", name);
        ret.put("parameters", toJson(parameters));
        ret.put("return_type", toJson(returnType));
        return ret;
    }

    /**
     * Builds the template context used for code/doc generation: argument and
     * return variable names, counts, per-parameter maps, and English type
     * documentation lines ("arg_doc").
     */
    @Override
    public Map<String, Object> forTemplates() {
        Map<String, Object> ret = new LinkedHashMap<String, Object>();
        ret.put("name", name);
        ret.put("arg_count", parameters.size());
        List<String> paramNames = getNameList(parameters, false);
        ret.put("args", getNames(paramNames, null));
        ret.put("arg_vars", getNames(paramNames, "$"));
        ret.put("ret_count", returnType.size());
        List<String> returnNames = getNameList(returnType, true);
        ret.put("ret_vars", getNames(returnNames, "$"));
        ret.put("authentication", authentication == null ? "none" : authentication);
        // Build the "$name is <type description>" documentation lines for all
        // arguments and return values, expanding nested types breadth-first.
        List<String> docLines = new ArrayList<String>();
        LinkedList<Tuple2<String, KbType>> typeQueue = new LinkedList<Tuple2<String, KbType>>();
        for (int paramPos = 0; paramPos < parameters.size(); paramPos++) {
            KbParameter arg = parameters.get(paramPos);
            String item = paramNames.get(paramPos);
            typeQueue.add(new Tuple2<String, KbType>().withE1("$" + item).withE2(arg.getType()));
        }
        for (int returnPos = 0; returnPos < returnType.size(); returnPos++) {
            KbParameter arg = returnType.get(returnPos);
            String item = returnNames.get(returnPos);
            typeQueue.add(new Tuple2<String, KbType>().withE1("$" + item).withE2(arg.getType()));
        }
        processArgDoc(typeQueue, docLines, null, true);
        ret.put("arg_doc", docLines);
        ret.put("doc", Utils.removeStarsInComment(comment));
        List<Object> params = new ArrayList<Object>();
        for (int paramPos = 0; paramPos < parameters.size(); paramPos++) {
            KbParameter param = parameters.get(paramPos);
            Map<String, Object> paramMap = param.forTemplates(paramNames.get(paramPos));
            paramMap.put("index", paramPos + 1);  // 1-based position for templates
            params.add(paramMap);
        }
        ret.put("params", params);
        List<Object> returns = new ArrayList<Object>();
        for (int retPos = 0; retPos < returnType.size(); retPos++) {
            KbParameter retParam = returnType.get(retPos);
            Map<String, Object> paramMap = retParam.forTemplates(returnNames.get(retPos));
            paramMap.put("index", retPos + 1);  // 1-based position for templates
            returns.add(paramMap);
        }
        ret.put("returns", returns);
        if (isAsync())
            ret.put("async", true);
        return ret;
    }

    /**
     * Emits "key is <description>" doc lines for every named type in the
     * queue, recursing into sub-types collected by getEnglishTypeDescr.
     * allKeys prevents a key from being documented twice across recursion
     * levels; topLevel defers sub-type expansion until the whole level is
     * processed (sub-types are then handled by the trailing recursive call).
     */
    private static void processArgDoc(LinkedList<Tuple2<String, KbType>> typeQueue,
            List<String> docLines, Set<String> allKeys, boolean topLevel) {
        if (allKeys == null)
            allKeys = new HashSet<String>();
        List<String> additional = new ArrayList<>();
        LinkedList<Tuple2<String, KbType>> subQueue = new LinkedList<Tuple2<String, KbType>>();
        while (!typeQueue.isEmpty()) {
            Tuple2<String, KbType> namedType = typeQueue.removeFirst();
            String key = namedType.getE1();
            if (allKeys.contains(key))
                continue;
            allKeys.add(key);
            KbType type = namedType.getE2();
            additional.clear();
            String argLine = key + " is " +
                    Utils.getEnglishTypeDescr(type, subQueue, allKeys, additional);
            if (additional.size() > 0)
                argLine += ":";
            docLines.add(argLine);
            // extra lines from getEnglishTypeDescr are indented one tab;
            // empty entries become blank separator lines
            for (String add : additional)
                if (add.isEmpty()) {
                    docLines.add("");
                } else {
                    docLines.add("\t" + add);
                }
            if (subQueue.size() > 0 && !topLevel) {
                processArgDoc(subQueue, docLines, allKeys, false);
                if (subQueue.size() > 0)
                    throw new IllegalStateException("Not empty: " + subQueue);
            }
        }
        if (subQueue.size() > 0)
            processArgDoc(subQueue, docLines, allKeys, false);
    }

    /**
     * Produces a display name for every parameter. Unnamed returns become
     * "return" (or "return_N" when several); unnamed arguments take their
     * typedef name or fall back to "arg_N". Duplicate names are then
     * disambiguated with numeric suffixes (every occurrence of a duplicated
     * name gets a suffix, including the first).
     */
    private static List<String> getNameList(List<KbParameter> args, boolean returned) {
        List<String> ret = new ArrayList<String>();
        for (int i = 0; i < args.size(); i++) {
            KbParameter arg = args.get(i);
            String item = arg.getOriginalName();
            if (item == null) {
                if (returned) {
                    item = "return" + (args.size() > 1 ? ("_" + (i + 1)) : "");
                } else {
                    KbType type = arg.getType();
                    if (type instanceof KbTypedef) {
                        item = ((KbTypedef)type).getName();
                    } else {
                        item = "arg_" + (i + 1);
                    }
                }
            }
            ret.add(item);
        }
        // count[0] = occurrences of the name, count[1] = suffix counter
        Map<String, int[]> valToCount = new HashMap<String, int[]>();
        for (String val : ret) {
            int[] count = valToCount.get(val);
            if (count == null) {
                valToCount.put(val, new int[] {1, 0});
            } else {
                count[0]++;
            }
        }
        for (int pos = 0; pos < ret.size(); pos++) {
            String val = ret.get(pos);
            int[] count = valToCount.get(val);
            if (count[0] > 1) {
                val += "_" + (++count[1]);
                ret.set(pos, val);
            }
        }
        return ret;
    }

    // Joins names with ", ", optionally prefixing each (e.g. "$" for Perl vars).
    private static String getNames(List<String> items, String prefix) {
        StringBuilder ret = new StringBuilder();
        for (String arg : items) {
            if (ret.length() > 0)
                ret.append(", ");
            if (prefix != null)
                ret.append(prefix);
            ret.append(arg);
        }
        return ret.toString();
    }
}
|
|
package by.hut.flat.calendar.cell;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.drawable.Drawable;
import by.hut.flat.calendar.core.Config;
import by.hut.flat.calendar.core.Day;
import by.hut.flat.calendar.internal.FastBitmapDrawable;
import by.hut.flat.calendar.utils.Date;
import by.hut.flat.calendar.utils.Dimension;
/**
 * Renders the static background bitmap for a single calendar cell: the
 * cell's fill color, optional debt/prepay amounts, a gray border, and a
 * red inner border when the cell represents today.
 *
 * NOTE(review): borderPaint/redBorderPaint/debtPaint/prepayPaint are shared
 * static Paint objects whose text size is mutated per instance in the
 * constructor — not safe if cells are ever constructed concurrently.
 */
public class FlatCellBackground {
    protected Paint bgPaint = new Paint();  // per-cell fill; color set from day.bgColor
    protected static final Paint borderPaint = new Paint();
    protected static final Paint redBorderPaint = new Paint();
    protected static final Paint debtPaint = new Paint(Paint.SUBPIXEL_TEXT_FLAG |Paint.ANTI_ALIAS_FLAG);
    protected static final Paint prepayPaint = new Paint(Paint.SUBPIXEL_TEXT_FLAG |Paint.ANTI_ALIAS_FLAG);

    // Border geometry in pixels; redBorderPadding may be negative to overlap.
    private static final int borderSize = 2;
    private static final int redBorderSize = 4;
    private static final int redBorderPadding = -1;

    // Text sizes/margins as fractions of the cell dimensions.
    private static float debtTextSizePercent = 0.2f;
    private static float prepayTextSizePercent = 0.2f;
    private static float debtMarginTopPercent = 0.0f;
    private static float prepayMarginTopPercent = 0.0f;
    private static float debtMarginPercent = 0.04f;
    private static float prepayMarginPercent = 0.04f;

    // The border is split into a "little" (floor) and "big" (ceil) half so an
    // odd total width still tiles cleanly between adjacent cells.
    private static final int borderSizePartLittle = calcPartLittle(borderSize);
    private static final int borderSizePartBig = calcPartBig(borderSize);

    private float debtTextSize;
    private float prepayTextSize;
    private float debtMarginTop;
    private float prepayMarginTop;
    private float debtMargin;
    private float prepayMargin;

    private static final int borderColor = Color.LTGRAY;
    private static final int redBorderColor = 0xffff0000;
    private static final int debtColor = Color.BLACK;
    private static final int prepayColor = Color.BLACK;

    static {
        borderPaint.setColor(borderColor);
        redBorderPaint.setColor(redBorderColor);
        debtPaint.setColor(debtColor);
        prepayPaint.setColor(prepayColor);
    }

    private Day day;
    private Date date;
    private Date today;
    protected final Dimension dimension;
    protected final boolean isToday;
    protected int dayOfWeek = 0;
    protected Drawable background;  // the finished cell background
    protected Canvas canvas;        // draws into the backing bitmap

    // Class invariant checked via assert after construction.
    private boolean invariant(){
        return day != null
            && dimension != null;
    }

    /**
     * Builds the background for a concrete day cell.
     * @param dimension pixel size of the cell
     * @param day the day model (fill color, debt/prepay amounts, date)
     */
    public FlatCellBackground(Dimension dimension,Day day){
        this.day = day;
        this.date = this.day.date;
        this.today = Config.INST.SYSTEM.TODAY;
        this.dimension = dimension;
        this.isToday = date.isEqual(today);
        this.bgPaint.setColor(day.bgColor);
        this.debtTextSize = calcDebtTextSize();
        this.debtMarginTop = this.calcDebtMarginTop();
        this.debtMargin = this.calcDebtMargin();
        this.prepayTextSize = calcPrepayTextSize();
        this.prepayMargin = this.calcPrepayMargin();
        this.prepayMarginTop = this.calcPrepayMarginTop();
        debtPaint.setTextSize(debtTextSize);
        prepayPaint.setTextSize(prepayTextSize);
        assert invariant();
        initBackground();
    }

    /**
     * Builds a background for a day-of-week header cell (no Day model).
     * NOTE(review): day stays null here, yet drawBackground() dereferences
     * day — presumably a subclass overrides drawBackground() for this
     * constructor's use case; confirm before relying on it directly.
     */
    public FlatCellBackground(Dimension dimension, int dayOfWeek) {
        this.dimension = dimension;
        this.isToday = false;
        this.dayOfWeek = dayOfWeek;
        initBackground();
    }

    // Creates the backing bitmap, paints fill + border, and wraps it in a
    // FastBitmapDrawable for cheap reuse.
    private void initBackground(){
        Bitmap bitmap = Bitmap.createBitmap(dimension.width, dimension.height, Bitmap.Config.RGB_565);
        canvas = new Canvas(bitmap);
        drawBackground();
        drawBorder();
        background = new FastBitmapDrawable(bitmap);
    }

    /* Draw background */
    protected void drawBackground(){
        MaidenBackground.draw(canvas,bgPaint,dimension,day.background);
        drawDebt();
        drawPrepay();
    }

    // Debt amount, left-aligned.
    private void drawDebt(){
        if (day.debt > 0){
            canvas.drawText(""+day.debt, debtMargin, debtMarginTop+this.debtTextSize, debtPaint);
        }
    }

    // Prepay amount, right-aligned (offset by the measured text width).
    private void drawPrepay(){
        if (day.prepay > 0){
            canvas.drawText(""+day.prepay, this.dimension.width-prepayMargin-prepayPaint.measureText(""+day.prepay), prepayMarginTop+this.prepayTextSize, prepayPaint);
        }
    }

    /*------------------------------------------------------------
    --------------------------- B O R D E R ----------------------
    ------------------------------------------------------------*/
    private void drawBorder(){
        drawBorderTop();    // should be drawn before left and right
        drawBorderBottom(); // should be drawn before left and right
        drawBorderRight();  // should be drawn after top and bottom
        drawBorderLeft();   // should be drawn after top and bottom
    }

    private void drawBorderTop(){
        canvas.drawRect(0, 0, dimension.width, borderSizePartLittle, borderPaint);
        if (isToday){
            int left = borderSizePartLittle+redBorderPadding+1;
            int top = borderSizePartLittle+redBorderPadding;
            int right = dimension.width-borderSizePartBig-redBorderPadding-1;
            int bottom = borderSizePartLittle+redBorderPadding+redBorderSize;
            canvas.drawRect(left, top, right,bottom , redBorderPaint);
        }
    }

    private void drawBorderBottom(){
        canvas.drawRect(0, dimension.height-borderSizePartBig, dimension.width, dimension.height, borderPaint);
        if (isToday){
            int left = borderSizePartLittle+redBorderPadding+1;
            int top = dimension.height-borderSizePartBig-redBorderPadding-redBorderSize;
            int right = dimension.width-borderSizePartBig-redBorderPadding-1;
            int bottom = dimension.height-borderSizePartBig-redBorderPadding;
            canvas.drawRect(left, top, right,bottom , redBorderPaint);
        }
    }

    private void drawBorderRight(){
        canvas.drawRect(dimension.width-borderSizePartBig, 0, dimension.width, dimension.height, borderPaint);
        if (isToday){
            int left = dimension.width-borderSizePartBig-redBorderPadding-redBorderSize;
            int top = borderSizePartLittle + redBorderPadding + 1;
            int right = dimension.width-borderSizePartBig-redBorderPadding;
            int bottom = dimension.height - borderSizePartBig - redBorderPadding - 1;
            canvas.drawRect(left, top, right,bottom , redBorderPaint);
        }
    }

    private void drawBorderLeft(){
        canvas.drawRect(0, 0, borderSizePartLittle, dimension.height, borderPaint);
        if (isToday){
            int left = borderSizePartLittle+redBorderPadding;
            int top = borderSizePartLittle + redBorderPadding + 1;
            int right = borderSizePartLittle+redBorderPadding + redBorderSize;
            int bottom = dimension.height - borderSizePartBig - redBorderPadding - 1;
            canvas.drawRect(left, top, right,bottom , redBorderPaint);
        }
    }

    // Smaller half of the border width (floor of border/2).
    private static int calcPartLittle(int border){
        return (int) Math.floor((double)border/(double)2);
    }

    // Larger half of the border width (ceil of border/2).
    private static int calcPartBig(int border){
        return (int) Math.ceil((double)border/(double)2);
    }

    private float calcDebtTextSize(){
        return this.dimension.width * debtTextSizePercent;
    }

    private float calcPrepayTextSize(){
        return this.dimension.width * prepayTextSizePercent;
    }

    private float calcDebtMarginTop(){
        return this.dimension.height * debtMarginTopPercent;
    }

    private float calcPrepayMarginTop(){
        return this.dimension.height * prepayMarginTopPercent;
    }

    private float calcDebtMargin(){
        return this.dimension.width * debtMarginPercent;
    }

    private float calcPrepayMargin(){
        return this.dimension.width * prepayMarginPercent;
    }

    /*------------------------------------------------------------
    -------------------------- G E T T E R S ---------------------
    ------------------------------------------------------------*/
    /** @return the pre-rendered background drawable for this cell. */
    public Drawable getBackground(){
        return this.background;
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools;
import java.io.BufferedReader;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Stack;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.InvalidInputException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileRecordReader;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
* A Map-reduce program to recursively copy directories between
* different file-systems.
*/
public class DistCp implements Tool {
public static final Log LOG = LogFactory.getLog(DistCp.class);
private static final String NAME = "distcp";
private static final String usage = NAME
+ " [OPTIONS] <srcurl>* <desturl>" +
"\n\nOPTIONS:" +
"\n-p[rbugp] Preserve status" +
"\n r: replication number" +
"\n b: block size" +
"\n u: user" +
"\n g: group" +
"\n p: permission" +
"\n -p alone is equivalent to -prbugp" +
"\n-i Ignore failures" +
"\n-log <logdir> Write logs to <logdir>" +
"\n-m <num_maps> Maximum number of simultaneous copies" +
"\n-overwrite Overwrite destination" +
"\n-update Overwrite if src size different from dst size" +
"\n-f <urilist_uri> Use list at <urilist_uri> as src list" +
"\n-filelimit <n> Limit the total number of files to be <= n" +
"\n-sizelimit <n> Limit the total size to be <= n bytes" +
"\n-delete Delete the files existing in the dst but not in src" +
"\n\nNOTE 1: if -overwrite or -update are set, each source URI is " +
"\n interpreted as an isomorphic update to an existing directory." +
"\nFor example:" +
"\nhadoop " + NAME + " -p -update \"hdfs://A:8020/user/foo/bar\" " +
"\"hdfs://B:8020/user/foo/baz\"\n" +
"\n would update all descendants of 'baz' also in 'bar'; it would " +
"\n *not* update /user/foo/baz/bar" +
"\n\nNOTE 2: The parameter <n> in -filelimit and -sizelimit can be " +
"\n specified with symbolic representation. For examples," +
"\n 1230k = 1230 * 1024 = 1259520" +
"\n 891g = 891 * 1024^3 = 956703965184" +
"\n";
private static final long BYTES_PER_MAP = 256 * 1024 * 1024;
private static final int MAX_MAPS_PER_NODE = 20;
private static final int SYNC_FILE_MAX = 10;
static enum Counter { COPY, SKIP, FAIL, BYTESCOPIED, BYTESEXPECTED }
/**
 * Command-line flags and the JobConf property names they map onto.
 */
static enum Options {
    DELETE("-delete", NAME + ".delete"),
    FILE_LIMIT("-filelimit", NAME + ".limit.file"),
    SIZE_LIMIT("-sizelimit", NAME + ".limit.size"),
    IGNORE_READ_FAILURES("-i", NAME + ".ignore.read.failures"),
    PRESERVE_STATUS("-p", NAME + ".preserve.status"),
    OVERWRITE("-overwrite", NAME + ".overwrite.always"),
    UPDATE("-update", NAME + ".overwrite.ifnewer");

    final String cmd, propertyname;  // CLI flag and its JobConf property key

    private Options(String cmd, String propertyname) {
        this.cmd = cmd;
        this.propertyname = propertyname;
    }

    /**
     * Parses the argument following this flag (at args[offset]) as a
     * positive long, accepting symbolic sizes like "1230k" or "891g".
     * @throws IllegalArgumentException if the value is missing or <= 0
     */
    private long parseLong(String[] args, int offset) {
        if (offset == args.length) {
            throw new IllegalArgumentException("<n> not specified in " + cmd);
        }
        long n = StringUtils.TraditionalBinaryPrefix.string2long(args[offset]);
        if (n <= 0) {
            throw new IllegalArgumentException("n = " + n + " <= 0 in " + cmd);
        }
        return n;
    }
}
/**
 * File attributes that -p can preserve; each has a one-letter symbol
 * (first letter of its lower-cased name, e.g. 'r' for REPLICATION,
 * 'b' for BLOCK_SIZE).
 */
static enum FileAttribute {
    BLOCK_SIZE, REPLICATION, USER, GROUP, PERMISSION;

    final char symbol;  // single-character CLI flag for this attribute

    private FileAttribute() {symbol = toString().toLowerCase().charAt(0);}

    /**
     * Parses the letters following -p (e.g. "rbugp") into an attribute set.
     * A null/empty string means "preserve everything".
     * @throws IllegalArgumentException on a repeated or unknown letter
     */
    static EnumSet<FileAttribute> parse(String s) {
        if (s == null || s.length() == 0) {
            return EnumSet.allOf(FileAttribute.class);
        }
        EnumSet<FileAttribute> set = EnumSet.noneOf(FileAttribute.class);
        FileAttribute[] attributes = values();
        for(char c : s.toCharArray()) {
            // linear scan for the attribute matching this letter
            int i = 0;
            for(; i < attributes.length && c != attributes[i].symbol; i++);
            if (i < attributes.length) {
                if (!set.contains(attributes[i])) {
                    set.add(attributes[i]);
                } else {
                    throw new IllegalArgumentException("There are more than one '"
                        + attributes[i].symbol + "' in " + s);
                }
            } else {
                throw new IllegalArgumentException("'" + c + "' in " + s
                    + " is undefined.");
            }
        }
        return set;
    }
}
static final String TMP_DIR_LABEL = NAME + ".tmp.dir";
static final String DST_DIR_LABEL = NAME + ".dest.path";
static final String JOB_DIR_LABEL = NAME + ".job.dir";
static final String MAX_MAPS_LABEL = NAME + ".max.map.tasks";
static final String SRC_LIST_LABEL = NAME + ".src.list";
static final String SRC_COUNT_LABEL = NAME + ".src.count";
static final String TOTAL_SIZE_LABEL = NAME + ".total.size";
static final String DST_DIR_LIST_LABEL = NAME + ".dst.dir.list";
static final String BYTES_PER_MAP_LABEL = NAME + ".bytes.per.map";
static final String PRESERVE_STATUS_LABEL
= Options.PRESERVE_STATUS.propertyname + ".value";
private JobConf conf;
/**
 * Installs the configuration, reusing it directly when it is already a
 * JobConf and wrapping it in one otherwise.
 */
public void setConf(Configuration conf) {
    this.conf = (conf instanceof JobConf) ? (JobConf) conf : new JobConf(conf);
}
/** @return the JobConf this tool was configured with. */
public Configuration getConf() {
    return conf;
}
/** Creates a DistCp tool bound to the given configuration. */
public DistCp(Configuration conf) {
    setConf(conf);
}
/**
 * An input/output pair of filenames: the source FileStatus plus the
 * destination path (relative, as a string). Serialized into the src file
 * list consumed by the copy mappers; field order in readFields/write must
 * stay in sync.
 */
static class FilePair implements Writable {
    FileStatus input = new FileStatus();  // source file metadata
    String output;                        // destination path string

    FilePair() { }

    FilePair(FileStatus input, String output) {
        this.input = input;
        this.output = output;
    }

    public void readFields(DataInput in) throws IOException {
        input.readFields(in);
        output = Text.readString(in);
    }

    public void write(DataOutput out) throws IOException {
        input.write(out);
        Text.writeString(out, output);
    }

    public String toString() {
        return input + " : " + output;
    }
}
/**
 * InputFormat of a distcp job responsible for generating splits of the src
 * file list. Each record in the list is (file length, FilePair); splits are
 * cut so each covers roughly totalSize/numSplits bytes of source data.
 */
static class CopyInputFormat implements InputFormat<Text, Text> {

    /**
     * Produce splits such that each is no greater than the quotient of the
     * total size and the number of splits requested.
     * @param job The handle to the JobConf object
     * @param numSplits Number of splits requested
     */
    public InputSplit[] getSplits(JobConf job, int numSplits)
        throws IOException {
        int cnfiles = job.getInt(SRC_COUNT_LABEL, -1);
        long cbsize = job.getLong(TOTAL_SIZE_LABEL, -1);
        String srcfilelist = job.get(SRC_LIST_LABEL, "");
        // all three must have been set by the job setup phase
        if (cnfiles < 0 || cbsize < 0 || "".equals(srcfilelist)) {
            throw new RuntimeException("Invalid metadata: #files(" + cnfiles +
                                       ") total_size(" + cbsize + ") listuri(" +
                                       srcfilelist + ")");
        }
        Path src = new Path(srcfilelist);
        FileSystem fs = src.getFileSystem(job);
        FileStatus srcst = fs.getFileStatus(src);

        ArrayList<FileSplit> splits = new ArrayList<FileSplit>(numSplits);
        LongWritable key = new LongWritable();   // source file length
        FilePair value = new FilePair();
        final long targetsize = cbsize / numSplits;  // bytes of src data per split
        long pos = 0L;   // byte offset of the current split's start in the list file
        long last = 0L;  // byte offset just past the last record read
        long acc = 0L;   // accumulated source bytes in the current split
        long cbrem = srcst.getLen();  // bytes of the list file not yet assigned
        SequenceFile.Reader sl = null;
        try {
            sl = new SequenceFile.Reader(fs, src, job);
            for (; sl.next(key, value); last = sl.getPosition()) {
                // if adding this split would put this split past the target size,
                // cut the last split and put this next file in the next split.
                if (acc + key.get() > targetsize && acc != 0) {
                    long splitsize = last - pos;
                    splits.add(new FileSplit(src, pos, splitsize, (String[])null));
                    cbrem -= splitsize;
                    pos = last;
                    acc = 0L;
                }
                acc += key.get();
            }
        }
        finally {
            checkAndClose(sl);
        }
        // whatever remains of the list file becomes the final split
        if (cbrem != 0) {
            splits.add(new FileSplit(src, pos, cbrem, (String[])null));
        }

        return splits.toArray(new FileSplit[splits.size()]);
    }

    /**
     * Returns a reader for this split of the src file list.
     */
    public RecordReader<Text, Text> getRecordReader(InputSplit split,
        JobConf job, Reporter reporter) throws IOException {
        return new SequenceFileRecordReader<Text, Text>(job, (FileSplit)split);
    }
}
/**
* FSCopyFilesMapper: The mapper for copying files between FileSystems.
*/
static class CopyFilesMapper
implements Mapper<LongWritable, FilePair, WritableComparable<?>, Text> {
// config
private int sizeBuf = 128 * 1024;
private FileSystem destFileSys = null;
private boolean ignoreReadFailures;
private boolean preserve_status;
private EnumSet<FileAttribute> preseved;
private boolean overwrite;
private boolean update;
private Path destPath = null;
private byte[] buffer = null;
private JobConf job;
// stats
private int failcount = 0;
private int skipcount = 0;
private int copycount = 0;
/** Builds a one-line "Copied/Skipped/Failed" progress summary. */
private String getCountString() {
    StringBuilder status = new StringBuilder();
    status.append("Copied: ").append(copycount);
    status.append(" Skipped: ").append(skipcount);
    status.append(" Failed: ").append(failcount);
    return status.toString();
}
// Pushes the current copy/skip/fail counts as the task's status string.
private void updateStatus(Reporter reporter) {
    reporter.setStatus(getCountString());
}
/**
 * Return true if dst should be replaced by src and the update flag is set.
 * Right now, this merely checks that the src and dst len are not equal.
 * This should be improved on once modification times, CRCs, etc. can
 * be meaningful in this context.
 * @throws IOException
 */
private boolean needsUpdate(FileStatus srcstatus,
    FileSystem dstfs, Path dstpath) throws IOException {
    return update && !sameFile(srcstatus.getPath().getFileSystem(job),
        srcstatus, dstfs, dstpath);
}
/**
 * Creates the output stream for a destination (tmp) file, removing any
 * existing file first. When -p is in effect, the selected attributes
 * (permission, replication, block size) are copied from the source file;
 * otherwise the destination filesystem defaults are used.
 */
private FSDataOutputStream create(Path f, Reporter reporter,
    FileStatus srcstat) throws IOException {
    if (destFileSys.exists(f)) {
        destFileSys.delete(f, false);
    }
    if (!preserve_status) {
        return destFileSys.create(f, true, sizeBuf, reporter);
    }

    // carry over only the attributes named in the -p flag
    FsPermission permission = preseved.contains(FileAttribute.PERMISSION)?
        srcstat.getPermission(): null;
    short replication = preseved.contains(FileAttribute.REPLICATION)?
        srcstat.getReplication(): destFileSys.getDefaultReplication();
    long blockSize = preseved.contains(FileAttribute.BLOCK_SIZE)?
        srcstat.getBlockSize(): destFileSys.getDefaultBlockSize();
    return destFileSys.create(f, permission, true, sizeBuf, replication,
        blockSize, reporter);
}
/**
 * Copy one source entry to the destination: directories are created
 * (possibly empty); files are copied to a tmp file, length-verified, then
 * renamed into place. Updates counters and status on skip/copy.
 * @param srcstat src path and metadata
 * @param relativedst dst path relative to the destination root
 * @param outc collector for SKIP records (and mapper failure log)
 * @param reporter used for counters and progress status
 */
private void copy(FileStatus srcstat, Path relativedst,
    OutputCollector<WritableComparable<?>, Text> outc, Reporter reporter)
    throws IOException {
    Path absdst = new Path(destPath, relativedst);
    int totfiles = job.getInt(SRC_COUNT_LABEL, -1);
    assert totfiles >= 0 : "Invalid file count " + totfiles;

    // if a directory, ensure created even if empty
    if (srcstat.isDir()) {
        if (destFileSys.exists(absdst)) {
            if (!destFileSys.getFileStatus(absdst).isDir()) {
                throw new IOException("Failed to mkdirs: " + absdst+" is a file.");
            }
        }
        else if (!destFileSys.mkdirs(absdst)) {
            throw new IOException("Failed to mkdirs " + absdst);
        }
        // TODO: when modification times can be set, directories should be
        // emitted to reducers so they might be preserved. Also, mkdirs does
        // not currently return an error when the directory already exists;
        // if this changes, all directory work might as well be done in reduce
        return;
    }

    // skip when not overwriting and the -update check says dst is current
    if (destFileSys.exists(absdst) && !overwrite
        && !needsUpdate(srcstat, destFileSys, absdst)) {
        outc.collect(null, new Text("SKIP: " + srcstat.getPath()));
        ++skipcount;
        reporter.incrCounter(Counter.SKIP, 1);
        updateStatus(reporter);
        return;
    }

    Path tmpfile = new Path(job.get(TMP_DIR_LABEL), relativedst);
    long cbcopied = 0L;
    FSDataInputStream in = null;
    FSDataOutputStream out = null;
    try {
        // open src file
        in = srcstat.getPath().getFileSystem(job).open(srcstat.getPath());
        reporter.incrCounter(Counter.BYTESEXPECTED, srcstat.getLen());
        // open tmp file
        out = create(tmpfile, reporter, srcstat);
        // copy file, reporting percentage progress as we go
        for(int cbread; (cbread = in.read(buffer)) >= 0; ) {
            out.write(buffer, 0, cbread);
            cbcopied += cbread;
            reporter.setStatus(
                String.format("%.2f ", cbcopied*100.0/srcstat.getLen())
                + absdst + " [ " +
                StringUtils.humanReadableInt(cbcopied) + " / " +
                StringUtils.humanReadableInt(srcstat.getLen()) + " ]");
        }
    } finally {
        checkAndClose(in);
        checkAndClose(out);
    }

    // verify that we actually transferred every byte into the tmp file
    if (cbcopied != srcstat.getLen()) {
        throw new IOException("File size not matched: copied "
            + bytesString(cbcopied) + " to tmpfile (=" + tmpfile
            + ") but expected " + bytesString(srcstat.getLen())
            + " from " + srcstat.getPath());
    }
    else {
        if (totfiles == 1) {
            // Copying a single file; use dst path provided by user as destination
            // rather than destination directory, if a file
            Path dstparent = absdst.getParent();
            if (!(destFileSys.exists(dstparent) &&
                  destFileSys.getFileStatus(dstparent).isDir())) {
                absdst = dstparent;
            }
        }
        if (destFileSys.exists(absdst) &&
            destFileSys.getFileStatus(absdst).isDir()) {
            throw new IOException(absdst + " is a directory");
        }
        if (!destFileSys.mkdirs(absdst.getParent())) {
            // fixed typo in error message ("craete" -> "create")
            throw new IOException("Failed to create parent dir: " + absdst.getParent());
        }
        rename(tmpfile, absdst);

        // re-verify the length after the rename landed
        FileStatus dststat = destFileSys.getFileStatus(absdst);
        if (dststat.getLen() != srcstat.getLen()) {
            destFileSys.delete(absdst, false);
            throw new IOException("File size not matched: copied "
                + bytesString(dststat.getLen()) + " to dst (=" + absdst
                + ") but expected " + bytesString(srcstat.getLen())
                + " from " + srcstat.getPath());
        }
        updatePermissions(srcstat, dststat);
    }

    // report at least once for each file
    ++copycount;
    reporter.incrCounter(Counter.BYTESCOPIED, cbcopied);
    reporter.incrCounter(Counter.COPY, 1);
    updateStatus(reporter);
}
/** rename tmp to dst, delete dst if already exists */
private void rename(Path tmp, Path dst) throws IOException {
    try {
        if (destFileSys.exists(dst)) {
            destFileSys.delete(dst, true);
        }
        if (!destFileSys.rename(tmp, dst)) {
            // FileSystem.rename signals failure via return value; convert
            // it to an exception so the wrapper below adds path context
            throw new IOException();
        }
    }
    catch(IOException cause) {
        // rewrap with both paths, preserving the original as the cause
        throw (IOException)new IOException("Fail to rename tmp file (=" + tmp
            + ") to destination file (=" + dst + ")").initCause(cause);
    }
}
private void updatePermissions(FileStatus src, FileStatus dst
) throws IOException {
if (preserve_status) {
DistCp.updatePermissions(src, dst, preseved, destFileSys);
}
}
static String bytesString(long b) {
return b + " bytes (" + StringUtils.humanReadableInt(b) + ")";
}
/** Mapper configuration.
* Extracts source and destination file system, as well as
* top-level paths on source and destination directories.
* Gets the named file systems, to be used later in map.
*/
public void configure(JobConf job)
{
destPath = new Path(job.get(DST_DIR_LABEL, "/"));
try {
destFileSys = destPath.getFileSystem(job);
} catch (IOException ex) {
throw new RuntimeException("Unable to get the named file system.", ex);
}
sizeBuf = job.getInt("copy.buf.size", 128 * 1024);
buffer = new byte[sizeBuf];
ignoreReadFailures = job.getBoolean(Options.IGNORE_READ_FAILURES.propertyname, false);
preserve_status = job.getBoolean(Options.PRESERVE_STATUS.propertyname, false);
if (preserve_status) {
preseved = FileAttribute.parse(job.get(PRESERVE_STATUS_LABEL));
}
update = job.getBoolean(Options.UPDATE.propertyname, false);
overwrite = !update && job.getBoolean(Options.OVERWRITE.propertyname, false);
this.job = job;
}
/** Map method. Copies one file from source file system to destination.
* @param key src len
* @param value FilePair (FileStatus src, Path dst)
* @param out Log of failed copies
* @param reporter
*/
public void map(LongWritable key,
FilePair value,
OutputCollector<WritableComparable<?>, Text> out,
Reporter reporter) throws IOException {
final FileStatus srcstat = value.input;
final Path relativedst = new Path(value.output);
try {
copy(srcstat, relativedst, out, reporter);
} catch (IOException e) {
++failcount;
reporter.incrCounter(Counter.FAIL, 1);
updateStatus(reporter);
final String sfailure = "FAIL " + relativedst + " : " +
StringUtils.stringifyException(e);
out.collect(null, new Text(sfailure));
LOG.info(sfailure);
try {
for (int i = 0; i < 3; ++i) {
try {
final Path tmp = new Path(job.get(TMP_DIR_LABEL), relativedst);
if (destFileSys.delete(tmp, true))
break;
} catch (Throwable ex) {
// ignore, we are just cleaning up
LOG.debug("Ignoring cleanup exception", ex);
}
// update status, so we don't get timed out
updateStatus(reporter);
Thread.sleep(3 * 1000);
}
} catch (InterruptedException inte) {
throw (IOException)new IOException().initCause(inte);
}
} finally {
updateStatus(reporter);
}
}
public void close() throws IOException {
if (0 == failcount || ignoreReadFailures) {
return;
}
throw new IOException(getCountString());
}
}
private static List<Path> fetchFileList(Configuration conf, Path srcList)
throws IOException {
List<Path> result = new ArrayList<Path>();
FileSystem fs = srcList.getFileSystem(conf);
BufferedReader input = null;
try {
input = new BufferedReader(new InputStreamReader(fs.open(srcList)));
String line = input.readLine();
while (line != null) {
result.add(new Path(line));
line = input.readLine();
}
} finally {
checkAndClose(input);
}
return result;
}
@Deprecated
public static void copy(Configuration conf, String srcPath,
String destPath, Path logPath,
boolean srcAsList, boolean ignoreReadFailures)
throws IOException {
final Path src = new Path(srcPath);
List<Path> tmp = new ArrayList<Path>();
if (srcAsList) {
tmp.addAll(fetchFileList(conf, src));
} else {
tmp.add(src);
}
EnumSet<Options> flags = ignoreReadFailures
? EnumSet.of(Options.IGNORE_READ_FAILURES)
: EnumSet.noneOf(Options.class);
final Path dst = new Path(destPath);
copy(conf, new Arguments(tmp, dst, logPath, flags, null,
Long.MAX_VALUE, Long.MAX_VALUE));
}
/** Sanity check for srcPath */
private static void checkSrcPath(Configuration conf, List<Path> srcPaths
) throws IOException {
List<IOException> rslt = new ArrayList<IOException>();
for (Path p : srcPaths) {
FileSystem fs = p.getFileSystem(conf);
if (!fs.exists(p)) {
rslt.add(new IOException("Input source " + p + " does not exist."));
}
}
if (!rslt.isEmpty()) {
throw new InvalidInputException(rslt);
}
}
  /**
   * Driver to copy srcPath to destPath depending on required protocol.
   * Validates sources, builds and runs the MapReduce job, re-applies
   * preserved attributes to directories afterwards, and always removes
   * the scratch directories — even when the job fails.
   * @param args arguments
   */
  static void copy(final Configuration conf, final Arguments args
      ) throws IOException {
    LOG.info("srcPaths=" + args.srcs);
    LOG.info("destPath=" + args.dst);
    checkSrcPath(conf, args.srcs);
    JobConf job = createJobConf(conf);
    if (args.preservedAttributes != null) {
      job.set(PRESERVE_STATUS_LABEL, args.preservedAttributes);
    }
    //Initialize the mapper
    try {
      setup(conf, job, args);
      JobClient.runJob(job);
      // directory attributes can only be applied after all maps finish
      finalize(conf, job, args.dst, args.preservedAttributes);
    } finally {
      //delete tmp
      fullyDelete(job.get(TMP_DIR_LABEL), job);
      //delete jobDirectory
      fullyDelete(job.get(JOB_DIR_LABEL), job);
    }
  }
private static void updatePermissions(FileStatus src, FileStatus dst,
EnumSet<FileAttribute> preseved, FileSystem destFileSys
) throws IOException {
String owner = null;
String group = null;
if (preseved.contains(FileAttribute.USER)
&& !src.getOwner().equals(dst.getOwner())) {
owner = src.getOwner();
}
if (preseved.contains(FileAttribute.GROUP)
&& !src.getGroup().equals(dst.getGroup())) {
group = src.getGroup();
}
if (owner != null || group != null) {
destFileSys.setOwner(dst.getPath(), owner, group);
}
if (preseved.contains(FileAttribute.PERMISSION)
&& !src.getPermission().equals(dst.getPermission())) {
destFileSys.setPermission(dst.getPath(), src.getPermission());
}
}
static private void finalize(Configuration conf, JobConf jobconf,
final Path destPath, String presevedAttributes) throws IOException {
if (presevedAttributes == null) {
return;
}
EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes);
if (!preseved.contains(FileAttribute.USER)
&& !preseved.contains(FileAttribute.GROUP)
&& !preseved.contains(FileAttribute.PERMISSION)) {
return;
}
FileSystem dstfs = destPath.getFileSystem(conf);
Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL));
SequenceFile.Reader in = null;
try {
in = new SequenceFile.Reader(dstdirlist.getFileSystem(jobconf),
dstdirlist, jobconf);
Text dsttext = new Text();
FilePair pair = new FilePair();
for(; in.next(dsttext, pair); ) {
Path absdst = new Path(destPath, pair.output);
updatePermissions(pair.input, dstfs.getFileStatus(absdst),
preseved, dstfs);
}
} finally {
checkAndClose(in);
}
}
static private class Arguments {
final List<Path> srcs;
final Path dst;
final Path log;
final EnumSet<Options> flags;
final String preservedAttributes;
final long filelimit;
final long sizelimit;
/**
* Arguments for distcp
* @param srcs List of source paths
* @param dst Destination path
* @param log Log output directory
* @param flags Command-line flags
* @param preservedAttributes Preserved attributes
* @param filelimit File limit
* @param sizelimit Size limit
*/
Arguments(List<Path> srcs, Path dst, Path log,
EnumSet<Options> flags, String preservedAttributes,
long filelimit, long sizelimit) {
this.srcs = srcs;
this.dst = dst;
this.log = log;
this.flags = flags;
this.preservedAttributes = preservedAttributes;
this.filelimit = filelimit;
this.sizelimit = sizelimit;
if (LOG.isTraceEnabled()) {
LOG.trace("this = " + this);
}
}
static Arguments valueOf(String[] args, Configuration conf
) throws IOException {
List<Path> srcs = new ArrayList<Path>();
Path dst = null;
Path log = null;
EnumSet<Options> flags = EnumSet.noneOf(Options.class);
String presevedAttributes = null;
long filelimit = Long.MAX_VALUE;
long sizelimit = Long.MAX_VALUE;
for (int idx = 0; idx < args.length; idx++) {
Options[] opt = Options.values();
int i = 0;
for(; i < opt.length && !args[idx].startsWith(opt[i].cmd); i++);
if (i < opt.length) {
flags.add(opt[i]);
if (opt[i] == Options.PRESERVE_STATUS) {
presevedAttributes = args[idx].substring(2);
FileAttribute.parse(presevedAttributes); //validation
}
else if (opt[i] == Options.FILE_LIMIT) {
filelimit = Options.FILE_LIMIT.parseLong(args, ++idx);
}
else if (opt[i] == Options.SIZE_LIMIT) {
sizelimit = Options.SIZE_LIMIT.parseLong(args, ++idx);
}
} else if ("-f".equals(args[idx])) {
if (++idx == args.length) {
throw new IllegalArgumentException("urilist_uri not specified in -f");
}
srcs.addAll(fetchFileList(conf, new Path(args[idx])));
} else if ("-log".equals(args[idx])) {
if (++idx == args.length) {
throw new IllegalArgumentException("logdir not specified in -log");
}
log = new Path(args[idx]);
} else if ("-m".equals(args[idx])) {
if (++idx == args.length) {
throw new IllegalArgumentException("num_maps not specified in -m");
}
try {
conf.setInt(MAX_MAPS_LABEL, Integer.valueOf(args[idx]));
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Invalid argument to -m: " +
args[idx]);
}
} else if ('-' == args[idx].codePointAt(0)) {
throw new IllegalArgumentException("Invalid switch " + args[idx]);
} else if (idx == args.length -1) {
dst = new Path(args[idx]);
} else {
srcs.add(new Path(args[idx]));
}
}
// mandatory command-line parameters
if (srcs.isEmpty() || dst == null) {
throw new IllegalArgumentException("Missing "
+ (dst == null ? "dst path" : "src"));
}
// incompatible command-line flags
final boolean isOverwrite = flags.contains(Options.OVERWRITE);
final boolean isUpdate = flags.contains(Options.UPDATE);
final boolean isDelete = flags.contains(Options.DELETE);
if (isOverwrite && isUpdate) {
throw new IllegalArgumentException("Conflicting overwrite policies");
}
if (isDelete && !isOverwrite && !isUpdate) {
throw new IllegalArgumentException(Options.DELETE.cmd
+ " must be specified with " + Options.OVERWRITE + " or "
+ Options.UPDATE + ".");
}
return new Arguments(srcs, dst, log, flags, presevedAttributes,
filelimit, sizelimit);
}
/** {@inheritDoc} */
public String toString() {
return getClass().getName() + "{"
+ "\n srcs = " + srcs
+ "\n dst = " + dst
+ "\n log = " + log
+ "\n flags = " + flags
+ "\n preservedAttributes = " + preservedAttributes
+ "\n filelimit = " + filelimit
+ "\n sizelimit = " + sizelimit
+ "\n}";
}
}
  /**
   * This is the main driver for recursively copying directories
   * across file systems. It takes at least two cmdline parameters. A source
   * URL and a destination URL. It then essentially does an "ls -lR" on the
   * source URL, and writes the output in a round-robin manner to all the map
   * input files. The mapper actually copies the files allotted to it. The
   * reduce is empty.
   * Returns 0 on success; negative values identify the failure class
   * (-1 bad arguments, -2 duplicated sources, -3 remote FS error, -999 other).
   */
  public int run(String[] args) {
    // NOTE: catch order matters — DuplicationException and RemoteException
    // both extend IOException/Exception and must precede the generic catch.
    try {
      copy(conf, Arguments.valueOf(args, conf));
      return 0;
    } catch (IllegalArgumentException e) {
      // bad command line: show the stack plus usage text
      System.err.println(StringUtils.stringifyException(e) + "\n" + usage);
      ToolRunner.printGenericCommandUsage(System.err);
      return -1;
    } catch (DuplicationException e) {
      System.err.println(StringUtils.stringifyException(e));
      return DuplicationException.ERROR_CODE;
    } catch (RemoteException e) {
      // unwrap well-known server-side exceptions for a readable message
      final IOException unwrapped = e.unwrapRemoteException(
          FileNotFoundException.class,
          AccessControlException.class,
          QuotaExceededException.class);
      System.err.println(StringUtils.stringifyException(unwrapped));
      return -3;
    } catch (Exception e) {
      System.err.println("With failures, global counters are inaccurate; " +
          "consider running with -i");
      System.err.println("Copy failed: " + StringUtils.stringifyException(e));
      return -999;
    }
  }
public static void main(String[] args) throws Exception {
JobConf job = new JobConf(DistCp.class);
DistCp distcp = new DistCp(job);
int res = ToolRunner.run(distcp, args);
System.exit(res);
}
/**
* Make a path relative with respect to a root path.
* absPath is always assumed to descend from root.
* Otherwise returned path is null.
*/
static String makeRelative(Path root, Path absPath) {
if (!absPath.isAbsolute()) {
throw new IllegalArgumentException("!absPath.isAbsolute(), absPath="
+ absPath);
}
String p = absPath.toUri().getPath();
StringTokenizer pathTokens = new StringTokenizer(p, "/");
for(StringTokenizer rootTokens = new StringTokenizer(
root.toUri().getPath(), "/"); rootTokens.hasMoreTokens(); ) {
if (!rootTokens.nextToken().equals(pathTokens.nextToken())) {
return null;
}
}
StringBuilder sb = new StringBuilder();
for(; pathTokens.hasMoreTokens(); ) {
sb.append(pathTokens.nextToken());
if (pathTokens.hasMoreTokens()) { sb.append(Path.SEPARATOR); }
}
return sb.length() == 0? ".": sb.toString();
}
/**
* Calculate how many maps to run.
* Number of maps is bounded by a minimum of the cumulative size of the
* copy / (distcp.bytes.per.map, default BYTES_PER_MAP or -m on the
* command line) and at most (distcp.max.map.tasks, default
* MAX_MAPS_PER_NODE * nodes in the cluster).
* @param totalBytes Count of total bytes for job
* @param job The job to configure
* @return Count of maps to run.
*/
private static void setMapCount(long totalBytes, JobConf job)
throws IOException {
int numMaps =
(int)(totalBytes / job.getLong(BYTES_PER_MAP_LABEL, BYTES_PER_MAP));
numMaps = Math.min(numMaps,
job.getInt(MAX_MAPS_LABEL, MAX_MAPS_PER_NODE *
new JobClient(job).getClusterStatus().getTaskTrackers()));
job.setNumMapTasks(Math.max(numMaps, 1));
}
/** Fully delete dir */
static void fullyDelete(String dir, Configuration conf) throws IOException {
if (dir != null) {
Path tmp = new Path(dir);
tmp.getFileSystem(conf).delete(tmp, true);
}
}
//Job configuration
private static JobConf createJobConf(Configuration conf) {
JobConf jobconf = new JobConf(conf, DistCp.class);
jobconf.setJobName(NAME);
// turn off speculative execution, because DFS doesn't handle
// multiple writers to the same file.
jobconf.setMapSpeculativeExecution(false);
jobconf.setInputFormat(CopyInputFormat.class);
jobconf.setOutputKeyClass(Text.class);
jobconf.setOutputValueClass(Text.class);
jobconf.setMapperClass(CopyFilesMapper.class);
jobconf.setNumReduceTasks(0);
return jobconf;
}
private static final Random RANDOM = new Random();
public static String getRandomId() {
return Integer.toString(RANDOM.nextInt(Integer.MAX_VALUE), 36);
}
  /**
   * Initialize DFSCopyFileMapper specific job-configuration.
   * Walks every source tree, writing the per-file copy plan (src list,
   * dst list, dst dir list) as SequenceFiles under a private job directory;
   * honors -update/-overwrite/-filelimit/-sizelimit; picks log and tmp
   * paths; and finally sets the map count from the total byte count.
   * @param conf : The dfs/mapred configuration.
   * @param jobConf : The handle to the jobConf object to be initialized.
   * @param args Arguments
   */
  private static void setup(Configuration conf, JobConf jobConf,
                            final Arguments args)
      throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());
    //set boolean values
    final boolean update = args.flags.contains(Options.UPDATE);
    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
    jobConf.setBoolean(Options.UPDATE.propertyname, update);
    jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
    jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
        args.flags.contains(Options.IGNORE_READ_FAILURES));
    jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname,
        args.flags.contains(Options.PRESERVE_STATUS));
    // private per-run job directory under the system dir, keyed by random id
    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path jobDirectory = new Path(jClient.getSystemDir(), NAME + "_" + randomId);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());
    FileSystem dstfs = args.dst.getFileSystem(conf);
    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
      dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }
    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
      String filename = "_distcp_logs_" + randomId;
      if (!dstExists || !dstIsDir) {
        // dst is (or will be) a plain file: put the logs next to it
        Path parent = args.dst.getParent();
        if (!dstfs.exists(parent)) {
          dstfs.mkdirs(parent);
        }
        logPath = new Path(parent, filename);
      } else {
        logPath = new Path(args.dst, filename);
      }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);
    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);
    Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf,
        srcfilelist, LongWritable.class, FilePair.class,
        SequenceFile.CompressionType.NONE);
    Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf,
        dstfilelist, Text.class, Text.class,
        SequenceFile.CompressionType.NONE);
    Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf,
        dstdirlist, Text.class, FilePair.class,
        SequenceFile.CompressionType.NONE);
    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory OR we're updating/overwriting
    // the contents of the destination directory.
    final boolean special =
      (args.srcs.size() == 1 && !dstExists) || update || overwrite;
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    boolean exceededlimit = false;
    try {
      // depth-first walk of every source tree, emitting one record per
      // file/dir until -filelimit/-sizelimit is exceeded
      for(Iterator<Path> srcItr = args.srcs.iterator();
          !exceededlimit && srcItr.hasNext(); ) {
        final Path src = srcItr.next();
        FileSystem srcfs = src.getFileSystem(conf);
        FileStatus srcfilestat = srcfs.getFileStatus(src);
        Path root = special && srcfilestat.isDir()? src: src.getParent();
        if (srcfilestat.isDir()) {
          ++srcCount;
        }
        Stack<FileStatus> pathstack = new Stack<FileStatus>();
        for(pathstack.push(srcfilestat); !exceededlimit && !pathstack.empty(); ) {
          FileStatus cur = pathstack.pop();
          FileStatus[] children = srcfs.listStatus(cur.getPath());
          for(int i = 0; !exceededlimit && i < children.length; i++) {
            boolean skipfile = false;
            final FileStatus child = children[i];
            final String dst = makeRelative(root, child.getPath());
            ++srcCount;
            if (child.isDir()) {
              pathstack.push(child);
            }
            else {
              //skip file if the src and the dst files are the same.
              skipfile = update && sameFile(srcfs, child, dstfs, new Path(args.dst, dst));
              if (!skipfile) {
                ++fileCount;
                byteCount += child.getLen();
                exceededlimit |= fileCount > args.filelimit
                    || byteCount > args.sizelimit;
                if (!exceededlimit) {
                  if (LOG.isTraceEnabled()) {
                    LOG.trace("adding file " + child.getPath());
                  }
                  // periodic sync() marks so CopyInputFormat can split
                  // the sequence file into map inputs
                  ++cnsyncf;
                  cbsyncs += child.getLen();
                  if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                    src_writer.sync();
                    dst_writer.sync();
                    cnsyncf = 0;
                    cbsyncs = 0L;
                  }
                }
              }
            }
            if (!skipfile && !exceededlimit) {
              src_writer.append(new LongWritable(child.isDir()? 0: child.getLen()),
                  new FilePair(child, dst));
              dst_writer.append(new Text(dst),
                  new Text(child.getPath().toString()));
            }
          }
          if (cur.isDir()) {
            // record directories separately so finalize() can restore
            // their attributes after all maps have run
            String dst = makeRelative(root, cur.getPath());
            dir_writer.append(new Text(dst), new FilePair(cur, dst));
            if (++dirsyn > SYNC_FILE_MAX) {
              dirsyn = 0;
              dir_writer.sync();
            }
          }
        }
      }
    } finally {
      checkAndClose(src_writer);
      checkAndClose(dst_writer);
      checkAndClose(dir_writer);
    }
    FileStatus dststatus = null;
    try {
      dststatus = dstfs.getFileStatus(args.dst);
    } catch(FileNotFoundException fnfe) {
      LOG.info(args.dst + " does not exist.");
    }
    // create dest path dir if copying > 1 file
    if (dststatus == null) {
      // NOTE(review): message lacks a space after "create"
      if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
        throw new IOException("Failed to create" + args.dst);
      }
    }
    final Path sorted = new Path(jobDirectory, "_distcp_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);
    if (dststatus != null && args.flags.contains(Options.DELETE)) {
      deleteNonexisting(dstfs, dststatus, sorted,
          jobfs, jobDirectory, jobConf, conf);
    }
    // tmp dir sits beside a file destination, or inside a dir destination
    Path tmpDir = new Path(
        (dstExists && !dstIsDir) || (!dstExists && srcCount == 1)?
        args.dst.getParent(): args.dst, "_distcp_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
    LOG.info("srcCount=" + srcCount);
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(byteCount, jobConf);
  }
/**
* Check whether the contents of src and dst are the same.
*
* Return false if dstpath does not exist
*
* If the files have different sizes, return false.
*
* If the files have the same sizes, the file checksums will be compared.
*
* When file checksum is not supported in any of file systems,
* two files are considered as the same if they have the same size.
*/
static private boolean sameFile(FileSystem srcfs, FileStatus srcstatus,
FileSystem dstfs, Path dstpath) throws IOException {
FileStatus dststatus;
try {
dststatus = dstfs.getFileStatus(dstpath);
} catch(FileNotFoundException fnfe) {
return false;
}
//same length?
if (srcstatus.getLen() != dststatus.getLen()) {
return false;
}
//compare checksums
try {
final FileChecksum srccs = srcfs.getFileChecksum(srcstatus.getPath());
final FileChecksum dstcs = dstfs.getFileChecksum(dststatus.getPath());
//return true if checksum is not supported
//(i.e. some of the checksums is null)
return srccs == null || dstcs == null || srccs.equals(dstcs);
} catch(FileNotFoundException fnfe) {
return false;
}
}
  /** Delete the dst files/dirs which do not exist in src.
   * Implements -delete: lsr the destination tree, sort it, then walk it in
   * lock-step with the (sorted) destination list produced by setup(); any
   * destination path absent from the copy plan is removed via "fs -rmr". */
  static private void deleteNonexisting(
      FileSystem dstfs, FileStatus dstroot, Path dstsorted,
      FileSystem jobfs, Path jobdir, JobConf jobconf, Configuration conf
      ) throws IOException {
    if (!dstroot.isDir()) {
      throw new IOException("dst must be a directory when option "
          + Options.DELETE.cmd + " is set, but dst (= " + dstroot.getPath()
          + ") is not a directory.");
    }
    //write dst lsr results
    final Path dstlsr = new Path(jobdir, "_distcp_dst_lsr");
    final SequenceFile.Writer writer = SequenceFile.createWriter(jobfs, jobconf,
        dstlsr, Text.class, FileStatus.class,
        SequenceFile.CompressionType.NONE);
    try {
      //do lsr to get all file statuses in dstroot
      final Stack<FileStatus> lsrstack = new Stack<FileStatus>();
      for(lsrstack.push(dstroot); !lsrstack.isEmpty(); ) {
        final FileStatus status = lsrstack.pop();
        if (status.isDir()) {
          for(FileStatus child : dstfs.listStatus(status.getPath())) {
            String relative = makeRelative(dstroot.getPath(), child.getPath());
            writer.append(new Text(relative), child);
            lsrstack.push(child);
          }
        }
      }
    } finally {
      checkAndClose(writer);
    }
    //sort lsr results
    final Path sortedlsr = new Path(jobdir, "_distcp_dst_lsr_sorted");
    SequenceFile.Sorter sorter = new SequenceFile.Sorter(jobfs,
        new Text.Comparator(), Text.class, FileStatus.class, jobconf);
    sorter.sort(dstlsr, sortedlsr);
    //compare lsr list and dst list
    SequenceFile.Reader lsrin = null;
    SequenceFile.Reader dstin = null;
    try {
      lsrin = new SequenceFile.Reader(jobfs, sortedlsr, jobconf);
      dstin = new SequenceFile.Reader(jobfs, dstsorted, jobconf);
      //compare sorted lsr list and sorted dst list
      final Text lsrpath = new Text();
      final FileStatus lsrstatus = new FileStatus();
      final Text dstpath = new Text();
      final Text dstfrom = new Text();
      final FsShell shell = new FsShell(conf);
      final String[] shellargs = {"-rmr", null};
      boolean hasnext = dstin.next(dstpath, dstfrom);
      for(; lsrin.next(lsrpath, lsrstatus); ) {
        // advance the dst list until it catches up with the lsr entry
        int dst_cmp_lsr = dstpath.compareTo(lsrpath);
        for(; hasnext && dst_cmp_lsr < 0; ) {
          hasnext = dstin.next(dstpath, dstfrom);
          dst_cmp_lsr = dstpath.compareTo(lsrpath);
        }
        if (dst_cmp_lsr == 0) {
          //lsrpath exists in dst, skip it
          hasnext = dstin.next(dstpath, dstfrom);
        }
        else {
          //lsrpath does not exist, delete it
          String s = new Path(dstroot.getPath(), lsrpath.toString()).toString();
          // skip descendants of an already-deleted ancestor: -rmr is recursive
          if (shellargs[1] == null || !isAncestorPath(shellargs[1], s)) {
            shellargs[1] = s;
            int r = 0;
            try {
              r = shell.run(shellargs);
            } catch(Exception e) {
              throw new IOException("Exception from shell.", e);
            }
            if (r != 0) {
              throw new IOException("\"" + shellargs[0] + " " + shellargs[1]
                  + "\" returns non-zero value " + r);
            }
          }
        }
      }
    } finally {
      checkAndClose(lsrin);
      checkAndClose(dstin);
    }
  }
//is x an ancestor path of y?
static private boolean isAncestorPath(String x, String y) {
if (!y.startsWith(x)) {
return false;
}
final int len = x.length();
return y.length() == len || y.charAt(len) == Path.SEPARATOR_CHAR;
}
  /** Check whether the file list have duplication.
   * Sorts the destination list into {@code sorted}, then scans adjacent
   * records: two equal destination names mean two different sources map to
   * the same target, which is rejected. */
  static private void checkDuplication(FileSystem fs, Path file, Path sorted,
      Configuration conf) throws IOException {
    SequenceFile.Reader in = null;
    try {
      SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
          new Text.Comparator(), Text.class, Text.class, conf);
      sorter.sort(file, sorted);
      in = new SequenceFile.Reader(fs, sorted, conf);
      Text prevdst = null, curdst = new Text();
      Text prevsrc = null, cursrc = new Text();
      for(; in.next(curdst, cursrc); ) {
        if (prevdst != null && curdst.equals(prevdst)) {
          throw new DuplicationException(
            "Invalid input, there are duplicated files in the sources: "
            + prevsrc + ", " + cursrc);
        }
        // rotate buffers: keep the current pair as "previous" and allocate
        // fresh holders so the next read cannot overwrite what we compare
        prevdst = curdst;
        curdst = new Text();
        prevsrc = cursrc;
        cursrc = new Text();
      }
    }
    finally {
      checkAndClose(in);
    }
  }
static boolean checkAndClose(java.io.Closeable io) {
if (io != null) {
try {
io.close();
}
catch(IOException ioe) {
LOG.warn(StringUtils.stringifyException(ioe));
return false;
}
}
return true;
}
/** An exception class for duplicated source files. */
public static class DuplicationException extends IOException {
private static final long serialVersionUID = 1L;
/** Error code for this exception */
public static final int ERROR_CODE = -2;
DuplicationException(String message) {super(message);}
}
}
|
|
package uk.ac.manchester.cs.jfact.helpers;
/* This file is part of the JFact DL reasoner
Copyright 2011 by Ignazio Palmisano, Dmitry Tsarkov, University of Manchester
This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA*/
import java.util.ArrayList;
import java.util.List;
import uk.ac.manchester.cs.jfact.kernel.DlCompletionGraph;
import uk.ac.manchester.cs.jfact.kernel.options.JFactReasonerConfiguration;
@SuppressWarnings("javadoc")
public class Stats {
    /** Create a new statistic, register it in the given list, return it. */
    public AccumulatedStatistic build(List<AccumulatedStatistic> list) {
        AccumulatedStatistic toReturn = new AccumulatedStatistic();
        list.add(toReturn);
        return toReturn;
    }
    /** A counter split into a current-session value and a running total. */
    public static class AccumulatedStatistic {
        /** accumulated statistic */
        private int total;
        /** current session statistic */
        private int local;
        /** c'tor: link itself to the list */
        AccumulatedStatistic() {
            total = 0;
            local = 0;
        }
        /** increment local value */
        public void inc() {
            ++local;
        }
        /** add local value to a global one */
        void accumulate() {
            total += local;
            local = 0;
        }
        /** Print s1, then the local (b) or total (!b) value, then s2. */
        public void print(LogAdapter l, boolean b, String s1, String s2) {
            l.print(s1);
            if (b) {
                l.print(local);
            } else {
                l.print(total);
            }
            l.print(s2);
        }
    }
    // statistic elements
    /** all AccumulatedStatistic members are linked together; the registry
     * itself never changes after construction, only the counters inside it,
     * so all references below are final */
    private final List<AccumulatedStatistic> root = new ArrayList<AccumulatedStatistic>();
    private final AccumulatedStatistic nTacticCalls = build(root);
    private final AccumulatedStatistic nUseless = build(root);
    private final AccumulatedStatistic nIdCalls = build(root);
    private final AccumulatedStatistic nSingletonCalls = build(root);
    private final AccumulatedStatistic nOrCalls = build(root);
    private final AccumulatedStatistic nOrBrCalls = build(root);
    private final AccumulatedStatistic nAndCalls = build(root);
    private final AccumulatedStatistic nSomeCalls = build(root);
    private final AccumulatedStatistic nAllCalls = build(root);
    private final AccumulatedStatistic nFuncCalls = build(root);
    private final AccumulatedStatistic nLeCalls = build(root);
    private final AccumulatedStatistic nGeCalls = build(root);
    private final AccumulatedStatistic nNNCalls = build(root);
    private final AccumulatedStatistic nMergeCalls = build(root);
    private final AccumulatedStatistic nAutoEmptyLookups = build(root);
    private final AccumulatedStatistic nAutoTransLookups = build(root);
    private final AccumulatedStatistic nSRuleAdd = build(root);
    private final AccumulatedStatistic nSRuleFire = build(root);
    private final AccumulatedStatistic nStateSaves = build(root);
    private final AccumulatedStatistic nStateRestores = build(root);
    private final AccumulatedStatistic nNodeSaves = build(root);
    private final AccumulatedStatistic nNodeRestores = build(root);
    private final AccumulatedStatistic nLookups = build(root);
    private final AccumulatedStatistic nFairnessViolations = build(root);
    // reasoning cache
    private final AccumulatedStatistic nCacheTry = build(root);
    private final AccumulatedStatistic nCacheFailedNoCache = build(root);
    private final AccumulatedStatistic nCacheFailedShallow = build(root);
    private final AccumulatedStatistic nCacheFailed = build(root);
    private final AccumulatedStatistic nCachedSat = build(root);
    private final AccumulatedStatistic nCachedUnsat = build(root);
    /** Fold every counter's session value into its running total. */
    public void accumulate() {
        for (AccumulatedStatistic cur : root) {
            cur.accumulate();
        }
    }
    /** Write a human-readable statistics report to o; session values when
     * needLocal is true, accumulated totals otherwise. */
    public void logStatisticData(LogAdapter o, boolean needLocal,
            DlCompletionGraph CGraph, JFactReasonerConfiguration options) {
        if (options.isUSE_REASONING_STATISTICS()) {
            nTacticCalls.print(o, needLocal, "\nThere were made ",
                    " tactic operations, of which:");
            nIdCalls.print(o, needLocal, "\n    CN   operations: ", "");
            nSingletonCalls.print(o, needLocal, "\n           including ",
                    " singleton ones");
            nOrCalls.print(o, needLocal, "\n    OR   operations: ", "");
            nOrBrCalls.print(o, needLocal, "\n           ", " of which are branching");
            nAndCalls.print(o, needLocal, "\n    AND  operations: ", "");
            nSomeCalls.print(o, needLocal, "\n    SOME operations: ", "");
            nAllCalls.print(o, needLocal, "\n    ALL  operations: ", "");
            nFuncCalls.print(o, needLocal, "\n    Func operations: ", "");
            nLeCalls.print(o, needLocal, "\n    LE   operations: ", "");
            nGeCalls.print(o, needLocal, "\n    GE   operations: ", "");
            nUseless.print(o, needLocal, "\n    N/A  operations: ", "");
            nNNCalls.print(o, needLocal, "\nThere were made ", " NN rule application");
            nMergeCalls.print(o, needLocal, "\nThere were made ", " merging operations");
            nAutoEmptyLookups.print(o, needLocal, "\nThere were made ",
                    " RA empty transition lookups");
            nAutoTransLookups.print(o, needLocal, "\nThere were made ",
                    " RA applicable transition lookups");
            nSRuleAdd.print(o, needLocal, "\nThere were made ", " simple rule additions");
            nSRuleFire.print(o, needLocal, "\n         of which ", " simple rules fired");
            nStateSaves.print(o, needLocal, "\nThere were made ",
                    " save(s) of global state");
            nStateRestores.print(o, needLocal, "\nThere were made ",
                    " restore(s) of global state");
            nNodeSaves
                    .print(o, needLocal, "\nThere were made ", " save(s) of tree state");
            nNodeRestores.print(o, needLocal, "\nThere were made ",
                    " restore(s) of tree state");
            nLookups.print(o, needLocal, "\nThere were made ", " concept lookups");
            if (options.isRKG_USE_FAIRNESS()) {
                nFairnessViolations.print(o, needLocal, "\nThere were ",
                        " fairness constraints violation");
            }
            nCacheTry.print(o, needLocal, "\nThere were made ",
                    " tries to cache completion tree node, of which:");
            nCacheFailedNoCache.print(o, needLocal, "\n                ",
                    " fails due to cache absence");
            nCacheFailedShallow.print(o, needLocal, "\n                ",
                    " fails due to shallow node");
            nCacheFailed.print(o, needLocal, "\n                ",
                    " fails due to cache merge failure");
            nCachedSat.print(o, needLocal, "\n                ",
                    " cached satisfiable nodes");
            nCachedUnsat.print(o, needLocal, "\n                ",
                    " cached unsatisfiable nodes");
        }
        if (!needLocal) {
            o.print("\nThe maximal graph size is ", CGraph.maxSize(), " nodes");
        }
    }
    public AccumulatedStatistic getnTacticCalls() {
        return nTacticCalls;
    }
    public AccumulatedStatistic getnUseless() {
        return nUseless;
    }
    public AccumulatedStatistic getnIdCalls() {
        return nIdCalls;
    }
    public AccumulatedStatistic getnSingletonCalls() {
        return nSingletonCalls;
    }
    public AccumulatedStatistic getnOrCalls() {
        return nOrCalls;
    }
    public AccumulatedStatistic getnOrBrCalls() {
        return nOrBrCalls;
    }
    public AccumulatedStatistic getnAndCalls() {
        return nAndCalls;
    }
    public AccumulatedStatistic getnSomeCalls() {
        return nSomeCalls;
    }
    public AccumulatedStatistic getnAllCalls() {
        return nAllCalls;
    }
    public AccumulatedStatistic getnFuncCalls() {
        return nFuncCalls;
    }
    public AccumulatedStatistic getnLeCalls() {
        return nLeCalls;
    }
    public AccumulatedStatistic getnGeCalls() {
        return nGeCalls;
    }
    public AccumulatedStatistic getnNNCalls() {
        return nNNCalls;
    }
    public AccumulatedStatistic getnMergeCalls() {
        return nMergeCalls;
    }
    public AccumulatedStatistic getnAutoEmptyLookups() {
        return nAutoEmptyLookups;
    }
    public AccumulatedStatistic getnAutoTransLookups() {
        return nAutoTransLookups;
    }
    public AccumulatedStatistic getnSRuleAdd() {
        return nSRuleAdd;
    }
    public AccumulatedStatistic getnSRuleFire() {
        return nSRuleFire;
    }
    public AccumulatedStatistic getnStateSaves() {
        return nStateSaves;
    }
    public AccumulatedStatistic getnStateRestores() {
        return nStateRestores;
    }
    public AccumulatedStatistic getnNodeSaves() {
        return nNodeSaves;
    }
    public AccumulatedStatistic getnNodeRestores() {
        return nNodeRestores;
    }
    public AccumulatedStatistic getnLookups() {
        return nLookups;
    }
    public AccumulatedStatistic getnFairnessViolations() {
        return nFairnessViolations;
    }
    public AccumulatedStatistic getnCacheTry() {
        return nCacheTry;
    }
    public AccumulatedStatistic getnCacheFailedNoCache() {
        return nCacheFailedNoCache;
    }
    public AccumulatedStatistic getnCacheFailedShallow() {
        return nCacheFailedShallow;
    }
    public AccumulatedStatistic getnCacheFailed() {
        return nCacheFailed;
    }
    public AccumulatedStatistic getnCachedSat() {
        return nCachedSat;
    }
    public AccumulatedStatistic getnCachedUnsat() {
        return nCachedUnsat;
    }
}
|
|
/*
* Copyright 2013 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit.wicket.pages;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.apache.wicket.Component;
import org.apache.wicket.PageParameters;
import org.apache.wicket.behavior.SimpleAttributeModifier;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.link.BookmarkablePageLink;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.data.DataView;
import org.apache.wicket.markup.repeater.data.ListDataProvider;
import com.gitblit.Constants.AccessPermission;
import com.gitblit.Keys;
import com.gitblit.models.RegistrantAccessPermission;
import com.gitblit.models.RepositoryModel;
import com.gitblit.models.TicketModel;
import com.gitblit.models.TicketModel.Status;
import com.gitblit.models.UserModel;
import com.gitblit.tickets.QueryBuilder;
import com.gitblit.tickets.QueryResult;
import com.gitblit.tickets.TicketIndexer.Lucene;
import com.gitblit.tickets.TicketLabel;
import com.gitblit.tickets.TicketMilestone;
import com.gitblit.tickets.TicketResponsible;
import com.gitblit.utils.ArrayUtils;
import com.gitblit.utils.StringUtils;
import com.gitblit.wicket.GitBlitWebSession;
import com.gitblit.wicket.TicketsUI;
import com.gitblit.wicket.TicketsUI.TicketQuery;
import com.gitblit.wicket.TicketsUI.TicketSort;
import com.gitblit.wicket.WicketUtils;
import com.gitblit.wicket.panels.LinkPanel;
import com.gitblit.wicket.panels.TicketListPanel;
import com.gitblit.wicket.panels.TicketSearchForm;
public class TicketsPage extends RepositoryPage {
final TicketResponsible any;
/**
 * Builds the ticket list page: search form, milestone overview, canned and
 * dynamic queries, status/responsible/sort filters, pager and the ticket list
 * itself. All view state is derived from the request parameters so the page
 * can remain stateless.
 *
 * @param params request parameters carrying the repository name plus optional
 *            query ("q"), search ("s"), status, milestone, responsible,
 *            "sort", "direction" and "pg" values
 */
public TicketsPage(PageParameters params) {
    super(params);
    if (!app().tickets().isReady()) {
        // tickets prohibited
        setResponsePage(SummaryPage.class, WicketUtils.newRepositoryParameter(repositoryName));
    } else if (!app().tickets().hasTickets(getRepositoryModel())) {
        // no tickets for this repository
        setResponsePage(NoTicketsPage.class, WicketUtils.newRepositoryParameter(repositoryName));
    } else {
        String id = WicketUtils.getObject(params);
        if (id != null) {
            // view the ticket with the TicketPage
            setResponsePage(TicketPage.class, params);
        }
    }
    // NOTE(review): setResponsePage schedules a redirect but does not abort
    // construction -- the remainder of this constructor still runs on the
    // redirect paths above. Confirm this is intentional.
    // set stateless page preference
    setStatelessHint(true);
    // wildcard "any responsible" choice used by the responsible filter menu
    any = new TicketResponsible(getString("gb.any"), "[* TO *]", null);
    UserModel user = GitBlitWebSession.get().getUser();
    boolean isAuthenticated = user != null && user.isAuthenticated;
    // request parameters that drive the ticket query
    final String [] statiiParam = params.getStringArray(Lucene.status.name());
    final String assignedToParam = params.getString(Lucene.responsible.name(), null);
    final String milestoneParam = params.getString(Lucene.milestone.name(), null);
    final String queryParam = params.getString("q", null);
    final String searchParam = params.getString("s", null);
    final String sortBy = Lucene.fromString(params.getString("sort", Lucene.created.name())).name();
    final boolean desc = !"asc".equals(params.getString("direction", "desc"));
    // add search form
    add(new TicketSearchForm("ticketSearchForm", repositoryName, searchParam, getClass(), params));
    // a free-text search term takes precedence over a structured query
    final String activeQuery;
    if (!StringUtils.isEmpty(searchParam)) {
        activeQuery = searchParam;
    } else if (StringUtils.isEmpty(queryParam)) {
        activeQuery = "";
    } else {
        activeQuery = queryParam;
    }
    // build Lucene query from defaults and request parameters
    QueryBuilder qb = new QueryBuilder(queryParam);
    if (!qb.containsField(Lucene.rid.name())) {
        // specify the repository
        qb.and(Lucene.rid.matches(getRepositoryModel().getRID()));
    }
    if (!qb.containsField(Lucene.responsible.name())) {
        // specify the responsible
        qb.and(Lucene.responsible.matches(assignedToParam));
    }
    if (!qb.containsField(Lucene.milestone.name())) {
        // specify the milestone
        qb.and(Lucene.milestone.matches(milestoneParam));
    }
    if (!qb.containsField(Lucene.status.name()) && !ArrayUtils.isEmpty(statiiParam)) {
        // specify the states; a leading '!' negates a state and forces the
        // whole group to be AND-ed as exclusions
        boolean not = false;
        QueryBuilder q = new QueryBuilder();
        for (String state : statiiParam) {
            if (state.charAt(0) == '!') {
                not = true;
                q.and(Lucene.status.doesNotMatch(state.substring(1)));
            } else {
                q.or(Lucene.status.matches(state));
            }
        }
        if (not) {
            qb.and(q.toString());
        } else {
            qb.and(q.toSubquery().toString());
        }
    }
    final String luceneQuery = qb.build();
    // open milestones
    List<TicketMilestone> milestones = app().tickets().getMilestones(getRepositoryModel(), Status.Open);
    TicketMilestone currentMilestone = null;
    if (!StringUtils.isEmpty(milestoneParam)) {
        for (TicketMilestone tm : milestones) {
            if (tm.name.equals(milestoneParam)) {
                // get the milestone (queries the index)
                currentMilestone = app().tickets().getMilestone(getRepositoryModel(), milestoneParam);
                break;
            }
        }
        if (currentMilestone == null) {
            // milestone not found, create a temporary one
            currentMilestone = new TicketMilestone(milestoneParam);
            String q = QueryBuilder.q(Lucene.rid.matches(getRepositoryModel().getRID())).and(Lucene.milestone.matches(milestoneParam)).build();
            currentMilestone.tickets = app().tickets().queryFor(q, 1, 0, Lucene.number.name(), true);
            milestones.add(currentMilestone);
        }
    }
    // milestone progress panel (only rendered when a milestone is selected)
    Fragment milestonePanel;
    if (currentMilestone == null) {
        milestonePanel = new Fragment("milestonePanel", "noMilestoneFragment", this);
        add(milestonePanel);
    } else {
        milestonePanel = new Fragment("milestonePanel", "milestoneProgressFragment", this);
        milestonePanel.add(new Label("currentMilestone", currentMilestone.name));
        if (currentMilestone.due == null) {
            milestonePanel.add(new Label("currentDueDate", getString("gb.notSpecified")));
        } else {
            milestonePanel.add(WicketUtils.createDateLabel("currentDueDate", currentMilestone.due, GitBlitWebSession
                    .get().getTimezone(), getTimeUtils(), false));
        }
        Label label = new Label("progress");
        WicketUtils.setCssStyle(label, "width:" + currentMilestone.getProgress() + "%;");
        milestonePanel.add(label);
        milestonePanel.add(new LinkPanel("openTickets", null,
                MessageFormat.format(getString("gb.nOpenTickets"), currentMilestone.getOpenTickets()),
                TicketsPage.class,
                queryParameters(null, currentMilestone.name, TicketsUI.openStatii, null, sortBy, desc, 1)));
        milestonePanel.add(new LinkPanel("closedTickets", null,
                MessageFormat.format(getString("gb.nClosedTickets"), currentMilestone.getClosedTickets()),
                TicketsPage.class,
                queryParameters(null, currentMilestone.name, TicketsUI.closedStatii, null, sortBy, desc, 1)));
        milestonePanel.add(new Label("totalTickets", MessageFormat.format(getString("gb.nTotalTickets"), currentMilestone.getTotalTickets())));
        add(milestonePanel);
    }
    // milestone selector dropdown
    Fragment milestoneDropdown = new Fragment("milestoneDropdown", "milestoneDropdownFragment", this);
    PageParameters resetMilestone = queryParameters(queryParam, null, statiiParam, assignedToParam, sortBy, desc, 1);
    milestoneDropdown.add(new BookmarkablePageLink<Void>("resetMilestone", TicketsPage.class, resetMilestone));
    ListDataProvider<TicketMilestone> milestonesDp = new ListDataProvider<TicketMilestone>(milestones);
    DataView<TicketMilestone> milestonesMenu = new DataView<TicketMilestone>("milestone", milestonesDp) {
        private static final long serialVersionUID = 1L;
        @Override
        public void populateItem(final Item<TicketMilestone> item) {
            final TicketMilestone tm = item.getModelObject();
            PageParameters params = queryParameters(queryParam, tm.name, statiiParam, assignedToParam, sortBy, desc, 1);
            item.add(new LinkPanel("milestoneLink", null, tm.name, TicketsPage.class, params).setRenderBodyOnly(true));
        }
    };
    milestoneDropdown.add(milestonesMenu);
    milestonePanel.add(milestoneDropdown);
    // search or query tickets
    int page = Math.max(1, WicketUtils.getPage(params));
    int pageSize = app().settings().getInteger(Keys.tickets.perPage, 25);
    List<QueryResult> results;
    if (StringUtils.isEmpty(searchParam)) {
        results = app().tickets().queryFor(luceneQuery, page, pageSize, sortBy, desc);
    } else {
        results = app().tickets().searchFor(getRepositoryModel(), searchParam, page, pageSize);
    }
    // every QueryResult carries the total hit count for the whole query
    int totalResults = results.size() == 0 ? 0 : results.get(0).totalResults;
    // standard queries: one canned link per ticket type, plus a reset link
    add(new BookmarkablePageLink<Void>("changesQuery", TicketsPage.class,
            queryParameters(
                    Lucene.type.matches(TicketModel.Type.Proposal.name()),
                    milestoneParam,
                    statiiParam,
                    assignedToParam,
                    sortBy,
                    desc,
                    1)));
    add(new BookmarkablePageLink<Void>("bugsQuery", TicketsPage.class,
            queryParameters(
                    Lucene.type.matches(TicketModel.Type.Bug.name()),
                    milestoneParam,
                    statiiParam,
                    assignedToParam,
                    sortBy,
                    desc,
                    1)));
    add(new BookmarkablePageLink<Void>("enhancementsQuery", TicketsPage.class,
            queryParameters(
                    Lucene.type.matches(TicketModel.Type.Enhancement.name()),
                    milestoneParam,
                    statiiParam,
                    assignedToParam,
                    sortBy,
                    desc,
                    1)));
    add(new BookmarkablePageLink<Void>("tasksQuery", TicketsPage.class,
            queryParameters(
                    Lucene.type.matches(TicketModel.Type.Task.name()),
                    milestoneParam,
                    statiiParam,
                    assignedToParam,
                    sortBy,
                    desc,
                    1)));
    add(new BookmarkablePageLink<Void>("questionsQuery", TicketsPage.class,
            queryParameters(
                    Lucene.type.matches(TicketModel.Type.Question.name()),
                    milestoneParam,
                    statiiParam,
                    assignedToParam,
                    sortBy,
                    desc,
                    1)));
    add(new BookmarkablePageLink<Void>("maintenanceQuery", TicketsPage.class,
            queryParameters(
                    Lucene.type.matches(TicketModel.Type.Maintenance.name()),
                    milestoneParam,
                    statiiParam,
                    assignedToParam,
                    sortBy,
                    desc,
                    1)));
    add(new BookmarkablePageLink<Void>("resetQuery", TicketsPage.class,
            queryParameters(
                    null,
                    milestoneParam,
                    TicketsUI.openStatii,
                    null,
                    null,
                    true,
                    1)));
    // per-user queries (created/watched/mentions) only for authenticated users
    if (isAuthenticated) {
        add(new Label("userDivider"));
        add(new BookmarkablePageLink<Void>("createdQuery", TicketsPage.class,
                queryParameters(
                        Lucene.createdby.matches(user.username),
                        milestoneParam,
                        statiiParam,
                        assignedToParam,
                        sortBy,
                        desc,
                        1)));
        add(new BookmarkablePageLink<Void>("watchedQuery", TicketsPage.class,
                queryParameters(
                        Lucene.watchedby.matches(user.username),
                        milestoneParam,
                        statiiParam,
                        assignedToParam,
                        sortBy,
                        desc,
                        1)));
        add(new BookmarkablePageLink<Void>("mentionsQuery", TicketsPage.class,
                queryParameters(
                        Lucene.mentions.matches(user.username),
                        milestoneParam,
                        statiiParam,
                        assignedToParam,
                        sortBy,
                        desc,
                        1)));
    } else {
        add(new Label("userDivider").setVisible(false));
        add(new Label("createdQuery").setVisible(false));
        add(new Label("watchedQuery").setVisible(false));
        add(new Label("mentionsQuery").setVisible(false));
    }
    // dynamic queries derived from repository labels and result topics/labels;
    // TreeSet de-duplicates and keeps them sorted
    Set<TicketQuery> dynamicQueries = new TreeSet<TicketQuery>();
    for (TicketLabel label : app().tickets().getLabels(getRepositoryModel())) {
        String q = QueryBuilder.q(Lucene.labels.matches(label.name)).build();
        dynamicQueries.add(new TicketQuery(label.name, q).color(label.color));
    }
    for (QueryResult ticket : results) {
        if (!StringUtils.isEmpty(ticket.topic)) {
            String q = QueryBuilder.q(Lucene.topic.matches(ticket.topic)).build();
            dynamicQueries.add(new TicketQuery(ticket.topic, q));
        }
        if (!ArrayUtils.isEmpty(ticket.labels)) {
            for (String label : ticket.labels) {
                String q = QueryBuilder.q(Lucene.labels.matches(label)).build();
                dynamicQueries.add(new TicketQuery(label, q));
            }
        }
    }
    if (dynamicQueries.size() == 0) {
        add(new Label("dynamicQueries").setVisible(false));
    } else {
        Fragment fragment = new Fragment("dynamicQueries", "dynamicQueriesFragment", this);
        ListDataProvider<TicketQuery> dynamicQueriesDp = new ListDataProvider<TicketQuery>(new ArrayList<TicketQuery>(dynamicQueries));
        DataView<TicketQuery> dynamicQueriesList = new DataView<TicketQuery>("dynamicQuery", dynamicQueriesDp) {
            private static final long serialVersionUID = 1L;
            @Override
            public void populateItem(final Item<TicketQuery> item) {
                final TicketQuery tq = item.getModelObject();
                Component swatch = new Label("swatch", "&nbsp;").setEscapeModelStrings(false);
                if (StringUtils.isEmpty(tq.color)) {
                    // calculate a color
                    tq.color = StringUtils.getColor(tq.name);
                }
                String background = MessageFormat.format("background-color:{0};", tq.color);
                swatch.add(new SimpleAttributeModifier("style", background));
                item.add(swatch);
                if (activeQuery.contains(tq.query)) {
                    // selected: clicking the link removes this query term
                    String q = QueryBuilder.q(activeQuery).remove(tq.query).build();
                    PageParameters params = queryParameters(q, milestoneParam, statiiParam, assignedToParam, sortBy, desc, 1);
                    item.add(new LinkPanel("link", "active", tq.name, TicketsPage.class, params).setRenderBodyOnly(true));
                    Label checked = new Label("checked");
                    WicketUtils.setCssClass(checked, "iconic-o-x");
                    item.add(checked);
                    item.add(new SimpleAttributeModifier("style", background));
                } else {
                    // unselected: clicking the link AND-s this query term in
                    String q = QueryBuilder.q(queryParam).toSubquery().and(tq.query).build();
                    PageParameters params = queryParameters(q, milestoneParam, statiiParam, assignedToParam, sortBy, desc, 1);
                    item.add(new LinkPanel("link", null, tq.name, TicketsPage.class, params).setRenderBodyOnly(true));
                    item.add(new Label("checked").setVisible(false));
                }
            }
        };
        fragment.add(dynamicQueriesList);
        add(fragment);
    }
    // states
    if (ArrayUtils.isEmpty(statiiParam)) {
        add(new Label("selectedStatii", getString("gb.all")));
    } else {
        add(new Label("selectedStatii", StringUtils.flattenStrings(Arrays.asList(statiiParam), ",")));
    }
    add(new BookmarkablePageLink<Void>("openTickets", TicketsPage.class, queryParameters(queryParam, milestoneParam, TicketsUI.openStatii, assignedToParam, sortBy, desc, 1)));
    add(new BookmarkablePageLink<Void>("closedTickets", TicketsPage.class, queryParameters(queryParam, milestoneParam, TicketsUI.closedStatii, assignedToParam, sortBy, desc, 1)));
    add(new BookmarkablePageLink<Void>("allTickets", TicketsPage.class, queryParameters(queryParam, milestoneParam, null, assignedToParam, sortBy, desc, 1)));
    // by status (Closed is excluded; it is covered by the closedTickets link)
    List<Status> statii = new ArrayList<Status>(Arrays.asList(Status.values()));
    statii.remove(Status.Closed);
    ListDataProvider<Status> resolutionsDp = new ListDataProvider<Status>(statii);
    DataView<Status> statiiLinks = new DataView<Status>("statii", resolutionsDp) {
        private static final long serialVersionUID = 1L;
        @Override
        public void populateItem(final Item<Status> item) {
            final Status status = item.getModelObject();
            PageParameters p = queryParameters(queryParam, milestoneParam, new String [] { status.name().toLowerCase() }, assignedToParam, sortBy, desc, 1);
            String css = TicketsUI.getStatusClass(status);
            item.add(new LinkPanel("statusLink", css, status.toString(), TicketsPage.class, p).setRenderBodyOnly(true));
        }
    };
    add(statiiLinks);
    // responsible filter: any user with at least PUSH permission qualifies
    List<TicketResponsible> responsibles = new ArrayList<TicketResponsible>();
    for (RegistrantAccessPermission perm : app().repositories().getUserAccessPermissions(getRepositoryModel())) {
        if (perm.permission.atLeast(AccessPermission.PUSH)) {
            UserModel u = app().users().getUserModel(perm.registrant);
            responsibles.add(new TicketResponsible(u));
        }
    }
    Collections.sort(responsibles);
    responsibles.add(0, any);
    TicketResponsible currentResponsible = null;
    for (TicketResponsible u : responsibles) {
        if (u.username.equals(assignedToParam)) {
            currentResponsible = u;
            break;
        }
    }
    add(new Label("currentResponsible", currentResponsible == null ? "" : currentResponsible.displayname));
    ListDataProvider<TicketResponsible> responsibleDp = new ListDataProvider<TicketResponsible>(responsibles);
    DataView<TicketResponsible> responsibleMenu = new DataView<TicketResponsible>("responsible", responsibleDp) {
        private static final long serialVersionUID = 1L;
        @Override
        public void populateItem(final Item<TicketResponsible> item) {
            final TicketResponsible u = item.getModelObject();
            PageParameters params = queryParameters(queryParam, milestoneParam, statiiParam, u.username, sortBy, desc, 1);
            item.add(new LinkPanel("responsibleLink", null, u.displayname, TicketsPage.class, params).setRenderBodyOnly(true));
        }
    };
    add(responsibleMenu);
    PageParameters resetResponsibleParams = queryParameters(queryParam, milestoneParam, statiiParam, null, sortBy, desc, 1);
    add(new BookmarkablePageLink<Void>("resetResponsible", TicketsPage.class, resetResponsibleParams));
    // sort menu: each choice pairs a Lucene field with a direction
    List<TicketSort> sortChoices = new ArrayList<TicketSort>();
    sortChoices.add(new TicketSort(getString("gb.sortNewest"), Lucene.created.name(), true));
    sortChoices.add(new TicketSort(getString("gb.sortOldest"), Lucene.created.name(), false));
    sortChoices.add(new TicketSort(getString("gb.sortMostRecentlyUpdated"), Lucene.updated.name(), true));
    sortChoices.add(new TicketSort(getString("gb.sortLeastRecentlyUpdated"), Lucene.updated.name(), false));
    sortChoices.add(new TicketSort(getString("gb.sortMostComments"), Lucene.comments.name(), true));
    sortChoices.add(new TicketSort(getString("gb.sortLeastComments"), Lucene.comments.name(), false));
    sortChoices.add(new TicketSort(getString("gb.sortMostPatchsetRevisions"), Lucene.patchsets.name(), true));
    sortChoices.add(new TicketSort(getString("gb.sortLeastPatchsetRevisions"), Lucene.patchsets.name(), false));
    sortChoices.add(new TicketSort(getString("gb.sortMostVotes"), Lucene.votes.name(), true));
    sortChoices.add(new TicketSort(getString("gb.sortLeastVotes"), Lucene.votes.name(), false));
    sortChoices.add(new TicketSort(getString("gb.sortHighestPriority"), Lucene.priority.name(), true));
    sortChoices.add(new TicketSort(getString("gb.sortLowestPriority"), Lucene.priority.name(), false));
    sortChoices.add(new TicketSort(getString("gb.sortHighestSeverity"), Lucene.severity.name(), true));
    sortChoices.add(new TicketSort(getString("gb.sortLowestSeverity"), Lucene.severity.name(), false));
    // default to the first choice (newest first) when no match is found
    TicketSort currentSort = sortChoices.get(0);
    for (TicketSort ts : sortChoices) {
        if (ts.sortBy.equals(sortBy) && desc == ts.desc) {
            currentSort = ts;
            break;
        }
    }
    add(new Label("currentSort", currentSort.name));
    ListDataProvider<TicketSort> sortChoicesDp = new ListDataProvider<TicketSort>(sortChoices);
    DataView<TicketSort> sortMenu = new DataView<TicketSort>("sort", sortChoicesDp) {
        private static final long serialVersionUID = 1L;
        @Override
        public void populateItem(final Item<TicketSort> item) {
            final TicketSort ts = item.getModelObject();
            PageParameters params = queryParameters(queryParam, milestoneParam, statiiParam, assignedToParam, ts.sortBy, ts.desc, 1);
            item.add(new LinkPanel("sortLink", null, ts.name, TicketsPage.class, params).setRenderBodyOnly(true));
        }
    };
    add(sortMenu);
    // paging links
    buildPager(queryParam, milestoneParam, statiiParam, assignedToParam, sortBy, desc, page, pageSize, results.size(), totalResults);
    add(new TicketListPanel("ticketList", results, false, false));
    // new milestone link (admins only, when the backend accepts updates)
    RepositoryModel repositoryModel = getRepositoryModel();
    final boolean acceptingUpdates = app().tickets().isAcceptingTicketUpdates(repositoryModel)
            && user != null && user.canAdmin(getRepositoryModel());
    if (acceptingUpdates) {
        add(new LinkPanel("newMilestone", null, getString("gb.newMilestone"),
                NewMilestonePage.class, WicketUtils.newRepositoryParameter(repositoryName)));
    } else {
        add(new Label("newMilestone").setVisible(false));
    }
    // milestones list, split into open and closed, each ordered by due date
    // with undated milestones last
    List<TicketMilestone> openMilestones = new ArrayList<TicketMilestone>();
    List<TicketMilestone> closedMilestones = new ArrayList<TicketMilestone>();
    for (TicketMilestone milestone : app().tickets().getMilestones(repositoryModel)) {
        if (milestone.isOpen()) {
            openMilestones.add(milestone);
        } else {
            closedMilestones.add(milestone);
        }
    }
    Collections.sort(openMilestones, new Comparator<TicketMilestone>() {
        @Override
        public int compare(TicketMilestone o1, TicketMilestone o2) {
            if (o1.due == null) {
                return (o2.due == null) ? 0 : 1;
            } else if (o2.due == null) {
                return -1;
            } else {
                return o1.due.compareTo(o2.due);
            }
        }
    });
    Collections.sort(closedMilestones, new Comparator<TicketMilestone>() {
        @Override
        public int compare(TicketMilestone o1, TicketMilestone o2) {
            if (o1.due == null) {
                return (o2.due == null) ? 0 : 1;
            } else if (o2.due == null) {
                return -1;
            } else {
                return o1.due.compareTo(o2.due);
            }
        }
    });
    DataView<TicketMilestone> openMilestonesList = milestoneList("openMilestonesList", openMilestones, acceptingUpdates);
    add(openMilestonesList);
    DataView<TicketMilestone> closedMilestonesList = milestoneList("closedMilestonesList", closedMilestones, acceptingUpdates);
    add(closedMilestonesList);
}
/**
 * Builds a repeating view of milestones. Each row shows the milestone name
 * (linked to its ticket list), a state lozenge (open/overdue/closed), the due
 * date, an optional edit link, and -- for open milestones -- a progress bar
 * with open/closed/total ticket counts.
 *
 * @param wicketId the markup id ("openMilestonesList" or "closedMilestonesList")
 * @param milestones the milestones to render
 * @param acceptingUpdates whether edit links should be rendered
 * @return the populated data view
 */
protected DataView<TicketMilestone> milestoneList(String wicketId, List<TicketMilestone> milestones, final boolean acceptingUpdates) {
    ListDataProvider<TicketMilestone> milestonesDp = new ListDataProvider<TicketMilestone>(milestones);
    DataView<TicketMilestone> milestonesList = new DataView<TicketMilestone>(wicketId, milestonesDp) {
        private static final long serialVersionUID = 1L;
        @Override
        public void populateItem(final Item<TicketMilestone> item) {
            Fragment entryPanel = new Fragment("entryPanel", "milestoneListFragment", this);
            item.add(entryPanel);
            final TicketMilestone tm = item.getModelObject();
            // link to the milestone's tickets, filtered by open/closed state
            String [] states;
            if (tm.isOpen()) {
                states = TicketsUI.openStatii;
            } else {
                states = TicketsUI.closedStatii;
            }
            PageParameters params = queryParameters(null, tm.name, states, null, null, true, 1);
            entryPanel.add(new LinkPanel("milestoneName", null, tm.name, TicketsPage.class, params).setRenderBodyOnly(true));
            // state lozenge: overdue open milestones are highlighted as errors
            String css;
            String status = tm.status.name();
            switch (tm.status) {
            case Open:
                if (tm.isOverdue()) {
                    css = "aui-lozenge aui-lozenge-subtle aui-lozenge-error";
                    status = "overdue";
                } else {
                    css = "aui-lozenge aui-lozenge-subtle";
                }
                break;
            default:
                css = "aui-lozenge";
                break;
            }
            Label stateLabel = new Label("milestoneState", status);
            WicketUtils.setCssClass(stateLabel, css);
            entryPanel.add(stateLabel);
            if (tm.due == null) {
                entryPanel.add(new Label("milestoneDue", getString("gb.notSpecified")));
            } else {
                entryPanel.add(WicketUtils.createDatestampLabel("milestoneDue", tm.due, getTimeZone(), getTimeUtils()));
            }
            if (acceptingUpdates) {
                entryPanel.add(new LinkPanel("editMilestone", null, getString("gb.edit"), EditMilestonePage.class,
                        WicketUtils.newObjectParameter(repositoryName, tm.name)));
            } else {
                entryPanel.add(new Label("editMilestone").setVisible(false));
            }
            if (tm.isOpen()) {
                // re-load milestone with query results
                TicketMilestone m = app().tickets().getMilestone(getRepositoryModel(), tm.name);
                Fragment milestonePanel = new Fragment("milestonePanel", "openMilestoneFragment", this);
                Label label = new Label("progress");
                WicketUtils.setCssStyle(label, "width:" + m.getProgress() + "%;");
                milestonePanel.add(label);
                milestonePanel.add(new LinkPanel("openTickets", null,
                        MessageFormat.format(getString("gb.nOpenTickets"), m.getOpenTickets()),
                        TicketsPage.class,
                        queryParameters(null, tm.name, TicketsUI.openStatii, null, null, true, 1)));
                milestonePanel.add(new LinkPanel("closedTickets", null,
                        MessageFormat.format(getString("gb.nClosedTickets"), m.getClosedTickets()),
                        TicketsPage.class,
                        queryParameters(null, tm.name, TicketsUI.closedStatii, null, null, true, 1)));
                milestonePanel.add(new Label("totalTickets", MessageFormat.format(getString("gb.nTotalTickets"), m.getTotalTickets())));
                entryPanel.add(milestonePanel);
            } else {
                entryPanel.add(new Label("milestonePanel").setVisible(false));
            }
        }
    };
    return milestonesList;
}
/**
 * Assembles the bookmarkable page parameters for a ticket query link.
 * Only non-empty values are added, so generated URLs carry the minimum
 * set of parameters; defaults (descending order, page 1) are omitted.
 *
 * @param query structured Lucene query ("q"), may be null/empty
 * @param milestone milestone filter, may be null/empty
 * @param states status filter values, may be null/empty
 * @param assignedTo responsible filter, may be null/empty
 * @param sort Lucene sort field, may be null/empty
 * @param descending sort direction; only "asc" is emitted explicitly
 * @param page 1-based page number; only pages &gt; 1 are emitted
 * @return parameters for a TicketsPage bookmarkable link
 */
protected PageParameters queryParameters(
        String query,
        String milestone,
        String[] states,
        String assignedTo,
        String sort,
        boolean descending,
        int page) {
    final PageParameters p = WicketUtils.newRepositoryParameter(repositoryName);
    if (!StringUtils.isEmpty(query)) {
        p.add("q", query);
    }
    if (!StringUtils.isEmpty(milestone)) {
        p.add(Lucene.milestone.name(), milestone);
    }
    if (!ArrayUtils.isEmpty(states)) {
        for (String s : states) {
            p.add(Lucene.status.name(), s);
        }
    }
    if (!StringUtils.isEmpty(assignedTo)) {
        p.add(Lucene.responsible.name(), assignedTo);
    }
    if (!StringUtils.isEmpty(sort)) {
        p.add("sort", sort);
    }
    if (!descending) {
        p.add("direction", "asc");
    }
    if (page > 1) {
        p.add("pg", String.valueOf(page));
    }
    return p;
}
/**
 * Creates page parameters identifying a single ticket, by number, within the
 * current repository.
 *
 * @param ticket the query result whose number identifies the ticket
 * @return repository + ticket-number page parameters
 */
protected PageParameters newTicketParameter(QueryResult ticket) {
    String number = String.valueOf(ticket.number);
    return WicketUtils.newObjectParameter(repositoryName, number);
}
/** @return the localized page title for the tickets view ("gb.tickets"). */
@Override
protected String getPageName() {
    return getString("gb.tickets");
}
/**
 * Adds prev/next navigation and up to five numbered page links for the
 * current query. The prev/next pair is only shown when the result set spans
 * more than two pages' worth of tickets; numbered links are suppressed when
 * everything fits on a single page.
 *
 * @param query active structured query, propagated into the page links
 * @param milestone active milestone filter
 * @param states active status filter
 * @param assignedTo active responsible filter
 * @param sort active sort field
 * @param desc active sort direction
 * @param page current 1-based page number
 * @param pageSize results per page
 * @param count number of results on the current page
 * @param total total number of results for the query
 */
protected void buildPager(
        final String query,
        final String milestone,
        final String [] states,
        final String assignedTo,
        final String sort,
        final boolean desc,
        final int page,
        int pageSize,
        int count,
        int total) {
    boolean showNav = total > (2 * pageSize);
    boolean allowPrev = page > 1;
    boolean allowNext = (pageSize * (page - 1) + count) < total;
    add(new BookmarkablePageLink<Void>("prevLink", TicketsPage.class, queryParameters(query, milestone, states, assignedTo, sort, desc, page - 1)).setEnabled(allowPrev).setVisible(showNav));
    add(new BookmarkablePageLink<Void>("nextLink", TicketsPage.class, queryParameters(query, milestone, states, assignedTo, sort, desc, page + 1)).setEnabled(allowNext).setVisible(showNav));
    if (total <= pageSize) {
        // single page of results: no numbered links needed
        add(new Label("pageLink").setVisible(false));
        return;
    }
    // determine page numbers to display; rounds up for a partial last page
    int pages = count == 0 ? 0 : ((total / pageSize) + (total % pageSize == 0 ? 0 : 1));
    // preferred number of pagelinks
    int segments = 5;
    if (pages < segments) {
        // not enough data for preferred number of page links
        segments = pages;
    }
    // center the window on the current page, clamped to [1, pages]
    int minpage = Math.min(Math.max(1, page - 2), pages - (segments - 1));
    int maxpage = Math.min(pages, minpage + (segments - 1));
    List<Integer> sequence = new ArrayList<Integer>();
    for (int i = minpage; i <= maxpage; i++) {
        sequence.add(i);
    }
    ListDataProvider<Integer> pagesDp = new ListDataProvider<Integer>(sequence);
    DataView<Integer> pagesView = new DataView<Integer>("pageLink", pagesDp) {
        private static final long serialVersionUID = 1L;
        @Override
        public void populateItem(final Item<Integer> item) {
            final Integer i = item.getModelObject();
            LinkPanel link = new LinkPanel("page", null, "" + i, TicketsPage.class, queryParameters(query, milestone, states, assignedTo, sort, desc, i));
            link.setRenderBodyOnly(true);
            // Integer vs int comparison: i is unboxed here, so == is a value
            // comparison, not an identity comparison
            if (i == page) {
                WicketUtils.setCssClass(item, "active");
            }
            item.add(link);
        }
    };
    add(pagesView);
}
}
|
|
package com.snail.util;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import com.jakewharton.disklrucache.DiskLruCache;
public class SimpleDiskCache {
private static final int VALUE_IDX = 0;
private static final int METADATA_IDX = 1;
private static final List<File> usedDirs = new ArrayList<File>();
private final DiskLruCache diskLruCache;
/**
 * Opens the backing DiskLruCache with two streams per entry:
 * index 0 (VALUE_IDX) holds the cached value, index 1 (METADATA_IDX) holds
 * the serialized metadata map.
 */
private SimpleDiskCache(File dir, int appVersion, long maxSize) throws IOException {
    diskLruCache = DiskLruCache.open(dir, appVersion, 2, maxSize);
}
/**
 * Opens (or creates) a cache rooted at {@code dir}.
 *
 * @param dir cache directory; must not already be in use by another instance
 * @param appVersion version stamp; changing it invalidates existing entries
 * @param maxSize maximum cache size in bytes
 * @return a new cache instance
 * @throws IOException if the underlying DiskLruCache cannot be opened
 * @throws IllegalStateException if a cache was already opened on {@code dir}
 */
public static synchronized SimpleDiskCache open(File dir, int appVersion, long maxSize)
        throws IOException {
    // Two DiskLruCache instances over the same directory corrupt the journal.
    // The original code recorded used directories but never enforced the
    // guard; enforce it here.
    if (usedDirs.contains(dir)) {
        throw new IllegalStateException("Cache dir " + dir.getAbsolutePath() + " was used before.");
    }
    usedDirs.add(dir);
    return new SimpleDiskCache(dir, appVersion, maxSize);
}
/**
 * Returns a stream entry for {@code key}, or null on a cache miss.
 * The entry wraps an open snapshot: the caller must close the returned entry
 * to release it.
 */
public InputStreamEntry getInputStream(String key) throws IOException {
    DiskLruCache.Snapshot snapshot = diskLruCache.get(toInternalKey(key));
    if (snapshot == null) return null;
    return new InputStreamEntry(snapshot, readMetadata(snapshot));
}
/**
 * Decodes the cached value for {@code key} as a bitmap.
 * The snapshot is released before returning.
 *
 * @return the bitmap plus its metadata, or null on a cache miss
 */
public BitmapEntry getBitmap(String key) throws IOException {
    DiskLruCache.Snapshot snap = diskLruCache.get(toInternalKey(key));
    if (snap == null) {
        return null;
    }
    try {
        Bitmap decoded = BitmapFactory.decodeStream(snap.getInputStream(VALUE_IDX));
        Map<String, Serializable> meta = readMetadata(snap);
        return new BitmapEntry(decoded, meta);
    } finally {
        snap.close();
    }
}
/**
 * Reads the cached value for {@code key} as a string, together with its
 * metadata. The snapshot is released before returning.
 *
 * @return the string entry, or null on a cache miss
 */
public StringEntry getString(String key) throws IOException {
    DiskLruCache.Snapshot snap = diskLruCache.get(toInternalKey(key));
    if (snap == null) {
        return null;
    }
    try {
        String value = snap.getString(VALUE_IDX);
        return new StringEntry(value, readMetadata(snap));
    } finally {
        snap.close();
    }
}
/**
 * Checks whether an entry exists for {@code key}. Any snapshot obtained for
 * the check is released immediately.
 */
public boolean contains(String key) throws IOException {
    final DiskLruCache.Snapshot snapshot = diskLruCache.get(toInternalKey(key));
    if (snapshot != null) {
        snapshot.close();
        return true;
    }
    return false;
}
/** Opens a writer stream for {@code key} with an empty metadata map. */
public OutputStream openStream(String key) throws IOException {
    return openStream(key, new HashMap<String, Serializable>());
}
/**
 * Opens a stream that writes a new value for {@code key}. Metadata is written
 * first; the entry is committed when the returned stream is closed cleanly
 * and aborted if any write fails (see CacheOutputStream).
 *
 * @throws IOException if the entry cannot be edited -- including when a
 *             concurrent edit is in progress, in which case
 *             DiskLruCache.edit() returns null (the original code would have
 *             thrown an opaque NullPointerException here) -- or when the
 *             metadata cannot be written
 */
public OutputStream openStream(String key, Map<String, ? extends Serializable> metadata)
        throws IOException {
    DiskLruCache.Editor editor = diskLruCache.edit(toInternalKey(key));
    if (editor == null) {
        throw new IOException("Could not open an editor for key: " + key);
    }
    try {
        writeMetadata(metadata, editor);
        BufferedOutputStream bos = new BufferedOutputStream(editor.newOutputStream(VALUE_IDX));
        return new CacheOutputStream(bos, editor);
    } catch (IOException e) {
        editor.abort();
        throw e;
    }
}
/** Stores the contents of {@code is} under {@code key} with empty metadata. */
public void put(String key, InputStream is) throws IOException {
    put(key, is, new HashMap<String, Serializable>());
}
/**
 * Stores the contents of {@code is} under {@code key} along with metadata.
 * The destination stream is always closed; the source stream is left open
 * for the caller.
 */
public void put(String key, InputStream is, Map<String, Serializable> annotations)
        throws IOException {
    OutputStream sink = null;
    try {
        sink = openStream(key, annotations);
        IOUtils.copy(is, sink);
    } finally {
        if (sink != null) {
            sink.close();
        }
    }
}
/** Stores a string value under {@code key} with empty metadata. */
public void put(String key, String value) throws IOException {
    put(key, value, new HashMap<String, Serializable>());
}
/**
 * Stores a string value under {@code key} along with metadata.
 * Bytes are written as UTF-8 so the value round-trips consistently
 * regardless of the platform default charset (the original used
 * {@code value.getBytes()}, which depends on the default charset and can
 * disagree with how the string is later read back).
 */
public void put(String key, String value, Map<String, ? extends Serializable> annotations)
        throws IOException {
    OutputStream cos = null;
    try {
        cos = openStream(key, annotations);
        // UnsupportedEncodingException is an IOException; UTF-8 is mandatory
        // on all JVMs, so this never actually triggers.
        cos.write(value.getBytes("UTF-8"));
    } finally {
        if (cos != null) cos.close();
    }
}
/**
 * Removes the entry for {@code key}, if present.
 * Bug fix: entries are stored under the md5-hashed internal key (see
 * {@link #toInternalKey}), but the original passed the caller's raw key to
 * DiskLruCache.remove(), so removals never matched and stale entries
 * lingered until evicted by size.
 */
public void remove(String key) throws IOException {
    diskLruCache.remove(toInternalKey(key));
}
/**
 * Serializes the metadata map into the editor's METADATA_IDX stream.
 * The stream is always closed, swallowing any close failure.
 */
private void writeMetadata(Map<String, ? extends Serializable> metadata,
        DiskLruCache.Editor editor) throws IOException {
    ObjectOutputStream out = null;
    try {
        OutputStream sink = new BufferedOutputStream(editor.newOutputStream(METADATA_IDX));
        out = new ObjectOutputStream(sink);
        out.writeObject(metadata);
    } finally {
        IOUtils.closeQuietly(out);
    }
}
/**
 * Deserializes the metadata map stored at METADATA_IDX of a snapshot.
 *
 * @throws RuntimeException if the serialized class cannot be resolved
 */
private Map<String, Serializable> readMetadata(DiskLruCache.Snapshot snapshot)
        throws IOException {
    ObjectInputStream in = null;
    try {
        in = new ObjectInputStream(new BufferedInputStream(
                snapshot.getInputStream(METADATA_IDX)));
        @SuppressWarnings("unchecked")
        Map<String, Serializable> metadata = (Map<String, Serializable>) in.readObject();
        return metadata;
    } catch (ClassNotFoundException e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(in);
    }
}
// Normalizes arbitrary caller keys into the restricted character set that
// DiskLruCache accepts, by md5-hashing them.
private String toInternalKey(String key) {
    return md5(key);
}
private String md5(String s) {
try {
MessageDigest m = MessageDigest.getInstance("MD5");
m.update(s.getBytes("UTF-8"));
byte[] digest = m.digest();
BigInteger bigInt = new BigInteger(1, digest);
return bigInt.toString(16);
} catch (NoSuchAlgorithmException e) {
throw new AssertionError();
} catch (UnsupportedEncodingException e) {
throw new AssertionError();
}
}
    /**
     * Output-stream wrapper that finalizes the cache entry on close: the edit
     * is committed if every write and flush succeeded, and aborted if any of
     * them threw.
     */
    private class CacheOutputStream extends FilterOutputStream {
        private final DiskLruCache.Editor editor;
        // Set to true when any write/flush throws; checked in close() to decide
        // between commit and abort.
        private boolean failed = false;
        private CacheOutputStream(OutputStream os, DiskLruCache.Editor editor) {
            super(os);
            this.editor = editor;
        }
        @Override
        public void close() throws IOException {
            // Remember a close failure so the editor can still be finalized,
            // then rethrow it afterwards.
            IOException closeException = null;
            try {
                super.close();
            } catch (IOException e) {
                closeException = e;
            }
            // NOTE(review): a failure in super.close() above does not set
            // 'failed', so the edit is still committed in that case -- confirm
            // this is intended.
            if (failed) {
                editor.abort();
            } else {
                editor.commit();
            }
            if (closeException != null) throw closeException;
        }
        @Override
        public void flush() throws IOException {
            try {
                super.flush();
            } catch (IOException e) {
                failed = true;
                throw e;
            }
        }
        @Override
        public void write(int oneByte) throws IOException {
            try {
                super.write(oneByte);
            } catch (IOException e) {
                failed = true;
                throw e;
            }
        }
        @Override
        public void write(byte[] buffer) throws IOException {
            try {
                super.write(buffer);
            } catch (IOException e) {
                failed = true;
                throw e;
            }
        }
        @Override
        public void write(byte[] buffer, int offset, int length) throws IOException {
            try {
                super.write(buffer, offset, length);
            } catch (IOException e) {
                failed = true;
                throw e;
            }
        }
    }
public static class InputStreamEntry {
private final DiskLruCache.Snapshot snapshot;
private final Map<String, Serializable> metadata;
public InputStreamEntry(DiskLruCache.Snapshot snapshot, Map<String, Serializable> metadata) {
this.metadata = metadata;
this.snapshot = snapshot;
}
public InputStream getInputStream() {
return snapshot.getInputStream(VALUE_IDX);
}
public Map<String, Serializable> getMetadata() {
return metadata;
}
public void close() {
snapshot.close();
}
}
public static class BitmapEntry {
private final Bitmap bitmap;
private final Map<String, Serializable> metadata;
public BitmapEntry(Bitmap bitmap, Map<String, Serializable> metadata) {
this.bitmap = bitmap;
this.metadata = metadata;
}
public Bitmap getBitmap() {
return bitmap;
}
public Map<String, Serializable> getMetadata() {
return metadata;
}
}
public static class StringEntry {
private final String string;
private final Map<String, Serializable> metadata;
public StringEntry(String string, Map<String, Serializable> metadata) {
this.string = string;
this.metadata = metadata;
}
public String getString() {
return string;
}
public Map<String, Serializable> getMetadata() {
return metadata;
}
}
}
|
|
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.scalaide.debug.internal.spy;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.util.HashMap;
import java.util.Map;
import com.ibm.icu.text.MessageFormat;
/**
* This class can be used to spy all JDWP packets. It should be configured 'in
* between' the debugger application and the VM (or J9 debug proxy). Its
* parameters are: 1) The port number to which the debugger application
* connects; 2) The name of the host on which the VM or proxy waits for a JDWP
* connection; 3) The port number on which the VM or proxy waits for a JDWP
* connection; 4) The file where the trace is written to.
*
* Note that if this program is used for tracing JDWP activity of Leapfrog, the
* 'debug remote program' option must be used, and the J9 proxy must first be
* started up by hand on the port to which Leapfrog will connect. The J9 proxy
* that is started up by Leapfrog is not used and will return immediately.
*/
public class TcpipSpy extends Thread {

	private static final byte[] handshakeBytes = "JDWP-Handshake".getBytes(); //$NON-NLS-1$

	/** Direction of this relay thread: true when forwarding VM -> debugger. */
	private boolean fVMtoDebugger;
	private DataInputStream fDataIn;
	private DataOutputStream fDataOut;

	private static VerbosePacketStream out = new VerbosePacketStream(System.out);
	// Shared by BOTH relay threads (one per direction). Fix: all access now
	// goes through the synchronized store()/getCommand(int) methods below;
	// previously the HashMap was mutated concurrently without any locking.
	private static Map<Integer, JdwpConversation> fPackets = new HashMap<Integer, JdwpConversation>();

	// ID sizes reported by the VM (JDWP VirtualMachine.IDSizes); cached here so
	// the packet pretty-printer can decode variable-width IDs.
	private static int fFieldIDSize;
	private static int fMethodIDSize;
	private static int fObjectIDSize;
	private static int fReferenceTypeIDSize;
	private static int fFrameIDSize;
	private static boolean fHasSizes;

	public TcpipSpy(boolean VMtoDebugger, InputStream in, OutputStream out) {
		fVMtoDebugger = VMtoDebugger;
		fDataIn = new DataInputStream(new BufferedInputStream(in));
		fDataOut = new DataOutputStream(new BufferedOutputStream(out));
		fHasSizes = false;
	}

	/**
	 * Entry point. Accepts one debugger connection on the client port, connects
	 * to the target VM/proxy, and starts one relay thread per direction.
	 */
	public static void main(String[] args) {
		boolean listenMode = false;
		int inPort = 0;
		String serverHost = null;
		int outPort = 0;
		String outputFile = null;
		try {
			listenMode = args[0].equals("-l"); //$NON-NLS-1$
			int argIndex = listenMode ? 1 : 0;
			inPort = Integer.parseInt(args[argIndex++]);
			serverHost = args[argIndex++];
			outPort = Integer.parseInt(args[argIndex++]);
			// Bug fix: the optional output file exists only when there is an
			// argument AT index argIndex, i.e. args.length > argIndex. The old
			// '>=' comparison indexed past the end of args when no output file
			// was given; the resulting ArrayIndexOutOfBoundsException was
			// swallowed by this catch block, so the spy always exited with the
			// usage message unless an output file was supplied.
			if (args.length > argIndex) {
				outputFile = args[argIndex];
			}
		} catch (Exception e) {
			out.println("usage: TcpipSpy [-l] <client port> <server host> <server port> [<output file>]"); //$NON-NLS-1$
			System.exit(-1);
		}

		if (outputFile != null) {
			File file = new File(outputFile);
			out.println(MessageFormat
					.format("Writing output to {0}", new Object[] { file.getAbsolutePath() })); //$NON-NLS-1$
			try {
				out = new VerbosePacketStream(new BufferedOutputStream(
						new FileOutputStream(file)));
			} catch (FileNotFoundException e) {
				out.println(MessageFormat
						.format("Could not open {0}. Using stdout instead", new Object[] { file.getAbsolutePath() })); //$NON-NLS-1$
			}
		}
		out.println();

		ServerSocket serverSock = null;
		Socket outSock = null;
		try {
			serverSock = new ServerSocket(inPort);
			Socket inSock = serverSock.accept();
			outSock = new Socket(InetAddress.getByName(serverHost), outPort);
			// One relay per direction; each prints the packets it forwards.
			Thread inThread = new TcpipSpy(listenMode, inSock.getInputStream(),
					outSock.getOutputStream());
			Thread outThread = new TcpipSpy(!listenMode, outSock.getInputStream(),
					inSock.getOutputStream());
			inThread.start();
			outThread.start();
			inThread.join();
			outThread.join();
		} catch (Exception e) {
			out.println(e);
		} finally {
			// Best-effort cleanup; failures here are uninteresting at exit.
			try {
				if (serverSock != null) {
					serverSock.close();
				}
			} catch (IOException e) {
				// ignored: shutting down anyway
			}
			try {
				if (outSock != null) {
					outSock.close();
				}
			} catch (Exception e) {
				// ignored: shutting down anyway
			}
		}
	}

	/**
	 * Relays the JDWP handshake verbatim, then forwards every packet while
	 * printing it to {@code out}. Terminates silently when the peer closes the
	 * connection (EOF/SocketException).
	 */
	@Override
	public void run() {
		try {
			// Skip handshake: forward the fixed-length "JDWP-Handshake" bytes.
			int handshakeLength;
			handshakeLength = handshakeBytes.length;
			while (handshakeLength-- > 0) {
				int b = fDataIn.read();
				fDataOut.write(b);
			}
			fDataOut.flush();

			// Print all packets until the connection drops.
			while (true) {
				JdwpPacket p = JdwpPacket.read(fDataIn);
				// we need to store conversation only for commands sent by the
				// debugger, as there is no answer from the debugger to VM
				// commands.
				if (!(fVMtoDebugger && (p.getFlags() & JdwpPacket.FLAG_REPLY_PACKET) == 0)) {
					store(p);
				}
				out.print(p, fVMtoDebugger);
				out.flush();
				p.write(fDataOut);
				fDataOut.flush();
			}
		} catch (EOFException e) {
			// normal termination: peer closed the stream
		} catch (SocketException e) {
			// normal termination: socket closed underneath us
		} catch (IOException e) {
			out.println(MessageFormat.format(
					"Caught exception: {0}", new Object[] { e.toString() })); //$NON-NLS-1$
			e.printStackTrace(out);
		} finally {
			try {
				fDataIn.close();
				fDataOut.close();
			} catch (IOException e) {
				// ignored: nothing more we can do while shutting the relay down
			}
			out.flush();
		}
	}

	/**
	 * Returns the command packet previously stored under {@code id}, or
	 * {@code null}. Synchronized because both relay threads consult the shared
	 * conversation map.
	 */
	public static synchronized JdwpCommandPacket getCommand(int id) {
		JdwpConversation conversation = fPackets.get(Integer.valueOf(id));
		if (conversation != null)
			return conversation.getCommand();
		return null;
	}

	/**
	 * Records {@code packet} in the conversation keyed by its packet id,
	 * creating the conversation on first sight. Synchronized: both relay
	 * threads mutate the shared map.
	 */
	protected static synchronized void store(JdwpPacket packet) {
		int id = packet.getId();
		// Integer.valueOf instead of the deprecated new Integer(..): uses the
		// boxed-value cache and avoids needless allocation.
		Integer boxedId = Integer.valueOf(id);
		JdwpConversation conversation = fPackets.get(boxedId);
		if (conversation == null) {
			conversation = new JdwpConversation(id);
			fPackets.put(boxedId, conversation);
		}
		if ((packet.getFlags() & JdwpPacket.FLAG_REPLY_PACKET) != 0) {
			conversation.setReply((JdwpReplyPacket) packet);
		} else {
			conversation.setCommand((JdwpCommandPacket) packet);
		}
	}

	/**
	 * Resolves the JDWP command code for {@code packet}: the packet's own code
	 * if it is a command, otherwise the code of the stored command with the
	 * same id.
	 *
	 * @throws UnableToParseDataException if a reply has no matching command
	 */
	public static int getCommand(JdwpPacket packet)
			throws UnableToParseDataException {
		JdwpCommandPacket command = null;
		if (packet instanceof JdwpCommandPacket) {
			command = (JdwpCommandPacket) packet;
		} else {
			command = getCommand(packet.getId());
			if (command == null) {
				throw new UnableToParseDataException(
						"This packet is marked as reply, but there is no command with the same id.", null); //$NON-NLS-1$
			}
		}
		return command.getCommand();
	}

	/** Whether the VM's ID sizes have been captured yet. */
	public static boolean hasSizes() {
		return fHasSizes;
	}

	public static void setHasSizes(boolean value) {
		fHasSizes = value;
	}

	public static void setFieldIDSize(int fieldIDSize) {
		fFieldIDSize = fieldIDSize;
	}

	public static int getFieldIDSize() {
		return fFieldIDSize;
	}

	public static void setMethodIDSize(int methodIDSize) {
		fMethodIDSize = methodIDSize;
	}

	public static int getMethodIDSize() {
		return fMethodIDSize;
	}

	public static void setObjectIDSize(int objectIDSize) {
		fObjectIDSize = objectIDSize;
	}

	public static int getObjectIDSize() {
		return fObjectIDSize;
	}

	public static void setReferenceTypeIDSize(int referenceTypeIDSize) {
		fReferenceTypeIDSize = referenceTypeIDSize;
	}

	public static int getReferenceTypeIDSize() {
		return fReferenceTypeIDSize;
	}

	public static void setFrameIDSize(int frameIDSize) {
		fFrameIDSize = frameIDSize;
	}

	public static int getFrameIDSize() {
		return fFrameIDSize;
	}
}
|
|
package org.opencloudb.jdbc;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.*;
import org.apache.log4j.Logger;
import org.opencloudb.MycatServer;
import org.opencloudb.backend.BackendConnection;
import org.opencloudb.config.ErrorCode;
import org.opencloudb.mysql.nio.handler.ResponseHandler;
import org.opencloudb.net.mysql.EOFPacket;
import org.opencloudb.net.mysql.ErrorPacket;
import org.opencloudb.net.mysql.FieldPacket;
import org.opencloudb.net.mysql.OkPacket;
import org.opencloudb.net.mysql.ResultSetHeaderPacket;
import org.opencloudb.net.mysql.RowDataPacket;
import org.opencloudb.route.RouteResultsetNode;
import org.opencloudb.server.ServerConnection;
import org.opencloudb.server.parser.ServerParse;
import org.opencloudb.util.MysqlDefs;
import org.opencloudb.util.ResultSetUtil;
import org.opencloudb.util.StringUtil;
/**
 * A Mycat backend connection backed by a plain JDBC {@link Connection}.
 * Translates SQL execution results into MySQL wire-protocol packets (header,
 * field, row, EOF/OK/error) and hands them to the attached
 * {@link ResponseHandler}. Execution is dispatched onto the server's business
 * executor; packet ids are sequenced via the {@code packetId} counter.
 */
public class JDBCConnection implements BackendConnection {
	protected static final Logger LOGGER = Logger
			.getLogger(JDBCConnection.class);
	private JDBCDatasource pool;
	private volatile String schema;
	private volatile String dbType;
	// Last schema actually selected on the JDBC connection; compared against
	// 'schema' in executeSQL to avoid redundant setCatalog calls.
	private volatile String oldSchema;
	// Wire-protocol packet sequence number; incremented before each packet.
	private byte packetId;
	private int txIsolation;
	private volatile boolean running = false;
	private volatile boolean borrowed;
	// NOTE(review): never assigned after construction; getId() always returns 0.
	private long id = 0;
	private String host;
	private int port;
	private Connection con;
	private ResponseHandler respHandler;
	private volatile Object attachement;
	boolean headerOutputed = false;
	private volatile boolean modifiedSQLExecuted;
	private final long startTime;
	private long lastTime;
	// Spark connections skip setAutoCommit (see executeSQL).
	private boolean isSpark = false;
	public JDBCConnection() {
		startTime = System.currentTimeMillis();
	}
	public Connection getCon() {
		return con;
	}
	public void setCon(Connection con) {
		this.con = con;
	}
	// Closes the underlying JDBC connection; the reason is ignored and any
	// SQLException is deliberately swallowed (best-effort close).
	@Override
	public void close(String reason) {
		try {
			con.close();
		} catch (SQLException e) {
		}
	}
	public void setPool(JDBCDatasource pool) {
		this.pool = pool;
	}
	public void setHost(String host) {
		this.host = host;
	}
	public void setPort(int port) {
		this.port = port;
	}
	// A connection that cannot report its state is treated as closed.
	@Override
	public boolean isClosed() {
		try {
			return con == null || con.isClosed();
		} catch (SQLException e) {
			return true;
		}
	}
	@Override
	public void idleCheck() {
	}
	@Override
	public long getStartupTime() {
		return startTime;
	}
	@Override
	public String getHost() {
		return this.host;
	}
	@Override
	public int getPort() {
		return this.port;
	}
	@Override
	public int getLocalPort() {
		return 0;
	}
	@Override
	public long getNetInBytes() {
		return 0;
	}
	@Override
	public long getNetOutBytes() {
		return 0;
	}
	@Override
	public boolean isModifiedSQLExecuted() {
		return modifiedSQLExecuted;
	}
	@Override
	public boolean isFromSlaveDB() {
		return false;
	}
	public String getDbType() {
		return this.dbType;
	}
	// Normalizes the db type to upper case; "SPARK" enables spark-specific
	// behavior (no setAutoCommit in executeSQL).
	public void setDbType(String newDbType) {
		this.dbType = newDbType.toUpperCase();
		this.isSpark = dbType.equals("SPARK");
	}
	@Override
	public String getSchema() {
		return this.schema;
	}
	@Override
	public void setSchema(String newSchema) {
		this.oldSchema = this.schema;
		this.schema = newSchema;
	}
	@Override
	public long getLastTime() {
		return lastTime;
	}
	@Override
	public boolean isClosedOrQuit() {
		return this.isClosed();
	}
	@Override
	public void setAttachment(Object attachment) {
		this.attachement = attachment;
	}
	@Override
	public void quit() {
		this.close("client quit");
	}
	@Override
	public void setLastTime(long currentTimeMillis) {
		this.lastTime = currentTimeMillis;
	}
	// Returns this connection to its datasource pool and clears per-use state.
	@Override
	public void release() {
		modifiedSQLExecuted = false;
		setResponseHandler(null);
		pool.releaseChannel(this);
	}
	public void setRunning(boolean running) {
		this.running = running;
	}
	@Override
	public boolean setResponseHandler(ResponseHandler commandHandler) {
		respHandler = commandHandler;
		return false;
	}
	// Commits the JDBC transaction and reports OK to the response handler.
	@Override
	public void commit() {
		try {
			con.commit();
			this.respHandler.okResponse(OkPacket.OK, this);
		} catch (SQLException e) {
			throw new RuntimeException(e);
		}
	}
	/**
	 * Executes the routed statement on the JDBC connection: syncs catalog and
	 * autocommit state, dispatches SELECT/SHOW to result-set output and
	 * everything else to executeddl, and converts failures into MySQL error
	 * packets for the response handler.
	 */
	private void executeSQL(RouteResultsetNode rrn, ServerConnection sc,
			boolean autocommit) throws IOException {
		String orgin = rrn.getStatement();
		// String sql = rrn.getStatement().toLowerCase();
		// LOGGER.info("JDBC SQL:"+orgin+"|"+sc.toString());
		if (!modifiedSQLExecuted && rrn.isModifySQL()) {
			modifiedSQLExecuted = true;
		}
		try {
			// NOTE(review): NPE if 'schema' was never set -- confirm callers
			// always call setSchema before execute.
			if (!this.schema.equals(this.oldSchema)) {
				con.setCatalog(schema);
				this.oldSchema = schema;
			}
			if (!this.isSpark) {
				con.setAutoCommit(autocommit);
			}
			int sqlType = rrn.getSqlType();
			if (sqlType == ServerParse.SELECT || sqlType == ServerParse.SHOW) {
				// Non-MySQL backends answer SHOW locally instead of forwarding.
				if ((sqlType == ServerParse.SHOW) && (!dbType.equals("MYSQL"))) {
					// showCMD(sc, orgin);
					//ShowVariables.execute(sc, orgin);
					ShowVariables.execute(sc, orgin,this);
				} else if ("SELECT CONNECTION_ID()".equalsIgnoreCase(orgin)) {
					//ShowVariables.justReturnValue(sc,String.valueOf(sc.getId()));
					ShowVariables.justReturnValue(sc,String.valueOf(sc.getId()),this);
				} else {
					ouputResultSet(sc, orgin);
				}
			} else {
				executeddl(sc, orgin);
			}
		} catch (SQLException e) {
			// NOTE(review): e.getMessage() may be null, which would make
			// msg.getBytes() throw NPE inside this handler -- confirm.
			String msg = e.getMessage();
			ErrorPacket error = new ErrorPacket();
			error.packetId = ++packetId;
			error.errno = e.getErrorCode();
			error.message = msg.getBytes();
			this.respHandler.errorResponse(error.writeToBytes(sc), this);
		}
		catch (Exception e) {
			String msg = e.getMessage();
			ErrorPacket error = new ErrorPacket();
			error.packetId = ++packetId;
			error.errno = ErrorCode.ER_UNKNOWN_ERROR;
			error.message = msg.getBytes();
			this.respHandler.errorResponse(error.writeToBytes(sc), this);
		}
		finally {
			this.running = false;
		}
	}
	// Builds a synthetic VARCHAR field packet (javaType 12 == java.sql.Types.VARCHAR).
	private FieldPacket getNewFieldPacket(String charset, String fieldName) {
		FieldPacket fieldPacket = new FieldPacket();
		fieldPacket.orgName = StringUtil.encode(fieldName, charset);
		fieldPacket.name = StringUtil.encode(fieldName, charset);
		fieldPacket.length = 20;
		fieldPacket.flags = 0;
		fieldPacket.decimals = 0;
		int javaType = 12;
		fieldPacket.type = (byte) (MysqlDefs.javaTypeMysql(javaType) & 0xff);
		return fieldPacket;
	}
	// Executes a non-query statement (despite the name, any update/DDL) and
	// reports the affected-row count as an OK packet.
	private void executeddl(ServerConnection sc, String sql)
			throws SQLException {
		Statement stmt = null;
		try {
			stmt = con.createStatement();
			int count = stmt.executeUpdate(sql);
			OkPacket okPck = new OkPacket();
			okPck.affectedRows = count;
			okPck.insertId = 0;
			okPck.packetId = ++packetId;
			okPck.message = " OK!".getBytes();
			this.respHandler.okResponse(okPck.writeToBytes(sc), this);
		} finally {
			if (stmt != null) {
				try {
					stmt.close();
				} catch (SQLException e) {
				}
			}
		}
	}
	/**
	 * Runs a query and streams the JDBC ResultSet to the response handler as
	 * MySQL protocol packets in wire order: result-set header, one field packet
	 * per column, EOF, one row packet per row, EOF. The shared ByteBuffer from
	 * the server connection is reused for each packet and recycled at the end.
	 */
	private void ouputResultSet(ServerConnection sc, String sql)
			throws SQLException {
		ResultSet rs = null;
		Statement stmt = null;
		try {
			stmt = con.createStatement();
			rs = stmt.executeQuery(sql);
			List<FieldPacket> fieldPks = new LinkedList<FieldPacket>();
			ResultSetUtil.resultSetToFieldPacket(sc.getCharset(), fieldPks, rs,
					this.isSpark);
			int colunmCount = fieldPks.size();
			ByteBuffer byteBuf = sc.allocate();
			ResultSetHeaderPacket headerPkg = new ResultSetHeaderPacket();
			headerPkg.fieldCount = fieldPks.size();
			headerPkg.packetId = ++packetId;
			byteBuf = headerPkg.write(byteBuf, sc, true);
			byteBuf.flip();
			byte[] header = new byte[byteBuf.limit()];
			byteBuf.get(header);
			byteBuf.clear();
			List<byte[]> fields = new ArrayList<byte[]>(fieldPks.size());
			Iterator<FieldPacket> itor = fieldPks.iterator();
			while (itor.hasNext()) {
				FieldPacket curField = itor.next();
				curField.packetId = ++packetId;
				byteBuf = curField.write(byteBuf, sc, false);
				byteBuf.flip();
				byte[] field = new byte[byteBuf.limit()];
				byteBuf.get(field);
				byteBuf.clear();
				fields.add(field);
				itor.remove();
			}
			EOFPacket eofPckg = new EOFPacket();
			eofPckg.packetId = ++packetId;
			byteBuf = eofPckg.write(byteBuf, sc, false);
			byteBuf.flip();
			byte[] eof = new byte[byteBuf.limit()];
			byteBuf.get(eof);
			byteBuf.clear();
			this.respHandler.fieldEofResponse(header, fields, eof, this);
			// output row
			while (rs.next()) {
				RowDataPacket curRow = new RowDataPacket(colunmCount);
				for (int i = 0; i < colunmCount; i++) {
					// JDBC columns are 1-based.
					int j = i + 1;
					curRow.add(StringUtil.encode(rs.getString(j),
							sc.getCharset()));
				}
				curRow.packetId = ++packetId;
				byteBuf = curRow.write(byteBuf, sc, false);
				byteBuf.flip();
				byte[] row = new byte[byteBuf.limit()];
				byteBuf.get(row);
				byteBuf.clear();
				this.respHandler.rowResponse(row, this);
			}
			// end row
			eofPckg = new EOFPacket();
			eofPckg.packetId = ++packetId;
			byteBuf = eofPckg.write(byteBuf, sc, false);
			byteBuf.flip();
			eof = new byte[byteBuf.limit()];
			byteBuf.get(eof);
			// NOTE(review): the buffer is recycled only on this success path;
			// an exception above would skip sc.recycle(byteBuf) -- confirm.
			sc.recycle(byteBuf);
			this.respHandler.rowEofResponse(eof, this);
		} finally {
			if (rs != null) {
				try {
					rs.close();
				} catch (SQLException e) {
				}
			}
			if (stmt != null) {
				try {
					stmt.close();
				} catch (SQLException e) {
				}
			}
		}
	}
	// NOTE(review): raw query() is unsupported here; UnsupportedEncodingException
	// is an odd exception type for this, apparently chosen because the interface
	// declares it -- confirm.
	@Override
	public void query(String sql) throws UnsupportedEncodingException {
		throw new UnsupportedEncodingException("unsupported yet ");
	}
	@Override
	public Object getAttachment() {
		return this.attachement;
	}
	@Override
	public String getCharset() {
		return null;
	}
	// Dispatches execution onto the server's business executor; an IOException
	// from executeSQL is rethrown unchecked inside the worker thread.
	@Override
	public void execute(final RouteResultsetNode node,
			final ServerConnection source, final boolean autocommit)
			throws IOException {
		Runnable runnable = new Runnable() {
			@Override
			public void run() {
				try {
					executeSQL(node, source, autocommit);
				} catch (IOException e) {
					throw new RuntimeException(e);
				}
			}
		};
		MycatServer.getInstance().getBusinessExecutor().execute(runnable);
	}
	@Override
	public void recordSql(String host, String schema, String statement) {
	}
	@Override
	public boolean syncAndExcute() {
		return true;
	}
	// Rolls back the JDBC transaction and reports OK to the response handler.
	@Override
	public void rollback() {
		try {
			con.rollback();
			this.respHandler.okResponse(OkPacket.OK, this);
		} catch (SQLException e) {
			throw new RuntimeException(e);
		}
	}
	public boolean isRunning() {
		return this.running;
	}
	@Override
	public boolean isBorrowed() {
		return this.borrowed;
	}
	@Override
	public void setBorrowed(boolean borrowed) {
		this.borrowed = borrowed;
	}
	// 0 means the isolation level could not be read; -1 means no connection.
	@Override
	public int getTxIsolation() {
		if (con != null) {
			try {
				return con.getTransactionIsolation();
			} catch (SQLException e) {
				return 0;
			}
		} else {
			return -1;
		}
	}
	// Defaults to true when there is no connection or the state cannot be read.
	@Override
	public boolean isAutocommit() {
		if (con == null) {
			return true;
		} else {
			try {
				return con.getAutoCommit();
			} catch (SQLException e) {
			}
		}
		return true;
	}
	@Override
	public long getId() {
		return id;
	}
	@Override
	public String toString() {
		return "JDBCConnection [autocommit=" + this.isAutocommit()
				+ ", txIsolation=" + txIsolation + ", running=" + running
				+ ", borrowed=" + borrowed + ", id=" + id + ", host=" + host
				+ ", port=" + port + "]";
	}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.hbase;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePropertyKey;
import org.apache.camel.Processor;
import org.apache.camel.component.hbase.mapping.CellMappingStrategy;
import org.apache.camel.component.hbase.mapping.CellMappingStrategyFactory;
import org.apache.camel.component.hbase.model.HBaseCell;
import org.apache.camel.component.hbase.model.HBaseData;
import org.apache.camel.component.hbase.model.HBaseRow;
import org.apache.camel.support.DefaultExchange;
import org.apache.camel.support.ScheduledBatchPollingConsumer;
import org.apache.camel.util.CastUtils;
import org.apache.camel.util.ObjectHelper;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The HBase consumer.
*/
public class HBaseConsumer extends ScheduledBatchPollingConsumer {
private static final Logger LOG = LoggerFactory.getLogger(HBaseConsumer.class);
private final HBaseEndpoint endpoint;
private HBaseRow rowModel;
public HBaseConsumer(HBaseEndpoint endpoint, Processor processor) {
super(endpoint, processor);
this.endpoint = endpoint;
this.rowModel = endpoint.getRowModel();
}
@Override
protected int poll() throws Exception {
try (Table table = endpoint.getTable()) {
shutdownRunningTask = null;
pendingExchanges = 0;
Queue<Exchange> queue = new LinkedList<>();
Scan scan = new Scan();
List<Filter> filters = new LinkedList<>();
if (endpoint.getFilters() != null) {
filters.addAll(endpoint.getFilters());
}
if (maxMessagesPerPoll > 0) {
filters.add(new PageFilter(maxMessagesPerPoll));
}
if (!filters.isEmpty()) {
Filter compoundFilter = new FilterList(filters);
scan.setFilter(compoundFilter);
}
if (rowModel != null && rowModel.getCells() != null) {
Set<HBaseCell> cellModels = rowModel.getCells();
for (HBaseCell cellModel : cellModels) {
scan.addColumn(HBaseHelper.getHBaseFieldAsBytes(cellModel.getFamily()),
HBaseHelper.getHBaseFieldAsBytes(cellModel.getQualifier()));
}
}
ResultScanner scanner = table.getScanner(scan);
int exchangeCount = 0;
// The next three statements are used just to get a reference to the BodyCellMappingStrategy instance.
Exchange exchange = new DefaultExchange(endpoint);
exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
for (Result result = scanner.next();
(exchangeCount < maxMessagesPerPoll || maxMessagesPerPoll <= 0) && result != null;
result = scanner.next()) {
HBaseData data = new HBaseData();
HBaseRow resultRow = new HBaseRow();
resultRow.apply(rowModel);
byte[] row = result.getRow();
resultRow.setId(endpoint.getCamelContext().getTypeConverter().convertTo(rowModel.getRowType(), row));
List<Cell> cells = result.listCells();
if (cells != null) {
Set<HBaseCell> cellModels = rowModel.getCells();
if (!cellModels.isEmpty()) {
for (HBaseCell modelCell : cellModels) {
HBaseCell resultCell = new HBaseCell();
String family = modelCell.getFamily();
String column = modelCell.getQualifier();
resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(
modelCell.getValueType(),
result.getValue(HBaseHelper.getHBaseFieldAsBytes(family),
HBaseHelper.getHBaseFieldAsBytes(column))));
resultCell.setFamily(modelCell.getFamily());
resultCell.setQualifier(modelCell.getQualifier());
resultRow.getCells().add(resultCell);
}
} else {
// just need to put every key value into the result Cells
for (Cell cell : cells) {
String qualifier = new String(CellUtil.cloneQualifier(cell));
String family = new String(CellUtil.cloneFamily(cell));
HBaseCell resultCell = new HBaseCell();
resultCell.setFamily(family);
resultCell.setQualifier(qualifier);
resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(String.class,
CellUtil.cloneValue(cell)));
resultRow.getCells().add(resultCell);
}
}
data.getRows().add(resultRow);
exchange = createExchange(true);
// Probably overkill but kept it here for consistency.
exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
mappingStrategy.applyScanResults(exchange.getIn(), data);
//Make sure that there is a header containing the marked row ids, so that they can be deleted.
exchange.getIn().setHeader(HBaseAttribute.HBASE_MARKED_ROW_ID.asHeader(), result.getRow());
queue.add(exchange);
exchangeCount++;
}
}
scanner.close();
return queue.isEmpty() ? 0 : processBatch(CastUtils.cast(queue));
}
}
@Override
public int processBatch(Queue<Object> exchanges) throws Exception {
int total = exchanges.size();
// limit if needed
if (maxMessagesPerPoll > 0 && total > maxMessagesPerPoll) {
LOG.debug("Limiting to maximum messages to poll {} as there were {} messages in this poll.", maxMessagesPerPoll,
total);
total = maxMessagesPerPoll;
}
for (int index = 0; index < total && isBatchAllowed(); index++) {
// only loop if we are started (allowed to run)
Exchange exchange = ObjectHelper.cast(Exchange.class, exchanges.poll());
// add current index and total as properties
exchange.setProperty(ExchangePropertyKey.BATCH_INDEX, index);
exchange.setProperty(ExchangePropertyKey.BATCH_SIZE, total);
exchange.setProperty(ExchangePropertyKey.BATCH_COMPLETE, index == total - 1);
// update pending number of exchanges
pendingExchanges = total - index - 1;
LOG.trace("Processing exchange [{}]...", exchange);
getProcessor().process(exchange);
if (exchange.getException() != null) {
// if we failed then throw exception
throw exchange.getException();
}
if (endpoint.isRemove()) {
remove((byte[]) exchange.getIn().getHeader(HBaseAttribute.HBASE_MARKED_ROW_ID.asHeader()));
}
}
return total;
}
/**
* Delegates to the {@link HBaseRemoveHandler}.
*/
private void remove(byte[] row) throws IOException {
try (Table table = endpoint.getTable()) {
endpoint.getRemoveHandler().remove(table, row);
}
}
public HBaseRow getRowModel() {
return rowModel;
}
public void setRowModel(HBaseRow rowModel) {
this.rowModel = rowModel;
}
}
|
|
/*
* Copyright (c) 2017, Sensirion AG
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of Sensirion AG nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.sensirion.smartgadget.view.device_management;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.AlertDialog.Builder;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.AssetManager;
import android.content.res.ColorStateList;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.Switch;
import android.widget.TextView;
import com.sensirion.libsmartgadget.Gadget;
import com.sensirion.libsmartgadget.GadgetDataPoint;
import com.sensirion.libsmartgadget.GadgetDownloadService;
import com.sensirion.libsmartgadget.GadgetListener;
import com.sensirion.libsmartgadget.GadgetService;
import com.sensirion.libsmartgadget.GadgetValue;
import com.sensirion.libsmartgadget.smartgadget.BatteryService;
import com.sensirion.smartgadget.R;
import com.sensirion.smartgadget.peripheral.rht_sensor.external.RHTHumigadgetSensorManager;
import com.sensirion.smartgadget.persistence.device_name_database.DeviceNameDatabaseManager;
import com.sensirion.smartgadget.utils.DeviceModel;
import com.sensirion.smartgadget.utils.Interval;
import com.sensirion.smartgadget.utils.TimeFormatter;
import com.sensirion.smartgadget.utils.download.LoggerInterval;
import com.sensirion.smartgadget.utils.view.ParentFragment;
import com.sensirion.smartgadget.view.MainActivity;
import java.util.List;
import java.util.Locale;
import butterknife.BindBool;
import butterknife.BindColor;
import butterknife.BindDrawable;
import butterknife.BindString;
import butterknife.BindView;
import butterknife.ButterKnife;
// TODO: Requires improved initUiElements calling behavior...
/**
 * Fragment for managing a single connected Smart Gadget: renaming the device,
 * toggling on-gadget logging, choosing the logging interval, downloading the
 * gadget's logged data, and displaying its battery level.
 *
 * {@link #init(String)} must be called with the device address before the
 * fragment is shown; the screen closes itself (navigating back to
 * {@link ScanDeviceFragment}) when the selected device is unavailable or
 * disconnects.
 */
public class ManageDeviceFragment extends ParentFragment implements GadgetListener {
    private static final String TAG = ManageDeviceFragment.class.getSimpleName();

    // Sentinel values used when the gadget does not (yet) report a reading.
    public static final int UNKNOWN_BATTERY_LEVEL = -1;
    private static final int UNKNOWN_LOGGING_INTERVAL = -1;

    // How long the "download completed/failed" label stays visible before the
    // download button is reset to its idle state.
    private static final int DOWNLOAD_COMPLETE_RESET_DELAY_MS = 2000;

    // Device selected via init(); either may be null if init() was not called
    // or the device is gone (guarded by isSelectedDeviceAvailable()).
    private Gadget mSelectedGadget;
    private DeviceModel mSelectedDeviceModel;

    // Pending delayed reset of the download button; removed in onPause() so it
    // cannot touch views after the fragment is paused.
    private Runnable mDownloadButtonReset;

    // XML Resources
    @BindBool(R.bool.is_tablet)
    boolean IS_TABLET;
    @BindString(R.string.enable_logging)
    String ENABLE_LOGGING_STRING;
    @BindString(R.string.label_advice_logging_enable)
    String GADGET_ENABLE_ADVICE_STRING;
    @BindString(R.string.interval_modification)
    String INTERVAL_MODIFICATION_TITLE;
    @BindString(R.string.interval_modification_message)
    String INTERVAL_MODIFICATION_MESSAGE;
    @BindString(R.string.yes)
    String YES_STRING;
    @BindString(R.string.no)
    String NO_STRING;
    @BindString(R.string.typeface_condensed)
    String TYPEFACE_CONDENSED_LOCATION;
    @BindString(R.string.typeface_bold)
    String TYPEFACE_BOLD_LOCATION;

    // XML Views
    @BindView(R.id.dashboard_battery_bar)
    Button mBatteryBoardBoardView;
    @BindView(R.id.dashboard_gadget_logging)
    Button mLoggingBoardBoardView;
    @BindView(R.id.dashboard_logging_interval)
    Button mIntervalBoardView;
    @BindView(R.id.dashboard_download_progress)
    Button mDownloadBoardView;
    @BindView(R.id.manage_device_gadget_name_edit_field)
    EditText mGadgetNameEditText;
    @BindView(R.id.manage_device_button_disconnect)
    Button mDisconnectButton;
    @BindView(R.id.manage_device_button_logging_interval)
    Button mLoggingIntervalButton;
    @BindView(R.id.manage_device_battery_level_value)
    TextView mBatteryLevelValue;
    @BindView(R.id.manage_device_battery_bar)
    ProgressBar mBatteryBar;
    @BindView(R.id.manage_device_switch_toggle_logger)
    Switch mLoggingToggle;
    @BindView(R.id.manage_device_battery_bar_layout)
    RelativeLayout mBatteryLevelLayout;
    @BindView(R.id.manage_device_gadget_logging_layout)
    RelativeLayout mLoggingLayout;
    @BindView(R.id.manage_device_download_progress)
    TextView mDownloadButtonText;
    @BindView(R.id.manage_device_download_progress_bar)
    ProgressBar mDownloadProgressBar;
    // NOTE(review): the resource id "manage_device_gdaget_type" looks misspelled,
    // but it must match the id declared in the layout XML — do not rename here alone.
    @BindView(R.id.manage_device_gdaget_type)
    TextView mGadgetType;
    @BindColor(R.color.sensirion_green_darkened)
    int mColorSensirionGreenDarkened;
    @BindColor(R.color.yellow)
    int mColorYellow;
    @BindColor(R.color.orange)
    int mColorOrange;
    @BindColor(R.color.red)
    int mColorRed;
    @BindColor(R.color.light_gray)
    int mColorLightGray;
    @BindDrawable(R.drawable.download_progress)
    Drawable mDownloadProgressDrawable;
    @BindColor(R.color.manage_device_button)
    ColorStateList mDeviceButtonColors;

    public ManageDeviceFragment() {
    }

    /*
     * Initialization
     */

    /**
     * This method should be called before starting the class.
     *
     * @param deviceAddress address of the device that the user wants to manage.
     */
    public void init(@NonNull final String deviceAddress) {
        mSelectedDeviceModel = RHTHumigadgetSensorManager.getInstance().getConnectedDevice(deviceAddress);
        mSelectedGadget = RHTHumigadgetSensorManager.getInstance().getConnectedGadget(deviceAddress);
    }

    /*
     * Lifecycle Methods
     */
    @Override
    public View onCreateView(@NonNull final LayoutInflater inflater, final ViewGroup container,
                             final Bundle savedInstanceState) {
        final View view = inflater.inflate(R.layout.fragment_manage_device, container, false);
        ButterKnife.bind(this, view);
        // On tablets a touch anywhere on this screen toggles the side menu.
        view.setOnTouchListener(new OnTouchOpenTabletMenuListener());
        setHasOptionsMenu(true);
        return view;
    }

    @Override
    public void onResume() {
        super.onResume();
        // Register for gadget callbacks and (re)build the UI; if the device is
        // no longer available there is nothing to manage, so leave the screen.
        if (isSelectedDeviceAvailable()) {
            mSelectedGadget.addListener(this);
            initUiElements();
        } else {
            closeScreen();
        }
    }

    @Override
    public void onPause() {
        if (isSelectedDeviceAvailable()) {
            assert mSelectedGadget != null;
            mSelectedGadget.removeListener(this);
        }
        // Cancel the delayed download-button reset so it does not fire while paused.
        if (mDownloadButtonReset != null) {
            mDownloadButtonText.removeCallbacks(mDownloadButtonReset);
        }
        super.onPause();
    }

    // True when init() found a device model and the gadget is still connected.
    private boolean isSelectedDeviceAvailable() {
        return mSelectedDeviceModel != null && mSelectedGadget != null && mSelectedGadget.isConnected();
    }

    // Navigates back to the device scan screen, dropping any pending name edit focus.
    private void closeScreen() {
        if (mGadgetNameEditText.hasFocus()) {
            mGadgetNameEditText.clearFocus();
        }
        final Activity activity = getParent();
        if (activity instanceof MainActivity) {
            ((MainActivity) activity).changeFragment(new ScanDeviceFragment());
        }
    }

    /*
     * Implementation of {@link GadgetListener}
     */
    @Override
    public void onGadgetConnected(@NonNull Gadget gadget) {
        // ignore... the screen shouldn't even open if not connected -> see onResume()
    }

    @Override
    public void onGadgetDisconnected(@NonNull Gadget gadget) {
        closeScreen();
    }

    @Override
    public void onGadgetValuesReceived(@NonNull Gadget gadget, @NonNull GadgetService service, @NonNull GadgetValue[] values) {
        // Only battery readings affect this screen; other live values are ignored.
        if (service instanceof BatteryService) {
            updateBatteryLevel();
        }
    }

    @Override
    public void onGadgetNewDataPoint(@NonNull Gadget gadget, @NonNull GadgetService service, GadgetDataPoint dataPoint) {
        // Ignore...
    }

    @Override
    public void onGadgetDownloadNewDataPoints(@NonNull Gadget gadget, @NonNull GadgetDownloadService service, @NonNull GadgetDataPoint[] dataPoints) {
        // Ignore...
    }

    @Override
    public void onGadgetDownloadProgress(@NonNull Gadget gadget, @NonNull GadgetDownloadService service, int progress) {
        // NOTE(review): unlike onDownloadCompleted(), this touches views without
        // runOnUiThread(); presumably the library delivers this callback on the
        // main thread — confirm against GadgetListener's threading contract.
        // NOTE(review): Locale.GERMAN is hard-coded for a "%d"-style progress
        // string; the default locale would normally be used here — confirm intent.
        mDownloadProgressBar.setProgressDrawable(mDownloadProgressDrawable);
        mDownloadButtonText.setText(String.format(Locale.GERMAN, getString(R.string.manage_device_download_progress), progress));
        mDownloadProgressBar.setProgress(progress);
    }

    @Override
    public void onSetGadgetLoggingEnabledFailed(@NonNull Gadget gadget, @NonNull GadgetDownloadService service) {
        // Rebuild the UI so the switch falls back to the gadget's actual state.
        initUiElements();
    }

    @Override
    public void onSetLoggerIntervalSuccess(@NonNull final Gadget gadget) {
        final int valueInMilliseconds = getLoggerInterval(gadget);
        if (valueInMilliseconds == UNKNOWN_LOGGING_INTERVAL) {
            return;
        }
        // Show the freshly applied interval in human-readable form.
        final int intervalSeconds = valueInMilliseconds / Interval.ONE_SECOND.getNumberMilliseconds();
        mLoggingIntervalButton.setText(new TimeFormatter(intervalSeconds).getShortTime(getContext().getApplicationContext()));
        if (!isDownloading(gadget)) {
            mLoggingIntervalButton.setEnabled(true);
        }
    }

    @Override
    public void onSetLoggerIntervalFailed(@NonNull Gadget gadget, @NonNull GadgetDownloadService service) {
        initUiElements();
        // Orange text signals the failure to the user.
        mLoggingIntervalButton.setTextColor(mColorOrange);
    }

    @Override
    public void onDownloadFailed(@NonNull Gadget gadget, @NonNull GadgetDownloadService service) {
        mDownloadButtonText.setTextColor(mColorOrange);
        resetAfterDownload(isLoggingStateEditable(gadget), R.string.manage_device_download_failed_retry);
    }

    @Override
    public void onDownloadCompleted(@NonNull final Gadget gadget, @NonNull GadgetDownloadService service) {
        getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                resetAfterDownload(isLoggingStateEditable(gadget), R.string.manage_device_download_completed);
            }
        });
    }

    @Override
    public void onDownloadNoData(@NonNull final Gadget gadget, @NonNull final GadgetDownloadService service) {
        mDownloadButtonText.setTextColor(mColorOrange);
        resetAfterDownload(isLoggingStateEditable(gadget), R.string.manage_device_download_no_data);
    }

    /*
     * Private helpers
     */

    /**
     * Restores the controls disabled during a download and shows a transient
     * result message ({@code stringId}) on the download button; after
     * {@link #DOWNLOAD_COMPLETE_RESET_DELAY_MS} the button returns to its idle
     * "download" state.
     *
     * @param isLoggingStateEditable whether the gadget lets us toggle logging;
     *                               decides which controls are re-enabled.
     * @param stringId               resource id of the result message to show.
     */
    private void resetAfterDownload(boolean isLoggingStateEditable, int stringId) {
        mDownloadProgressBar.setProgress(0);
        mDownloadButtonText.setText(stringId);
        if (isLoggingStateEditable) {
            mLoggingToggle.setEnabled(true);
            // Interval can only be changed while logging is off.
            mLoggingIntervalButton.setEnabled(!mLoggingToggle.isChecked());
        } else {
            mLoggingIntervalButton.setEnabled(true);
        }
        mDownloadButtonReset = new Runnable() {
            @Override
            public void run() {
                mDownloadProgressBar.setVisibility(View.GONE);
                mDownloadButtonText.setText(R.string.label_download);
                mDownloadButtonText.setEnabled(true);
                mDownloadButtonText.setTextColor(mDeviceButtonColors);
            }
        };
        mDownloadButtonText.postDelayed(mDownloadButtonReset, DOWNLOAD_COMPLETE_RESET_DELAY_MS);
    }

    /*
     * Manage Gadget Methods
     */

    // Requests a new logger interval; the button stays disabled until
    // onSetLoggerIntervalSuccess/Failed reports the outcome.
    private void setLoggerInterval(@NonNull final Gadget gadget, final int valueInMilliseconds) {
        final GadgetDownloadService downloadService = getDownloadService(gadget);
        if (downloadService == null) {
            return;
        }
        downloadService.setLoggerInterval(valueInMilliseconds);
        mLoggingIntervalButton.setEnabled(false);
        mLoggingIntervalButton.setText(R.string.label_logging_interval_updating);
    }

    // Returns the logger interval in milliseconds, or UNKNOWN_LOGGING_INTERVAL
    // when the gadget has no download service.
    private int getLoggerInterval(Gadget gadget) {
        final GadgetDownloadService downloadService = getDownloadService(gadget);
        if (downloadService == null) {
            return UNKNOWN_LOGGING_INTERVAL;
        }
        return downloadService.getLoggerInterval();
    }

    private boolean isLoggingStateEnabled(Gadget gadget) {
        final GadgetDownloadService downloadService = getDownloadService(gadget);
        return downloadService != null && downloadService.isGadgetLoggingEnabled();
    }

    private boolean isLoggingStateEditable(Gadget gadget) {
        final GadgetDownloadService downloadService = getDownloadService(gadget);
        return downloadService != null && downloadService.isGadgetLoggingStateEditable();
    }

    private void setLoggingStateEnabled(Gadget gadget, boolean enabled) {
        final GadgetDownloadService downloadService = getDownloadService(gadget);
        if (downloadService == null) {
            return;
        }
        downloadService.setGadgetLoggingEnabled(enabled);
    }

    // A gadget supports downloads iff it exposes a GadgetDownloadService.
    private boolean isDownloadingEnabled(final Gadget gadget) {
        return getDownloadService(gadget) != null;
    }

    private boolean isDownloading(final Gadget gadget) {
        final GadgetDownloadService downloadService = getDownloadService(gadget);
        return downloadService != null && downloadService.isDownloading();
    }

    // Starts the log download and switches the button/progress bar into
    // "downloading" mode; progress arrives via onGadgetDownloadProgress().
    private void downloadLog(Gadget gadget) {
        final GadgetDownloadService downloadService = getDownloadService(gadget);
        if (downloadService == null) {
            return;
        }
        mDownloadButtonText.setText(R.string.manage_device_download_start);
        mDownloadProgressBar.setProgress(0);
        mDownloadProgressBar.setVisibility(View.VISIBLE);
        downloadService.download();
    }

    // Latest battery percentage, or UNKNOWN_BATTERY_LEVEL when no battery
    // service or no value has been reported yet.
    private int getBatteryLevel(Gadget gadget) {
        final GadgetService batteryService = getServiceOfType(gadget, BatteryService.class);
        if (batteryService == null) {
            return UNKNOWN_BATTERY_LEVEL;
        }
        final GadgetValue[] lastValues = batteryService.getLastValues();
        return (lastValues.length > 0) ? lastValues[0].getValue().intValue() : UNKNOWN_BATTERY_LEVEL;
    }

    private GadgetDownloadService getDownloadService(@NonNull final Gadget gadget) {
        return (GadgetDownloadService) getServiceOfType(gadget, GadgetDownloadService.class);
    }

    // Returns the first service of the given type, or null when absent; warns
    // when the gadget exposes more than one (only the first is used).
    private GadgetService getServiceOfType(@NonNull final Gadget gadget,
                                           @NonNull final Class<? extends GadgetService> gadgetServiceClass) {
        final List<GadgetService> services = gadget.getServicesOfType(gadgetServiceClass);
        if (services.size() == 0) {
            return null;
        }
        if (services.size() > 1) {
            Log.w(TAG, String.format("Multiple services of type %s available - Application can only handle one", gadgetServiceClass));
        }
        return services.get(0);
    }

    /*
     * UI Elements
     */

    // (Re)initializes every view from the selected gadget's current state.
    // Called from onResume() and from failure callbacks as a full refresh.
    private void initUiElements() {
        initCustomFonts();
        initIntervalChooser();
        initLoggingSwitch();
        initBatteryBar();
        initDownloadButton();
        initDisconnectButton();
        initGadgetName();
    }

    private void initCustomFonts() {
        final AssetManager assets = getContext().getAssets();
        final Typeface typefaceNormal = Typeface.createFromAsset(assets, TYPEFACE_CONDENSED_LOCATION);
        final Typeface typefaceBold = Typeface.createFromAsset(assets, TYPEFACE_BOLD_LOCATION);
        mBatteryLevelValue.setTypeface(typefaceNormal);
        mGadgetNameEditText.setTypeface(typefaceNormal);
        mGadgetType.setTypeface(typefaceNormal);
        mDisconnectButton.setTypeface(typefaceBold);
        mDownloadButtonText.setTypeface(typefaceNormal);
        mLoggingIntervalButton.setTypeface(typefaceNormal);
        mBatteryBoardBoardView.setTypeface(typefaceNormal);
        mLoggingBoardBoardView.setTypeface(typefaceNormal);
        mIntervalBoardView.setTypeface(typefaceNormal);
        mDownloadBoardView.setTypeface(typefaceNormal);
    }

    private void initIntervalChooser() {
        // Gray until we know the actual interval.
        mLoggingIntervalButton.setTextColor(mColorLightGray);
        final int loggerIntervalMs = getLoggerInterval(mSelectedGadget);
        if (loggerIntervalMs == UNKNOWN_LOGGING_INTERVAL) {
            return;
        }
        final int intervalSeconds = loggerIntervalMs / Interval.ONE_SECOND.getNumberMilliseconds();
        mLoggingIntervalButton.setText(new TimeFormatter(intervalSeconds).getShortTime(getContext().getApplicationContext()));
        mLoggingIntervalButton.setTextColor(mDeviceButtonColors);
        // The interval is only editable while logging is off and no download runs.
        mLoggingIntervalButton.setEnabled(!(isLoggingStateEditable(mSelectedGadget) && isLoggingStateEnabled(mSelectedGadget)) &&
                !isDownloading(mSelectedGadget));
        mLoggingIntervalButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(@NonNull final View v) {
                showAdviceWhenModifyingInterval();
            }
        });
    }

    private void initDisconnectButton() {
        mDisconnectButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(@NonNull final View v) {
                if (mSelectedGadget != null) {
                    mSelectedGadget.disconnect();
                }
                closeScreen();
            }
        });
    }

    // Wires the editable display-name field: persists edits to the device-name
    // database, falls back to the device address when the name is left empty.
    private void initGadgetName() {
        final String deviceAddress = mSelectedGadget.getAddress();
        final String deviceName = DeviceNameDatabaseManager.getInstance().readDeviceName(deviceAddress);
        mGadgetNameEditText.setText(deviceName);
        mGadgetNameEditText.setEnabled(true);
        mSelectedDeviceModel.setDisplayName(deviceName);
        mGadgetType.setText(mSelectedGadget.getName());
        mGadgetNameEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
                if (actionId == EditorInfo.IME_ACTION_DONE) {
                    if (mGadgetNameEditText.getText().toString().isEmpty()) {
                        mGadgetNameEditText.setText(deviceAddress);
                    }
                    mGadgetNameEditText.clearFocus(); // triggers a focus change
                    InputMethodManager imm = (InputMethodManager) getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
                    imm.hideSoftInputFromWindow(mGadgetNameEditText.getWindowToken(), 0);
                    return true;
                }
                return false;
            }
        });
        mGadgetNameEditText.setOnFocusChangeListener(new View.OnFocusChangeListener() {
            @Override
            public void onFocusChange(View v, boolean hasFocus) {
                if (hasFocus) {
                    // Clear the placeholder address so the user types a fresh name.
                    if (mGadgetNameEditText.getText().toString().equals(deviceAddress)) {
                        mGadgetNameEditText.getText().clear();
                    }
                } else {
                    // Losing focus commits the name (see clearFocus() above).
                    String displayName = mGadgetNameEditText.getText().toString();
                    mSelectedDeviceModel.setDisplayName(displayName);
                    DeviceNameDatabaseManager.getInstance().updateDeviceName(deviceAddress, displayName);
                }
            }
        });
    }

    private void initLoggingSwitch() {
        // Detach the listener first so setChecked() below doesn't fire it.
        mLoggingToggle.setOnCheckedChangeListener(null);
        if (isLoggingStateEditable(mSelectedGadget)) {
            mLoggingToggle.setChecked(isLoggingStateEnabled(mSelectedGadget));
            mLoggingToggle.setEnabled(isDownloadingEnabled(mSelectedGadget) && !isDownloading(mSelectedGadget));
            mLoggingToggle.setOnCheckedChangeListener(new OnCheckedChangeListener() {
                @Override
                public void onCheckedChanged(final CompoundButton buttonView, final boolean isChecked) {
                    if (isChecked) {
                        // Enabling logging erases gadget data -> confirm first.
                        showLoggingAdviceAlert();
                        mLoggingIntervalButton.setEnabled(false);
                    } else {
                        setLoggingStateEnabled(mSelectedGadget, false);
                        mLoggingIntervalButton.setEnabled(true);
                    }
                }
            });
        } else {
            // Gadget does not allow toggling logging: hide the whole row.
            mLoggingLayout.setVisibility(View.GONE);
        }
    }

    private void initBatteryBar() {
        updateBatteryLevel();
    }

    private void initDownloadButton() {
        mDownloadProgressBar.setProgressDrawable(mDownloadProgressDrawable);
        if (isDownloading(mSelectedGadget)) {
            mDownloadProgressBar.setVisibility(View.VISIBLE);
        } else {
            mDownloadProgressBar.setVisibility(View.GONE);
        }
        mDownloadButtonText.setEnabled(isDownloadingEnabled(mSelectedGadget) && !isDownloading(mSelectedGadget));
        mDownloadButtonText.setText(R.string.label_download);
        mDownloadButtonText.setTextColor(mDeviceButtonColors);
        mDownloadButtonText.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(final View v) {
                final Activity parent = getParent();
                if (parent == null) {
                    Log.e(TAG, "initDownloadButton -> Parent is null");
                    return;
                }
                // Lock out conflicting controls while the download runs; they
                // are restored in resetAfterDownload().
                mLoggingToggle.setEnabled(false);
                mLoggingIntervalButton.setEnabled(false);
                mDownloadButtonText.setEnabled(false);
                downloadLog(mSelectedGadget);
            }
        });
    }

    // Renders the battery level: hides the bar while unknown, otherwise colors
    // the percentage by charge band (>40 green, >20 yellow, >10 orange, else red).
    private void updateBatteryLevel() {
        final int batteryLevel = getBatteryLevel(mSelectedGadget);
        if (batteryLevel == UNKNOWN_BATTERY_LEVEL) {
            mBatteryBar.setVisibility(View.GONE);
            mBatteryLevelValue.setText(R.string.label_battery_loading);
        } else {
            mBatteryLevelLayout.setVisibility(View.VISIBLE);
            // NOTE(review): Locale.GERMAN is hard-coded for a "%d%%" format —
            // locale-neutral for digits, but confirm this is intentional.
            mBatteryLevelValue.setText(String.format(Locale.GERMAN, "%d%%", batteryLevel));
            if (batteryLevel > 40) {
                mBatteryLevelValue.setTextColor(mColorSensirionGreenDarkened);
            } else if (batteryLevel > 20) {
                mBatteryLevelValue.setTextColor(mColorYellow);
            } else if (batteryLevel > 10) {
                mBatteryLevelValue.setTextColor(mColorOrange);
            } else {
                mBatteryLevelValue.setTextColor(mColorRed);
            }
            mBatteryBar.setVisibility(View.VISIBLE);
            mBatteryBar.setProgress(batteryLevel);
            mBatteryBar.setEnabled(false);
        }
    }

    /*
     * Dialogs
     */

    // Yes/No confirmation shown before opening the interval selector.
    private void showAdviceWhenModifyingInterval() {
        final AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
        builder.setTitle(INTERVAL_MODIFICATION_TITLE);
        builder.setCancelable(false);
        builder.setMessage(INTERVAL_MODIFICATION_MESSAGE);
        builder.setPositiveButton(YES_STRING, new DialogInterface.OnClickListener() {
            public void onClick(@NonNull final DialogInterface dialog, final int which) {
                dialog.cancel();
                showIntervalSelector();
            }
        });
        builder.setNegativeButton(NO_STRING, new DialogInterface.OnClickListener() {
            public void onClick(@NonNull final DialogInterface dialog, final int which) {
                dialog.cancel();
            }
        });
        builder.show();
    }

    // Yes/No confirmation shown before enabling on-gadget logging; "No" reverts
    // the switch that triggered it.
    private void showLoggingAdviceAlert() {
        final AlertDialog.Builder builder = new AlertDialog.Builder(getParent());
        builder.setTitle(ENABLE_LOGGING_STRING);
        builder.setCancelable(false);
        builder.setMessage(GADGET_ENABLE_ADVICE_STRING);
        builder.setPositiveButton(YES_STRING, new DialogInterface.OnClickListener() {
            public void onClick(@NonNull final DialogInterface dialog, final int which) {
                dialog.cancel();
                setLoggingStateEnabled(mSelectedGadget, true);
            }
        });
        builder.setNegativeButton(NO_STRING, new DialogInterface.OnClickListener() {
            public void onClick(@NonNull final DialogInterface dialog, final int which) {
                dialog.cancel();
                mLoggingToggle.setChecked(false);
            }
        });
        builder.show();
    }

    // Lets the user pick one of the predefined logging intervals and applies it.
    private void showIntervalSelector() {
        final AlertDialog.Builder builder = new Builder(getContext());
        builder.setCancelable(false)
                .setTitle(R.string.title_button_choice)
                .setItems(R.array.array_interval_choices, new DialogInterface.OnClickListener() {
                    public void onClick(@NonNull DialogInterface dialog, int which) {
                        final LoggerInterval interval = LoggerInterval.fromNumberElement(which);
                        if (interval == null) {
                            throw new IllegalStateException("Invalid logger interval selected");
                        }
                        setLoggerInterval(mSelectedGadget, interval.getValueInMilliseconds());
                    }
                });
        final AlertDialog dialog = builder.create();
        dialog.show();
    }

    /*
     * Tablet Mode
     */

    /** On tablets, any touch on the fragment toggles the main activity's side menu. */
    public class OnTouchOpenTabletMenuListener implements View.OnTouchListener {
        public boolean onTouch(@NonNull final View v, @NonNull final MotionEvent event) {
            if (IS_TABLET) {
                final MainActivity parent = (MainActivity) getParent();
                if (parent == null) {
                    final String mainActivityName = MainActivity.class.getSimpleName();
                    Log.e(TAG, String.format("onCreateView -> Cannot obtain the %s", mainActivityName));
                } else {
                    parent.toggleTabletMenu();
                }
            }
            return true;
        }
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.yarn.server.federation.utils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.*;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.federation.policies.ConfigurableFederationPolicy;
import org.apache.hadoop.yarn.server.federation.policies.FederationPolicyInitializationContext;
import org.apache.hadoop.yarn.server.federation.policies.dao.WeightedPolicyInfo;
import org.apache.hadoop.yarn.server.federation.resolver.DefaultSubClusterResolverImpl;
import org.apache.hadoop.yarn.server.federation.resolver.SubClusterResolver;
import org.apache.hadoop.yarn.server.federation.store.FederationStateStore;
import org.apache.hadoop.yarn.server.federation.store.records.*;
import org.apache.hadoop.yarn.util.Records;
import java.io.File;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Support class providing common initialization methods to test federation
* policies.
*/
public final class FederationPoliciesTestUtil {
  private FederationPoliciesTestUtil() {
    // disabled.
  }

  // NOTE(review): not referenced anywhere in this class — presumably used by
  // tests elsewhere or dead; verify before removing.
  private static final String FEDR_NODE_PREFIX = "fedr-test-node-";

  /**
   * Creates the standard locality chain of requests for a set of hosts: one
   * node-local request per host, one rack-local request ("/default-rack") per
   * host, and a single trailing off-switch ({@link ResourceRequest#ANY})
   * request.
   *
   * @param hosts hosts to request containers on.
   * @param memory memory (MB) per container.
   * @param vCores virtual cores per container.
   * @param priority priority of the requests.
   * @param containers number of containers per request.
   * @param labelExpression node-label expression, or null for none.
   * @param relaxLocality whether locality may be relaxed.
   * @return the list of generated {@link ResourceRequest}s.
   * @throws YarnException propagated from request creation.
   */
  public static List<ResourceRequest> createResourceRequests(String[] hosts,
      int memory, int vCores, int priority, int containers,
      String labelExpression, boolean relaxLocality) throws YarnException {
    List<ResourceRequest> reqs = new ArrayList<ResourceRequest>();
    for (String host : hosts) {
      ResourceRequest hostReq =
          createResourceRequest(host, memory, vCores, priority, containers,
              labelExpression, relaxLocality);
      reqs.add(hostReq);
      ResourceRequest rackReq =
          createResourceRequest("/default-rack", memory, vCores, priority,
              containers, labelExpression, relaxLocality);
      reqs.add(rackReq);
    }
    ResourceRequest offRackReq =
        createResourceRequest(ResourceRequest.ANY, memory, vCores, priority,
            containers, labelExpression, relaxLocality);
    reqs.add(offRackReq);
    return reqs;
  }

  /** Convenience overload without a node-label expression. */
  protected static ResourceRequest createResourceRequest(String resource,
      int memory, int vCores, int priority, int containers,
      boolean relaxLocality) throws YarnException {
    return createResourceRequest(resource, memory, vCores, priority, containers,
        null, relaxLocality);
  }

  /**
   * Creates a single {@link ResourceRequest} and tags it with the given
   * allocation-request id.
   */
  @SuppressWarnings("checkstyle:parameternumber")
  public static ResourceRequest createResourceRequest(long id, String resource,
      int memory, int vCores, int priority, int containers,
      String labelExpression, boolean relaxLocality) throws YarnException {
    ResourceRequest out =
        createResourceRequest(resource, memory, vCores, priority, containers,
            labelExpression, relaxLocality);
    out.setAllocationRequestId(id);
    return out;
  }

  /**
   * Creates a single {@link ResourceRequest} for the given resource name with
   * the specified capability, priority and container count. The label
   * expression is only set when non-null.
   */
  public static ResourceRequest createResourceRequest(String resource,
      int memory, int vCores, int priority, int containers,
      String labelExpression, boolean relaxLocality) throws YarnException {
    ResourceRequest req = Records.newRecord(ResourceRequest.class);
    req.setResourceName(resource);
    req.setNumContainers(containers);
    Priority pri = Records.newRecord(Priority.class);
    pri.setPriority(priority);
    req.setPriority(pri);
    Resource capability = Records.newRecord(Resource.class);
    capability.setMemorySize(memory);
    capability.setVirtualCores(vCores);
    req.setCapability(capability);
    if (labelExpression != null) {
      req.setNodeLabelExpression(labelExpression);
    }
    req.setRelaxLocality(relaxLocality);
    return req;
  }

  /**
   * Initializes the given policy context with a serialized copy of
   * {@code policyInfo} (queue "queue1"), wires a mocked
   * {@link FederationStateStore} returning {@code activeSubclusters} into the
   * shared {@link FederationStateStoreFacade}, and reinitializes the policy.
   *
   * @param activeSubclusters subclusters served by the mocked store; an empty
   *                          map is used when null.
   */
  public static void initializePolicyContext(
      FederationPolicyInitializationContext fpc, ConfigurableFederationPolicy
      policy, WeightedPolicyInfo policyInfo,
      Map<SubClusterId, SubClusterInfo> activeSubclusters)
      throws YarnException {
    ByteBuffer buf = policyInfo.toByteBuffer();
    fpc.setSubClusterPolicyConfiguration(SubClusterPolicyConfiguration
        .newInstance("queue1", policy.getClass().getCanonicalName(), buf));
    // The facade is a singleton: reinitialize it with the mock so that all
    // code paths observing it during this test see the stubbed subclusters.
    FederationStateStoreFacade facade = FederationStateStoreFacade
        .getInstance();
    FederationStateStore fss = mock(FederationStateStore.class);
    if (activeSubclusters == null) {
      activeSubclusters = new HashMap<SubClusterId, SubClusterInfo>();
    }
    GetSubClustersInfoResponse response = GetSubClustersInfoResponse
        .newInstance(new ArrayList<SubClusterInfo>(activeSubclusters.values()));
    when(fss.getSubClusters(any())).thenReturn(response);
    facade.reinitialize(fss, new Configuration());
    fpc.setFederationStateStoreFacade(facade);
    policy.reinitialize(fpc);
  }

  /**
   * Overload that builds a fresh context with a default resolver, a default
   * facade, and home subcluster "homesubcluster".
   */
  public static void initializePolicyContext(
      ConfigurableFederationPolicy policy,
      WeightedPolicyInfo policyInfo, Map<SubClusterId,
      SubClusterInfo> activeSubclusters) throws YarnException {
    FederationPolicyInitializationContext context =
        new FederationPolicyInitializationContext(null, initResolver(),
            initFacade(), SubClusterId.newInstance("homesubcluster"));
    initializePolicyContext(context, policy, policyInfo, activeSubclusters);
  }

  /**
   * Initialize a {@link SubClusterResolver}.
   *
   * @return a subcluster resolver for tests.
   */
  public static SubClusterResolver initResolver() {
    YarnConfiguration conf = new YarnConfiguration();
    SubClusterResolver resolver =
        new DefaultSubClusterResolverImpl();
    URL url =
        Thread.currentThread().getContextClassLoader().getResource("nodes");
    if (url == null) {
      throw new RuntimeException(
          "Could not find 'nodes' dummy file in classpath");
    }
    // This will get rid of the beginning '/' in the url in Windows env
    File file = new File(url.getPath());
    conf.set(YarnConfiguration.FEDERATION_MACHINE_LIST, file.getPath());
    resolver.setConf(conf);
    resolver.load();
    return resolver;
  }

  /**
   * Initialize a main-memory {@link FederationStateStoreFacade} used for
   * testing, with a mock resolver.
   *
   * @param subClusterInfos the list of subclusters to be served on
   * getSubClusters invocations.
   * @param policyConfiguration the policy configuration served on both the
   * single-policy and all-policies queries.
   *
   * @return the facade.
   *
   * @throws YarnException in case the initialization is not successful.
   */
  public static FederationStateStoreFacade initFacade(
      List<SubClusterInfo> subClusterInfos, SubClusterPolicyConfiguration
      policyConfiguration) throws YarnException {
    FederationStateStoreFacade goodFacade = FederationStateStoreFacade
        .getInstance();
    FederationStateStore fss = mock(FederationStateStore.class);
    GetSubClustersInfoResponse response = GetSubClustersInfoResponse
        .newInstance(subClusterInfos);
    when(fss.getSubClusters(any())).thenReturn(response);
    List<SubClusterPolicyConfiguration> configurations = new ArrayList<>();
    configurations.add(policyConfiguration);
    GetSubClusterPoliciesConfigurationsResponse policiesResponse =
        GetSubClusterPoliciesConfigurationsResponse
            .newInstance(configurations);
    when(fss.getPoliciesConfigurations(any())).thenReturn(policiesResponse);
    GetSubClusterPolicyConfigurationResponse policyResponse =
        GetSubClusterPolicyConfigurationResponse
            .newInstance(policyConfiguration);
    when(fss.getPolicyConfiguration(any())).thenReturn(policyResponse);
    goodFacade.reinitialize(fss, new Configuration());
    return goodFacade;
  }

  /**
   * Initialize a main-memory {@link FederationStateStoreFacade} used for
   * testing, with a mock resolver, no subclusters and a mocked policy
   * configuration.
   *
   * @return the facade.
   *
   * @throws YarnException in case the initialization is not successful.
   */
  public static FederationStateStoreFacade initFacade() throws YarnException {
    return initFacade(new ArrayList<>(), mock(SubClusterPolicyConfiguration
        .class));
  }
}
|
|
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.gui.feature;
import boofcv.struct.feature.AssociatedIndex;
import boofcv.struct.geo.AssociatedPair;
import georegression.struct.point.Point2D_F64;
import org.ddogleg.struct.FastAccess;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.awt.event.MouseListener;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Random;
/**
* Shows which two features are associated with each other. An individual feature
* can be shown alone by clicking on it.
*
* @author Peter Abeles
*/
@SuppressWarnings({"NullAway.Init"})
public class AssociationPanel extends CompareTwoImagePanel implements MouseListener {
// lock for all the data structures below
final Object lock = new Object();
// which features are associated with each other
private int[] assocLeft, assocRight;
// color of each points. Randomly select at runtime
Color[] colors;
public AssociationPanel( int borderSize ) {
super(borderSize, true);
}
public void setAssociation( List<Point2D_F64> leftPts, List<Point2D_F64> rightPts,
FastAccess<AssociatedIndex> matches ) {
synchronized (lock) {
List<Point2D_F64> allLeft = new ArrayList<>();
List<Point2D_F64> allRight = new ArrayList<>();
assocLeft = new int[matches.size()];
assocRight = new int[matches.size()];
for (int i = 0; i < matches.size(); i++) {
AssociatedIndex a = matches.get(i);
allLeft.add(leftPts.get(a.src));
allRight.add(rightPts.get(a.dst));
assocLeft[i] = i;
assocRight[i] = i;
}
setLocation(allLeft, allRight);
Random rand = new Random(234);
colors = new Color[matches.size()];
for (int i = 0; i < matches.size(); i++) {
colors[i] = new Color(rand.nextInt() | 0xFF000000);
}
}
}
public void setAssociation( List<AssociatedPair> matches ) {
synchronized (lock) {
List<Point2D_F64> leftPts = new ArrayList<>();
List<Point2D_F64> rightPts = new ArrayList<>();
for (AssociatedPair p : matches) {
leftPts.add(p.p1);
rightPts.add(p.p2);
}
setLocation(leftPts, rightPts);
assocLeft = new int[leftPts.size()];
assocRight = new int[rightPts.size()];
for (int i = 0; i < assocLeft.length; i++) {
assocLeft[i] = i;
assocRight[i] = i;
}
Random rand = new Random(234);
colors = new Color[matches.size()];
for (int i = 0; i < colors.length; i++) {
colors[i] = new Color(rand.nextInt() | 0xFF000000);
}
}
}
public void setAssociation( List<Point2D_F64> leftPts, List<Point2D_F64> rightPts ) {
synchronized (lock) {
setLocation(leftPts, rightPts);
assocLeft = new int[leftPts.size()];
assocRight = new int[rightPts.size()];
for (int i = 0; i < assocLeft.length; i++) {
assocLeft[i] = i;
assocRight[i] = i;
}
Random rand = new Random(234);
colors = new Color[leftPts.size()];
for (int i = 0; i < colors.length; i++) {
colors[i] = new Color(rand.nextInt() | 0xFF000000);
}
}
}
@Override
protected void drawFeatures( Graphics2D g2,
double scaleLeft, int leftX, int leftY,
double scaleRight, int rightX, int rightY ) {
synchronized (lock) {
if (selected.isEmpty())
drawAllFeatures(g2, scaleLeft, scaleRight, rightX);
else {
for (int selectedIndex : selected) {
// draw just an individual feature pair
Point2D_F64 l, r;
Color color;
if (selectedIsLeft) {
l = leftPts.get(selectedIndex);
if (assocLeft[selectedIndex] < 0) {
r = null;
color = null;
} else {
r = rightPts.get(assocLeft[selectedIndex]);
color = colors[selectedIndex];
}
} else {
r = rightPts.get(selectedIndex);
if (assocRight[selectedIndex] < 0) {
l = null;
color = null;
} else {
l = leftPts.get(assocRight[selectedIndex]);
color = colors[assocRight[selectedIndex]];
}
}
if (color == null) // clicking on something with no association is annoying
drawAllFeatures(g2, scaleLeft, scaleRight, rightX);
else
drawAssociation(g2, scaleLeft, scaleRight, rightX, l, r, color);
}
}
}
}
private void drawAllFeatures( Graphics2D g2, double scaleLeft, double scaleRight, int rightX ) {
if (assocLeft == null || rightPts == null || leftPts == null)
return;
for (int i = 0; i < assocLeft.length; i++) {
if (assocLeft[i] == -1)
continue;
Point2D_F64 l = leftPts.get(i);
Point2D_F64 r = rightPts.get(assocLeft[i]);
Color color = colors[i];
drawAssociation(g2, scaleLeft, scaleRight, rightX, l, r, color);
}
}
/**
 * Draws one association. A matched pair (both points non-null) gets a colored
 * dot on each image plus a connecting line; a feature with no match on the
 * other side is marked with a red dot only.
 */
private void drawAssociation( Graphics2D g2, double scaleLeft, double scaleRight, int rightX,
                              @Nullable Point2D_F64 l, @Nullable Point2D_F64 r, Color color ) {
    if (r == null) {
        // Left-only feature: red marker at its scaled location.
        Objects.requireNonNull(l);
        VisualizeFeatures.drawPoint(g2, (int)(scaleLeft*l.x), (int)(scaleLeft*l.y), Color.RED);
    } else if (l == null) {
        // Right-only feature: red marker, shifted by the right image's x offset.
        Objects.requireNonNull(r);
        VisualizeFeatures.drawPoint(g2, (int)(scaleRight*r.x) + rightX, (int)(scaleRight*r.y), Color.RED);
    } else {
        int lx = (int)(scaleLeft*l.x);
        int ly = (int)(scaleLeft*l.y);
        int rx = (int)(scaleRight*r.x) + rightX;
        int ry = (int)(scaleRight*r.y);
        VisualizeFeatures.drawPoint(g2, lx, ly, color);
        VisualizeFeatures.drawPoint(g2, rx, ry, color);
        g2.setColor(color);
        g2.drawLine(lx, ly, rx, ry);
    }
}
/**
 * A selected point is valid only when it has an association with a feature
 * on the opposite image (a non-negative entry in the relevant table).
 */
@Override
protected boolean isValidPoint( int index ) {
    int[] assoc = selectedIsLeft ? assocLeft : assocRight;
    return assoc[index] >= 0;
}
}
|
|
package com.cultomebizlang.language.interpreter;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
import com.cultome.bizlang.language.BizlangArray;
import com.cultome.bizlang.language.BizlangAssignation;
import com.cultome.bizlang.language.BizlangBlock;
import com.cultome.bizlang.language.BizlangConditionalExpression;
import com.cultome.bizlang.language.BizlangCustomLogicOperation;
import com.cultome.bizlang.language.BizlangExpression;
import com.cultome.bizlang.language.BizlangFunction;
import com.cultome.bizlang.language.BizlangLogicOperation;
import com.cultome.bizlang.language.BizlangMathOperation;
import com.cultome.bizlang.language.BizlangRange;
import com.cultome.bizlang.language.BizlangRepetition;
import com.cultome.bizlang.language.BizlangRule;
import com.cultome.bizlang.language.BizlangSwitch;
import com.cultome.bizlang.language.BizlangSwitchBlock;
import com.cultome.bizlang.language.BizlangValue;
import com.cultome.bizlang.language.parser.BizlangBaseListener;
import com.cultome.bizlang.language.parser.BizlangLexer;
import com.cultome.bizlang.language.parser.BizlangParser.ArrayContext;
import com.cultome.bizlang.language.parser.BizlangParser.AssignationContext;
import com.cultome.bizlang.language.parser.BizlangParser.BlockContext;
import com.cultome.bizlang.language.parser.BizlangParser.CaseBlockContext;
import com.cultome.bizlang.language.parser.BizlangParser.ConditionalContext;
import com.cultome.bizlang.language.parser.BizlangParser.CstmLogOpContext;
import com.cultome.bizlang.language.parser.BizlangParser.DefRuleContext;
import com.cultome.bizlang.language.parser.BizlangParser.ElseBlkContext;
import com.cultome.bizlang.language.parser.BizlangParser.FnctCallContext;
import com.cultome.bizlang.language.parser.BizlangParser.LogicOpContext;
import com.cultome.bizlang.language.parser.BizlangParser.MathExprContext;
import com.cultome.bizlang.language.parser.BizlangParser.ParamLstContext;
import com.cultome.bizlang.language.parser.BizlangParser.RangeContext;
import com.cultome.bizlang.language.parser.BizlangParser.RepetitionContext;
import com.cultome.bizlang.language.parser.BizlangParser.SwtchContext;
import com.cultome.bizlang.language.parser.BizlangParser.ValueContext;
/**
 * ANTLR parse-tree listener that builds an {@link ExecutionFlow} of
 * {@link BizlangExpression} nodes from a Bizlang parse tree.
 *
 * <p>Two parallel stacks are kept in lock-step: {@code buffer} holds the
 * expression objects under construction and {@code parsingStatus} records
 * what kind of construct each buffered expression is. Every {@code enter*}
 * callback pushes one entry on each stack; every {@code exit*} callback pops
 * both and attaches the finished expression to its parent (the new stack top),
 * or to the flow when the stacks become empty.
 */
public class TreeListener extends BizlangBaseListener {
    /** Marker id for an inline array complex type. */
    public static final int COMPLEX_TYPE_ARRAY = 100;
    /** Marker id for an inline range complex type. */
    public static final int COMPLEX_TYPE_RANGE = 101;

    // State of each partially-built expression, parallel to 'buffer'.
    private Stack<ParsingStatus> parsingStatus;
    // Expressions under construction; top is the innermost open construct.
    private Stack<BizlangExpression> buffer;
    // Accumulates finished top-level expressions and rule definitions.
    private ExecutionFlow flow;

    @Override
    public String toString() {
        return parsingStatus.toString() + "\n";
    }

    public TreeListener() {
        flow = new ExecutionFlow();
        parsingStatus = new Stack<ParsingStatus>();
        buffer = new Stack<BizlangExpression>();
    }

    /** @return the flow accumulated so far; complete once the tree walk finishes. */
    public ExecutionFlow getExecutionFlow() {
        return flow;
    }

    /** Opens a function-call expression; the first terminal child is the function name. */
    @Override
    public void enterFnctCall(FnctCallContext ctx) {
        BizlangFunction fnct = new BizlangFunction(ctx.getChild(TerminalNode.class, 0).getText(), ctx.getStart().getLine());
        buffer.push(fnct);
        parsingStatus.push(ParsingStatus.PARSING_FNCT);
    }

    /** Opens an assignment; the ID token is the assignment target. */
    @Override
    public void enterAssignation(AssignationContext ctx) {
        BizlangAssignation assign = new BizlangAssignation(ctx.ID().getText(), ctx.getStart().getLine());
        buffer.push(assign);
        parsingStatus.push(ParsingStatus.ASSIGNING_VAL);
    }

    /** Opens a math expression; the first terminal child is the operator. */
    @Override
    public void enterMathExpr(MathExprContext ctx) {
        BizlangMathOperation mathOperation = new BizlangMathOperation(ctx.getChild(TerminalNode.class, 0).getText(), ctx.getStart().getLine());
        buffer.push(mathOperation);
        parsingStatus.push(ParsingStatus.PARSING_MATH_EXPR);
    }

    /** Opens a conditional (if-style) expression. */
    @Override
    public void enterConditional(ConditionalContext ctx) {
        BizlangConditionalExpression condExpr = new BizlangConditionalExpression(ctx.getChild(TerminalNode.class, 0).getText(), ctx.getStart().getLine());
        buffer.push(condExpr);
        parsingStatus.push(ParsingStatus.PARSING_CONDITION);
    }

    /** Opens a logic comparison; the first terminal child is the comparison operator. */
    @Override
    public void enterLogicOp(LogicOpContext ctx) {
        BizlangLogicOperation condExpr = new BizlangLogicOperation(ctx.getChild(TerminalNode.class, 0).getText(), ctx.getStart().getLine());
        buffer.push(condExpr);
        parsingStatus.push(ParsingStatus.PARSING_LOGIC_COMP);
    }

    /** Opens a plain statement block. */
    @Override
    public void enterBlock(BlockContext ctx) {
        BizlangBlock block = new BizlangBlock("__block__", ctx.getStart().getLine());
        buffer.push(block);
        parsingStatus.push(ParsingStatus.PARSING_BLOCK);
    }

    /** Opens an else-branch block; distinguished by status so exit can route it. */
    @Override
    public void enterElseBlk(ElseBlkContext ctx) {
        BizlangBlock block = new BizlangBlock("__else_block__", ctx.getStart().getLine());
        buffer.push(block);
        parsingStatus.push(ParsingStatus.PARSING_ELSE_BLOCK);
    }

    /**
     * Opens a repetition (loop). Child 1 is the loop variable; child 3, when
     * present, names a referenced collection (absent when the collection is inline).
     */
    @Override
    public void enterRepetition(RepetitionContext ctx) {
        BizlangRepetition repetition = new BizlangRepetition(ctx.getChild(TerminalNode.class, 0).getText(), ctx.getStart().getLine());
        repetition.setRepetitionVarName(ctx.getChild(TerminalNode.class, 1).getText());
        TerminalNode collRefNameNode = ctx.getChild(TerminalNode.class, 3);
        String collRefName = null;
        if(collRefNameNode != null){
            // the array is NOT inline; it is referenced by name
            collRefName = collRefNameNode.getText();
        }
        repetition.setCollectionName(collRefName);
        buffer.push(repetition);
        parsingStatus.push(ParsingStatus.PARSING_REPETITION);
    }

    /** Opens an array literal; elements are attached as values are exited. */
    @Override
    public void enterArray(ArrayContext ctx) {
        BizlangArray array = new BizlangArray(ctx.getStart().getLine());
        buffer.push(array);
        parsingStatus.push(ParsingStatus.PARSING_ARRAY);
    }

    /**
     * Opens a range. When the range is NOT a sub-part of a value being parsed,
     * its limits are extracted eagerly; otherwise they arrive via exitValue.
     */
    @Override
    public void enterRange(RangeContext ctx) {
        BizlangRange range;
        // FIX: guard against an empty status stack (range at top level) before peeking,
        // which previously could throw EmptyStackException.
        if(parsingStatus.isEmpty() || !parsingStatus.peek().equals(ParsingStatus.GETTING_VALUE)){
            range = getRange(ctx);
        } else {
            range = new BizlangRange(ctx.getStart().getLine());
        }
        buffer.push(range);
        parsingStatus.push(ParsingStatus.PARSING_RANGE);
    }

    /** Opens a value node (literal, identifier, or inline complex type). */
    @Override
    public void enterValue(ValueContext ctx) {
        BizlangValue value = getValue(ctx);
        buffer.push(value);
        parsingStatus.push(ParsingStatus.GETTING_VALUE);
    }

    /** Opens a custom logic operation (user-defined predicate). */
    @Override
    public void enterCstmLogOp(CstmLogOpContext ctx) {
        BizlangCustomLogicOperation cstmLogOp = new BizlangCustomLogicOperation(ctx.getChild(TerminalNode.class, 0).getText(), ctx.getStart().getLine());
        buffer.push(cstmLogOp);
        parsingStatus.push(ParsingStatus.PARSING_CSTM_LOG_OP);
    }

    /** Opens a switch expression. */
    @Override
    public void enterSwtch(SwtchContext ctx) {
        BizlangSwitch swtch = new BizlangSwitch(ctx.getStart().getLine());
        buffer.push(swtch);
        parsingStatus.push(ParsingStatus.PARSING_SWITCH);
    }

    /** Opens a case block inside a switch. */
    @Override
    public void enterCaseBlock(CaseBlockContext ctx) {
        BizlangSwitchBlock cstmLogOp = new BizlangSwitchBlock(ctx.getChild(TerminalNode.class, 0).getText(), ctx.getStart().getLine());
        buffer.push(cstmLogOp);
        parsingStatus.push(ParsingStatus.PARSING_CASE_BLOCK);
    }

    /** Opens a rule definition; the quoted rule name is child 1, quotes stripped. */
    @Override
    public void enterDefRule(DefRuleContext ctx) {
        String fnctNameColons = ctx.getChild(TerminalNode.class, 1).getText();
        BizlangRule rule = new BizlangRule(fnctNameColons.replaceAll("\"", ""), ctx.getStart().getLine());
        buffer.push(rule);
        parsingStatus.push(ParsingStatus.PARSING_RULE);
    }

    // Every exit* callback funnels into exitExpression(), which performs the
    // pop-and-attach bookkeeping common to all constructs.

    @Override
    public void exitDefRule(DefRuleContext ctx) {
        exitExpression();
    }

    @Override
    public void exitCaseBlock(CaseBlockContext ctx) {
        exitExpression();
    }

    @Override
    public void exitSwtch(SwtchContext ctx) {
        exitExpression();
    }

    @Override
    public void exitCstmLogOp(CstmLogOpContext ctx) {
        exitExpression();
    }

    @Override
    public void exitRange(RangeContext ctx) {
        exitExpression();
    }

    @Override
    public void exitArray(ArrayContext ctx) {
        exitExpression();
    }

    @Override
    public void exitRepetition(RepetitionContext ctx) {
        exitExpression();
    }

    @Override
    public void exitBlock(BlockContext ctx) {
        exitExpression();
    }

    @Override
    public void exitElseBlk(ElseBlkContext ctx) {
        exitExpression();
    }

    @Override
    public void exitLogicOp(LogicOpContext ctx) {
        exitExpression();
    }

    @Override
    public void exitConditional(ConditionalContext ctx) {
        exitExpression();
    }

    @Override
    public void exitFnctCall(FnctCallContext ctx) {
        exitExpression();
    }

    @Override
    public void exitAssignation(AssignationContext ctx) {
        exitExpression();
    }

    @Override
    public void exitMathExpr(MathExprContext ctx) {
        exitExpression();
    }

    @Override
    public void exitValue(ValueContext ctx) {
        exitExpression();
    }

    /**
     * Pops the finished expression and attaches it to its parent (the new top
     * of {@code buffer}) based on the parent's parsing state. Rules are also
     * registered with the flow, and expressions finishing with an empty stack
     * are top-level and go straight into the flow.
     */
    private void exitExpression(){
        ParsingStatus prevStatus = parsingStatus.pop();
        BizlangExpression r = buffer.pop();
        if(prevStatus.equals(ParsingStatus.PARSING_RULE)){
            flow.addRule((BizlangRule) r);
        }
        if(parsingStatus.isEmpty()){
            flow.addToFlow(r);
        } else {
            switch(parsingStatus.peek()){
                case PARSING_MATH_EXPR:
                    ((BizlangMathOperation) buffer.peek()).addParam(r);
                    break;
                case PARSING_FNCT:
                    ((BizlangFunction) buffer.peek()).addParam(r);
                    break;
                case ASSIGNING_VAL:
                    ((BizlangAssignation) buffer.peek()).addLValue(r);
                    break;
                case PARSING_CONDITION:
                    // A conditional's children are its condition (plain or custom) and its then-block.
                    if(prevStatus.equals(ParsingStatus.PARSING_LOGIC_COMP)){
                        ((BizlangConditionalExpression) buffer.peek()).addCondition((BizlangLogicOperation) r);
                    } else if(prevStatus.equals(ParsingStatus.PARSING_CSTM_LOG_OP)){
                        ((BizlangConditionalExpression) buffer.peek()).addCondition((BizlangCustomLogicOperation) r);
                    } else if(prevStatus.equals(ParsingStatus.PARSING_BLOCK)){
                        ((BizlangConditionalExpression) buffer.peek()).addBlock((BizlangBlock) r);
                    }
                    break;
                case PARSING_LOGIC_COMP:
                    ((BizlangLogicOperation) buffer.peek()).addParam((BizlangValue) r);
                    break;
                case PARSING_CSTM_LOG_OP:
                    if(prevStatus.equals(ParsingStatus.PARSING_BLOCK)){
                        // The block belongs to the conditional that encloses this custom op.
                        ((BizlangConditionalExpression) buffer.elementAt(buffer.size() - 2)).addBlock((BizlangBlock) r);
                    } else {
                        ((BizlangCustomLogicOperation) buffer.peek()).addParam((BizlangValue) r);
                    }
                    break;
                case PARSING_BLOCK:
                case PARSING_ELSE_BLOCK:
                    if(prevStatus.equals(ParsingStatus.PARSING_ELSE_BLOCK)){
                        // Walk down to the conditional that owns this else-block;
                        // it sits one or two entries below the block on the stack.
                        BizlangExpression elementAt = buffer.elementAt(buffer.size() - 2);
                        if(elementAt instanceof BizlangConditionalExpression){
                            ((BizlangConditionalExpression) elementAt).addElseBlock((BizlangBlock) r);
                        } else {
                            ((BizlangConditionalExpression) buffer.elementAt(buffer.size() - 3)).addElseBlock((BizlangBlock) r);
                        }
                    } else {
                        ((BizlangBlock) buffer.peek()).addExpression((BizlangExpression) r);
                    }
                    break;
                case PARSING_REPETITION:
                    if(prevStatus.equals(ParsingStatus.PARSING_BLOCK)){
                        ((BizlangRepetition) buffer.peek()).addBlock((BizlangBlock) r);
                    } else if(prevStatus.equals(ParsingStatus.PARSING_ARRAY)){
                        ((BizlangRepetition) buffer.peek()).setCollection((BizlangArray) r);
                    } else if(prevStatus.equals(ParsingStatus.PARSING_RANGE)){
                        ((BizlangRepetition) buffer.peek()).setCollection((BizlangRange) r);
                    }
                    break;
                case PARSING_ARRAY:
                    ((BizlangArray) buffer.peek()).addElement((BizlangValue) r);
                    break;
                case PARSING_RANGE:
                    ((BizlangRange) buffer.peek()).addLimit((BizlangValue) r);
                    break;
                case PARSING_SWITCH:
                    if(prevStatus.equals(ParsingStatus.GETTING_VALUE)){
                        ((BizlangSwitch) buffer.peek()).addReference((BizlangValue) r);
                    } else if(prevStatus.equals(ParsingStatus.PARSING_CASE_BLOCK)){
                        ((BizlangSwitch) buffer.peek()).addCase((BizlangSwitchBlock) r);
                    }
                    break;
                case PARSING_CASE_BLOCK:
                    BizlangSwitchBlock bizlangSwitchBlock = (BizlangSwitchBlock) buffer.peek();
                    if(prevStatus.equals(ParsingStatus.GETTING_VALUE)){
                        // First value in a case block is its match condition; later ones are body expressions.
                        if(bizlangSwitchBlock.getCondition() == null){
                            bizlangSwitchBlock.addCondition((BizlangValue) r);
                        } else {
                            bizlangSwitchBlock.addExpression((BizlangExpression) r);
                        }
                    } else {
                        bizlangSwitchBlock.addExpression((BizlangExpression) r);
                    }
                    break;
                case PARSING_RULE:
                    ((BizlangRule) buffer.peek()).addExpression((BizlangExpression) r);
                    break; // FIX: missing break; previously fell through (harmlessly) into the no-op cases below
                case WAITING:
                case GETTING_VALUE:
                    break;
            }
        }
    }

    /**
     * Converts a value context into a {@link BizlangValue}: either a primitive
     * terminal, or an inline complex type (array or range).
     *
     * @throws RuntimeException when the value's type cannot be determined
     */
    private BizlangValue getValue(ValueContext ctx) {
        TerminalNode valueNode = ctx.getChild(TerminalNode.class, 0);
        if(valueNode != null){
            return getPrimitiveValue(valueNode, ctx.getStart().getLine());
        } else {
            // complex type: inline array or range
            if(ctx.getChild(0) instanceof ArrayContext){
                List<BizlangValue> values = extractValuesFromParamList((ParamLstContext) ctx.getChild(0).getChild(1));
                return new BizlangArray(ctx.getChild(0).getText(), ctx.getStart().getLine(), values);
            } else if(ctx.getChild(0) instanceof RangeContext){
                return getRange((RangeContext) ctx.getChild(0));
            }
        }
        throw new RuntimeException("Unknown type. [" + ctx.getText() + "]");
    }

    /** Builds a range from its two limit terminals (children 1 and 3 of the context). */
    private BizlangRange getRange(RangeContext ctx) {
        BizlangValue lowLimit = getPrimitiveValue((TerminalNode) ctx.getChild(1), ctx.getStart().getLine());
        BizlangValue highLimit = getPrimitiveValue((TerminalNode) ctx.getChild(3), ctx.getStart().getLine());
        return new BizlangRange("_range_", ctx.getStart().getLine(), lowLimit, highLimit);
    }

    /**
     * Wraps a primitive terminal (string, identifier, number, object property
     * or date) into a {@link BizlangValue} tagged with its lexer token type.
     *
     * @throws RuntimeException for unrecognized token types
     */
    private BizlangValue getPrimitiveValue(TerminalNode valueNode, int srcLineDefinedAt) {
        switch (valueNode.getSymbol().getType()) {
            case BizlangLexer.STR:
                return new BizlangValue(BizlangLexer.STR, valueNode.getText(), srcLineDefinedAt);
            case BizlangLexer.ID:
                return new BizlangValue(BizlangLexer.ID, valueNode.getText(), srcLineDefinedAt);
            case BizlangLexer.NBR:
                return new BizlangValue(BizlangLexer.NBR, valueNode.getText(), srcLineDefinedAt);
            case BizlangLexer.OBJPROP:
                return new BizlangValue(BizlangLexer.OBJPROP, valueNode.getText(), srcLineDefinedAt);
            case BizlangLexer.DATE:
                return new BizlangValue(BizlangLexer.DATE, valueNode.getText(), srcLineDefinedAt);
            default:
                throw new RuntimeException("Symbol type unknown in line " + srcLineDefinedAt + ". [" + valueNode.getSymbol().getType() + "]");
        }
    }

    /**
     * Recursively flattens a parameter-list context into the values it
     * contains, skipping separator terminals.
     *
     * @throws RuntimeException when an unexpected child node type is found
     */
    private List<BizlangValue> extractValuesFromParamList(ParamLstContext values) {
        ArrayList<BizlangValue> arrayValues = new ArrayList<BizlangValue>();
        for(int i = 0; i < values.getChildCount(); i++){
            ParseTree child = values.getChild(i);
            if(child instanceof ValueContext){
                arrayValues.add(getValue((ValueContext) child));
            } else if(child instanceof ParamLstContext){
                arrayValues.addAll(extractValuesFromParamList((ParamLstContext) child));
            } else if(child instanceof TerminalNode){
                // ignore the separators
            } else {
                throw new RuntimeException("Unknown type in inner array value. [" + child.getClass().getName() + "]");
            }
        }
        return arrayValues;
    }
}
|
|
package org.camunda.bpm.engine.rest.history;
import static com.jayway.restassured.RestAssured.expect;
import static com.jayway.restassured.RestAssured.given;
import static com.jayway.restassured.path.json.JsonPath.from;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.Response.Status;
import javax.xml.registry.InvalidRequestException;
import org.camunda.bpm.engine.history.HistoricDecisionInstance;
import org.camunda.bpm.engine.history.HistoricDecisionInstanceQuery;
import org.camunda.bpm.engine.impl.calendar.DateTimeUtil;
import org.camunda.bpm.engine.impl.digest._apacheCommonsCodec.Base64;
import org.camunda.bpm.engine.rest.AbstractRestServiceTest;
import org.camunda.bpm.engine.rest.dto.VariableValueDto;
import org.camunda.bpm.engine.rest.dto.converter.StringArrayConverter;
import org.camunda.bpm.engine.rest.dto.history.HistoricDecisionInputInstanceDto;
import org.camunda.bpm.engine.rest.dto.history.HistoricDecisionOutputInstanceDto;
import org.camunda.bpm.engine.rest.helper.MockProvider;
import org.camunda.bpm.engine.variable.value.*;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
import com.jayway.restassured.http.ContentType;
import com.jayway.restassured.response.Response;
public abstract class AbstractHistoricDecisionInstanceRestServiceQueryTest extends AbstractRestServiceTest {
protected static final String HISTORIC_DECISION_INSTANCE_RESOURCE_URL = TEST_RESOURCE_ROOT_PATH + "/history/decision-instance";
protected static final String HISTORIC_DECISION_INSTANCE_COUNT_RESOURCE_URL = HISTORIC_DECISION_INSTANCE_RESOURCE_URL + "/count";
protected HistoricDecisionInstanceQuery mockedQuery;
// Installs a fresh mocked query, pre-populated with the provider's default
// historic decision instances, before every test.
@Before
public void setUpRuntimeData() {
    mockedQuery = setUpMockHistoricDecisionInstanceQuery(MockProvider.createMockHistoricDecisionInstances());
}
/**
 * Creates a mocked {@link HistoricDecisionInstanceQuery} whose list() returns
 * the given instances (and count() their number), and wires it into the
 * engine's history service so the REST resource under test picks it up.
 */
protected HistoricDecisionInstanceQuery setUpMockHistoricDecisionInstanceQuery(List<HistoricDecisionInstance> mockedHistoricDecisionInstances) {
    HistoricDecisionInstanceQuery query = mock(HistoricDecisionInstanceQuery.class);

    when(query.list()).thenReturn(mockedHistoricDecisionInstances);
    when(query.count()).thenReturn((long) mockedHistoricDecisionInstances.size());
    when(processEngine.getHistoryService().createHistoricDecisionInstanceQuery()).thenReturn(query);

    return query;
}
// An empty filter value must still be accepted and answered with 200 OK.
@Test
public void testEmptyQuery() {
    given()
        .queryParam("caseDefinitionKey", "")
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
}
// Without any parameters the resource should issue a plain, unfiltered list().
@Test
public void testNoParametersQuery() {
    expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    verify(mockedQuery).list();
    // No filter method may have been called on the query.
    verifyNoMoreInteractions(mockedQuery);
}
// Unknown sortBy values and unknown sortOrder values are both rejected with 400.
@Test
public void testInvalidSortingOptions() {
    executeAndVerifySorting("anInvalidSortByOption", "asc", Status.BAD_REQUEST);
    executeAndVerifySorting("definitionId", "anInvalidSortOrderOption", Status.BAD_REQUEST);
}
// sortBy without sortOrder is a 400 with an InvalidRequestException payload.
// NOTE(review): the file imports javax.xml.registry.InvalidRequestException;
// only the simple name is compared here, but the intended class is presumably
// Camunda's own InvalidRequestException — verify the import.
@Test
public void testSortByParameterOnly() {
    given()
        .queryParam("sortBy", "evaluationTime")
    .then().expect()
        .statusCode(Status.BAD_REQUEST.getStatusCode())
        .contentType(ContentType.JSON)
        .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
        .body("message", containsString("Only a single sorting parameter specified. sortBy and sortOrder required"))
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
}
// Mirror of testSortByParameterOnly: sortOrder alone must also be rejected.
@Test
public void testSortOrderParameterOnly() {
    given()
        .queryParam("sortOrder", "asc")
    .then().expect()
        .statusCode(Status.BAD_REQUEST.getStatusCode())
        .contentType(ContentType.JSON)
        .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
        .body("message", containsString("Only a single sorting parameter specified. sortBy and sortOrder required"))
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
}
// Sorting must translate to orderByEvaluationTime() followed by asc()/desc(),
// in that exact order on the query — hence the InOrder verification.
@Test
public void testSortingParameters() {
    InOrder inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("evaluationTime", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByEvaluationTime();
    inOrder.verify(mockedQuery).asc();

    // Fresh InOrder for the second request so prior invocations don't interfere.
    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("evaluationTime", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByEvaluationTime();
    inOrder.verify(mockedQuery).desc();
}
// Both pagination parameters supplied: passed through to listPage() verbatim.
@Test
public void testSuccessfulPagination() {
    int offset = 0;
    int pageSize = 10;

    given()
        .queryParam("firstResult", offset)
        .queryParam("maxResults", pageSize)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);

    verify(mockedQuery).listPage(offset, pageSize);
}
// A missing firstResult defaults to 0.
@Test
public void testMissingFirstResultParameter() {
    int maxResults = 10;
    given()
        .queryParam("maxResults", maxResults)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    verify(mockedQuery).listPage(0, maxResults);
}
// A missing maxResults defaults to Integer.MAX_VALUE (i.e. unbounded page).
@Test
public void testMissingMaxResultsParameter() {
    int firstResult = 10;
    given()
        .queryParam("firstResult", firstResult)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    verify(mockedQuery).listPage(firstResult, Integer.MAX_VALUE);
}
// The /count endpoint must delegate to query.count() and report the mocked size (1).
@Test
public void testQueryCount() {
    expect()
        .statusCode(Status.OK.getStatusCode())
        .body("count", equalTo(1))
    .when()
        .get(HISTORIC_DECISION_INSTANCE_COUNT_RESOURCE_URL);
    verify(mockedQuery).count();
}
/**
 * Full happy-path check: a decisionDefinitionId filter is applied before
 * list(), and every serialized field of the returned instance matches the
 * MockProvider example values. Inputs/outputs are null because neither
 * includeInputs nor includeOutputs was requested.
 */
@Test
public void testSimpleHistoricDecisionInstanceQuery() {
    String decisionDefinitionId = MockProvider.EXAMPLE_DECISION_DEFINITION_ID;

    // FIX: removed leftover debug logging (.log().body()) that polluted test
    // output and was inconsistent with every other test in this class.
    Response response = given()
        .queryParam("decisionDefinitionId", decisionDefinitionId)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);

    // Filter must be applied before the list() call.
    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).decisionDefinitionId(decisionDefinitionId);
    inOrder.verify(mockedQuery).list();

    String content = response.asString();
    List<String> instances = from(content).getList("");
    assertEquals(1, instances.size());
    Assert.assertNotNull(instances.get(0));

    String returnedHistoricDecisionInstanceId = from(content).getString("[0].id");
    String returnedDecisionDefinitionId = from(content).getString("[0].decisionDefinitionId");
    String returnedDecisionDefinitionKey = from(content).getString("[0].decisionDefinitionKey");
    String returnedDecisionDefinitionName = from(content).getString("[0].decisionDefinitionName");
    String returnedEvaluationTime = from(content).getString("[0].evaluationTime");
    String returnedProcessDefinitionId = from(content).getString("[0].processDefinitionId");
    String returnedProcessDefinitionKey = from(content).getString("[0].processDefinitionKey");
    String returnedProcessInstanceId = from(content).getString("[0].processInstanceId");
    String returnedActivityId = from(content).getString("[0].activityId");
    String returnedActivityInstanceId = from(content).getString("[0].activityInstanceId");
    List<HistoricDecisionInputInstanceDto> returnedInputs = from(content).getList("[0].inputs");
    List<HistoricDecisionOutputInstanceDto> returnedOutputs = from(content).getList("[0].outputs");
    Double returnedCollectResultValue = from(content).getDouble("[0].collectResultValue");

    assertThat(returnedHistoricDecisionInstanceId, is(MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_ID));
    assertThat(returnedDecisionDefinitionId, is(MockProvider.EXAMPLE_DECISION_DEFINITION_ID));
    assertThat(returnedDecisionDefinitionKey, is(MockProvider.EXAMPLE_DECISION_DEFINITION_KEY));
    assertThat(returnedDecisionDefinitionName, is(MockProvider.EXAMPLE_DECISION_DEFINITION_NAME));
    assertThat(returnedEvaluationTime, is(MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_EVALUATION_TIME));
    assertThat(returnedProcessDefinitionId, is(MockProvider.EXAMPLE_PROCESS_DEFINITION_ID));
    assertThat(returnedProcessDefinitionKey, is(MockProvider.EXAMPLE_PROCESS_DEFINITION_KEY));
    assertThat(returnedProcessInstanceId, is(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID));
    assertThat(returnedActivityId, is(MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_ACTIVITY_ID));
    assertThat(returnedActivityInstanceId, is(MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_ACTIVITY_INSTANCE_ID));
    // inputs/outputs omitted by default
    assertThat(returnedInputs, is(nullValue()));
    assertThat(returnedOutputs, is(nullValue()));
    assertThat(returnedCollectResultValue, is(MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_COLLECT_RESULT_VALUE));
}
// Sends every supported string filter at once and verifies each one reached
// the corresponding query method (see verifyStringParameterQueryInvocations).
@Test
public void testAdditionalParameters() {
    Map<String, String> stringQueryParameters = getCompleteStringQueryParameters();
    given()
        .queryParams(stringQueryParameters)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    verifyStringParameterQueryInvocations();
}
// includeInputs=true must enable includeInputs() (and NOT includeOutputs())
// and yield serialized inputs while outputs stay null.
@Test
public void testIncludeInputs() {
    // Replace the default mock with one whose instance carries input values.
    mockedQuery = setUpMockHistoricDecisionInstanceQuery(Collections.singletonList(MockProvider.createMockHistoricDecisionInstanceWithInputs()));
    String decisionDefinitionId = MockProvider.EXAMPLE_DECISION_DEFINITION_ID;
    Response response = given()
        .queryParam("decisionDefinitionId", decisionDefinitionId)
        .queryParam("includeInputs", true)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).decisionDefinitionId(decisionDefinitionId);
    inOrder.verify(mockedQuery).includeInputs();
    inOrder.verify(mockedQuery, never()).includeOutputs();
    inOrder.verify(mockedQuery).list();
    String content = response.asString();
    List<String> instances = from(content).getList("");
    assertEquals(1, instances.size());
    Assert.assertNotNull(instances.get(0));
    List<Map<String, Object>> returnedInputs = from(content).getList("[0].inputs");
    List<Map<String, Object>> returnedOutputs = from(content).getList("[0].outputs");
    assertThat(returnedInputs, is(notNullValue()));
    assertThat(returnedOutputs, is(nullValue()));
    verifyHistoricDecisionInputInstances(returnedInputs);
}
// Mirror of testIncludeInputs for includeOutputs=true: outputs serialized,
// inputs stay null, includeInputs() never invoked.
@Test
public void testIncludeOutputs() {
    mockedQuery = setUpMockHistoricDecisionInstanceQuery(Collections.singletonList(MockProvider.createMockHistoricDecisionInstanceWithOutputs()));
    String decisionDefinitionId = MockProvider.EXAMPLE_DECISION_DEFINITION_ID;
    Response response = given()
        .queryParam("decisionDefinitionId", decisionDefinitionId)
        .queryParam("includeOutputs", true)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).decisionDefinitionId(decisionDefinitionId);
    inOrder.verify(mockedQuery, never()).includeInputs();
    inOrder.verify(mockedQuery).includeOutputs();
    inOrder.verify(mockedQuery).list();
    String content = response.asString();
    List<String> instances = from(content).getList("");
    assertEquals(1, instances.size());
    Assert.assertNotNull(instances.get(0));
    List<Map<String, Object>> returnedInputs = from(content).getList("[0].inputs");
    List<Map<String, Object>> returnedOutputs = from(content).getList("[0].outputs");
    assertThat(returnedInputs, is(nullValue()));
    assertThat(returnedOutputs, is(notNullValue()));
    verifyHistoricDecisionOutputInstances(returnedOutputs);
}
// Both flags together: both query methods invoked, both lists serialized.
@Test
public void testIncludeInputsAndOutputs() {
    mockedQuery = setUpMockHistoricDecisionInstanceQuery(Collections.singletonList(MockProvider.createMockHistoricDecisionInstanceWithInputsAndOutputs()));
    String decisionDefinitionId = MockProvider.EXAMPLE_DECISION_DEFINITION_ID;
    Response response = given()
        .queryParam("decisionDefinitionId", decisionDefinitionId)
        .queryParam("includeInputs", true)
        .queryParam("includeOutputs", true)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).decisionDefinitionId(decisionDefinitionId);
    inOrder.verify(mockedQuery).includeInputs();
    inOrder.verify(mockedQuery).includeOutputs();
    inOrder.verify(mockedQuery).list();
    String content = response.asString();
    List<String> instances = from(content).getList("");
    assertEquals(1, instances.size());
    Assert.assertNotNull(instances.get(0));
    List<Map<String, Object>> returnedInputs = from(content).getList("[0].inputs");
    List<Map<String, Object>> returnedOutputs = from(content).getList("[0].outputs");
    assertThat(returnedInputs, is(notNullValue()));
    assertThat(returnedOutputs, is(notNullValue()));
    verifyHistoricDecisionInputInstances(returnedInputs);
    verifyHistoricDecisionOutputInstances(returnedOutputs);
}
// By default binary fetching stays enabled: disableBinaryFetching() must NOT be called.
@Test
public void testDefaultBinaryFetching() {
    String decisionDefinitionId = MockProvider.EXAMPLE_DECISION_DEFINITION_ID;
    given()
        .queryParam("decisionDefinitionId", decisionDefinitionId)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).decisionDefinitionId(decisionDefinitionId);
    inOrder.verify(mockedQuery, never()).disableBinaryFetching();
    inOrder.verify(mockedQuery).list();
}
// disableBinaryFetching=true must translate into a disableBinaryFetching() call.
@Test
public void testDisableBinaryFetching() {
    String decisionDefinitionId = MockProvider.EXAMPLE_DECISION_DEFINITION_ID;
    given()
        .queryParam("decisionDefinitionId", decisionDefinitionId)
        .queryParam("disableBinaryFetching", true)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).decisionDefinitionId(decisionDefinitionId);
    inOrder.verify(mockedQuery).disableBinaryFetching();
    inOrder.verify(mockedQuery).list();
}
// By default custom object deserialization stays enabled.
@Test
public void testDefaultCustomObjectDeserialization() {
    String decisionDefinitionId = MockProvider.EXAMPLE_DECISION_DEFINITION_ID;
    given()
        .queryParam("decisionDefinitionId", decisionDefinitionId)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).decisionDefinitionId(decisionDefinitionId);
    inOrder.verify(mockedQuery, never()).disableCustomObjectDeserialization();
    inOrder.verify(mockedQuery).list();
}
// disableCustomObjectDeserialization=true must reach the query.
@Test
public void testDisableCustomObjectDeserialization() {
    String decisionDefinitionId = MockProvider.EXAMPLE_DECISION_DEFINITION_ID;
    given()
        .queryParam("decisionDefinitionId", decisionDefinitionId)
        .queryParam("disableCustomObjectDeserialization", true)
    .then().expect()
        .statusCode(Status.OK.getStatusCode())
    .when()
        .get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
    InOrder inOrder = inOrder(mockedQuery);
    inOrder.verify(mockedQuery).decisionDefinitionId(decisionDefinitionId);
    inOrder.verify(mockedQuery).disableCustomObjectDeserialization();
    inOrder.verify(mockedQuery).list();
}
/**
 * @return one entry per supported string query parameter, mapped to its
 *     MockProvider example value; used by testAdditionalParameters and
 *     verifyStringParameterQueryInvocations.
 */
protected Map<String, String> getCompleteStringQueryParameters() {
    Map<String, String> params = new HashMap<String, String>();

    params.put("decisionInstanceId", MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_ID);
    params.put("decisionInstanceIdIn", MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_ID_IN);
    params.put("decisionDefinitionId", MockProvider.EXAMPLE_DECISION_DEFINITION_ID);
    params.put("decisionDefinitionKey", MockProvider.EXAMPLE_DECISION_DEFINITION_KEY);
    params.put("decisionDefinitionName", MockProvider.EXAMPLE_DECISION_DEFINITION_NAME);
    params.put("processDefinitionId", MockProvider.EXAMPLE_PROCESS_DEFINITION_ID);
    params.put("processDefinitionKey", MockProvider.EXAMPLE_PROCESS_DEFINITION_KEY);
    params.put("processInstanceId", MockProvider.EXAMPLE_PROCESS_INSTANCE_ID);
    params.put("activityIdIn", MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_ACTIVITY_ID_IN);
    params.put("activityInstanceIdIn", MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_ACTIVITY_INSTANCE_ID_IN);
    params.put("evaluatedBefore", MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_EVALUATED_BEFORE);
    params.put("evaluatedAfter", MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_EVALUATED_AFTER);

    return params;
}
// Asserts that each string parameter from getCompleteStringQueryParameters()
// was forwarded to its matching query method; *IdIn parameters are split via
// StringArrayConverter and date parameters parsed via DateTimeUtil first.
protected void verifyStringParameterQueryInvocations() {
    Map<String, String> stringQueryParameters = getCompleteStringQueryParameters();
    StringArrayConverter stringArrayConverter = new StringArrayConverter();
    verify(mockedQuery).decisionInstanceId(stringQueryParameters.get("decisionInstanceId"));
    verify(mockedQuery).decisionInstanceIdIn(stringArrayConverter.convertQueryParameterToType(stringQueryParameters.get("decisionInstanceIdIn")));
    verify(mockedQuery).decisionDefinitionId(stringQueryParameters.get("decisionDefinitionId"));
    verify(mockedQuery).decisionDefinitionKey(stringQueryParameters.get("decisionDefinitionKey"));
    verify(mockedQuery).decisionDefinitionName(stringQueryParameters.get("decisionDefinitionName"));
    verify(mockedQuery).processDefinitionId(stringQueryParameters.get("processDefinitionId"));
    verify(mockedQuery).processDefinitionKey(stringQueryParameters.get("processDefinitionKey"));
    verify(mockedQuery).processInstanceId(stringQueryParameters.get("processInstanceId"));
    verify(mockedQuery).activityIdIn(stringArrayConverter.convertQueryParameterToType(stringQueryParameters.get("activityIdIn")));
    verify(mockedQuery).activityInstanceIdIn(stringArrayConverter.convertQueryParameterToType(stringQueryParameters.get("activityInstanceIdIn")));
    verify(mockedQuery).evaluatedBefore(DateTimeUtil.parseDate(stringQueryParameters.get("evaluatedBefore")));
    verify(mockedQuery).evaluatedAfter(DateTimeUtil.parseDate(stringQueryParameters.get("evaluatedAfter")));
    verify(mockedQuery).list();
}
protected void executeAndVerifySorting(String sortBy, String sortOrder, Status expectedStatus) {
given()
.queryParam("sortBy", sortBy)
.queryParam("sortOrder", sortOrder)
.then().expect()
.statusCode(expectedStatus.getStatusCode())
.when()
.get(HISTORIC_DECISION_INSTANCE_RESOURCE_URL);
}
protected void verifyHistoricDecisionInputInstances(List<Map<String, Object>> returnedInputs) {
assertThat(returnedInputs, hasSize(3));
// verify common properties
for (Map<String, Object> returnedInput : returnedInputs) {
assertThat(returnedInput, hasEntry("id", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_INPUT_INSTANCE_ID));
assertThat(returnedInput, hasEntry("decisionInstanceId", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_ID));
assertThat(returnedInput, hasEntry("clauseId", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_INPUT_INSTANCE_CLAUSE_ID));
assertThat(returnedInput, hasEntry("clauseName", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_INPUT_INSTANCE_CLAUSE_NAME));
assertThat(returnedInput, hasEntry("errorMessage", null));
}
verifyStringValue(returnedInputs.get(0));
verifyByteArrayValue(returnedInputs.get(1));
verifySerializedValue(returnedInputs.get(2));
}
protected void verifyHistoricDecisionOutputInstances(List<Map<String, Object>> returnedOutputs) {
assertThat(returnedOutputs, hasSize(3));
// verify common properties
for (Map<String, Object> returnedOutput : returnedOutputs) {
assertThat(returnedOutput, hasEntry("id", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_OUTPUT_INSTANCE_ID));
assertThat(returnedOutput, hasEntry("decisionInstanceId", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_INSTANCE_ID));
assertThat(returnedOutput, hasEntry("clauseId", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_OUTPUT_INSTANCE_CLAUSE_ID));
assertThat(returnedOutput, hasEntry("clauseName", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_OUTPUT_INSTANCE_CLAUSE_NAME));
assertThat(returnedOutput, hasEntry("ruleId", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_OUTPUT_INSTANCE_RULE_ID));
assertThat(returnedOutput, hasEntry("ruleOrder", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_OUTPUT_INSTANCE_RULE_ORDER));
assertThat(returnedOutput, hasEntry("variableName", (Object) MockProvider.EXAMPLE_HISTORIC_DECISION_OUTPUT_INSTANCE_VARIABLE_NAME));
assertThat(returnedOutput, hasEntry("errorMessage", null));
}
verifyStringValue(returnedOutputs.get(0));
verifyByteArrayValue(returnedOutputs.get(1));
verifySerializedValue(returnedOutputs.get(2));
}
protected void verifyStringValue(Map<String, Object> stringValue) {
StringValue exampleValue = MockProvider.EXAMPLE_HISTORIC_DECISION_STRING_VALUE;
assertThat(stringValue, hasEntry("type", (Object) VariableValueDto.toRestApiTypeName(exampleValue.getType().getName())));
assertThat(stringValue, hasEntry("value", (Object) exampleValue.getValue()));
assertThat(stringValue, hasEntry("valueInfo", (Object) Collections.emptyMap()));
}
protected void verifyByteArrayValue(Map<String, Object> byteArrayValue) {
BytesValue exampleValue = MockProvider.EXAMPLE_HISTORIC_DECISION_BYTE_ARRAY_VALUE;
assertThat(byteArrayValue, hasEntry("type", (Object) VariableValueDto.toRestApiTypeName(exampleValue.getType().getName())));
String byteString = Base64.encodeBase64String(exampleValue.getValue()).trim();
assertThat(byteArrayValue, hasEntry("value", (Object) byteString));
assertThat(byteArrayValue, hasEntry("valueInfo", (Object) Collections.emptyMap()));
}
@SuppressWarnings("unchecked")
protected void verifySerializedValue(Map<String, Object> serializedValue) {
ObjectValue exampleValue = MockProvider.EXAMPLE_HISTORIC_DECISION_SERIALIZED_VALUE;
assertThat(serializedValue, hasEntry("type", (Object) VariableValueDto.toRestApiTypeName(exampleValue.getType().getName())));
assertThat(serializedValue, hasEntry("value", exampleValue.getValue()));
Map<String, String> valueInfo = (Map<String, String>) serializedValue.get("valueInfo");
assertThat(valueInfo, hasEntry("serializationDataFormat", exampleValue.getSerializationDataFormat()));
assertThat(valueInfo, hasEntry("objectTypeName", exampleValue.getObjectTypeName()));
}
}
|
|
package org.jivesoftware.openfire;
/**
 * @author sdatar
 *
 * Holds the configuration read from bootstrap.properties. If defining a new property
 * my.foo.bar in bootstrap.properties, you need to add a private String member named
 * myFooBar here, together with a matching getter and setter.
 */
public class BootstrapProperties {
private String mysqlHost;
private String mysqlPort;
private String mysqlDb;
private String mysqlUser;
private String mysqlPassword;
private String xmppDomain = "127.0.0.1";
private String xmppPort = "5222";
private String xmppAddr = "127.0.0.1";
private String xmppPortSecure = "5223";
private String xmppSecurePort = "5223";
private String encryptionKey = null;
private String encryption = null;
private String httpPort = "9090";
private String httpsPort = "9091";
private String locale = "EN";
public String getMysqlHost() {
return mysqlHost;
}
public void setMysqlHost(String mysqlHost) {
this.mysqlHost = mysqlHost;
}
public String getMysqlPort() {
return mysqlPort;
}
public void setMysqlPort(String mysqlPort) {
this.mysqlPort = mysqlPort;
}
public String getMysqlDb() {
return mysqlDb;
}
public void setMysqlDb(String mysqlDb) {
this.mysqlDb = mysqlDb;
}
public String getMysqlUser() {
return mysqlUser;
}
public void setMysqlUser(String mysqlUser) {
this.mysqlUser = mysqlUser;
}
public String getMysqlPassword() {
return mysqlPassword;
}
public void setMysqlPassword(String mysqlPassword) {
this.mysqlPassword = mysqlPassword;
}
public String getXmppDomain() {
return xmppDomain;
}
public void setXmppDomain(String xmppDomain) {
this.xmppDomain = xmppDomain;
}
public String getXmppPort() {
return xmppPort;
}
public void setXmppPort(String xmppPort) {
this.xmppPort = xmppPort;
}
public String getXmppAddr() {
return xmppAddr;
}
public void setXmppAddr(String xmppAddr) {
this.xmppAddr = xmppAddr;
}
public String getXmppPortSecure() {
return xmppPortSecure;
}
public void setXmppPortSecure(String xmppPortSecure) {
this.xmppPortSecure = xmppPortSecure;
}
public String getXmppSecurePort() {
return xmppSecurePort;
}
public void setXmppSecurePort(String xmppSecurePort) {
this.xmppSecurePort = xmppSecurePort;
}
public String getEncryptionKey() {
return encryptionKey;
}
public void setEncryptionKey(String encryptionKey) {
this.encryptionKey = encryptionKey;
}
public String getEncryption() {
return encryption;
}
public void setEncryption(String encryption) {
this.encryption = encryption;
}
public String getHttpPort() {
return httpPort;
}
public void setHttpPort(String httpPort) {
this.httpPort = httpPort;
}
public String getHttpsPort() {
return httpsPort;
}
public void setHttpsPort(String httpsPort) {
this.httpsPort = httpsPort;
}
public String getLocale() {
return locale;
}
public void setLocale(String locale) {
this.locale = locale;
}
@Override
public String toString() {
return "BootstrapProperties [mysqlHost=" + mysqlHost + ", mysqlPort=" + mysqlPort + ", mysqlDb=" + mysqlDb + ", mysqlUser=" + mysqlUser
+ ", mysqlPassword=" + mysqlPassword + ", xmppDomain=" + xmppDomain + ", xmppPort=" + xmppPort + ", xmppAddr=" + xmppAddr + ", xmppPortSecure="
+ xmppPortSecure + ", xmppSecurePort=" + xmppSecurePort + ", encryptionKey=" + encryptionKey + ", encryption=" + encryption + ", httpPort="
+ httpPort + ", httpsPort=" + httpsPort + ", locale=" + locale + "]";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((encryption == null) ? 0 : encryption.hashCode());
result = prime * result + ((encryptionKey == null) ? 0 : encryptionKey.hashCode());
result = prime * result + ((httpPort == null) ? 0 : httpPort.hashCode());
result = prime * result + ((httpsPort == null) ? 0 : httpsPort.hashCode());
result = prime * result + ((locale == null) ? 0 : locale.hashCode());
result = prime * result + ((mysqlDb == null) ? 0 : mysqlDb.hashCode());
result = prime * result + ((mysqlHost == null) ? 0 : mysqlHost.hashCode());
result = prime * result + ((mysqlPassword == null) ? 0 : mysqlPassword.hashCode());
result = prime * result + ((mysqlPort == null) ? 0 : mysqlPort.hashCode());
result = prime * result + ((mysqlUser == null) ? 0 : mysqlUser.hashCode());
result = prime * result + ((xmppAddr == null) ? 0 : xmppAddr.hashCode());
result = prime * result + ((xmppDomain == null) ? 0 : xmppDomain.hashCode());
result = prime * result + ((xmppPort == null) ? 0 : xmppPort.hashCode());
result = prime * result + ((xmppPortSecure == null) ? 0 : xmppPortSecure.hashCode());
result = prime * result + ((xmppSecurePort == null) ? 0 : xmppSecurePort.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
BootstrapProperties other = (BootstrapProperties) obj;
if (encryption == null) {
if (other.encryption != null)
return false;
} else if (!encryption.equals(other.encryption))
return false;
if (encryptionKey == null) {
if (other.encryptionKey != null)
return false;
} else if (!encryptionKey.equals(other.encryptionKey))
return false;
if (httpPort == null) {
if (other.httpPort != null)
return false;
} else if (!httpPort.equals(other.httpPort))
return false;
if (httpsPort == null) {
if (other.httpsPort != null)
return false;
} else if (!httpsPort.equals(other.httpsPort))
return false;
if (locale == null) {
if (other.locale != null)
return false;
} else if (!locale.equals(other.locale))
return false;
if (mysqlDb == null) {
if (other.mysqlDb != null)
return false;
} else if (!mysqlDb.equals(other.mysqlDb))
return false;
if (mysqlHost == null) {
if (other.mysqlHost != null)
return false;
} else if (!mysqlHost.equals(other.mysqlHost))
return false;
if (mysqlPassword == null) {
if (other.mysqlPassword != null)
return false;
} else if (!mysqlPassword.equals(other.mysqlPassword))
return false;
if (mysqlPort == null) {
if (other.mysqlPort != null)
return false;
} else if (!mysqlPort.equals(other.mysqlPort))
return false;
if (mysqlUser == null) {
if (other.mysqlUser != null)
return false;
} else if (!mysqlUser.equals(other.mysqlUser))
return false;
if (xmppAddr == null) {
if (other.xmppAddr != null)
return false;
} else if (!xmppAddr.equals(other.xmppAddr))
return false;
if (xmppDomain == null) {
if (other.xmppDomain != null)
return false;
} else if (!xmppDomain.equals(other.xmppDomain))
return false;
if (xmppPort == null) {
if (other.xmppPort != null)
return false;
} else if (!xmppPort.equals(other.xmppPort))
return false;
if (xmppPortSecure == null) {
if (other.xmppPortSecure != null)
return false;
} else if (!xmppPortSecure.equals(other.xmppPortSecure))
return false;
if (xmppSecurePort == null) {
if (other.xmppSecurePort != null)
return false;
} else if (!xmppSecurePort.equals(other.xmppSecurePort))
return false;
return true;
}
}
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.network.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.PollerFlux;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.network.fluent.models.VirtualNetworkPeeringInner;
import com.azure.resourcemanager.network.models.SyncRemoteAddressSpace;
import java.nio.ByteBuffer;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in VirtualNetworkPeeringsClient. */
public interface VirtualNetworkPeeringsClient {
/**
* Deletes the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link Response} on successful completion of {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(
String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName);
/**
* Deletes the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link PollerFlux} for polling of long-running operation.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
PollerFlux<PollResult<Void>, Void> beginDeleteAsync(
String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName);
/**
* Deletes the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of long-running operation.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName);
/**
* Deletes the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of long-running operation.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName, Context context);
/**
* Deletes the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return A {@link Mono} that completes when a successful response is received.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Void> deleteAsync(String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName);
/**
* Deletes the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName);
/**
* Deletes the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName, Context context);
/**
* Gets the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network peering along with {@link Response} on successful completion of {@link
* Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<VirtualNetworkPeeringInner>> getWithResponseAsync(
String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName);
/**
* Gets the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network peering on successful completion of {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<VirtualNetworkPeeringInner> getAsync(
String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName);
/**
* Gets the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network peering.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
VirtualNetworkPeeringInner get(
String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName);
/**
* Gets the specified virtual network peering.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the virtual network peering.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the specified virtual network peering along with {@link Response}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<VirtualNetworkPeeringInner> getWithResponse(
String resourceGroupName, String virtualNetworkName, String virtualNetworkPeeringName, Context context);
/**
* Creates or updates a peering in the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the peering.
* @param virtualNetworkPeeringParameters Parameters supplied to the create or update virtual network peering
* operation.
* @param syncRemoteAddressSpace Parameter indicates the intention to sync the peering with the current address
* space on the remote vNet after it's updated.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return peerings in a virtual network resource along with {@link Response} on successful completion of {@link
* Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<Flux<ByteBuffer>>> createOrUpdateWithResponseAsync(
String resourceGroupName,
String virtualNetworkName,
String virtualNetworkPeeringName,
VirtualNetworkPeeringInner virtualNetworkPeeringParameters,
SyncRemoteAddressSpace syncRemoteAddressSpace);
/**
* Creates or updates a peering in the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the peering.
* @param virtualNetworkPeeringParameters Parameters supplied to the create or update virtual network peering
* operation.
* @param syncRemoteAddressSpace Parameter indicates the intention to sync the peering with the current address
* space on the remote vNet after it's updated.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link PollerFlux} for polling of peerings in a virtual network resource.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
PollerFlux<PollResult<VirtualNetworkPeeringInner>, VirtualNetworkPeeringInner> beginCreateOrUpdateAsync(
String resourceGroupName,
String virtualNetworkName,
String virtualNetworkPeeringName,
VirtualNetworkPeeringInner virtualNetworkPeeringParameters,
SyncRemoteAddressSpace syncRemoteAddressSpace);
/**
* Creates or updates a peering in the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the peering.
* @param virtualNetworkPeeringParameters Parameters supplied to the create or update virtual network peering
* operation.
* @param syncRemoteAddressSpace Parameter indicates the intention to sync the peering with the current address
* space on the remote vNet after it's updated.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of peerings in a virtual network resource.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
SyncPoller<PollResult<VirtualNetworkPeeringInner>, VirtualNetworkPeeringInner> beginCreateOrUpdate(
String resourceGroupName,
String virtualNetworkName,
String virtualNetworkPeeringName,
VirtualNetworkPeeringInner virtualNetworkPeeringParameters,
SyncRemoteAddressSpace syncRemoteAddressSpace);
/**
* Creates or updates a peering in the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the peering.
* @param virtualNetworkPeeringParameters Parameters supplied to the create or update virtual network peering
* operation.
* @param syncRemoteAddressSpace Parameter indicates the intention to sync the peering with the current address
* space on the remote vNet after it's updated.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link SyncPoller} for polling of peerings in a virtual network resource.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
SyncPoller<PollResult<VirtualNetworkPeeringInner>, VirtualNetworkPeeringInner> beginCreateOrUpdate(
String resourceGroupName,
String virtualNetworkName,
String virtualNetworkPeeringName,
VirtualNetworkPeeringInner virtualNetworkPeeringParameters,
SyncRemoteAddressSpace syncRemoteAddressSpace,
Context context);
/**
* Creates or updates a peering in the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the peering.
* @param virtualNetworkPeeringParameters Parameters supplied to the create or update virtual network peering
* operation.
* @param syncRemoteAddressSpace Parameter indicates the intention to sync the peering with the current address
* space on the remote vNet after it's updated.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return peerings in a virtual network resource on successful completion of {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<VirtualNetworkPeeringInner> createOrUpdateAsync(
String resourceGroupName,
String virtualNetworkName,
String virtualNetworkPeeringName,
VirtualNetworkPeeringInner virtualNetworkPeeringParameters,
SyncRemoteAddressSpace syncRemoteAddressSpace);
/**
* Creates or updates a peering in the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the peering.
* @param virtualNetworkPeeringParameters Parameters supplied to the create or update virtual network peering
* operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return peerings in a virtual network resource on successful completion of {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<VirtualNetworkPeeringInner> createOrUpdateAsync(
String resourceGroupName,
String virtualNetworkName,
String virtualNetworkPeeringName,
VirtualNetworkPeeringInner virtualNetworkPeeringParameters);
/**
* Creates or updates a peering in the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the peering.
* @param virtualNetworkPeeringParameters Parameters supplied to the create or update virtual network peering
* operation.
* @param syncRemoteAddressSpace Parameter indicates the intention to sync the peering with the current address
* space on the remote vNet after it's updated.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return peerings in a virtual network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
VirtualNetworkPeeringInner createOrUpdate(
String resourceGroupName,
String virtualNetworkName,
String virtualNetworkPeeringName,
VirtualNetworkPeeringInner virtualNetworkPeeringParameters,
SyncRemoteAddressSpace syncRemoteAddressSpace);
/**
* Creates or updates a peering in the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the peering.
* @param virtualNetworkPeeringParameters Parameters supplied to the create or update virtual network peering
* operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return peerings in a virtual network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
VirtualNetworkPeeringInner createOrUpdate(
String resourceGroupName,
String virtualNetworkName,
String virtualNetworkPeeringName,
VirtualNetworkPeeringInner virtualNetworkPeeringParameters);
/**
* Creates or updates a peering in the specified virtual network.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @param virtualNetworkPeeringName The name of the peering.
* @param virtualNetworkPeeringParameters Parameters supplied to the create or update virtual network peering
* operation.
* @param syncRemoteAddressSpace Parameter indicates the intention to sync the peering with the current address
* space on the remote vNet after it's updated.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return peerings in a virtual network resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
VirtualNetworkPeeringInner createOrUpdate(
String resourceGroupName,
String virtualNetworkName,
String virtualNetworkPeeringName,
VirtualNetworkPeeringInner virtualNetworkPeeringParameters,
SyncRemoteAddressSpace syncRemoteAddressSpace,
Context context);
    /**
     * Gets all virtual network peerings in a virtual network (asynchronous variant).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all virtual network peerings in a virtual network as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<VirtualNetworkPeeringInner> listAsync(String resourceGroupName, String virtualNetworkName);
    /**
     * Gets all virtual network peerings in a virtual network (synchronous variant).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all virtual network peerings in a virtual network as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<VirtualNetworkPeeringInner> list(String resourceGroupName, String virtualNetworkName);
    /**
     * Gets all virtual network peerings in a virtual network, associating the given {@link Context}
     * with the operation.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkName The name of the virtual network.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all virtual network peerings in a virtual network as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<VirtualNetworkPeeringInner> list(
        String resourceGroupName, String virtualNetworkName, Context context);
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc.writer;
import com.facebook.presto.common.block.Block;
import com.facebook.presto.common.block.ColumnarArray;
import com.facebook.presto.orc.ColumnWriterOptions;
import com.facebook.presto.orc.DwrfDataEncryptor;
import com.facebook.presto.orc.OrcEncoding;
import com.facebook.presto.orc.checkpoint.BooleanStreamCheckpoint;
import com.facebook.presto.orc.checkpoint.LongStreamCheckpoint;
import com.facebook.presto.orc.metadata.ColumnEncoding;
import com.facebook.presto.orc.metadata.CompressedMetadataWriter;
import com.facebook.presto.orc.metadata.MetadataWriter;
import com.facebook.presto.orc.metadata.RowGroupIndex;
import com.facebook.presto.orc.metadata.Stream;
import com.facebook.presto.orc.metadata.Stream.StreamKind;
import com.facebook.presto.orc.metadata.statistics.ColumnStatistics;
import com.facebook.presto.orc.stream.LongOutputStream;
import com.facebook.presto.orc.stream.PresentOutputStream;
import com.facebook.presto.orc.stream.StreamDataOutput;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static com.facebook.presto.common.block.ColumnarArray.toColumnarArray;
import static com.facebook.presto.orc.OrcEncoding.DWRF;
import static com.facebook.presto.orc.metadata.ColumnEncoding.ColumnEncodingKind.DIRECT;
import static com.facebook.presto.orc.metadata.ColumnEncoding.ColumnEncodingKind.DIRECT_V2;
import static com.facebook.presto.orc.metadata.CompressionKind.NONE;
import static com.facebook.presto.orc.stream.LongOutputStream.createLengthOutputStream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Objects.requireNonNull;
/**
 * ORC/DWRF column writer for LIST (array) columns.
 *
 * Per row it records a PRESENT bit (null flag) and, for non-null rows, the
 * element count in the LENGTH stream; the flattened element values are
 * delegated to the nested {@code elementWriter}. Encodings, statistics, index
 * streams and data streams from the element writer are merged into this
 * writer's own output.
 *
 * Usage protocol (enforced via checkState): beginRowGroup / writeBlock /
 * finishRowGroup repeated per row group, then close(), then the stream
 * accessors; reset() returns the writer to its initial state.
 */
public class ListColumnWriter
        implements ColumnWriter
{
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(ListColumnWriter.class).instanceSize();
    private final int column;
    // True when a compression kind other than NONE is configured; affects index positions.
    private final boolean compressed;
    private final ColumnEncoding columnEncoding;
    // Element count per non-null row.
    private final LongOutputStream lengthStream;
    // One null-flag bit per row.
    private final PresentOutputStream presentStream;
    private final CompressedMetadataWriter metadataWriter;
    // Writer for the flattened element values of all rows.
    private final ColumnWriter elementWriter;
    // One statistics entry per finished row group, in order; parallel to the stream checkpoints.
    private final List<ColumnStatistics> rowGroupColumnStatistics = new ArrayList<>();
    private long columnStatisticsRetainedSizeInBytes;
    // Non-null rows seen in the current (unfinished) row group.
    private int nonNullValueCount;
    private boolean closed;
    /**
     * @param column this writer's column index; must be non-negative
     * @param orcEncoding DWRF uses DIRECT length encoding, ORC uses DIRECT_V2
     */
    public ListColumnWriter(int column, ColumnWriterOptions columnWriterOptions, Optional<DwrfDataEncryptor> dwrfEncryptor, OrcEncoding orcEncoding, ColumnWriter elementWriter, MetadataWriter metadataWriter)
    {
        checkArgument(column >= 0, "column is negative");
        requireNonNull(columnWriterOptions, "columnWriterOptions is null");
        this.column = column;
        this.compressed = columnWriterOptions.getCompressionKind() != NONE;
        this.columnEncoding = new ColumnEncoding(orcEncoding == DWRF ? DIRECT : DIRECT_V2, 0);
        this.elementWriter = requireNonNull(elementWriter, "elementWriter is null");
        this.lengthStream = createLengthOutputStream(columnWriterOptions, dwrfEncryptor, orcEncoding);
        this.presentStream = new PresentOutputStream(columnWriterOptions, dwrfEncryptor);
        this.metadataWriter = new CompressedMetadataWriter(metadataWriter, columnWriterOptions, dwrfEncryptor);
    }
    // The element writer and, transitively, its own nested writers.
    @Override
    public List<ColumnWriter> getNestedColumnWriters()
    {
        return ImmutableList.<ColumnWriter>builder()
                .add(elementWriter)
                .addAll(elementWriter.getNestedColumnWriters())
                .build();
    }
    @Override
    public Map<Integer, ColumnEncoding> getColumnEncodings()
    {
        ImmutableMap.Builder<Integer, ColumnEncoding> encodings = ImmutableMap.builder();
        encodings.put(column, columnEncoding);
        encodings.putAll(elementWriter.getColumnEncodings());
        return encodings.build();
    }
    // Checkpoints mark the row-group start positions consumed later by getIndexStreams.
    @Override
    public void beginRowGroup()
    {
        lengthStream.recordCheckpoint();
        presentStream.recordCheckpoint();
        elementWriter.beginRowGroup();
    }
    @Override
    public long writeBlock(Block block)
    {
        checkState(!closed);
        checkArgument(block.getPositionCount() > 0, "Block is empty");
        ColumnarArray columnarArray = toColumnarArray(block);
        return writeColumnarArray(columnarArray);
    }
    // Returns the raw (logical, pre-compression) size of the written data.
    // NOTE(review): NULL_SIZE is not among this file's visible imports — confirm the
    // static import for it exists elsewhere in the import section.
    private long writeColumnarArray(ColumnarArray columnarArray)
    {
        // write nulls and lengths
        int blockNonNullValueCount = 0;
        long childRawSize = 0;
        for (int position = 0; position < columnarArray.getPositionCount(); position++) {
            boolean present = !columnarArray.isNull(position);
            presentStream.writeBoolean(present);
            if (present) {
                blockNonNullValueCount++;
                lengthStream.writeLong(columnarArray.getLength(position));
            }
        }
        // write element values
        Block elementsBlock = columnarArray.getElementsBlock();
        if (elementsBlock.getPositionCount() > 0) {
            childRawSize += elementWriter.writeBlock(elementsBlock);
        }
        nonNullValueCount += blockNonNullValueCount;
        return (columnarArray.getPositionCount() - blockNonNullValueCount) * NULL_SIZE + childRawSize;
    }
    // Snapshots the current row group's statistics and resets the per-group counter.
    @Override
    public Map<Integer, ColumnStatistics> finishRowGroup()
    {
        checkState(!closed);
        ColumnStatistics statistics = new ColumnStatistics((long) nonNullValueCount, null);
        rowGroupColumnStatistics.add(statistics);
        columnStatisticsRetainedSizeInBytes += statistics.getRetainedSizeInBytes();
        nonNullValueCount = 0;
        ImmutableMap.Builder<Integer, ColumnStatistics> columnStatistics = ImmutableMap.builder();
        columnStatistics.put(column, statistics);
        columnStatistics.putAll(elementWriter.finishRowGroup());
        return columnStatistics.build();
    }
    @Override
    public void close()
    {
        closed = true;
        elementWriter.close();
        lengthStream.close();
        presentStream.close();
    }
    @Override
    public Map<Integer, ColumnStatistics> getColumnStripeStatistics()
    {
        checkState(closed);
        ImmutableMap.Builder<Integer, ColumnStatistics> columnStatistics = ImmutableMap.builder();
        columnStatistics.put(column, ColumnStatistics.mergeColumnStatistics(rowGroupColumnStatistics));
        columnStatistics.putAll(elementWriter.getColumnStripeStatistics());
        return columnStatistics.build();
    }
    // Builds one RowGroupIndex per finished row group; the checkpoint lists are
    // parallel to rowGroupColumnStatistics (one checkpoint recorded per beginRowGroup).
    @Override
    public List<StreamDataOutput> getIndexStreams()
            throws IOException
    {
        checkState(closed);
        ImmutableList.Builder<RowGroupIndex> rowGroupIndexes = ImmutableList.builder();
        List<LongStreamCheckpoint> lengthCheckpoints = lengthStream.getCheckpoints();
        Optional<List<BooleanStreamCheckpoint>> presentCheckpoints = presentStream.getCheckpoints();
        for (int i = 0; i < rowGroupColumnStatistics.size(); i++) {
            int groupId = i;
            ColumnStatistics columnStatistics = rowGroupColumnStatistics.get(groupId);
            LongStreamCheckpoint lengthCheckpoint = lengthCheckpoints.get(groupId);
            Optional<BooleanStreamCheckpoint> presentCheckpoint = presentCheckpoints.map(checkpoints -> checkpoints.get(groupId));
            List<Integer> positions = createArrayColumnPositionList(compressed, lengthCheckpoint, presentCheckpoint);
            rowGroupIndexes.add(new RowGroupIndex(positions, columnStatistics));
        }
        Slice slice = metadataWriter.writeRowIndexes(rowGroupIndexes.build());
        Stream stream = new Stream(column, StreamKind.ROW_INDEX, slice.length(), false);
        ImmutableList.Builder<StreamDataOutput> indexStreams = ImmutableList.builder();
        indexStreams.add(new StreamDataOutput(slice, stream));
        indexStreams.addAll(elementWriter.getIndexStreams());
        return indexStreams.build();
    }
    // Present-stream positions (when the stream exists) precede length-stream positions.
    private static List<Integer> createArrayColumnPositionList(
            boolean compressed,
            LongStreamCheckpoint lengthCheckpoint,
            Optional<BooleanStreamCheckpoint> presentCheckpoint)
    {
        ImmutableList.Builder<Integer> positionList = ImmutableList.builder();
        presentCheckpoint.ifPresent(booleanStreamCheckpoint -> positionList.addAll(booleanStreamCheckpoint.toPositionList(compressed)));
        positionList.addAll(lengthCheckpoint.toPositionList(compressed));
        return positionList.build();
    }
    @Override
    public List<StreamDataOutput> getDataStreams()
    {
        checkState(closed);
        ImmutableList.Builder<StreamDataOutput> outputDataStreams = ImmutableList.builder();
        presentStream.getStreamDataOutput(column).ifPresent(outputDataStreams::add);
        outputDataStreams.add(lengthStream.getStreamDataOutput(column));
        outputDataStreams.addAll(elementWriter.getDataStreams());
        return outputDataStreams.build();
    }
    @Override
    public long getBufferedBytes()
    {
        return lengthStream.getBufferedBytes() + presentStream.getBufferedBytes() + elementWriter.getBufferedBytes();
    }
    @Override
    public long getRetainedBytes()
    {
        return INSTANCE_SIZE + lengthStream.getRetainedBytes() + presentStream.getRetainedBytes() + elementWriter.getRetainedBytes() + columnStatisticsRetainedSizeInBytes;
    }
    // Returns the writer to its freshly-constructed state so it can be reused.
    @Override
    public void reset()
    {
        closed = false;
        lengthStream.reset();
        presentStream.reset();
        elementWriter.reset();
        rowGroupColumnStatistics.clear();
        columnStatisticsRetainedSizeInBytes = 0;
        nonNullValueCount = 0;
    }
}
|
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model;
import java.io.Serializable;
/**
* <p>
* Paginated representation of a workflow history for a workflow
* execution. This is the up to date, complete and authoritative record
* of the events related to all tasks and events in the life of the
* workflow execution.
* </p>
*/
/**
 * <p>
 * Paginated representation of a workflow history for a workflow
 * execution. This is the up to date, complete and authoritative record
 * of the events related to all tasks and events in the life of the
 * workflow execution.
 * </p>
 */
public class History implements Serializable, Cloneable {
    /** The list of history events. */
    private com.amazonaws.internal.ListWithAutoConstructFlag<HistoryEvent> events;
    /**
     * Token returned by a previous call when more results are available; pass it
     * back as <code>nextPageToken</code> (all other arguments unchanged) to fetch
     * the next page. Constraints: length 0 - 2048.
     */
    private String nextPageToken;
    /**
     * Returns the list of history events.
     *
     * @return The list of history events.
     */
    public java.util.List<HistoryEvent> getEvents() {
        return events;
    }
    /**
     * Replaces the list of history events with a defensive copy of the given
     * collection (or clears it when <code>null</code>).
     *
     * @param events The list of history events.
     */
    public void setEvents(java.util.Collection<HistoryEvent> events) {
        if (events == null) {
            this.events = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<HistoryEvent> copy =
                    new com.amazonaws.internal.ListWithAutoConstructFlag<HistoryEvent>(events.size());
            copy.addAll(events);
            this.events = copy;
        }
    }
    /**
     * Appends the given history events to the existing list (creating it when
     * absent). Use {@link #setEvents(java.util.Collection)} or
     * {@link #withEvents(java.util.Collection)} to override existing values.
     *
     * @param events The list of history events.
     * @return A reference to this object so that method calls can be chained.
     */
    public History withEvents(HistoryEvent... events) {
        if (getEvents() == null) {
            setEvents(new java.util.ArrayList<HistoryEvent>(events.length));
        }
        java.util.List<HistoryEvent> target = getEvents();
        for (HistoryEvent event : events) {
            target.add(event);
        }
        return this;
    }
    /**
     * Replaces the list of history events (see {@link #setEvents(java.util.Collection)}).
     *
     * @param events The list of history events.
     * @return A reference to this object so that method calls can be chained.
     */
    public History withEvents(java.util.Collection<HistoryEvent> events) {
        setEvents(events);
        return this;
    }
    /**
     * Returns the pagination token, if any. A non-<code>null</code> value means
     * more results are available; the configured <code>maximumPageSize</code>
     * determines how many results a single call can return.
     *
     * @return The token for retrieving the next page of results, or <code>null</code>.
     */
    public String getNextPageToken() {
        return nextPageToken;
    }
    /**
     * Sets the pagination token (constraints: length 0 - 2048).
     *
     * @param nextPageToken The token for retrieving the next page of results.
     */
    public void setNextPageToken(String nextPageToken) {
        this.nextPageToken = nextPageToken;
    }
    /**
     * Sets the pagination token (constraints: length 0 - 2048).
     *
     * @param nextPageToken The token for retrieving the next page of results.
     * @return A reference to this object so that method calls can be chained.
     */
    public History withNextPageToken(String nextPageToken) {
        this.nextPageToken = nextPageToken;
        return this;
    }
    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getEvents() != null) {
            sb.append("Events: ").append(getEvents()).append(",");
        }
        if (getNextPageToken() != null) {
            sb.append("NextPageToken: ").append(getNextPageToken());
        }
        return sb.append("}").toString();
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + (getEvents() == null ? 0 : getEvents().hashCode());
        hashCode = prime * hashCode + (getNextPageToken() == null ? 0 : getNextPageToken().hashCode());
        return hashCode;
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof History)) {
            return false;
        }
        History other = (History) obj;
        if (other.getEvents() == null
                ? this.getEvents() != null
                : !other.getEvents().equals(this.getEvents())) {
            return false;
        }
        if (other.getNextPageToken() == null
                ? this.getNextPageToken() != null
                : !other.getNextPageToken().equals(this.getNextPageToken())) {
            return false;
        }
        return true;
    }
    @Override
    public History clone() {
        try {
            return (History) super.clone();
        } catch (CloneNotSupportedException e) {
            // Object.clone() cannot fail here because this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }
    }
}
|
|
/**
* Copyright 2005-2007 Xue Yong Zhi, Ye Zheng, Jie Li, Yu Su
* Distributed under the BSD License
*/
package com.xruby.runtime.builtin;
import com.xruby.runtime.lang.*;
//import com.xruby.runtime.lang.annotation.RubyLevelClass;
//import com.xruby.runtime.lang.annotation.RubyLevelMethod;
import j2me.util.*;
import j2me.lang.Iterable;
import j2me.lang.AssertMe;
////@RubyLevelClass(name="Array", modules="Enumerable")
public class RubyArray extends RubyBasic implements Iterable/*<RubyValue>*/ {
    // Backing storage for the array's elements (shallow references to RubyValues).
    private List/*<RubyValue>*/ array_;
    // An MRHS (multiple right-hand side) is converted to a RubyArray, but its
    // original shape must be preserved to handle multiple assignment correctly.
    private final int rhs_size_;
    private final boolean has_single_asterisk_or_lambda_call_;
    /** Creates an empty array with no multiple-assignment metadata. */
    public RubyArray() {
        this(0);
    }
    /** Creates an empty array with the given initial backing capacity. */
    public RubyArray(int size) {
        this(size, 0, false);
    }
    /** Creates a one-element array holding {@code v}. */
    public RubyArray(RubyValue v) {
        this(1);
        this.array_.add(v);
    }
    /** Creates a two-element array holding {@code value1} and {@code value2}, in order. */
    public RubyArray(RubyValue value1, RubyValue value2) {
        this(2);
        this.array_.add(value1);
        this.array_.add(value2);
    }
    /** Creates an array of {@code size} elements, each referencing {@code default_value}. */
    RubyArray(int size, RubyValue default_value) {
        this(size);
        for (int i = 0; i < size; ++i) {
            array_.add(default_value);
        }
    }
    /**
     * Base constructor: allocates the backing list and records the
     * multiple-assignment metadata (original RHS size plus the
     * single-asterisk/lambda-call flag).
     */
    RubyArray(int size, int rhs_size, boolean has_single_asterisk_or_lambda_call) {
        super(RubyRuntime.ArrayClass);
        array_ = new ArrayList/*<RubyValue>*/(size);
        rhs_size_ = rhs_size;
        has_single_asterisk_or_lambda_call_ = has_single_asterisk_or_lambda_call;
    }
    /**
     * Wraps an existing list directly, without copying; used by subarray/first/last.
     * NOTE(review): when a subList view is passed, the new array aliases the
     * parent's storage — confirm that is intended.
     */
    private RubyArray(List/*<RubyValue>*/ array) {
        super(RubyRuntime.ArrayClass);
        this.array_ = array;
        rhs_size_ = 0;
        has_single_asterisk_or_lambda_call_ = false;
    }
    //@SuppressWarnings("unchecked")
    /**
     * Shallow copy: duplicates the backing list (element references are shared)
     * and preserves the RHS metadata; doClone copies base-class state
     * (defined outside this file — presumably in RubyBasic).
     */
    public /*RubyArray*/RubyValue clone() {
        RubyArray v = new RubyArray(0, this.rhs_size_, this.has_single_asterisk_or_lambda_call_);//(RubyArray) super.clone();
        v.array_ = new ArrayList/*<RubyValue>*/(this.array_);
        v.doClone(this);
        return v;
    }
    /** Ruby to_ary coercion: an array is already an array, so return self. */
    public RubyArray toAry() {
        return this;
    }
    /** True when this array came from a bare splat (no explicit RHS values). */
    public boolean isSingleAsterisk() {
        return (0 == rhs_size_) && has_single_asterisk_or_lambda_call_;
    }
    /** True when the original RHS was a single non-splat value. */
    public boolean isSingleRhs() {
        return (rhs_size_ <= 1) && !has_single_asterisk_or_lambda_call_;
    }
    //@RubyLevelMethod(name="to_a")
    /** Ruby to_a: returns self for plain Arrays, a copy for Array subclasses. */
    public RubyValue to_a() {
        if( this.getRubyClass() != RubyRuntime.ArrayClass)
            return clone();
        return this;
    }
    //@RubyLevelMethod(name="[]")
    /** Ruby Array[] with no arguments: an empty array tagged with the receiver class. */
    public static RubyValue create(RubyValue receiver) {
        RubyArray a = new RubyArray();
        a.setRubyClass((RubyClass) receiver);
        return a;
    }
    //@RubyLevelMethod(name="[]")
    /** Ruby Array[args...]: a shallow copy of the argument list, tagged with the receiver class. */
    public static RubyValue create(RubyValue receiver, RubyArray args) {
        RubyArray a = args.copy();
        a.setRubyClass((RubyClass) receiver);
        return a;
    }
    /** Unchecked append overload (pre-generics); the element is expected to be a RubyValue. */
    public RubyArray add(/*RubyValue*/Object v) {
        array_.add(v);
        return this;
    }
    /** Appends {@code v} and returns self for chaining. */
    public RubyArray add(RubyValue v) {
        array_.add(v);
        return this;
    }
    /** Inserts all elements of {@code a} at {@code index}, padding any gap with nil first. */
    private RubyArray insert(int index, RubyArray a) {
        for (int i = array_.size(); i < index; ++i) {
            array_.add(RubyConstant.QNIL);
        }
        array_.addAll(index, a.array_);
        return this;
    }
    /** Inserts a single value at {@code index}; no nil padding is performed here. */
    public RubyArray insert(int index, RubyValue v) {
        array_.add(index, v);
        return this;
    }
    /** Number of elements currently held. */
    public int size() {
        return array_.size();
    }
public RubyValue delete_at(int index) {
if (index >= size()) {
return RubyConstant.QNIL;
}
if(index < 0){
index += size();
}
if(index < 0){
return RubyConstant.QNIL;
}
return (RubyValue)array_.remove(index);
}
    /**
     * Removes {@code length} elements starting at {@code begin} (negative begin
     * counts from the end). Returns null — not nil — when begin is past the end
     * or length is negative; otherwise returns self.
     */
    private RubyArray delete_at(int begin, int length) {
        final int arraySize = array_.size();
        if (begin > arraySize) {
            return null;
        }
        if (length < 0) {
            return null;
        }
        if (begin < 0) {
            begin += arraySize;
        }
        // clamp the range so it never runs past the end of the array
        if (begin + length > arraySize) {
            length = arraySize - begin;
            if (length < 0) {
                length = 0;
            }
        }
        while(length > 0){
            array_.remove(begin);
            length--;
        }
        return this;
    }
private RubyArray plus(RubyArray v) {
int size = array_.size() + v.size();
RubyArray resultArray = new RubyArray(size);
resultArray.array_.addAll(array_);
resultArray.array_.addAll(v.array_);
return resultArray;
}
private RubyArray minus(RubyArray other) {
RubyArray a = this.copy();
// for (RubyValue v : other) {
for (Iterator iter = other.iterator(); iter.hasNext();) {
RubyValue v = (RubyValue)iter.next();
a.remove(v);
}
return a;
}
private boolean remove(RubyValue v) {
boolean r = false;
while (array_.remove(v)) {
r = true;
}
return r;
}
public RubyArray times(int times) {
if (times < 0) {
throw new RubyException(RubyRuntime.ArgumentErrorClass, "negative argument");
}
int size = array_.size() * times;
RubyArray resultArray = new RubyArray(size);
for (int i = 0; i < times; i++) {
resultArray.array_.addAll(array_);
}
return resultArray;
}
public RubyArray expand(RubyValue v) {
if (v instanceof RubyArray) {
//[5,6,*[1, 2]]
array_.addAll(((RubyArray) v).array_);
} else {
//[5,6,*1], [5,6,*nil]
array_.add(v);
}
return this;
}
//create a new Array containing every element from index to the end
public RubyValue collect(int index) {
AssertMe.rho_assert(index >= 0);
final int size = array_.size() - index;
if (size < 0) {
return new RubyArray();
}
RubyArray a = new RubyArray(size);
for (int i = index; i < array_.size(); ++i) {
a.add(array_.get(i));
}
return a;
}
    /** Iterator over the backing list (elements are RubyValues). */
    public Iterator/*<RubyValue>*/ iterator() {
        return array_.iterator();
    }
private int getRealIndex(int i) {
int index = i;
if (index < 0) {
index = array_.size() + index;
}
if (index < 0) {
throw new RubyException(RubyRuntime.IndexErrorClass, "index " + i + " out of array");
}
return index;
}
    /**
     * Element assignment: writes in place when the (normalized) index is within
     * bounds; otherwise pads the gap with nil and appends. Returns {@code value}.
     */
    public RubyValue set(int start, RubyValue value) {
        int index = getRealIndex(start);
        if (index < array_.size()) {
            array_.set(index, value);
        } else {
            // index is past the end: fill the gap with nil, then append
            for (int i = array_.size(); i < index; ++i) {
                array_.add(RubyConstant.QNIL);
            }
            array_.add(value);
        }
        return value;
    }
    /**
     * Slice-assignment helper: replaces {@code length} elements starting at
     * {@code start} with {@code value}. A nil value deletes the range; length 0
     * inserts before the index; an array value is spliced in element-wise.
     * Returns {@code value}.
     */
    public RubyValue replace(int start, int length, RubyValue value) {
        int index = getRealIndex(start);
        if (value == RubyConstant.QNIL) {
            // nil RHS: drop the whole range
            for(int i=0;i<length;i++) {
                array_.remove(index);
            }
            return value;
        }
        if (length < 0) {
            throw new RubyException(RubyRuntime.IndexErrorClass, "negative length (" + length + ")");
        } else if (0 == length) {
            // pure insertion before index
            if (value instanceof RubyArray) {
                array_.addAll(index, ((RubyArray) value).array_);
            } else {
                array_.add(index, value);
            }
        } else {
            // remove length-1 elements, then replace the one remaining slot
            for (int i = 0; i < length -1; ++i) {
                array_.remove(index);
            }
            if (value instanceof RubyArray) {
                array_.remove(index);
                array_.addAll(index, ((RubyArray) value).array_);
            } else {
                array_.set(index, value);
            }
        }
        return value;
    }
public RubyValue get(int index) {
if (index < 0) {
index = array_.size() + index;
}
if (index < 0 || index >= size()) {
return RubyConstant.QNIL;
} else {
return (RubyValue)array_.get(index);
}
}
public RubyArray copy() {
RubyArray resultArray = new RubyArray(array_.size());
// for (RubyValue v : array_) {
for (Iterator iter = array_.iterator(); iter.hasNext();) {
RubyValue v = (RubyValue)iter.next();
resultArray.add(v);
}
return resultArray;
}
    /**
     * Array comparison: compares element-wise via the elements' comparison
     * method (dispatched through RubyID.unequalID — presumably bound to
     * {@code <=>}; confirm), returning the first non-zero result; when all
     * shared positions compare equal, the shorter array is smaller (-1/0/1).
     */
    private RubyValue compare(RubyArray other_array) {
        int length = (size() <= other_array.size()) ? size() : other_array.size();
        for (int i = 0; i < length; ++i) {
            RubyValue v = RubyAPI.callPublicOneArgMethod(get(i), other_array.get(i), null, RubyID.unequalID);
            if (!RubyAPI.testEqual(v, ObjectFactory.FIXNUM0)) {
                return v;
            }
        }
        if (size() == other_array.size()) {
            return ObjectFactory.FIXNUM0;
        } else if (size() > other_array.size()) {
            return ObjectFactory.FIXNUM1;
        } else {
            return ObjectFactory.FIXNUM_NEGATIVE_ONE;
        }
    }
    /**
     * Returns a new array of {@code length} elements starting at {@code begin}
     * (negative begin counts from the end). Returns null — not nil — when begin
     * is past the end or length is negative; callers map null to nil.
     * NOTE(review): the non-empty result wraps a subList view, so it aliases
     * this array's storage — confirm that is intended.
     */
    public RubyArray subarray(int begin, int length) {
        final int arraySize = array_.size();
        if (begin > arraySize) {
            return null;
        }
        if (length < 0) {
            return null;
        }
        if (begin < 0) {
            begin += arraySize;
        }
        // clamp the range so it never runs past the end of the array
        if (begin + length > arraySize) {
            length = arraySize - begin;
            if (length < 0) {
                length = 0;
            }
        }
        if (length == 0) {
            return new RubyArray(0);
        }
        return new RubyArray(array_.subList(begin, begin + length));
    }
    //RHO_COMMENT
    /**
     * Element-wise equality. When {@code o} is not a RubyArray and
     * {@code bConvToAry} is set, falls back to o's own equals provided it
     * responds to to_ary (mirroring Ruby's array-coercion protocol).
     */
    public boolean equals(Object o, boolean bConvToAry) {
        if (this == o) {
            return true;
        }
        if (o instanceof RubyArray) {
            RubyArray that = (RubyArray)o;
            int size = array_.size();
            if (size != that.size()) {
                return false;
            }
            for (int i = 0; i < size; ++i) {
                if (!this.get(i).equals(that.get(i))) {
                    return false;
                }
            }
            return true;
        } else if (o instanceof RubyValue && bConvToAry ) {
            RubyValue v = (RubyValue)o;
            if (!v.respondTo(RubyID.toAryID)) {
                return false;
            } else {
                // let the other value decide (it may coerce itself to an array)
                return v.equals(this);
            }
        } else {
            return false;
        }
    }
    /**
     * In-place sort using the elements' comparison method (dispatched through
     * RubyID.unequalID — presumably {@code <=>}; confirm), whose Fixnum result
     * is used as the comparator value.
     */
    private void sort() {
        Collections.sort(array_, new Comparator/*<RubyValue>*/() {
            public int compare(Object/*RubyValue*/ arg0, Object/*RubyValue*/ arg1) {
                RubyValue v = RubyAPI.callPublicOneArgMethod((RubyValue)arg0, (RubyValue)arg1, null, RubyID.unequalID);
                return ((RubyFixnum) v).toInt();
            }
        }
        );
    }
    /** In-place sort where the ordering is supplied by a user block returning a Fixnum. */
    private void sort_with_block(final RubyBlock block) {
        final RubyArray self = this;
        Collections.sort(array_, new Comparator/*<RubyValue>*/() {
            public int compare(Object/*RubyValue*/ arg0, Object/*RubyValue*/ arg1) {
                //TODO can not check if block return/break occured.
                RubyValue v = block.invoke(self, (RubyValue)arg0, (RubyValue)arg1);
                return ((RubyFixnum) v).toInt();
            }
        }
        );
    }
    /**
     * Normalizes a possibly negative index against the given {@code size};
     * raises a Ruby IndexError when it remains negative after adjustment.
     */
    private int getRealIndex(int size,int i) {
        int index = i;
        if (index < 0) {
            index = size + index;
        }
        if (index < 0) {
            throw new RubyException(RubyRuntime.IndexErrorClass, "index " + i + " out of array");
        }
        return index;
    }
    /**
     * Appends {@code array}'s elements into {@code receiver}, flattening nested
     * RubyArrays recursively. Returns true when at least one nested array was
     * flattened at this level; the recursive call's result is intentionally
     * ignored because {@code modified} is already true on that path.
     */
    private boolean recursiveAdd(RubyArray receiver,RubyArray array){
        boolean modified = false;
        for (int i=0;i<array.size();i++) {
            RubyValue val = array.get(i);
            if (val instanceof RubyArray) {
                modified = true;
                recursiveAdd(receiver,(RubyArray)val);
            } else {
                receiver.add(val);
            }
        }
        return modified;
    }
    //@RubyLevelMethod(name="new", singleton=true)
    /** Ruby Array.new with no arguments: an empty array tagged with the receiver class. */
    public static RubyValue newArray(RubyValue receiver, RubyBlock block) {
        RubyArray a = new RubyArray();
        a.setRubyClass((RubyClass) receiver);
        return a;
    }
//@RubyLevelMethod(name="new", singleton=true)
public static RubyValue newArray(RubyValue receiver, RubyArray args, RubyBlock block) {
RubyArray a;
if (null == args) {
a = new RubyArray();
} else if (null == block) {
if (args.get(0) instanceof RubyArray) {
a = (RubyArray)args.get(0).clone();
} else {
RubyFixnum size = (RubyFixnum) args.get(0);
RubyValue default_value = args.get(1);
a = ObjectFactory.createArray(size.toInt(), default_value);
}
} else {
RubyFixnum size = (RubyFixnum) args.get(0);
a = new RubyArray();
for (int i=0; i<size.toFloat(); i++) {
RubyValue return_value = block.invoke(receiver, ObjectFactory.createFixnum(i));
a.add(return_value);
}
}
a.setRubyClass((RubyClass) receiver);
return a;
}
    //@RubyLevelMethod(name="empty?")
    /** Ruby empty?: true when the array holds no elements. */
    public RubyValue empty_p() {
        return ObjectFactory.createBoolean(this.array_.isEmpty());
    }
//RHO_COMMENT
//@RubyLevelMethod(name="to_s")
/*public RubyValue to_s() {
RubyString r = ObjectFactory.createString();
// for (RubyValue v : array_) {
for (Iterator iter = array_.iterator(); iter.hasNext();) {
RubyValue v = (RubyValue)iter.next();
r.appendString(v);
// TODO: The output of to_s is not as the same as cruby, we should solve this issue
// TODO: and change the corresponding testcases in RubyCompilerTest, such as test_array_expand.
}
return r;
}*/
    //@RubyLevelMethod(name="length")
    /** Ruby length: the element count as a Fixnum. */
    public RubyFixnum length() {
        return ObjectFactory.createFixnum(this.array_.size());
    }
    //@RubyLevelMethod(name="clear")
    /** Ruby clear: removes all elements and returns self. */
    public RubyArray clear() {
        array_.clear();
        return this;
    }
    //@RubyLevelMethod(name="[]", alias="slice")
    /**
     * Ruby [] / slice with one argument: a Fixnum indexes (negative counts
     * from the end), a Range slices, a Symbol raises TypeError; anything else
     * is coerced to an int index. Out-of-range access yields nil.
     */
    public RubyValue aref(RubyValue arg) {
        if (arg instanceof RubyFixnum) {
            return this.get(arg.toInt());
        }
        if (arg instanceof RubySymbol) {
            throw new RubyException(RubyRuntime.TypeErrorClass, "Symbol as array index");
        }
        if (arg instanceof RubyRange) {
            RubyRange range = (RubyRange)arg;
            int begin = range.getLeft().toInt();
            int end = range.getRight().toInt();
            // normalize negative bounds against the array size
            if (begin < 0) {
                begin = this.size() + begin;
            }
            if (end < 0) {
                end = this.size() + end;
            }
            // an inclusive range (..) covers the end element as well
            if (!range.isExcludeEnd()) {
                ++end;
            }
            RubyArray resultValue = this.subarray(begin, end - begin);
            if ( null == resultValue )
                return RubyConstant.QNIL;
            return resultValue;
            //return (null == resultValue ? RubyConstant.QNIL : resultValue);
        }
        return this.get(arg.toInt());
    }
//@RubyLevelMethod(name="[]", alias="slice")
public RubyValue aref(RubyValue begin, RubyValue length) {
if (begin instanceof RubySymbol) {
throw new RubyException(RubyRuntime.TypeErrorClass, "Symbol as array index");
}
RubyArray resultValue = this.subarray(begin.toInt(), length.toInt());
if ( null == resultValue )
return RubyConstant.QNIL;
return resultValue;
// return (null == resultValue ? RubyConstant.QNIL : resultValue);
}
    //@RubyLevelMethod(name="insert")
    /**
     * Ruby insert(pos, values...): the first element of {@code ary} is the
     * position (negative counts from the end, inserting after that slot); the
     * remaining elements are inserted there. With only a position it is a
     * no-op returning self; with no arguments it raises ArgumentError.
     */
    public RubyArray insert(RubyArray ary) {
        int argc = ary.size();
        if (argc == 1) {
            return this;
        }
        if (argc < 1) {
            throw new RubyException(RubyRuntime.ArgumentErrorClass, "wrong number of arguments (at least 1)");
        }
        int pos = ary.get(0).toInt();
        if (pos < 0) {
            // negative position: count from the end, landing after that element
            pos += this.array_.size() + 1;
        }
        return this.insert(pos, ary.subarray(1, ary.size() - 1));
    }
//@RubyLevelMethod(name="first")
public RubyValue first() {
if (this.array_.size() == 0) {
return RubyConstant.QNIL;
} else {
return (RubyValue)this.array_.get(0);
}
}
//@RubyLevelMethod(name="first")
public RubyValue first(RubyValue v) {
int n = v.toInt();
int size = this.array_.size();
if (n > size) {
n = size;
}
return new RubyArray(this.array_.subList(0, n));
}
//@RubyLevelMethod(name="last")
public RubyValue last() {
if (this.array_.size() == 0) {
return RubyConstant.QNIL;
} else {
return (RubyValue)this.array_.get(this.size() - 1);
}
}
//@RubyLevelMethod(name="last")
// Ruby's Array#last(n): a new array with the last n elements,
// clamped to the receiver's size.
public RubyValue last(RubyValue v) {
    int size = this.array_.size();
    int count = Math.min(v.toInt(), size);
    return new RubyArray(this.array_.subList(size - count, size));
}
//@RubyLevelMethod(name="at")
// Ruby's Array#at: element at the given index (delegates to get()).
public RubyValue at(RubyValue value) {
    return this.get(value.toInt());
}
//@RubyLevelMethod(name="<<")
// Ruby's << operator: appends v and returns the receiver so calls can chain.
public RubyArray push(RubyValue v) {
    this.array_.add(v);
    return this;
}
//@RubyLevelMethod(name="push")
// push with no arguments is a no-op; returns the receiver.
public RubyArray multiPush() {
    return this;
}
//@RubyLevelMethod(name="push")
// Single-argument push: appends v and returns the receiver.
public RubyArray multiPush(RubyValue v) {
    this.array_.add(v);
    return this;
}
//@RubyLevelMethod(name="push")
// Variadic push: appends every element of args in order and returns
// the receiver.
public RubyArray multiPush(RubyArray args) {
    this.array_.addAll(args.array_);
    return this;
}
//@RubyLevelMethod(name="pop")
// Removes and returns the last element; nil when the array is empty.
public RubyValue pop() {
    int size = this.array_.size();
    return (size == 0) ? RubyConstant.QNIL : (RubyValue) this.array_.remove(size - 1);
}
//@RubyLevelMethod(name="delete_at")
// Ruby's Array#delete_at: removes and returns the element at the given index.
public RubyValue deleteAt(RubyValue v) {
    return this.delete_at(v.toInt());
}
//@RubyLevelMethod(name="shift")
// Removes and returns the first element; nil when the array is empty.
public RubyValue shift() {
    if (this.array_.size() == 0) {
        return RubyConstant.QNIL;
    }
    return (RubyValue) this.array_.remove(0);
}
//@RubyLevelMethod(name="delete_if")
// Removes every element for which the block returns a true value.
// The index advances only when nothing was removed, so the shift caused
// by remove(i) does not skip the following element.
public RubyValue delete_if(RubyBlock block) {
    for (int i = 0; i < array_.size();) {
        RubyValue r = block.invoke(this, (RubyValue)array_.get(i));
        if (r.isTrue()) {
            array_.remove(i);
        } else {
            ++i;
        }
    }
    return this;
}
//@RubyLevelMethod(name="delete")
// Removes all occurrences of item. If none were found and a block was
// given, the block's result is returned; otherwise the item itself
// (found) or nil (not found).
// NOTE(review): matching uses Java equals() via List.remove(Object),
// not RubyAPI.testEqual as include? does -- confirm these agree.
public RubyValue delete(RubyValue item, RubyBlock block) {
    boolean found = false;
    while (array_.remove(item)) {
        found = true;
    }
    if (block != null && !found) {
        return block.invoke(item);
    } else {
        return found ? item : RubyConstant.QNIL;
    }
}
//@RubyLevelMethod(name="<=>")
// Ruby's <=>: coerces the argument to an array and compares element-wise.
public RubyValue compare(RubyValue v) {
    return this.compare(v.toAry());
}
//RHO_COMMENT
//@RubyLevelMethod(name="==")
// == comparison; the boolean flag distinguishes == (true) from eql?
// (false, see opEql below) in equals(RubyValue, boolean).
public RubyValue opEquals(RubyValue v) {
    return ObjectFactory.createBoolean(equals(v,true));
}
//@RubyLevelMethod(name="eql?")
// eql? comparison: same helper as ==, but with the strict flag (false).
public RubyValue opEql(RubyValue v) {
    return ObjectFactory.createBoolean(equals(v,false));
}
//@RubyLevelMethod(name="concat")
// Appends all elements of the argument (coerced to an array) in place
// and returns the receiver.
public RubyArray concat(RubyValue v) {
    RubyArray ary = v.toAry();
    array_.addAll(ary.array_);
    return this;
}
//@RubyLevelMethod(name="+")
// Array concatenation: coerces the argument and delegates to plus(RubyArray).
public RubyArray plus(RubyValue v) {
    return this.plus(v.toAry());
}
//@RubyLevelMethod(name="-")
// Array difference: coerces the argument and delegates to minus(RubyArray).
public RubyArray minus(RubyValue v) {
    return this.minus(v.toAry());
}
//@RubyLevelMethod(name="include?")
// Linear membership test using RubyAPI.testEqual (Ruby's == semantics).
public RubyValue include(RubyValue v) {
    Iterator iter = array_.iterator();
    while (iter.hasNext()) {
        if (RubyAPI.testEqual((RubyValue) iter.next(), v)) {
            return RubyConstant.QTRUE;
        }
    }
    return RubyConstant.QFALSE;
}
//@RubyLevelMethod(name="each")
// Yields each element to the block in order; a nil block is a no-op.
// If the block executed a break or return, its value is propagated
// immediately instead of the receiver.
public RubyValue each(RubyBlock block)
{
    if ( block == null )
        return this;
    for (Iterator iter = array_.iterator(); iter.hasNext();) {
        RubyValue item = (RubyValue)iter.next();
        RubyValue v = block.invoke(this, item);
        if (block.breakedOrReturned()) {
            return v;
        }
    }
    return this;
}
//@RubyLevelMethod(name="each_index")
// Yields each index (0..size-1) as a Fixnum; honors break/return like each.
public RubyValue each_index(RubyBlock block)
{
    if ( block == null )
        return this;
    for (int i=0;i<size();i++) {
        RubyValue v = block.invoke(this, new RubyFixnum(i));
        if (block.breakedOrReturned()) {
            return v;
        }
    }
    return this;
}
//@RubyLevelMethod(name="reverse_each")
// Yields each element in reverse order (via a ListIterator positioned at
// the end); honors break/return like each.
public RubyValue reverse_each(RubyBlock block)
{
    if ( block == null )
        return this;
    ListIterator/*<RubyValue>*/ ite = array_.listIterator(array_.size());
    while (ite.hasPrevious()) {
        RubyValue v = block.invoke(this, (RubyValue)ite.previous());
        if (block.breakedOrReturned()) {
            return v;
        }
    }
    return this;
}
//@RubyLevelMethod(name="unshift")
// Prepends a single value; returns the receiver.
public RubyArray unshift(RubyValue value) {
    array_.add(0, value);
    return this;
}
//@RubyLevelMethod(name="unshift")
// Prepends all elements of value (in their original order); returns the receiver.
public RubyArray unshift(RubyArray value) {
    array_.addAll(0, value.array_);
    return this;
}
// Order-independent hash: the sum of the element hashes, consistent with
// the class's equality helper.
public int hashCode() {
    int hash = 0;
    Iterator iter = array_.iterator();
    while (iter.hasNext()) {
        hash += ((RubyValue) iter.next()).hashCode();
    }
    return hash;
}
//@RubyLevelMethod(name="hash")
// Ruby-level hash: wraps Java hashCode() in a Fixnum.
public RubyFixnum hash() {
    return ObjectFactory.createFixnum(hashCode());
}
//@RubyLevelMethod(name="&")
// Set intersection: elements present in both arrays, first-occurrence
// order, duplicates dropped. Quadratic: each element does two linear
// include() scans.
public RubyArray and(RubyValue value) {
    RubyArray other = value.toAry();
    RubyArray a = new RubyArray();
    for (Iterator iter = array_.iterator(); iter.hasNext();) {
        RubyValue v = (RubyValue)iter.next();
        if (other.include(v) == RubyConstant.QTRUE &&
            a.include(v) == RubyConstant.QFALSE) {
            a.add(v);
        }
    }
    return a;
}
//@RubyLevelMethod(name="|")
// Set union: elements of the receiver followed by elements of the
// argument, first occurrence kept, duplicates dropped via include() scans.
public RubyArray or(RubyValue value) {
    RubyArray other = value.toAry();
    RubyArray a = new RubyArray();
    for (Iterator iter = array_.iterator(); iter.hasNext();) {
        RubyValue v = (RubyValue)iter.next();
        if (a.include(v) == RubyConstant.QFALSE) {
            a.add(v);
        }
    }
    for (Iterator iter = other.iterator(); iter.hasNext();) {
        RubyValue v = (RubyValue)iter.next();
        if (a.include(v) == RubyConstant.QFALSE) {
            a.add(v);
        }
    }
    return a;
}
//@RubyLevelMethod(name="compact")
// Non-destructive compact: returns a copy with nils removed.
// RHO change: unlike compact!, this always returns the copy, even when
// no nil was present (compact! would return nil in that case, hence the
// commented-out one-liner was replaced).
public RubyValue compact() {
    //RHO_COMMENT
    //return copy().compact_danger();
    RubyArray arRes = copy();
    arRes.compact_danger();//If No nil inside it return NIL
    return arRes;
}
//@RubyLevelMethod(name="compact!")
// In-place compact: removes nils; returns nil when nothing was removed
// (the Ruby convention for bang methods).
public RubyValue compact_danger() {
    if (remove(RubyConstant.QNIL)) {
        return this;
    } else {
        return RubyConstant.QNIL;
    }
}
//@RubyLevelMethod(name="uniq")
// Non-destructive uniq: copies the receiver, dedupes the copy in place,
// and returns the copy regardless of whether anything was removed.
public RubyValue uniq()
{
    RubyArray res = copy();
    res.uniq_danger();
    return res;
}
//@RubyLevelMethod(name="uniq!")
// In-place duplicate removal using RubyAPI.testEqual; the first
// occurrence is kept. Returns nil when nothing was removed (bang
// convention). O(n^2) pairwise scan; the inner index only advances
// when no removal occurred.
public RubyValue uniq_danger() {
    boolean b = false;
    for (int i = 0; i < array_.size(); ++i) {
        for (int j = i + 1; j < array_.size();) {
            if (RubyAPI.testEqual((RubyValue)array_.get(i), (RubyValue)array_.get(j))) {
                array_.remove(j);
                b = true;
            } else {
                ++j;
            }
        }
    }
    if ( !b )
        return RubyConstant.QNIL;
    return this;
}
//@RubyLevelMethod(name="reverse")
// Returns a reversed copy; the receiver is untouched.
public RubyArray reverse() {
    RubyArray array = this.copy();
    Collections.reverse(array.array_);
    return array;
}
//@RubyLevelMethod(name="reverse!")
// Reverses the receiver in place and returns it.
public RubyArray reverseBang() {
    Collections.reverse(array_);
    return this;
}
//@RubyLevelMethod(name="index")
// Index of the first element equal to arg, or nil.
// NOTE(review): uses Java equals() rather than RubyAPI.testEqual as
// include? does -- confirm the two agree for all value types.
public RubyValue index(RubyValue arg) {
    for(int i=0; i<size(); i++){
        if(get(i).equals(arg))
            return ObjectFactory.createFixnum(i);
    }
    return RubyConstant.QNIL;
}
//@RubyLevelMethod(name="rindex")
// Index of the last element equal to arg (scans backwards), or nil.
// Same equals()-vs-testEqual caveat as index above.
public RubyValue rindex(RubyValue arg) {
    for(int i=size()-1; i>=0; i--){
        if(get(i).equals(arg))
            return ObjectFactory.createFixnum(i);
    }
    return RubyConstant.QNIL;
}
//@RubyLevelMethod(name="replace")
// Replaces the receiver's contents with those of arg (cast to RubyArray).
// Self-replacement is a no-op to avoid clearing before copying.
public RubyValue replace(RubyValue arg) {
    RubyArray anotherArray = (RubyArray)arg;
    if(this == anotherArray)
        return this;
    clear();
    for(int i=0;i<anotherArray.size();i++){
        add(anotherArray.get(i));
    }
    return this;
}
//@RubyLevelMethod(name="indexes", alias="indices")
// Ruby's Array#indexes / Array#indices: returns a new array containing the
// receiver's elements at the positions listed in args.
public RubyValue indexes(RubyArray args) {
    RubyArray a = new RubyArray(args.size());
    // Iterate over the requested positions, not over the receiver: the
    // previous bound (size() of the receiver) read past the end of args
    // when the receiver was longer, and silently dropped trailing
    // positions when it was shorter. The result is presized to
    // args.size(), which confirms the intended iteration count.
    for (int i = 0; i < args.size(); i++) {
        RubyFixnum n = (RubyFixnum) args.get(i);
        a.add(get(n.toInt()));
    }
    return a;
}
//@RubyLevelMethod(name="nitems")
// Counts the non-nil elements.
public RubyValue nitems() {
    int n = 0;
    for(int i=0;i<size();i++){
        if(get(i) != RubyConstant.QNIL)
            n++;
    }
    return ObjectFactory.createFixnum(n);
}
//@RubyLevelMethod(name="flatten")
// Non-destructive flatten: recursively expands nested arrays into a new array.
public RubyValue flatten() {
    RubyArray a = new RubyArray();
    recursiveAdd(a, this);
    return a;
}
//@RubyLevelMethod(name="flatten!")
// In-place flatten: copies, clears, then re-adds flattened. Returns nil
// when the array was already flat (recursiveAdd reports whether any
// nesting was expanded).
public RubyValue flatten_danger() {
    RubyArray copy = copy();
    clear();
    boolean hasModified = recursiveAdd(this, copy);
    if (hasModified){
        return this;
    } else {
        return RubyConstant.QNIL;
    }
}
//@RubyLevelMethod(name="collect!")
// map!: invokes the public "collect" method with the block, then replaces
// the receiver's contents with the mapped result.
public RubyValue collect_danger(RubyBlock block) {
    RubyArray a = (RubyArray)RubyAPI.callPublicNoArgMethod(this, block, RubyID.intern("collect"));
    clear();
    for(int i=0;i<a.size();i++){
        add(a.get(i));
    }
    return this;
}
//@RubyLevelMethod(name="assoc")
// Returns the first element that is itself a non-empty array whose first
// item equals arg, or nil.
public RubyValue assoc(RubyValue arg) {
    RubyValue val = null;
    for(int i=0;i<size();i++){
        val = get(i);
        if(val instanceof RubyArray){
            if(((RubyArray)val).size() > 0){
                RubyValue tmp = ((RubyArray)val).get(0);
                if(arg.equals(tmp)){
                    return val;
                }
            }
        }
    }
    return RubyConstant.QNIL;
}
//@RubyLevelMethod(name="rassoc")
// Returns the first element that is itself an array with at least two
// items whose second item (index 1) equals arg, or nil.
public RubyValue rassoc(RubyValue arg) {
    int size = this.array_.size();
    for(int i=0; i < size; i++){
        RubyValue val = get(i);
        if(val instanceof RubyArray){
            if(((RubyArray)val).size() > 1){
                RubyValue tmp = ((RubyArray)val).get(1);
                if(arg.equals(tmp)){
                    return val;
                }
            }
        }
    }
    return RubyConstant.QNIL;
}
//@RubyLevelMethod(name="*")
// Array#* : with an integer, repeats the array n times; with a string,
// behaves like join(separator); any other argument raises TypeError.
public RubyValue run(RubyValue arg, RubyBlock block) {
    if (arg instanceof RubyFixnum) {
        RubyFixnum times = (RubyFixnum) arg;
        return times(times.toInt());
    } else if (arg instanceof RubyString) {
        return RubyAPI.callOneArgMethod(this, arg, block, RubyID.joinID);
    }
    throw new RubyException(RubyRuntime.TypeErrorClass, "no implicit conversion from " + arg + " to integer");
}
//@RubyLevelMethod(name="sort")
// Non-destructive sort: sorts a copy (optionally with the block as comparator).
public RubyValue sort(RubyBlock block) {
    return copy().sort_danger(block);
}
//@RubyLevelMethod(name="sort!")
// In-place sort: natural ordering without a block, block comparator otherwise.
public RubyValue sort_danger(RubyBlock block) {
    if (null == block) {
        sort();
    } else {
        sort_with_block(block);
    }
    return this;
}
//@RubyLevelMethod(name="[]=")
// Zero-argument []= is always an arity error.
public RubyValue aset() {
    throw new RubyException(RubyRuntime.ArgumentErrorClass, "wrong number of arguments (0 for 2)");
}
//@RubyLevelMethod(name="[]=")
// One-argument []= is always an arity error.
public RubyValue aset(RubyValue arg) {
    throw new RubyException(RubyRuntime.ArgumentErrorClass, "wrong number of arguments (1 for 2)");
}
//@RubyLevelMethod(name="[]=")
// Two-argument []=: a Range key replaces the slice it denotes (endpoints
// normalized via getRealIndex); an integer key sets a single element.
// NOTE(review): the Range branch does not consult isExcludeEnd(), so
// a...b and a..b appear to be treated alike here -- confirm intended.
public RubyValue aset(RubyValue arg0, RubyValue arg1) {
    if (arg0 instanceof RubyRange) {
        RubyRange range = (RubyRange)arg0;
        int left = range.getLeft().toInt();
        int right = range.getRight().toInt();
        int l_index = getRealIndex(size(), left);
        int r_index = getRealIndex(size(), right);
        int length = r_index - l_index + 1;
        return replace(l_index, length, arg1);
    } else {
        return set(arg0.toInt(), arg1);
    }
}
//@RubyLevelMethod(name="[]=")
// Three-argument []= (index, length, value): replaces the slice; any
// other arity raises ArgumentError.
public RubyValue aset(RubyArray args) {
    if (3 == args.size()) {
        int index = args.get(0).toInt();
        int length = args.get(1).toInt();
        return replace(index, length, args.get(2));
    }
    throw new RubyException(RubyRuntime.ArgumentErrorClass, "wrong number of arguments (" + args.size() + " for 2)");
}
//@RubyLevelMethod(name="slice!")
// Destructive slice: removes and returns the addressed part of the array.
// Supported forms: (index), (range), (begin, length). Anything else falls
// through to the "not implemented" error at the bottom.
public RubyValue slice_danger(RubyArray args) {
    if (1 == args.size()) {
        Object argValue = args.get(0);
        if (argValue instanceof RubyFixnum) {
            // slice!(index): remove one element.
            RubyFixnum index = (RubyFixnum) argValue;
            return delete_at(index.toInt());
        } else if (args.get(0) instanceof RubyRange) {
            // slice!(range): normalize negative endpoints, widen inclusive
            // ranges by one, then remove the span.
            int begin = ((RubyFixnum) ((RubyRange) args.get(0)).getLeft()).toInt();
            int end = ((RubyFixnum) ((RubyRange) args.get(0)).getRight()).toInt();
            if (begin < 0) {
                begin = size() + begin;
            }
            if (end < 0) {
                end = size() + end;
            }
            if (!((RubyRange) args.get(0)).isExcludeEnd()) {
                ++end;
            }
            RubyArray resultValue = delete_at(begin, end - begin);
            if ( null == resultValue )
                return RubyConstant.QNIL;
            return resultValue;
        }
    } else if (2 == args.size()) {
        // slice!(begin, length): remove length elements starting at begin.
        Object arg0Value = args.get(0);
        Object arg1Value = args.get(1);
        if (arg0Value instanceof RubyFixnum && arg1Value instanceof RubyFixnum) {
            int begin = ((RubyFixnum) arg0Value).toInt();
            int length = ((RubyFixnum) arg1Value).toInt();
            RubyArray resultValue = delete_at(begin, length);
            if ( null == resultValue )
                return RubyConstant.QNIL;
            return resultValue;
        }
    }
    //TODO
    throw new RubyException("not implemented");
}
//@RubyLevelMethod(name="pack")
// Array#pack: delegates to ArrayPacker with the given format string.
public RubyValue pack(RubyValue arg) {
    String format = ((RubyString) arg).toString();
    return ObjectFactory.createString(ArrayPacker.pack(this, format));
}
}
|
|
/*
* #%L
* Wisdom-Framework
* %%
* Copyright (C) 2013 - 2014 Wisdom Framework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.wisdom.cache.ehcache;
import org.joda.time.Duration;
import org.junit.Ignore;
import org.junit.Test;
import org.wisdom.api.cache.Cache;
import org.wisdom.api.cache.Cached;
import org.wisdom.api.configuration.ApplicationConfiguration;
import org.wisdom.api.http.*;
import org.wisdom.api.interception.RequestContext;
import java.util.TreeMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
/**
 * Checks the cached interceptor ({@code CachedActionInterceptor}): results are
 * stored under the {@code @Cached} key (or the request URI when the key is
 * empty), served from the cache on subsequent calls, and bypassed when the
 * request carries {@code Cache-Control: no-cache}.
 */
public class CachedActionInterceptorTest {

    /**
     * First call stores the result under the explicit key; the second call is
     * answered from the cache.
     */
    @Test
    public void testCaching() throws Exception {
        CachedActionInterceptor interceptor = new CachedActionInterceptor();
        interceptor.cache = mock(Cache.class);
        Cached cached = mock(Cached.class);
        when(cached.duration()).thenReturn(10);
        when(cached.key()).thenReturn("key");
        RequestContext context = mock(RequestContext.class);
        when(context.request()).thenReturn(mock(Request.class));
        Context ctx = mock(Context.class);
        when(context.context()).thenReturn(ctx);
        when(context.context().header(anyString())).thenReturn(null);
        final Result r = Results.ok("Result");
        when(context.proceed()).thenReturn(r);
        Result result = interceptor.call(cached, context);
        assertThat(result.getRenderable().<String>content()).isEqualTo("Result");
        assertThat(result).isEqualTo(r);
        // Check that the result was put in cache.
        verify(interceptor.cache, times(1)).get("key");
        verify(interceptor.cache, times(1)).set("key", r, Duration.standardSeconds(10));
        // Second call: the cache answers, so the interceptor returns the cached result.
        when(interceptor.cache.get("key")).thenReturn(r);
        result = interceptor.call(cached, context);
        assertThat(result).isEqualTo(r);
        verify(interceptor.cache, times(2)).get("key");
    }

    /**
     * With an empty {@code @Cached} key, the request URI is used as the cache key.
     */
    @Test
    public void testCachingWithoutKey() throws Exception {
        CachedActionInterceptor interceptor = new CachedActionInterceptor();
        interceptor.cache = mock(Cache.class);
        Cached cached = mock(Cached.class);
        when(cached.duration()).thenReturn(10);
        when(cached.key()).thenReturn("");
        RequestContext context = mock(RequestContext.class);
        final Request request = mock(Request.class);
        when(request.uri()).thenReturn("/my/url?withquery");
        when(context.request()).thenReturn(request);
        Context ctx = mock(Context.class);
        when(context.context()).thenReturn(ctx);
        when(context.context().header(anyString())).thenReturn(null);
        final Result r = Results.ok("Result");
        when(context.proceed()).thenReturn(r);
        Result result = interceptor.call(cached, context);
        assertThat(result.getRenderable().<String>content()).isEqualTo("Result");
        assertThat(result).isEqualTo(r);
        // Check that the result was put in cache under the URI-derived key.
        verify(interceptor.cache, times(1)).get("/my/url?withquery");
        verify(interceptor.cache, times(1)).set("/my/url?withquery", r, Duration.standardSeconds(10));
        // Fixed: stub the URI-derived key this test actually uses. The previous
        // stub targeted "key" (copy-paste leftover from testCaching) and was
        // never consulted, so the second call was not really served from cache.
        when(interceptor.cache.get("/my/url?withquery")).thenReturn(r);
        result = interceptor.call(cached, context);
        assertThat(result).isEqualTo(r);
        verify(interceptor.cache, times(2)).get("/my/url?withquery");
    }

    /**
     * A {@code Cache-Control: no-cache} header bypasses the cached entry and
     * refreshes it with the newly computed result.
     */
    @Test
    public void testCachingNoCache() throws Exception {
        CachedActionInterceptor interceptor = new CachedActionInterceptor();
        interceptor.cache = new DummyCache();
        Cached cached = mock(Cached.class);
        when(cached.duration()).thenReturn(10);
        when(cached.key()).thenReturn("key");
        RequestContext context = mock(RequestContext.class);
        when(context.request()).thenReturn(mock(Request.class));
        Context ctx = mock(Context.class);
        when(context.context()).thenReturn(ctx);
        when(context.context().header(anyString())).thenReturn(null);
        final Result r = Results.ok("Result");
        when(context.proceed()).thenReturn(r);
        Result result = interceptor.call(cached, context);
        assertThat(result.getRenderable().<String>content()).isEqualTo("Result");
        assertThat(result).isEqualTo(r);
        final Result r2 = Results.ok("Result2");
        when(context.proceed()).thenReturn(r2);
        result = interceptor.call(cached, context);
        // r is cached: return r even if r2 is the new result.
        assertThat(result).isEqualTo(r);
        // The object is cached; let's use NO CACHE to force recomputation.
        when(context.context().header(HeaderNames.CACHE_CONTROL)).thenReturn(HeaderNames.NOCACHE_VALUE);
        result = interceptor.call(cached, context);
        assertThat(result).isNotEqualTo(r).isEqualTo(r2);
        final Result r3 = Results.ok("Result3");
        when(context.proceed()).thenReturn(r3);
        // Remove the cache-control header: the refreshed entry (r2) is served.
        when(context.context().header(HeaderNames.CACHE_CONTROL)).thenReturn(null);
        result = interceptor.call(cached, context);
        assertThat(result).isEqualTo(r2).isNotEqualTo(r3);
    }

    /**
     * Hammers the interceptor from many threads against the real EhCache
     * service, counting failures. Ignored: it does not reproduce the race
     * condition it was written for.
     */
    @Test
    @Ignore("Does not reproduce the race condition")
    public void testPeak() throws InterruptedException {
        ApplicationConfiguration configuration = mock(ApplicationConfiguration.class);
        final EhCacheService svc = new EhCacheService();
        svc.configuration = configuration;
        svc.start();
        final CachedActionInterceptor interceptor = new CachedActionInterceptor();
        interceptor.cache = svc;
        final Cached cached = mock(Cached.class);
        when(cached.duration()).thenReturn(10);
        when(cached.key()).thenReturn("key");
        CountDownLatch startSignal = new CountDownLatch(1);
        final int client = 100;
        final CountDownLatch doneSignal = new CountDownLatch(client);
        ExecutorService executor = Executors.newFixedThreadPool(client);
        // Counts any unexpected result or exception across all workers.
        final AtomicInteger counter = new AtomicInteger();
        for (int i = 1; i < client + 1; ++i) {
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        RequestContext context = mock(RequestContext.class);
                        when(context.request()).thenReturn(mock(Request.class));
                        Context ctx = mock(Context.class);
                        when(context.context()).thenReturn(ctx);
                        when(context.context().header(anyString())).thenReturn(null);
                        final Result r = Results.ok("Result");
                        when(context.proceed()).thenReturn(r);
                        Result result = interceptor.call(cached, context);
                        if (! result.getRenderable().content().equals("Result")) {
                            counter.getAndIncrement();
                        }
                    } catch (Exception e) {
                        counter.getAndIncrement();
                    }
                    doneSignal.countDown();
                }
            });
        }
        startSignal.countDown();
        doneSignal.await(60, TimeUnit.SECONDS);
        assertThat(counter.get()).isEqualTo(0);
        svc.remove("key");
        svc.stop();
    }

    /**
     * Minimal in-memory Cache backed by a TreeMap; expirations are ignored.
     */
    private class DummyCache extends TreeMap<String, Object> implements Cache {
        @Override
        public void set(String key, Object value, int expiration) {
            put(key, value);
        }

        @Override
        public void set(String key, Object value, Duration expiration) {
            put(key, value);
        }

        @Override
        public Object get(String key) {
            return super.get(key);
        }

        @Override
        public boolean remove(String key) {
            return super.remove(key) != null;
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.query.functional;
import static org.apache.geode.cache.Region.SEPARATOR;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.query.CacheUtils;
import org.apache.geode.cache.query.Index;
import org.apache.geode.cache.query.IndexType;
import org.apache.geode.cache.query.Query;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.Address;
import org.apache.geode.cache.query.data.Employee;
import org.apache.geode.cache.query.data.Portfolio;
import org.apache.geode.cache.query.data.Position;
import org.apache.geode.cache.query.data.Quote;
import org.apache.geode.cache.query.data.Restricted;
import org.apache.geode.cache.query.internal.QueryObserverAdapter;
import org.apache.geode.cache.query.internal.QueryObserverHolder;
import org.apache.geode.cache.query.internal.index.IndexManager;
import org.apache.geode.test.junit.categories.OQLIndexTest;
@Category({OQLIndexTest.class})
public class IUMRMultiIndexesMultiRegionJUnitTest {
@Before
public void setUp() throws java.lang.Exception {
    // Fresh cache per test; paired with closeCache() in tearDown.
    CacheUtils.startCache();
}
@After
public void tearDown() throws java.lang.Exception {
    // Releases the cache started in setUp so tests stay isolated.
    CacheUtils.closeCache();
}
// Joins three regions with a single constraint on pf1.status; only
// statusIndexPf1 should be consulted, and results with/without indexes
// must match.
@Test
public void testMultiIteratorsMultiRegion1() throws Exception {
    // r[i][0] = results without indexes, r[i][1] = results with indexes.
    Object[][] r = new Object[4][2];
    QueryService qs;
    qs = CacheUtils.getQueryService();
    Position.resetCounter();
    // Create Regions
    Region r1 = CacheUtils.createRegion("portfolio1", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r1.put(i + "", new Portfolio(i));
    }
    Region r2 = CacheUtils.createRegion("portfolio2", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r2.put(i + "", new Portfolio(i));
    }
    Set add1 = new HashSet();
    add1.add(new Address("411045", "Baner"));
    add1.add(new Address("411001", "DholePatilRd"));
    Region r3 = CacheUtils.createRegion("employees", Employee.class);
    for (int i = 0; i < 4; i++) {
        r3.put(i + "", new Employee("empName", (20 + i), i, "Mr.", (5000 + i), add1));
    }
    String[] queries = {
        // Test case No. IUMR021
        "SELECT DISTINCT * FROM " + SEPARATOR + "portfolio1 pf1, " + SEPARATOR + "portfolio2 pf2, "
            + SEPARATOR + "employees e WHERE pf1.status = 'active'",};
    // Execute Queries without Indexes
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        r[i][0] = q.execute();
    }
    // Create Indexes
    qs.createIndex("statusIndexPf1", IndexType.FUNCTIONAL, "status", SEPARATOR + "portfolio1");
    qs.createIndex("statusIndexPf2", IndexType.FUNCTIONAL, "status", SEPARATOR + "portfolio2");
    // Execute Queries with Indexes
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        QueryObserverImpl observer = new QueryObserverImpl();
        QueryObserverHolder.setInstance(observer);
        r[i][1] = q.execute();
        if (!observer.isIndexesUsed) {
            fail("Index is NOT uesd");
        }
        // Every index the observer saw must be statusIndexPf1.
        for (final Object o : observer.indexesUsed) {
            String indexUsed = o.toString();
            if (!(indexUsed).equals("statusIndexPf1")) {
                fail("<statusIndexPf1> was expected but found " + indexUsed);
            }
        }
        assertThat(observer.indexesUsed.size()).isGreaterThan(0);
    }
    // Indexed and non-indexed result sets must be identical.
    StructSetOrResultsSet ssOrrs = new StructSetOrResultsSet();
    ssOrrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
}
// Joins three regions with constraints on pf1.status and e1.empId;
// both statusIndexPf1 and empIdIndex must be used.
@Test
public void testMultiIteratorsMultiRegion2() throws Exception {
    // r[i][0] = results without indexes, r[i][1] = results with indexes.
    Object[][] r = new Object[4][2];
    QueryService qs = CacheUtils.getQueryService();
    Position.resetCounter();
    // Create Regions
    Region r1 = CacheUtils.createRegion("portfolio1", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r1.put(i + "", new Portfolio(i));
    }
    Region r2 = CacheUtils.createRegion("portfolio2", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r2.put(i + "", new Portfolio(i));
    }
    Set add1 = new HashSet();
    add1.add(new Address("411045", "Baner"));
    add1.add(new Address("411001", "DholePatilRd"));
    Region r3 = CacheUtils.createRegion("employees", Employee.class);
    for (int i = 0; i < 4; i++) {
        r3.put(i + "", new Employee("empName", (20 + i), i, "Mr.", (5000 + i), add1));
    }
    String[] queries = {
        // Test case No. IUMR022
        // Both the Indexes Must get used. Presently only one Index is being used.
        "SELECT DISTINCT * FROM " + SEPARATOR + "portfolio1 pf1, " + SEPARATOR + "portfolio2 pf2, "
            + SEPARATOR + "employees e1 WHERE pf1.status = 'active' AND e1.empId < 10"};
    // Execute Queries without Indexes
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        r[i][0] = q.execute();
    }
    // Create Indexes & Execute the queries
    qs.createIndex("statusIndexPf1", IndexType.FUNCTIONAL, "pf1.status",
        SEPARATOR + "portfolio1 pf1");
    qs.createIndex("empIdIndex", IndexType.FUNCTIONAL, "e.empId", SEPARATOR + "employees e");
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        QueryObserverImpl observer = new QueryObserverImpl();
        QueryObserverHolder.setInstance(observer);
        r[i][1] = q.execute();
        if (!observer.isIndexesUsed) {
            fail("Index is NOT uesd");
        }
        int indxs = observer.indexesUsed.size();
        if (indxs != 2) {
            fail("Both the idexes are not getting used.Only " + indxs + " index is getting used");
        }
        // NOTE(review): this loop breaks on the first recognized index, so
        // only one element is really checked; and the fail() message calls
        // itr.next() again, consuming an extra element (possibly throwing
        // NoSuchElementException) instead of reporting temp.
        Iterator itr = observer.indexesUsed.iterator();
        String temp;
        while (itr.hasNext()) {
            temp = itr.next().toString();
            if (temp.equals("statusIndexPf1")) {
                break;
            } else if (temp.equals("empIdIndex")) {
                break;
            } else {
                fail("indices used do not match with those which are expected to be used"
                    + "<statusIndexPf1> and <empIdIndex> were expected but found " + itr.next());
            }
        }
    }
    // Indexed and non-indexed result sets must be identical.
    StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
    ssORrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
}
// Queries over nested collections (positions, collectionHolderMap) of two
// portfolio regions; both secIdIndexPf1 and secIdIndexPf2 must be used.
@Test
public void testMultiIteratorsMultiRegion3() throws Exception {
    // r[i][0] = results without indexes, r[i][1] = results with indexes.
    Object[][] r = new Object[9][2];
    QueryService qs = CacheUtils.getQueryService();
    Position.resetCounter();
    // Create Regions
    Region r1 = CacheUtils.createRegion("portfolio1", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r1.put(i + "", new Portfolio(i));
    }
    Region r2 = CacheUtils.createRegion("portfolio2", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r2.put(i + "", new Portfolio(i));
    }
    String[] queries = {
        // Test Case No. IUMR004
        "SELECT DISTINCT * FROM " + SEPARATOR + "portfolio1 pf1, " + SEPARATOR
            + "portfolio2 pf2, pf1.positions.values posit1,"
            + " pf2.positions.values posit2 WHERE posit1.secId='IBM' AND posit2.secId='IBM'",
        // Test Case No.IUMR023
        "SELECT DISTINCT * FROM " + SEPARATOR + "portfolio1 pf1," + SEPARATOR
            + "portfolio2 pf2, pf1.positions.values posit1,"
            + " pf2.positions.values posit2 WHERE posit1.secId='IBM' OR posit2.secId='IBM'",
        "SELECT DISTINCT * FROM " + SEPARATOR
            + "portfolio1 pf1, pf1.collectionHolderMap.values coll1,"
            + " pf1.positions.values posit1, " + SEPARATOR
            + "portfolio2 pf2, pf2.collectionHolderMap.values coll2, pf2.positions.values posit2 "
            + " WHERE posit1.secId='IBM' AND posit2.secId='IBM'",
        // Test Case No. IUMR005
        "SELECT DISTINCT * FROM " + SEPARATOR + "portfolio1 pf1," + SEPARATOR
            + "portfolio2 pf2, pf1.positions.values posit1, pf2.positions.values posit2,"
            + " pf1.collectionHolderMap.values coll1,pf2.collectionHolderMap.values coll2 "
            + " WHERE posit1.secId='IBM' OR posit2.secId='IBM'",
        // Test Case No. IUMR006
        "SELECT DISTINCT coll1 as collHldrMap1 , coll2 as CollHldrMap2 FROM " + SEPARATOR
            + "portfolio1 pf1, " + SEPARATOR
            + "portfolio2 pf2, pf1.positions.values posit1, pf2.positions.values posit2,"
            + " pf1.collectionHolderMap.values coll1,pf2.collectionHolderMap.values coll2 "
            + " WHERE posit1.secId='IBM' OR posit2.secId='IBM'",};
    // Execute Queries Without Indexes
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        r[i][0] = q.execute();
    }
    // Create Indexes and Execute the Queries
    qs.createIndex("secIdIndexPf1", IndexType.FUNCTIONAL, "pos11.secId",
        SEPARATOR
            + "portfolio1 pf1, pf1.collectionHolderMap.values coll1, pf1.positions.values pos11");
    qs.createIndex("secIdIndexPf2", IndexType.FUNCTIONAL, "pos22.secId",
        SEPARATOR
            + "portfolio2 pf2, pf2.collectionHolderMap.values coll2, pf2.positions.values pos22");
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        QueryObserverImpl observer = new QueryObserverImpl();
        QueryObserverHolder.setInstance(observer);
        r[i][1] = q.execute();
        if (!observer.isIndexesUsed) {
            fail("Index is NOT uesd");
        }
        int indxs = observer.indexesUsed.size();
        if (indxs != 2) {
            fail("Both the idexes are not getting used.Only " + indxs + " index is getting used");
        }
        // NOTE(review): breaks on the first recognized index, so only one
        // element is really checked; the fail() message also consumes an
        // extra element via itr.next() instead of reporting temp.
        Iterator itr = observer.indexesUsed.iterator();
        String temp;
        while (itr.hasNext()) {
            temp = itr.next().toString();
            if (temp.equals("secIdIndexPf1")) {
                break;
            } else if (temp.equals("secIdIndexPf2")) {
                break;
            } else {
                fail("indices used do not match with those which are expected to be used"
                    + "<secIdIndexPf1> and <secIdIndexPf2> were expected but found " + itr.next());
            }
        }
    }
    // Verify the Query Results
    StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
    ssORrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
}
// Joins two portfolio regions with constraints on posit1.secId and
// pf2.status; both secIdIndexPf1 and statusIndexPf2 must be used, and
// results with/without indexes must match.
@Test
public void testMultiIteratorsMultiRegion4() throws Exception {
    // r[i][0] = results without indexes, r[i][1] = results with indexes.
    Object[][] r = new Object[4][2];
    QueryService qs = CacheUtils.getQueryService();
    Position.resetCounter();
    // Create Regions
    Region r1 = CacheUtils.createRegion("portfolio1", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r1.put(i + "", new Portfolio(i));
    }
    Region r2 = CacheUtils.createRegion("portfolio2", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r2.put(i + "", new Portfolio(i));
    }
    String[] queries = {
        // Test case No. IUMR024
        // Both the Indexes Must get used. Presently only one Index is being used.
        "SELECT DISTINCT * FROM " + SEPARATOR + "portfolio1 pf1, pf1.positions.values posit1, "
            + SEPARATOR
            + "portfolio2 pf2, pf2.positions.values posit2 WHERE pf2.status='active' AND posit1.secId='IBM'"};
    // Execute Queries without Indexes
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        r[i][0] = q.execute();
    }
    // Create Indexes
    qs.createIndex("secIdIndexPf1", IndexType.FUNCTIONAL, "pos11.secId",
        SEPARATOR + "portfolio1 pf1, pf1.positions.values pos11");
    qs.createIndex("statusIndexPf2", IndexType.FUNCTIONAL, "pf2.status",
        SEPARATOR + "portfolio2 pf2");
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        QueryObserverImpl observer = new QueryObserverImpl();
        QueryObserverHolder.setInstance(observer);
        r[i][1] = q.execute();
        if (!observer.isIndexesUsed) {
            fail("Index is NOT uesd");
        }
        int indxs = observer.indexesUsed.size();
        if (indxs != 2) {
            fail("Both the idexes are not getting used.Only " + indxs + " index is getting used");
        }
        Iterator itr = observer.indexesUsed.iterator();
        String temp;
        while (itr.hasNext()) {
            temp = itr.next().toString();
            if (temp.equals("secIdIndexPf1")) {
                break;
            } else if (temp.equals("statusIndexPf2")) {
                break;
            } else {
                // Fixed: report the index actually observed (temp). The old
                // message consumed another element via itr.next() -- which
                // could throw NoSuchElementException and mask the failure --
                // and named <statusIndexPf1>/<statusIndexPf2>, although the
                // indexes created above are secIdIndexPf1 and statusIndexPf2.
                fail("indices used do not match with those which are expected to be used"
                    + "<secIdIndexPf1> and <statusIndexPf2> were expected but found " + temp);
            }
        }
    }
    // Verify the Query Results
    StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
    ssORrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
}
// Joins three regions with constraints on pf1.status, pf2.status and
// e1.empId; all three indexes must be used.
@Test
public void testMultiIteratorsMultiRegion5() throws Exception {
    // r[i][0] = results without indexes, r[i][1] = results with indexes.
    Object[][] r = new Object[4][2];
    QueryService qs = CacheUtils.getQueryService();
    Position.resetCounter();
    // Create Regions
    Region r1 = CacheUtils.createRegion("portfolio1", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r1.put(i + "", new Portfolio(i));
    }
    Region r2 = CacheUtils.createRegion("portfolio2", Portfolio.class);
    for (int i = 0; i < 4; i++) {
        r2.put(i + "", new Portfolio(i));
    }
    Set add1 = new HashSet();
    add1.add(new Address("411045", "Baner"));
    add1.add(new Address("411001", "DholePatilRd"));
    Region r3 = CacheUtils.createRegion("employees", Employee.class);
    for (int i = 0; i < 4; i++) {
        r3.put(i + "", new Employee("empName", (20 + i), i, "Mr.", (5000 + i), add1));
    }
    String[] queries = {
        // Test case IUMR025
        // Three of the indexes must get used.. Presently only one Index is being used.
        "SELECT DISTINCT * FROM " + SEPARATOR + "portfolio1 pf1, " + SEPARATOR + "portfolio2 pf2, "
            + SEPARATOR
            + "employees e1 WHERE pf1.status = 'active' AND pf2.status = 'active' AND e1.empId < 10"};
    // Execute Queries without Indexes
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        QueryObserverImpl observer = new QueryObserverImpl();
        QueryObserverHolder.setInstance(observer);
        r[i][0] = q.execute();
    }
    // Create Indexes and Execute the Queries
    qs.createIndex("statusIndexPf1", IndexType.FUNCTIONAL, "pf1.status",
        SEPARATOR + "portfolio1 pf1");
    qs.createIndex("statusIndexPf2", IndexType.FUNCTIONAL, "pf2.status",
        SEPARATOR + "portfolio2 pf2");
    qs.createIndex("empIdIndex", IndexType.FUNCTIONAL, "empId", SEPARATOR + "employees");
    for (int i = 0; i < queries.length; i++) {
        Query q = CacheUtils.getQueryService().newQuery(queries[i]);
        QueryObserverImpl observer = new QueryObserverImpl();
        QueryObserverHolder.setInstance(observer);
        r[i][1] = q.execute();
        if (!observer.isIndexesUsed) {
            fail("Index is NOT uesd");
        }
        int indxs = observer.indexesUsed.size();
        if (indxs != 3) {
            fail("Three of the idexes are not getting used. Only " + indxs + " index is getting used");
        }
        // NOTE(review): breaks on the first recognized index, so only one
        // element is really checked; the fail() message also consumes an
        // extra element via itr.next() instead of reporting temp.
        Iterator itr = observer.indexesUsed.iterator();
        String temp;
        while (itr.hasNext()) {
            temp = itr.next().toString();
            if (temp.equals("statusIndexPf1")) {
                break;
            } else if (temp.equals("statusIndexPf2")) {
                break;
            } else if (temp.equals("empIdIndex")) {
                break;
            } else {
                fail("indices used do not match with those which are expected to be used"
                    + "<statusIndexPf1>, <statusIndexPf2> and <empIdIndex> were expected but found "
                    + itr.next());
            }
        }
    }
    // Verify the Query Results
    StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
    ssORrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
}
@Test
public void testMultiIteratorsMultiRegion6() throws Exception {
Object[][] r = new Object[4][2];
QueryService qs = CacheUtils.getQueryService();
Position.resetCounter();
// Create Regions
Region r1 = CacheUtils.createRegion("portfolio1", Portfolio.class);
for (int i = 0; i < 4; i++) {
r1.put(i + "", new Portfolio(i));
}
Region r2 = CacheUtils.createRegion("portfolio2", Portfolio.class);
for (int i = 0; i < 4; i++) {
r2.put(i + "", new Portfolio(i));
}
String[] queries = {
// Both the Indexes Must get used. Presently only one Index is being used.
" SELECT DISTINCT * FROM " + SEPARATOR + "portfolio1 pf1, " + SEPARATOR
+ "portfolio2 pf2, pf1.positions.values posit1,"
+ " pf2.positions.values posit2 WHERE posit1.secId='IBM' AND posit2.secId='IBM' ",
" SELECT DISTINCT * FROM " + SEPARATOR
+ "portfolio1 pf1, pf1.collectionHolderMap.values coll1,"
+ " pf1.positions.values posit1, " + SEPARATOR
+ "portfolio2 pf2, pf2.collectionHolderMap.values coll2, pf2.positions.values posit2 "
+ " WHERE posit1.secId='IBM' AND posit2.secId='IBM'",};
// Execute Queries Without Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
r[i][0] = q.execute();
}
// Create Indexes and Execute the Queries
qs.createIndex("secIdIndexPf1", IndexType.FUNCTIONAL, "pos11.secId",
SEPARATOR + "portfolio1 pf1, pf1.positions.values pos11");
qs.createIndex("secIdIndexPf2", IndexType.FUNCTIONAL, "pos22.secId",
SEPARATOR + "portfolio2 pf2, pf2.positions.values pos22");
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
QueryObserverImpl observer = new QueryObserverImpl();
QueryObserverHolder.setInstance(observer);
r[i][1] = q.execute();
if (!observer.isIndexesUsed) {
fail("Index is NOT uesd");
}
int indxs = observer.indexesUsed.size();
if (indxs != 2) {
fail("Both the idexes are not getting used.Only " + indxs + " index is getting used");
}
Iterator itr = observer.indexesUsed.iterator();
String temp;
while (itr.hasNext()) {
temp = itr.next().toString();
if (temp.equals("secIdIndexPf1")) {
break;
} else if (temp.equals("secIdIndexPf2")) {
break;
} else {
fail("indices used do not match with those which are expected to be used"
+ "<secIdIndexPf1> and <secIdIndexPf2> were expected but found " + itr.next());
}
}
}
// Verify the Query Results
StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
ssORrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
}
@Test
public void testMultiIteratorsMultiRegion7() throws Exception {
Object[][] r = new Object[4][2];
QueryService qs = CacheUtils.getQueryService();
Position.resetCounter();
// Create Regions
Region r1 = CacheUtils.createRegion("portfolio1", Portfolio.class);
for (int i = 0; i < 4; i++) {
r1.put(i + "", new Portfolio(i));
}
Region r2 = CacheUtils.createRegion("portfolio2", Portfolio.class);
for (int i = 0; i < 4; i++) {
r2.put(i + "", new Portfolio(i));
}
String[] queries = {
// Task IUMR007
"SELECT DISTINCT coll1 as collHldrMap1 , coll2 as CollHldrMap2 FROM " + SEPARATOR
+ "portfolio1 pf1, " + SEPARATOR
+ "portfolio2 pf2, pf1.positions.values posit1,pf2.positions.values posit2,"
+ "pf1.collectionHolderMap.values coll1, pf2.collectionHolderMap.values coll2 "
+ "WHERE posit1.secId='IBM' OR posit2.secId='IBM'",};
// Execute Queries Without Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
r[i][0] = q.execute();
}
// Create Indexes and Execute the Queries
qs.createIndex("secIdIndexPf1", IndexType.FUNCTIONAL, "pos11.secId",
SEPARATOR + "portfolio1 pf1, pf1.positions.values pos11");
qs.createIndex("secIdIndexPf2", IndexType.FUNCTIONAL, "pos22.secId",
SEPARATOR
+ "portfolio2 pf2, pf2.collectionHolderMap.values coll2, pf2.positions.values pos22");
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
QueryObserverImpl observer = new QueryObserverImpl();
QueryObserverHolder.setInstance(observer);
r[i][1] = q.execute();
if (!observer.isIndexesUsed) {
fail("Index is NOT uesd");
}
int indxs = observer.indexesUsed.size();
if (indxs != 2) {
fail("Both the idexes are not getting used.Only " + indxs + " index is getting used");
}
Iterator itr = observer.indexesUsed.iterator();
String temp;
while (itr.hasNext()) {
temp = itr.next().toString();
if (temp.equals("secIdIndexPf1")) {
break;
} else if (temp.equals("secIdIndexPf2")) {
break;
} else {
fail("indices used do not match with those which are expected to be used"
+ "<secIdIndexPf1> and <secIdIndexPf2> were expected but found " + itr.next());
}
}
}
// Verify the Query Results
StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
ssORrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
}
@Test
public void testMultiIteratorsMultiRegion8() throws Exception {
Object[][] r = new Object[4][2];
QueryService qs = CacheUtils.getQueryService();
Position.resetCounter();
// Create Regions
Region r1 = CacheUtils.createRegion("portfolio1", Portfolio.class);
for (int i = 0; i < 4; i++) {
r1.put(i + "", new Portfolio(i));
}
Region r2 = CacheUtils.createRegion("portfolio2", Portfolio.class);
for (int i = 0; i < 4; i++) {
r2.put(i + "", new Portfolio(i));
}
Set add1 = new HashSet();
add1.add(new Address("411045", "Baner"));
add1.add(new Address("411001", "DholePatilRd"));
Region r3 = CacheUtils.createRegion("employees", Employee.class);
for (int i = 0; i < 4; i++) {
r3.put(i + "", new Employee("empName", (20 + i), i, "Mr.", (5000 + i), add1));
}
String[] queries = {
"SELECT DISTINCT * FROM " + SEPARATOR + "portfolio1 pf1, pf1.positions.values posit1, "
+ SEPARATOR + "portfolio2 pf2, " + SEPARATOR + "employees e WHERE posit1.secId='IBM'"};
// Execute Queries without Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
r[i][0] = q.execute();
}
// Create Indexes
qs.createIndex("statusIndexPf1", IndexType.FUNCTIONAL, "status", SEPARATOR + "portfolio1");
qs.createIndex("secIdIndexPf1", IndexType.FUNCTIONAL, "posit1.secId",
SEPARATOR + "portfolio1 pf1, pf1.positions.values posit1");
// Execute Queries with Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
QueryObserverImpl observer = new QueryObserverImpl();
QueryObserverHolder.setInstance(observer);
r[i][1] = q.execute();
if (!observer.isIndexesUsed) {
fail("Index is NOT uesd");
}
Iterator itr = observer.indexesUsed.iterator();
assertEquals("secIdIndexPf1", itr.next().toString());
assertThat(observer.indexesUsed.size()).isGreaterThan(0);
}
StructSetOrResultsSet ssOrrs = new StructSetOrResultsSet();
ssOrrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
}
@Test
public void testBasicCompositeIndexUsage() throws Exception {
try {
IndexManager.TEST_RANGEINDEX_ONLY = true;
QueryService qs = CacheUtils.getQueryService();
Position.resetCounter();
// Create Regions
Region r1 = CacheUtils.createRegion("portfolio", Portfolio.class);
for (int i = 0; i < 1000; i++) {
r1.put(i + "", new Portfolio(i));
}
Set add1 = new HashSet();
add1.add(new Address("411045", "Baner"));
add1.add(new Address("411001", "DholePatilRd"));
Region r2 = CacheUtils.createRegion("employee", Employee.class);
for (int i = 0; i < 1000; i++) {
r2.put(i + "", new Employee("empName", (20 + i), i /* empId */, "Mr.", (5000 + i), add1));
}
String[][] queriesWithResCount = {
// Test case No. IUMR021
{"SELECT DISTINCT * FROM " + SEPARATOR + "portfolio pf, " + SEPARATOR
+ "employee emp WHERE pf.ID = emp.empId",
1000 + ""},
{"SELECT * FROM " + SEPARATOR + "portfolio pf, " + SEPARATOR
+ "employee emp WHERE pf.ID = emp.empId", "" + 1000},
{"SELECT pf.status, emp.empId, pf.getType() FROM " + SEPARATOR + "portfolio pf, "
+ SEPARATOR + "employee emp WHERE pf.ID = emp.empId",
"" + 1000},
/*
* Following query returns more (999001) than expected (1000) results as pf.ID > 0
* conditions is evaluated first for all Portfolio and Employee objects (999 * 1000) and
* then other condition with AND is executed for pf.ID = 0 and pf.status = ''active' and
* pf.ID = emp.ID. So total results are 999001.
*
* For expected results correct parenthesis must be used as in next query.
*
*/
{"SELECT pf.status, emp.empId, pf.getType() FROM " + SEPARATOR + "portfolio pf, "
+ SEPARATOR
+ "employee emp WHERE pf.ID = emp.empId AND pf.status='active' OR pf.ID > 0",
"" + 999001},
{"SELECT * FROM " + SEPARATOR + "portfolio pf, " + SEPARATOR
+ "employee emp WHERE pf.ID = emp.empId AND (pf.status='active' OR pf.ID > 499)",
"" + 750},
{"SELECT pf.status, emp.empId, pf.getType() FROM " + SEPARATOR + "portfolio pf, "
+ SEPARATOR
+ "employee emp WHERE pf.ID = emp.empId AND (pf.status='active' OR pf.ID > 499)",
"" + 750},};
String[] queries = new String[queriesWithResCount.length];
Object[][] r = new Object[queries.length][2];
// Execute Queries without Indexes
for (int i = 0; i < queries.length; i++) {
queries[i] = queriesWithResCount[i][0];
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
r[i][0] = q.execute();
assertTrue(r[i][0] instanceof SelectResults);
assertEquals(Integer.parseInt(queriesWithResCount[i][1]), ((SelectResults) r[i][0]).size());
}
// Create Indexes
qs.createIndex("idIndexPf", IndexType.FUNCTIONAL, "ID", SEPARATOR + "portfolio");
qs.createIndex("statusIndexPf", IndexType.FUNCTIONAL, "status", SEPARATOR + "portfolio");
qs.createIndex("empIdIndexPf2", IndexType.FUNCTIONAL, "empId", SEPARATOR + "employee");
// Execute Queries with Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
QueryObserverImpl observer = new QueryObserverImpl();
QueryObserverHolder.setInstance(observer);
r[i][1] = q.execute();
assertTrue(r[i][0] instanceof SelectResults);
assertEquals(Integer.parseInt(queriesWithResCount[i][1]), ((SelectResults) r[i][0]).size());
if (!observer.isIndexesUsed && i != 3 /* For join query without parenthesis */) {
fail("Index is NOT used for query" + queries[i]);
}
for (final Object o : observer.indexesUsed) {
String temp = o.toString();
if (!(temp.equals("idIndexPf") || temp.equals("empIdIndexPf2")
|| temp.equals("statusIndexPf"))) {
fail("<idIndexPf> or <empIdIndexPf2> was expected but found " + temp);
}
}
if (i != 3 /* For join query without parenthesis */) {
int indxs = observer.indexesUsed.size();
assertTrue("Indexes used is not of size >= 2", indxs >= 2);
}
}
StructSetOrResultsSet ssOrrs = new StructSetOrResultsSet();
ssOrrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
} finally {
IndexManager.TEST_RANGEINDEX_ONLY = false;
}
}
@Test
public void testBasicCompositeIndexUsageWithOneIndexExpansionAndTruncation() throws Exception {
try {
IndexManager.TEST_RANGEINDEX_ONLY = true;
Object[][] r = new Object[1][2];
QueryService qs = CacheUtils.getQueryService();
Position.resetCounter();
// Create Regions
Region r1 = CacheUtils.createRegion("portfolio", Portfolio.class);
for (int i = 0; i < 1000; i++) {
r1.put(i + "", new Portfolio(i));
}
Set add1 = new HashSet();
add1.add(new Address("411045", "Baner"));
add1.add(new Address("411001", "DholePatilRd"));
Region r2 = CacheUtils.createRegion("employee", Employee.class);
for (int i = 0; i < 1000; i++) {
r2.put(i + "", new Employee("empName", (20 + i), i, "Mr.", (5000 + i), add1));
}
String[] queries = {
// Test case No. IUMR021
"SELECT DISTINCT * FROM " + SEPARATOR + "portfolio pf, pf.positions pos, " + SEPARATOR
+ "employee emp WHERE pf.iD = emp.empId",};
// Execute Queries without Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
r[i][0] = q.execute();
}
// Create Indexes
qs.createIndex("idIndexPf", IndexType.FUNCTIONAL, "iD",
SEPARATOR + "portfolio pf , pf.collectionHolderMap");
qs.createIndex("empIdIndexPf2", IndexType.FUNCTIONAL, "empId", SEPARATOR + "employee");
// Execute Queries with Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
QueryObserverImpl observer = new QueryObserverImpl();
QueryObserverHolder.setInstance(observer);
r[i][1] = q.execute();
if (!observer.isIndexesUsed) {
fail("Index is NOT uesd");
}
for (final Object o : observer.indexesUsed) {
String temp = o.toString();
if (!(temp.equals("idIndexPf") || temp.equals("empIdIndexPf2"))) {
fail("<idIndexPf> or <empIdIndexPf2> was expected but found " + temp);
}
}
int indxs = observer.indexesUsed.size();
assertTrue("Indexes used is not of size = 2", indxs == 2);
}
StructSetOrResultsSet ssOrrs = new StructSetOrResultsSet();
ssOrrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
} finally {
IndexManager.TEST_RANGEINDEX_ONLY = false;
}
}
@Test
public void testBasicCompositeIndexUsageWithMultipleIndexes() throws Exception {
try {
IndexManager.TEST_RANGEINDEX_ONLY = true;
Object[][] r = new Object[1][2];
QueryService qs = CacheUtils.getQueryService();
Position.resetCounter();
// Create Regions
Region r1 = CacheUtils.createRegion("portfolio", Portfolio.class);
for (int i = 0; i < 1000; i++) {
r1.put(i + "", new Portfolio(i));
}
Set add1 = new HashSet();
add1.add(new Address("411045", "Baner"));
add1.add(new Address("411001", "DholePatilRd"));
Region r2 = CacheUtils.createRegion("employee", Employee.class);
for (int i = 0; i < 1000; i++) {
r2.put(i + "", new Employee("empName", (20 + i), i, "Mr.", (5000 + i), add1));
}
String[] queries = {
// Test case No. IUMR021
"SELECT DISTINCT * FROM " + SEPARATOR + "portfolio pf, pf.positions pos, " + SEPARATOR
+ "employee emp WHERE pf.iD = emp.empId and pf.status='active' and emp.age > 900",};
// Execute Queries without Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
r[i][0] = q.execute();
}
// Create Indexes
qs.createIndex("idIndexPf", IndexType.FUNCTIONAL, "iD",
SEPARATOR + "portfolio pf , pf.collectionHolderMap");
qs.createIndex("empIdIndexPf2", IndexType.FUNCTIONAL, "empId", SEPARATOR + "employee");
qs.createIndex("ageIndexemp", IndexType.FUNCTIONAL, "age", SEPARATOR + "employee emp ");
// Execute Queries with Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
QueryObserverImpl observer = new QueryObserverImpl();
QueryObserverHolder.setInstance(observer);
r[i][1] = q.execute();
if (!observer.isIndexesUsed) {
fail("Index is NOT uesd");
}
for (final Object o : observer.indexesUsed) {
String temp = o.toString();
if (!(temp.equals("ageIndexemp") || temp.equals("idIndexPf")
|| temp.equals("empIdIndexPf2") || temp.equals("statusIndexPf2"))) {
fail("<idIndexPf> or <empIdIndexPf2> was expected but found " + temp);
}
}
int indxs = observer.indexesUsed.size();
assertTrue("Indexes used is not of size = 3", indxs == 3);
}
StructSetOrResultsSet ssOrrs = new StructSetOrResultsSet();
ssOrrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
} finally {
IndexManager.TEST_RANGEINDEX_ONLY = false;
}
}
@Test
public void testAssertionBug() throws Exception {
try {
IndexManager.TEST_RANGEINDEX_ONLY = true;
Region region1 = CacheUtils.createRegion("Quotes1", Quote.class);
Region region2 = CacheUtils.createRegion("Quotes2", Quote.class);
Region region3 = CacheUtils.createRegion("Restricted1", Restricted.class);
for (int i = 0; i < 10; i++) {
region1.put(i, new Quote(i));
region2.put(i, new Quote(i));
region3.put(i, new Restricted(i));
}
QueryService qs = CacheUtils.getQueryService();
////////// creating indexes on region Quotes1
qs.createIndex("Quotes1Region-quoteIdStrIndex", IndexType.PRIMARY_KEY, "q.quoteIdStr",
SEPARATOR + "Quotes1 q");
qs.createIndex("Quotes1Region-quoteTypeIndex", IndexType.FUNCTIONAL, "q.quoteType",
SEPARATOR + "Quotes1 q, q.restrict r");
qs.createIndex("Quotes1Region-dealerPortfolioIndex", IndexType.FUNCTIONAL,
"q.dealerPortfolio", SEPARATOR + "Quotes1 q, q.restrict r");
qs.createIndex("Quotes1Region-channelNameIndex", IndexType.FUNCTIONAL, "q.channelName",
SEPARATOR + "Quotes1 q, q.restrict r");
qs.createIndex("Quotes1Region-priceTypeIndex", IndexType.FUNCTIONAL, "q.priceType",
SEPARATOR + "Quotes1 q, q.restrict r");
qs.createIndex("Quotes1Region-lowerQtyIndex", IndexType.FUNCTIONAL, "q.lowerQty",
SEPARATOR + "Quotes1 q, q.restrict r");
qs.createIndex("Quotes1Region-upperQtyIndex", IndexType.FUNCTIONAL, "q.upperQty",
SEPARATOR + "Quotes1 q, q.restrict r");
qs.createIndex("Quotes1Restricted-quoteTypeIndex", IndexType.FUNCTIONAL, "r.quoteType",
SEPARATOR + "Quotes1 q, q.restrict r");
qs.createIndex("Quotes1Restricted-minQtyIndex", IndexType.FUNCTIONAL, "r.minQty",
SEPARATOR + "Quotes1 q, q.restrict r");
qs.createIndex("Quotes1Restricted-maxQtyIndex", IndexType.FUNCTIONAL, "r.maxQty",
SEPARATOR + "Quotes1 q, q.restrict r");
////////// creating indexes on region Quotes2
qs.createIndex("Quotes2Region-quoteIdStrIndex", IndexType.PRIMARY_KEY, "q.quoteIdStr",
SEPARATOR + "Quotes2 q");
qs.createIndex("Quotes2Region-quoteTypeIndex", IndexType.FUNCTIONAL, "q.quoteType",
SEPARATOR + "Quotes2 q, q.restrict r");
qs.createIndex("Quotes2Region-dealerPortfolioIndex", IndexType.FUNCTIONAL,
"q.dealerPortfolio", SEPARATOR + "Quotes2 q, q.restrict r");
qs.createIndex("Quotes2Region-channelNameIndex", IndexType.FUNCTIONAL, "q.channelName",
SEPARATOR + "Quotes2 q, q.restrict r");
qs.createIndex("Quotes2Region-priceTypeIndex", IndexType.FUNCTIONAL, "q.priceType",
SEPARATOR + "Quotes2 q, q.restrict r");
qs.createIndex("Quotes2Region-lowerQtyIndex", IndexType.FUNCTIONAL, "q.lowerQty",
SEPARATOR + "Quotes2 q, q.restrict r");
qs.createIndex("Quotes2Region-upperQtyIndex", IndexType.FUNCTIONAL, "q.upperQty",
SEPARATOR + "Quotes2 q, q.restrict r");
qs.createIndex("Quotes2Restricted-quoteTypeIndex", IndexType.FUNCTIONAL, "r.quoteType",
SEPARATOR + "Quotes2 q, q.restrict r");
qs.createIndex("Quotes2Restricted-minQtyIndex", IndexType.FUNCTIONAL, "r.minQty",
SEPARATOR + "Quotes2 q, q.restrict r");
qs.createIndex("Quotes2Restricted-maxQtyIndex", IndexType.FUNCTIONAL, "r.maxQty",
SEPARATOR + "Quotes2 q, q.restrict r");
////////// creating indexes on region Restricted1
qs.createIndex("RestrictedRegion-quoteTypeIndex", IndexType.FUNCTIONAL, "r.quoteType",
SEPARATOR + "Restricted1 r");
qs.createIndex("RestrictedRegion-minQtyIndex", IndexType.FUNCTIONAL, "r.minQty",
SEPARATOR + "Restricted1 r");
qs.createIndex("RestrictedRegion-maxQtyIndex-1", IndexType.FUNCTIONAL, "r.maxQty",
SEPARATOR + "Restricted1 r");
Query q = qs.newQuery(
"SELECT DISTINCT q.cusip, q.quoteType, q.dealerPortfolio, q.channelName, q.dealerCode, q.priceType, q.price, q.lowerQty, q.upperQty, q.ytm, r.minQty, r.maxQty, r.incQty FROM "
+ SEPARATOR + "Quotes1 q, " + SEPARATOR
+ "Restricted1 r WHERE q.cusip = r.cusip AND q.quoteType = r.quoteType");
q.execute();
} finally {
IndexManager.TEST_RANGEINDEX_ONLY = false;
}
}
@Test
public void testBasicCompositeIndexUsageInAllGroupJunction() throws Exception {
try {
IndexManager.TEST_RANGEINDEX_ONLY = true;
Object[][] r = new Object[1][2];
QueryService qs = CacheUtils.getQueryService();
Position.resetCounter();
// Create Regions
Region r1 = CacheUtils.createRegion("portfolio", Portfolio.class);
for (int i = 0; i < 100; i++) {
r1.put(i + "", new Portfolio(i));
}
Region r3 = CacheUtils.createRegion("portfolio3", Portfolio.class);
for (int i = 0; i < 10; i++) {
r3.put(i + "", new Portfolio(i));
}
Set add1 = new HashSet();
add1.add(new Address("411045", "Baner"));
add1.add(new Address("411001", "DholePatilRd"));
Region r2 = CacheUtils.createRegion("employee", Employee.class);
for (int i = 0; i < 100; i++) {
r2.put(i + "", new Employee("empName", (20 + i), i, "Mr.", (5000 + i), add1));
}
String[] queries = {
// Test case No. IUMR021
"SELECT DISTINCT * FROM " + SEPARATOR + "portfolio pf, pf.positions pos, " + SEPARATOR
+ "portfolio3 pf3, " + SEPARATOR
+ "employee emp WHERE pf.iD = emp.empId and pf.status='active' and emp.age > 50 and pf3.status='active'",};
// Execute Queries without Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
r[i][0] = q.execute();
}
// Create Indexes
qs.createIndex("idIndexPf", IndexType.FUNCTIONAL, "iD",
SEPARATOR + "portfolio pf , pf.collectionHolderMap");
qs.createIndex("empIdIndexPf2", IndexType.FUNCTIONAL, "empId", SEPARATOR + "employee");
qs.createIndex("statusIndexPf3", IndexType.FUNCTIONAL, "status",
SEPARATOR + "portfolio3 pf3 ");
qs.createIndex("ageIndexemp", IndexType.FUNCTIONAL, "age", SEPARATOR + "employee emp ");
// Execute Queries with Indexes
for (int i = 0; i < queries.length; i++) {
Query q = CacheUtils.getQueryService().newQuery(queries[i]);
QueryObserverImpl observer = new QueryObserverImpl();
QueryObserverHolder.setInstance(observer);
r[i][1] = q.execute();
if (!observer.isIndexesUsed) {
fail("Index is NOT uesd");
}
for (final Object o : observer.indexesUsed) {
String temp = o.toString();
if (!(temp.equals("ageIndexemp") || temp.equals("idIndexPf")
|| temp.equals("empIdIndexPf2") || temp.equals("statusIndexPf3"))) {
fail("<idIndexPf> or <empIdIndexPf2> was expected but found " + temp);
}
}
int indxs = observer.indexesUsed.size();
assertTrue("Indexes used is not of size = 4 but of size = " + indxs, indxs == 4);
}
StructSetOrResultsSet ssOrrs = new StructSetOrResultsSet();
ssOrrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length, queries);
} finally {
IndexManager.TEST_RANGEINDEX_ONLY = false;
}
}
private static class QueryObserverImpl extends QueryObserverAdapter {
boolean isIndexesUsed = false;
ArrayList indexesUsed = new ArrayList();
String indexName;
@Override
public void beforeIndexLookup(Index index, int oper, Object key) {
indexName = index.getName();
indexesUsed.add(index.getName());
}
@Override
public void afterIndexLookup(Collection results) {
if (results != null) {
isIndexesUsed = true;
}
}
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fields;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.lookup.FieldLookup;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.ReadableDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;
import static java.util.Collections.singleton;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.client.Requests.refreshRequest;
import static org.elasticsearch.common.util.set.Sets.newHashSet;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class SearchFieldsIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singletonList(CustomScriptPlugin.class);
}
public static class CustomScriptPlugin extends MockScriptPlugin {
@Override
@SuppressWarnings("unchecked")
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
scripts.put("doc['num1'].value", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1");
return num1.getValue();
});
scripts.put("doc['num1'].value * factor", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1");
Double factor = (Double) vars.get("factor");
return num1.getValue() * factor;
});
scripts.put("doc['date'].date.millis", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
ScriptDocValues.Dates dates = (ScriptDocValues.Dates) doc.get("date");
return dates.getValue().getMillis();
});
scripts.put("_fields['num1'].value", vars -> fieldsScript(vars, "num1"));
scripts.put("_fields._uid.value", vars -> fieldsScript(vars, "_uid"));
scripts.put("_fields._id.value", vars -> fieldsScript(vars, "_id"));
scripts.put("_fields._type.value", vars -> fieldsScript(vars, "_type"));
scripts.put("_source.obj1", vars -> sourceScript(vars, "obj1"));
scripts.put("_source.obj1.test", vars -> sourceScript(vars, "obj1.test"));
scripts.put("_source.obj1.test", vars -> sourceScript(vars, "obj1.test"));
scripts.put("_source.obj2", vars -> sourceScript(vars, "obj2"));
scripts.put("_source.obj2.arr2", vars -> sourceScript(vars, "obj2.arr2"));
scripts.put("_source.arr3", vars -> sourceScript(vars, "arr3"));
scripts.put("return null", vars -> null);
scripts.put("doc['l'].values", vars -> docScript(vars, "l"));
scripts.put("doc['ml'].values", vars -> docScript(vars, "ml"));
scripts.put("doc['d'].values", vars -> docScript(vars, "d"));
scripts.put("doc['md'].values", vars -> docScript(vars, "md"));
scripts.put("doc['s'].values", vars -> docScript(vars, "s"));
scripts.put("doc['ms'].values", vars -> docScript(vars, "ms"));
return scripts;
}
@SuppressWarnings("unchecked")
static Object fieldsScript(Map<String, Object> vars, String fieldName) {
Map<?, ?> fields = (Map) vars.get("_fields");
FieldLookup fieldLookup = (FieldLookup) fields.get(fieldName);
return fieldLookup.getValue();
}
@SuppressWarnings("unchecked")
static Object sourceScript(Map<String, Object> vars, String path) {
Map<String, Object> source = (Map) vars.get("_source");
return XContentMapValues.extractValue(path, source);
}
@SuppressWarnings("unchecked")
static Object docScript(Map<String, Object> vars, String fieldName) {
Map<?, ?> doc = (Map) vars.get("doc");
ScriptDocValues<?> values = (ScriptDocValues<?>) doc.get(fieldName);
return values.getValues();
}
}
/**
 * Exercises the stored_fields fetch option:
 * - only fields mapped with {@code store=true} are returned;
 * - wildcard patterns ("*3", "field*", "f*3", "*") expand over stored fields only;
 * - requesting "*" alone does not load _source; adding "_source" explicitly does.
 */
public void testStoredFields() throws Exception {
createIndex("test");
// field2 is deliberately mapped store=false to prove non-stored fields are never fetched.
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("field1").field("type", "text").field("store", true).endObject()
.startObject("field2").field("type", "text").field("store", false).endObject()
.startObject("field3").field("type", "text").field("store", true).endObject()
.endObject().endObject().endObject().string();
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("field1", "value1")
.field("field2", "value2")
.field("field3", "value3")
.endObject()).execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
// Exact-name request for a stored field returns exactly that field.
SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field1").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));
// field2 is not stored, check that it is not extracted from source.
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field2").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(0));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field2"), nullValue());
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field3").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
// Wildcard "*3" matches only field3 among the stored fields.
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
// Mixing a wildcard with exact names: field2 drops out because it is not stored.
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addStoredField("*3")
.addStoredField("field1")
.addStoredField("field2")
.get();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(2));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(2));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
// "*" alone: all stored fields, but _source must NOT be loaded.
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getSourceAsMap(), nullValue());
assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(2));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
// "*" plus an explicit "_source" request brings the source back.
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addStoredField("*")
.addStoredField("_source")
.get();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getSourceAsMap(), notNullValue());
assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(2));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3"));
}
/**
 * Script fields reading from doc values ({@code doc['num1'].value}), stored fields
 * ({@code _fields['num1'].value}) and date doc values ({@code doc['date'].date.millis}),
 * plus a parameterized script ({@code doc['num1'].value * factor}).
 *
 * Bug fix: the field-name-set checks for hits 2 and 3 previously re-read
 * {@code getAt(0)}, so those hits' returned field names were never verified.
 */
public void testScriptDocAndFields() throws Exception {
    createIndex("test");
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
            .startObject("num1").field("type", "double").field("store", true).endObject()
            .endObject().endObject().endObject().string();
    client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
    client().prepareIndex("test", "type1", "1")
            .setSource(jsonBuilder().startObject()
                    .field("test", "value beck")
                    .field("num1", 1.0f)
                    .field("date", "1970-01-01T00:00:00")
                    .endObject())
            .execute().actionGet();
    // Flush between documents so each lands in its own segment.
    client().admin().indices().prepareFlush().execute().actionGet();
    client().prepareIndex("test", "type1", "2")
            .setSource(jsonBuilder().startObject()
                    .field("test", "value beck")
                    .field("num1", 2.0f)
                    .field("date", "1970-01-01T00:00:25")
                    .endObject())
            .get();
    client().admin().indices().prepareFlush().execute().actionGet();
    client().prepareIndex("test", "type1", "3")
            .setSource(jsonBuilder().startObject()
                    .field("test", "value beck")
                    .field("num1", 3.0f)
                    .field("date", "1970-01-01T00:02:00")
                    .endObject())
            .get();
    client().admin().indices().refresh(refreshRequest()).actionGet();
    logger.info("running doc['num1'].value");
    SearchResponse response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .addScriptField("sNum1",
                    new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()))
            .addScriptField("sNum1_field",
                    new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields['num1'].value", Collections.emptyMap()))
            .addScriptField("date1",
                    new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.millis", Collections.emptyMap()))
            .execute().actionGet();
    assertNoFailures(response);
    assertThat(response.getHits().getTotalHits(), equalTo(3L));
    // Only script fields were requested, so _source must not be loaded.
    assertFalse(response.getHits().getAt(0).hasSource());
    assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
    Set<String> fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1")));
    assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(1.0));
    assertThat(response.getHits().getAt(0).getFields().get("sNum1_field").getValues().get(0), equalTo(1.0));
    assertThat(response.getHits().getAt(0).getFields().get("date1").getValues().get(0), equalTo(0L));
    assertThat(response.getHits().getAt(1).getId(), equalTo("2"));
    // FIX: check the field names of hit 1 (was copy-pasted as getAt(0)).
    fields = new HashSet<>(response.getHits().getAt(1).getFields().keySet());
    assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1")));
    assertThat(response.getHits().getAt(1).getFields().get("sNum1").getValues().get(0), equalTo(2.0));
    assertThat(response.getHits().getAt(1).getFields().get("sNum1_field").getValues().get(0), equalTo(2.0));
    assertThat(response.getHits().getAt(1).getFields().get("date1").getValues().get(0), equalTo(25000L));
    assertThat(response.getHits().getAt(2).getId(), equalTo("3"));
    // FIX: check the field names of hit 2 (was copy-pasted as getAt(0)).
    fields = new HashSet<>(response.getHits().getAt(2).getFields().keySet());
    assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1")));
    assertThat(response.getHits().getAt(2).getFields().get("sNum1").getValues().get(0), equalTo(3.0));
    assertThat(response.getHits().getAt(2).getFields().get("sNum1_field").getValues().get(0), equalTo(3.0));
    assertThat(response.getHits().getAt(2).getFields().get("date1").getValues().get(0), equalTo(120000L));
    logger.info("running doc['num1'].value * factor");
    Map<String, Object> params = MapBuilder.<String, Object>newMapBuilder().put("factor", 2.0).map();
    response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value * factor", params))
            .get();
    assertThat(response.getHits().getTotalHits(), equalTo(3L));
    assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
    fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet());
    assertThat(fields, equalTo(singleton("sNum1")));
    assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(2.0));
    assertThat(response.getHits().getAt(1).getId(), equalTo("2"));
    // FIX: was getAt(0).
    fields = new HashSet<>(response.getHits().getAt(1).getFields().keySet());
    assertThat(fields, equalTo(singleton("sNum1")));
    assertThat(response.getHits().getAt(1).getFields().get("sNum1").getValues().get(0), equalTo(4.0));
    assertThat(response.getHits().getAt(2).getId(), equalTo("3"));
    // FIX: was getAt(0).
    fields = new HashSet<>(response.getHits().getAt(2).getFields().keySet());
    assertThat(fields, equalTo(singleton("sNum1")));
    assertThat(response.getHits().getAt(2).getFields().get("sNum1").getValues().get(0), equalTo(6.0));
}
/**
 * Verifies that script fields can read the metadata fields {@code _uid}, {@code _id}
 * and {@code _type} through the {@code _fields} stored-fields lookup, individually
 * and all three in one request.
 */
public void testUidBasedScriptFields() throws Exception {
    prepareCreate("test").addMapping("type1", "num1", "type=long").get();
    int docCount = randomIntBetween(1, 30);
    IndexRequestBuilder[] docs = new IndexRequestBuilder[docCount];
    for (int id = 0; id < docCount; id++) {
        docs[id] = client().prepareIndex("test", "type1", Integer.toString(id))
                .setSource(jsonBuilder().startObject().field("num1", id).endObject());
    }
    indexRandom(true, docs);
    // _uid only.
    SearchResponse response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .setSize(docCount)
            .addScriptField("uid", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._uid.value", Collections.emptyMap()))
            .execute().actionGet();
    assertNoFailures(response);
    assertThat(response.getHits().getTotalHits(), equalTo((long) docCount));
    for (int id = 0; id < docCount; id++) {
        SearchHit hit = response.getHits().getAt(id);
        assertThat(hit.getId(), equalTo(Integer.toString(id)));
        assertThat(new HashSet<>(hit.getFields().keySet()), equalTo(singleton("uid")));
        assertThat(hit.getFields().get("uid").getValue(), equalTo("type1#" + Integer.toString(id)));
    }
    // _id only.
    response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .setSize(docCount)
            .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap()))
            .execute().actionGet();
    assertNoFailures(response);
    assertThat(response.getHits().getTotalHits(), equalTo((long) docCount));
    for (int id = 0; id < docCount; id++) {
        SearchHit hit = response.getHits().getAt(id);
        assertThat(hit.getId(), equalTo(Integer.toString(id)));
        assertThat(new HashSet<>(hit.getFields().keySet()), equalTo(singleton("id")));
        assertThat(hit.getFields().get("id").getValue(), equalTo(Integer.toString(id)));
    }
    // _type only.
    response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .setSize(docCount)
            .addScriptField("type",
                    new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap()))
            .execute().actionGet();
    assertNoFailures(response);
    assertThat(response.getHits().getTotalHits(), equalTo((long) docCount));
    for (int id = 0; id < docCount; id++) {
        SearchHit hit = response.getHits().getAt(id);
        assertThat(hit.getId(), equalTo(Integer.toString(id)));
        assertThat(new HashSet<>(hit.getFields().keySet()), equalTo(singleton("type")));
        assertThat(hit.getFields().get("type").getValue(), equalTo("type1"));
    }
    // All three metadata fields in a single request.
    response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addSort("num1", SortOrder.ASC)
            .setSize(docCount)
            .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap()))
            .addScriptField("uid", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._uid.value", Collections.emptyMap()))
            .addScriptField("type",
                    new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap()))
            .execute().actionGet();
    assertNoFailures(response);
    assertThat(response.getHits().getTotalHits(), equalTo((long) docCount));
    for (int id = 0; id < docCount; id++) {
        SearchHit hit = response.getHits().getAt(id);
        assertThat(hit.getId(), equalTo(Integer.toString(id)));
        assertThat(new HashSet<>(hit.getFields().keySet()), equalTo(newHashSet("uid", "type", "id")));
        assertThat(hit.getFields().get("uid").getValue(), equalTo("type1#" + Integer.toString(id)));
        assertThat(hit.getFields().get("type").getValue(), equalTo("type1"));
        assertThat(hit.getFields().get("id").getValue(), equalTo(Integer.toString(id)));
    }
}
/**
 * Script fields extracting values straight from {@code _source}: whole objects,
 * nested scalar paths, arrays inside objects, and arrays of objects.
 */
public void testScriptFieldUsingSource() throws Exception {
createIndex("test");
// One document with an object, an object-wrapped array, and an array of objects.
client().prepareIndex("test", "type1", "1")
.setSource(jsonBuilder().startObject()
.startObject("obj1").field("test", "something").endObject()
.startObject("obj2").startArray("arr2").value("arr_value1").value("arr_value2").endArray().endObject()
.startArray("arr3").startObject().field("arr3_field1", "arr3_value1").endObject().endArray()
.endObject())
.execute().actionGet();
client().admin().indices().refresh(refreshRequest()).actionGet();
SearchResponse response = client().prepareSearch()
.setQuery(matchAllQuery())
.addScriptField("s_obj1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj1", Collections.emptyMap()))
.addScriptField("s_obj1_test",
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj1.test", Collections.emptyMap()))
.addScriptField("s_obj2", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj2", Collections.emptyMap()))
.addScriptField("s_obj2_arr2",
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj2.arr2", Collections.emptyMap()))
.addScriptField("s_arr3", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.arr3", Collections.emptyMap()))
.get();
assertThat("Failures " + Arrays.toString(response.getShardFailures()), response.getShardFailures().length, equalTo(0));
assertThat(response.getHits().getAt(0).field("s_obj1_test").getValue().toString(), equalTo("something"));
// Whole-object extraction yields the object's map.
Map<String, Object> sObj1 = response.getHits().getAt(0).field("s_obj1").getValue();
assertThat(sObj1.get("test").toString(), equalTo("something"));
assertThat(response.getHits().getAt(0).field("s_obj1_test").getValue().toString(), equalTo("something"));
Map<String, Object> sObj2 = response.getHits().getAt(0).field("s_obj2").getValue();
List<?> sObj2Arr2 = (List<?>) sObj2.get("arr2");
assertThat(sObj2Arr2.size(), equalTo(2));
assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));
// Extracting the array path directly flattens it into the field's values list.
sObj2Arr2 = response.getHits().getAt(0).field("s_obj2_arr2").getValues();
assertThat(sObj2Arr2.size(), equalTo(2));
assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));
List<?> sObj2Arr3 = response.getHits().getAt(0).field("s_arr3").getValues();
assertThat(((Map<?, ?>) sObj2Arr3.get(0)).get("arr3_field1").toString(), equalTo("arr3_value1"));
}
/**
 * A script field whose script returns {@code null} must still produce a field on the
 * hit, containing a single null value (not a missing field).
 */
public void testScriptFieldsForNullReturn() throws Exception {
    client().prepareIndex("test", "type1", "1")
            .setSource("foo", "bar")
            .setRefreshPolicy("true")
            .execute().actionGet();
    SearchResponse response = client().prepareSearch()
            .setQuery(matchAllQuery())
            .addScriptField("test_script_1",
                    new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return null", Collections.emptyMap()))
            .execute().actionGet();
    assertNoFailures(response);
    SearchHitField scriptField = response.getHits().getAt(0).field("test_script_1");
    assertThat(scriptField, notNullValue());
    List<?> values = scriptField.getValues();
    assertThat(values, hasSize(1));
    assertThat(values.get(0), nullValue());
}
/**
 * Indexes a document with a nested object/array structure.
 * NOTE(review): this presumably once covered the removed "partial fields" feature;
 * the body previously made no assertions at all, so at minimum we now verify the
 * document indexes cleanly and is searchable after the refresh.
 */
public void testPartialFields() throws Exception {
    createIndex("test");
    client().prepareIndex("test", "type1", "1").setSource(XContentFactory.jsonBuilder().startObject()
            .field("field1", "value1")
            .startObject("obj1")
            .startArray("arr1")
            .startObject().startObject("obj2").field("field2", "value21").endObject().endObject()
            .startObject().startObject("obj2").field("field2", "value22").endObject().endObject()
            .endArray()
            .endObject()
            .endObject())
            .execute().actionGet();
    client().admin().indices().prepareRefresh().execute().actionGet();
    // Minimal sanity assertion: the structured document is visible to search.
    SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get();
    assertHitCount(searchResponse, 1);
}
/**
 * Stored fields of every primitive mapping type (byte/short/integer/long/float/double/
 * date/boolean/binary) must be retrievable even when {@code _source} is disabled,
 * and must come back with the expected Java types.
 */
public void testStoredFieldsWithoutSource() throws Exception {
createIndex("test");
// _source is disabled, so values can only come from the stored fields themselves.
String mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
.startObject("_source")
.field("enabled", false)
.endObject()
.startObject("properties")
.startObject("byte_field")
.field("type", "byte")
.field("store", true)
.endObject()
.startObject("short_field")
.field("type", "short")
.field("store", true)
.endObject()
.startObject("integer_field")
.field("type", "integer")
.field("store", true)
.endObject()
.startObject("long_field")
.field("type", "long")
.field("store", true)
.endObject()
.startObject("float_field")
.field("type", "float")
.field("store", true)
.endObject()
.startObject("double_field")
.field("type", "double")
.field("store", true)
.endObject()
.startObject("date_field")
.field("type", "date")
.field("store", true)
.endObject()
.startObject("boolean_field")
.field("type", "boolean")
.field("store", true)
.endObject()
.startObject("binary_field")
.field("type", "binary")
.field("store", true)
.endObject()
.endObject()
.endObject()
.endObject()
.string();
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("byte_field", (byte) 1)
.field("short_field", (short) 2)
.field("integer_field", 3)
.field("long_field", 4L)
.field("float_field", 5.0f)
.field("double_field", 6.0d)
.field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)))
.field("boolean_field", true)
.field("binary_field", Base64.getEncoder().encodeToString("testing text".getBytes("UTF-8")))
.endObject()).execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery())
.addStoredField("byte_field")
.addStoredField("short_field")
.addStoredField("integer_field")
.addStoredField("long_field")
.addStoredField("float_field")
.addStoredField("double_field")
.addStoredField("date_field")
.addStoredField("boolean_field")
.addStoredField("binary_field")
.execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
Set<String> fields = new HashSet<>(searchResponse.getHits().getAt(0).getFields().keySet());
assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field",
"float_field", "double_field", "date_field", "boolean_field", "binary_field")));
SearchHit searchHit = searchResponse.getHits().getAt(0);
// byte/short come back boxed as larger numerics, so compare via toString.
assertThat(searchHit.getFields().get("byte_field").getValue().toString(), equalTo("1"));
assertThat(searchHit.getFields().get("short_field").getValue().toString(), equalTo("2"));
assertThat(searchHit.getFields().get("integer_field").getValue(), equalTo((Object) 3));
assertThat(searchHit.getFields().get("long_field").getValue(), equalTo((Object) 4L));
assertThat(searchHit.getFields().get("float_field").getValue(), equalTo((Object) 5.0f));
assertThat(searchHit.getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC));
assertThat(searchHit.getFields().get("date_field").getValue(), equalTo((Object) dateTime));
assertThat(searchHit.getFields().get("boolean_field").getValue(), equalTo((Object) Boolean.TRUE));
// "UTF8" is a legal Java alias for "UTF-8", so this matches the indexed bytes above.
assertThat(searchHit.getFields().get("binary_field").getValue(), equalTo(new BytesArray("testing text" .getBytes("UTF8"))));
}
/**
 * Requesting a metadata field ({@code _routing}) via stored_fields must return it
 * flagged as metadata, while an unmapped/non-stored regular field returns nothing.
 */
public void testSearchFieldsMetaData() throws Exception {
    client().prepareIndex("my-index", "my-type1", "1")
            .setRouting("1")
            .setSource(jsonBuilder().startObject().field("field1", "value").endObject())
            .setRefreshPolicy(IMMEDIATE)
            .execute().actionGet();
    SearchResponse searchResponse = client().prepareSearch("my-index")
            .setTypes("my-type1")
            .addStoredField("field1").addStoredField("_routing")
            .execute().actionGet();
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
    SearchHit hit = searchResponse.getHits().getAt(0);
    // field1 is not stored, so it must not be returned.
    assertThat(hit.field("field1"), nullValue());
    assertThat(hit.field("_routing").isMetadataField(), equalTo(true));
    assertThat(hit.field("_routing").getValue().toString(), equalTo("1"));
}
/**
 * Asking for a stored field on an object (non-leaf) field is invalid and must fail
 * with HTTP 400 and a descriptive message.
 */
public void testSearchFieldsNonLeafField() throws Exception {
    client().prepareIndex("my-index", "my-type1", "1")
            .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject())
            .setRefreshPolicy(IMMEDIATE)
            .execute().actionGet();
    assertFailures(client().prepareSearch("my-index").setTypes("my-type1").addStoredField("field1"),
            RestStatus.BAD_REQUEST,
            containsString("field [field1] isn't a leaf field"));
}
/**
 * A deeply nested stored leaf field (field1.field2.field3.field4, with arrays at two
 * levels) must be retrievable by its full dotted path and return all values in order,
 * both on a type where the nesting is explicitly mapped (my-type2) and on one where
 * it is dynamically mapped (my-type1).
 */
public void testGetFieldsComplexField() throws Exception {
// refresh_interval=-1: refresh is driven by the IMMEDIATE refresh policy below.
client().admin().indices().prepareCreate("my-index")
.setSettings(Settings.builder().put("index.refresh_interval", -1))
.addMapping("my-type2", jsonBuilder()
.startObject()
.startObject("my-type2")
.startObject("properties")
.startObject("field1")
.field("type", "object")
.startObject("properties")
.startObject("field2")
.field("type", "object")
.startObject("properties")
.startObject("field3")
.field("type", "object")
.startObject("properties")
.startObject("field4")
.field("type", "text")
.field("store", true)
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject())
.get();
// Two array entries, each carrying one field4 value ("value1", "value2").
BytesReference source = jsonBuilder().startObject()
.startArray("field1")
.startObject()
.startObject("field2")
.startArray("field3")
.startObject()
.field("field4", "value1")
.endObject()
.endArray()
.endObject()
.endObject()
.startObject()
.startObject("field2")
.startArray("field3")
.startObject()
.field("field4", "value2")
.endObject()
.endArray()
.endObject()
.endObject()
.endArray()
.endObject().bytes();
client().prepareIndex("my-index", "my-type1", "1").setSource(source, XContentType.JSON).get();
client().prepareIndex("my-index", "my-type2", "1").setRefreshPolicy(IMMEDIATE).setSource(source, XContentType.JSON).get();
String field = "field1.field2.field3.field4";
// Dynamically-mapped type: both array values come back, in document order.
SearchResponse searchResponse = client().prepareSearch("my-index").setTypes("my-type1").addStoredField(field).get();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).field(field).isMetadataField(), equalTo(false));
assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2));
assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1"));
assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2"));
// Explicitly-mapped type: same result.
searchResponse = client().prepareSearch("my-index").setTypes("my-type2").addStoredField(field).get();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).field(field).isMetadataField(), equalTo(false));
assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2));
assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1"));
assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2"));
}
// see #8203
/**
 * Regression test for #8203: fetching a fielddata field on a single-valued keyword
 * field must return its value.
 * NOTE(review): the method name contains a typo ("Datat"); kept as-is so the test's
 * identity and history are preserved.
 */
public void testSingleValueFieldDatatField() throws ExecutionException, InterruptedException {
    assertAcked(client().admin().indices().prepareCreate("test")
            .addMapping("type", "test_field", "type=keyword").get());
    indexRandom(true, client().prepareIndex("test", "type", "1").setSource("test_field", "foobar"));
    refresh();
    SearchSourceBuilder sourceWithFieldData =
            new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).fieldDataField("test_field");
    SearchResponse searchResponse = client().prepareSearch("test").setTypes("type").setSource(sourceWithFieldData).get();
    assertHitCount(searchResponse, 1);
    SearchHitField fieldDataField = searchResponse.getHits().getHits()[0].getFields().get("test_field");
    assertThat(fieldDataField.getValue(), equalTo("foobar"));
}
/**
 * docvalue_fields retrieval for every field type that supports doc values (text with
 * fielddata, keyword, all numerics, date, boolean, ip), with {@code _source} disabled
 * so values can only come from the column store. Note integers/floats widen to
 * long/double when read back from doc values.
 */
public void testFieldsPulledFromFieldData() throws Exception {
createIndex("test");
String mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
.startObject("_source")
.field("enabled", false)
.endObject()
.startObject("properties")
.startObject("text_field")
.field("type", "text")
.field("fielddata", true)
.endObject()
.startObject("keyword_field")
.field("type", "keyword")
.endObject()
.startObject("byte_field")
.field("type", "byte")
.endObject()
.startObject("short_field")
.field("type", "short")
.endObject()
.startObject("integer_field")
.field("type", "integer")
.endObject()
.startObject("long_field")
.field("type", "long")
.endObject()
.startObject("float_field")
.field("type", "float")
.endObject()
.startObject("double_field")
.field("type", "double")
.endObject()
.startObject("date_field")
.field("type", "date")
.endObject()
.startObject("boolean_field")
.field("type", "boolean")
.endObject()
.startObject("binary_field")
.field("type", "binary")
.endObject()
.startObject("ip_field")
.field("type", "ip")
.endObject()
.endObject()
.endObject()
.endObject()
.string();
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
ReadableDateTime date = new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC);
// binary_field is mapped but intentionally not populated (binary has no doc values here).
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("text_field", "foo")
.field("keyword_field", "foo")
.field("byte_field", (byte) 1)
.field("short_field", (short) 2)
.field("integer_field", 3)
.field("long_field", 4L)
.field("float_field", 5.0f)
.field("double_field", 6.0d)
.field("date_field", Joda.forPattern("dateOptionalTime").printer().print(date))
.field("boolean_field", true)
.field("ip_field", "::1")
.endObject()).execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery())
.addDocValueField("text_field")
.addDocValueField("keyword_field")
.addDocValueField("byte_field")
.addDocValueField("short_field")
.addDocValueField("integer_field")
.addDocValueField("long_field")
.addDocValueField("float_field")
.addDocValueField("double_field")
.addDocValueField("date_field")
.addDocValueField("boolean_field")
.addDocValueField("ip_field");
SearchResponse searchResponse = builder.execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
Set<String> fields = new HashSet<>(searchResponse.getHits().getAt(0).getFields().keySet());
assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field",
"float_field", "double_field", "date_field", "boolean_field", "text_field", "keyword_field",
"ip_field")));
assertThat(searchResponse.getHits().getAt(0).getFields().get("byte_field").getValue().toString(), equalTo("1"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("short_field").getValue().toString(), equalTo("2"));
// Doc values widen integer -> long and float -> double on read.
assertThat(searchResponse.getHits().getAt(0).getFields().get("integer_field").getValue(), equalTo((Object) 3L));
assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), equalTo(date));
assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValue(), equalTo("::1"));
}
/**
 * Script fields returning whole doc-values lists ({@code doc[field].values}) must
 * surface single-valued and multi-valued keyword/long/double fields correctly.
 */
public void testScriptFields() throws Exception {
    assertAcked(prepareCreate("index").addMapping("type",
            "s", "type=keyword",
            "l", "type=long",
            "d", "type=double",
            "ms", "type=keyword",
            "ml", "type=long",
            "md", "type=double").get());
    final int docCount = randomIntBetween(3, 8);
    List<IndexRequestBuilder> indexRequests = new ArrayList<>();
    for (int i = 0; i < docCount; ++i) {
        // Multi-valued fields hold {i, i+1} so each doc's expectation is derivable from its id.
        indexRequests.add(client().prepareIndex("index", "type", Integer.toString(i)).setSource(
                "s", Integer.toString(i),
                "ms", new String[] {Integer.toString(i), Integer.toString(i + 1)},
                "l", i,
                "ml", new long[] {i, i + 1},
                "d", i,
                "md", new double[] {i, i + 1}));
    }
    indexRandom(true, indexRequests);
    ensureSearchable();
    SearchRequestBuilder searchRequest = client().prepareSearch("index");
    for (String field : Arrays.asList("s", "ms", "l", "ml", "d", "md")) {
        searchRequest.addScriptField(field,
                new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + field + "'].values", Collections.emptyMap()));
    }
    SearchResponse searchResponse = searchRequest.get();
    assertSearchResponse(searchResponse);
    for (SearchHit hit : searchResponse.getHits().getHits()) {
        final int id = Integer.parseInt(hit.getId());
        Map<String, SearchHitField> hitFields = hit.getFields();
        assertThat(hitFields.get("s").getValues(), equalTo(Collections.<Object> singletonList(Integer.toString(id))));
        assertThat(hitFields.get("l").getValues(), equalTo(Collections.<Object> singletonList((long) id)));
        assertThat(hitFields.get("d").getValues(), equalTo(Collections.<Object> singletonList((double) id)));
        assertThat(hitFields.get("ms").getValues(), equalTo(Arrays.<Object> asList(Integer.toString(id), Integer.toString(id + 1))));
        assertThat(hitFields.get("ml").getValues(), equalTo(Arrays.<Object> asList((long) id, id + 1L)));
        assertThat(hitFields.get("md").getValues(), equalTo(Arrays.<Object> asList((double) id, id + 1d)));
    }
}
/**
 * Metadata fields ({@code _routing}, {@code _parent}) must be loaded and flagged as
 * metadata even when only a regular (non-stored) field was requested.
 */
public void testLoadMetadata() throws Exception {
    assertAcked(prepareCreate("test")
            .addMapping("parent")
            .addMapping("my-type1", "_parent", "type=parent"));
    indexRandom(true,
            client().prepareIndex("test", "my-type1", "1")
                    .setRouting("1")
                    .setParent("parent_1")
                    .setSource(jsonBuilder().startObject().field("field1", "value").endObject()));
    SearchResponse response = client().prepareSearch("test").addStoredField("field1").execute().actionGet();
    assertSearchResponse(response);
    assertHitCount(response, 1);
    Map<String, SearchHitField> hitFields = response.getHits().getAt(0).getFields();
    // field1 is not a stored field, so it must not come back.
    assertThat(hitFields.get("field1"), nullValue());
    assertThat(hitFields.get("_routing").isMetadataField(), equalTo(true));
    assertThat(hitFields.get("_routing").getValue().toString(), equalTo("1"));
    assertThat(hitFields.get("_parent").isMetadataField(), equalTo(true));
    assertThat(hitFields.get("_parent").getValue().toString(), equalTo("parent_1"));
}
}
|
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xml.util;
import com.intellij.codeInspection.InspectionProfile;
import com.intellij.codeInspection.htmlInspections.XmlEntitiesInspection;
import com.intellij.ide.highlighter.HtmlFileType;
import com.intellij.ide.highlighter.XHtmlFileType;
import com.intellij.javaee.ExternalResourceManagerEx;
import com.intellij.lang.Language;
import com.intellij.lang.html.HTMLLanguage;
import com.intellij.lang.xhtml.XHTMLLanguage;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.registry.RegistryValue;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.profile.codeInspection.InspectionProjectProfileManager;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.html.HtmlTag;
import com.intellij.psi.impl.source.html.HtmlDocumentImpl;
import com.intellij.psi.impl.source.html.dtd.HtmlAttributeDescriptorImpl;
import com.intellij.psi.impl.source.parsing.xml.HtmlBuilderDriver;
import com.intellij.psi.impl.source.parsing.xml.XmlBuilder;
import com.intellij.psi.templateLanguages.TemplateLanguageFileViewProvider;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.*;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.xml.Html5SchemaProvider;
import com.intellij.xml.XmlAttributeDescriptor;
import com.intellij.xml.XmlElementDescriptor;
import com.intellij.xml.XmlNSDescriptor;
import com.intellij.xml.impl.schema.XmlAttributeDescriptorImpl;
import com.intellij.xml.impl.schema.XmlElementDescriptorImpl;
import com.intellij.xml.util.documentation.HtmlDescriptorsTable;
import com.intellij.xml.util.documentation.MimeTypeDictionary;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.nio.charset.Charset;
import java.util.*;
/**
* @author Maxim.Mossienko
*/
public class HtmlUtil {
  private static final Logger LOG = Logger.getInstance("#com.intellij.xml.util.HtmlUtil");
  // Attribute used by JSF to treat a plain HTML tag as a JSF component.
  @NonNls private static final String JSFC = "jsfc";
  @NonNls private static final String CHARSET = "charset";
  // "charset=" prefix searched for inside <meta http-equiv="content-type" content="...">.
  @NonNls private static final String CHARSET_PREFIX = CHARSET+"=";
  // Prefix of HTML5 custom data attributes (data-*).
  @NonNls private static final String HTML5_DATA_ATTR_PREFIX = "data-";
  public static final String SCRIPT_TAG_NAME = "script";
  public static final String STYLE_TAG_NAME = "style";
  public static final String STYLE_ATTRIBUTE_NAME = STYLE_TAG_NAME;
  public static final String ID_ATTRIBUTE_NAME = "id";
  public static final String CLASS_ATTRIBUTE_NAME = "class";
  // Known MIME content types, used for completion of content-type values.
  public static final String[] CONTENT_TYPES = ArrayUtil.toStringArray(MimeTypeDictionary.getContentTypes());
  @NonNls public static final String MATH_ML_NAMESPACE = "http://www.w3.org/1998/Math/MathML";
  @NonNls public static final String SVG_NAMESPACE = "http://www.w3.org/2000/svg";
  // Header names defined by RFC 2616 (HTTP/1.1), e.g. for http-equiv completion.
  public static final String[] RFC2616_HEADERS = new String[]{"Accept", "Accept-Charset", "Accept-Encoding", "Accept-Language",
    "Accept-Ranges", "Age", "Allow", "Authorization", "Cache-Control", "Connection", "Content-Encoding", "Content-Language",
    "Content-Length", "Content-Location", "Content-MD5", "Content-Range", "Content-Type", "Date", "ETag", "Expect", "Expires", "From",
    "Host", "If-Match", "If-Modified-Since", "If-None-Match", "If-Range", "If-Unmodified-Since", "Last-Modified", "Location",
    "Max-Forwards", "Pragma", "Proxy-Authenticate", "Proxy-Authorization", "Range", "Referer", "Refresh", "Retry-After", "Server", "TE",
    "Trailer", "Transfer-Encoding", "Upgrade", "User-Agent", "Vary", "Via", "Warning", "WWW-Authenticate"};
  // Utility class: not instantiable.
  private HtmlUtil() {
  }
  // Tags whose end tag is forbidden (e.g. <br>), lowercase names; filled from HTMLControls below.
  private static final Set<String> EMPTY_TAGS_MAP = new THashSet<String>();
  @NonNls private static final String[] OPTIONAL_END_TAGS = {
    //"html",
    "head",
    //"body",
    "p", "li", "dd", "dt", "thead", "tfoot", "tbody", "colgroup", "tr", "th", "td", "option", "embed", "noembed"
  };
  private static final Set<String> OPTIONAL_END_TAGS_MAP = new THashSet<String>();
  @NonNls private static final String[] BLOCK_TAGS = {"p", "h1", "h2", "h3", "h4", "h5", "h6", "ul", "ol", "dir", "menu", "pre",
    "dl", "div", "center", "noscript", "noframes", "blockquote", "form", "isindex", "hr", "table", "fieldset", "address",
    // nonexplicitly specified
    "map",
    // flow elements
    "body", "object", "applet", "ins", "del", "dd", "li", "button", "th", "td", "iframe", "comment"
  };
  // flow elements are block or inline, so they should not close <p> for example
  @NonNls private static final String[] POSSIBLY_INLINE_TAGS =
    {"a", "abbr", "acronym", "applet", "b", "basefont", "bdo", "big", "br", "button",
      "cite", "code", "del", "dfn", "em", "font", "i", "iframe", "img", "input", "ins",
      "kbd", "label", "map", "object", "q", "s", "samp", "select", "small", "span", "strike",
      "strong", "sub", "sup", "textarea", "tt", "u", "var"};
  private static final Set<String> BLOCK_TAGS_MAP = new THashSet<String>();
  @NonNls private static final String[] INLINE_ELEMENTS_CONTAINER = {"p", "h1", "h2", "h3", "h4", "h5", "h6", "pre", "dt"};
  private static final Set<String> INLINE_ELEMENTS_CONTAINER_MAP = new THashSet<String>();
  private static final Set<String> POSSIBLY_INLINE_TAGS_MAP = new THashSet<String>();
  // Tags introduced by HTML5 that are absent from the HTML 4 descriptors.
  @NonNls private static final String[] HTML5_TAGS = {
    "article", "aside", "audio", "canvas", "command", "datalist", "details", "embed", "figcaption", "figure", "footer", "header",
    "keygen", "mark", "meter", "nav", "output", "progress", "rp", "rt", "ruby", "section", "source", "summary", "time", "video", "wbr",
    "main"
  };
  private static final Set<String> HTML5_TAGS_SET = new THashSet<String>();
  // Maps a tag name to the set of child tag names whose appearance auto-closes it.
  private static final Map<String, Set<String>> AUTO_CLOSE_BY_MAP = new THashMap<String, Set<String>>();
  static {
    // Populate the lookup sets once from the HTMLControls table and the arrays above.
    for (HTMLControls.Control control : HTMLControls.getControls()) {
      final String tagName = control.name.toLowerCase(Locale.US);
      if (control.endTag == HTMLControls.TagState.FORBIDDEN) EMPTY_TAGS_MAP.add(tagName);
      AUTO_CLOSE_BY_MAP.put(tagName, new THashSet<String>(control.autoClosedBy));
    }
    ContainerUtil.addAll(OPTIONAL_END_TAGS_MAP, OPTIONAL_END_TAGS);
    ContainerUtil.addAll(BLOCK_TAGS_MAP, BLOCK_TAGS);
    ContainerUtil.addAll(INLINE_ELEMENTS_CONTAINER_MAP, INLINE_ELEMENTS_CONTAINER);
    ContainerUtil.addAll(POSSIBLY_INLINE_TAGS_MAP, POSSIBLY_INLINE_TAGS);
    ContainerUtil.addAll(HTML5_TAGS_SET, HTML5_TAGS);
  }
public static boolean isSingleHtmlTag(String tagName) {
return EMPTY_TAGS_MAP.contains(tagName.toLowerCase(Locale.US));
}
public static boolean isSingleHtmlTagL(String tagName) {
return EMPTY_TAGS_MAP.contains(tagName);
}
public static boolean isOptionalEndForHtmlTag(String tagName) {
return OPTIONAL_END_TAGS_MAP.contains(tagName.toLowerCase(Locale.US));
}
public static boolean isOptionalEndForHtmlTagL(String tagName) {
return OPTIONAL_END_TAGS_MAP.contains(tagName);
}
public static boolean canTerminate(final String childTagName, final String tagName) {
final Set<String> closingTags = AUTO_CLOSE_BY_MAP.get(tagName);
return closingTags != null && closingTags.contains(childTagName);
}
public static boolean isHtmlBlockTag(String tagName) {
return BLOCK_TAGS_MAP.contains(tagName.toLowerCase(Locale.US));
}
public static boolean isPossiblyInlineTag(String tagName) {
return POSSIBLY_INLINE_TAGS_MAP.contains(tagName);
}
public static boolean isHtmlBlockTagL(String tagName) {
return BLOCK_TAGS_MAP.contains(tagName);
}
public static boolean isInlineTagContainer(String tagName) {
return INLINE_ELEMENTS_CONTAINER_MAP.contains(tagName.toLowerCase(Locale.US));
}
public static boolean isInlineTagContainerL(String tagName) {
return INLINE_ELEMENTS_CONTAINER_MAP.contains(tagName);
}
  /**
   * Adds HTML-specific completion variants for tags with optional end tags: when completing
   * inside such a tag, block-level tags allowed in the grandparent are offered too, because
   * the tag being completed may implicitly close the current one.
   *
   * @param descriptor descriptor of the tag completion is invoked in
   * @param element    the tag completion is invoked in
   * @param variants   mutable list of variants to append to
   */
  public static void addHtmlSpecificCompletions(final XmlElementDescriptor descriptor,
                                                final XmlTag element,
                                                final List<XmlElementDescriptor> variants) {
    // add html block completions for tags with optional ends!
    String name = descriptor.getName(element);
    if (name != null && isOptionalEndForHtmlTag(name)) {
      PsiElement parent = element.getParent();
      if (parent != null) {
        // we need grand parent since completion already uses parent's descriptor
        parent = parent.getParent();
      }
      if (parent instanceof HtmlTag) {
        final XmlElementDescriptor parentDescriptor = ((HtmlTag)parent).getDescriptor();
        if (parentDescriptor != descriptor && parentDescriptor != null) {
          // Offer every block tag permitted in the grandparent.
          for (final XmlElementDescriptor elementsDescriptor : parentDescriptor.getElementsDescriptors((XmlTag)parent)) {
            if (isHtmlBlockTag(elementsDescriptor.getName())) {
              variants.add(elementsDescriptor);
            }
          }
        }
      } else if (parent instanceof HtmlDocumentImpl) {
        // At document level, offer block tags from the namespace's root descriptors,
        // avoiding duplicates already present in the list.
        final XmlNSDescriptor nsDescriptor = descriptor.getNSDescriptor();
        for (XmlElementDescriptor elementDescriptor : nsDescriptor.getRootElementsDescriptors((XmlDocument)parent)) {
          if (isHtmlBlockTag(elementDescriptor.getName()) && !variants.contains(elementDescriptor)) {
            variants.add(elementDescriptor);
          }
        }
      }
    }
  }
  /** Delegates to {@link HtmlPsiUtil#getRealXmlDocument}: unwraps the real document for templated files. */
  @Nullable
  public static XmlDocument getRealXmlDocument(@Nullable XmlDocument doc) {
    return HtmlPsiUtil.getRealXmlDocument(doc);
  }
  /** Returns all known HTML tag names from the documentation table. */
  public static String[] getHtmlTagNames() {
    return HtmlDescriptorsTable.getHtmlTagNames();
  }
  /** Registry-backed preference: write boolean attributes as {@code disabled} rather than {@code disabled="disabled"}. */
  public static boolean isShortNotationOfBooleanAttributePreferred() {
    return Registry.is("html.prefer.short.notation.of.boolean.attributes", true);
  }
  /**
   * Test-only override of the short-notation preference; the previous value is
   * restored when {@code parent} is disposed.
   */
  @TestOnly
  public static void setShortNotationOfBooleanAttributeIsPreferred(boolean value, Disposable parent) {
    final boolean oldValue = isShortNotationOfBooleanAttributePreferred();
    final RegistryValue registryValue = Registry.get("html.prefer.short.notation.of.boolean.attributes");
    registryValue.setValue(value);
    Disposer.register(parent, new Disposable() {
      @Override
      public void dispose() {
        registryValue.setValue(oldValue);
      }
    });
  }
  /**
   * Returns {@code true} if {@code descriptor} describes an HTML boolean attribute:
   * an enumerated attribute whose only allowed values are its own name and/or the
   * empty string (e.g. {@code checked}), or a user-configured custom boolean attribute.
   */
  public static boolean isBooleanAttribute(@NotNull XmlAttributeDescriptor descriptor, @Nullable PsiElement context) {
    if (descriptor instanceof HtmlAttributeDescriptorImpl && descriptor.isEnumerated()) {
      final String[] values = descriptor.getEnumeratedValues();
      if (values == null) {
        return false;
      }
      if (values.length == 2) {
        // Either order of {"", name} qualifies.
        return values[0].isEmpty() && values[1].equals(descriptor.getName())
               || values[1].isEmpty() && values[0].equals(descriptor.getName());
      }
      else if (values.length == 1) {
        return descriptor.getName().equals(values[0]);
      }
    }
    // Fall back to the user-configured list from the inspection profile.
    return context != null && isCustomBooleanAttribute(descriptor.getName(), context);
  }
  /**
   * Returns {@code true} if {@code attributeName} is listed (comma-separated, case-insensitive)
   * in the "custom boolean attributes" entries of the active inspection profile.
   */
  public static boolean isCustomBooleanAttribute(@NotNull String attributeName, @NotNull PsiElement context) {
    final String entitiesString = getEntitiesString(context, XmlEntitiesInspection.BOOLEAN_ATTRIBUTE_SHORT_NAME);
    if (entitiesString != null) {
      StringTokenizer tokenizer = new StringTokenizer(entitiesString, ",");
      while (tokenizer.hasMoreElements()) {
        if (tokenizer.nextToken().equalsIgnoreCase(attributeName)) {
          return true;
        }
      }
    }
    return false;
  }
public static XmlAttributeDescriptor[] getCustomAttributeDescriptors(XmlElement context) {
String entitiesString = getEntitiesString(context, XmlEntitiesInspection.ATTRIBUTE_SHORT_NAME);
if (entitiesString == null) return XmlAttributeDescriptor.EMPTY;
StringTokenizer tokenizer = new StringTokenizer(entitiesString, ",");
XmlAttributeDescriptor[] descriptors = new XmlAttributeDescriptor[tokenizer.countTokens()];
int index = 0;
while (tokenizer.hasMoreElements()) {
final String customName = tokenizer.nextToken();
if (customName.length() == 0) continue;
descriptors[index++] = new XmlAttributeDescriptorImpl() {
@Override
public String getName(PsiElement context) {
return customName;
}
@Override
public String getName() {
return customName;
}
};
}
return descriptors;
}
public static XmlElementDescriptor[] getCustomTagDescriptors(@Nullable PsiElement context) {
String entitiesString = getEntitiesString(context, XmlEntitiesInspection.TAG_SHORT_NAME);
if (entitiesString == null) return XmlElementDescriptor.EMPTY_ARRAY;
StringTokenizer tokenizer = new StringTokenizer(entitiesString, ",");
XmlElementDescriptor[] descriptors = new XmlElementDescriptor[tokenizer.countTokens()];
int index = 0;
while (tokenizer.hasMoreElements()) {
final String tagName = tokenizer.nextToken();
if (tagName.length() == 0) continue;
descriptors[index++] = new XmlElementDescriptorImpl(null) {
@Override
public String getName(PsiElement context) {
return tagName;
}
@Override
public String getDefaultName() {
return tagName;
}
@Override
public boolean allowElementsFromNamespace(final String namespace, final XmlTag context) {
return true;
}
};
}
return descriptors;
}
  /**
   * Reads the "additional entries" string configured for the given inspection
   * (by short name) in the profile active for {@code context}'s file.
   *
   * @return the comma-separated entries, or {@code null} if the context is null
   *         or the inspection is not available in the profile
   */
  @Nullable
  public static String getEntitiesString(@Nullable PsiElement context, @NotNull String inspectionName) {
    if (context == null) return null;
    PsiFile containingFile = context.getContainingFile().getOriginalFile();
    final InspectionProfile profile = InspectionProjectProfileManager.getInstance(context.getProject()).getInspectionProfile();
    XmlEntitiesInspection inspection = (XmlEntitiesInspection)profile.getUnwrappedTool(inspectionName, containingFile);
    if (inspection != null) {
      return inspection.getAdditionalEntries();
    }
    return null;
  }
  /**
   * Appends HTML-specific attribute completion variants to {@code descriptors}:
   * user-configured custom attributes for plain HTML tags, and the JSF {@code jsfc}
   * attribute for XHTML tags declared in a JSF HTML namespace context.
   *
   * @return the (possibly extended) descriptor array
   */
  public static XmlAttributeDescriptor[] appendHtmlSpecificAttributeCompletions(final XmlTag declarationTag,
                                                                                XmlAttributeDescriptor[] descriptors,
                                                                                final XmlAttribute context) {
    if (declarationTag instanceof HtmlTag) {
      descriptors = ArrayUtil.mergeArrays(
        descriptors,
        getCustomAttributeDescriptors(context)
      );
      return descriptors;
    }
    // Not an HTML tag: check whether the JSF HTML namespace is in scope.
    boolean isJsfHtmlNamespace = false;
    for (String jsfHtmlUri : XmlUtil.JSF_HTML_URIS) {
      if (declarationTag.getPrefixByNamespace(jsfHtmlUri) != null) {
        isJsfHtmlNamespace = true;
        break;
      }
    }
    // Offer "jsfc" only for XHTML-described tags outside the JSP namespace.
    if (isJsfHtmlNamespace && declarationTag.getNSDescriptor(XmlUtil.XHTML_URI, true) != null &&
        !XmlUtil.JSP_URI.equals(declarationTag.getNamespace())) {
      descriptors = ArrayUtil.append(
        descriptors,
        new XmlAttributeDescriptorImpl() {
          @Override
          public String getName(PsiElement context) {
            return JSFC;
          }
          @Override
          public String getName() {
            return JSFC;
          }
        },
        XmlAttributeDescriptor.class
      );
    }
    return descriptors;
  }
  /**
   * Determines whether {@code doc} is an HTML5 document: either it carries the
   * minimal HTML5 doctype ({@code <!DOCTYPE html>}), or it has no doctype and the
   * project's default HTML doctype is the bundled HTML5 schema.
   */
  public static boolean isHtml5Document(XmlDocument doc) {
    if (doc == null) {
      return false;
    }
    XmlProlog prolog = doc.getProlog();
    XmlDoctype doctype = prolog != null ? prolog.getDoctype() : null;
    if (!isHtmlTagContainingFile(doc)) {
      return false;
    }
    // File name/path is resolved only for the debug logging below.
    final PsiFile htmlFile = doc.getContainingFile();
    final String htmlFileFullName;
    if (htmlFile != null) {
      final VirtualFile vFile = htmlFile.getVirtualFile();
      if (vFile != null) {
        htmlFileFullName = vFile.getPath();
      }
      else {
        htmlFileFullName = htmlFile.getName();
      }
    }
    else {
      htmlFileFullName = "unknown";
    }
    if (doctype == null) {
      LOG.debug("DOCTYPE for " + htmlFileFullName + " is null");
      // No doctype: treat as HTML5 iff the project default is the HTML5 schema.
      return Html5SchemaProvider.getHtml5SchemaLocation()
        .equals(ExternalResourceManagerEx.getInstanceEx().getDefaultHtmlDoctype(doc.getProject()));
    }
    final boolean html5Doctype = isHtml5Doctype(doctype);
    final String doctypeDescription = "text: " + doctype.getText() +
                                      ", dtdUri: " + doctype.getDtdUri() +
                                      ", publicId: " + doctype.getPublicId() +
                                      ", markupDecl: " + doctype.getMarkupDecl();
    LOG.debug("DOCTYPE for " + htmlFileFullName + "; " + doctypeDescription + "; HTML5: " + html5Doctype);
    return html5Doctype;
  }
  /**
   * The HTML5 doctype is the bare {@code <!DOCTYPE html>}: no DTD URI, no public id,
   * no markup declaration.
   */
  public static boolean isHtml5Doctype(XmlDoctype doctype) {
    return doctype.getDtdUri() == null && doctype.getPublicId() == null && doctype.getMarkupDecl() == null;
  }
  /** Returns {@code true} if the XML document enclosing {@code context} is an HTML5 document. */
  public static boolean isHtml5Context(XmlElement context) {
    XmlDocument doc = PsiTreeUtil.getParentOfType(context, XmlDocument.class);
    return isHtml5Document(doc);
  }
  /**
   * Returns {@code true} if {@code tag} belongs to an HTML (not XHTML) document,
   * judged by its effective doctype; a missing doctype falls back to the project default.
   * Note: the XHTML 4 schema location is explicitly treated as HTML here.
   */
  public static boolean isHtmlTag(@NotNull XmlTag tag) {
    if (tag.getLanguage() != HTMLLanguage.INSTANCE) return false;
    XmlDocument doc = PsiTreeUtil.getParentOfType(tag, XmlDocument.class);
    String doctype = null;
    if (doc != null) {
      doctype = XmlUtil.getDtdUri(doc);
    }
    doctype = doctype == null ? ExternalResourceManagerEx.getInstanceEx().getDefaultHtmlDoctype(tag.getProject()) : doctype;
    return XmlUtil.XHTML4_SCHEMA_LOCATION.equals(doctype) ||
           !StringUtil.containsIgnoreCase(doctype, "xhtml");
  }
  /** Returns {@code true} if the enclosing document declares a doctype that is not the HTML5 one. */
  public static boolean hasNonHtml5Doctype(XmlElement context) {
    XmlDocument doc = PsiTreeUtil.getParentOfType(context, XmlDocument.class);
    if (doc == null) {
      return false;
    }
    XmlProlog prolog = doc.getProlog();
    XmlDoctype doctype = prolog != null ? prolog.getDoctype() : null;
    return doctype != null && !isHtml5Doctype(doctype);
  }
  /** Returns {@code true} if {@code tagName} (case-sensitive) is one of the tags added by HTML5. */
  public static boolean isHtml5Tag(String tagName) {
    return HTML5_TAGS_SET.contains(tagName);
  }
  /** Returns {@code true} for HTML5 custom data attributes ({@code data-*}). */
  public static boolean isCustomHtml5Attribute(String attributeName) {
    return attributeName.startsWith(HTML5_DATA_ATTR_PREFIX);
  }
@Nullable
public static String getHrefBase(XmlFile file) {
final XmlTag root = file.getRootTag();
final XmlTag head = root != null ? root.findFirstSubTag("head") : null;
final XmlTag base = head != null ? head.findFirstSubTag("base") : null;
return base != null ? base.getAttributeValue("href") : null;
}
public static boolean isOwnHtmlAttribute(XmlAttributeDescriptor descriptor) {
// common html attributes are defined mostly in common.rnc, core-scripting.rnc, etc
// while own tag attributes are defined in meta.rnc
final PsiElement declaration = descriptor.getDeclaration();
final PsiFile file = declaration != null ? declaration.getContainingFile() : null;
final String name = file != null ? file.getName() : null;
return "meta.rnc".equals(name);
}
  /**
   * Returns {@code true} if the tag's namespace descriptor comes from the bundled
   * HTML5 or XHTML5 schema file.
   */
  public static boolean tagHasHtml5Schema(@NotNull XmlTag context) {
    XmlElementDescriptor descriptor = context.getDescriptor();
    if (descriptor != null) {
      XmlNSDescriptor nsDescriptor = descriptor.getNSDescriptor();
      XmlFile descriptorFile = nsDescriptor != null ? nsDescriptor.getDescriptorFile() : null;
      // NOTE(review): assumes descriptorFile.getVirtualFile() is non-null for schema files — confirm
      String descriptorPath = descriptorFile != null ? descriptorFile.getVirtualFile().getPath() : null;
      return Comparing.equal(Html5SchemaProvider.getHtml5SchemaLocation(), descriptorPath) ||
             Comparing.equal(Html5SchemaProvider.getXhtml5SchemaLocation(), descriptorPath);
    }
    return false;
  }
  // Control-flow exception used to abort the lightweight HTML parse in
  // detectCharsetFromMetaTag once the answer is known; a shared instance avoids
  // the cost of filling in a stack trace on every use.
  private static class TerminateException extends RuntimeException {
    private static final TerminateException INSTANCE = new TerminateException();
  }
public static Charset detectCharsetFromMetaTag(@NotNull CharSequence content) {
// check for <meta http-equiv="charset=CharsetName" > or <meta charset="CharsetName"> and return Charset
// because we will lightly parse and explicit charset isn't used very often do quick check for applicability
int charPrefix = StringUtil.indexOf(content, CHARSET);
do {
if (charPrefix == -1) return null;
int charsetPrefixEnd = charPrefix + CHARSET.length();
while (charsetPrefixEnd < content.length() && Character.isWhitespace(content.charAt(charsetPrefixEnd))) ++charsetPrefixEnd;
if (charsetPrefixEnd < content.length() && content.charAt(charsetPrefixEnd) == '=') break;
charPrefix = StringUtil.indexOf(content,CHARSET, charsetPrefixEnd);
} while(true);
final Ref<String> charsetNameRef = new Ref<String>();
try {
new HtmlBuilderDriver(content).build(new XmlBuilder() {
@NonNls final Set<String> inTag = new THashSet<String>();
boolean metHttpEquiv = false;
boolean metHttml5Charset = false;
@Override
public void doctype(@Nullable final CharSequence publicId,
@Nullable final CharSequence systemId,
final int startOffset,
final int endOffset) {
}
@Override
public ProcessingOrder startTag(final CharSequence localName, final String namespace, final int startoffset, final int endoffset,
final int headerEndOffset) {
@NonNls String name = localName.toString().toLowerCase();
inTag.add(name);
if (!inTag.contains("head") && !"html".equals(name)) terminate();
return ProcessingOrder.TAGS_AND_ATTRIBUTES;
}
private void terminate() {
throw TerminateException.INSTANCE;
}
@Override
public void endTag(final CharSequence localName, final String namespace, final int startoffset, final int endoffset) {
@NonNls final String name = localName.toString().toLowerCase();
if ("meta".equals(name) && (metHttpEquiv || metHttml5Charset) && contentAttributeValue != null) {
String charsetName;
if (metHttpEquiv) {
int start = contentAttributeValue.indexOf(CHARSET_PREFIX);
if (start == -1) return;
start += CHARSET_PREFIX.length();
int end = contentAttributeValue.indexOf(';', start);
if (end == -1) end = contentAttributeValue.length();
charsetName = contentAttributeValue.substring(start, end);
} else /*if (metHttml5Charset) */ {
charsetName = StringUtil.stripQuotesAroundValue(contentAttributeValue);
}
charsetNameRef.set(charsetName);
terminate();
}
if ("head".equals(name)) {
terminate();
}
inTag.remove(name);
metHttpEquiv = false;
metHttml5Charset = false;
contentAttributeValue = null;
}
private String contentAttributeValue;
@Override
public void attribute(final CharSequence localName, final CharSequence v, final int startoffset, final int endoffset) {
@NonNls final String name = localName.toString().toLowerCase();
if (inTag.contains("meta")) {
@NonNls String value = v.toString().toLowerCase();
if (name.equals("http-equiv")) {
metHttpEquiv |= value.equals("content-type");
} else if (name.equals(CHARSET)) {
metHttml5Charset = true;
contentAttributeValue = value;
}
if (name.equals("content")) {
contentAttributeValue = value;
}
}
}
@Override
public void textElement(final CharSequence display, final CharSequence physical, final int startoffset, final int endoffset) {
}
@Override
public void entityRef(final CharSequence ref, final int startOffset, final int endOffset) {
}
@Override
public void error(String message, int startOffset, int endOffset) {
}
});
}
catch (TerminateException ignored) {
//ignore
}
catch (Exception ignored) {
// some weird things can happen, like unbalanaced tree
}
String name = charsetNameRef.get();
return CharsetToolkit.forName(name);
}
public static boolean isTagWithoutAttributes(@NonNls String tagName) {
return tagName != null && "br".equalsIgnoreCase(tagName);
}
public static boolean hasHtml(PsiFile file) {
return isHtmlFile(file) || file.getViewProvider() instanceof TemplateLanguageFileViewProvider;
}
public static boolean supportsXmlTypedHandlers(PsiFile file) {
Language language = file.getLanguage();
while (language != null) {
if ("JavaScript".equals(language.getID())) return true;
language = language.getBaseLanguage();
}
return false;
}
public static boolean hasHtmlPrefix(@NotNull String url) {
return url.startsWith("http://") ||
url.startsWith("https://") ||
url.startsWith("//") || //Protocol-relative URL
url.startsWith("ftp://");
}
  /** Returns {@code true} if the element's language is HTML or XHTML. */
  public static boolean isHtmlFile(@NotNull PsiElement element) {
    Language language = element.getLanguage();
    return language == HTMLLanguage.INSTANCE || language == XHTMLLanguage.INSTANCE;
  }
  /** Returns {@code true} if the virtual file's type is HTML or XHTML. */
  public static boolean isHtmlFile(@NotNull VirtualFile file) {
    FileType fileType = file.getFileType();
    return fileType == HtmlFileType.INSTANCE || fileType == XHtmlFileType.INSTANCE;
  }
  /**
   * Returns {@code true} if {@code element} sits in HTML content: inside an HTML tag
   * or HTML document, or in a file whose (template data) language is XHTML.
   */
  public static boolean isHtmlTagContainingFile(PsiElement element) {
    if (element == null) {
      return false;
    }
    final PsiFile containingFile = element.getContainingFile();
    if (containingFile != null) {
      // Fast paths: an enclosing HtmlTag or HtmlDocument settles it.
      final XmlTag tag = PsiTreeUtil.getParentOfType(element, XmlTag.class, false);
      if (tag instanceof HtmlTag) {
        return true;
      }
      final XmlDocument document = PsiTreeUtil.getParentOfType(element, XmlDocument.class, false);
      if (document instanceof HtmlDocumentImpl) {
        return true;
      }
      // Otherwise decide by the file's language, unwrapping template files
      // to their template data language.
      final FileViewProvider provider = containingFile.getViewProvider();
      Language language;
      if (provider instanceof TemplateLanguageFileViewProvider) {
        language = ((TemplateLanguageFileViewProvider)provider).getTemplateDataLanguage();
      }
      else {
        language = provider.getBaseLanguage();
      }
      return language == XHTMLLanguage.INSTANCE;
    }
    return false;
  }
public static boolean isScriptTag(@Nullable XmlTag tag) {
return tag != null && tag.getLocalName().equalsIgnoreCase(SCRIPT_TAG_NAME);
}
}
|
|
/*
* Copyright (c) 2014 - 2016 Ngewi Fet <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gnucash.android.ui.transaction;
import android.app.Activity;
import android.content.Intent;
import android.content.res.Configuration;
import android.database.Cursor;
import android.inputmethodservice.KeyboardView;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.widget.SimpleCursorAdapter;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import net.objecthunter.exp4j.Expression;
import net.objecthunter.exp4j.ExpressionBuilder;
import org.gnucash.android.R;
import org.gnucash.android.db.DatabaseSchema;
import org.gnucash.android.db.adapter.AccountsDbAdapter;
import org.gnucash.android.db.adapter.CommoditiesDbAdapter;
import org.gnucash.android.model.AccountType;
import org.gnucash.android.model.BaseModel;
import org.gnucash.android.model.Commodity;
import org.gnucash.android.model.Money;
import org.gnucash.android.model.Split;
import org.gnucash.android.model.Transaction;
import org.gnucash.android.model.TransactionType;
import org.gnucash.android.ui.common.FormActivity;
import org.gnucash.android.ui.common.UxArgument;
import org.gnucash.android.ui.transaction.dialog.TransferFundsDialogFragment;
import org.gnucash.android.ui.util.widget.CalculatorEditText;
import org.gnucash.android.ui.util.widget.CalculatorKeyboard;
import org.gnucash.android.ui.util.widget.TransactionTypeSwitch;
import org.gnucash.android.util.QualifiedAccountNameCursorAdapter;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Dialog for editing the splits in a transaction
*
* @author Ngewi Fet <[email protected]>
*/
public class SplitEditorFragment extends Fragment {
    // Container that holds one child view per split row.
    @BindView(R.id.split_list_layout) LinearLayout mSplitsLinearLayout;
    // Custom calculator keyboard shared by all amount fields.
    @BindView(R.id.calculator_keyboard) KeyboardView mKeyboardView;
    // Shows the running imbalance of all splits.
    @BindView(R.id.imbalance_textview) TextView mImbalanceTextView;
    private AccountsDbAdapter mAccountsDbAdapter;
    // Cursor over selectable transfer accounts (non-hidden, non-placeholder).
    private Cursor mCursor;
    private SimpleCursorAdapter mCursorAdapter;
    // One entry per split row view currently shown in mSplitsLinearLayout.
    private List<View> mSplitItemViewList;
    // UID of the account the transaction is being edited for.
    private String mAccountUID;
    private Commodity mCommodity;
    // Initial transaction amount passed in via arguments; basis for the first split.
    private BigDecimal mBaseAmount = BigDecimal.ZERO;
    CalculatorKeyboard mCalculatorKeyboard;
    // Recomputes the displayed imbalance whenever any split amount changes.
    BalanceTextWatcher mImbalanceWatcher = new BalanceTextWatcher();
/**
* Create and return a new instance of the fragment with the appropriate paramenters
* @param args Arguments to be set to the fragment. <br>
* See {@link UxArgument#AMOUNT_STRING} and {@link UxArgument#SPLIT_LIST}
* @return New instance of SplitEditorFragment
*/
public static SplitEditorFragment newInstance(Bundle args){
SplitEditorFragment fragment = new SplitEditorFragment();
fragment.setArguments(args);
return fragment;
}
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the editor layout and bind the @BindView fields via ButterKnife.
        View view = inflater.inflate(R.layout.fragment_split_editor, container, false);
        ButterKnife.bind(this, view);
        return view;
    }
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        ActionBar actionBar = ((AppCompatActivity)getActivity()).getSupportActionBar();
        assert actionBar != null;
        actionBar.setTitle(R.string.title_split_editor);
        setHasOptionsMenu(true);
        mCalculatorKeyboard = new CalculatorKeyboard(getActivity(), mKeyboardView, R.xml.calculator_keyboard);
        mSplitItemViewList = new ArrayList<>();
        //we are editing splits for a new transaction.
        // But the user may have already created some splits before. Let's check
        List<Split> splitList = getArguments().getParcelableArrayList(UxArgument.SPLIT_LIST);
        assert splitList != null;
        initArgs();
        if (!splitList.isEmpty()) {
            //aha! there are some splits. Let's load those instead
            loadSplitViews(splitList);
            // Passing null is safe: BalanceTextWatcher.afterTextChanged recomputes
            // from the split views, not from the Editable argument.
            mImbalanceWatcher.afterTextChanged(null);
        } else {
            // No prior splits: seed the editor with a single split for the base amount
            // on the current account, typed to balance the account's sign convention.
            final String currencyCode = mAccountsDbAdapter.getAccountCurrencyCode(mAccountUID);
            Split split = new Split(new Money(mBaseAmount, Commodity.getInstance(currencyCode)), mAccountUID);
            AccountType accountType = mAccountsDbAdapter.getAccountType(mAccountUID);
            TransactionType transactionType = Transaction.getTypeForBalance(accountType, mBaseAmount.signum() < 0);
            split.setType(transactionType);
            View view = addSplitView(split);
            // The first split is pinned to the current account and cannot be removed.
            view.findViewById(R.id.input_accounts_spinner).setEnabled(false);
            view.findViewById(R.id.btn_remove_split).setVisibility(View.GONE);
            TransactionsActivity.displayBalance(mImbalanceTextView, new Money(mBaseAmount.negate(), mCommodity));
        }
    }
    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Recreate the keyboard so it re-measures for the new orientation/size.
        mCalculatorKeyboard = new CalculatorKeyboard(getActivity(), mKeyboardView, R.xml.calculator_keyboard);
    }
    // Adds one editor row per existing split.
    private void loadSplitViews(List<Split> splitList) {
        for (Split split : splitList) {
            addSplitView(split);
        }
    }
    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.split_editor_actions, menu);
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                // Up navigation cancels the edit and closes the activity.
                getActivity().setResult(Activity.RESULT_CANCELED);
                getActivity().finish();
                return true;
            case R.id.menu_save:
                saveSplits();
                return true;
            case R.id.menu_add_split:
                // null split => an empty row with defaults.
                addSplitView(null);
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }
    /**
     * Add a split view and initialize it with <code>split</code>
     * @param split Split to initialize the contents to; may be null for an empty row
     * @return Returns the split view which was added
     */
    private View addSplitView(Split split){
        LayoutInflater layoutInflater = getActivity().getLayoutInflater();
        View splitView = layoutInflater.inflate(R.layout.item_split_entry, mSplitsLinearLayout, false);
        // Newest split rows are inserted at the top of the list.
        mSplitsLinearLayout.addView(splitView,0);
        SplitViewHolder viewHolder = new SplitViewHolder(splitView, split);
        // The holder is retrieved later via getTag() when reading splits back out.
        splitView.setTag(viewHolder);
        mSplitItemViewList.add(splitView);
        return splitView;
    }
    /**
     * Extracts arguments passed to the view and initializes necessary adapters and cursors
     */
    private void initArgs() {
        mAccountsDbAdapter = AccountsDbAdapter.getInstance();
        Bundle args = getArguments();
        mAccountUID = ((FormActivity) getActivity()).getCurrentAccountUID();
        // AMOUNT_STRING is expected to be a valid decimal string; BigDecimal(String)
        // keeps the exact value (no binary float rounding).
        mBaseAmount = new BigDecimal(args.getString(UxArgument.AMOUNT_STRING));
        // Only visible, non-placeholder accounts are offered as transfer targets.
        String conditions = "("
                + DatabaseSchema.AccountEntry.COLUMN_HIDDEN + " = 0 AND "
                + DatabaseSchema.AccountEntry.COLUMN_PLACEHOLDER + " = 0"
                + ")";
        mCursor = mAccountsDbAdapter.fetchAccountsOrderedByFullName(conditions, null);
        mCommodity = CommoditiesDbAdapter.getInstance().getCommodity(mAccountsDbAdapter.getCurrencyCode(mAccountUID));
    }
/**
* Holds a split item view and binds the items in it
*/
class SplitViewHolder implements OnTransferFundsListener{
        @BindView(R.id.input_split_memo) EditText splitMemoEditText;
        @BindView(R.id.input_split_amount) CalculatorEditText splitAmountEditText;
        @BindView(R.id.btn_remove_split) ImageView removeSplitButton;
        @BindView(R.id.input_accounts_spinner) Spinner accountsSpinner;
        @BindView(R.id.split_currency_symbol) TextView splitCurrencyTextView;
        // Hidden view carrying the split's UID so it survives round-trips through the UI.
        @BindView(R.id.split_uid) TextView splitUidTextView;
        // Toggle between debit/credit; also formats the amount field.
        @BindView(R.id.btn_split_type) TransactionTypeSwitch splitTypeSwitch;
        View splitView;
        // Quantity in the split's own commodity when it differs from the value
        // (multi-currency split); null otherwise.
        Money quantity;
public SplitViewHolder(View splitView, Split split){
ButterKnife.bind(this, splitView);
this.splitView = splitView;
if (split != null && !split.getQuantity().equals(split.getValue()))
this.quantity = split.getQuantity();
setListeners(split);
}
@Override
public void transferComplete(Money amount) {
quantity = amount;
}
private void setListeners(Split split){
splitAmountEditText.bindListeners(mCalculatorKeyboard);
removeSplitButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mSplitsLinearLayout.removeView(splitView);
mSplitItemViewList.remove(splitView);
mImbalanceWatcher.afterTextChanged(null);
}
});
updateTransferAccountsList(accountsSpinner);
splitCurrencyTextView.setText(mCommodity.getSymbol());
splitTypeSwitch.setAmountFormattingListener(splitAmountEditText, splitCurrencyTextView);
splitTypeSwitch.setChecked(mBaseAmount.signum() > 0);
splitUidTextView.setText(BaseModel.generateUID());
if (split != null) {
splitAmountEditText.setCommodity(split.getValue().getCommodity());
splitAmountEditText.setValue(split.getFormattedValue().asBigDecimal());
splitCurrencyTextView.setText(split.getValue().getCommodity().getSymbol());
splitMemoEditText.setText(split.getMemo());
splitUidTextView.setText(split.getUID());
String splitAccountUID = split.getAccountUID();
setSelectedTransferAccount(mAccountsDbAdapter.getID(splitAccountUID), accountsSpinner);
splitTypeSwitch.setAccountType(mAccountsDbAdapter.getAccountType(splitAccountUID));
splitTypeSwitch.setChecked(split.getType());
}
accountsSpinner.setOnItemSelectedListener(new SplitAccountListener(splitTypeSwitch, this));
splitTypeSwitch.addOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
mImbalanceWatcher.afterTextChanged(null);
}
});
splitAmountEditText.addTextChangedListener(mImbalanceWatcher);
}
/**
* Returns the value of the amount in the splitAmountEditText field without setting the value to the view
* <p>If the expression in the view is currently incomplete or invalid, null is returned.
* This method is used primarily for computing the imbalance</p>
* @return Value in the split item amount field, or {@link BigDecimal#ZERO} if the expression is empty or invalid
*/
public BigDecimal getAmountValue(){
String amountString = splitAmountEditText.getCleanString();
if (amountString.isEmpty())
return BigDecimal.ZERO;
ExpressionBuilder expressionBuilder = new ExpressionBuilder(amountString);
Expression expression;
try {
expression = expressionBuilder.build();
} catch (RuntimeException e) {
return BigDecimal.ZERO;
}
if (expression != null && expression.validate().isValid()) {
return new BigDecimal(expression.evaluate());
} else {
Log.v(SplitEditorFragment.this.getClass().getSimpleName(),
"Incomplete expression for updating imbalance: " + expression);
return BigDecimal.ZERO;
}
}
}
/**
 * Selects the entry of the given account in the accounts spinner.
 * Does nothing if the account is not present in the adapter.
 * @param accountId Database ID of the transfer account
 * @param accountsSpinner spinner whose selection should be updated
 */
private void setSelectedTransferAccount(long accountId, final Spinner accountsSpinner){
    int count = mCursorAdapter.getCount();
    for (int position = 0; position < count; position++) {
        if (mCursorAdapter.getItemId(position) == accountId){
            accountsSpinner.setSelection(position);
            return;
        }
    }
}
/**
 * Updates the list of possible transfer accounts.
 * Only accounts with the same currency can be transferred to
 */
private void updateTransferAccountsList(Spinner transferAccountSpinner){
    //mCursor was created in initArgs() and already excludes hidden/placeholder accounts
    mCursorAdapter = new QualifiedAccountNameCursorAdapter(getActivity(), mCursor);
    transferAccountSpinner.setAdapter(mCursorAdapter);
}
/**
 * Check if all the split amounts have valid values that can be saved
 * @return {@code true} if splits can be saved, {@code false} otherwise
 */
private boolean canSave(){
    for (View splitView : mSplitItemViewList) {
        SplitViewHolder holder = (SplitViewHolder) splitView.getTag();
        //force evaluation so any input error is surfaced on the view
        holder.splitAmountEditText.evaluate();
        boolean hasError = holder.splitAmountEditText.getError() != null;
        if (hasError)
            return false;
        //TODO: also check that multicurrency splits have a conversion amount present
    }
    return true;
}
/**
 * Save all the splits from the split editor and finish the activity.
 * Shows a toast and stays in the editor when any amount is invalid.
 */
private void saveSplits() {
    if (canSave()){
        Intent data = new Intent();
        data.putParcelableArrayListExtra(UxArgument.SPLIT_LIST, extractSplitsFromView());
        getActivity().setResult(Activity.RESULT_OK, data);
        getActivity().finish();
    } else {
        Toast.makeText(getActivity(), R.string.toast_error_check_split_amounts,
                Toast.LENGTH_SHORT).show();
    }
}
/**
 * Extracts the input from the views and builds {@link org.gnucash.android.model.Split}s to correspond to the input.
 * <p>Rows whose amount field holds no value are skipped.</p>
 * @return List of {@link org.gnucash.android.model.Split}s represented in the view
 */
private ArrayList<Split> extractSplitsFromView(){
    ArrayList<Split> splits = new ArrayList<>();
    for (View splitView : mSplitItemViewList) {
        SplitViewHolder holder = (SplitViewHolder) splitView.getTag();
        BigDecimal enteredAmount = holder.splitAmountEditText.getValue();
        if (enteredAmount == null)
            continue;

        String currencyCode = mAccountsDbAdapter.getCurrencyCode(mAccountUID);
        Money valueAmount = new Money(enteredAmount.abs(), Commodity.getInstance(currencyCode));
        String accountUID = mAccountsDbAdapter.getUID(holder.accountsSpinner.getSelectedItemId());

        Split split = new Split(valueAmount, accountUID);
        split.setMemo(holder.splitMemoEditText.getText().toString());
        split.setType(holder.splitTypeSwitch.getTransactionType());
        split.setUID(holder.splitUidTextView.getText().toString().trim());
        //quantity is only set for multi-currency splits
        if (holder.quantity != null)
            split.setQuantity(holder.quantity.abs());
        splits.add(split);
    }
    return splits;
}
/**
 * Updates the displayed balance of the accounts when the amount of a split is changed
 */
private class BalanceTextWatcher implements TextWatcher {

    @Override
    public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) {
        //nothing to see here, move along
    }

    @Override
    public void onTextChanged(CharSequence charSequence, int i, int i2, int i3) {
        //nothing to see here, move along
    }

    @Override
    public void afterTextChanged(Editable editable) {
        BigDecimal imbalance = BigDecimal.ZERO;
        for (View splitItem : mSplitItemViewList) {
            SplitViewHolder viewHolder = (SplitViewHolder) splitItem.getTag();
            BigDecimal amount = viewHolder.getAmountValue().abs();
            long accountId = viewHolder.accountsSpinner.getSelectedItemId();
            boolean hasDebitNormalBalance = AccountsDbAdapter.getInstance()
                    .getAccountType(accountId).hasDebitNormalBalance();

            //the amount increases the imbalance exactly when the split type
            //matches the account's normal balance side, otherwise it decreases it
            boolean increasesImbalance =
                    viewHolder.splitTypeSwitch.isChecked() == hasDebitNormalBalance;
            imbalance = increasesImbalance
                    ? imbalance.add(amount)
                    : imbalance.subtract(amount);
        }
        TransactionsActivity.displayBalance(mImbalanceTextView, new Money(imbalance, mCommodity));
    }
}
/**
 * Listens to changes in the transfer account and updates the currency symbol, the label of the
 * transaction type and, if necessary, launches a transfer-funds dialog for currency conversion
 */
private class SplitAccountListener implements AdapterView.OnItemSelectedListener {
    TransactionTypeSwitch mTypeToggleButton;
    SplitViewHolder mSplitViewHolder;

    /**
     * Flag to know when account spinner callback is due to user interaction or layout of components
     */
    boolean userInteraction = false;

    public SplitAccountListener(TransactionTypeSwitch typeToggleButton, SplitViewHolder viewHolder){
        this.mTypeToggleButton = typeToggleButton;
        this.mSplitViewHolder = viewHolder;
    }

    @Override
    public void onItemSelected(AdapterView<?> parentView, View selectedItemView, int position, long id) {
        AccountType accountType = mAccountsDbAdapter.getAccountType(id);
        mTypeToggleButton.setAccountType(accountType);

        //refresh the imbalance amount if we change the account
        mImbalanceWatcher.afterTextChanged(null);

        String fromCurrencyCode = mAccountsDbAdapter.getCurrencyCode(mAccountUID);
        String targetCurrencyCode = mAccountsDbAdapter.getCurrencyCode(mAccountsDbAdapter.getUID(id));

        if (!userInteraction || fromCurrencyCode.equals(targetCurrencyCode)){
            //first call is on layout, subsequent calls will be true and transfer will work as usual
            userInteraction = true;
            return;
        }

        //the user switched to an account in a different currency: ask for the
        //converted amount via the transfer funds dialog (result arrives in transferComplete)
        BigDecimal amountBigD = mSplitViewHolder.splitAmountEditText.getValue();
        if (amountBigD == null)
            return;

        Money amount = new Money(amountBigD, Commodity.getInstance(fromCurrencyCode));
        TransferFundsDialogFragment fragment
                = TransferFundsDialogFragment.getInstance(amount, targetCurrencyCode, mSplitViewHolder);
        fragment.show(getFragmentManager(), "tranfer_funds_editor");
    }

    @Override
    public void onNothingSelected(AdapterView<?> adapterView) {
        //nothing to see here, move along
    }
}
}
|
|
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: 2013-12-6
*
*******************************************************************************/
package org.oscm.subscriptionservice.bean;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.Query;
import org.junit.Before;
import org.junit.Test;
import org.oscm.communicationservice.local.CommunicationServiceLocal;
import org.oscm.configurationservice.local.ConfigurationServiceLocal;
import org.oscm.dataservice.local.DataService;
import org.oscm.domobjects.Marketplace;
import org.oscm.domobjects.Organization;
import org.oscm.domobjects.ParameterSet;
import org.oscm.domobjects.PlatformUser;
import org.oscm.domobjects.Product;
import org.oscm.domobjects.Session;
import org.oscm.domobjects.Subscription;
import org.oscm.domobjects.TechnicalProduct;
import org.oscm.domobjects.TriggerProcess;
import org.oscm.domobjects.enums.LocalizedObjectTypes;
import org.oscm.encrypter.AESEncrypter;
import org.oscm.internal.types.enumtypes.ServiceStatus;
import org.oscm.internal.types.enumtypes.SubscriptionStatus;
import org.oscm.internal.types.enumtypes.TriggerType;
import org.oscm.internal.types.exception.SubscriptionStateException;
import org.oscm.internal.vo.VOBillingContact;
import org.oscm.internal.vo.VOParameter;
import org.oscm.internal.vo.VOParameterDefinition;
import org.oscm.internal.vo.VOPaymentInfo;
import org.oscm.internal.vo.VOService;
import org.oscm.internal.vo.VOSubscription;
import org.oscm.internal.vo.VOTechnicalServiceOperation;
import org.oscm.internal.vo.VOUda;
import org.oscm.internal.vo.VOUsageLicense;
import org.oscm.internal.vo.VOUser;
import org.oscm.triggerservice.local.TriggerMessage;
import org.oscm.triggerservice.local.TriggerProcessMessageData;
import org.oscm.types.enumtypes.ProvisioningType;
import org.oscm.types.enumtypes.TriggerProcessParameterName;
/**
 * Unit tests verifying how subscription service operations behave while a
 * subscription is in the PENDING_UPD / SUSPENDED_UPD (and other invalid)
 * states: modification and upgrade must be rejected, termination must still
 * deactivate the subscription.
 *
 * @author Qiu
 */
public class SubscriptionServicePendingUpdateStatusTest
        extends SubscriptionServiceMockBase {

    private SubscriptionServiceBean bean;
    private TriggerProcess tp;
    private Subscription sub;
    private Product product;
    private VOSubscription voSubscription;
    private List<VOParameter> voParameters;
    private List<VOUda> voUdas;
    private final List<VOUsageLicense> usersToBeAdded = new ArrayList<>();
    private final List<VOUser> usersToBeRevoked = new ArrayList<>();
    private static final long SUBSCRIPTION_KEY = 1000L;
    private static final String SUBSCRIPTION_ID = "subId";
    private DataService ds;
    private ConfigurationServiceLocal cfgService;

    @Before
    public void setup() throws Exception {
        AESEncrypter.generateKey();
        bean = createMocksAndSpys();
        cfgService = mock(ConfigurationServiceLocal.class);
        bean.cfgService = cfgService;
        PlatformUser user = new PlatformUser();
        Organization org = new Organization();
        user.setOrganization(org);
        user.setLocale("en");
        product = givenProduct(new ParameterSet(), ServiceStatus.ACTIVE);
        sub = givenSubscription(user, SUBSCRIPTION_ID);
        voSubscription = givenVOSubscription(SUBSCRIPTION_ID);
        voParameters = givenVOParameters();
        voUdas = new ArrayList<>();
        tp = givenTriggerProcess();

        //one shared DataService mock for the bean and all of its delegate beans
        ds = mock(DataService.class);
        bean.dataManager = ds;
        bean.modUpgBean.dataManager = ds;
        bean.manageBean.dataManager = ds;
        bean.terminateBean.dataManager = ds;
        bean.terminateBean.commService = mock(CommunicationServiceLocal.class);
        Query query = mock(Query.class);
        doReturn(query).when(ds).createNamedQuery(anyString());
        doReturn("rter").when(bean.terminateBean.localizer)
                .getLocalizedTextFromBundle(
                        eq(LocalizedObjectTypes.MAIL_CONTENT),
                        (Marketplace) any(), eq(user.getLocale()),
                        eq("SUBSCRIPTION_TERMINATED_BY_SUPPLIER_REASON"));
        when(bean.dataManager.getCurrentUser()).thenReturn(user);
        when(bean.dataManager.getReference(eq(Subscription.class), anyLong()))
                .thenReturn(sub);
        doReturn(sub).when(bean.dataManager)
                .getReferenceByBusinessKey(any(Subscription.class));
        when(bean.prodSessionMgmt
                .getProductSessionsForSubscriptionTKey(anyLong()))
                        .thenReturn(new ArrayList<Session>());
        doReturn(true).when(cfgService).isPaymentInfoAvailable();
    }

    /**
     * Creates the bean under test with all EJBs, resources and mocks injected.
     * @return a Mockito spy of the fully wired bean
     */
    private SubscriptionServiceBean createMocksAndSpys() throws Exception {
        bean = new SubscriptionServiceBean();
        spyInjected(bean, givenSpyClasses());
        mockEJBs(bean);
        mockResources(bean);
        copyMocks(bean, givenMocks());
        return spy(bean);
    }

    private List<Class<?>> givenSpyClasses() {
        return new ArrayList<>();
    }

    private List<Object> givenMocks() {
        List<Object> mocks = new ArrayList<>();
        mocks.add(bean.dataManager);
        mocks.add(bean.appManager);
        return mocks;
    }

    @Test
    public void modifySubscriptionInt_PendingUpd() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.PENDING_UPD);
        // when
        try {
            bean.modifySubscriptionInt(tp);
            fail("call must cause an exception");
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.PENDING_UPD);
        }
    }

    @Test
    public void modifySubscriptionInt_SuspendedUpd() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.SUSPENDED_UPD);
        // when
        try {
            bean.modifySubscriptionInt(tp);
            fail("call must cause an exception");
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.SUSPENDED_UPD);
        }
    }

    @Test
    public void modifySubscriptionInt_Expired() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.EXPIRED);
        // when
        try {
            bean.modifySubscriptionInt(tp);
            fail("call must cause an exception");
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.EXPIRED);
        }
    }

    @Test
    public void terminateSubscription_PendingUpd() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.PENDING_UPD);
        // when
        bean.terminateSubscription(voSubscription, "Terminate");
        // then
        assertEquals(ServiceStatus.DELETED, product.getStatus());
        assertEquals(SubscriptionStatus.DEACTIVATED, sub.getStatus());
        verify(bean.appManager, times(1)).deleteInstance(eq(sub));
    }

    @Test
    public void terminateSubscription_SuspendedUpd() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.SUSPENDED_UPD);
        // when
        bean.terminateSubscription(voSubscription, "Terminate");
        // then
        assertEquals(ServiceStatus.DELETED, product.getStatus());
        assertEquals(SubscriptionStatus.DEACTIVATED, sub.getStatus());
        verify(bean.appManager, times(1)).deleteInstance(eq(sub));
    }

    @Test
    public void addRevokeUser_PendingUpd() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.PENDING_UPD);
        mockTriggerMessageForAddRevokeUser();
        // when
        try {
            bean.addRevokeUser(SUBSCRIPTION_ID, usersToBeAdded,
                    usersToBeRevoked);
            fail();
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.PENDING_UPD);
        }
    }

    @Test
    public void addRevokeUser_Pending() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.PENDING);
        mockTriggerMessageForAddRevokeUser();
        // when
        bean.addRevokeUser(SUBSCRIPTION_ID, usersToBeAdded, usersToBeRevoked);
    }

    @Test
    public void addRevokeUser_SuspendedUpd() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.SUSPENDED_UPD);
        mockTriggerMessageForAddRevokeUser();
        // when
        try {
            bean.addRevokeUser(SUBSCRIPTION_ID, usersToBeAdded,
                    usersToBeRevoked);
            fail();
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.SUSPENDED_UPD);
        }
    }

    @Test
    public void addRevokeUser_Suspended() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.SUSPENDED);
        mockTriggerMessageForAddRevokeUser();
        // when
        try {
            bean.addRevokeUser(SUBSCRIPTION_ID, usersToBeAdded,
                    usersToBeRevoked);
            fail();
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.SUSPENDED);
        }
    }

    @Test
    public void unsubscribeFromService_INVALID() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.INVALID);
        // when
        try {
            bean.unsubscribeFromService(sub.getSubscriptionId());
            fail();
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.INVALID);
        }
    }

    @Test
    public void upgradeSubscription_PendingUpd() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.PENDING_UPD);
        // when
        try {
            bean.upgradeSubscription(voSubscription, new VOService(),
                    new VOPaymentInfo(), new VOBillingContact(),
                    new ArrayList<VOUda>());
            fail("call must cause an exception");
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.PENDING_UPD);
        }
    }

    @Test
    public void upgradeSubscription_SuspendedUpd() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.SUSPENDED_UPD);
        // when
        try {
            bean.upgradeSubscription(voSubscription, new VOService(),
                    new VOPaymentInfo(), new VOBillingContact(),
                    new ArrayList<VOUda>());
            fail("call must cause an exception");
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.SUSPENDED_UPD);
        }
    }

    @Test
    public void executeServiceOperation_Pending_B10754() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.PENDING);
        VOTechnicalServiceOperation techOp = mock(
                VOTechnicalServiceOperation.class);
        // when
        try {
            bean.executeServiceOperation(voSubscription, techOp);
            fail("call must cause an exception");
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.PENDING);
        }
    }

    @Test
    public void executeServiceOperation_Invalid_B10754() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.INVALID);
        VOTechnicalServiceOperation techOp = mock(
                VOTechnicalServiceOperation.class);
        // when
        try {
            bean.executeServiceOperation(voSubscription, techOp);
            fail("call must cause an exception");
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.INVALID);
        }
    }

    @Test
    public void executeServiceOperation_Deactive_B10754() throws Exception {
        // given
        sub.setStatus(SubscriptionStatus.DEACTIVATED);
        VOTechnicalServiceOperation techOp = mock(
                VOTechnicalServiceOperation.class);
        // when
        try {
            bean.executeServiceOperation(voSubscription, techOp);
            fail("call must cause an exception");
        } catch (SubscriptionStateException e) {
            // then
            assertInvalidStateException(e, SubscriptionStatus.DEACTIVATED);
        }
    }

    /**
     * @return a single parameter "PARAMETER_ID" with value "VALUE"
     */
    private List<VOParameter> givenVOParameters() {
        List<VOParameter> voParameters = new ArrayList<>();
        VOParameter voParameter = new VOParameter();
        VOParameterDefinition vodefinition = new VOParameterDefinition();
        vodefinition.setParameterId("PARAMETER_ID");
        voParameter.setParameterDefinition(vodefinition);
        voParameter.setValue("VALUE");
        voParameters.add(voParameter);
        return voParameters;
    }

    /**
     * @return a trigger process carrying the test subscription, parameters and UDAs
     */
    private TriggerProcess givenTriggerProcess() {
        TriggerProcess triggerProcess = new TriggerProcess();
        triggerProcess.addTriggerProcessParameter(
                TriggerProcessParameterName.SUBSCRIPTION, voSubscription);
        triggerProcess.addTriggerProcessParameter(
                TriggerProcessParameterName.PARAMETERS, voParameters);
        triggerProcess.addTriggerProcessParameter(
                TriggerProcessParameterName.UDAS, voUdas);
        return triggerProcess;
    }

    /**
     * @return a product backed by an asynchronously provisioned technical product
     */
    private Product givenProduct(ParameterSet paraSet, ServiceStatus status) {
        Product product = new Product();
        product.setParameterSet(paraSet);
        product.setStatus(status);
        product.setVendor(new Organization());
        TechnicalProduct techProduct = new TechnicalProduct();
        techProduct.setProvisioningType(ProvisioningType.ASYNCHRONOUS);
        product.setTechnicalProduct(techProduct);
        return product;
    }

    private Subscription givenSubscription(PlatformUser user,
            String subscriptionId) {
        Subscription subscription = new Subscription();
        subscription.setKey(SUBSCRIPTION_KEY);
        subscription.setSubscriptionId(subscriptionId);
        subscription.setOwner(user);
        subscription.setOrganization(user.getOrganization());
        subscription.setProduct(product);
        return subscription;
    }

    private VOSubscription givenVOSubscription(String subscriptionId) {
        VOSubscription voSubscription = new VOSubscription();
        voSubscription.setKey(SUBSCRIPTION_KEY);
        voSubscription.setSubscriptionId(subscriptionId);
        return voSubscription;
    }

    /**
     * Stubs the trigger queue so addRevokeUser runs without suspension and
     * skips trigger-process validation for the test subscription.
     */
    private void mockTriggerMessageForAddRevokeUser() throws Exception {
        TriggerMessage message = new TriggerMessage(
                TriggerType.ADD_REVOKE_USER);
        List<TriggerProcessMessageData> list = new ArrayList<>();
        TriggerProcess proc = new TriggerProcess();
        TriggerProcessMessageData procMessage = new TriggerProcessMessageData(
                proc, message);
        list.add(procMessage);

        doReturn(list).when(bean.triggerQS)
                .sendSuspendingMessages(anyListOf(TriggerMessage.class));
        doNothing().when(bean).validateTriggerProcessForSubscription(sub);
    }

    /**
     * Asserts that the exception reports an invalid-state error for status {@code s}.
     */
    private void assertInvalidStateException(SubscriptionStateException e,
            SubscriptionStatus s) {
        assertEquals("ex.SubscriptionStateException.SUBSCRIPTION_INVALID_STATE",
                e.getMessageKey());
        assertEquals("enum.SubscriptionStatus." + s.name(),
                e.getMessageParams()[0]);
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.expression.aggregator;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeMap;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PVarbinary;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.util.FirstLastNthValueDataContainer;
import org.apache.phoenix.util.SizedUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base server aggregator for (FIRST|LAST|NTH)_VALUE functions
 *
 * <p>Tracks either the single top row (no offset) or the top {@code offset}
 * rows (NTH_VALUE) keyed by the order-by column's byte value.</p>
 */
public class FirstLastValueServerAggregator extends BaseAggregator {

    private static final Logger LOGGER = LoggerFactory.getLogger(FirstLastValueServerAggregator.class);
    protected List<Expression> children;
    //order-by bytes of the current single top row (used only when useOffset is false)
    protected BinaryComparator topOrder = new BinaryComparator(ByteUtil.EMPTY_BYTE_ARRAY);
    protected byte[] topValue;
    protected boolean useOffset = false;
    protected int offset = -1;
    //order-by bytes -> data values sharing that order value (used only when useOffset is true)
    protected TreeMap<byte[], LinkedList<byte[]>> topValues = new TreeMap<byte[], LinkedList<byte[]>>(new Bytes.ByteArrayComparator());
    protected boolean isAscending;
    protected boolean hasValueDescSortOrder;
    protected Expression orderByColumn;
    protected Expression dataColumn;
    protected int topValuesCount = 0;

    public FirstLastValueServerAggregator() {
        super(SortOrder.getDefault());
    }

    /** Clears all aggregation state so the instance can be reused for the next group. */
    @Override
    public void reset() {
        topOrder = new BinaryComparator(ByteUtil.EMPTY_BYTE_ARRAY);
        topValue = null;
        topValues.clear();
        topValuesCount = 0;
    }

    @Override
    public int getSize() {
        return super.getSize() + SizedUtil.IMMUTABLE_BYTES_WRITABLE_SIZE;
    }

    /**
     * Folds one row into the aggregation state.
     * Rows whose data column does not evaluate are ignored.
     *
     * @param tuple input row
     * @param ptr scratch pointer; left pointing at the data column value
     */
    @Override
    public void aggregate(Tuple tuple, ImmutableBytesWritable ptr) {
        //set pointer to ordering by field
        orderByColumn.evaluate(tuple, ptr);
        byte[] currentOrder = ptr.copyBytes();

        if (!dataColumn.evaluate(tuple, ptr)) {
            return;
        }

        if (useOffset) {
            boolean addFlag = false;
            if (topValuesCount < offset) {
                //still filling the first `offset` rows; keep unconditionally
                //(was wrapped in a pointless try/catch around a plain assignment)
                addFlag = true;
            } else {
                //buffer full: replace the currently worst-ranked entry if this row ranks better
                if (isAscending) {
                    if (removeLastElement(currentOrder, topValues.lastKey(), -1)) {
                        addFlag = true;
                        topValuesCount--;
                    }
                } else {
                    if (removeLastElement(currentOrder, topValues.firstKey(), 1)) {
                        addFlag = true;
                        topValuesCount--;
                    }
                }
            }

            if (addFlag) {
                topValuesCount++;
                if (!topValues.containsKey(currentOrder)) {
                    topValues.put(currentOrder, new LinkedList<byte[]>());
                }
                //invert bytes if is SortOrder set
                if (hasValueDescSortOrder) {
                    topValues.get(currentOrder).push(SortOrder.invert(ptr.get(), ptr.getOffset(), ptr.getLength()));
                } else {
                    topValues.get(currentOrder).push(ptr.copyBytes());
                }
            }
        } else {
            boolean isHigher;
            if (isAscending) {
                isHigher = topOrder.compareTo(currentOrder) > 0;
            } else {
                isHigher = topOrder.compareTo(currentOrder) < 0;//desc
            }

            //first row seen (empty topOrder) or strictly better ranked row
            if (topOrder.getValue().length < 1 || isHigher) {
                if (hasValueDescSortOrder) {
                    topValue = SortOrder.invert(ptr.get(), ptr.getOffset(), ptr.getLength());
                } else {
                    topValue = ptr.copyBytes();
                }
                topOrder = new BinaryComparator(currentOrder);
            }
        }
    }

    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("FirstLastValueServerAggregator"
                + " is ascending: " + isAscending + " value=");
        if (useOffset) {
            for (byte[] key : topValues.keySet()) {
                out.append(Arrays.asList(topValues.get(key)));
            }
            out.append(" offset = ").append(offset);
        } else {
            out.append(Arrays.asList(topValue));
        }
        return out.toString();
    }

    /**
     * Serializes the aggregation state into {@code ptr} as a
     * {@link FirstLastNthValueDataContainer} payload.
     *
     * @return false when there is nothing to emit or serialization fails
     */
    @Override
    public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
        FirstLastNthValueDataContainer payload = new FirstLastNthValueDataContainer();

        payload.setIsAscending(isAscending);
        payload.setFixedWidthOrderValues(orderByColumn.getDataType().isFixedWidth());
        payload.setFixedWidthDataValues(dataColumn.getDataType().isFixedWidth());

        if (useOffset) {
            payload.setOffset(offset);
            if (topValuesCount == 0) {
                return false;
            }
        } else {
            if (topValue == null) {
                return false;
            }
            //fold the single top row into the map so one serialization path handles both modes
            LinkedList<byte[]> topValueList = new LinkedList<byte[]>();
            topValueList.push(topValue);
            topValues.put(topOrder.getValue(), topValueList);
        }
        payload.setData(topValues);

        try {
            ptr.set(payload.getPayload());
        } catch (IOException ex) {
            //log with the throwable so the stack trace is not lost
            LOGGER.error(ex.getMessage(), ex);
            return false;
        }
        return true;
    }

    @Override
    public PDataType getDataType() {
        return PVarbinary.INSTANCE;
    }

    /**
     * Configures the aggregator.
     *
     * @param children expressions: order-by column at index 0, data column at index 2
     * @param isAscending requested direction; flipped when the order-by column is stored DESC
     * @param offset number of values to keep (NTH_VALUE); &lt;= 0 keeps only the top row
     */
    public void init(List<Expression> children, boolean isAscending, int offset) {
        this.children = children;
        this.offset = offset;
        if (offset > 0) {
            useOffset = true;
        }

        orderByColumn = children.get(0);
        dataColumn = children.get(2);

        //set order if modified
        hasValueDescSortOrder = (dataColumn.getSortOrder() == SortOrder.DESC);
        if (orderByColumn.getSortOrder() == SortOrder.DESC) {
            this.isAscending = !isAscending;
        } else {
            this.isAscending = isAscending;
        }
    }

    /**
     * Removes the worst-ranked buffered value when {@code currentOrder} ranks at
     * least as well, making room for the new row.
     *
     * @param sortOrderInt -1 for ascending (evict the largest key), 1 for descending
     * @return true when an element was evicted and the caller should insert the new row
     */
    private boolean removeLastElement(byte[] currentOrder, byte[] lowestKey, int sortOrderInt) {
        if (Bytes.compareTo(currentOrder, lowestKey) * sortOrderInt >= 0) {
            if (topValues.get(lowestKey).size() == 1) {
                topValues.remove(lowestKey);
            } else {
                topValues.get(lowestKey).pollFirst();
            }
            return true;
        }
        return false;
    }
}
|
|
// LSRelation.java, created Feb 8, 2005 4:29:57 AM by joewhaley
// Copyright (C) 2005 John Whaley <[email protected]>
// Licensed under the terms of the GNU LGPL; see COPYING for details.
package net.sf.bddbddb;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.StringTokenizer;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.math.BigInteger;
import jwutil.collections.SortedArraySet;
import jwutil.collections.SortedIntArraySet;
import jwutil.util.Assert;
/**
* LSRelation
*
* @author jwhaley
* @version $Id: LSRelation.java 522 2005-04-29 02:34:44Z joewhaley $
*/
public class LSRelation extends Relation {
/**
* Reference to solver.
*/
LSSolver solver;
/**
* Holds the actual data. Only one of these is non-null.
*/
SortedIntArraySet intSet;
SortedArraySet objSet;
/**
* Number of bits used for each attribute.
*/
int[] bits;
/**
 * Add tuple to this relation.
 *
 * @param t tuple to add
 * @return whether relation has changed
 */
public boolean add(BigInteger[] t) {
    if (objSet == null) {
        //small relation: stored as packed ints
        return intSet.add(compress(t));
    }
    return objSet.add(t);
}
/**
 * Extract packed value x into given array.
 * Attribute 0 occupies the lowest bits of the packed value.
 *
 * @param arr array to extract into
 * @param x packed value
 */
protected void extract(BigInteger[] arr, int x) {
    for (int k = 0; k < bits.length; ++k) {
        int mask = (1 << bits[k]) - 1;
        arr[k] = BigInteger.valueOf(x & mask);
        x >>>= bits[k];
    }
}
/**
 * Compress the given array into a packed value.
 *
 * <p>Packs attribute 0 into the lowest bits so that {@link #extract} is the
 * exact inverse. (The previous version shifted left per element, leaving
 * attribute 0 in the highest bits — the opposite of what extract reads.)</p>
 *
 * @param arr array to compress
 * @return packed value
 */
protected int compress(BigInteger[] arr) {
    int result = 0;
    int shift = 0;
    for (int k = 0; k < bits.length; ++k) {
        if (arr[k].bitLength() > bits[k])
            throw new InternalError(arr[k]+" too big for "+bits[k]+" bits");
        result |= arr[k].intValue() << shift;
        shift += bits[k];
    }
    return result;
}
/**
 * Construct a new LSRelation.
 * Storage (intSet vs objSet) is chosen later in {@link #initialize()}.
 *
 * @param solver solver object
 * @param name relation name
 * @param attributes relation attributes
 */
LSRelation(LSSolver solver, String name, List attributes) {
    super(solver, name, attributes);
    this.solver = solver;
}
/**
 * Computes the bit width of every attribute and picks the backing store:
 * a packed-int set when all attributes fit in under 32 bits together,
 * otherwise a sorted set of BigInteger[] tuples.
 *
 * @see net.sf.bddbddb.Relation#initialize()
 */
public void initialize() {
    bits = new int[attributes.size()];
    int totalBits = 0;
    for (int k = 0; k < attributes.size(); ++k) {
        Attribute a = (Attribute) attributes.get(k);
        bits[k] = a.attributeDomain.size.bitLength();
        totalBits += bits[k];
    }
    if (totalBits < 32) {
        intSet = new SortedIntArraySet();
    } else {
        objSet = (SortedArraySet) SortedArraySet.FACTORY.makeSet(TUPLE_COMPARATOR);
    }
}
public static final TupleComparator TUPLE_COMPARATOR = new TupleComparator();

/**
 * Lexicographic comparator over BigInteger[] tuples: compares element by
 * element and returns the first non-zero comparison result.
 */
public static class TupleComparator implements Comparator {
    private TupleComparator() { }
    public int compare(Object arg0, Object arg1) {
        BigInteger[] lhs = (BigInteger[]) arg0;
        BigInteger[] rhs = (BigInteger[]) arg1;
        int result = 0;
        for (int i = 0; result == 0 && i < lhs.length; ++i) {
            result = lhs[i].compareTo(rhs[i]);
        }
        return result;
    }
}
BDDSolver temp;
/* (non-Javadoc)
 * Loads this relation by delegating to a lazily created BDD-based solver:
 * the input file is loaded once into `temp`, then the relation of the same
 * name is read from it and its tuples are copied into this relation.
 * NOTE(review): exceptions during temp.load are only printed, not rethrown —
 * a failed load leaves `temp` partially initialized.
 * @see net.sf.bddbddb.Relation#load()
 */
public void load() throws IOException {
    if (temp == null) {
        temp = new BDDSolver();
        try {
            temp.load(solver.inputFilename);
        } catch (Exception x) {
            x.printStackTrace();
        }
    }
    Relation r = temp.getRelation(name);
    r.load();
    TupleIterator i = r.iterator();
    while (i.hasNext()) {
        this.add(i.nextTuple());
    }
}
/* (non-Javadoc)
 * Loads tuples from the default file "<basedir><name>.tuples".
 * @see net.sf.bddbddb.Relation#loadTuples()
 */
public void loadTuples() throws IOException {
    loadTuples(solver.basedir + name + ".tuples");
    if (solver.NOISY) solver.out.println("Loaded tuples from file: " + name + ".tuples");
}
// Validates the "# ..." header line of a tuple file.
// NOTE(review): not yet implemented — currently ignores the header and returns null.
List checkInfoLine(String filename, String s, boolean order, boolean ex) throws IOException {
    // todo.
    return null;
}
/**
 * Loads tuples from the given file. The first line may be a "# " header
 * (checked via checkInfoLine); blank lines and "#" comment lines are skipped,
 * every other line is parsed as one tuple pattern.
 *
 * @see net.sf.bddbddb.Relation#loadTuples(java.lang.String)
 */
public void loadTuples(String filename) throws IOException {
    Assert._assert(isInitialized);
    BufferedReader in = null;
    try {
        in = new BufferedReader(new FileReader(filename));
        // Load the header line.
        String line = in.readLine();
        if (line == null) return;
        if (line.startsWith("# ")) {
            checkInfoLine(filename, line, true, true);
        } else {
            solver.err.println("Tuple file \""+filename+"\" is missing header line, using default.");
        }
        while ((line = in.readLine()) != null) {
            if (line.length() == 0) continue;
            if (line.startsWith("#")) continue;
            parseTuple(line);
        }
    } finally {
        if (in != null) in.close();
    }
    updateNegated();
}
/**
 * Updated the negated form of this relation.
 */
// NOTE(review): negation propagation is not implemented yet — this is
// currently a no-op even when a negated relation exists.
void updateNegated() {
if (negated != null) {
// TODO.
}
}
// Recursively parses the i-th field of a (possibly wildcarded) tuple line and
// adds every expanded tuple to the relation.  Field syntax: a plain number,
// an inclusive range "lo-hi", or "*" for the attribute's whole domain.
// `s` holds the not-yet-consumed remainder of the line.
void parseTuple(BigInteger[] t, int i, String s) {
// All fields filled in: record the completed tuple.
if (i == t.length) {
add(t);
return;
}
// Split off the next space-delimited field; `s` becomes the remainder.
int z = s.indexOf(' ');
String v = (z < 0) ? s : s.substring(0, z);
if (z <= 0) s = "";
else s = s.substring(z+1);
BigInteger l, m;
if (v.equals("*")) {
// Wildcard: expand over the attribute's entire domain [0, size-1].
Attribute a = (Attribute) attributes.get(i);
l = BigInteger.ZERO;
m = a.attributeDomain.size.subtract(BigInteger.ONE);
} else {
int x = v.indexOf('-');
if (x < 0) {
// Single value: fill this position and recurse, no expansion needed.
t[i] = new BigInteger(v);
parseTuple(t, i+1, s);
return;
} else {
// Inclusive range "lo-hi".
l = new BigInteger(v.substring(0, x));
m = new BigInteger(v.substring(x + 1));
}
}
// Expand the range, recursing once per value in [l, m].
while (l.compareTo(m) <= 0) {
t[i] = l;
parseTuple(t, i+1, s);
l = l.add(BigInteger.ONE);
}
}
/**
 * Parse the given tuple string and add it to the relation.
 *
 * @param s tuple string
 */
void parseTuple(String s) {
    BigInteger[] tuple = new BigInteger[attributes.size()];
    // Lines containing ranges ("-") or wildcards ("*") go through the
    // expanding recursive parser; plain lines take the fast path.
    boolean needsExpansion = s.indexOf('-') >= 0 || s.indexOf('*') >= 0;
    if (needsExpansion) {
        parseTuple(tuple, 0, s);
    } else {
        StringTokenizer tokens = new StringTokenizer(s);
        int idx = 0;
        while (idx < tuple.length) {
            tuple[idx] = new BigInteger(tokens.nextToken());
            ++idx;
        }
        add(tuple);
    }
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#save()
 */
// NOTE(review): saving is not implemented for LS-backed relations.
public void save() throws IOException {
// TODO Auto-generated method stub
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#saveTuples()
 */
// NOTE(review): not implemented.
public void saveTuples() throws IOException {
// TODO Auto-generated method stub
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#saveTuples(java.lang.String)
 */
// NOTE(review): not implemented.
public void saveTuples(String filename) throws IOException {
// TODO Auto-generated method stub
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#copy()
 */
// Creates a fresh relation named "<name>'" with the same attribute list.
// NOTE(review): only the schema is copied — the tuples themselves are not;
// confirm that callers expect an empty copy.
public Relation copy() {
List a = new LinkedList(attributes);
Relation that = solver.createRelation(name + '\'', a);
return that;
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#free()
 */
// Drops both backing tuple sets so their storage can be garbage-collected.
public void free() {
intSet = null; objSet = null;
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#dsize()
 */
/** Returns the tuple count as a double, from whichever backing set is live. */
public double dsize() {
    return (intSet != null) ? intSet.size() : objSet.size();
}
// Returns the k-th tuple of the relation.
// For the packed int representation the value is decoded into `arr` (a
// caller-owned scratch buffer) and `arr` is returned; for the object-backed
// representation the stored BigInteger[] is returned directly and `arr` is
// ignored (it may be null in that case).
BigInteger[] getTuple(BigInteger[] arr, int k) {
    if (intSet != null) {
        int x = intSet.get(k);
        extract(arr, x);
        return arr;
    } else {
        // Fixed: removed a misleading dead post-increment (`objSet.get(k++)`)
        // whose result was never used.
        return (BigInteger[]) objSet.get(k);
    }
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#iterator()
 */
// Iterates over all tuples in index order.  The scratch array is only needed
// for the packed int representation; getTuple ignores it (and it stays null)
// for the object-backed set.
public TupleIterator iterator() {
return new TupleIterator() {
int k = 0;
BigInteger[] arr;
{ if (intSet != null) arr = new BigInteger[attributes.size()]; }
public BigInteger[] nextTuple() {
if (k == size()) throw new NoSuchElementException();
return getTuple(arr, k++);
}
public boolean hasNext() {
return k < size();
}
};
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#iterator(int)
 */
// Iterates over the distinct values of attribute k, in tuple order.
public TupleIterator iterator(final int k) {
    return new TupleIterator() {
        int n = 0;
        BigInteger[] arr;
        { if (intSet != null) arr = new BigInteger[attributes.size()]; }
        // Advance n past every remaining tuple whose k-th field equals currVal.
        void gotoNext(BigInteger currVal) {
            // Fixed: the loop bound was `n < arr.length` — the tuple arity,
            // not the tuple count; worse, `arr` is null for the object-backed
            // set, so this NPE'd.  The correct bound is size().
            while (n < size()) {
                arr = getTuple(arr, n);
                if (!arr[k].equals(currVal)) return;
                ++n;
            }
        }
        public BigInteger[] nextTuple() {
            if (n == size()) throw new NoSuchElementException();
            arr = getTuple(arr, n);
            // Fixed: capture the current value before gotoNext clobbers the
            // shared scratch array; previously arr[k] was read afterwards and
            // could already hold the *next* distinct value.
            BigInteger value = arr[k];
            gotoNext(value);
            return new BigInteger[] { value };
        }
        public boolean hasNext() {
            return n < size();
        }
    };
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#iterator(int, java.math.BigInteger)
 */
// Iterates over the tuples whose k-th field equals j.
// NOTE(review): hasNext() only checks the position bound, so it can report
// true even when no further match exists (nextTuple then throws) — this
// mirrors the original contract.
public TupleIterator iterator(final int k, final BigInteger j) {
    return new TupleIterator() {
        int n = 0;
        BigInteger[] arr;
        { if (intSet != null) arr = new BigInteger[attributes.size()]; }
        // Position n at the next tuple whose k-th field equals j, or throw.
        void gotoNext() {
            // Fixed: the loop bound was `n < arr.length` — the tuple arity,
            // not the tuple count; `arr` is also null for the object-backed
            // set, so this NPE'd.  The correct bound is size().
            while (n < size()) {
                arr = getTuple(arr, n);
                if (arr[k].equals(j)) return;
                ++n;
            }
            throw new NoSuchElementException();
        }
        public BigInteger[] nextTuple() {
            gotoNext();
            BigInteger[] result = arr;
            // Fixed: advance past the tuple just returned; previously n was
            // left in place, so every call re-returned the same tuple forever.
            ++n;
            return result;
        }
        public boolean hasNext() {
            return n < size();
        }
    };
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#iterator(java.math.BigInteger[])
 */
// Iterates over the tuples matching the pattern j: a negative entry of j is
// a wildcard, a non-negative entry must equal the corresponding field.
public TupleIterator iterator(final BigInteger[] j) {
    return new TupleIterator() {
        int n = 0;
        BigInteger[] arr;
        { if (intSet != null) arr = new BigInteger[attributes.size()]; }
        // Position n at the next matching tuple, or throw.
        void gotoNext() {
            // Fixed: the loop bound was `n < arr.length` — the tuple arity,
            // not the tuple count; `arr` is also null for the object-backed
            // set, so this NPE'd.  The correct bound is size().
            outer:
            while (n < size()) {
                arr = getTuple(arr, n);
                for (int k = 0; k < j.length; ++k) {
                    // Fixed: the field was compared against the whole pattern
                    // array `j` (never equal) instead of the element `j[k]`.
                    if (j[k].signum() >= 0 && !arr[k].equals(j[k])) {
                        ++n;
                        continue outer;
                    }
                }
                return;
            }
            throw new NoSuchElementException();
        }
        public BigInteger[] nextTuple() {
            gotoNext();
            BigInteger[] result = arr;
            // Fixed: advance past the tuple just returned; previously n was
            // left in place, so every call re-returned the same tuple forever.
            ++n;
            return result;
        }
        public boolean hasNext() {
            return n < size();
        }
    };
}
/* (non-Javadoc)
 * @see net.sf.bddbddb.Relation#contains(int, java.math.BigInteger)
 */
/** Returns true iff some tuple's k-th field equals j. */
public boolean contains(int k, BigInteger j) {
    TupleIterator it = iterator();
    while (it.hasNext()) {
        BigInteger[] tuple = it.nextTuple();
        if (tuple[k].equals(j)) {
            return true;
        }
    }
    return false;
}
// Returns the default string form followed by the attribute list in
// brackets, e.g. "name[attr1,attr2]".
public String verboseToString() {
    // Idiom fixes: StringBuilder instead of StringBuffer (no synchronization
    // needed), and a separator flag instead of appending a trailing comma and
    // deleting it afterwards.  Output is unchanged.
    StringBuilder sb = new StringBuilder();
    sb.append(super.toString());
    sb.append("[");
    boolean first = true;
    for (Iterator it = getAttributes().iterator(); it.hasNext(); ) {
        Attribute a = (Attribute) it.next();
        if (!first) sb.append(",");
        sb.append(a);
        first = false;
    }
    sb.append("]");
    return sb.toString();
}
}
|
|
package com.giddyplanet.embrace.tools.javawriter;
import com.giddyplanet.embrace.tools.model.TypeResolver;
import com.giddyplanet.embrace.tools.model.java.*;
import com.giddyplanet.embrace.tools.model.webidl.*;
import com.giddyplanet.embrace.tools.model.webidl.Enumeration;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.util.*;
public class JavaWriter {
private static final String INDENT = " ";
private final File srcFolder;
private File packageFolder;
private String javaPackage;
private TypeResolver resolver;
// Creates a writer that emits generated Java sources into the directory for
// `javaPackage` beneath `srcFolder` (creating it on disk).  If either is
// null, sources go directly into `srcFolder` (which may itself be null).
public JavaWriter(File srcFolder, String javaPackage, TypeResolver resolver) {
this.srcFolder = srcFolder;
this.javaPackage = javaPackage;
this.resolver = resolver;
if (srcFolder != null && javaPackage != null) {
packageFolder = getPackagePath(srcFolder, javaPackage);
} else {
packageFolder = srcFolder;
}
}
// Resolves (and creates on disk) the directory corresponding to a dotted
// package name beneath the given source folder.
private File getPackagePath(File srcFolder, String javaPackage) {
    File current = srcFolder;
    for (String segment : javaPackage.split("\\.")) {
        current = new File(current, segment);
        current.mkdirs();
    }
    return current;
}
// Renders a Java class model (JClass) as JsInterop-annotated source text:
// package header, jsinterop imports, the type declaration (interface /
// abstract class / class), then constants, fields, constructors and methods.
public String createSource(JClass jClass) {
StringBuilder sb = new StringBuilder();
if (javaPackage != null) {
sb.append("package ").append(javaPackage).append(";\n");
sb.append("\n");
}
// Functional types become @JsFunction interfaces; others get @JsType.
if (jClass.isFunctional()) {
sb.append("import jsinterop.annotations.JsFunction;\n");
}
sb.append("import jsinterop.annotations.JsIgnore;\n");
sb.append("import jsinterop.annotations.JsOverlay;\n");
sb.append("import jsinterop.annotations.JsPackage;\n");
sb.append("import jsinterop.annotations.JsProperty;\n");
if (!jClass.isFunctional()) {
sb.append("import jsinterop.annotations.JsType;\n");
}
sb.append("\n");
if (jClass.isFunctional()) {
sb.append("@JsFunction\n");
} else if (!jClass.isNoInterfaceObject()) {
sb.append("@JsType(isNative = true, namespace = JsPackage.GLOBAL)\n");
}
// Type declaration: the shape depends on the model's abstraction level.
AbstractionLevel absLvl = jClass.getAbstraction();
switch (absLvl) {
case INTERFACE: {
sb.append("public interface ").append(jClass.getName());
LinkedHashSet<String> interfaces = jClass.getInterfaces();
writeInterfaces(sb, interfaces, " extends ");
sb.append(" {\n");
break;
}
case ABSTRACT_CLASS: {
sb.append("public abstract class ").append(jClass.getName());
String superTypeName = jClass.getSuperType();
if (superTypeName != null) {
sb.append(" extends ").append(superTypeName).append(" ");
}
LinkedHashSet<String> interfaces = jClass.getInterfaces();
writeInterfaces(sb, interfaces, " implements ");
sb.append(" {\n");
break;
}
case CLASS: {
sb.append("public class ").append(jClass.getName());
String superTypeName = jClass.getSuperType();
if (superTypeName != null) {
sb.append(" extends ").append(superTypeName).append(" ");
}
LinkedHashSet<String> interfaces = jClass.getInterfaces();
writeInterfaces(sb, interfaces, " implements ");
sb.append(" {\n");
break;
}
}
// Constants: interfaces only carry them for NoInterfaceObject types (as
// initialized fields); classes declare them uninitialized, value in comment.
for (JConstant jConstant : jClass.getConstants()) {
switch (absLvl) {
case INTERFACE:
if (jClass.isNoInterfaceObject()) {
String type = fixType(jConstant.getType().getName());
sb.append(INDENT).append("public static ").append(type).append(" ").append(jConstant.getName()).append(" = ").append(jConstant.getValue()).append(";\n");
}
break;
case ABSTRACT_CLASS:
case CLASS:
String type = fixType(jConstant.getType().getName());
sb.append(INDENT).append("public static ").append(type).append(" ").append(jConstant.getName()).append("; // = ").append(jConstant.getValue()).append("\n");
break;
}
}
// Fields are only emitted for class forms (interfaces cannot have them).
for (JField field : jClass.getFields()) {
switch (absLvl) {
case INTERFACE:
break;
case ABSTRACT_CLASS:
case CLASS:
sb.append(INDENT).append("public ").append(field.getType().getName()).append(" ").append(field.getName()).append(";\n");
break;
}
}
// Constructors: empty bodies (JsInterop native stubs).
for (JMethod method : jClass.getConstructors()) {
sb.append(INDENT).append(method.getVisibility()).append(" ").append(method.getName()).append("(");
writeArguments(sb, method);
sb.append(") {}\n");
}
// Methods: abstract declarations in interfaces, `native` stubs in classes.
for (JMethod method : jClass.getMethods()) {
switch (absLvl) {
case INTERFACE:
sb.append(INDENT).append(method.getReturnType().getName()).append(" ").append(method.getName()).append("(");
writeArguments(sb, method);
sb.append(");\n");
break;
case ABSTRACT_CLASS:
sb.append(INDENT).append("public ");
if (method.isaStatic()) {
sb.append("static ");
}
sb.append("native ").append(method.getReturnType().getName()).append(" ").append(method.getName()).append("(");
writeArguments(sb, method);
sb.append(");\n");
break;
case CLASS:
sb.append(INDENT).append("public ");
if (method.isaStatic()) {
sb.append("static ");
}
sb.append("native ").append(method.getReturnType().getName()).append(" ").append(method.getName()).append("(");
writeArguments(sb, method);
sb.append(");\n");
break;
}
}
sb.append("}\n");
return sb.toString();
}
// Appends `keyword` followed by the comma-separated interface names, or
// nothing at all when the set is empty.
private void writeInterfaces(StringBuilder sb, LinkedHashSet<String> interfaces, String keyword) {
    if (!interfaces.isEmpty()) {
        // Idiom: String.join replaces the manual iterator/separator loop;
        // output is identical.
        sb.append(keyword).append(String.join(", ", interfaces));
    }
}
// Generates and writes the .java source file for the given WebIDL
// definition.  Interfaces use their (possibly renamed) Java name; enums and
// callbacks use their WebIDL name.  Unknown definition kinds are ignored.
public void createSourceFile(Definition definition) throws IOException {
    String src;
    String fileName;
    if (definition instanceof Interface) {
        Interface i = (Interface) definition;
        src = createSource(i);
        fileName = i.getJavaName();
    } else if (definition instanceof Enumeration) {
        Enumeration e = (Enumeration) definition;
        src = createSource(e);
        fileName = e.getName();
    } else if (definition instanceof Callback) {
        Callback callback = (Callback) definition;
        src = createSource(callback);
        fileName = callback.getName();
    } else {
        return;
    }
    // Single write path instead of three duplicated Files.write calls.
    File srcFile = new File(packageFolder, fileName + ".java");
    Files.write(srcFile.toPath(), src.getBytes("UTF-8"), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING);
}
// Generates and writes the .java source file for the given Java class model,
// overwriting any previous version.
public void createSourceFile(JClass definition) throws IOException {
String src = createSource(definition);
File srcFile = new File(packageFolder, definition.getName() + ".java");
Files.write(srcFile.toPath(), src.getBytes("UTF-8"), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING);
}
// Renders a WebIDL enumeration as a Java interface holding one String
// constant per enumeration value.
private String createSource(Enumeration e) {
    StringBuilder out = new StringBuilder();
    if (javaPackage != null) {
        out.append("package ").append(javaPackage).append(";\n\n");
    }
    out.append("import jsinterop.annotations.JsIgnore;\n")
       .append("import jsinterop.annotations.JsOverlay;\n")
       .append("import jsinterop.annotations.JsPackage;\n")
       .append("import jsinterop.annotations.JsProperty;\n")
       .append("import jsinterop.annotations.JsType;\n")
       .append("\n");
    out.append("public interface ").append(e.getName()).append(" {\n");
    for (String value : e.getValues()) {
        out.append(INDENT)
           .append("String ")
           .append(makeIdentifier(value).toUpperCase())
           .append(" = \"")
           .append(value)
           .append("\";\n");
    }
    out.append("}\n");
    return out.toString();
}
// Renders a WebIDL callback as a @JsFunction interface with a single
// `execute` method mirroring the callback's signature.
private String createSource(Callback callback) {
StringBuilder sb = new StringBuilder();
if (javaPackage != null) {
sb.append("package ").append(javaPackage).append(";\n");
sb.append("\n");
}
sb.append("import jsinterop.annotations.JsFunction;\n");
sb.append("import jsinterop.annotations.JsIgnore;\n");
sb.append("import jsinterop.annotations.JsOverlay;\n");
sb.append("import jsinterop.annotations.JsPackage;\n");
sb.append("import jsinterop.annotations.JsProperty;\n");
sb.append("import jsinterop.annotations.JsType;\n");
sb.append("\n");
sb.append("@JsFunction\n");
sb.append("public interface ").append(callback.getName()).append(" {\n");
sb.append(INDENT).append(fixType(callback.getReturnType()));
sb.append(" ").append("execute").append("(");
// Parameter list, comma-separated; varargs rendered with "...".
for (Iterator<Argument> iterator = callback.getArguments().iterator(); iterator.hasNext(); ) {
Argument argument = iterator.next();
sb.append(fixType(argument.getType()));
if (argument.isVarArgs()) {
sb.append("...");
}
sb.append(" ");
sb.append(fixName(argument.getName()));
if (iterator.hasNext()) {
sb.append(", ");
}
}
sb.append(");\n");
sb.append("}\n");
return sb.toString();
}
// Renders a WebIDL interface as JsInterop-annotated Java source.  The Java
// shape depends on the interface: callbacks become @JsFunction interfaces,
// NoInterfaceObject interfaces become plain Java interfaces, interfaces with
// constructors become concrete classes, everything else becomes an abstract
// class.
public String createSource(Interface definition) {
StringBuilder sb = new StringBuilder();
if (javaPackage != null) {
sb.append("package ").append(javaPackage).append(";\n");
sb.append("\n");
}
sb.append("import jsinterop.annotations.JsFunction;\n");
sb.append("import jsinterop.annotations.JsIgnore;\n");
sb.append("import jsinterop.annotations.JsOverlay;\n");
sb.append("import jsinterop.annotations.JsPackage;\n");
sb.append("import jsinterop.annotations.JsProperty;\n");
sb.append("import jsinterop.annotations.JsType;\n");
sb.append("\n");
// If the Java name differs from the WebIDL name, keep the browser name in
// the @JsType annotation.
if (definition.isCallback()) {
sb.append("@JsFunction\n");
} else {
if (definition.getName().equals(definition.getJavaName())) {
sb.append("@JsType(isNative = true, namespace = JsPackage.GLOBAL)\n");
} else {
sb.append("@JsType(isNative = true, namespace = JsPackage.GLOBAL, name = \"" + definition.getName() + "\")\n");
}
}
Set<String> extendedAttributes = definition.getExtendedAttributes();
long constructorCount = definition.getConstructors().size();
Interface superType = definition.getSuperType().getResolved();
boolean isAbstract = false;
boolean isInterface = false;
String typeName = definition.getJavaName();
// Constructible interface -> concrete class.  A super type that is itself
// rendered as an interface must go in the implements list instead.
if (constructorCount > 0) {
isAbstract = false;
isInterface = false;
sb.append("public class ").append(typeName);
boolean superIsInterface = false;
if (superType != null) {
superIsInterface = superType.isCallback() || superType.getExtendedAttributes().stream().anyMatch(s -> "NoInterfaceObject".equals(s));
if (superIsInterface) {
if (definition.getInterfaces().isEmpty()) {
sb.append(" implements ").append(superType.getName());
}
} else {
sb.append(" extends ").append(superType.getName());
}
}
writeInterfaces((Interface) definition, sb, " implements ");
if (superIsInterface && !definition.getInterfaces().isEmpty()) {
sb.append(", ").append(superType.getName());
}
sb.append(" {\n");
} else {
// No constructors: either a pure interface (callback/NoInterfaceObject)
// or an abstract class.
if (definition.isCallback() || extendedAttributes.stream().anyMatch(s -> "NoInterfaceObject".equals(s))) {
isAbstract = false;
isInterface = true;
sb.append("public interface ").append(typeName);
writeInterfaces(definition, sb, " extends ");
if (superType != null) {
if (definition.getInterfaces().isEmpty()) {
sb.append(" extends ");
}
sb.append(superType.getName());
}
sb.append(" {\n");
} else {
isAbstract = true;
isInterface = false;
sb.append("public abstract class ").append(typeName);
if (superType != null) {
sb.append(" extends ").append(superType.getName());
}
writeInterfaces((Interface) definition, sb, " implements ");
sb.append(" {\n");
}
}
// Constants: interfaces get initialized fields (@JsOverlay for native
// types); classes get uninitialized statics with the value in a comment.
for (Constant constant : ((Interface) definition).getConstants()) {
if (isInterface) {
if (!definition.isCallback()) {
sb.append(INDENT).append("@JsOverlay ");
}
sb.append(INDENT).append("public static final ").append(fixType(constant.getType())).append(" ").append(constant.getName()).append("= ").append(constant.getValue()).append(";\n");
} else {
sb.append(INDENT).append("public static ").append(fixType(constant.getType())).append(" ").append(constant.getName()).append("; // = ").append(constant.getValue()).append("\n");
}
}
// Attributes become public fields, including those inherited from
// NoInterfaceObject mixins (which cannot carry fields themselves).
if (!isInterface) {
for (Attribute attribute : definition.getAttributes()) {
sb.append(INDENT).append("public ").append(fixType(attribute.getType())).append(" ").append(fixName(attribute.getName())).append(";\n");
}
HashSet<Attribute> attributes = new HashSet<>();
collectNoInterfaceObjectAttributes(definition, attributes);
for (Attribute attribute : attributes) {
sb.append(INDENT).append("public ").append(fixType(attribute.getType())).append(" ").append(fixName(attribute.getName())).append(";\n");
}
}
// JsInterop native classes need a usable default constructor; emit a
// protected one if none of the declared constructors was empty.
boolean missingDefaultConstructor = (!isAbstract && !isInterface && constructorCount > 0);
for (Operation constructor : definition.getConstructors()) {
boolean wroteEmpty = writeConstructor(definition, sb, constructor);
missingDefaultConstructor &= !wroteEmpty;
}
if (missingDefaultConstructor) {
// NOTE(review): the class is declared with getJavaName() above, but this
// constructor uses getName() — they can differ; confirm.
sb.append(INDENT).append("protected ").append(definition.getName()).append("() {}\n");
}
for (Operation operation : ((Interface) definition).getOperations()) {
// todo: hack for HTMLFormControlsCollection
if ("namedItem".equals(operation.getName()) && "HTMLCollection".equals(definition.getName())) {
operation.setReturnType("Object");
}
writeOperation(sb, isInterface, operation);
}
// Concrete classes must also materialize operations inherited from
// interface-rendered ancestors (Java gives those no implementation).
if (!(isAbstract || isInterface)) {
HashSet<Operation> abstracts = new HashSet<>();
collectAbstractMethods((Interface) definition, abstracts);
for (Operation operation : abstracts) {
writeOperation(sb, isInterface, operation);
}
}
sb.append("}\n");
return sb.toString();
}
// Emits a public constructor for the generated class, and recursively a
// shorter overload for each trailing optional argument.  Returns true if an
// empty (no-arg) constructor was emitted anywhere in the chain.
private boolean writeConstructor(Interface definition, StringBuilder sb, Operation constructor) {
    boolean wroteEmptyConstructor = false;
    // Fixed: the generated class is declared with getJavaName() (see
    // createSource), so the constructor must use the same name — using
    // getName() produced uncompilable output whenever the two differ.
    sb.append(INDENT).append("public ").append(definition.getJavaName()).append("(");
    writeArguments(sb, constructor);
    sb.append(") {}\n");
    List<Argument> arguments = constructor.getArguments();
    if (arguments.isEmpty()) {
        wroteEmptyConstructor = true;
    }
    // Trailing optional argument: emit an overload without it.
    if (arguments.size() > 0 && arguments.get(arguments.size() - 1).isOptional()) {
        Operation shorter = new Operation(constructor.getName());
        for (Iterator<Argument> iterator = arguments.iterator(); iterator.hasNext(); ) {
            Argument arg = iterator.next();
            if (iterator.hasNext()) {
                shorter.addArgument(arg.getType(), arg.getName());
            }
        }
        wroteEmptyConstructor = writeConstructor(definition, sb, shorter);
    }
    return wroteEmptyConstructor;
}
// Recursively gathers attributes declared on NoInterfaceObject mixins in the
// definition's interface list and super-type chain, so the generated class
// can materialize them as fields.
private void collectNoInterfaceObjectAttributes(Interface definition, Set<Attribute> attributes) {
if (definition.getExtendedAttributes().contains("NoInterfaceObject")) {
attributes.addAll(definition.getAttributes());
}
LinkedHashSet<Interface> interfaces = definition.getInterfaces();
for (Interface anInterface : interfaces) {
collectNoInterfaceObjectAttributes(anInterface, attributes);
}
Interface superType = definition.getSuperType().getResolved();
if (superType != null && superType.getExtendedAttributes().contains("NoInterfaceObject")) {
collectNoInterfaceObjectAttributes(superType, attributes);
}
}
// Recursively gathers the operations a concrete generated class must
// materialize: those of NoInterfaceObject mixins and of constructor-less
// ancestors (rendered abstract), walking both the interface list and the
// super-type chain.
private void collectAbstractMethods(Interface definition, Set<Operation> ops) {
if (definition.getExtendedAttributes().contains("NoInterfaceObject") || definition.getConstructors().isEmpty()) {
ops.addAll(definition.getOperations());
}
LinkedHashSet<Interface> interfaces = definition.getInterfaces();
for (Interface anInterface : interfaces) {
collectAbstractMethods(anInterface, ops);
}
Interface superType = definition.getSuperType().getResolved();
if (superType != null) {
collectAbstractMethods(superType, ops);
}
}
// Emits a single operation: an abstract declaration inside interfaces, a
// `native` stub inside classes.  Trailing optional arguments produce
// recursive shorter overloads.
private void writeOperation(StringBuilder sb, boolean isInterface, Operation operation) {
if (isInterface) {
// NOTE(review): this branch omits the leading INDENT that the class
// branch emits — formatting inconsistency in the generated source; confirm.
sb.append(fixType(operation.getReturnType()));
} else {
if (operation.isStatic()) {
sb.append(INDENT).append("public static native ");
} else {
sb.append(INDENT).append("public native ");
}
sb.append(fixType(operation.getReturnType()));
}
sb.append(" ").append(operation.getName()).append("(");
List<Argument> arguments = writeArguments(sb, operation);
sb.append(");\n");
// Trailing optional argument: emit an overload without it.
// NOTE(review): the copied Operation does not carry over varargs/optional
// flags of the remaining arguments — confirm Operation.addArgument semantics.
if (arguments.size() > 0 && arguments.get(arguments.size() - 1).isOptional()) {
Operation op2 = new Operation(operation.getName());
op2.setReturnType(operation.getReturnType());
for (Iterator<Argument> iterator = arguments.iterator(); iterator.hasNext(); ) {
Argument arg = iterator.next();
if (iterator.hasNext()) {
op2.addArgument(arg.getType(), arg.getName());
}
}
writeOperation(sb, isInterface, op2);
}
}
// Appends the comma-separated parameter list for a generated Java method.
private void writeArguments(StringBuilder sb, JMethod method) {
    boolean first = true;
    for (JArgument argument : method.getArguments()) {
        if (!first) {
            sb.append(", ");
        }
        sb.append(argument.getType().getName());
        if (argument.isVarArgs()) {
            sb.append("...");
        }
        sb.append(" ").append(fixName(argument.getName()));
        first = false;
    }
}
// Appends the comma-separated parameter list for a WebIDL operation and
// returns its argument list for further processing by the caller.
private List<Argument> writeArguments(StringBuilder sb, Operation operation) {
    List<Argument> arguments = operation.getArguments();
    for (int idx = 0; idx < arguments.size(); ++idx) {
        if (idx > 0) {
            sb.append(", ");
        }
        Argument argument = arguments.get(idx);
        sb.append(fixType(argument.getType()));
        if (argument.isVarArgs()) {
            sb.append("...");
        }
        sb.append(" ").append(fixName(argument.getName()));
    }
    return arguments;
}
// Appends `prefix` followed by the comma-separated names of the definition's
// implemented interfaces, or nothing when there are none.
private void writeInterfaces(Interface definition, StringBuilder sb, String prefix) {
    LinkedHashSet<Interface> interfaces = definition.getInterfaces();
    if (interfaces.isEmpty()) {
        return;
    }
    sb.append(prefix);
    boolean first = true;
    for (Interface anInterface : interfaces) {
        if (!first) {
            sb.append(", ");
        }
        sb.append(anInterface.getName());
        first = false;
    }
}
// Maps a WebIDL type name to the Java type used in generated sources:
// strips nullability, flattens sequence<> to arrays, collapses Promise<> and
// various browser-specific types to Object, maps enumerations to String, and
// translates primitives.  Falls back to the resolved interface's Java name.
private String fixType(String type) {
if (type.endsWith("?")) {
// todo: nullable primitives should be boxed
type = type.substring(0, type.length() - 1);
}
if (type.startsWith("sequence<")) {
// sequence<T> -> (boxed T)[]
type = box(fixType(type.substring(9, type.length() - 1))) + "[]";
// returnType = "java.util.ArrayList<" + box(fixType(returnType.substring(9, returnType.length() - 1))) + ">";
}
if (type.startsWith("Promise<")) {
return "Object";
}
// WebIDL enumerations are generated as String-constant interfaces, so any
// enum-typed value is a plain String.
Definition resolved = resolver.resolve(type);
if (resolved != null) {
if (resolved instanceof Enumeration) {
return "String";
}
}
// NOTE(review): multi-word WebIDL types appear here with spaces removed
// (e.g. "unsignedshort") — presumably normalized upstream; confirm.
switch (type) {
case "void":
return "void";
case "DOMString":
case "USVString":
case "ByteString":
return "String";
case "short":
case "unsignedshort":
return "short";
case "int":
case "long":
case "unsignedlong":
return "int"; // long-int
case "float":
return "float";
case "double":
case "unrestricteddouble":
return "double";
case "boolean":
return "boolean";
case "ArrayBuffer":
return "com.google.gwt.typedarrays.shared.ArrayBuffer";
case "ArrayBufferView":
return "com.google.gwt.typedarrays.shared.ArrayBufferView";
case "Elements":
return "Element[]";
case "DOMTimeStamp":
case "DOMHighResTimeStamp":
return "Object"; // this really should be long
case "any":
case "object":
case "Uint8ClampedArray":
case "EventHandler":
case "OnErrorEventHandler":
case "OnBeforeUnloadEventHandler":
case "FileList":
case "MediaProvider":
case "MediaStream":
case "MediaSource":
case "DOMMatrix":
case "DOMMatrixInit":
case "Function":
case "WindowProxy":
case "RenderingContext":
case "WebGLRenderingContext":
case "MutationObserverInit":
case "Transferable":
case "CanvasImageSource":
case "HitRegionOptions":
case "ImageBitmapSource":
case "Blob": // https://html.spec.whatwg.org/#refsFILEAPI
case "File": // https://html.spec.whatwg.org/#refsFILEAPI
return "Object";
}
// Unresolvable types degrade to Object; resolved interfaces use their
// (possibly renamed) Java name; anything else passes through unchanged.
return resolved == null || "Object".equals(type) ? "Object" : resolved instanceof Interface ? ((Interface) resolved).getJavaName() : type;
}
// Maps a primitive type name to its boxed wrapper where one is known; any
// other name is returned unchanged.
private String box(String type) {
    if ("int".equals(type) || "integer".equals(type)) {
        return "Integer";
    }
    if ("double".equals(type)) {
        return "Double";
    }
    return type;
}
// Escapes argument/attribute names that collide with Java reserved words by
// appending an underscore.
private String fixName(String name) {
    switch (name) {
        case "interface":
            return "interface_";
        case "default":
            // Fixed: previously returned "return_" — a copy-paste slip; the
            // escaping convention used here is <keyword>_.
            return "default_";
        default:
            return name;
    }
}
// Converts an arbitrary string into a valid Java identifier by replacing
// every character that is not legal at its position with '_'.
private String makeIdentifier(String s) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < s.length(); i++) {
        char c = s.charAt(i);
        if (i == 0) {
            // Fixed: a character that is a valid identifier *part* but not a
            // valid *start* (e.g. a digit) used to be kept at position 0,
            // producing an illegal identifier such as "2d".
            sb.append(Character.isJavaIdentifierStart(c) ? c : '_');
        } else if (Character.isJavaIdentifierPart(c)) {
            sb.append(c);
        } else {
            sb.append('_');
        }
    }
    return sb.toString();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.test.integration.functions.mlcontext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
import org.apache.sysml.runtime.instructions.spark.utils.FrameRDDConverterUtils;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
import org.apache.sysml.runtime.matrix.data.FrameBlock;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.util.DataConverter;
import org.apache.sysml.runtime.util.UtilFunctions;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.integration.TestConfiguration;
import org.apache.sysml.test.utils.TestUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class DataFrameRowFrameConversionTest extends AutomatedTestBase
{
private final static String TEST_DIR = "functions/mlcontext/";
private final static String TEST_NAME = "DataFrameConversion";
private final static String TEST_CLASS_DIR = TEST_DIR + DataFrameRowFrameConversionTest.class.getSimpleName() + "/";
private final static int rows1 = 1045;
private final static int cols1 = 545;
private final static int cols2 = 864;
private final static double sparsity1 = 0.9;
private final static double sparsity2 = 0.1;
private final static double eps=0.0000000001;
private static SparkSession spark;
private static JavaSparkContext sc;
@BeforeClass
public static void setUpClass() {
spark = SparkSession.builder()
.appName("DataFrameRowFrameConversionTest")
.master("local")
.config("spark.memory.offHeap.enabled", "false")
.config("spark.sql.codegen.wholeStage", "false")
.getOrCreate();
sc = new JavaSparkContext(spark.sparkContext());
}
@Override
public void setUp() {
addTestConfiguration(TEST_NAME, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME, new String[] {"A", "B"}));
}
@Test
public void testRowDoubleConversionSingleDense() {
testDataFrameConversion(ValueType.DOUBLE, true, true, false);
}
@Test
public void testRowDoubleConversionSingleDenseUnknown() {
testDataFrameConversion(ValueType.DOUBLE, true, true, true);
}
@Test
public void testRowDoubleConversionSingleSparse() {
testDataFrameConversion(ValueType.DOUBLE, true, false, false);
}
@Test
public void testRowDoubleConversionSingleSparseUnknown() {
testDataFrameConversion(ValueType.DOUBLE, true, false, true);
}
@Test
public void testRowDoubleConversionMultiDense() {
testDataFrameConversion(ValueType.DOUBLE, false, true, false);
}
@Test
public void testRowDoubleConversionMultiDenseUnknown() {
testDataFrameConversion(ValueType.DOUBLE, false, true, true);
}
@Test
public void testRowDoubleConversionMultiSparse() {
testDataFrameConversion(ValueType.DOUBLE, false, false, false);
}
@Test
public void testRowDoubleConversionMultiSparseUnknown() {
testDataFrameConversion(ValueType.DOUBLE, false, false, true);
}
@Test
public void testRowStringConversionSingleDense() {
testDataFrameConversion(ValueType.STRING, true, true, false);
}
@Test
public void testRowStringConversionSingleDenseUnknown() {
testDataFrameConversion(ValueType.STRING, true, true, true);
}
@Test
public void testRowStringConversionSingleSparse() {
testDataFrameConversion(ValueType.STRING, true, false, false);
}
@Test
public void testRowStringConversionSingleSparseUnknown() {
testDataFrameConversion(ValueType.STRING, true, false, true);
}
@Test
public void testRowStringConversionMultiDense() {
testDataFrameConversion(ValueType.STRING, false, true, false);
}
@Test
public void testRowStringConversionMultiDenseUnknown() {
testDataFrameConversion(ValueType.STRING, false, true, true);
}
@Test
public void testRowStringConversionMultiSparse() {
testDataFrameConversion(ValueType.STRING, false, false, false);
}
@Test
public void testRowStringConversionMultiSparseUnknown() {
testDataFrameConversion(ValueType.STRING, false, false, true);
}
@Test
public void testRowLongConversionSingleDense() {
testDataFrameConversion(ValueType.INT, true, true, false);
}
@Test
public void testRowLongConversionSingleDenseUnknown() {
testDataFrameConversion(ValueType.INT, true, true, true);
}
@Test
public void testRowLongConversionSingleSparse() {
testDataFrameConversion(ValueType.INT, true, false, false);
}
@Test
public void testRowLongConversionSingleSparseUnknown() {
testDataFrameConversion(ValueType.INT, true, false, true);
}
@Test
public void testRowLongConversionMultiDense() {
testDataFrameConversion(ValueType.INT, false, true, false);
}
@Test
public void testRowLongConversionMultiDenseUnknown() {
testDataFrameConversion(ValueType.INT, false, true, true);
}
@Test
public void testRowLongConversionMultiSparse() {
testDataFrameConversion(ValueType.INT, false, false, false);
}
@Test
public void testRowLongConversionMultiSparseUnknown() {
testDataFrameConversion(ValueType.INT, false, false, true);
}
/**
 * Core round-trip check: random matrix -> MatrixBlock -> FrameBlock ->
 * binary-block frame RDD -> Spark DataFrame -> binary-block frame RDD ->
 * FrameBlock -> MatrixBlock, then compares the result against the input.
 *
 * @param vt             schema value type for every column (INT input is rounded first)
 * @param singleColBlock true to use cols1, false to use cols2 (multiple column blocks)
 * @param dense          true to use sparsity1, false to use sparsity2
 * @param unknownDims    true to hand dataFrameToBinaryBlock empty MatrixCharacteristics,
 *                       forcing the conversion to re-derive dimensions/non-zeros
 */
private void testDataFrameConversion(ValueType vt, boolean singleColBlock, boolean dense, boolean unknownDims) {
    boolean oldConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
    RUNTIME_PLATFORM oldPlatform = setRuntimePlatform(RUNTIME_PLATFORM.HYBRID_SPARK);
    // NOTE(review): this early return skips the finally-block restore below even though
    // setRuntimePlatform already ran -- assumed harmless for skipped tests; confirm.
    if(shouldSkipTest())
        return;
    try
    {
        ConfigurationManager.getDMLOptions().setExecutionMode(RUNTIME_PLATFORM.HYBRID_SPARK);
        //generate input data and setup metadata
        int cols = singleColBlock ? cols1 : cols2;
        double sparsity = dense ? sparsity1 : sparsity2;
        double[][] A = getRandomMatrix(rows1, cols, -10, 10, sparsity, 2373);
        // INT schemas require integral values, so round the random doubles first
        A = (vt == ValueType.INT) ? TestUtils.round(A) : A;
        MatrixBlock mbA = DataConverter.convertToMatrixBlock(A);
        FrameBlock fbA = DataConverter.convertToFrameBlock(mbA, vt);
        int blksz = ConfigurationManager.getBlocksize();
        MatrixCharacteristics mc1 = new MatrixCharacteristics(rows1, cols, blksz, blksz, mbA.getNonZeros());
        // empty characteristics simulate the "dimensions unknown" conversion path
        MatrixCharacteristics mc2 = unknownDims ? new MatrixCharacteristics() : new MatrixCharacteristics(mc1);
        ValueType[] schema = UtilFunctions.nCopies(cols, vt);
        //get binary block input rdd
        JavaPairRDD<Long,FrameBlock> in = SparkExecutionContext.toFrameJavaPairRDD(sc, fbA);
        //frame - dataframe - frame conversion
        Dataset<Row> df = FrameRDDConverterUtils.binaryBlockToDataFrame(spark, in, mc1, schema);
        JavaPairRDD<Long,FrameBlock> out = FrameRDDConverterUtils.dataFrameToBinaryBlock(sc, df, mc2, true);
        //get output frame block
        FrameBlock fbB = SparkExecutionContext.toFrameBlock(out, schema, rows1, cols);
        //compare frame blocks
        MatrixBlock mbB = DataConverter.convertToMatrixBlock(fbB);
        double[][] B = DataConverter.convertToDoubleMatrix(mbB);
        TestUtils.compareMatrices(A, B, rows1, cols, eps);
    }
    catch( Exception ex ) {
        throw new RuntimeException(ex);
    }
    finally {
        DMLScript.USE_LOCAL_SPARK_CONFIG = oldConfig;
        ConfigurationManager.getDMLOptions().setExecutionMode(oldPlatform);
    }
}
/**
 * Stops the shared SparkSession/SparkContext and clears the static references
 * so that later test suites in the same JVM can create their own SparkContext.
 */
@AfterClass
public static void tearDownClass() {
    // stop underlying spark context to allow single jvm tests (otherwise the
    // next test that tries to create a SparkContext would fail)
    spark.stop();
    sc = null;
    spark = null;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.realtime.appenderator;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.JSONParseSpec;
import org.apache.druid.data.input.impl.MapInputRowParser;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.indexer.partitions.PartitionsSpec;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.FileUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.java.util.emitter.core.NoopEmitter;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.IndexMerger;
import org.apache.druid.segment.IndexMergerV9;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.incremental.AppendableIndexSpec;
import org.apache.druid.segment.incremental.ParseExceptionHandler;
import org.apache.druid.segment.incremental.RowIngestionMeters;
import org.apache.druid.segment.incremental.SimpleRowIngestionMeters;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.TuningConfig;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.druid.segment.loading.DataSegmentPusher;
import org.apache.druid.segment.realtime.FireDepartmentMetrics;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.joda.time.Period;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CopyOnWriteArrayList;
/**
 * Test fixture for a batch (offline) {@link Appenderator}. Wires together an
 * in-memory {@link DataSchema} (count + longSum aggregators, MINUTE segment
 * granularity, JSON/map row parser), a minimal tuning config, and a
 * {@link DataSegmentPusher} that records pushed segments and can simulate
 * push failures. Closing the tester closes the appenderator, the emitter, and
 * deletes the base persist directory.
 */
public class BatchAppenderatorTester implements AutoCloseable
{
  public static final String DATASOURCE = "foo";
  private final DataSchema schema;
  private final AppenderatorConfig tuningConfig;
  private final FireDepartmentMetrics metrics;
  private final ObjectMapper objectMapper;
  private final Appenderator appenderator;
  private final ServiceEmitter emitter;
  // Segments recorded by the test pusher, in push order; CopyOnWriteArrayList
  // presumably for safe concurrent access from appenderator threads -- TODO confirm.
  private final List<DataSegment> pushedSegments = new CopyOnWriteArrayList<>();
  /** Convenience overload: maxSizeInBytes of -1, temp persist dir, no push failures. */
  public BatchAppenderatorTester(
      final int maxRowsInMemory
  )
  {
    this(maxRowsInMemory, -1, null, false);
  }
  /** Convenience overload: maxSizeInBytes of -1, temp persist dir. */
  public BatchAppenderatorTester(
      final int maxRowsInMemory,
      final boolean enablePushFailure
  )
  {
    this(maxRowsInMemory, -1, null, enablePushFailure);
  }
  /** Convenience overload: temp persist dir. */
  public BatchAppenderatorTester(
      final int maxRowsInMemory,
      final long maxSizeInBytes,
      final boolean enablePushFailure
  )
  {
    this(maxRowsInMemory, maxSizeInBytes, null, enablePushFailure);
  }
  /** Convenience overload: default meters, overhead check enabled, non-legacy batch path. */
  public BatchAppenderatorTester(
      final int maxRowsInMemory,
      final long maxSizeInBytes,
      final File basePersistDirectory,
      final boolean enablePushFailure
  )
  {
    this(
        maxRowsInMemory,
        maxSizeInBytes,
        basePersistDirectory,
        enablePushFailure,
        new SimpleRowIngestionMeters(),
        false,
        false
    );
  }
  /** Convenience overload with caller-supplied meters. */
  public BatchAppenderatorTester(
      final int maxRowsInMemory,
      final long maxSizeInBytes,
      @Nullable final File basePersistDirectory,
      final boolean enablePushFailure,
      final RowIngestionMeters rowIngestionMeters
  )
  {
    this(maxRowsInMemory, maxSizeInBytes, basePersistDirectory, enablePushFailure, rowIngestionMeters,
        false, false
    );
  }
  /**
   * Full constructor: builds the schema, tuning config, metrics, index IO/merger,
   * emitter, the recording segment pusher, and finally the offline appenderator.
   *
   * @param maxRowsInMemory                row limit before persist
   * @param maxSizeInBytes                 byte limit; a value of exactly 0 selects the
   *                                       default (a third of current JVM total memory)
   * @param basePersistDirectory           persist dir; null creates a fresh temp dir
   * @param enablePushFailure              if true, pushes alternate failure/success,
   *                                       starting with a failure
   * @param rowIngestionMeters             meters shared with the parse-exception handler
   * @param skipBytesInMemoryOverheadCheck forwarded to the tuning config
   * @param useLegacyBatchProcessing       forwarded to Appenderators.createOffline
   */
  public BatchAppenderatorTester(
      final int maxRowsInMemory,
      final long maxSizeInBytes,
      @Nullable final File basePersistDirectory,
      final boolean enablePushFailure,
      final RowIngestionMeters rowIngestionMeters,
      final boolean skipBytesInMemoryOverheadCheck,
      final boolean useLegacyBatchProcessing
  )
  {
    objectMapper = new DefaultObjectMapper();
    objectMapper.registerSubtypes(LinearShardSpec.class);
    // Serialize the row parser to a Map, the form DataSchema's "parser" field expects.
    final Map<String, Object> parserMap = objectMapper.convertValue(
        new MapInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("ts", "auto", null),
                new DimensionsSpec(null, null, null),
                null,
                null,
                null
            )
        ),
        Map.class
    );
    schema = new DataSchema(
        DATASOURCE,
        null,
        null,
        new AggregatorFactory[]{
            new CountAggregatorFactory("count"),
            new LongSumAggregatorFactory("met", "met")
        },
        new UniformGranularitySpec(Granularities.MINUTE, Granularities.NONE, null),
        null,
        parserMap,
        objectMapper
    );
    tuningConfig = new TestIndexTuningConfig(
        TuningConfig.DEFAULT_APPENDABLE_INDEX,
        maxRowsInMemory,
        // only an exact 0 is replaced by the default; -1 from the convenience
        // constructors is passed through unchanged
        maxSizeInBytes == 0L ? getDefaultMaxBytesInMemory() : maxSizeInBytes,
        skipBytesInMemoryOverheadCheck,
        new IndexSpec(),
        0,
        false,
        0L,
        OffHeapMemorySegmentWriteOutMediumFactory.instance(),
        IndexMerger.UNLIMITED_MAX_COLUMNS_TO_MERGE,
        basePersistDirectory == null ? createNewBasePersistDirectory() : basePersistDirectory
    );
    metrics = new FireDepartmentMetrics();
    IndexIO indexIO = new IndexIO(
        objectMapper,
        () -> 0
    );
    IndexMerger indexMerger = new IndexMergerV9(
        objectMapper,
        indexIO,
        OffHeapMemorySegmentWriteOutMediumFactory.instance()
    );
    emitter = new ServiceEmitter(
        "test",
        "test",
        new NoopEmitter()
    );
    emitter.start();
    EmittingLogger.registerEmitter(emitter);
    DataSegmentPusher dataSegmentPusher = new DataSegmentPusher()
    {
      // Starts true so the first push fails when enablePushFailure is set;
      // thereafter failures and successes alternate.
      private boolean mustFail = true;
      @Deprecated
      @Override
      public String getPathForHadoop(String dataSource)
      {
        return getPathForHadoop();
      }
      @Override
      public String getPathForHadoop()
      {
        throw new UnsupportedOperationException();
      }
      @Override
      public DataSegment push(File file, DataSegment segment, boolean useUniquePath) throws IOException
      {
        if (enablePushFailure && mustFail) {
          mustFail = false;
          throw new IOException("Push failure test");
        } else if (enablePushFailure) {
          mustFail = true;
        }
        pushedSegments.add(segment);
        return segment;
      }
      @Override
      public Map<String, Object> makeLoadSpec(URI uri)
      {
        throw new UnsupportedOperationException();
      }
    };
    appenderator = Appenderators.createOffline(
        schema.getDataSource(),
        schema,
        tuningConfig,
        metrics,
        dataSegmentPusher,
        objectMapper,
        indexIO,
        indexMerger,
        rowIngestionMeters,
        new ParseExceptionHandler(rowIngestionMeters, false, Integer.MAX_VALUE, 0),
        useLegacyBatchProcessing
    );
  }
  /** Default byte limit: one third of the JVM's current total memory. */
  private long getDefaultMaxBytesInMemory()
  {
    return (Runtime.getRuntime().totalMemory()) / 3;
  }
  public DataSchema getSchema()
  {
    return schema;
  }
  public AppenderatorConfig getTuningConfig()
  {
    return tuningConfig;
  }
  public FireDepartmentMetrics getMetrics()
  {
    return metrics;
  }
  public ObjectMapper getObjectMapper()
  {
    return objectMapper;
  }
  public Appenderator getAppenderator()
  {
    return appenderator;
  }
  /** Segments successfully handed to the test pusher, in push order. */
  public List<DataSegment> getPushedSegments()
  {
    return pushedSegments;
  }
  /** Closes the appenderator and emitter, then deletes the persist directory. */
  @Override
  public void close() throws Exception
  {
    appenderator.close();
    emitter.close();
    FileUtils.deleteDirectory(tuningConfig.getBasePersistDirectory());
  }
  private static File createNewBasePersistDirectory()
  {
    return FileUtils.createTempDir("druid-batch-persist");
  }
  /**
   * Minimal {@link AppenderatorConfig} for these tests: partitionsSpec is always
   * null and the intermediate-persist index spec mirrors the main index spec.
   */
  private static class TestIndexTuningConfig implements AppenderatorConfig
  {
    private final AppendableIndexSpec appendableIndexSpec;
    private final int maxRowsInMemory;
    private final long maxBytesInMemory;
    private final boolean skipBytesInMemoryOverheadCheck;
    private final int maxColumnsToMerge;
    private final PartitionsSpec partitionsSpec;
    private final IndexSpec indexSpec;
    private final File basePersistDirectory;
    private final int maxPendingPersists;
    private final boolean reportParseExceptions;
    private final long pushTimeout;
    private final IndexSpec indexSpecForIntermediatePersists;
    @Nullable
    private final SegmentWriteOutMediumFactory segmentWriteOutMediumFactory;
    public TestIndexTuningConfig(
        AppendableIndexSpec appendableIndexSpec,
        Integer maxRowsInMemory,
        Long maxBytesInMemory,
        Boolean skipBytesInMemoryOverheadCheck,
        IndexSpec indexSpec,
        Integer maxPendingPersists,
        Boolean reportParseExceptions,
        Long pushTimeout,
        @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory,
        Integer maxColumnsToMerge,
        File basePersistDirectory
    )
    {
      this.appendableIndexSpec = appendableIndexSpec;
      this.maxRowsInMemory = maxRowsInMemory;
      this.maxBytesInMemory = maxBytesInMemory;
      this.skipBytesInMemoryOverheadCheck = skipBytesInMemoryOverheadCheck;
      this.indexSpec = indexSpec;
      this.maxPendingPersists = maxPendingPersists;
      this.reportParseExceptions = reportParseExceptions;
      this.pushTimeout = pushTimeout;
      this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory;
      this.maxColumnsToMerge = maxColumnsToMerge;
      this.basePersistDirectory = basePersistDirectory;
      this.partitionsSpec = null;
      this.indexSpecForIntermediatePersists = this.indexSpec;
    }
    // Not needed by these tests; the persist dir is fixed at construction time.
    @Override
    public TestIndexTuningConfig withBasePersistDirectory(File dir)
    {
      throw new UnsupportedOperationException();
    }
    @Override
    public AppendableIndexSpec getAppendableIndexSpec()
    {
      return appendableIndexSpec;
    }
    @Override
    public int getMaxRowsInMemory()
    {
      return maxRowsInMemory;
    }
    @Override
    public long getMaxBytesInMemory()
    {
      return maxBytesInMemory;
    }
    @Override
    public boolean isSkipBytesInMemoryOverheadCheck()
    {
      return skipBytesInMemoryOverheadCheck;
    }
    @Nullable
    @Override
    public PartitionsSpec getPartitionsSpec()
    {
      return partitionsSpec;
    }
    @Override
    public IndexSpec getIndexSpec()
    {
      return indexSpec;
    }
    @Override
    public IndexSpec getIndexSpecForIntermediatePersists()
    {
      return indexSpecForIntermediatePersists;
    }
    @Override
    public int getMaxPendingPersists()
    {
      return maxPendingPersists;
    }
    @Override
    public boolean isReportParseExceptions()
    {
      return reportParseExceptions;
    }
    @Nullable
    @Override
    public SegmentWriteOutMediumFactory getSegmentWriteOutMediumFactory()
    {
      return segmentWriteOutMediumFactory;
    }
    @Override
    public int getMaxColumnsToMerge()
    {
      return maxColumnsToMerge;
    }
    @Override
    public File getBasePersistDirectory()
    {
      return basePersistDirectory;
    }
    @Override
    public Period getIntermediatePersistPeriod()
    {
      return new Period(Integer.MAX_VALUE); // intermediate persist doesn't make much sense for batch jobs
    }
    @Override
    public boolean equals(Object o)
    {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      TestIndexTuningConfig that = (TestIndexTuningConfig) o;
      return Objects.equals(appendableIndexSpec, that.appendableIndexSpec) &&
             maxRowsInMemory == that.maxRowsInMemory &&
             maxBytesInMemory == that.maxBytesInMemory &&
             skipBytesInMemoryOverheadCheck == that.skipBytesInMemoryOverheadCheck &&
             maxColumnsToMerge == that.maxColumnsToMerge &&
             maxPendingPersists == that.maxPendingPersists &&
             reportParseExceptions == that.reportParseExceptions &&
             pushTimeout == that.pushTimeout &&
             Objects.equals(partitionsSpec, that.partitionsSpec) &&
             Objects.equals(indexSpec, that.indexSpec) &&
             Objects.equals(indexSpecForIntermediatePersists, that.indexSpecForIntermediatePersists) &&
             Objects.equals(basePersistDirectory, that.basePersistDirectory) &&
             Objects.equals(segmentWriteOutMediumFactory, that.segmentWriteOutMediumFactory);
    }
    @Override
    public int hashCode()
    {
      return Objects.hash(
          appendableIndexSpec,
          maxRowsInMemory,
          maxBytesInMemory,
          skipBytesInMemoryOverheadCheck,
          maxColumnsToMerge,
          partitionsSpec,
          indexSpec,
          indexSpecForIntermediatePersists,
          basePersistDirectory,
          maxPendingPersists,
          reportParseExceptions,
          pushTimeout,
          segmentWriteOutMediumFactory
      );
    }
    @Override
    public String toString()
    {
      return "IndexTuningConfig{" +
             "maxRowsInMemory=" + maxRowsInMemory +
             ", maxBytesInMemory=" + maxBytesInMemory +
             ", skipBytesInMemoryOverheadCheck=" + skipBytesInMemoryOverheadCheck +
             ", maxColumnsToMerge=" + maxColumnsToMerge +
             ", partitionsSpec=" + partitionsSpec +
             ", indexSpec=" + indexSpec +
             ", indexSpecForIntermediatePersists=" + indexSpecForIntermediatePersists +
             ", basePersistDirectory=" + basePersistDirectory +
             ", maxPendingPersists=" + maxPendingPersists +
             ", reportParseExceptions=" + reportParseExceptions +
             ", pushTimeout=" + pushTimeout +
             ", segmentWriteOutMediumFactory=" + segmentWriteOutMediumFactory +
             '}';
    }
  }
}
|
|
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.primitives.impl;
import com.google.common.collect.Maps;
import org.onlab.util.Tools;
import org.onosproject.store.primitives.TransactionId;
import org.onosproject.store.service.AsyncConsistentTreeMap;
import org.onosproject.store.service.MapEvent;
import org.onosproject.store.service.MapEventListener;
import org.onosproject.store.service.MapTransaction;
import org.onosproject.store.service.Versioned;
import java.util.Collection;
import java.util.Map;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
* An {@code AsyncConsistentTreeMap} that maps its operations to operations on
* a differently typed {@code AsyncConsistentTreeMap} by transcoding operation
* inputs and outputs.
*
* @param <V2> value type of other map
* @param <V1> value type of this map
*/
public class TranscodingAsyncConsistentTreeMap<V1, V2>
        implements AsyncConsistentTreeMap<V1> {
    private final AsyncConsistentTreeMap<V2> backingMap;
    private final Function<V2, V1> valueDecoder;
    private final Function<V1, V2> valueEncoder;
    private final Function<Versioned<V2>, Versioned<V1>>
            versionedValueTransform;
    // Identity map: listener registration/removal must track the exact
    // listener instance, not listeners that merely compare equal.
    private final Map<MapEventListener<String, V1>,
            InternalBackingMapEventListener>
            listeners = Maps.newIdentityHashMap();

    /**
     * Creates a transcoding view over the given backing map.
     * Both codecs are wrapped so {@code null} passes through unchanged.
     *
     * @param backingMap   map to which all operations are delegated
     * @param valueEncoder converts this map's values to backing values
     * @param valueDecoder converts backing values to this map's values
     */
    public TranscodingAsyncConsistentTreeMap(
            AsyncConsistentTreeMap<V2> backingMap,
            Function<V1, V2> valueEncoder,
            Function<V2, V1> valueDecoder) {
        this.backingMap = backingMap;
        this.valueEncoder = v -> v == null ? null : valueEncoder.apply(v);
        this.valueDecoder = v -> v == null ? null : valueDecoder.apply(v);
        this.versionedValueTransform = v -> v == null ? null :
                v.map(valueDecoder);
    }

    /**
     * Transcodes a backing-map entry into this map's value type.
     * Returns {@code null} for a {@code null} entry (e.g. when the backing
     * map is empty); previously a null entry caused the returned future to
     * fail with a NullPointerException.
     */
    private Map.Entry<String, Versioned<V1>> transcodeEntry(
            Map.Entry<String, Versioned<V2>> entry) {
        return entry == null ? null : Maps.immutableEntry(
                entry.getKey(),
                versionedValueTransform.apply(entry.getValue()));
    }

    @Override
    public CompletableFuture<String> firstKey() {
        return backingMap.firstKey();
    }

    @Override
    public CompletableFuture<String> lastKey() {
        return backingMap.lastKey();
    }

    @Override
    public CompletableFuture<Map.Entry<String, Versioned<V1>>>
    ceilingEntry(String key) {
        return backingMap.ceilingEntry(key).thenApply(this::transcodeEntry);
    }

    @Override
    public CompletableFuture<Map.Entry<String, Versioned<V1>>>
    floorEntry(String key) {
        return backingMap.floorEntry(key).thenApply(this::transcodeEntry);
    }

    @Override
    public CompletableFuture<Map.Entry<String, Versioned<V1>>>
    higherEntry(String key) {
        return backingMap.higherEntry(key).thenApply(this::transcodeEntry);
    }

    @Override
    public CompletableFuture<Map.Entry<String, Versioned<V1>>>
    lowerEntry(String key) {
        return backingMap.lowerEntry(key).thenApply(this::transcodeEntry);
    }

    @Override
    public CompletableFuture<Map.Entry<String, Versioned<V1>>>
    firstEntry() {
        return backingMap.firstEntry().thenApply(this::transcodeEntry);
    }

    @Override
    public CompletableFuture<Map.Entry<String, Versioned<V1>>>
    lastEntry() {
        return backingMap.lastEntry().thenApply(this::transcodeEntry);
    }

    @Override
    public CompletableFuture<Map.Entry<String, Versioned<V1>>>
    pollFirstEntry() {
        return backingMap.pollFirstEntry().thenApply(this::transcodeEntry);
    }

    @Override
    public CompletableFuture<Map.Entry<String, Versioned<V1>>>
    pollLastEntry() {
        return backingMap.pollLastEntry().thenApply(this::transcodeEntry);
    }

    @Override
    public CompletableFuture<String> lowerKey(String key) {
        return backingMap.lowerKey(key);
    }

    @Override
    public CompletableFuture<String> floorKey(String key) {
        return backingMap.floorKey(key);
    }

    @Override
    public CompletableFuture<String> ceilingKey(String key) {
        return backingMap.ceilingKey(key);
    }

    @Override
    public CompletableFuture<String> higherKey(String key) {
        return backingMap.higherKey(key);
    }

    @Override
    public CompletableFuture<NavigableSet<String>> navigableKeySet() {
        return backingMap.navigableKeySet();
    }

    @Override
    public CompletableFuture<NavigableMap<String, V1>> subMap(
            String upperKey,
            String lowerKey,
            boolean inclusiveUpper,
            boolean inclusiveLower) {
        // Fixed message: the original concatenation produced "not yetsupported.".
        throw new UnsupportedOperationException("This operation is not yet " +
                "supported.");
    }

    @Override
    public String name() {
        return backingMap.name();
    }

    @Override
    public CompletableFuture<Integer> size() {
        return backingMap.size();
    }

    @Override
    public CompletableFuture<Boolean> containsKey(String key) {
        return backingMap.containsKey(key);
    }

    @Override
    public CompletableFuture<Boolean> containsValue(V1 value) {
        return backingMap.containsValue(valueEncoder.apply(value));
    }

    @Override
    public CompletableFuture<Versioned<V1>> get(String key) {
        return backingMap.get(key).thenApply(versionedValueTransform);
    }

    @Override
    public CompletableFuture<Versioned<V1>> computeIf(
            String key, Predicate<? super V1> condition,
            BiFunction<? super String, ? super V1, ? extends V1>
                    remappingFunction) {
        // Condition and remapping run against decoded values; any exception
        // thrown by user code is surfaced as a failed future.
        try {
            return backingMap
                    .computeIf(
                            key,
                            v -> condition.test(valueDecoder.apply(v)),
                            (k, v) -> valueEncoder.apply(
                                    remappingFunction.apply(
                                            key,
                                            valueDecoder.apply(v))))
                    .thenApply(versionedValueTransform);
        } catch (Exception e) {
            return Tools.exceptionalFuture(e);
        }
    }

    @Override
    public CompletableFuture<Versioned<V1>> put(String key, V1 value) {
        return backingMap.put(key, valueEncoder.apply(value))
                .thenApply(versionedValueTransform);
    }

    @Override
    public CompletableFuture<Versioned<V1>> putAndGet(String key, V1 value) {
        return backingMap.putAndGet(key, valueEncoder.apply(value))
                .thenApply(versionedValueTransform);
    }

    @Override
    public CompletableFuture<Versioned<V1>> remove(String key) {
        return backingMap.remove(key).thenApply(versionedValueTransform);
    }

    @Override
    public CompletableFuture<Void> clear() {
        return backingMap.clear();
    }

    @Override
    public CompletableFuture<Set<String>> keySet() {
        return backingMap.keySet();
    }

    @Override
    public CompletableFuture<Collection<Versioned<V1>>> values() {
        return backingMap.values().thenApply(valueSet -> valueSet.stream()
                .map(versionedValueTransform).collect(Collectors.toSet()));
    }

    @Override
    public CompletableFuture<Set<Map.Entry<String, Versioned<V1>>>>
    entrySet() {
        return backingMap.entrySet()
                .thenApply(entries -> entries.stream()
                        .map(this::transcodeEntry)
                        .collect(Collectors.toSet()));
    }

    @Override
    public CompletableFuture<Versioned<V1>> putIfAbsent(String key, V1 value) {
        return backingMap.putIfAbsent(key, valueEncoder.apply(value))
                .thenApply(versionedValueTransform);
    }

    @Override
    public CompletableFuture<Boolean> remove(String key, V1 value) {
        return backingMap.remove(key, valueEncoder.apply(value));
    }

    @Override
    public CompletableFuture<Boolean> remove(String key, long version) {
        return backingMap.remove(key, version);
    }

    @Override
    public CompletableFuture<Versioned<V1>> replace(String key, V1 value) {
        return backingMap.replace(key, valueEncoder.apply(value))
                .thenApply(versionedValueTransform);
    }

    @Override
    public CompletableFuture<Boolean> replace(String key, V1 oldValue,
                                              V1 newValue) {
        return backingMap.replace(key, valueEncoder.apply(oldValue),
                                  valueEncoder.apply(newValue));
    }

    @Override
    public CompletableFuture<Boolean> replace(String key, long oldVersion,
                                              V1 newValue) {
        return backingMap.replace(key, oldVersion,
                                  valueEncoder.apply(newValue));
    }

    @Override
    public CompletableFuture<Void> addListener(
            MapEventListener<String, V1> listener,
            Executor executor) {
        InternalBackingMapEventListener backingMapEventListener =
                listeners.computeIfAbsent(
                        listener,
                        k -> new InternalBackingMapEventListener(listener));
        return backingMap.addListener(backingMapEventListener, executor);
    }

    @Override
    public CompletableFuture<Void> removeListener(
            MapEventListener<String, V1> listener) {
        InternalBackingMapEventListener backingMapEventListener =
                listeners.remove(listener);
        if (backingMapEventListener == null) {
            return CompletableFuture.completedFuture(null);
        } else {
            return backingMap.removeListener(backingMapEventListener);
        }
    }

    @Override
    public CompletableFuture<Boolean> prepare(
            MapTransaction<String, V1> transaction) {
        throw new UnsupportedOperationException("This operation is not yet " +
                "supported.");
    }

    @Override
    public CompletableFuture<Void> commit(TransactionId transactionId) {
        throw new UnsupportedOperationException("This operation is not yet " +
                "supported.");
    }

    @Override
    public CompletableFuture<Void> rollback(TransactionId transactionId) {
        throw new UnsupportedOperationException("This operation is not yet " +
                "supported.");
    }

    @Override
    public CompletableFuture<Boolean> prepareAndCommit(
            MapTransaction<String, V1> transaction) {
        throw new UnsupportedOperationException("This operation is not yet " +
                "supported.");
    }

    /**
     * Adapts backing-map events to this map's value type before delivering
     * them to the wrapped listener.
     */
    private class InternalBackingMapEventListener
            implements MapEventListener<String, V2> {
        private final MapEventListener<String, V1> listener;

        InternalBackingMapEventListener(
                MapEventListener<String, V1> listener) {
            this.listener = listener;
        }

        @Override
        public void event(MapEvent<String, V2> event) {
            listener.event(new MapEvent<String, V1>(
                    event.name(),
                    event.key(),
                    event.newValue() != null ?
                            event.newValue().map(valueDecoder) : null,
                    event.oldValue() != null ?
                            event.oldValue().map(valueDecoder) : null));
        }
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.pinlater.backends.redis;
/**
* Encapsulates Redis LUA scripts used by the PinLaterRedisBackend and related classes.
*/
public final class RedisLuaScripts {
private RedisLuaScripts() {} // static script holder; not instantiable
/*
* Enqueue job with given job information.
*
* Increment and get the jobId for the new job. Form the hash key for this job and store the
* given job information into the hash.
*
* Args:
* KEYS[1]: Auto incremental jobId string key.
* KEYS[2]: Hash key prefix.
* KEYS[3]: Priority queue sorted set key.
* ARGV[1]: Job body.
* ARGV[2]: Remaining(Allowed) attempts.
* ARGV[3]: Created at timestamp in seconds(float).
* ARGV[4]: Job to run timestamp in seconds(float).
* ARGV[5]: Custom status.
*
* Returns:
* The job Id of the enqueued job.
*/
public static final String ENQUEUE_JOB =
    "local jobId = redis.call('INCR', KEYS[1])\n"
    + "redis.call('HMSET', KEYS[2]..jobId,"
    + " '" + RedisBackendUtils.PINLATER_JOB_HASH_BODY_FIELD + "', ARGV[1],"
    // attempts-allowed and attempts-remaining both start at ARGV[2]
    + " '" + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_ALLOWED_FIELD + "', ARGV[2],"
    + " '" + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_REMAINING_FIELD + "', ARGV[2],"
    // created-at and updated-at both start at ARGV[3]
    + " '" + RedisBackendUtils.PINLATER_JOB_HASH_CREATED_AT_FIELD + "', ARGV[3],"
    + " '" + RedisBackendUtils.PINLATER_JOB_HASH_UPDATED_AT_FIELD + "', ARGV[3],"
    + " '" + RedisBackendUtils.PINLATER_JOB_HASH_CUSTOM_STATUS_FIELD + "', ARGV[5])"
    // NOTE(review): no '\n' before the ZADD call below; Lua requires no statement
    // separator so this still parses, but adding one would aid readability.
    + "redis.call('ZADD', KEYS[3], ARGV[4], jobId)\n"
    + "return jobId";
/*
* Dequeue limit number of jobs in the queue up to the given timestamp.
*
* Get ``limit`` number of jobs in the given sorted set up to the given timestamp. The returned
* number of jobs might be smaller than ``limit`` if we do not have that many jobs in that queue.
* Remove the number of returned jobs from the sorted set, which ensures that we remove the
* obtained job ids. Add them with the new timestamp to the in progress queue. For each job, if
 * the job hash does not exist, the hash has probably been evicted; do not return such invalid
 * jobs to the client. Otherwise, get the detailed job information and return everything in
* a list.
*
* Args:
* KEYS[1]: Pending queue sorted set key.
* KEYS[2]: In progress queue sorted set key.
* KEYS[3]: Hash key prefix.
* ARGV[1]: Current timestamp.
* ARGV[2]: Limit.
* ARGV[3]: Claim descriptor.
*
* Returns:
* A list of 6n objects, where every 6 objects represents the job ID, body, attempts allowed,
* attempts remaining, created at, and updated at. Note it is unfortunate we have to return
* the data in this format since LUA 'dict' and list of 'list' is not convertible to JAVA
* or Jedis.
*/
public static final String DEQUEUE_JOBS =
    "local jobIds = redis.call('ZRANGEBYSCORE', KEYS[1], '-inf', ARGV[1], 'LIMIT', '0',"
    + " ARGV[2])\n"
    + "local length = table.getn(jobIds)\n"
    + "local result = {}\n"
    + "if length > 0 then\n"
    // claim atomically: the first `length` ranks are exactly the ids just fetched
    + "redis.call('ZREMRANGEBYRANK', KEYS[1], 0, length - 1)\n"
    + "local zadd_args = {}\n"
    + "for i, jobId in ipairs(jobIds) do\n"
    // every claimed id (valid or not) is moved to the in-progress queue
    + "table.insert(zadd_args, ARGV[1])\n"
    + "table.insert(zadd_args, jobId)\n"
    // only jobs whose hash still exists are returned to the client
    + "if redis.call('EXISTS', KEYS[3]..jobId) == 1 then\n"
    + "local jobInfo = redis.call('HMGET', KEYS[3]..jobId, '"
    + RedisBackendUtils.PINLATER_JOB_HASH_BODY_FIELD + "', '"
    + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_ALLOWED_FIELD + "', '"
    + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_REMAINING_FIELD + "', '"
    + RedisBackendUtils.PINLATER_JOB_HASH_CREATED_AT_FIELD + "', '"
    + RedisBackendUtils.PINLATER_JOB_HASH_UPDATED_AT_FIELD + "')\n"
    + "table.insert(result, jobId)\n"
    + "table.insert(result, jobInfo[1])\n"
    + "table.insert(result, jobInfo[2])\n"
    + "table.insert(result, jobInfo[3])\n"
    + "table.insert(result, jobInfo[4])\n"
    + "table.insert(result, jobInfo[5])\n"
    // record claim time and claim descriptor on the job hash
    + "redis.call('HMSET', KEYS[3]..jobId, '"
    + RedisBackendUtils.PINLATER_JOB_HASH_UPDATED_AT_FIELD + "', ARGV[1], '"
    + RedisBackendUtils.PINLATER_JOB_HASH_CLAIM_DESCRIPTOR_FIELD + "', ARGV[3])\n"
    + "end\n"
    + "end\n"
    + "redis.call('ZADD', KEYS[2], unpack(zadd_args))\n"
    + "end\n"
    + "return result";
/*
* Remove the job from the in progress queue. This script should only be used to be prepended to
* ACK_SUCCEEDED_JOB and ACK_FAILED_JOB scripts.
*
* If this job is not in the in progress queue, then the script will just return -1. All other
* operations appended to this script will not be executed.
*
* Args:
* KEYS[1]: In progress queue sorted set key.
* ARGV[1]: Job id.
*/
private static final String REMOVE_JOB_FROM_IN_PROGRESS_QUEUE =
    "local num = redis.call('ZREM', KEYS[1], ARGV[1])\n"
    // abort the whole (concatenated) script if the job was not actually in progress
    + "if num ~= 1 then\n"
    + "return -1\n"
    + "end\n";
/*
* Set the custom status and update the updated timestamp. This script should only be used to be
* appended to ACK_SUCCEEDED_JOB and ACK_FAILED_JOB scripts.
*
 * Sets (overwrites) the custom status and the updated-at timestamp. Any appending of the new
 * status to a previous one is expected to be done by the caller before invoking the script.
 *
 * Args:
 * KEYS[2]: Hash key prefix.
 * ARGV[1]: Job id.
 * ARGV[2]: Current timestamp.
 * ARGV[3]: New custom status value to store.
*/
private static final String SET_CUSTOM_STATUS =
    // single HMSET: overwrites the custom status and bumps updated-at together
    "redis.call('HMSET', KEYS[2]..ARGV[1],"
    + " '" + RedisBackendUtils.PINLATER_JOB_HASH_CUSTOM_STATUS_FIELD + "', ARGV[3],"
    + " '" + RedisBackendUtils.PINLATER_JOB_HASH_UPDATED_AT_FIELD + "', ARGV[2])\n";
/*
 * Acknowledge the succeeded job.
*
* Remove the job from the in progress queue and add to the succeeded queue. Also append the
* custom status.
*
* Args:
* KEYS[1]: In progress queue sorted set key.
* KEYS[2]: Hash key prefix.
* KEYS[3]: Succeeded queue sorted set key.
* ARGV[1]: Job id.
* ARGV[2]: Current timestamp.
* ARGV[3]: Custom status.
*/
public static final String ACK_SUCCEEDED_JOB =
    REMOVE_JOB_FROM_IN_PROGRESS_QUEUE
    // move to the succeeded queue, scored by the ack timestamp
    + "redis.call('ZADD', KEYS[3], ARGV[2], ARGV[1])\n"
    + SET_CUSTOM_STATUS;
/*
* Acknowledge the failed job.
*
* Remove the job from the in progress queue. Check the remaining attempts of the job. If it is
* > 1, add it to pending queue with the given timestamp. Otherwise, add it to the failed queue.
* Finally append the custom status.
*
* Args:
* KEYS[1]: In progress queue sorted set key.
* KEYS[2]: Hash key prefix.
* KEYS[3]: Pending queue sorted set key.
* KEYS[4]: Failed queue sorted set key.
* ARGV[1]: Job id.
* ARGV[2]: Current timestamp in seconds(float).
* ARGV[3]: Custom status.
* ARGV[4]: Retry timestamp in seconds(float).
*/
public static final String ACK_FAILED_JOB =
    REMOVE_JOB_FROM_IN_PROGRESS_QUEUE
    + "local att = tonumber(redis.call('HGET', KEYS[2]..ARGV[1], '"
    + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_REMAINING_FIELD + "'))\n"
    + "if att == nil then\n" +
    // Job hash has been evicted. GC will take care of it so ignore.
    "return\n"
    + "elseif att > 1 then\n"
    // retries remain: reschedule at the retry timestamp and drop the claim descriptor
    + "redis.call('ZADD', KEYS[3], ARGV[4], ARGV[1])\n"
    + "redis.call('HDEL', KEYS[2]..ARGV[1], '"
    + RedisBackendUtils.PINLATER_JOB_HASH_CLAIM_DESCRIPTOR_FIELD + "')\n"
    + "else\n"
    // last attempt consumed: move to the failed queue
    + "redis.call('ZADD', KEYS[4], ARGV[2], ARGV[1])\n"
    + "end\n"
    // one attempt is consumed in either branch
    + "redis.call('HINCRBY', KEYS[2]..ARGV[1], '"
    + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_REMAINING_FIELD + "', -1)\n"
    + SET_CUSTOM_STATUS;
  /*
   * Handle the timed-out jobs in the in-progress queue.
   *
   * Fetch and remove up to ARGV[2] jobs from the in-progress queue whose score is at or
   * below the timeout timestamp. For each job, check its remaining attempts. If the
   * attempts field is missing, the job's hash has probably been evicted — count it as
   * evicted and skip it. If attempts remaining > 1, move the job to the pending queue and
   * clear its claim descriptor; otherwise move it to the failed queue. Finally decrement
   * the remaining attempts and refresh the updated_at timestamp.
   *
   * Args:
   *  KEYS[1]: In progress queue sorted set key.
   *  KEYS[2]: Hash key prefix.
   *  KEYS[3]: Pending queue sorted set key.
   *  KEYS[4]: Failed queue sorted set key.
   *  ARGV[1]: Timeout timestamp.
   *  ARGV[2]: The max number of jobs to update.
   *  ARGV[3]: Current timestamp.
   *
   * Returns:
   *  A list of 3 elements (as strings): the number of jobs moved to the failed queue, the
   *  number of jobs moved to the pending queue, and the number of jobs whose hash had been
   *  evicted.
   */
  public static final String MONITOR_TIMEOUT_UPDATE =
      "local jobIds = redis.call('ZRANGEBYSCORE', KEYS[1], '-inf', ARGV[1], 'LIMIT', '0',"
      + " ARGV[2])\n"
      + "local length = table.getn(jobIds)\n"
      + "local doneNum = 0\n"
      + "local retryNum = 0\n"
      + "local evictNum = 0\n"
      + "if length > 0 then\n"
      + "redis.call('ZREMRANGEBYRANK', KEYS[1], 0, length - 1)\n"
      + "for i, jobId in ipairs(jobIds) do\n"
      + "local att = tonumber(redis.call('HGET', KEYS[2]..jobId, '"
      + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_REMAINING_FIELD + "'))\n"
      + "if att == nil then\n"
      + "evictNum = evictNum + 1\n"
      + "else\n"
      + "if att > 1 then\n"
      + "retryNum = retryNum + 1\n"
      + "redis.call('ZADD', KEYS[3], ARGV[3], jobId)\n"
      + "redis.call('HDEL', KEYS[2]..jobId, '"
      + RedisBackendUtils.PINLATER_JOB_HASH_CLAIM_DESCRIPTOR_FIELD + "')\n"
      + "else\n"
      + "doneNum = doneNum + 1\n"
      + "redis.call('ZADD', KEYS[4], ARGV[3], jobId)\n"
      + "end\n"
      + "redis.call('HINCRBY', KEYS[2]..jobId, '"
      + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_REMAINING_FIELD + "', -1)\n"
      + "redis.call('HSET', KEYS[2]..jobId, '"
      + RedisBackendUtils.PINLATER_JOB_HASH_UPDATED_AT_FIELD + "', ARGV[3])\n"
      + "end\n"
      + "end\n"
      + "end\n"
      + "return {tostring(doneNum), tostring(retryNum), tostring(evictNum)}";
  /*
   * Garbage collect done jobs.
   *
   * Get and remove up to ARGV[2] expired jobs (score at or below the expiry timestamp)
   * from the given queue, then delete the hash key of each of those jobs.
   *
   * Args:
   *  KEYS[1]: Succeeded (or other done) queue sorted set key.
   *  KEYS[2]: Hash key prefix.
   *  ARGV[1]: Expired timestamp.
   *  ARGV[2]: The max number of jobs to remove.
   *
   * Returns:
   *  Number of jobs that have been removed.
   */
  public static final String MONITOR_GC_DONE_JOBS =
      "local jobIds = redis.call('ZRANGEBYSCORE', KEYS[1], '-inf', ARGV[1], 'LIMIT', '0',"
      + " ARGV[2])\n"
      + "local length = table.getn(jobIds)\n"
      + "if length > 0 then\n"
      + "redis.call('ZREMRANGEBYRANK', KEYS[1], 0, length - 1)\n"
      + "for i, jobId in ipairs(jobIds) do\n"
      + "redis.call('DEL', KEYS[2]..jobId)\n"
      + "end\n"
      + "end\n"
      + "return length";
  /*
   * Delete one sorted set of the queue.
   *
   * Deletes the sorted set itself and the job hash of every member it contained.
   *
   * Args:
   *  KEYS[1]: Queue sorted set key.
   *  KEYS[2]: Hash key prefix.
   *
   * Returns:
   *  Number of jobs that have been removed.
   */
  public static final String DELETE_QUEUE =
      "local jobIds = redis.call('ZRANGE', KEYS[1], 0, -1)\n"
      + "redis.call('DEL', KEYS[1])\n"
      + "for i, jobId in ipairs(jobIds) do\n"
      + "redis.call('DEL', KEYS[2]..jobId)\n"
      + "end\n"
      + "return table.getn(jobIds)";
  /*
   * Move jobs from the failed queue to the pending queue.
   *
   * Take up to ``limit`` jobs from the head of the failed queue and add them all to the
   * pending queue (in one ZADD, scored by the current timestamp). For each job, also reset
   * the attempts remaining, refresh the updated_at timestamp, and clear the claim
   * descriptor.
   *
   * Args:
   *  KEYS[1]: Failed queue sorted set key.
   *  KEYS[2]: Pending queue sorted set key.
   *  KEYS[3]: Hash key prefix.
   *  ARGV[1]: Current timestamp in seconds(float).
   *  ARGV[2]: Limit, as the number of jobs to move.
   *  ARGV[3]: Remaining attempts to set.
   *
   * Returns:
   *  Number of jobs that have been moved.
   */
  public static final String RETRY_JOBS =
      "local jobIds = redis.call('ZRANGE', KEYS[1], 0, ARGV[2] - 1)\n"
      + "local length = table.getn(jobIds)\n"
      + "if length > 0 then\n"
      + "redis.call('ZREMRANGEBYRANK', KEYS[1], 0, length - 1)\n"
      + "local zadd_args = {}\n"
      + "for i, jobId in ipairs(jobIds) do\n"
      + "table.insert(zadd_args, ARGV[1])\n"
      + "table.insert(zadd_args, jobId)\n"
      + "redis.call('HMSET', KEYS[3]..jobId, '"
      + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_REMAINING_FIELD + "', ARGV[3], '"
      + RedisBackendUtils.PINLATER_JOB_HASH_UPDATED_AT_FIELD + "', ARGV[1])\n"
      + "redis.call('HDEL', KEYS[3]..jobId, '"
      + RedisBackendUtils.PINLATER_JOB_HASH_CLAIM_DESCRIPTOR_FIELD + "')\n"
      + "end\n"
      + "redis.call('ZADD', KEYS[2], unpack(zadd_args))\n"
      + "end\n"
      + "return length";
  /*
   * Delete jobs of a specified queue and state.
   *
   * Remove up to ``limit`` jobs from the head of the specified queue, then delete each
   * job's hash key to get rid of its associated data (body, custom status, etc.).
   *
   * Args:
   *  KEYS[1]: Queue (associated with a particular state) sorted set key.
   *  KEYS[2]: Hash key prefix.
   *  ARGV[1]: Limit, as the max number of jobs to delete.
   *
   * Returns:
   *  Number of jobs that have been deleted.
   */
  public static final String DELETE_JOBS =
      "local jobIds = redis.call('ZRANGE', KEYS[1], 0, ARGV[1] - 1)\n"
      + "local length = table.getn(jobIds)\n"
      + "if length > 0 then\n"
      + " redis.call('ZREMRANGEBYRANK', KEYS[1], 0, length - 1)\n"
      + " for i, jobId in ipairs(jobIds) do\n"
      + " redis.call('DEL', KEYS[2]..jobId)\n"
      + " end\n"
      + "end\n"
      + "return length";
/*
* Delete jobs that match the specified regex string in the specified queue and state.
*
* Check up to ``limit`` number of jobs from the specified queue (which returns job IDs) and
* their respective bodies. If the job body matches the specified regex string, then call delete
* on each individual job ID key to get rid of their associated data stored in job hash (e.g.
* body, custom status, etc.).
*
* Args:
* KEYS[1]: Queue (associated with a particular state) sorted set key.
* KEYS[2]: Hash key prefix.
* ARGV[1]: Limit, as the number of jobs to move.
* ARGV[2]: Regex string to match body with.
*
* Returns:
* Number of jobs that have been deleted.
*/
public static final String DELETE_JOBS_MATCH_BODY =
"local limit = tonumber(ARGV[1])\n"
+ "local start = 0\n"
+ "local numDeleted = 0\n"
+ "local moreToFetch = true\n"
+ "while moreToFetch do\n"
+ " local jobIds = redis.call('ZRANGE', KEYS[1], start, start + ARGV[1] - 1)\n"
+ " for i, jobId in ipairs(jobIds) do\n"
+ " local body = redis.call('HGET', KEYS[2]..jobId, '"
+ RedisBackendUtils.PINLATER_JOB_HASH_BODY_FIELD + "')\n"
+ " if string.match(body, ARGV[2]) then\n"
+ " redis.call('ZREM', KEYS[1], jobId)\n"
+ " redis.call('DEL', KEYS[2]..jobId)\n"
+ " numDeleted = numDeleted + 1\n"
+ " if numDeleted >= limit then\n"
+ " return numDeleted\n"
+ " end\n"
+ " end\n"
+ " end\n"
+ " local numFetched = table.getn(jobIds)\n"
+ " start = start + numFetched\n"
+ " moreToFetch = numFetched == limit\n"
+ "end\n"
+ "return numDeleted";
  /*
   * Count the number of jobs with bodies that match the regex string.
   *
   * Go through all jobs in the specified queue whose score lies within the given bounds and
   * count the jobs whose body matches the regex string.
   *
   * NOTE(review): if a job's hash has been evicted, HGET returns a false value and
   * string.match(body, ...) would raise an error — confirm whether evicted hashes can be
   * present here.
   *
   * Args:
   *  KEYS[1]: Queue (associated with a particular state) sorted set key.
   *  KEYS[2]: Hash key prefix.
   *  ARGV[1]: Lower bound score to scan the sorted set with.
   *  ARGV[2]: Upper bound score to scan the sorted set with.
   *  ARGV[3]: Regex string to match body with.
   *
   * Returns:
   *  Number of matching jobs.
   */
  public static final String COUNT_JOBS_MATCH_BODY =
      "local count = 0\n"
      + "local jobIds = redis.call('ZRANGEBYSCORE', KEYS[1], ARGV[1], ARGV[2])\n"
      + "for i, jobId in ipairs(jobIds) do\n"
      + " local body = redis.call('HGET', KEYS[2]..jobId, '"
      + RedisBackendUtils.PINLATER_JOB_HASH_BODY_FIELD + "')\n"
      + " if string.match(body, ARGV[3]) then\n"
      + " count = count + 1\n"
      + " end\n"
      + "end\n"
      + "return count";
  /*
   * Scan jobs with bodies that match the regex string.
   *
   * Walk the queue in descending score order (highest priority first) and collect details
   * for jobs whose body matches the regex string, stopping after ``limit`` matches.
   *
   * Args:
   *  KEYS[1]: Queue (associated with a particular state) sorted set key.
   *  KEYS[2]: Hash key prefix.
   *  ARGV[1]: Limit for number of jobs to scan.
   *  ARGV[2]: Lower bound score to scan the sorted set with.
   *  ARGV[3]: Upper bound score to scan the sorted set with.
   *  ARGV[4]: Regex string to match body with.
   *
   * Returns:
   *  A flat list of 8n objects, where each group of 8 is: job id, attempts allowed,
   *  attempts remaining, custom status, created at, updated at, claim descriptor, and
   *  run after (the sorted-set score). The flat layout is required because Lua tables of
   *  tables are not convertible to Java/Jedis types.
   */
  public static final String SCAN_JOBS_MATCH_BODY =
      "local result = {}\n"
      + "local limit = tonumber(ARGV[1])\n"
      + "local start = 0\n"
      + "local numScanned = 0\n"
      + "local moreToFetch = true\n"
      + "while moreToFetch do\n"
      + " local jobIdsAndScores = redis.call('ZREVRANGEBYSCORE', KEYS[1], ARGV[3], ARGV[2],"
      + " 'WITHSCORES', 'LIMIT', start, limit)\n"
      + " local jobIdsAndScoresLength = table.getn(jobIdsAndScores)\n"
      + " for i=1,jobIdsAndScoresLength,2 do\n"
      + " local jobId = jobIdsAndScores[i]\n"
      + " local score = jobIdsAndScores[i + 1]\n"
      + " local body = redis.call('HGET', KEYS[2]..jobId, '"
      + RedisBackendUtils.PINLATER_JOB_HASH_BODY_FIELD + "')\n"
      + " if string.match(body, ARGV[4]) then\n"
      + " local jobInfo = redis.call('HMGET', KEYS[2]..jobId, '"
      + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_ALLOWED_FIELD + "', '"
      + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_REMAINING_FIELD + "', '"
      + RedisBackendUtils.PINLATER_JOB_HASH_CUSTOM_STATUS_FIELD + "', '"
      + RedisBackendUtils.PINLATER_JOB_HASH_CREATED_AT_FIELD + "', '"
      + RedisBackendUtils.PINLATER_JOB_HASH_UPDATED_AT_FIELD + "', '"
      + RedisBackendUtils.PINLATER_JOB_HASH_CLAIM_DESCRIPTOR_FIELD + "')\n"
      + " table.insert(result, jobId)\n"
      + " table.insert(result, jobInfo[1])\n"
      + " table.insert(result, jobInfo[2])\n"
      + " table.insert(result, jobInfo[3])\n"
      + " table.insert(result, jobInfo[4])\n"
      + " table.insert(result, jobInfo[5])\n"
      + " table.insert(result, jobInfo[6])\n"
      + " table.insert(result, score)\n"
      + " numScanned = numScanned + 1\n"
      + " if numScanned >= limit then\n"
      + " return result\n"
      + " end\n"
      + " end\n"
      + " end\n"
      + " local numFetched = jobIdsAndScoresLength / 2\n"
      + " start = start + numFetched\n"
      + " moreToFetch = numFetched == limit\n"
      + "end\n"
      + "return result";
  /*
   * Common checkpointing logic that should be prepended to checkpoint request scripts.
   *
   * Aborts (returns 0) unless the job's claim descriptor exists and contains the
   * requesting host; otherwise moves the job from KEYS[1] to KEYS[2] and refreshes the
   * updated_at timestamp.
   *
   * Args:
   *  KEYS[1]: Key of sorted set that the job currently lies in.
   *  KEYS[2]: Key of sorted set that the job is to be moved to.
   *  KEYS[3]: Hash key prefix.
   *  ARGV[1]: Job id.
   *  ARGV[2]: Source (hostname) of checkpoint request.
   *  ARGV[3]: Current timestamp in seconds (float).
   */
  public static final String CHECKPOINT_JOB_HEADER =
      "local claimDescriptor = redis.call('HGET', KEYS[3]..ARGV[1],"
      + " '" + RedisBackendUtils.PINLATER_JOB_HASH_CLAIM_DESCRIPTOR_FIELD + "')\n"
      + "if not claimDescriptor or not string.match(claimDescriptor, ARGV[2]) then\n"
      + " return 0\n"
      + "end\n"
      + "redis.call('ZREM', KEYS[1], ARGV[1])\n"
      + "redis.call('ZADD', KEYS[2], ARGV[3], ARGV[1])\n"
      + "redis.call('HSET', KEYS[3]..ARGV[1], '"
      + RedisBackendUtils.PINLATER_JOB_HASH_UPDATED_AT_FIELD + "', ARGV[3])\n";
  /*
   * Update the run_after and updated_at fields of a job to the current time. Optionally
   * changes the job's state by moving it to a specified sorted set. Also optionally changes
   * the job body, resets the number of attempts allowed/remaining and prepends a message to
   * the custom status. A checkpoint request should be constructed as:
   *
   *  CHECKPOINT_JOB_HEADER + [optional update fragments] + CHECKPOINT_JOB_FOOTER
   *
   * Note that this update will only go through if the claim descriptor field matches the
   * host that made the checkpoint request (checked in CHECKPOINT_JOB_HEADER). If it doesn't
   * match, the script is a no-op.
   *
   * Args:
   *  KEYS[1]: Key of sorted set that the job currently lies in.
   *  KEYS[2]: Key of sorted set that the job is to be moved to.
   *  KEYS[3]: Hash key prefix.
   *  ARGV[1]: Job id.
   *  ARGV[2]: Source (hostname) of checkpoint request.
   *  ARGV[3]: Current timestamp in seconds (float)
   *  ARGV[4]: New job body.
   *  ARGV[5]: Set the number of attempts allowed (also reset the attempts remaining).
   *  ARGV[6]: Message to prepend to custom status.
   *
   * Returns:
   *  Number of jobs affected by the checkpoint request: 0 if the operation was a no-op,
   *  1 if it was successful.
   */
  public static final String CHECKPOINT_JOB_FOOTER =
      "return 1";
  // Optional checkpoint fragment: replace the job body with ARGV[4].
  public static final String CHECKPOINT_JOB_NEW_BODY =
      "redis.call('HSET', KEYS[3]..ARGV[1], '"
      + RedisBackendUtils.PINLATER_JOB_HASH_BODY_FIELD + "', ARGV[4])\n";
  // Optional checkpoint fragment: set attempts allowed to ARGV[5] and reset attempts
  // remaining to the same value.
  public static final String CHECKPOINT_JOB_NEW_ATTEMPTS_ALLOWED =
      "redis.call('HMSET', KEYS[3]..ARGV[1], '"
      + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_ALLOWED_FIELD + "', ARGV[5], '"
      + RedisBackendUtils.PINLATER_JOB_HASH_ATTEMPTS_REMAINING_FIELD + "', ARGV[5])\n";
  // Optional checkpoint fragment: prepend ARGV[6] to the existing custom status, truncated
  // to CUSTOM_STATUS_SIZE_BYTES. NOTE(review): Lua string indices are 1-based and
  // string.sub clamps a start index of 0 to 1, so the kept prefix is
  // CUSTOM_STATUS_SIZE_BYTES characters — confirm the intended length.
  public static final String CHECKPOINT_JOB_NEW_CUSTOM_STATUS =
      "local customStatus = string.sub(ARGV[6]..redis.call('HGET', KEYS[3]..ARGV[1],"
      + " '" + RedisBackendUtils.PINLATER_JOB_HASH_CUSTOM_STATUS_FIELD + "'),"
      + " 0, " + String.valueOf(RedisBackendUtils.CUSTOM_STATUS_SIZE_BYTES) + ")\n"
      + "redis.call('HSET', KEYS[3]..ARGV[1], '"
      + RedisBackendUtils.PINLATER_JOB_HASH_CUSTOM_STATUS_FIELD + "', customStatus)\n";
  // Optional checkpoint fragment: clear the claim descriptor so the job is unclaimed.
  public static final String CHECKPOINT_JOB_RESET_CLAIM_DESCRIPTOR =
      "redis.call('HDEL', KEYS[3]..ARGV[1], '"
      + RedisBackendUtils.PINLATER_JOB_HASH_CLAIM_DESCRIPTOR_FIELD + "')\n";
}
|
|
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.exception.OConcurrentModificationException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
import com.orientechnologies.orient.core.version.ORecordVersion;
import com.orientechnologies.orient.core.version.OVersionFactory;
import com.orientechnologies.orient.enterprise.channel.binary.OResponseProcessingException;
import com.orientechnologies.orient.object.db.OObjectDatabaseTx;
import com.orientechnologies.orient.test.domain.business.Account;
import com.orientechnologies.orient.test.domain.business.Address;
import com.tinkerpop.blueprints.impls.orient.OrientGraph;
import org.testng.Assert;
import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
@Test
public class TransactionConsistencyTest extends DocumentDBBaseTest {
  // Two independent connections to the same database URL, used to provoke concurrent
  // modification conflicts between optimistic transactions.
  protected ODatabaseDocumentTx database1;
  protected ODatabaseDocumentTx database2;
  // Field name used by the documents created in these tests.
  public static final String NAME = "name";
  /** Creates the suite against the database URL supplied as a TestNG parameter. */
  @Parameters(value = "url")
  public TransactionConsistencyTest(@Optional String url) {
    super(url);
  }
  /**
   * Verifies that a transaction that hits an OConcurrentModificationException on commit is
   * rolled back completely: the conflicting update (docA) keeps the winning value and the
   * unrelated update in the same transaction (docB) never becomes visible.
   */
  @Test
  public void test1RollbackOnConcurrentException() throws IOException {
    database1 = new ODatabaseDocumentTx(url).open("admin", "admin");
    database2 = new ODatabaseDocumentTx(url).open("admin", "admin");
    database1.begin(TXTYPE.OPTIMISTIC);
    // Create docA.
    ODocument vDocA_db1 = database1.newInstance();
    vDocA_db1.field(NAME, "docA");
    database1.save(vDocA_db1);
    // Create docB.
    ODocument vDocB_db1 = database1.newInstance();
    vDocB_db1.field(NAME, "docB");
    database1.save(vDocB_db1);
    database1.commit();
    // Keep the IDs.
    ORID vDocA_Rid = vDocA_db1.getIdentity().copy();
    ORID vDocB_Rid = vDocB_db1.getIdentity().copy();
    ORecordVersion vDocA_version = OVersionFactory.instance().createUntrackedVersion();
    ORecordVersion vDocB_version = OVersionFactory.instance().createUntrackedVersion();
    database2.begin(TXTYPE.OPTIMISTIC);
    try {
      // Get docA and update in db2 transaction context
      ODocument vDocA_db2 = database2.load(vDocA_Rid);
      vDocA_db2.field(NAME, "docA_v2");
      database2.save(vDocA_db2);
      // Concurrent update docA via database1 -> will throw OConcurrentModificationException at database2.commit().
      database1.begin(TXTYPE.OPTIMISTIC);
      try {
        vDocA_db1.field(NAME, "docA_v3");
        database1.save(vDocA_db1);
        database1.commit();
      } catch (OResponseProcessingException e) {
        Assert.fail("Should not failed here...");
      } catch (OConcurrentModificationException e) {
        Assert.fail("Should not failed here...");
      }
      Assert.assertEquals(vDocA_db1.field(NAME), "docA_v3");
      // Keep the last committed versions; the following updates should fail and be reverted.
      vDocA_version = vDocA_db1.getRecordVersion();
      vDocB_version = vDocB_db1.getRecordVersion();
      // Update docB in db2 transaction context -> should be rolled back with the transaction.
      ODocument vDocB_db2 = database2.load(vDocB_Rid);
      vDocB_db2.field(NAME, "docB_UpdatedInTranscationThatWillBeRollbacked");
      database2.save(vDocB_db2);
      // Will throw OConcurrentModificationException
      database2.commit();
      Assert.fail("Should throw OConcurrentModificationException");
    } catch (OResponseProcessingException e) {
      // Remote protocol wraps the conflict; no explicit rollback here (commit already failed).
      Assert.assertTrue(e.getCause() instanceof OConcurrentModificationException);
    } catch (OConcurrentModificationException e) {
      database2.rollback();
    }
    // Force reload all (to be sure it is not a cache problem)
    database1.close();
    database2.getStorage().close();
    database2 = new ODatabaseDocumentTx(url).open("admin", "admin");
    ODocument vDocA_db2 = database2.load(vDocA_Rid);
    Assert.assertEquals(vDocA_db2.field(NAME), "docA_v3");
    Assert.assertEquals(vDocA_db2.getRecordVersion(), vDocA_version);
    // docB should be in the first state : "docB"
    ODocument vDocB_db2 = database2.load(vDocB_Rid);
    Assert.assertEquals(vDocB_db2.field(NAME), "docB");
    Assert.assertEquals(vDocB_db2.getRecordVersion(), vDocB_version);
    // NOTE(review): database1 was already closed above, so this second close appears
    // redundant — confirm it is a harmless no-op and consider removing it.
    database1.close();
    database2.close();
  }
  /**
   * Verifies that after a conflicting commit from database2 is rejected and rolled back,
   * the record keeps the last successfully committed value ("docA_v3") written through
   * database1.
   */
  @Test
  public void test4RollbackWithPin() throws IOException {
    database1 = new ODatabaseDocumentTx(url).open("admin", "admin");
    database2 = new ODatabaseDocumentTx(url).open("admin", "admin");
    // Create docA.
    ODocument vDocA_db1 = database1.newInstance();
    vDocA_db1.field(NAME, "docA");
    database1.save(vDocA_db1);
    // Keep the IDs.
    ORID vDocA_Rid = vDocA_db1.getIdentity().copy();
    database2.begin(TXTYPE.OPTIMISTIC);
    try {
      // Get docA and update in db2 transaction context
      ODocument vDocA_db2 = database2.load(vDocA_Rid);
      vDocA_db2.field(NAME, "docA_v2");
      database2.save(vDocA_db2);
      database1.begin(TXTYPE.OPTIMISTIC);
      try {
        vDocA_db1.field(NAME, "docA_v3");
        database1.save(vDocA_db1);
        database1.commit();
      } catch (OConcurrentModificationException e) {
        Assert.fail("Should not failed here...");
      }
      Assert.assertEquals(vDocA_db1.field(NAME), "docA_v3");
      // Will throw OConcurrentModificationException
      database2.commit();
      Assert.fail("Should throw OConcurrentModificationException");
    } catch (OResponseProcessingException e) {
      Assert.assertTrue(e.getCause() instanceof OConcurrentModificationException);
      database2.rollback();
    } catch (OConcurrentModificationException e) {
      database2.rollback();
    }
    // Force reload all (to be sure it is not a cache problem)
    database1.close();
    database2.close();
    database2 = new ODatabaseDocumentTx(url).open("admin", "admin");
    // docA should be in the last committed state: "docA_v3". (The variable name vDocB_db2
    // is misleading — it loads docA's RID.)
    ODocument vDocB_db2 = database2.load(vDocA_Rid);
    Assert.assertEquals(vDocB_db2.field(NAME), "docA_v3");
    database1.close();
    database2.close();
  }
  /**
   * Same rollback scenario as {@code test4RollbackWithPin}: a conflicting commit from
   * database2 must be rejected and rolled back, leaving the last committed value
   * "docA_v3". NOTE(review): the body is currently identical to test4 — the "copy cache
   * strategy" it was presumably written for is not configured anywhere visible here.
   */
  @Test
  public void test3RollbackWithCopyCacheStrategy() throws IOException {
    database1 = new ODatabaseDocumentTx(url).open("admin", "admin");
    database2 = new ODatabaseDocumentTx(url).open("admin", "admin");
    // Create docA.
    ODocument vDocA_db1 = database1.newInstance();
    vDocA_db1.field(NAME, "docA");
    database1.save(vDocA_db1);
    // Keep the IDs.
    ORID vDocA_Rid = vDocA_db1.getIdentity().copy();
    database2.begin(TXTYPE.OPTIMISTIC);
    try {
      // Get docA and update in db2 transaction context
      ODocument vDocA_db2 = database2.load(vDocA_Rid);
      vDocA_db2.field(NAME, "docA_v2");
      database2.save(vDocA_db2);
      database1.begin(TXTYPE.OPTIMISTIC);
      try {
        vDocA_db1.field(NAME, "docA_v3");
        database1.save(vDocA_db1);
        database1.commit();
      } catch (OConcurrentModificationException e) {
        Assert.fail("Should not failed here...");
      }
      Assert.assertEquals(vDocA_db1.field(NAME), "docA_v3");
      // Will throw OConcurrentModificationException
      database2.commit();
      Assert.fail("Should throw OConcurrentModificationException");
    } catch (OResponseProcessingException e) {
      Assert.assertTrue(e.getCause() instanceof OConcurrentModificationException);
      database2.rollback();
    } catch (OConcurrentModificationException e) {
      database2.rollback();
    }
    // Force reload all (to be sure it is not a cache problem)
    database1.close();
    database2.close();
    database2 = new ODatabaseDocumentTx(url).open("admin", "admin");
    // docA should be in the last committed state: "docA_v3".
    ODocument vDocB_db2 = database2.load(vDocA_Rid);
    Assert.assertEquals(vDocB_db2.field(NAME), "docA_v3");
    database1.close();
    database2.close();
  }
@Test
public void test5CacheUpdatedMultipleDbs() {
database1 = new ODatabaseDocumentTx(url).open("admin", "admin");
database2 = new ODatabaseDocumentTx(url).open("admin", "admin");
// Create docA in db1
database1.begin(TXTYPE.OPTIMISTIC);
ODocument vDocA_db1 = database1.newInstance();
vDocA_db1.field(NAME, "docA");
database1.save(vDocA_db1);
database1.commit();
// Keep the ID.
ORID vDocA_Rid = vDocA_db1.getIdentity().copy();
// Update docA in db2
database2.begin(TXTYPE.OPTIMISTIC);
ODocument vDocA_db2 = database2.load(vDocA_Rid);
vDocA_db2.field(NAME, "docA_v2");
database2.save(vDocA_db2);
database2.commit();
// Later... read docA with db1.
database1.begin(TXTYPE.OPTIMISTIC);
ODocument vDocA_db1_later = database1.load(vDocA_Rid, null, true);
Assert.assertEquals(vDocA_db1_later.field(NAME), "docA_v2");
database1.commit();
database1.close();
database2.close();
}
  /**
   * Checks that committing an update bumps a record's version, and that the new version is
   * observed both on a reload in the same session and from a freshly reopened database.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void checkVersionsInConnectedDocuments() {
    database.begin();
    // Build a small cycle of Profile documents linked through "following" sets.
    ODocument kim = new ODocument("Profile").field("name", "Kim").field("surname", "Bauer");
    ODocument teri = new ODocument("Profile").field("name", "Teri").field("surname", "Bauer");
    ODocument jack = new ODocument("Profile").field("name", "Jack").field("surname", "Bauer");
    ((HashSet<ODocument>) jack.field("following", new HashSet<ODocument>()).field("following")).add(kim);
    ((HashSet<ODocument>) kim.field("following", new HashSet<ODocument>()).field("following")).add(teri);
    ((HashSet<ODocument>) teri.field("following", new HashSet<ODocument>()).field("following")).add(jack);
    jack.save();
    database.commit();
    database.close();
    database.open("admin", "admin");
    ODocument loadedJack = database.load(jack.getIdentity());
    // Snapshot the version before the update; all later reads must differ from it.
    ORecordVersion jackLastVersion = loadedJack.getRecordVersion().copy();
    database.begin();
    loadedJack.field("occupation", "agent");
    loadedJack.save();
    database.commit();
    Assert.assertTrue(!jackLastVersion.equals(loadedJack.getRecordVersion()));
    loadedJack = database.load(jack.getIdentity());
    Assert.assertTrue(!jackLastVersion.equals(loadedJack.getRecordVersion()));
    database.close();
    database.open("admin", "admin");
    loadedJack = database.load(jack.getIdentity());
    Assert.assertTrue(!jackLastVersion.equals(loadedJack.getRecordVersion()));
    database.close();
  }
  /**
   * Creates a small vertex/edge-like schema (MyProfile, MyEdge) and links documents through
   * LINK/LINKSET properties inside a transaction, then verifies the committed profiles are
   * queryable from a reopened database.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void createLinkInTx() {
    OClass profile = database.getMetadata().getSchema()
        .createClass("MyProfile", database.addCluster("myprofile"));
    OClass edge = database.getMetadata().getSchema()
        .createClass("MyEdge", database.addCluster("myedge"));
    profile.createProperty("name", OType.STRING).setMin("3").setMax("30").createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
    profile.createProperty("surname", OType.STRING).setMin("3").setMax("30");
    profile.createProperty("in", OType.LINKSET, edge);
    profile.createProperty("out", OType.LINKSET, edge);
    edge.createProperty("in", OType.LINK, profile);
    edge.createProperty("out", OType.LINK, profile);
    database.begin();
    ODocument kim = new ODocument("MyProfile").field("name", "Kim").field("surname", "Bauer");
    ODocument teri = new ODocument("MyProfile").field("name", "Teri").field("surname", "Bauer");
    ODocument jack = new ODocument("MyProfile").field("name", "Jack").field("surname", "Bauer");
    // Link kim -> jack through a MyEdge document and register it on both endpoints.
    ODocument myedge = new ODocument("MyEdge").field("in", kim).field("out", jack);
    myedge.save();
    ((HashSet<ODocument>) kim.field("out", new HashSet<ORID>()).field("out")).add(myedge);
    ((HashSet<ODocument>) jack.field("in", new HashSet<ORID>()).field("in")).add(myedge);
    jack.save();
    kim.save();
    teri.save();
    database.commit();
    database.close();
    database.open("admin", "admin");
    List<ODocument> result = database.command(new OSQLSynchQuery<ODocument>("select from MyProfile ")).execute();
    Assert.assertTrue(result.size() != 0);
  }
@SuppressWarnings("unchecked")
@Test
public void loadRecordTest() {
database.begin();
ODocument kim = new ODocument("Profile").field("name", "Kim").field("surname", "Bauer");
ODocument teri = new ODocument("Profile").field("name", "Teri").field("surname", "Bauer");
ODocument jack = new ODocument("Profile").field("name", "Jack").field("surname", "Bauer");
ODocument chloe = new ODocument("Profile").field("name", "Chloe").field("surname", "O'Brien");
((HashSet<ODocument>) jack.field("following", new HashSet<ODocument>()).field("following")).add(kim);
((HashSet<ODocument>) kim.field("following", new HashSet<ODocument>()).field("following")).add(teri);
((HashSet<ODocument>) teri.field("following", new HashSet<ODocument>()).field("following")).add(jack);
((HashSet<ODocument>) teri.field("following")).add(kim);
((HashSet<ODocument>) chloe.field("following", new HashSet<ODocument>()).field("following")).add(jack);
((HashSet<ODocument>) chloe.field("following")).add(teri);
((HashSet<ODocument>) chloe.field("following")).add(kim);
int profileClusterId = database.getClusterIdByName("Profile");
jack.save();
kim.save();
teri.save();
chloe.save();
database.commit();
Assert.assertEquals(jack.getIdentity().getClusterId(), profileClusterId);
Assert.assertEquals(kim.getIdentity().getClusterId(), profileClusterId);
Assert.assertEquals(teri.getIdentity().getClusterId(), profileClusterId);
Assert.assertEquals(chloe.getIdentity().getClusterId(), profileClusterId);
database.close();
database.open("admin", "admin");
ODocument loadedChloe = database.load(chloe.getIdentity());
}
  /**
   * Repeatedly inserts a chunk of MyFruit documents in one transaction and deletes them all
   * in a second transaction, asserting the cluster is empty before and after every round
   * (indexes on name/color/flavor exercise index maintenance under transactions).
   */
  @Test
  public void testTransactionPopulateDelete() {
    if (!database.getMetadata().getSchema().existsClass("MyFruit")) {
      OClass fruitClass = database.getMetadata().getSchema().createClass("MyFruit");
      fruitClass.createProperty("name", OType.STRING);
      fruitClass.createProperty("color", OType.STRING);
      fruitClass.createProperty("flavor", OType.STRING);
      database.getMetadata().getSchema().getClass("MyFruit").getProperty("name").createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
      database.getMetadata().getSchema().getClass("MyFruit").getProperty("color").createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
      database.getMetadata().getSchema().getClass("MyFruit").getProperty("flavor").createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
    }
    database.close();
    database.open("admin", "admin");
    int chunkSize = 500;
    for (int initialValue = 0; initialValue < 10; initialValue++) {
      // System.out.println("initialValue = " + initialValue);
      Assert.assertEquals(database.countClusterElements("MyFruit"), 0);
      // do insert
      Vector<ODocument> v = new Vector<ODocument>();
      database.begin();
      for (int i = initialValue * chunkSize; i < (initialValue * chunkSize) + chunkSize; i++) {
        ODocument d = new ODocument("MyFruit").field("name", "" + i).field("color", "FOO").field("flavor", "BAR" + i);
        d.save();
        v.addElement(d);
      }
      // System.out.println("populate commit");
      database.commit();
      // do delete
      database.begin();
      // System.out.println("vector size = " + v.size());
      for (int i = 0; i < v.size(); i++) {
        database.delete(v.elementAt(i));
      }
      // System.out.println("delete commit");
      database.commit();
      Assert.assertEquals(database.countClusterElements("MyFruit"), 0);
    }
    database.close();
  }
  /**
   * Creates three Foo vertices, commits, then removes all three inside a single graph
   * transaction and commits again, checking each query/removal step along the way.
   */
  @Test
  public void testConsistencyOnDelete() {
    final OrientGraph graph = new OrientGraph(url);
    if (graph.getVertexType("Foo") == null)
      graph.createVertexType("Foo");
    try {
      // Step 1
      // Create several foo's
      graph.addVertex("class:Foo", "address", "test1");
      graph.addVertex("class:Foo", "address", "test2");
      graph.addVertex("class:Foo", "address", "test3");
      graph.commit();
      // just show what is there
      List<ODocument> result = graph.getRawGraph().query(new OSQLSynchQuery<ODocument>("select * from Foo"));
      for (ODocument d : result) {
        System.out.println("Vertex: " + d);
      }
      // remove those foos in a transaction
      // Step 3a
      result = graph.getRawGraph().query(new OSQLSynchQuery<ODocument>("select * from Foo where address = 'test1'"));
      Assert.assertEquals(result.size(), 1);
      // Step 4a
      graph.removeVertex(graph.getVertex(result.get(0)));
      // Step 3b
      result = graph.getRawGraph().query(new OSQLSynchQuery<ODocument>("select * from Foo where address = 'test2'"));
      Assert.assertEquals(result.size(), 1);
      // Step 4b
      graph.removeVertex(graph.getVertex(result.get(0)));
      // Step 3c
      result = graph.getRawGraph().query(new OSQLSynchQuery<ODocument>("select * from Foo where address = 'test3'"));
      Assert.assertEquals(result.size(), 1);
      // Step 4c
      graph.removeVertex(graph.getVertex(result.get(0)));
      // Step 6
      graph.commit();
      // just show what is there
      result = graph.getRawGraph().query(new OSQLSynchQuery<ODocument>("select * from Foo"));
      for (ODocument d : result) {
        System.out.println("Vertex: " + d);
      }
    } finally {
      // Always release the graph instance, even when an assertion fails mid-test.
      graph.shutdown();
    }
  }
  /**
   * Regression scenario: with lightweight edges disabled, deleting a vertex that has a
   * connecting edge inside a transaction used to misbehave (see inline comments preserved
   * from the original report).
   */
  @Test
  public void deletesWithinTransactionArentWorking() throws IOException {
    OrientGraph graph = new OrientGraph(url);
    graph.setUseLightweightEdges(false);
    try {
      if (graph.getVertexType("Foo") == null)
        graph.createVertexType("Foo");
      if (graph.getVertexType("Bar") == null)
        graph.createVertexType("Bar");
      if (graph.getVertexType("Sees") == null)
        graph.createEdgeType("Sees");
      // Commenting out the transaction will result in the test succeeding.
      ODocument foo = graph.addVertex("class:Foo", "prop", "test1").getRecord();
      // Comment out these two lines and the test will succeed. The issue appears to be related to an edge
      // connecting a deleted vertex during a transaction
      ODocument bar = graph.addVertex("class:Bar", "prop", "test1").getRecord();
      // The local is unused afterwards; the call matters for its side effect (creates the edge).
      ODocument sees = graph.addEdge(null, graph.getVertex(foo), graph.getVertex(bar), "Sees").getRecord();
      graph.commit();
      List<ODocument> foos = graph.getRawGraph().query(new OSQLSynchQuery("select * from Foo"));
      Assert.assertEquals(foos.size(), 1);
      graph.removeVertex(graph.getVertex(foos.get(0)));
    } finally {
      graph.shutdown();
    }
  }
/**
 * Verifies that a commit which fails with a concurrent-modification conflict is rolled
 * back cleanly: after the rollback the datastore must contain exactly the records from
 * the first, successful commit.
 *
 * NOTE(review): this method carries no {@code @Test} annotation while its siblings do;
 * presumably it is deliberately excluded from the suite -- confirm before enabling.
 * NOTE(review): the schema declares a "version" property but the test populates
 * "myversion"; looks intentional (avoids the reserved record version) -- confirm.
 */
public void TransactionRollbackConstistencyTest() {
  System.out.println("**************************TransactionRollbackConsistencyTest***************************************");
  // Build a tiny graph-like schema: vertices linked through edge documents.
  OClass vertexClass = database.getMetadata().getSchema().createClass("TRVertex");
  OClass edgeClass = database.getMetadata().getSchema().createClass("TREdge");
  vertexClass.createProperty("in", OType.LINKSET, edgeClass);
  vertexClass.createProperty("out", OType.LINKSET, edgeClass);
  edgeClass.createProperty("in", OType.LINK, vertexClass);
  edgeClass.createProperty("out", OType.LINK, vertexClass);
  OClass personClass = database.getMetadata().getSchema().createClass("TRPerson", vertexClass);
  personClass.createProperty("name", OType.STRING).createIndex(OClass.INDEX_TYPE.UNIQUE);
  personClass.createProperty("surname", OType.STRING).createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
  personClass.createProperty("version", OType.INTEGER);
  database.getMetadata().getSchema().save();
  database.close();

  final int cnt = 4;
  database.open("admin", "admin");
  database.begin();
  // Typed list instead of the raw legacy Vector; removes all the (ODocument) casts.
  List<ODocument> inserted = new ArrayList<ODocument>();
  for (int i = 0; i < cnt; i++) {
    ODocument person = new ODocument("TRPerson");
    person.field("name", Character.toString((char) ('A' + i)));
    person.field("surname", Character.toString((char) ('A' + (i % 3))));
    person.field("myversion", 0);
    person.field("in", new HashSet<ODocument>());
    person.field("out", new HashSet<ODocument>());
    if (i >= 1) {
      // Chain each person to the previous one through a TREdge document.
      ODocument edge = new ODocument("TREdge");
      edge.field("in", person.getIdentity());
      edge.field("out", inserted.get(i - 1));
      ((Set<ODocument>) person.field("out")).add(edge);
      ((Set<ODocument>) inserted.get(i - 1).field("in")).add(edge);
      edge.save();
    }
    inserted.add(person);
    person.save();
  }
  database.commit();

  final List<ODocument> result1 = database.command(new OCommandSQL("select from TRPerson")).execute();
  Assert.assertNotNull(result1);
  Assert.assertEquals(result1.size(), cnt);
  System.out.println("Before transaction commit");
  for (ODocument d : result1)
    System.out.println(d);

  try {
    database.begin();
    List<ODocument> inserted2 = new ArrayList<ODocument>();
    for (int i = 0; i < cnt; i++) {
      ODocument person = new ODocument("TRPerson");
      person.field("name", Character.toString((char) ('a' + i)));
      person.field("surname", Character.toString((char) ('a' + (i % 3))));
      person.field("myversion", 0);
      person.field("in", new HashSet<ODocument>());
      person.field("out", new HashSet<ODocument>());
      if (i >= 1) {
        ODocument edge = new ODocument("TREdge");
        edge.field("in", person.getIdentity());
        edge.field("out", inserted2.get(i - 1));
        ((Set<ODocument>) person.field("out")).add(edge);
        ((Set<ODocument>) inserted2.get(i - 1).field("in")).add(edge);
        edge.save();
      }
      inserted2.add(person);
      person.save();
    }
    // Touch every record from the first batch except the last one.
    for (int i = 0; i < cnt; i++) {
      if (i != cnt - 1) {
        inserted.get(i).field("myversion", 2);
        inserted.get(i).save();
      }
    }
    // Delete one record and corrupt another's version to force a conflict on commit.
    inserted.get(cnt - 1).delete();
    inserted.get(cnt - 2).getRecordVersion().reset();
    inserted.get(cnt - 2).save();
    database.commit();
    Assert.fail("commit should have thrown OConcurrentModificationException");
  } catch (OResponseProcessingException e) {
    // Remote connections wrap the conflict in a response-processing exception.
    Assert.assertTrue(e.getCause() instanceof OConcurrentModificationException);
    database.rollback();
  } catch (OConcurrentModificationException e) {
    database.rollback();
  }

  // After the rollback only the first batch must remain.
  final List<ODocument> result2 = database.command(new OCommandSQL("select from TRPerson")).execute();
  Assert.assertNotNull(result2);
  System.out.println("After transaction commit failure/rollback");
  for (ODocument d : result2)
    System.out.println(d);
  Assert.assertEquals(result2.size(), cnt);
  System.out.println("**************************TransactionRollbackConstistencyTest***************************************");
}
/**
 * A vertex added inside a transaction must be visible to a query run within that same
 * transaction (embedded connections only) and, once committed, to any query.
 */
@Test
public void testQueryIsolation() {
  OrientGraph graph = new OrientGraph(url);
  try {
    final String query = "select from V where purpose = 'testQueryIsolation'";
    graph.addVertex(null, "purpose", "testQueryIsolation");
    if (!url.startsWith("remote")) {
      // Uncommitted changes are only observable through a local (embedded) connection.
      List<OIdentifiable> uncommitted = graph.getRawGraph().query(new OSQLSynchQuery<Object>(query));
      Assert.assertEquals(uncommitted.size(), 1);
    }
    graph.commit();
    List<OIdentifiable> committed = graph.getRawGraph().query(new OSQLSynchQuery<Object>(query));
    Assert.assertEquals(committed.size(), 1);
  } finally {
    graph.shutdown();
  }
}
/**
 * When calling .remove(o) on a collection, the row corresponding to o is deleted and not restored when the transaction is rolled
 * back.
 *
 * Commented code after data model change to work around this problem.
 */
@SuppressWarnings("unused")
@Test
public void testRollbackWithRemove() {
  // check if the database exists and clean before running tests
  OObjectDatabaseTx database = new OObjectDatabaseTx(url);
  database.open("admin", "admin");
  try {
    Account account = new Account();
    account.setName("John Grisham");
    account = database.save(account);

    Address firstAddress = new Address();
    firstAddress.setStreet("Mulholland drive");
    Address secondAddress = new Address();
    secondAddress.setStreet("Via Veneto");
    List<Address> addresses = new ArrayList<Address>();
    addresses.add(firstAddress);
    addresses.add(secondAddress);
    account.setAddresses(addresses);
    account = database.save(account);
    database.commit();

    String originalName = account.getName();
    database.begin(TXTYPE.OPTIMISTIC);
    Assert.assertEquals(account.getAddresses().size(), 2);
    // Drop one address and mutate a plain attribute, then roll everything back.
    account.getAddresses().remove(1);
    Assert.assertEquals(account.getAddresses().size(), 1);
    account.setName("New Name");
    account = database.save(account);
    // Prior to the rollback the removal is expected to be visible.
    Assert.assertEquals(account.getAddresses().size(), 1);
    database.rollback();

    // Reload bypassing the cache: both the link set and the attribute are restored.
    account = database.reload(account, true);
    Assert.assertEquals(account.getAddresses().size(), 2);
    Assert.assertEquals(account.getName(), originalName);

    // The Address rows themselves must also still exist in the datastore.
    int matches = 0;
    for (Address candidate : database.browseClass(Address.class)) {
      if (candidate.getStreet().equals("Mulholland drive") || candidate.getStreet().equals("Via Veneto"))
        matches++;
    }
    Assert.assertEquals(matches, 2); // this fails, only 1 entry in the datastore :(
  } finally {
    database.close();
  }
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.airavata.model.error;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
/**
* This exception is thrown for invalid requests that occur from any reasons like required input parameters are missing,
* or a parameter is malformed.
*
* message: contains the associated error message.
*/
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-31")
public class InvalidRequestException extends TException implements org.apache.thrift.TBase<InvalidRequestException, InvalidRequestException._Fields>, java.io.Serializable, Cloneable, Comparable<InvalidRequestException> {
// NOTE: Thrift-generated class -- regenerate from the .thrift IDL rather than hand-editing.
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InvalidRequestException");
private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);
// Registered serialization schemes: standard (field-tagged) and tuple (compact) wire formats.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new InvalidRequestExceptionStandardSchemeFactory());
schemes.put(TupleScheme.class, new InvalidRequestExceptionTupleSchemeFactory());
}
private String message; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
MESSAGE((short)1, "message");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // MESSAGE
return MESSAGE;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// Field metadata exposed to the Thrift runtime; 'message' is a REQUIRED string field.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.MESSAGE, new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(InvalidRequestException.class, metaDataMap);
}
public InvalidRequestException() {
}
public InvalidRequestException(
String message)
{
this();
this.message = message;
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public InvalidRequestException(InvalidRequestException other) {
if (other.isSetMessage()) {
this.message = other.message;
}
}
public InvalidRequestException deepCopy() {
return new InvalidRequestException(this);
}
@Override
public void clear() {
this.message = null;
}
public String getMessage() {
return this.message;
}
public void setMessage(String message) {
this.message = message;
}
public void unsetMessage() {
this.message = null;
}
/** Returns true if field message is set (has been assigned a value) and false otherwise */
public boolean isSetMessage() {
return this.message != null;
}
public void setMessageIsSet(boolean value) {
if (!value) {
this.message = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case MESSAGE:
if (value == null) {
unsetMessage();
} else {
setMessage((String)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case MESSAGE:
return getMessage();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case MESSAGE:
return isSetMessage();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof InvalidRequestException)
return this.equals((InvalidRequestException)that);
return false;
}
public boolean equals(InvalidRequestException that) {
if (that == null)
return false;
boolean this_present_message = true && this.isSetMessage();
boolean that_present_message = true && that.isSetMessage();
if (this_present_message || that_present_message) {
if (!(this_present_message && that_present_message))
return false;
if (!this.message.equals(that.message))
return false;
}
return true;
}
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_message = true && (isSetMessage());
list.add(present_message);
if (present_message)
list.add(message);
return list.hashCode();
}
@Override
public int compareTo(InvalidRequestException other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetMessage()).compareTo(other.isSetMessage());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetMessage()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.message, other.message);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
// Delegates (de)serialization to whichever scheme matches the protocol in use.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("InvalidRequestException(");
boolean first = true;
sb.append("message:");
if (this.message == null) {
sb.append("null");
} else {
sb.append(this.message);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
if (!isSetMessage()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'message' is unset! Struct:" + toString());
}
// check for sub-struct validity
}
// Custom Java serialization hooks delegate to the Thrift compact protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class InvalidRequestExceptionStandardSchemeFactory implements SchemeFactory {
public InvalidRequestExceptionStandardScheme getScheme() {
return new InvalidRequestExceptionStandardScheme();
}
}
// Standard scheme: each field is written with an id/type header and read back by id.
private static class InvalidRequestExceptionStandardScheme extends StandardScheme<InvalidRequestException> {
public void read(org.apache.thrift.protocol.TProtocol iprot, InvalidRequestException struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // MESSAGE
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.message = iprot.readString();
struct.setMessageIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
// Unknown fields are skipped for forward compatibility.
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, InvalidRequestException struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.message != null) {
oprot.writeFieldBegin(MESSAGE_FIELD_DESC);
oprot.writeString(struct.message);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class InvalidRequestExceptionTupleSchemeFactory implements SchemeFactory {
public InvalidRequestExceptionTupleScheme getScheme() {
return new InvalidRequestExceptionTupleScheme();
}
}
// Tuple scheme: compact encoding -- the required 'message' is written bare, no field header.
private static class InvalidRequestExceptionTupleScheme extends TupleScheme<InvalidRequestException> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, InvalidRequestException struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
oprot.writeString(struct.message);
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, InvalidRequestException struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
struct.message = iprot.readString();
struct.setMessageIsSet(true);
}
}
}
|
|
package com.thinkbiganalytics.metadata.jobrepo.nifi.provenance;
/*-
* #%L
* thinkbig-operational-metadata-integration-service
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.thinkbiganalytics.activemq.config.ActiveMqConstants;
import com.thinkbiganalytics.metadata.api.MetadataAccess;
import com.thinkbiganalytics.metadata.api.event.MetadataEventService;
import com.thinkbiganalytics.metadata.api.event.feed.FeedOperationStatusEvent;
import com.thinkbiganalytics.metadata.api.event.feed.OperationStatus;
import com.thinkbiganalytics.metadata.api.feed.DeleteFeedListener;
import com.thinkbiganalytics.metadata.api.feed.OpsManagerFeed;
import com.thinkbiganalytics.metadata.api.feed.OpsManagerFeedProvider;
import com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecution;
import com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecutionProvider;
import com.thinkbiganalytics.metadata.api.jobrepo.nifi.NifiEvent;
import com.thinkbiganalytics.metadata.api.jobrepo.step.BatchStepExecution;
import com.thinkbiganalytics.metadata.api.jobrepo.step.BatchStepExecutionProvider;
import com.thinkbiganalytics.metadata.api.jobrepo.step.FailedStepExecutionListener;
import com.thinkbiganalytics.metadata.api.op.FeedOperation;
import com.thinkbiganalytics.metadata.jpa.jobrepo.nifi.NifiEventProvider;
import com.thinkbiganalytics.nifi.activemq.Queues;
import com.thinkbiganalytics.nifi.provenance.model.ProvenanceEventRecordDTO;
import com.thinkbiganalytics.nifi.provenance.model.ProvenanceEventRecordDTOHolder;
import com.thinkbiganalytics.nifi.rest.client.LegacyNifiRestClient;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.web.api.dto.BulletinDTO;
import org.hibernate.exception.LockAcquisitionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jms.annotation.JmsListener;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
/**
 * JMS Listener for NiFi Provenance Events.
 *
 * Receives batches of {@link ProvenanceEventRecordDTO} from the feed-manager queue,
 * persists them (batch jobs also get Job/Step records), and notifies the event bus
 * when a job finishes so feed triggering can occur.
 */
@Component
public class ProvenanceEventReceiver implements FailedStepExecutionListener, DeleteFeedListener {
private static final Logger log = LoggerFactory.getLogger(ProvenanceEventReceiver.class);
/**
 * Empty feed object for Loading Cache
 */
static OpsManagerFeed NULL_FEED = new OpsManagerFeed() {
@Override
public ID getId() {
return null;
}
@Override
public String getName() {
return null;
}
@Override
protected Object clone() throws CloneNotSupportedException {
return super.clone();
}
@Override
public int hashCode() {
return super.hashCode();
}
@Override
public FeedType getFeedType() {
return null;
}
};
@Inject
OpsManagerFeedProvider opsManagerFeedProvider;
@Inject
NifiBulletinExceptionExtractor nifiBulletinExceptionExtractor;
/**
 * Temporary cache of completed events used to ensure we don't trigger the same completion event twice
 */
Cache<String, String> completedJobEvents = CacheBuilder.newBuilder().expireAfterWrite(20, TimeUnit.MINUTES).build();
/**
 * Cache of the Ops Manager Feed Object to ensure that we only process and create Job Executions for feeds that have been registered in Feed Manager
 */
LoadingCache<String, OpsManagerFeed> opsManagerFeedCache = null;
@Value("${kylo.ops.mgr.query.nifi.bulletins:false}")
private boolean queryForNiFiBulletins;
@Inject
private NifiEventProvider nifiEventProvider;
@Inject
private BatchJobExecutionProvider batchJobExecutionProvider;
@Inject
private BatchStepExecutionProvider batchStepExecutionProvider;
@Inject
private LegacyNifiRestClient nifiRestClient;
@Inject
private MetadataAccess metadataAccess;
@Inject
private MetadataEventService eventService;
/**
 * The amount of retry attempts the system will do if it gets a LockAcquisitionException
 * MySQL may fail to lock the table when performing inserts into the database resulting in a deadlock exception.
 * When processing each event the LockAcquisitionException is caught and a retry attempt is done, retrying to process the event this amount of times before giving up.
 */
private int lockAcquisitionRetryAmount = 4;
/**
 * default constructor creates the feed cache
 */
public ProvenanceEventReceiver() {
// create the loading Cache to get the Feed Manager Feeds. If its not in the cache, query the JCR store for the Feed object otherwise return the NULL_FEED object
opsManagerFeedCache = CacheBuilder.newBuilder().build(new CacheLoader<String, OpsManagerFeed>() {
@Override
public OpsManagerFeed load(String feedName) throws Exception {
OpsManagerFeed feed = null;
try {
feed = metadataAccess.commit(() -> opsManagerFeedProvider.findByName(feedName),
MetadataAccess.SERVICE);
} catch (Exception e) {
// The lookup is best-effort: a failure is treated the same as "feed not registered".
log.debug("Feed lookup for cache key {} failed; treating as unregistered", feedName, e);
}
return feed == null ? NULL_FEED : feed;
}
}
);
}
@PostConstruct
private void init() {
batchStepExecutionProvider.subscribeToFailedSteps(this);
opsManagerFeedProvider.subscribeFeedDeletion(this);
}
/**
 * Unique key for the Event in relation to the Job
 *
 * @param event a provenance event
 * @return a unique key representing the event
 */
private String triggeredEventsKey(ProvenanceEventRecordDTO event) {
return event.getJobFlowFileId() + "_" + event.getEventId();
}
/**
 * Process the Events from Nifi
 * If it is a batch job, write the records to Ops manager.
 * if it is a stream just write to the Nifi_event table.
 * When either are marked as the last event Notify the event bus for the trigger feed mechanism to work.
 *
 * @param events The events obtained from JMS
 */
@JmsListener(destination = Queues.FEED_MANAGER_QUEUE, containerFactory = ActiveMqConstants.JMS_CONTAINER_FACTORY, concurrency = "3-10")
public void receiveEvents(ProvenanceEventRecordDTOHolder events) {
log.info("About to process batch: {}, {} events from the {} queue ", events.getBatchId(),events.getEvents().size(), Queues.FEED_MANAGER_QUEUE);
events.getEvents().stream()
.filter(this::isRegisteredWithFeedManager)
.filter(this::ensureNewEvent)
.forEach(event -> processEvent(event, 0));
}
/**
 * process the event and persist it along with creating the Job and Step. If there is a lock error it will retry until it hits the {@link this#lockAcquisitionRetryAmount}
 *
 * @param event a provenance event
 * @param retryAttempt the retry number. If there is a lock error it will retry until it hits the {@link this#lockAcquisitionRetryAmount}
 */
private void processEvent(ProvenanceEventRecordDTO event, int retryAttempt) {
try {
if (event.isBatchJob()) {
//ensure the job is there
BatchJobExecution jobExecution = metadataAccess.commit(() -> batchJobExecutionProvider.getOrCreateJobExecution(event),
MetadataAccess.SERVICE);
metadataAccess.commit(() -> receiveBatchEvent(jobExecution, event),
MetadataAccess.SERVICE);
} else {
// Streaming events only land in the nifi_event table.
metadataAccess.commit(() -> nifiEventProvider.create(event),
MetadataAccess.SERVICE);
}
if (event.isFinalJobEvent()) {
notifyJobFinished(event);
}
} catch (LockAcquisitionException lae) {
//safeguard against LockAcquisitionException if MySQL has a problem locking the table during its processing of the Event.
if (retryAttempt < lockAcquisitionRetryAmount) {
retryAttempt++;
log.error("LockAcquisitionException found trying to process Event: {} . Retry attempt # {} ", event, retryAttempt, lae);
//wait and re attempt
try {
Thread.sleep(300L);
} catch (InterruptedException ie) {
// Restore the interrupt flag so callers up the stack can observe it.
Thread.currentThread().interrupt();
}
processEvent(event, retryAttempt);
} else {
log.error("LockAcquisitionException found. Unsuccessful after retrying {} times. This event {} will not be processed. ", retryAttempt, event, lae);
}
} catch (Exception e) {
// BUGFIX: the message previously had no {} placeholder, so the event was dropped from the log.
log.error("Error processing Event {} ", event, e);
}
}
/**
 * Process this record and record the Job and steps
 *
 * @param jobExecution the job execution
 * @param event a provenance event
 * @return a persisted nifi event object
 */
private NifiEvent receiveBatchEvent(BatchJobExecution jobExecution, ProvenanceEventRecordDTO event) {
NifiEvent nifiEvent = null;
log.debug("Received ProvenanceEvent {}. is end of Job: {}. is ending flowfile:{}, isBatch: {}", event, event.isEndOfJob(), event.isEndingFlowFileEvent(), event.isBatchJob());
nifiEvent = nifiEventProvider.create(event);
//query it again
jobExecution = batchJobExecutionProvider.findByJobExecutionId(jobExecution.getJobExecutionId());
BatchJobExecution job = batchJobExecutionProvider.save(jobExecution, event, nifiEvent);
if (job == null) {
log.error(" Detected a Batch event, but could not find related Job record. for event: {} is end of Job: {}. is ending flowfile:{}, isBatch: {}", event, event.isEndOfJob(),
event.isEndingFlowFileEvent(), event.isBatchJob());
}
return nifiEvent;
}
/**
 * Check to see if the event has a relationship to Feed Manager
 * In cases where a user is experimenting in NiFi and not using Feed Manager the event would not be registered
 *
 * @param event a provenance event
 * @return {@code true} if the event has a feed associated with it {@code false} if there is no feed associated with it
 */
private boolean isRegisteredWithFeedManager(ProvenanceEventRecordDTO event) {
String feedName = event.getFeedName();
if (StringUtils.isNotBlank(feedName)) {
OpsManagerFeed feed = opsManagerFeedCache.getUnchecked(feedName);
if (feed == null || NULL_FEED.equals(feed)) {
log.debug("Not processing operational metadata for feed {} , event {} because it is not registered in feed manager ", feedName, event);
// Evict the NULL_FEED sentinel so a later registration of this feed is picked up.
opsManagerFeedCache.invalidate(feedName);
return false;
} else {
return true;
}
}
return false;
}
/**
 * Notify that the Job is complete either as a successful job or failed Job
 *
 * @param event a provenance event
 */
private void notifyJobFinished(ProvenanceEventRecordDTO event) {
if (event.isFinalJobEvent()) {
String mapKey = triggeredEventsKey(event);
String alreadyTriggered = completedJobEvents.getIfPresent(mapKey);
if (alreadyTriggered == null) {
completedJobEvents.put(mapKey, mapKey);
/// TRIGGER JOB COMPLETE!!!
if (event.isHasFailedEvents()) {
failedJob(event);
} else {
successfulJob(event);
}
}
}
}
/**
 * Triggered for both Batch and Streaming Feed Jobs when the Job and any related Jobs (as a result of a Merge of other Jobs are complete but have a failure in the flow<br/> Example: <br/> Job
 * (FlowFile) 1,2,3 are all running<br/> Job 1,2,3 get Merged<br/> Job 1,2 finish<br/> Job 3 finishes <br/>
 *
 * This will fire when Job3 finishes indicating this entire flow is complete<br/>
 *
 * @param event a provenance event
 */
private void failedJob(ProvenanceEventRecordDTO event) {
if (queryForNiFiBulletins && event.isBatchJob()) {
queryForNiFiErrorBulletins(event);
}
FeedOperation.State state = FeedOperation.State.FAILURE;
log.debug("FAILED JOB for Event {} ", event);
this.eventService.notify(new FeedOperationStatusEvent(new OperationStatus(event.getFeedName(), null, state, "Failed Job")));
}
/**
 * Triggered for both Batch and Streaming Feed Jobs when the Job and any related Jobs (as a result of a Merge of other Jobs are complete<br/> Example: <br/> Job (FlowFile) 1,2,3 are all
 * running<br/> Job 1,2,3 get Merged<br/> Job 1,2 finish<br/> Job 3 finishes <br/>
 *
 * This will fire when Job3 finishes indicating this entire flow is complete<br/>
 *
 * @param event a provenance event
 */
private void successfulJob(ProvenanceEventRecordDTO event) {
FeedOperation.State state = FeedOperation.State.SUCCESS;
log.debug("Success JOB for Event {} ", event);
this.eventService.notify(new FeedOperationStatusEvent(new OperationStatus(event.getFeedName(), null, state, "Job Succeeded for feed: " + event.getFeedName())));
}
/**
 * Make a REST call to NiFi and query for the NiFi Bulletins that have a flowfile id matching for this job execution and write the bulletin message to the {@link
 * BatchJobExecution#setExitMessage(String)}
 *
 * @param event a provenance event
 */
private void queryForNiFiErrorBulletins(ProvenanceEventRecordDTO event) {
try {
metadataAccess.commit(() -> {
//query for nifi logs
List<String> relatedFlowFiles = batchJobExecutionProvider.findRelatedFlowFiles(event.getFlowFileUuid());
if (relatedFlowFiles == null) {
relatedFlowFiles = new ArrayList<>();
}
if (relatedFlowFiles.isEmpty()) {
relatedFlowFiles.add(event.getFlowFileUuid());
}
log.info("Failed Job {}/{}. Found {} related flow files. ", event.getEventId(), event.getFlowFileUuid(), relatedFlowFiles.size());
List<BulletinDTO> bulletinDTOS = nifiBulletinExceptionExtractor.getErrorBulletinsForFlowFiles(relatedFlowFiles);
if (bulletinDTOS != null && !bulletinDTOS.isEmpty()) {
//write them back to the job
BatchJobExecution jobExecution = batchJobExecutionProvider.findJobExecution(event);
if (jobExecution != null) {
String msg = jobExecution.getExitMessage() != null ? jobExecution.getExitMessage() + "\n" : "";
msg += "NiFi exceptions: \n" + bulletinDTOS.stream().map(bulletinDTO -> bulletinDTO.getMessage()).collect(Collectors.joining("\n"));
jobExecution.setExitMessage(msg);
this.batchJobExecutionProvider.save(jobExecution);
}
}
}, MetadataAccess.SERVICE);
} catch (Exception e) {
log.error("Unable to query NiFi and save exception bulletins for job failure eventid/flowfile : {} / {}. Exception Message: {}", event.getEventId(), event.getFlowFileUuid(),
e.getMessage(), e);
}
}
/**
 * Fails the step identified by the parameters given
 *
 * @param jobExecution the job execution
 * @param stepExecution the step execution
 * @param flowFileId the id of the flow file
 * @param componentId the id of the component
 */
@Override
public void failedStep(BatchJobExecution jobExecution, BatchStepExecution stepExecution, String flowFileId, String componentId) {
nifiBulletinExceptionExtractor.addErrorMessagesToStep(stepExecution, flowFileId, componentId);
}
/**
 * Indicates if the specified event hasn't already been processed.
 *
 * @param event the event to check
 * @return {@code true} if the event is new, or {@code false} otherwise
 */
private boolean ensureNewEvent(ProvenanceEventRecordDTO event) {
return metadataAccess.read(() -> !nifiEventProvider.exists(event), MetadataAccess.SERVICE);
}
/**
 * When a feed is deleted remove it from the cache of feed names
 *
 * @param feed a delete feed
 */
@Override
public void onFeedDelete(OpsManagerFeed feed) {
log.info("Notified that feed {} has been deleted. Removing this feed from the ProvenanceEventReceiver cache. ", feed.getName());
opsManagerFeedCache.invalidate(feed.getName());
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet.columnreaders;
import static org.apache.parquet.format.converter.ParquetMetadataConverter.fromParquetStatistics;
import static org.apache.parquet.column.Encoding.valueOf;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.DrillBuf;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.exec.store.parquet.ColumnDataReader;
import org.apache.drill.exec.store.parquet.ParquetFormatPlugin;
import org.apache.drill.exec.store.parquet.ParquetReaderStats;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.bytes.BytesInput;
import org.apache.parquet.column.Dictionary;
import org.apache.parquet.column.Encoding;
import org.apache.parquet.column.ValuesType;
import org.apache.parquet.column.page.DictionaryPage;
import org.apache.parquet.column.statistics.Statistics;
import org.apache.parquet.column.values.ValuesReader;
import org.apache.parquet.column.values.dictionary.DictionaryValuesReader;
import org.apache.parquet.format.PageHeader;
import org.apache.parquet.format.PageType;
import org.apache.parquet.format.Util;
import org.apache.parquet.format.converter.ParquetMetadataConverter;
import org.apache.parquet.hadoop.CodecFactory;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.schema.PrimitiveType;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
/**
 * Reads Parquet pages (dictionary and data) for a single column chunk, decompressing them into
 * Drill buffers and tracking the read position within the current page. Variable length columns
 * intermix value lengths and value bytes, so positions are tracked as raw byte offsets rather
 * than being derived from value counts.
 */
final class PageReader {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PageReader.class);
  // Shared converter for translating Thrift page metadata into parquet-mr objects.
  public static final ParquetMetadataConverter METADATA_CONVERTER = ParquetFormatPlugin.parquetMetadataConverter;
  // The column reader this page reader feeds; also supplies the allocator, codec factory and stats.
  private final ColumnReader parentColumnReader;
  // Positioned reader over this column chunk's byte range of the file.
  private final ColumnDataReader dataReader;
  // buffer to store bytes of current page
  DrillBuf pageData;
  // for variable length data we need to keep track of our current position in the page data
  // as the values and lengths are intermixed, making random access to the length data impossible
  long readyToReadPosInBytes;
  // read position in the current page, stored in the ByteBuf in ParquetRecordReader called bufferWithAllData
  long readPosInBytes;
  // bit shift needed for the next page if the last one did not line up with a byte boundary
  int bitShift;
  // storage space for extra bits at the end of a page if they did not line up with a byte boundary
  // prevents the need to keep the entire last page, as these pageDataByteArray need to be added to the next batch
  //byte extraBits;
  // used for columns where the number of values that will fit in a vector is unknown
  // currently used for variable length
  // TODO - reuse this when compressed vectors are added, where fixed length values will take up a
  // variable amount of space
  // For example: if nulls are stored without extra space left in the data vector
  // (this is currently simplifying random access to the data during processing, but increases the size of the vectors)
  int valuesReadyToRead;
  // the number of values read out of the last page
  int valuesRead;
  // uncompressed byte length of the current page (set from the page header in next())
  int byteLength;
  //int rowGroupIndex;
  // Decoders for the current page's definition levels, repetition levels and values.
  ValuesReader definitionLevels;
  ValuesReader repetitionLevels;
  ValuesReader valueReader;
  // Two dictionary readers over the same page: one to determine value lengths, one to copy values.
  ValuesReader dictionaryLengthDeterminingReader;
  ValuesReader dictionaryValueReader;
  // Dictionary decoded from the chunk's dictionary page, if any (null otherwise).
  Dictionary dictionary;
  // Header of the page currently loaded; null before the first page.
  PageHeader pageHeader = null;
  // Number of values in the current page; -1 means no page is loaded (see hasPage()).
  int currentPageCount = -1;
  private FSDataInputStream inputStream;
  // These need to be held throughout reading of the entire column chunk
  List<ByteBuf> allocatedDictionaryBuffers;
  private final CodecFactory codecFactory;
  private final ParquetReaderStats stats;
  /**
   * Opens the file and positions a data reader at the column chunk's first data page, decoding
   * the dictionary page first when the chunk declares one.
   *
   * @param parentStatus the column reader that owns this page reader
   * @param fs filesystem used to open {@code path}
   * @param path path of the Parquet file being read
   * @param columnChunkMetaData metadata describing the column chunk to read
   * @throws ExecutionSetupException if the file cannot be opened or its metadata read
   */
  PageReader(ColumnReader<?> parentStatus, FileSystem fs, Path path, ColumnChunkMetaData columnChunkMetaData)
    throws ExecutionSetupException{
    this.parentColumnReader = parentStatus;
    allocatedDictionaryBuffers = new ArrayList<ByteBuf>();
    codecFactory = parentColumnReader.parentReader.getCodecFactory();
    this.stats = parentColumnReader.parentReader.parquetReaderStats;
    long start = columnChunkMetaData.getFirstDataPageOffset();
    try {
      inputStream = fs.open(path);
      this.dataReader = new ColumnDataReader(inputStream, start, columnChunkMetaData.getTotalSize());
      loadDictionaryIfExists(parentStatus, columnChunkMetaData, inputStream);
    } catch (IOException e) {
      throw new ExecutionSetupException("Error opening or reading metadata for parquet file at location: "
        + path.getName(), e);
    }
  }
  /**
   * Seeks to the dictionary page (when the chunk metadata declares one) and decodes it,
   * recording timing/size stats for the header read.
   */
  private void loadDictionaryIfExists(final ColumnReader<?> parentStatus,
      final ColumnChunkMetaData columnChunkMetaData, final FSDataInputStream f) throws IOException {
    // NOTE(review): the Stopwatch constructor is deprecated/removed in newer Guava; prefer
    // Stopwatch.createUnstarted() — confirm the Guava version this module builds against.
    Stopwatch timer = new Stopwatch();
    if (columnChunkMetaData.getDictionaryPageOffset() > 0) {
      f.seek(columnChunkMetaData.getDictionaryPageOffset());
      long start=f.getPos();
      timer.start();
      final PageHeader pageHeader = Util.readPageHeader(f);
      long timeToRead = timer.elapsed(TimeUnit.MICROSECONDS);
      long pageHeaderBytes=f.getPos()-start;
      this.updateStats(pageHeader, "Page Header", start, timeToRead, pageHeaderBytes, pageHeaderBytes);
      assert pageHeader.type == PageType.DICTIONARY_PAGE;
      readDictionaryPage(pageHeader, parentStatus);
    }
  }
  /**
   * Reads (and decompresses, if needed) a dictionary page into a long-lived buffer and
   * initializes {@link #dictionary} from it.
   */
  private void readDictionaryPage(final PageHeader pageHeader,
                                  final ColumnReader<?> parentStatus) throws IOException {
    int compressedSize = pageHeader.getCompressed_page_size();
    int uncompressedSize = pageHeader.getUncompressed_page_size();
    // Dictionary buffers must outlive individual pages; they are released in clearDictionaryBuffers().
    final DrillBuf dictionaryData = allocateDictionaryBuffer(uncompressedSize);
    readPage(pageHeader, compressedSize, uncompressedSize, dictionaryData);
    DictionaryPage page = new DictionaryPage(
        asBytesInput(dictionaryData, 0, uncompressedSize),
        pageHeader.uncompressed_page_size,
        pageHeader.dictionary_page_header.num_values,
        valueOf(pageHeader.dictionary_page_header.encoding.name()));
    this.dictionary = page.getEncoding().initDictionary(parentStatus.columnDescriptor, page);
  }
  /**
   * Loads one page's bytes into {@code dest}. Uncompressed chunks are copied directly; otherwise
   * the compressed bytes go through a temporary buffer and the chunk's codec. Timing/size stats
   * are recorded for both the read and the decompress phases.
   *
   * @param pageHeader header of the page being read (used only for stats classification)
   * @param compressedSize on-disk byte size of the page
   * @param uncompressedSize decoded byte size of the page; {@code dest} must hold at least this many bytes
   * @param dest destination buffer for the uncompressed page bytes
   */
  public void readPage(PageHeader pageHeader, int compressedSize, int uncompressedSize, DrillBuf dest) throws IOException {
    Stopwatch timer = new Stopwatch();
    long timeToRead;
    long start=inputStream.getPos();
    if (parentColumnReader.columnChunkMetaData.getCodec() == CompressionCodecName.UNCOMPRESSED) {
      timer.start();
      dataReader.loadPage(dest, compressedSize);
      timeToRead = timer.elapsed(TimeUnit.MICROSECONDS);
      this.updateStats(pageHeader, "Page Read", start, timeToRead, compressedSize, uncompressedSize);
    } else {
      // Temporary buffer is released in the finally block below; only dest escapes this method.
      final DrillBuf compressedData = allocateTemporaryBuffer(compressedSize);
      try {
      timer.start();
      dataReader.loadPage(compressedData, compressedSize);
      timeToRead = timer.elapsed(TimeUnit.MICROSECONDS);
      timer.reset();
      this.updateStats(pageHeader, "Page Read", start, timeToRead, compressedSize, compressedSize);
      start = inputStream.getPos();
      timer.start();
      codecFactory.getDecompressor(parentColumnReader.columnChunkMetaData
          .getCodec()).decompress(compressedData.nioBuffer(0, compressedSize), compressedSize,
          dest.nioBuffer(0, uncompressedSize), uncompressedSize);
      timeToRead = timer.elapsed(TimeUnit.MICROSECONDS);
      this.updateStats(pageHeader, "Decompress", start, timeToRead, compressedSize, uncompressedSize);
      } finally {
        compressedData.release();
      }
    }
  }
  /**
   * Wraps a slice of a DrillBuf as a parquet {@link BytesInput} without copying.
   */
  public static BytesInput asBytesInput(DrillBuf buf, int offset, int length) throws IOException {
    return BytesInput.from(buf.nioBuffer(offset, length), 0, length);
  }
  /**
   * Grab the next page.
   *
   * Skips over (and decodes) any dictionary pages encountered, loads the next data page into
   * {@link #pageData}, and initializes the repetition/definition/value readers positioned at the
   * start of the page's value data.
   *
   * @return - if another page was present
   * @throws java.io.IOException
   */
  public boolean next() throws IOException {
    Stopwatch timer = new Stopwatch();
    currentPageCount = -1;
    valuesRead = 0;
    valuesReadyToRead = 0;
    // TODO - the metatdata for total size appears to be incorrect for impala generated files, need to find cause
    // and submit a bug report
    if(!dataReader.hasRemainder() || parentColumnReader.totalValuesRead == parentColumnReader.columnChunkMetaData.getValueCount()) {
      return false;
    }
    clearBuffers();
    // next, we need to decompress the bytes
    // TODO - figure out if we need multiple dictionary pages, I believe it may be limited to one
    // I think we are clobbering parts of the dictionary if there can be multiple pages of dictionary
    do {
      long start=inputStream.getPos();
      timer.start();
      pageHeader = dataReader.readPageHeader();
      long timeToRead = timer.elapsed(TimeUnit.MICROSECONDS);
      this.updateStats(pageHeader, "Page Header Read", start, timeToRead, 0,0);
      logger.trace("ParquetTrace,{},{},{},{},{},{},{},{}","Page Header Read","",
          this.parentColumnReader.parentReader.hadoopPath,
          this.parentColumnReader.columnDescriptor.toString(), start, 0, 0, timeToRead);
      timer.reset();
      if (pageHeader.getType() == PageType.DICTIONARY_PAGE) {
        readDictionaryPage(pageHeader, parentColumnReader);
      }
    } while (pageHeader.getType() == PageType.DICTIONARY_PAGE);
    //TODO: Handle buffer allocation exception
    allocatePageData(pageHeader.getUncompressed_page_size());
    int compressedSize = pageHeader.getCompressed_page_size();
    int uncompressedSize = pageHeader.getUncompressed_page_size();
    readPage(pageHeader, compressedSize, uncompressedSize, pageData);
    currentPageCount = pageHeader.data_page_header.num_values;
    final int uncompressedPageSize = pageHeader.uncompressed_page_size;
    final Statistics<?> stats = fromParquetStatistics(pageHeader.data_page_header.getStatistics(), parentColumnReader
        .getColumnDescriptor().getType());
    final Encoding rlEncoding = METADATA_CONVERTER.getEncoding(pageHeader.data_page_header.repetition_level_encoding);
    final Encoding dlEncoding = METADATA_CONVERTER.getEncoding(pageHeader.data_page_header.definition_level_encoding);
    final Encoding valueEncoding = METADATA_CONVERTER.getEncoding(pageHeader.data_page_header.encoding);
    byteLength = pageHeader.uncompressed_page_size;
    final ByteBuffer pageDataBuffer = pageData.nioBuffer(0, pageData.capacity());
    readPosInBytes = 0;
    if (parentColumnReader.getColumnDescriptor().getMaxRepetitionLevel() > 0) {
      repetitionLevels = rlEncoding.getValuesReader(parentColumnReader.columnDescriptor, ValuesType.REPETITION_LEVEL);
      repetitionLevels.initFromPage(currentPageCount, pageDataBuffer, (int) readPosInBytes);
      // we know that the first value will be a 0, at the end of each list of repeated values we will hit another 0 indicating
      // a new record, although we don't know the length until we hit it (and this is a one way stream of integers) so we
      // read the first zero here to simplify the reading processes, and start reading the first value the same as all
      // of the rest. Effectively we are 'reading' the non-existent value in front of the first allowing direct access to
      // the first list of repetition levels
      readPosInBytes = repetitionLevels.getNextOffset();
      repetitionLevels.readInteger();
    }
    if (parentColumnReader.columnDescriptor.getMaxDefinitionLevel() != 0){
      parentColumnReader.currDefLevel = -1;
      definitionLevels = dlEncoding.getValuesReader(parentColumnReader.columnDescriptor, ValuesType.DEFINITION_LEVEL);
      definitionLevels.initFromPage(currentPageCount, pageDataBuffer, (int) readPosInBytes);
      readPosInBytes = definitionLevels.getNextOffset();
      if (!valueEncoding.usesDictionary()) {
        valueReader = valueEncoding.getValuesReader(parentColumnReader.columnDescriptor, ValuesType.VALUES);
        valueReader.initFromPage(currentPageCount, pageDataBuffer, (int) readPosInBytes);
      }
    }
    if (parentColumnReader.columnDescriptor.getType() == PrimitiveType.PrimitiveTypeName.BOOLEAN) {
      valueReader = valueEncoding.getValuesReader(parentColumnReader.columnDescriptor, ValuesType.VALUES);
      valueReader.initFromPage(currentPageCount, pageDataBuffer, (int) readPosInBytes);
    }
    if (valueEncoding.usesDictionary()) {
      // initialize two of the dictionary readers, one is for determining the lengths of each value, the second is for
      // actually copying the values out into the vectors
      dictionaryLengthDeterminingReader = new DictionaryValuesReader(dictionary);
      dictionaryLengthDeterminingReader.initFromPage(currentPageCount, pageDataBuffer, (int) readPosInBytes);
      dictionaryValueReader = new DictionaryValuesReader(dictionary);
      dictionaryValueReader.initFromPage(currentPageCount, pageDataBuffer, (int) readPosInBytes);
      parentColumnReader.usingDictionary = true;
    } else {
      parentColumnReader.usingDictionary = false;
    }
    // readPosInBytes is used for actually reading the values after we determine how many will fit in the vector
    // readyToReadPosInBytes serves a similar purpose for the vector types where we must count up the values that will
    // fit one record at a time, such as for variable length data. Both operations must start in the same location after the
    // definition and repetition level data which is stored alongside the page data itself
    readyToReadPosInBytes = readPosInBytes;
    return true;
  }
  /**
   * Allocate a page data buffer. Note that only one page data buffer should be active at a time. The reader will ensure
   * that the page data is released after the reader is completed.
   */
  private void allocatePageData(int size) {
    Preconditions.checkArgument(pageData == null);
    pageData = parentColumnReader.parentReader.getOperatorContext().getAllocator().buffer(size);
  }
  /**
   * Allocate a buffer which the user should release immediately. The reader does not manage release of these buffers.
   */
  private DrillBuf allocateTemporaryBuffer(int size) {
    return parentColumnReader.parentReader.getOperatorContext().getAllocator().buffer(size);
  }
  /**
   * Allocate and return a dictionary buffer. These are maintained for the life of the reader and then released when the
   * reader is cleared.
   */
  private DrillBuf allocateDictionaryBuffer(int size) {
    DrillBuf buf = parentColumnReader.parentReader.getOperatorContext().getAllocator().buffer(size);
    allocatedDictionaryBuffers.add(buf);
    return buf;
  }
  /**
   * @return {@code true} when a page is currently loaded (i.e. {@link #next()} succeeded and the
   *         page has not been cleared)
   */
  protected boolean hasPage() {
    return currentPageCount != -1;
  }
  /**
   * Records one read/decompress operation in the reader stats and trace log. Operations where
   * {@code bytesin == bytesout} are counted as plain loads, others as decompressions; dictionary
   * pages are tallied separately from data pages.
   */
  private void updateStats(PageHeader pageHeader, String op, long start, long time, long bytesin, long bytesout) {
    String pageType = "Data Page";
    if (pageHeader.type == PageType.DICTIONARY_PAGE) {
      pageType = "Dictionary Page";
    }
    logger.trace("ParquetTrace,{},{},{},{},{},{},{},{}", op, pageType.toString(),
        this.parentColumnReader.parentReader.hadoopPath,
        this.parentColumnReader.columnDescriptor.toString(), start, bytesin, bytesout, time);
    if (pageHeader.type != PageType.DICTIONARY_PAGE) {
      if (bytesin == bytesout) {
        this.stats.timePageLoads += time;
        this.stats.numPageLoads++;
        this.stats.totalPageReadBytes += bytesin;
      } else {
        this.stats.timePagesDecompressed += time;
        this.stats.numPagesDecompressed++;
        this.stats.totalDecompressedBytes += bytesin;
      }
    } else {
      if (bytesin == bytesout) {
        this.stats.timeDictPageLoads += time;
        this.stats.numDictPageLoads++;
        this.stats.totalDictPageReadBytes += bytesin;
      } else {
        this.stats.timeDictPagesDecompressed += time;
        this.stats.numDictPagesDecompressed++;
        this.stats.totalDictDecompressedBytes += bytesin;
      }
    }
  }
  /**
   * Releases the current page buffer, if any. Safe to call repeatedly.
   */
  public void clearBuffers() {
    if (pageData != null) {
      pageData.release();
      pageData = null;
    }
  }
  /**
   * Releases every dictionary buffer allocated over the life of this reader.
   */
  public void clearDictionaryBuffers() {
    for (ByteBuf b : allocatedDictionaryBuffers) {
      b.release();
    }
    allocatedDictionaryBuffers.clear();
  }
  /**
   * Releases all resources held by this reader: the underlying data reader plus all page and
   * dictionary buffers.
   */
  public void clear(){
    this.dataReader.clear();
    // Free all memory, including fixed length types. (Data is being copied for all types not just var length types)
    //if(!this.parentColumnReader.isFixedLength) {
    clearBuffers();
    clearDictionaryBuffers();
    //}
  }
}
|
|
/*******************************************************************************
* Copyright 2014 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
/**
* Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior
* University
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
**/
package org.openflow.protocol;
import java.util.Arrays;
import java.util.List;
import org.jboss.netty.buffer.ChannelBuffer;
import org.openflow.protocol.action.OFAction;
import org.openflow.protocol.factory.OFActionFactory;
import org.openflow.protocol.factory.OFActionFactoryAware;
import org.openflow.util.HexString;
import org.openflow.util.U16;
/**
 * Represents an ofp_packet_out message.
 *
 * <p>A packet-out carries either a switch buffer id referencing a packet already held by the
 * switch, or the raw packet bytes themselves — never both. {@link #validate()} enforces that
 * exactly one of the two is set.</p>
 *
 * @author David Erickson ([email protected]) - Mar 12, 2010
 */
public class OFPacketOut extends OFMessage implements OFActionFactoryAware {
    /** Fixed header length of an ofp_packet_out message in bytes. */
    public static final int MINIMUM_LENGTH = 16;
    /** Sentinel buffer id meaning "no buffered packet on the switch". */
    public static final int BUFFER_ID_NONE = 0xffffffff;

    protected OFActionFactory actionFactory;
    protected int bufferId;
    protected short inPort;
    protected short actionsLength;
    protected List<OFAction> actions;
    protected byte[] packetData;

    public OFPacketOut() {
        super();
        this.type = OFType.PACKET_OUT;
        this.length = U16.t(OFPacketOut.MINIMUM_LENGTH);
        this.bufferId = OFPacketOut.BUFFER_ID_NONE;
    }

    /**
     * Get buffer_id.
     *
     * @return the switch buffer id, or {@link #BUFFER_ID_NONE} when unset
     */
    public int getBufferId() {
        return this.bufferId;
    }

    /**
     * Set buffer_id.
     *
     * @param bufferId the switch buffer id to reference
     * @return this message, for call chaining
     * @throws IllegalArgumentException if packet data is already set and {@code bufferId} is not
     *             {@link #BUFFER_ID_NONE} (the two are mutually exclusive)
     */
    public OFPacketOut setBufferId(final int bufferId) {
        if (this.packetData != null && this.packetData.length > 0
                && bufferId != OFPacketOut.BUFFER_ID_NONE) {
            throw new IllegalArgumentException(
                    "PacketOut should not have both bufferId and packetData set");
        }
        this.bufferId = bufferId;
        return this;
    }

    /**
     * Returns the packet data.
     *
     * @return the raw packet bytes, or {@code null} when a buffer id is used instead
     */
    public byte[] getPacketData() {
        return this.packetData;
    }

    /**
     * Sets the packet data.
     *
     * @param packetData the raw packet bytes to send
     * @return this message, for call chaining
     * @throws IllegalArgumentException if a buffer id is already set (the two are mutually
     *             exclusive)
     */
    public OFPacketOut setPacketData(final byte[] packetData) {
        if (packetData != null && packetData.length > 0
                && this.bufferId != OFPacketOut.BUFFER_ID_NONE) {
            throw new IllegalArgumentException(
                    "PacketOut should not have both bufferId and packetData set");
        }
        this.packetData = packetData;
        return this;
    }

    /**
     * Get in_port.
     *
     * @return the ingress port associated with this packet
     */
    public short getInPort() {
        return this.inPort;
    }

    /**
     * Set in_port.
     *
     * @param inPort the ingress port number
     * @return this message, for call chaining
     */
    public OFPacketOut setInPort(final short inPort) {
        this.inPort = inPort;
        return this;
    }

    /**
     * Set in_port. Convenience method using OFPort enum.
     *
     * @param inPort the ingress port as an {@link OFPort}
     * @return this message, for call chaining
     */
    public OFPacketOut setInPort(final OFPort inPort) {
        this.inPort = inPort.getValue();
        return this;
    }

    /**
     * Get actions_len.
     *
     * @return the length of the serialized actions, in bytes (signed)
     */
    public short getActionsLength() {
        return this.actionsLength;
    }

    /**
     * Get actions_len, unsigned.
     *
     * @return the length of the serialized actions, in bytes, as a non-negative int
     */
    public int getActionsLengthU() {
        return U16.f(this.actionsLength);
    }

    /**
     * Set actions_len.
     *
     * @param actionsLength the length of the serialized actions, in bytes
     * @return this message, for call chaining
     */
    public OFPacketOut setActionsLength(final short actionsLength) {
        this.actionsLength = actionsLength;
        return this;
    }

    /**
     * Returns the actions contained in this message.
     *
     * @return a list of ordered OFAction objects
     */
    public List<OFAction> getActions() {
        return this.actions;
    }

    /**
     * Sets the list of actions on this message.
     *
     * @param actions a list of ordered OFAction objects
     * @return this message, for call chaining
     */
    public OFPacketOut setActions(final List<OFAction> actions) {
        this.actions = actions;
        return this;
    }

    @Override
    public void setActionFactory(final OFActionFactory actionFactory) {
        this.actionFactory = actionFactory;
    }

    @Override
    public void readFrom(final ChannelBuffer data) {
        super.readFrom(data);
        this.bufferId = data.readInt();
        this.inPort = data.readShort();
        this.actionsLength = data.readShort();
        if (this.actionFactory == null) {
            // An unset factory is a wiring error on this message object, not bad wire data.
            throw new IllegalStateException("ActionFactory not set");
        }
        this.actions = this.actionFactory.parseActions(data,
                this.getActionsLengthU());
        // Whatever remains after the fixed header and the actions is the packet payload.
        this.packetData = new byte[this.getLengthU()
                - OFPacketOut.MINIMUM_LENGTH - this.getActionsLengthU()];
        data.readBytes(this.packetData);
        this.validate();
    }

    @Override
    public void writeTo(final ChannelBuffer data) {
        this.validate();
        super.writeTo(data);
        data.writeInt(this.bufferId);
        data.writeShort(this.inPort);
        data.writeShort(this.actionsLength);
        for (final OFAction action : this.actions) {
            action.writeTo(data);
        }
        if (this.packetData != null) {
            data.writeBytes(this.packetData);
        }
    }

    /**
     * Validate the invariants of this OFMessage hold.
     *
     * @throws IllegalStateException unless exactly one of buffer id / packet data is set
     */
    public void validate() {
        // XOR: exactly one of (bufferId set, non-empty packetData) must hold.
        if (!(this.bufferId != OFPacketOut.BUFFER_ID_NONE ^ (this.packetData != null && this.packetData.length > 0))) {
            throw new IllegalStateException(
                    "OFPacketOut must have exactly one of (bufferId, packetData) set (not one, not both)");
        }
    }

    @Override
    public int hashCode() {
        final int prime = 293;
        int result = super.hashCode();
        result = prime * result
                + (this.actions == null ? 0 : this.actions.hashCode());
        result = prime * result + this.actionsLength;
        result = prime * result + this.bufferId;
        result = prime * result + this.inPort;
        result = prime * result + Arrays.hashCode(this.packetData);
        return result;
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if (!super.equals(obj)) {
            return false;
        }
        if (!(obj instanceof OFPacketOut)) {
            return false;
        }
        final OFPacketOut other = (OFPacketOut) obj;
        if (this.actions == null) {
            if (other.actions != null) {
                return false;
            }
        } else if (!this.actions.equals(other.actions)) {
            return false;
        }
        if (this.actionsLength != other.actionsLength) {
            return false;
        }
        if (this.bufferId != other.bufferId) {
            return false;
        }
        if (this.inPort != other.inPort) {
            return false;
        }
        if (!Arrays.equals(this.packetData, other.packetData)) {
            return false;
        }
        return true;
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return "OFPacketOut [actionFactory=" + this.actionFactory
                + ", actions=" + this.actions + ", actionsLength="
                + this.actionsLength + ", bufferId=0x"
                + Integer.toHexString(this.bufferId) + ", inPort="
                + this.inPort + ", packetData="
                + HexString.toHexString(this.packetData) + "]";
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deephacks.confit.internal.hbase;
import java.util.Random;
/**
* <p>Operations for random <code>String</code>s.</p>
* <p>Currently <em>private high surrogate</em> characters are ignored.
* These are unicode characters that fall between the values 56192 (db80)
* and 56319 (dbff) as we don't know how to handle them.
* High and low surrogates are correctly dealt with - that is if a
* high surrogate is randomly chosen, 55296 (d800) to 56191 (db7f)
* then it is followed by a low surrogate. If a low surrogate is chosen,
* 56320 (dc00) to 57343 (dfff) then it is placed after a randomly
* chosen high surrogate. </p>
*
* @author Apache Software Foundation
* @author <a href="mailto:[email protected]">Steven Caswell</a>
* @author Gary Gregory
* @author Phil Steitz
* @since 1.0
* @version $Id: RandomStringUtils.java 905636 2010-02-02 14:03:32Z niallp $
*/
public class RandomStringUtils {
    /**
     * <p>Random object used by random method. This has to be not local
     * to the random method so as to not return the same value in the
     * same millisecond.</p>
     *
     * <p>NOTE(review): {@link java.util.Random} is not cryptographically secure;
     * do not use these methods for security tokens — use SecureRandom instead.</p>
     */
    private static final Random RANDOM = new Random();
    /**
     * <p><code>RandomStringUtils</code> instances should NOT be constructed in
     * standard programming. Instead, the class should be used as
     * <code>RandomStringUtils.random(5);</code>.</p>
     *
     * <p>This constructor is public to permit tools that require a JavaBean instance
     * to operate.</p>
     */
    public RandomStringUtils() {
        super();
    }
// Random
//-----------------------------------------------------------------------
/**
* <p>Creates a random string whose length is the number of characters
* specified.</p>
*
* <p>Characters will be chosen from the set of all characters.</p>
*
* @param count the length of random string to create
* @return the random string
*/
public static String random(int count) {
return random(count, false, false);
}
/**
* <p>Creates a random string whose length is the number of characters
* specified.</p>
*
* <p>Characters will be chosen from the set of characters whose
* ASCII value is between <code>32</code> and <code>126</code> (inclusive).</p>
*
* @param count the length of random string to create
* @return the random string
*/
public static String randomAscii(int count) {
return random(count, 32, 127, false, false);
}
/**
* <p>Creates a random string whose length is the number of characters
* specified.</p>
*
* <p>Characters will be chosen from the set of alphabetic
* characters.</p>
*
* @param count the length of random string to create
* @return the random string
*/
public static String randomAlphabetic(int count) {
return random(count, true, false);
}
/**
* <p>Creates a random string whose length is the number of characters
* specified.</p>
*
* <p>Characters will be chosen from the set of alpha-numeric
* characters.</p>
*
* @param count the length of random string to create
* @return the random string
*/
public static String randomAlphanumeric(int count) {
return random(count, true, true);
}
/**
* <p>Creates a random string whose length is the number of characters
* specified.</p>
*
* <p>Characters will be chosen from the set of numeric
* characters.</p>
*
* @param count the length of random string to create
* @return the random string
*/
public static String randomNumeric(int count) {
return random(count, false, true);
}
/**
* <p>Creates a random string whose length is the number of characters
* specified.</p>
*
* <p>Characters will be chosen from the set of alpha-numeric
* characters as indicated by the arguments.</p>
*
* @param count the length of random string to create
* @param letters if <code>true</code>, generated string will include
* alphabetic characters
* @param numbers if <code>true</code>, generated string will include
* numeric characters
* @return the random string
*/
public static String random(int count, boolean letters, boolean numbers) {
return random(count, 0, 0, letters, numbers);
}
/**
* <p>Creates a random string whose length is the number of characters
* specified.</p>
*
* <p>Characters will be chosen from the set of alpha-numeric
* characters as indicated by the arguments.</p>
*
* @param count the length of random string to create
* @param start the position in set of chars to start at
* @param end the position in set of chars to end before
* @param letters if <code>true</code>, generated string will include
* alphabetic characters
* @param numbers if <code>true</code>, generated string will include
* numeric characters
* @return the random string
*/
public static String random(int count, int start, int end, boolean letters, boolean numbers) {
return random(count, start, end, letters, numbers, null, RANDOM);
}
/**
* <p>Creates a random string based on a variety of options, using
* default source of randomness.</p>
*
* <p>This method has exactly the same semantics as
* {@link #random(int,int,int,boolean,boolean,char[],Random)}, but
* instead of using an externally supplied source of randomness, it uses
* the internal static {@link Random} instance.</p>
*
* @param count the length of random string to create
* @param start the position in set of chars to start at
* @param end the position in set of chars to end before
* @param letters only allow letters?
* @param numbers only allow numbers?
* @param chars the set of chars to choose randoms from.
* If <code>null</code>, then it will use the set of all chars.
* @return the random string
* @throws ArrayIndexOutOfBoundsException if there are not
* <code>(end - start) + 1</code> characters in the set array.
*/
public static String random(int count, int start, int end, boolean letters, boolean numbers, char[] chars) {
return random(count, start, end, letters, numbers, chars, RANDOM);
}
/**
* <p>Creates a random string based on a variety of options, using
* supplied source of randomness.</p>
*
* <p>If start and end are both <code>0</code>, start and end are set
* to <code>' '</code> and <code>'z'</code>, the ASCII printable
* characters, will be used, unless letters and numbers are both
* <code>false</code>, in which case, start and end are set to
* <code>0</code> and <code>Integer.MAX_VALUE</code>.
*
* <p>If set is not <code>null</code>, characters between start and
* end are chosen.</p>
*
* <p>This method accepts a user-supplied {@link Random}
* instance to use as a source of randomness. By seeding a single
* {@link Random} instance with a fixed seed and using it for each call,
* the same random sequence of strings can be generated repeatedly
* and predictably.</p>
*
* @param count the length of random string to create
* @param start the position in set of chars to start at
* @param end the position in set of chars to end before
* @param letters only allow letters?
* @param numbers only allow numbers?
* @param chars the set of chars to choose randoms from.
* If <code>null</code>, then it will use the set of all chars.
* @param random a source of randomness.
* @return the random string
* @throws ArrayIndexOutOfBoundsException if there are not
* <code>(end - start) + 1</code> characters in the set array.
* @throws IllegalArgumentException if <code>count</code> < 0.
* @since 2.0
*/
    public static String random(int count, int start, int end, boolean letters, boolean numbers,
                                char[] chars, Random random) {
        if (count == 0) {
            return "";
        } else if (count < 0) {
            throw new IllegalArgumentException("Requested random string length " + count + " is less than 0.");
        }
        // Default range when none given: ASCII printable ' '..'z' if filtering to
        // letters/digits, otherwise the full char range.
        if ((start == 0) && (end == 0)) {
            end = 'z' + 1;
            start = ' ';
            if (!letters && !numbers) {
                start = 0;
                end = Integer.MAX_VALUE;
            }
        }
        char[] buffer = new char[count];
        int gap = end - start;
        // Fill the buffer from the end; `count` doubles as the next write index.
        // Rejected candidates increment count to retry the same slot.
        while (count-- != 0) {
            char ch;
            if (chars == null) {
                ch = (char) (random.nextInt(gap) + start);
            } else {
                ch = chars[random.nextInt(gap) + start];
            }
            if ((letters && Character.isLetter(ch))
                || (numbers && Character.isDigit(ch))
                || (!letters && !numbers))
            {
                // Surrogate handling: a lone surrogate would produce an ill-formed
                // String, so surrogates are paired up (or skipped) before storing.
                if(ch >= 56320 && ch <= 57343) { // low surrogate U+DC00..U+DFFF
                    if(count == 0) {
                        count++; // no room left for the matching high surrogate; retry slot
                    } else {
                        // low surrogate, insert high surrogate after putting it in
                        buffer[count] = ch;
                        count--;
                        buffer[count] = (char) (55296 + random.nextInt(128));
                    }
                } else if(ch >= 55296 && ch <= 56191) { // high surrogate U+D800..U+DB7F
                    if(count == 0) {
                        count++; // no room left for the matching low surrogate; retry slot
                    } else {
                        // high surrogate, insert low surrogate before putting it in
                        buffer[count] = (char) (56320 + random.nextInt(128));
                        count--;
                        buffer[count] = ch;
                    }
                } else if(ch >= 56192 && ch <= 56319) {
                    // private-use high surrogate with no sensible pairing - skip it
                    count++;
                } else {
                    buffer[count] = ch;
                }
            } else {
                count++; // char rejected by the letters/numbers filter; retry this slot
            }
        }
        return new String(buffer);
    }
/**
* <p>Creates a random string whose length is the number of characters
* specified.</p>
*
* <p>Characters will be chosen from the set of characters
* specified.</p>
*
* @param count the length of random string to create
* @param chars the String containing the set of characters to use,
* may be null
* @return the random string
* @throws IllegalArgumentException if <code>count</code> < 0.
*/
public static String random(int count, String chars) {
if (chars == null) {
return random(count, 0, 0, false, false, null, RANDOM);
}
return random(count, chars.toCharArray());
}
/**
* <p>Creates a random string whose length is the number of characters
* specified.</p>
*
* <p>Characters will be chosen from the set of characters specified.</p>
*
* @param count the length of random string to create
* @param chars the character array containing the set of characters to use,
* may be null
* @return the random string
* @throws IllegalArgumentException if <code>count</code> < 0.
*/
public static String random(int count, char[] chars) {
if (chars == null) {
return random(count, 0, 0, false, false, null, RANDOM);
}
return random(count, 0, chars.length, false, false, chars, RANDOM);
}
}
|
|
package edu.chalmers.sankoss.java;
import edu.chalmers.sankoss.core.core.Coordinate;
import edu.chalmers.sankoss.core.core.Ship;
import edu.chalmers.sankoss.core.exceptions.IllegalShipCoordinatesException;
import edu.chalmers.sankoss.core.protocol.GameReady;
import edu.chalmers.sankoss.server.server.Game;
import edu.chalmers.sankoss.server.server.Player;
import edu.chalmers.sankoss.server.server.UsedCoordinateException;
import org.junit.Test;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Unit tests for {@link Game}: coordinate bookkeeping, attacker rotation,
 * hit/sink detection, fleet destruction and player lookup/removal by id.
 *
 * @author Fredrik Thune
 */
public class GameTest {

    /** Firing must record the shot in the attacker's used-coordinate set. */
    @Test
    public void testUsedCoordinates() {
        Player player1 = new Player(1L, "player1");
        int size = player1.getUsedCoordinates().size();
        Player player2 = new Player(2L, "player2");
        List<Player> players = new ArrayList<Player>();
        players.add(player1);
        players.add(player2);
        Game game = new Game(3L, players);
        game.setAttacker(player1);
        Coordinate cor = new Coordinate(5, 3);
        try {
            game.fire(player2, cor);
        } catch (UsedCoordinateException e) {
            fail("Should not throw exception");
        }
        assertFalse(player1.getUsedCoordinates().size() == size);
        assertTrue(player1.getUsedCoordinates().contains(cor));
    }

    /** The attacker is unset until chosen, then alternates on changeAttacker(). */
    @Test
    public void testAttacker() {
        Player player1 = new Player(1L, "player1");
        Player player2 = new Player(2L, "player2");
        List<Player> players = new ArrayList<Player>();
        players.add(player1);
        players.add(player2);
        Game game = new Game(3L, players);
        assertFalse(game.getAttacker() == player1);
        assertFalse(game.getAttacker() == player2);
        game.setAttacker(player1);
        assertTrue(game.getAttacker() == player1);
        assertFalse(game.getAttacker() == player2);
        game.changeAttacker();
        assertFalse(game.getAttacker() == player1);
        assertTrue(game.getAttacker() == player2);
    }

    /** fire() returns the hit ship (null on miss), rejects repeats, and reports sinking. */
    @Test
    public void testHit() {
        Player player1 = new Player(1L, "player1");
        Player player2 = new Player(2L, "player2");
        List<Ship> fleet = new ArrayList<Ship>();
        try {
            fleet.add(new Ship(new Coordinate(1, 1), new Coordinate(1, 3)));
            fleet.add(new Ship(new Coordinate(2, 1), new Coordinate(2, 4)));
        } catch (IllegalShipCoordinatesException ignore) {
            fail("Should not throw exception");
        }
        player1.setFleet(fleet);
        List<Player> players = new ArrayList<Player>();
        players.add(player1);
        players.add(player2);
        Game game = new Game(3L, players);
        game.setAttacker(player2);
        try {
            assertFalse(game.fire(player1, new Coordinate(1, 1)) == null);
            assertTrue(game.fire(player1, new Coordinate(5, 5)) == null);
        } catch (UsedCoordinateException e) {
            fail("Should not throw exception");
        }
        try {
            game.fire(player1, new Coordinate(1, 1));
            fail("Exception should be thrown before this");
        } catch (UsedCoordinateException ignore) {
            // expected: that coordinate was already fired at
        }
        try {
            game.fire(player1, new Coordinate(1, 2));
            Ship ship = game.fire(player1, new Coordinate(1, 3));
            assertTrue(ship.isDestroyed());
        } catch (UsedCoordinateException e) {
            fail("Should not throw exception");
        }
    }

    /** The fleet reports destroyed only after every ship has been fully hit. */
    @Test
    public void testWinning() {
        Player player1 = new Player(1L, "player1");
        Player player2 = new Player(2L, "player2");
        List<Ship> fleet = new ArrayList<Ship>();
        try {
            fleet.add(new Ship(new Coordinate(1, 1), new Coordinate(1, 3)));
            fleet.add(new Ship(new Coordinate(2, 1), new Coordinate(2, 4)));
        } catch (IllegalShipCoordinatesException ignore) {
            fail("Should not throw exception");
        }
        player1.setFleet(fleet);
        List<Player> players = new ArrayList<Player>();
        players.add(player1);
        players.add(player2);
        Game game = new Game(3L, players);
        game.setAttacker(player2);
        try {
            game.fire(player1, new Coordinate(1, 1));
            game.fire(player1, new Coordinate(1, 2));
            game.fire(player1, new Coordinate(1, 3));
            assertFalse(player1.fleetIsDestoyed());
            game.fire(player1, new Coordinate(2, 1));
            game.fire(player1, new Coordinate(2, 2));
            game.fire(player1, new Coordinate(2, 3));
            game.fire(player1, new Coordinate(2, 4));
            assertTrue(player1.fleetIsDestoyed());
        } catch (UsedCoordinateException e) {
            // A rejected shot invalidates the scenario - fail instead of swallowing it.
            fail("Should not throw exception");
        }
    }

    /** hasPlayerWithID() matches only ids of players actually in the game. */
    @Test
    public void testHasPlayerWithID() {
        Player player1 = new Player(5245324L);
        Player player2 = new Player(523324L);
        List<Player> players = new LinkedList<Player>();
        players.add(player1);
        players.add(player2);
        Game game = new Game(774883L, players);
        assertTrue(game.hasPlayerWithID(5245324L) &&
                !game.hasPlayerWithID(1337L));
    }

    /** Only the first player added counts as the host. */
    @Test
    public void testIsPlayerWithIDHost() {
        Player player1 = new Player(5245324L);
        Player player2 = new Player(523324L);
        List<Player> players = new LinkedList<Player>();
        players.add(player1);
        players.add(player2);
        Game game = new Game(774883L, players);
        assertTrue(game.isPlayerWithIDHost(5245324L) &&
                !game.isPlayerWithIDHost(523324L));
    }

    /** removePlayerWithID() removes the matching player from the game. */
    @Test
    public void testRemovePlayerWithID() {
        Player player1 = new Player(5245324L);
        Player player2 = new Player(523324L);
        List<Player> players = new LinkedList<Player>();
        players.add(player1);
        players.add(player2);
        Game game = new Game(774883L, players);
        game.removePlayerWithID(523324L);
        assertFalse(game.hasPlayerWithID(523324L));
    }
}
|
|
package org.motechproject.mds.builder.impl;
import javassist.CtClass;
import javassist.NotFoundException;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.reflect.FieldUtils;
import org.motechproject.commons.date.model.Time;
import org.motechproject.mds.builder.EntityMetadataBuilder;
import org.motechproject.mds.domain.ClassData;
import org.motechproject.mds.domain.ComboboxHolder;
import org.motechproject.mds.domain.EntityType;
import org.motechproject.mds.domain.RelationshipHolder;
import org.motechproject.mds.dto.EntityDto;
import org.motechproject.mds.dto.FieldDto;
import org.motechproject.mds.dto.MetadataDto;
import org.motechproject.mds.dto.SchemaHolder;
import org.motechproject.mds.dto.SettingDto;
import org.motechproject.mds.dto.TypeDto;
import org.motechproject.mds.ex.MdsException;
import org.motechproject.mds.helper.ClassTableName;
import org.motechproject.mds.javassist.MotechClassPool;
import org.motechproject.mds.reflections.ReflectionsUtil;
import org.motechproject.mds.util.ClassName;
import org.motechproject.mds.util.Constants;
import org.motechproject.mds.util.TypeHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.stereotype.Component;
import javax.jdo.annotations.Column;
import javax.jdo.annotations.Element;
import javax.jdo.annotations.Extension;
import javax.jdo.annotations.ForeignKeyAction;
import javax.jdo.annotations.IdGeneratorStrategy;
import javax.jdo.annotations.IdentityType;
import javax.jdo.annotations.Inheritance;
import javax.jdo.annotations.Join;
import javax.jdo.annotations.NullValue;
import javax.jdo.annotations.PersistenceModifier;
import javax.jdo.annotations.Persistent;
import javax.jdo.annotations.Value;
import javax.jdo.annotations.Version;
import javax.jdo.metadata.ClassMetadata;
import javax.jdo.metadata.ClassPersistenceModifier;
import javax.jdo.metadata.CollectionMetadata;
import javax.jdo.metadata.ColumnMetadata;
import javax.jdo.metadata.ElementMetadata;
import javax.jdo.metadata.FieldMetadata;
import javax.jdo.metadata.ForeignKeyMetadata;
import javax.jdo.metadata.InheritanceMetadata;
import javax.jdo.metadata.JDOMetadata;
import javax.jdo.metadata.JoinMetadata;
import javax.jdo.metadata.MapMetadata;
import javax.jdo.metadata.MemberMetadata;
import javax.jdo.metadata.PackageMetadata;
import javax.jdo.metadata.UniqueMetadata;
import javax.jdo.metadata.ValueMetadata;
import javax.jdo.metadata.VersionMetadata;
import java.util.List;
import java.util.Map;
import static org.apache.commons.lang.StringUtils.defaultIfBlank;
import static org.apache.commons.lang.StringUtils.isNotBlank;
import static org.motechproject.mds.util.Constants.MetadataKeys.DATABASE_COLUMN_NAME;
import static org.motechproject.mds.util.Constants.MetadataKeys.MAP_KEY_TYPE;
import static org.motechproject.mds.util.Constants.MetadataKeys.MAP_VALUE_TYPE;
import static org.motechproject.mds.util.Constants.Util.CREATION_DATE_FIELD_NAME;
import static org.motechproject.mds.util.Constants.Util.CREATOR_FIELD_NAME;
import static org.motechproject.mds.util.Constants.Util.DATANUCLEUS;
import static org.motechproject.mds.util.Constants.Util.FALSE;
import static org.motechproject.mds.util.Constants.Util.ID_FIELD_NAME;
import static org.motechproject.mds.util.Constants.Util.INSTANCE_VERSION_FIELD_NAME;
import static org.motechproject.mds.util.Constants.Util.MODIFICATION_DATE_FIELD_NAME;
import static org.motechproject.mds.util.Constants.Util.MODIFIED_BY_FIELD_NAME;
import static org.motechproject.mds.util.Constants.Util.OWNER_FIELD_NAME;
import static org.motechproject.mds.util.Constants.Util.TRUE;
import static org.motechproject.mds.util.Constants.Util.VALUE_GENERATOR;
/**
* The <code>EntityMetadataBuilderImpl</code> class is responsible for building jdo metadata for an
* entity class.
*/
@Component
public class EntityMetadataBuilderImpl implements EntityMetadataBuilder {
private static final Logger LOGGER = LoggerFactory.getLogger(EntityMetadataBuilderImpl.class);
private static final String[] FIELD_VALUE_GENERATOR = new String[]{
CREATOR_FIELD_NAME, OWNER_FIELD_NAME, CREATION_DATE_FIELD_NAME,
MODIFIED_BY_FIELD_NAME, MODIFICATION_DATE_FIELD_NAME
};
private static final String ID_SUFFIX = "_ID";
    /**
     * Builds JDO class metadata (table, identity, inheritance, id, version and field
     * metadata) for a standard entity class.
     */
    @Override
    public void addEntityMetadata(JDOMetadata jdoMetadata, EntityDto entity, Class<?> definition, SchemaHolder schemaHolder) {
        // DDE classes keep their declared name; generated (EUDE) entities use the entity class name.
        String className = (entity.isDDE()) ? entity.getClassName() : ClassName.getEntityClassName(entity.getClassName());
        String packageName = ClassName.getPackage(className);
        String tableName = ClassTableName.getTableName(entity.getClassName(), entity.getModule(), entity.getNamespace(), entity.getTableName(), null);
        PackageMetadata pmd = getPackageMetadata(jdoMetadata, packageName);
        ClassMetadata cmd = getClassMetadata(pmd, ClassName.getSimpleName(ClassName.getEntityClassName(entity.getClassName())));
        cmd.setTable(tableName);
        cmd.setDetachable(true);
        cmd.setIdentityType(IdentityType.APPLICATION);
        cmd.setPersistenceModifier(ClassPersistenceModifier.PERSISTENCE_CAPABLE);
        addInheritanceMetadata(cmd, definition);
        // Subclasses of MdsEntity/MdsVersionedEntity inherit id (and version) from the base class.
        if (!entity.isSubClassOfMdsEntity() && !entity.isSubClassOfMdsVersionedEntity()) {
            addIdField(cmd, entity, schemaHolder, definition);
            //we add versioning metadata only for Standard class.
            addVersioningMetadata(cmd, definition);
        }
        addMetadataForFields(cmd, null, entity, EntityType.STANDARD, definition, schemaHolder);
    }
    /**
     * Builds JDO class metadata for a generated helper class (e.g. history/trash) of an
     * entity. Field metadata is added only when the entity schema is available.
     */
    @Override
    public void addHelperClassMetadata(JDOMetadata jdoMetadata, ClassData classData,
                                       EntityDto entity, EntityType entityType, Class<?> definition,
                                       SchemaHolder schemaHolder) {
        String packageName = ClassName.getPackage(classData.getClassName());
        String simpleName = ClassName.getSimpleName(classData.getClassName());
        String tableName = ClassTableName.getTableName(classData.getClassName(), classData.getModule(), classData.getNamespace(),
                entity == null ? "" : entity.getTableName(), entityType);
        PackageMetadata pmd = getPackageMetadata(jdoMetadata, packageName);
        ClassMetadata cmd = getClassMetadata(pmd, simpleName);
        cmd.setTable(tableName);
        cmd.setDetachable(true);
        cmd.setIdentityType(IdentityType.APPLICATION);
        cmd.setPersistenceModifier(ClassPersistenceModifier.PERSISTENCE_CAPABLE);
        // Helper classes always map to their own complete table.
        InheritanceMetadata imd = cmd.newInheritanceMetadata();
        imd.setCustomStrategy("complete-table");
        addIdField(cmd, classData.getClassName(), definition);
        if (entity != null) {
            addMetadataForFields(cmd, classData, entity, entityType, definition, schemaHolder);
        }
    }
    /**
     * Post-enhancement pass over all registered classes: for standard entities it fixes
     * collection element settings, relationship foreign keys and duplicate column
     * definitions that the enhancer cannot resolve on its own.
     */
    @Override
    public void fixEnhancerIssuesInMetadata(JDOMetadata jdoMetadata, SchemaHolder schemaHolder) {
        for (PackageMetadata pmd : jdoMetadata.getPackages()) {
            for (ClassMetadata cmd : pmd.getClasses()) {
                String className = String.format("%s.%s", pmd.getName(), cmd.getName());
                EntityType entityType = EntityType.forClassName(className);
                if (entityType == EntityType.STANDARD) {
                    EntityDto entity = schemaHolder.getEntityByClassName(className);
                    if (null != entity) {
                        for (MemberMetadata mmd : cmd.getMembers()) {
                            CollectionMetadata collMd = mmd.getCollectionMetadata();
                            FieldDto field = schemaHolder.getFieldByName(entity, mmd.getName());
                            if (null != collMd) {
                                fixCollectionMetadata(collMd, field);
                            }
                            if (null != field && field.getType().isRelationship()) {
                                fixRelationMetadata(pmd, field);
                            }
                            //Defining column name for join and element results in setting it both as XML attribute and child element
                            fixDuplicateColumnDefinitions(mmd);
                        }
                    }
                }
            }
        }
    }
    /**
     * Builds metadata for a schema-less base class; delegates with a null entity and
     * schema, so only class-level metadata and the id field are added.
     */
    @Override
    public void addBaseMetadata(JDOMetadata jdoMetadata, ClassData classData, EntityType entityType, Class<?> definition) {
        addHelperClassMetadata(jdoMetadata, classData, null, entityType, definition, null);
    }
private void addVersioningMetadata(ClassMetadata cmd, Class<?> definition) {
Class<Version> ann = ReflectionsUtil.getAnnotationClass(definition, Version.class);
Version versionAnnotation = AnnotationUtils.findAnnotation(definition, ann);
if (versionAnnotation != null) {
VersionMetadata vmd = cmd.newVersionMetadata();
vmd.setColumn(versionAnnotation.column());
vmd.setStrategy(versionAnnotation.strategy());
if (versionAnnotation.extensions().length == 0 || !"field-name".equals(versionAnnotation.extensions()[0].key())) {
throw new MdsException(String.format("Cannot create metadata fo %s. Extension not found in @Version annotation.", cmd.getName()));
}
Extension extension = versionAnnotation.extensions()[0];
vmd.newExtensionMetadata(DATANUCLEUS, "field-name", extension.value());
}
}
    /**
     * Adjusts collection metadata after enhancement: enhanced element classes must not
     * be embedded, and unidirectional 1:N collections get dependent-element deletion
     * according to the cascade-delete setting.
     */
    private void fixCollectionMetadata(CollectionMetadata collMd, FieldDto field) {
        String elementType = collMd.getElementType();
        RelationshipHolder holder = new RelationshipHolder(field);
        if (null != MotechClassPool.getEnhancedClassData(elementType)) {
            collMd.setEmbeddedElement(false);
        }
        if (holder.isOneToMany() && holder.getRelatedField() == null) {
            collMd.setDependentElement(holder.isCascadeDelete());
        }
    }
    /**
     * Adds a CASCADE delete foreign key on the related side of a bidirectional
     * relationship, so the RDBMS performs cascade deletion.
     */
    private void fixRelationMetadata(PackageMetadata pmd, FieldDto field) {
        RelationshipHolder holder = new RelationshipHolder(field);
        //for bidirectional 1:1 and 1:N relationship we're letting RDBMS take care of cascade deletion
        //this must be set here cause we can't get related class metadata before metadata enhancement
        if (shouldSetCascadeDelete(holder, EntityType.STANDARD)) {
            String relatedClass = ClassName.getSimpleName(holder.getRelatedClass());
            MemberMetadata rfmd = getFieldMetadata(getClassMetadata(pmd, relatedClass), holder.getRelatedField());
            ForeignKeyMetadata rfkmd = rfmd.newForeignKeyMetadata();
            rfkmd.setDeleteAction(ForeignKeyAction.CASCADE);
        }
    }
private void fixDuplicateColumnDefinitions(MemberMetadata mmd) {
JoinMetadata jmd = mmd.getJoinMetadata();
ElementMetadata emd = mmd.getElementMetadata();
if (jmd != null && ArrayUtils.isNotEmpty(jmd.getColumns()) && StringUtils.isNotEmpty(jmd.getColumn())) {
jmd.setColumn(null);
}
if (emd != null && ArrayUtils.isNotEmpty(emd.getColumns()) && StringUtils.isNotEmpty(emd.getColumn())) {
emd.setColumn(null);
}
}
private void addInheritanceMetadata(ClassMetadata cmd, Class<?> definition) {
Class<Inheritance> ann = ReflectionsUtil.getAnnotationClass(definition, Inheritance.class);
Inheritance annotation = AnnotationUtils.findAnnotation(definition, ann);
if (annotation == null) {
InheritanceMetadata imd = cmd.newInheritanceMetadata();
imd.setCustomStrategy("complete-table");
}
}
    /**
     * Puts the field in the default fetch group unless a @Persistent annotation on the
     * field (or its accessor) explicitly specifies a defaultFetchGroup value. Falls back
     * to default-fetch-group=true when the field cannot be resolved reflectively.
     */
    private void addDefaultFetchGroupMetadata(FieldMetadata fmd, Class<?> definition) {
        java.lang.reflect.Field field = FieldUtils.getField(definition, fmd.getName(), true);
        if (field == null) {
            LOGGER.warn("Unable to retrieve field {} from class {}. Putting the field in the default fetch group by default.",
                    fmd.getName(), definition.getName());
            fmd.setDefaultFetchGroup(true);
        } else {
            Persistent persistentAnnotation = ReflectionsUtil.getAnnotationSelfOrAccessor(field, Persistent.class);
            // set to true, unless there is a JDO annotation that specifies otherwise
            if (persistentAnnotation == null || StringUtils.isBlank(persistentAnnotation.defaultFetchGroup())) {
                fmd.setDefaultFetchGroup(true);
            }
        }
    }
private void addMetadataForFields(ClassMetadata cmd, ClassData classData, EntityDto entity,
EntityType entityType, Class<?> definition, SchemaHolder schemaHolder) {
List<FieldDto> fields = schemaHolder.getFields(entity);
for (FieldDto field : fields) {
if (field.isVersionField() && entityType != EntityType.STANDARD) {
continue;
}
String fieldName = getNameForMetadata(field);
processField(cmd, classData, entity, entityType, definition, fieldName, field, schemaHolder);
}
}
    /**
     * Creates and customizes metadata for a single field, skipping the id field
     * (handled in addIdField) and indexing fields that participate in lookups.
     */
    public void processField(ClassMetadata cmd, ClassData classData, EntityDto entity, EntityType entityType,
                             Class<?> definition, String fieldName, FieldDto field, SchemaHolder schemaHolder) {
        // Metadata for ID field has been added earlier in addIdField() method
        if (!fieldName.equals(ID_FIELD_NAME)) {
            FieldMetadata fmd = null;
            if (isFieldNotInherited(fieldName, entity, schemaHolder)) {
                fmd = setFieldMetadata(cmd, classData, entity, entityType, field, definition);
            }
            // when field is in Lookup, we set field metadata indexed to retrieve instance faster
            if (!field.getLookups().isEmpty() && entityType.equals(EntityType.STANDARD)) {
                if (fmd == null) {
                    // inherited lookup field: reference it as "SuperClassSimpleName.field"
                    String inheritedFieldName = ClassName.getSimpleName(entity.getSuperClass()) + "." + fieldName;
                    fmd = cmd.newFieldMetadata(inheritedFieldName);
                }
                fmd.setIndexed(true);
            }
            if (fmd != null) {
                customizeFieldMd(fmd, entity, field, entityType, definition);
            }
        }
    }
private boolean isFieldRequired(FieldDto field, EntityType entityType) {
return field.getBasic().isRequired() && !(entityType.equals(EntityType.TRASH) && field.getType().isRelationship());
}
private void customizeFieldMd(FieldMetadata fmd, EntityDto entity, FieldDto field, EntityType entityType,
Class<?> definition) {
setColumnParameters(fmd, field, definition);
// Check whether the field is required and set appropriate metadata
fmd.setNullValue(isFieldRequired(field, entityType) ? NullValue.EXCEPTION : NullValue.NONE);
// Non DDE fields have controllable unique
if (!field.isReadOnly() && entityType == EntityType.STANDARD && field.getBasic().isUnique()) {
UniqueMetadata umd = fmd.newUniqueMetadata();
// TODO: Move to KeyNames class (to be introduced in MOTECH-1991)
umd.setName(KeyNames.uniqueKeyName(entity.getName(), getNameForMetadata(field)));
}
}
private boolean isFieldNotInherited(String fieldName, EntityDto entity, SchemaHolder schemaHolder) {
if ((entity.isSubClassOfMdsEntity() || entity.isSubClassOfMdsVersionedEntity()) && (ArrayUtils.contains(FIELD_VALUE_GENERATOR, fieldName))
|| isVersionFieldFromMdsVersionedEntity(entity, fieldName)) {
return false;
} else {
// return false if it is inherited field from superclass
return entity.isBaseEntity() || !isFieldFromSuperClass(entity.getSuperClass(), fieldName, schemaHolder);
}
}
private boolean isVersionFieldFromMdsVersionedEntity(EntityDto entity, String fieldName) {
return entity.isSubClassOfMdsVersionedEntity() && INSTANCE_VERSION_FIELD_NAME.equals(fieldName);
}
    /** True when the superclass schema already declares a field with this name. */
    private boolean isFieldFromSuperClass(String className, String fieldName, SchemaHolder schemaHolder) {
        return schemaHolder.getFieldByName(className, fieldName) != null;
    }
    /**
     * Dispatches field metadata creation by field kind: auto-generated audit fields,
     * comboboxes, relationships, maps and Time all get special handling; anything else
     * gets plain field metadata.
     */
    private FieldMetadata setFieldMetadata(ClassMetadata cmd, ClassData classData, EntityDto entity,
                                           EntityType entityType, FieldDto field, Class<?> definition) {
        String name = getNameForMetadata(field);
        TypeDto type = field.getType();
        Class<?> typeClass = type.getClassObjectForType();
        if (ArrayUtils.contains(FIELD_VALUE_GENERATOR, name)) {
            return setAutoGenerationMetadata(cmd, name);
        } else if (type.isCombobox()) {
            return setComboboxMetadata(cmd, entity, field, definition);
        } else if (type.isRelationship()) {
            return setRelationshipMetadata(cmd, classData, entity, field, entityType, definition);
        } else if (Map.class.isAssignableFrom(typeClass)) {
            return setMapMetadata(cmd, field, definition);
        } else if (Time.class.isAssignableFrom(typeClass)) {
            return setTimeMetadata(cmd, name);
        }
        return cmd.newFieldMetadata(name);
    }
private MemberMetadata getFieldMetadata(ClassMetadata cmd, String relatedField) {
MemberMetadata fmd = null;
for (MemberMetadata field : cmd.getMembers()) {
if (field.getName().equals(relatedField)) {
fmd = field;
break;
}
}
return fmd;
}
    /**
     * Adds column metadata when the field declares a column name, string length /
     * text-area settings, or (for map fields) a DataNucleus @Value annotation.
     */
    private void setColumnParameters(FieldMetadata fmd, FieldDto field, Class<?> definition) {
        Value valueAnnotation = null;
        java.lang.reflect.Field fieldDefinition = FieldUtils.getDeclaredField(definition, field.getBasic().getName(), true);
        //@Value in datanucleus is used with maps.
        if (fieldDefinition != null && java.util.Map.class.isAssignableFrom(field.getType().getClassObjectForType())) {
            valueAnnotation = ReflectionsUtil.getAnnotationSelfOrAccessor(fieldDefinition, Value.class);
        }
        if ((field.getMetadata(DATABASE_COLUMN_NAME) != null || field.getSetting(Constants.Settings.STRING_MAX_LENGTH) != null
                || field.getSetting(Constants.Settings.STRING_TEXT_AREA) != null) || (valueAnnotation != null)) {
            addColumnMetadata(fmd, field, valueAnnotation);
        }
    }
    /**
     * Builds the column metadata for a field: applies max length (except for CLOBs),
     * switches text areas to CLOB storage, sets an explicit column name, and copies
     * @Value annotation parameters when present.
     */
    private void addColumnMetadata(FieldMetadata fmd, FieldDto field, Value valueAnnotation) {
        SettingDto maxLengthSetting = field.getSetting(Constants.Settings.STRING_MAX_LENGTH);
        ColumnMetadata colMd = fmd.newColumnMetadata();
        // text(clob) fields don't have length
        if (maxLengthSetting != null && !isClob(field)) {
            colMd.setLength(Integer.parseInt(maxLengthSetting.getValueAsString()));
        }
        // if TextArea then change length
        if (field.getSetting(Constants.Settings.STRING_TEXT_AREA) != null &&
                "true".equalsIgnoreCase(field.getSetting(Constants.Settings.STRING_TEXT_AREA).getValueAsString())) {
            fmd.setIndexed(false);
            colMd.setSQLType("CLOB");
        }
        if (field.getMetadata(DATABASE_COLUMN_NAME) != null) {
            colMd.setName(field.getMetadata(DATABASE_COLUMN_NAME).getValue());
        }
        if (valueAnnotation != null) {
            copyParametersFromValueAnnotation(fmd, valueAnnotation);
        }
    }
    /**
     * Copies every column definition from a DataNucleus @Value annotation into the
     * field's value metadata.
     */
    private void copyParametersFromValueAnnotation(FieldMetadata fmd, Value valueAnnotation) {
        ValueMetadata valueMetadata = fmd.newValueMetadata();
        for (Column column : valueAnnotation.columns()) {
            ColumnMetadata colMd = valueMetadata.newColumnMetadata();
            colMd.setName(column.name());
            colMd.setLength(column.length());
            colMd.setAllowsNull(Boolean.parseBoolean(column.allowsNull()));
            colMd.setDefaultValue(column.defaultValue());
            colMd.setInsertValue(column.insertValue());
            colMd.setJDBCType(column.jdbcType());
            colMd.setSQLType(column.sqlType());
        }
    }
    /**
     * Creates metadata for a Time field, persisted as a string through the registered
     * "dn.time-string" DataNucleus type converter.
     */
    private FieldMetadata setTimeMetadata(ClassMetadata cmd, String name) {
        // for time we register our converter which persists as string
        FieldMetadata fmd = cmd.newFieldMetadata(name);
        fmd.setPersistenceModifier(PersistenceModifier.PERSISTENT);
        fmd.setDefaultFetchGroup(true);
        fmd.newExtensionMetadata(DATANUCLEUS, "type-converter-name", "dn.time-string");
        return fmd;
    }
    /**
     * Creates metadata for a map field: maps whose generic types MDS cannot map to
     * columns are serialized in place; others get a join table with a cascade-delete
     * foreign key.
     */
    private FieldMetadata setMapMetadata(ClassMetadata cmd, FieldDto field, Class<?> definition) {
        FieldMetadata fmd = cmd.newFieldMetadata(getNameForMetadata(field));
        MetadataDto keyMetadata = field.getMetadata(MAP_KEY_TYPE);
        MetadataDto valueMetadata = field.getMetadata(MAP_VALUE_TYPE);
        boolean serialized = shouldSerializeMap(keyMetadata, valueMetadata);
        // Depending on the types of key and value of the map we either serialize the map or create a separate table for it
        fmd.setSerialized(serialized);
        addDefaultFetchGroupMetadata(fmd, definition);
        MapMetadata mmd = fmd.newMapMetadata();
        if (serialized) {
            mmd.setSerializedKey(true);
            mmd.setSerializedValue(true);
        } else {
            mmd.setKeyType(keyMetadata.getValue());
            mmd.setValueType(valueMetadata.getValue());
            fmd.setTable(ClassTableName.getTableName(cmd.getTable(), getNameForMetadata(field)));
            JoinMetadata jmd = fmd.newJoinMetadata();
            ForeignKeyMetadata fkmd = jmd.newForeignKeyMetadata();
            fkmd.setDeleteAction(ForeignKeyAction.CASCADE);
        }
        return fmd;
    }
private boolean shouldSerializeMap(MetadataDto keyMetadata, MetadataDto valueMetadata) {
// If generics types of map are not supported in MDS, we serialized the field in DB.
return keyMetadata == null || valueMetadata == null ||
! (TypeHelper.isTypeSupportedInMap(keyMetadata.getValue(), true) &&
TypeHelper.isTypeSupportedInMap(valueMetadata.getValue(), false));
}
    /**
     * Creates metadata for a relationship field; standard entities keep the real
     * relationship while history/trash classes store related ids instead.
     */
    private FieldMetadata setRelationshipMetadata(ClassMetadata cmd, ClassData classData,
                                                  EntityDto entity, FieldDto field,
                                                  EntityType entityType, Class<?> definition) {
        RelationshipHolder holder = new RelationshipHolder(classData, field);
        FieldMetadata fmd = cmd.newFieldMetadata(getNameForMetadata(field));
        addDefaultFetchGroupMetadata(fmd, definition);
        if (entityType == EntityType.STANDARD) {
            processRelationship(fmd, holder, entity, field, definition);
        } else {
            processHistoryTrashRelationship(cmd, fmd, holder);
        }
        return fmd;
    }
    /**
     * Configures a standard-entity relationship: cascade persist/update extensions,
     * collection metadata for 1:N and M:N, the dependent flag for unidirectional 1:1,
     * and join/element metadata for M:N.
     */
    private void processRelationship(FieldMetadata fmd, RelationshipHolder holder,
                                     EntityDto entity, FieldDto field,
                                     Class<?> definition) {
        String relatedClass = holder.getRelatedClass();
        fmd.newExtensionMetadata(DATANUCLEUS, "cascade-persist", holder.isCascadePersist() ? TRUE : FALSE);
        fmd.newExtensionMetadata(DATANUCLEUS, "cascade-update", holder.isCascadeUpdate() ? TRUE : FALSE);
        if (holder.isOneToMany() || holder.isManyToMany()) {
            setUpCollectionMetadata(fmd, relatedClass, holder, EntityType.STANDARD);
        } else if (holder.isOneToOne()) {
            fmd.setPersistenceModifier(PersistenceModifier.PERSISTENT);
            //for bidirectional 1:1 we're setting foreign key with cascade deletion after metadata enhancement
            if (holder.getRelatedField() == null) {
                fmd.setDependent(holder.isCascadeDelete());
            }
        }
        if (holder.isManyToMany()) {
            addManyToManyMetadata(fmd, holder, entity, field, definition);
        }
    }
    /**
     * History/trash classes store related ids rather than object references: a single
     * id column for 1:1/N:1, or a join table of Long ids for collection relationships.
     */
    private void processHistoryTrashRelationship(ClassMetadata cmd, FieldMetadata fmd, RelationshipHolder holder) {
        if (holder.isOneToOne() || holder.isManyToOne()) {
            fmd.setColumn(holder.getFieldName() + ID_SUFFIX);
        } else {
            fmd.setTable(cmd.getTable() + '_' + holder.getFieldName());
            CollectionMetadata collMd = fmd.newCollectionMetadata();
            collMd.setElementType(Long.class.getName());
            JoinMetadata joinMd = fmd.newJoinMetadata();
            ColumnMetadata joinColumnMd = joinMd.newColumnMetadata();
            joinColumnMd.setName(cmd.getName() + ID_SUFFIX);
            ElementMetadata elementMd = fmd.newElementMetadata();
            elementMd.setColumn(holder.getFieldName() + ID_SUFFIX);
        }
    }
    /**
     * Adds join/table/element metadata for M:N relationships unless the developer
     * already declared them through JDO annotations on the field.
     */
    private void addManyToManyMetadata(FieldMetadata fmd, RelationshipHolder holder, EntityDto entity, FieldDto field,
                                       Class<?> definition) {
        java.lang.reflect.Field fieldDefinition = FieldUtils.getDeclaredField(definition, field.getBasic().getName(), true);
        Join join = fieldDefinition.getAnnotation(Join.class);
        // If tables and column names have been specified in annotations, do not set their metadata
        // Join metadata must be present at exactly one side of the M:N relation when using Sets
        // When using Lists join metadata must be present at two sides of M:N relation
        if (!holder.isOwningSide() || holder.isListManyToMany()) {
            JoinMetadata jmd = null;
            if (join == null) {
                jmd = fmd.newJoinMetadata();
            }
            Persistent persistent = fieldDefinition.getAnnotation(Persistent.class);
            Element element = fieldDefinition.getAnnotation(Element.class);
            setTableNameMetadata(fmd, persistent, entity, field, holder, EntityType.STANDARD);
            setElementMetadata(fmd, element, holder);
            if (join == null || StringUtils.isEmpty(join.column())) {
                setJoinMetadata(jmd, fmd, ClassName.getSimpleName(entity.getClassName()).toUpperCase() + ID_SUFFIX);
            }
        }
    }
private void setElementMetadata(FieldMetadata fmd, Element element, RelationshipHolder holder) {
if (element == null || StringUtils.isEmpty(element.column())) {
ElementMetadata emd = fmd.newElementMetadata();
emd.setColumn((ClassName.getSimpleName(holder.getRelatedClass()) + ID_SUFFIX).toUpperCase());
}
}
private void setJoinMetadata(JoinMetadata jmd, FieldMetadata fmd, String column) {
JoinMetadata joinMetadata;
if (jmd == null) {
joinMetadata = fmd.newJoinMetadata();
joinMetadata.setOuter(false);
} else {
joinMetadata = jmd;
}
joinMetadata.newColumnMetadata().setName(column);
}
    /**
     * Sets the relationship's join table name: an entity-type-adjusted name when
     * @Persistent declares a table on non-standard classes, otherwise a generated
     * join-table name. An annotation-declared table on a standard class is left as-is.
     */
    private void setTableNameMetadata(FieldMetadata fmd, Persistent persistent, EntityDto entity, FieldDto field,
                                      RelationshipHolder holder, EntityType entityType) {
        if (persistent != null && StringUtils.isNotEmpty(persistent.table()) && entityType != EntityType.STANDARD) {
            fmd.setTable(entityType.getTableName(persistent.table()));
        } else if (persistent == null || StringUtils.isEmpty(persistent.table())) {
            fmd.setTable(getJoinTableName(entity.getModule(), entity.getNamespace(), field.getBasic().getName(),
                    holder.getRelatedField()));
        }
    }
    /**
     * Configures collection metadata for 1:N and M:N relationships: non-embedded,
     * non-serialized elements, dependent-element deletion for M:N, and mappedBy for
     * the non-owning side of Set-based M:N on standard entities.
     */
    private void setUpCollectionMetadata(FieldMetadata fmd, String relatedClass, RelationshipHolder holder, EntityType entityType) {
        CollectionMetadata colMd = getOrCreateCollectionMetadata(fmd);
        colMd.setElementType(relatedClass);
        colMd.setEmbeddedElement(false);
        colMd.setSerializedElement(false);
        //for 1:N we're setting foreign key with cascade deletion after metadata enhancement
        if (holder.isManyToMany()) {
            colMd.setDependentElement(holder.isCascadeDelete() || entityType == EntityType.TRASH);
        }
        if (holder.isSetManyToMany() && !holder.isOwningSide() && entityType.equals(EntityType.STANDARD)) {
            fmd.setMappedBy(holder.getRelatedField());
        }
    }
/**
 * Creates field metadata for a combobox field. Multi-select (collection)
 * comboboxes additionally get their own join table with a cascading foreign
 * key and an {@code <field>_OID} join column.
 *
 * @return the new field metadata, never null
 */
private FieldMetadata setComboboxMetadata(ClassMetadata cmd, EntityDto entity, FieldDto field, Class<?> definition) {
    ComboboxHolder holder = new ComboboxHolder(entity, field);
    String metadataName = getNameForMetadata(field);
    FieldMetadata fmd = cmd.newFieldMetadata(metadataName);
    if (!holder.isCollection()) {
        // Single-select combobox needs no join table.
        return fmd;
    }
    addDefaultFetchGroupMetadata(fmd, definition);
    fmd.setTable(ClassTableName.getTableName(cmd.getTable(), metadataName));
    JoinMetadata joinMetadata = fmd.newJoinMetadata();
    joinMetadata.newForeignKeyMetadata();
    joinMetadata.setDeleteAction(ForeignKeyAction.CASCADE);
    joinMetadata.newColumnMetadata().setName(metadataName + "_OID");
    return fmd;
}
/**
 * Creates persistent, default-fetch-group field metadata whose value is produced
 * by the DataNucleus object value generator registered as {@code ovg.<name>}.
 *
 * @return the newly created field metadata
 */
private FieldMetadata setAutoGenerationMetadata(ClassMetadata cmd, String name) {
    FieldMetadata generated = cmd.newFieldMetadata(name);
    generated.setPersistenceModifier(PersistenceModifier.PERSISTENT);
    generated.setDefaultFetchGroup(true);
    generated.newExtensionMetadata(DATANUCLEUS, VALUE_GENERATOR, "ovg." + name);
    return generated;
}
/**
 * Returns the class metadata with the given name from the package, creating and
 * registering new metadata when no match exists yet.
 */
private static ClassMetadata getClassMetadata(PackageMetadata pmd, String className) {
    ClassMetadata[] declared = pmd.getClasses();
    if (ArrayUtils.isNotEmpty(declared)) {
        for (int i = 0; i < declared.length; i++) {
            if (StringUtils.equals(className, declared[i].getName())) {
                return declared[i];
            }
        }
    }
    // Not registered yet — create it.
    return pmd.newClassMetadata(className);
}
/**
 * Returns the package metadata with the given name from the JDO metadata,
 * creating and registering new package metadata when none matches.
 */
private static PackageMetadata getPackageMetadata(JDOMetadata jdoMetadata, String packageName) {
    PackageMetadata[] declared = jdoMetadata.getPackages();
    if (ArrayUtils.isNotEmpty(declared)) {
        for (int i = 0; i < declared.length; i++) {
            if (StringUtils.equals(declared[i].getName(), packageName)) {
                return declared[i];
            }
        }
    }
    // Not registered yet — create it.
    return jdoMetadata.newPackageMetadata(packageName);
}
/**
 * Adds indexed primary-key metadata for the id field, but only when the schema
 * declares an id field on this entity AND the entity is a base (non-subclassed)
 * entity — subclasses inherit the id from their base class.
 */
private void addIdField(ClassMetadata cmd, EntityDto entity, SchemaHolder schemaHolder, Class<?> definition) {
    boolean hasIdField = schemaHolder.getFieldByName(entity, ID_FIELD_NAME) != null;
    if (hasIdField && entity.isBaseEntity()) {
        FieldMetadata idMetadata = cmd.newFieldMetadata(ID_FIELD_NAME);
        idMetadata.setValueStrategy(getIdGeneratorStrategy(idMetadata, definition));
        idMetadata.setPrimaryKey(true);
        idMetadata.setIndexed(true);
    }
}
/**
 * Adds indexed primary-key metadata for the id field of the named class,
 * inspecting the class through Javassist. The metadata is only added when the
 * class both declares the id field and extends {@code Object} directly (i.e.
 * it is a base class).
 */
private void addIdField(ClassMetadata cmd, String className, Class<?> definition) {
    boolean hasIdField = false;
    boolean baseClass = false;
    try {
        CtClass ctClass = MotechClassPool.getDefault().getOrNull(className);
        if (ctClass != null) {
            // CtClass.getField throws NotFoundException when the field is absent,
            // which the catch below turns into "no id field".
            hasIdField = null != ctClass.getField(ID_FIELD_NAME);
            CtClass superClass = ctClass.getSuperclass();
            baseClass = superClass == null || Object.class.getName().equalsIgnoreCase(superClass.getName());
        }
    } catch (NotFoundException e) {
        hasIdField = false;
        baseClass = false;
    }
    if (hasIdField && baseClass) {
        FieldMetadata idMetadata = cmd.newFieldMetadata(ID_FIELD_NAME);
        idMetadata.setValueStrategy(getIdGeneratorStrategy(idMetadata, definition));
        idMetadata.setPrimaryKey(true);
        idMetadata.setIndexed(true);
    }
}
/**
 * Resolves the id generator strategy for the field: a strategy explicitly set
 * via {@code @Persistent(valueStrategy=...)} on the field (or its accessor)
 * wins; anything missing or UNSPECIFIED falls back to NATIVE.
 */
private IdGeneratorStrategy getIdGeneratorStrategy(FieldMetadata fmd, Class<?> definition) {
    java.lang.reflect.Field declaredField = FieldUtils.getField(definition, fmd.getName(), true);
    if (declaredField == null) {
        return IdGeneratorStrategy.NATIVE;
    }
    Persistent annotation = ReflectionsUtil.getAnnotationSelfOrAccessor(declaredField, Persistent.class);
    if (annotation == null || annotation.valueStrategy() == null
            || annotation.valueStrategy().equals(IdGeneratorStrategy.UNSPECIFIED)) {
        return IdGeneratorStrategy.NATIVE;
    }
    return annotation.valueStrategy();
}
/**
 * Returns the field's collection metadata, lazily creating it on first use.
 */
private CollectionMetadata getOrCreateCollectionMetadata(FieldMetadata fmd) {
    CollectionMetadata existing = fmd.getCollectionMetadata();
    return existing != null ? existing : fmd.newCollectionMetadata();
}
/**
 * Builds the generated join-table name:
 * {@code <MODULE>_[<NAMESPACE>_]JOIN_<INVERSE>_<OWNING><TYPE_SUFFIX>},
 * upper-cased with dashes and spaces replaced by underscores. The module
 * defaults to "MDS" when blank.
 */
private String getJoinTableName(String module, String namespace, String owningSideName, String inversedSideNameWithSuffix) {
    StringBuilder name = new StringBuilder(defaultIfBlank(module, "MDS")).append('_');
    if (isNotBlank(namespace)) {
        name.append(namespace).append('_');
    }
    name.append("Join_")
            .append(inversedSideNameWithSuffix).append('_')
            .append(owningSideName)
            .append(ClassName.getEntityTypeSuffix(inversedSideNameWithSuffix));
    return name.toString().replace('-', '_').replace(' ', '_').toUpperCase();
}
/**
 * A cascading delete is applied only for bidirectional 1:1 / 1:N relationships
 * (a related field must exist) when either the relationship requests cascade
 * deletion or the entity type is TRASH.
 */
private boolean shouldSetCascadeDelete(RelationshipHolder holder, EntityType entityType) {
    boolean cascadeRequested = holder.isCascadeDelete() || entityType == EntityType.TRASH;
    return cascadeRequested
            && (holder.isOneToOne() || holder.isOneToMany())
            && holder.getRelatedField() != null;
}
/**
 * Returns the field's basic name with its first letter lower-cased, the form
 * used for all generated metadata names.
 */
private String getNameForMetadata(FieldDto field) {
    String basicName = field.getBasic().getName();
    return StringUtils.uncapitalize(basicName);
}
/**
 * A string field is mapped as a CLOB when its "text area" setting is "true"
 * (case-insensitive).
 */
private boolean isClob(FieldDto field) {
    String textAreaSetting = field.getSettingsValueAsString(Constants.Settings.STRING_TEXT_AREA);
    return Constants.Util.TRUE.equalsIgnoreCase(textAreaSetting);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runners.MethodSorters;
import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.PartitionAttributesFactory;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache.RegionExistsException;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
* This is a test for creation of Partition
* region(PR).
* <p>
* Following tests are included in PartitionedRegionCreationJUnitTest :
* </p>
* <p>
* 1) testpartionedRegionCreate - Tests the PR creation.
* </p>
* <p>
* 2) testpartionedRegionInitialization - Tests the PR initialization
* </p>
* <p>
* 3) testpartionedRegionRegistration - Tests the PR registration
* </p>
* <p>
* 4) testpartionedRegionBucketToNodeCreate - Tests the PR's BUCKET_2_NODE region creation
* </p>
*
* @author tnegi
*
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@Category(IntegrationTest.class)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@Category(IntegrationTest.class)
public class PartitionedRegionCreationJUnitTest
{
  // Counters shared across the creator threads; guarded by the lock objects below.
  static volatile int PRNumber = 0;
  static Region root = null;
  static volatile boolean increamentFlag = false;
  static final int TOTAL_THREADS = 10;
  static volatile int TOTAL_PR_CREATED = 0;
  static volatile int TOTAL_RETURNS = 0;
  static volatile int TOTAL_PR_CREATION_FAIL = 0;
  static final Object PR_CREATE = new Object();
  static final Object PR_CREATE_FAIL = new Object();
  static final Object PR_INCREMENT = new Object();
  static final Object PR_TOTAL_RETURNS = new Object();
  public boolean PRCreateDone = false;
  List PRRegionList = new ArrayList();
  LogWriter logger = null;
  private Object CREATE_COMPLETE_LOCK = new Object();
  private volatile boolean createComplete = false;

  @Before
  public void setUp() throws Exception
  {
    TOTAL_RETURNS = 0;
    if (logger == null) {
      logger = PartitionedRegionTestHelper.getLogger();
    }
  }

  /*
   * 1) Create 10 threads, each trying to create a partitioned region; 5 regions
   *    are created and the other 5 threads must see RegionExistsException.
   * 2) Tests that setting any Scope on a PR throws IllegalStateException.
   * 3) Tests redundancy < 0 and redundancy > 3.
   */
  @Test
  public void test000PartitionedRegionCreate()
  {
    createMultiplePartitionedRegions();
    verifyCreateResults();
    if (logger.fineEnabled()) {
      logger
          .fine(" PartitionedRegionCreationTest-testpartionedRegionCreate() Successfully Complete .. ");
    }
    final String regionname = "testPartionedRegionCreate";
    int localMaxMemory = 0;
    PartitionedRegion pr = null;
    // Test vanilla creation of a Partitioned Region w/o Scope
    try {
      AttributesFactory af = new AttributesFactory();
      af.setDataPolicy(DataPolicy.PARTITION);
      RegionAttributes ra = af.create();
      Cache cache = PartitionedRegionTestHelper.createCache();
      pr = (PartitionedRegion)cache.createRegion(regionname, ra);
    } finally {
      // BUGFIX: the unguarded pr.destroyRegion() threw NPE (masking the real
      // failure) whenever createRegion itself threw and pr was still null.
      if (pr != null) {
        pr.destroyRegion();
      }
    }
    // Assert that setting any scope throws IllegalStateException
    final Scope[] scopes = { Scope.LOCAL, Scope.DISTRIBUTED_ACK, Scope.DISTRIBUTED_NO_ACK, Scope.GLOBAL };
    for (int i = 0; i < scopes.length; i++) {
      try {
        AttributesFactory af = new AttributesFactory();
        af.setDataPolicy(DataPolicy.PARTITION);
        af.setScope(scopes[i]);
        RegionAttributes ra = af.create();
        Cache cache = PartitionedRegionTestHelper.createCache();
        pr = (PartitionedRegion)cache.createRegion(regionname, ra);
        fail("testpartionedRegionCreate() Expected IllegalStateException not thrown for Scope " + scopes[i]);
      } catch (IllegalStateException expected) {
        // expected: partitioned regions reject an explicit scope
      } finally {
        if (pr != null && !pr.isDestroyed()) {
          pr.destroyRegion();
        }
      }
    }
    // test for redundancy > 3
    // NOTE(review): unlike test001, no fail() if creation unexpectedly
    // succeeds here — preserved as-is; confirm whether that is intentional.
    int redundancy = 10;
    try {
      pr = (PartitionedRegion)PartitionedRegionTestHelper
          .createPartitionedRegion(regionname, String.valueOf(localMaxMemory),
              redundancy);
    }
    catch (IllegalStateException illex) {
      if (logger.fineEnabled()) {
        logger
            .fine("testpartionedRegionCreate() Got a correct exception-IllegalStateException for redundancy > 3 ");
      }
    }
    // test for redundancy < 0
    if (pr != null && !pr.isDestroyed()) {
      pr.destroyRegion();
    }
    redundancy = -5;
    try {
      pr = (PartitionedRegion)PartitionedRegionTestHelper
          .createPartitionedRegion(regionname, String.valueOf(200), redundancy);
      fail("testpartionedRegionCreate() Expected IllegalStateException not thrown for redundancy < 0 ");
    }
    catch (IllegalStateException illex) {
      if (logger.fineEnabled()) {
        logger
            .fine("testpartionedRegionCreate() Got a correct exception-IllegalStateException for redundancy < 0 ");
      }
    }
  }

  /**
   * Tests creation of a persistent partitioned region: vanilla creation works,
   * while a persistent accessor (local-max-memory == 0), a missing disk store,
   * a disk store on a non-persistent PR, any explicit scope, and out-of-range
   * redundancy all fail with the expected exception.
   */
  @Test
  public void test001PersistentPartitionedRegionCreate()
  {
    final String regionname = "testPersistentPartionedRegionCreate";
    PartitionedRegion pr = null;
    // Test vanilla creation of a Partitioned Region w/o Scope
    try {
      AttributesFactory af = new AttributesFactory();
      af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION);
      RegionAttributes ra = af.create();
      Cache cache = PartitionedRegionTestHelper.createCache();
      pr = (PartitionedRegion)cache.createRegion(regionname, ra);
    } finally {
      if (pr != null) {
        pr.destroyRegion();
      }
    }
    // Assert that an accessor (localMaxMem == 0) can't be persistent
    try {
      AttributesFactory af = new AttributesFactory();
      af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION);
      af.setPartitionAttributes(new PartitionAttributesFactory().setLocalMaxMemory(0).create());
      RegionAttributes ra = af.create();
      Cache cache = PartitionedRegionTestHelper.createCache();
      pr = (PartitionedRegion)cache.createRegion(regionname, ra);
      fail("testpartionedRegionCreate() Expected IllegalStateException not thrown");
    } catch (IllegalStateException expected) {
      assertEquals("Persistence is not allowed when local-max-memory is zero.",
          expected.getMessage());
    }
    // Assert that a region can't be created
    // if configured with a diskStoreName and the disk store has not be created.
    try {
      AttributesFactory af = new AttributesFactory();
      af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION);
      af.setDiskStoreName("nonexistentDiskStore");
      RegionAttributes ra = af.create();
      Cache cache = PartitionedRegionTestHelper.createCache();
      pr = (PartitionedRegion)cache.createRegion(regionname, ra);
      fail("testpartionedRegionCreate() Expected IllegalStateException not thrown");
    } catch (RuntimeException expected) {
      assertTrue(expected.getMessage().contains(LocalizedStrings.CacheCreation_DISKSTORE_NOTFOUND_0
          .toLocalizedString("nonexistentDiskStore")));
    }
    // Assert that you can't have a diskStoreName unless you are persistent or overflow.
    try {
      Cache cache = PartitionedRegionTestHelper.createCache();
      cache.createDiskStoreFactory().create("existentDiskStore");
      AttributesFactory af = new AttributesFactory();
      af.setDataPolicy(DataPolicy.PARTITION);
      af.setDiskStoreName("existentDiskStore");
      RegionAttributes ra = af.create();
      pr = (PartitionedRegion)cache.createRegion(regionname, ra);
      fail("testpartionedRegionCreate() Expected IllegalStateException not thrown");
    } catch (IllegalStateException expected) {
      assertEquals("Only regions with persistence or overflow to disk can specify DiskStore", expected.getMessage());
    }
    // Assert that setting any scope throws IllegalStateException
    final Scope[] scopes = { Scope.LOCAL, Scope.DISTRIBUTED_ACK, Scope.DISTRIBUTED_NO_ACK, Scope.GLOBAL };
    for (int i = 0; i < scopes.length; i++) {
      try {
        AttributesFactory af = new AttributesFactory();
        af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION);
        af.setScope(scopes[i]);
        RegionAttributes ra = af.create();
        Cache cache = PartitionedRegionTestHelper.createCache();
        pr = (PartitionedRegion)cache.createRegion(regionname, ra);
        fail("testpartionedRegionCreate() Expected IllegalStateException not thrown for Scope " + scopes[i]);
      } catch (IllegalStateException expected) {
        // expected: partitioned regions reject an explicit scope
      }
    }
    // test for redundancy > 3
    try {
      pr = (PartitionedRegion)PartitionedRegionTestHelper
          .createPartitionedRegion(regionname, String.valueOf(0), 4);
      fail("testpartionedRegionCreate() Expected IllegalStateException not thrown for redundancy > 3 ");
    }
    catch (IllegalStateException illex) {
      if (logger.fineEnabled()) {
        logger
            .fine("testpartionedRegionCreate() Got a correct exception-IllegalStateException for redundancy > 3 ");
      }
    }
    // test for redundancy < 0
    try {
      pr = (PartitionedRegion)PartitionedRegionTestHelper
          .createPartitionedRegion(regionname, String.valueOf(200), -1);
      fail("testpartionedRegionCreate() Expected IllegalStateException not thrown for redundancy < 0 ");
    }
    catch (IllegalStateException illex) {
      if (logger.fineEnabled()) {
        logger
            .fine("testpartionedRegionCreate() Got a correct exception-IllegalStateException for redundancy < 0 ");
      }
    }
  }

  /**
   * Test for initialization of PartitionedRegion: the PR root region must
   * exist, have distributed-ack scope and REPLICATE data policy.
   *
   * @throws RegionExistsException
   */
  @Test
  public void test002PartionedRegionInitialization() throws RegionExistsException
  {
    String PRName = "testpartionedRegionInitialization";
    PartitionedRegionTestHelper.createPartionedRegion(PRName);
    Region root = (PartitionedRegionTestHelper
        .getExistingRegion(PartitionedRegionHelper.PR_ROOT_REGION_NAME));
    if (root == null) {
      fail("testpartionedRegionInitialization() - the "
          + PartitionedRegionHelper.PR_ROOT_REGION_NAME + " do not exists");
    }
    RegionAttributes regionAttribs = root.getAttributes();
    Scope scope = regionAttribs.getScope();
    if (!scope.isDistributedAck()) {
      fail("testpartionedRegionInitialization() - the "
          + PartitionedRegionHelper.PR_ROOT_REGION_NAME
          + " scope is not distributed_ack");
    }
    assertEquals(DataPolicy.REPLICATE, regionAttribs.getDataPolicy());
    if (logger.fineEnabled()) {
      logger
          .fine("testpartionedRegionInitialization() Successfully Complete .. ");
    }
  }

  /**
   * Test for partitioned region registration: every PR created by
   * createMultiplePartitionedRegions() must have a PartitionRegionConfig
   * entry (keyed by its region identifier) in the PR root region.
   */
  @Test
  public void test003partionedRegionRegistration()
  {
    createMultiplePartitionedRegions();
    Region root = (PartitionedRegionTestHelper
        .getExistingRegion(PartitionedRegionHelper.PR_ROOT_REGION_NAME));
    Iterator itr = PRRegionList.iterator();
    while (itr.hasNext()) {
      Region region = (Region)itr.next();
      String name = ((PartitionedRegion)region).getRegionIdentifier();
      PartitionRegionConfig prConfig = (PartitionRegionConfig)root
          .get(name);
      if (prConfig == null) {
        fail("testpartionedRegionRegistration() - PartionedRegion - " + name
            + " configs do not exists in region - "
            + root.getName());
      }
    }
    if (logger.fineEnabled()) {
      logger
          .fine(" testpartionedRegionRegistration() Successfully Complete .. ");
    }
  }

  /**
   * Starts TOTAL_THREADS creator threads and blocks until every thread has
   * reported back via updateTotalReturns(). Runs at most once per instance
   * (guarded by PRCreateDone).
   */
  private void createMultiplePartitionedRegions()
  {
    if (PRCreateDone) {
      return;
    }
    int numthread = 0;
    while (numthread < TOTAL_THREADS) {
      PartionedRegionCreateThread pregionThread = new PartionedRegionCreateThread();
      pregionThread.start();
      numthread++;
    }
    // Standard wait loop: re-check the flag under the lock to survive
    // spurious wakeups and the race between the check and wait().
    // (The original had the catch's closing brace swallowed by its
    // end-of-line comment, compensated by a stray brace — cleaned up here.)
    while (!createComplete) {
      synchronized (CREATE_COMPLETE_LOCK) {
        if (!createComplete) {
          try {
            CREATE_COMPLETE_LOCK.wait();
          }
          catch (Exception ignored) {
            // no action — loop re-checks createComplete
          }
        }
      }
    }
    PRCreateDone = true;
  }

  /**
   * Verifies creation of partitioned regions: all threads returned, and
   * creations/failures split evenly (half the threads each).
   */
  private void verifyCreateResults()
  {
    if (TOTAL_RETURNS != TOTAL_THREADS) {
      fail("Failed -- Total thread returned is not same as number of threads created");
    }
    if (TOTAL_PR_CREATED != (TOTAL_THREADS / 2)) {
      fail("Failed -- Total Partioned Region created is not correct");
    }
    if (TOTAL_PR_CREATION_FAIL != (TOTAL_THREADS / 2)) {
      fail("Failed -- Total Partioned Region creation failures is not correct");
    }
  }

  /**
   * Thread that creates one partitioned region; records success, or an
   * expected RegionExistsException as a creation failure, then reports back.
   */
  public class PartionedRegionCreateThread extends Thread
  {
    public void run()
    {
      String prName = "PartitionedRegionCreationJUnitTest_" + getPRNumber();
      try {
        Region region = PartitionedRegionTestHelper
            .createPartionedRegion(prName);
        PRRegionList.add(region);
        if (logger.fineEnabled()) {
          logger
              .fine("PartitionedRegionCreationJUnitTest - partitioned region -"
                  + prName + "Created");
        }
        updatePRCreate();
      }
      catch (RegionExistsException rex) {
        if (logger.fineEnabled()) {
          logger.fine("PartitionedRegionCreationTest - Thread - "
              + Thread.currentThread().getName()
              + " Failed to create a PartitionedRegion. Region already exists");
        }
        updatePRCreateFail();
      }
      updateTotalReturns();
    }
  }

  /**
   * Increments and returns the PR number used for PR creation. Thread safe
   * function. The flag makes every second caller reuse the previous number,
   * so pairs of threads collide on the same region name by design.
   *
   * @return the PR number
   */
  protected int getPRNumber()
  {
    int retNum = 0;
    synchronized (PR_INCREMENT) {
      if (increamentFlag) {
        retNum = PRNumber;
        PRNumber++;
        increamentFlag = false;
      }
      else {
        increamentFlag = true;
      }
    }
    return retNum;
  }

  /** Thread-safe increment of the successful-creation counter. */
  protected void updatePRCreate()
  {
    synchronized (PR_CREATE) {
      TOTAL_PR_CREATED++;
    }
  }

  /** Thread-safe increment of the failed-creation counter. */
  protected void updatePRCreateFail()
  {
    synchronized (PR_CREATE_FAIL) {
      TOTAL_PR_CREATION_FAIL++;
    }
  }

  /**
   * Increments the total of creator threads that returned; when the last one
   * reports, signals the waiter in createMultiplePartitionedRegions().
   */
  protected void updateTotalReturns()
  {
    synchronized (PR_TOTAL_RETURNS) {
      TOTAL_RETURNS++;
      System.out.println("TOTAL_RETURNS is " + TOTAL_RETURNS);
    }
    if (TOTAL_RETURNS == TOTAL_THREADS) {
      synchronized (CREATE_COMPLETE_LOCK) {
        createComplete = true;
        CREATE_COMPLETE_LOCK.notifyAll();
      }
    }
  }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.kafka;
import java.util.concurrent.ExecutorService;
import kafka.message.MessageAndMetadata;
import org.apache.camel.Consumer;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.MultipleConsumersSupport;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.impl.DefaultEndpoint;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
/**
* The kafka component allows messages to be sent to (or consumed from) Apache Kafka brokers.
*/
@UriEndpoint(scheme = "kafka", title = "Kafka", syntax = "kafka:brokers", consumerClass = KafkaConsumer.class, label = "messaging")
public class KafkaEndpoint extends DefaultEndpoint implements MultipleConsumersSupport {
@UriParam
private KafkaConfiguration configuration = new KafkaConfiguration();
@UriParam(description = "If the option is true, then KafkaProducer will ignore the KafkaConstants.TOPIC header setting of the inbound message.", defaultValue = "false")
private boolean bridgeEndpoint;
    /**
     * Default no-arg constructor; the endpoint URI and component are expected
     * to be set afterwards (e.g. when created via XML/Spring configuration).
     */
    public KafkaEndpoint() {
    }
    /**
     * Creates the endpoint for the given URI, owned by the given component.
     */
    public KafkaEndpoint(String endpointUri, KafkaComponent component) {
        super(endpointUri, component);
    }
public KafkaConfiguration getConfiguration() {
if (configuration == null) {
configuration = createConfiguration();
}
return configuration;
}
    /**
     * Replaces the endpoint configuration wholesale; a null value causes
     * {@link #getConfiguration()} to lazily create a fresh default.
     */
    public void setConfiguration(KafkaConfiguration configuration) {
        this.configuration = configuration;
    }
    /**
     * Factory hook for the configuration; subclasses may override to supply
     * a customized {@link KafkaConfiguration}.
     */
    protected KafkaConfiguration createConfiguration() {
        return new KafkaConfiguration();
    }
@Override
public Consumer createConsumer(Processor processor) throws Exception {
KafkaConsumer consumer = new KafkaConsumer(this, processor);
configureConsumer(consumer);
return consumer;
}
@Override
public Producer createProducer() throws Exception {
String msgClassName = getConfiguration().getSerializerClass();
String keyClassName = getConfiguration().getKeySerializerClass();
if (msgClassName == null) {
msgClassName = KafkaConstants.KAFKA_DEFAULT_ENCODER;
}
if (keyClassName == null) {
keyClassName = msgClassName;
}
ClassLoader cl = getClass().getClassLoader();
Class<?> k;
try {
k = cl.loadClass(keyClassName);
} catch (ClassNotFoundException x) {
k = getCamelContext().getClassResolver().resolveMandatoryClass(keyClassName);
}
Class<?> v;
try {
v = cl.loadClass(msgClassName);
} catch (ClassNotFoundException x) {
v = getCamelContext().getClassResolver().resolveMandatoryClass(msgClassName);
}
return createProducer(k, v, this);
}
    /**
     * This endpoint is a singleton: one instance is shared per URI within
     * the CamelContext.
     */
    @Override
    public boolean isSingleton() {
        return true;
    }
    /**
     * Creates the fixed thread pool (sized by consumerStreams) that runs the
     * consumer streams for this endpoint's topic; managed by Camel's
     * ExecutorServiceManager.
     */
    public ExecutorService createExecutor() {
        return getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, "KafkaTopic[" + configuration.getTopic() + "]", configuration.getConsumerStreams());
    }
    /**
     * Builds a Camel {@link Exchange} from a consumed Kafka message: partition,
     * topic, offset and (when present) the key go into message headers; the raw
     * payload bytes become the body.
     *
     * @param mm the consumed message together with its metadata
     * @return a new exchange populated from the message
     */
    public Exchange createKafkaExchange(MessageAndMetadata<byte[], byte[]> mm) {
        Exchange exchange = super.createExchange();
        Message message = exchange.getIn();
        message.setHeader(KafkaConstants.PARTITION, mm.partition());
        message.setHeader(KafkaConstants.TOPIC, mm.topic());
        message.setHeader(KafkaConstants.OFFSET, mm.offset());
        if (mm.key() != null) {
            // NOTE(review): decodes the key with the platform default charset;
            // consider an explicit charset (e.g. UTF-8) — confirm producers'
            // key encoding before changing.
            message.setHeader(KafkaConstants.KEY, new String(mm.key()));
        }
        message.setBody(mm.message());
        return exchange;
    }
    /**
     * Factory hook for the typed producer.
     * NOTE(review): keyClass and valueClass are unused by this default
     * implementation — presumably retained for subclass overrides; confirm
     * before removing.
     */
    protected <K, V> KafkaProducer<K, V> createProducer(Class<K> keyClass, Class<V> valueClass, KafkaEndpoint endpoint) {
        return new KafkaProducer<K, V>(endpoint);
    }
// Delegated properties from the configuration
//-------------------------------------------------------------------------
public String getZookeeperConnect() {
return configuration.getZookeeperConnect();
}
public void setZookeeperConnect(String zookeeperConnect) {
configuration.setZookeeperConnect(zookeeperConnect);
}
public String getZookeeperHost() {
return configuration.getZookeeperHost();
}
public void setZookeeperHost(String zookeeperHost) {
configuration.setZookeeperHost(zookeeperHost);
}
public int getZookeeperPort() {
return configuration.getZookeeperPort();
}
public void setZookeeperPort(int zookeeperPort) {
configuration.setZookeeperPort(zookeeperPort);
}
public String getGroupId() {
return configuration.getGroupId();
}
public void setGroupId(String groupId) {
configuration.setGroupId(groupId);
}
public String getPartitioner() {
return configuration.getPartitioner();
}
public void setPartitioner(String partitioner) {
configuration.setPartitioner(partitioner);
}
public String getTopic() {
return configuration.getTopic();
}
public void setTopic(String topic) {
configuration.setTopic(topic);
}
public String getBrokers() {
return configuration.getBrokers();
}
public void setBrokers(String brokers) {
configuration.setBrokers(brokers);
}
public int getConsumerStreams() {
return configuration.getConsumerStreams();
}
public void setConsumerStreams(int consumerStreams) {
configuration.setConsumerStreams(consumerStreams);
}
public int getBatchSize() {
return configuration.getBatchSize();
}
public void setBatchSize(int batchSize) {
this.configuration.setBatchSize(batchSize);
}
public int getBarrierAwaitTimeoutMs() {
return configuration.getBarrierAwaitTimeoutMs();
}
public void setBarrierAwaitTimeoutMs(int barrierAwaitTimeoutMs) {
this.configuration.setBarrierAwaitTimeoutMs(barrierAwaitTimeoutMs);
}
public int getConsumersCount() {
return this.configuration.getConsumersCount();
}
public void setConsumersCount(int consumersCount) {
this.configuration.setConsumersCount(consumersCount);
}
public void setConsumerTimeoutMs(int consumerTimeoutMs) {
configuration.setConsumerTimeoutMs(consumerTimeoutMs);
}
public void setSerializerClass(String serializerClass) {
configuration.setSerializerClass(serializerClass);
}
public void setQueueBufferingMaxMessages(int queueBufferingMaxMessages) {
configuration.setQueueBufferingMaxMessages(queueBufferingMaxMessages);
}
public int getFetchWaitMaxMs() {
return configuration.getFetchWaitMaxMs();
}
public Integer getZookeeperConnectionTimeoutMs() {
return configuration.getZookeeperConnectionTimeoutMs();
}
public void setZookeeperConnectionTimeoutMs(Integer zookeeperConnectionTimeoutMs) {
configuration.setZookeeperConnectionTimeoutMs(zookeeperConnectionTimeoutMs);
}
public void setMessageSendMaxRetries(int messageSendMaxRetries) {
configuration.setMessageSendMaxRetries(messageSendMaxRetries);
}
public int getQueueBufferingMaxMs() {
return configuration.getQueueBufferingMaxMs();
}
public void setRequestRequiredAcks(short requestRequiredAcks) {
configuration.setRequestRequiredAcks(requestRequiredAcks);
}
public Integer getRebalanceBackoffMs() {
return configuration.getRebalanceBackoffMs();
}
public void setQueueEnqueueTimeoutMs(int queueEnqueueTimeoutMs) {
configuration.setQueueEnqueueTimeoutMs(queueEnqueueTimeoutMs);
}
public int getFetchMessageMaxBytes() {
return configuration.getFetchMessageMaxBytes();
}
public int getQueuedMaxMessages() {
return configuration.getQueuedMaxMessageChunks();
}
public int getAutoCommitIntervalMs() {
return configuration.getAutoCommitIntervalMs();
}
public void setSocketTimeoutMs(int socketTimeoutMs) {
configuration.setSocketTimeoutMs(socketTimeoutMs);
}
public void setAutoCommitIntervalMs(int autoCommitIntervalMs) {
configuration.setAutoCommitIntervalMs(autoCommitIntervalMs);
}
public void setRequestTimeoutMs(int requestTimeoutMs) {
configuration.setRequestTimeoutMs(requestTimeoutMs);
}
public void setCompressedTopics(String compressedTopics) {
configuration.setCompressedTopics(compressedTopics);
}
public int getSocketReceiveBufferBytes() {
return configuration.getSocketReceiveBufferBytes();
}
public void setSendBufferBytes(int sendBufferBytes) {
configuration.setSendBufferBytes(sendBufferBytes);
}
public void setFetchMessageMaxBytes(int fetchMessageMaxBytes) {
configuration.setFetchMessageMaxBytes(fetchMessageMaxBytes);
}
public int getRefreshLeaderBackoffMs() {
return configuration.getRefreshLeaderBackoffMs();
}
public void setFetchWaitMaxMs(int fetchWaitMaxMs) {
configuration.setFetchWaitMaxMs(fetchWaitMaxMs);
}
public int getTopicMetadataRefreshIntervalMs() {
return configuration.getTopicMetadataRefreshIntervalMs();
}
public void setZookeeperSessionTimeoutMs(int zookeeperSessionTimeoutMs) {
configuration.setZookeeperSessionTimeoutMs(zookeeperSessionTimeoutMs);
}
public Integer getConsumerTimeoutMs() {
return configuration.getConsumerTimeoutMs();
}
    // --- Delegating accessors ---------------------------------------------
    // Every getter/setter below forwards directly to the Kafka endpoint
    // 'configuration' object declared earlier in this class; they exist so the
    // options can be set as endpoint URI parameters.
    public void setAutoCommitEnable(boolean autoCommitEnable) {
        configuration.setAutoCommitEnable(autoCommitEnable);
    }
    public String getCompressionCodec() {
        return configuration.getCompressionCodec();
    }
    public void setProducerType(String producerType) {
        configuration.setProducerType(producerType);
    }
    public String getClientId() {
        return configuration.getClientId();
    }
    public int getFetchMinBytes() {
        return configuration.getFetchMinBytes();
    }
    public String getAutoOffsetReset() {
        return configuration.getAutoOffsetReset();
    }
    public void setRefreshLeaderBackoffMs(int refreshLeaderBackoffMs) {
        configuration.setRefreshLeaderBackoffMs(refreshLeaderBackoffMs);
    }
    public void setAutoOffsetReset(String autoOffsetReset) {
        configuration.setAutoOffsetReset(autoOffsetReset);
    }
    public void setConsumerId(String consumerId) {
        configuration.setConsumerId(consumerId);
    }
    public int getRetryBackoffMs() {
        return configuration.getRetryBackoffMs();
    }
    public int getRebalanceMaxRetries() {
        return configuration.getRebalanceMaxRetries();
    }
    // Returns a boxed Boolean (may be null when never configured) although the
    // matching setter takes a primitive boolean.
    public Boolean isAutoCommitEnable() {
        return configuration.isAutoCommitEnable();
    }
    public void setQueueBufferingMaxMs(int queueBufferingMaxMs) {
        configuration.setQueueBufferingMaxMs(queueBufferingMaxMs);
    }
    public void setRebalanceMaxRetries(int rebalanceMaxRetries) {
        configuration.setRebalanceMaxRetries(rebalanceMaxRetries);
    }
    public int getZookeeperSessionTimeoutMs() {
        return configuration.getZookeeperSessionTimeoutMs();
    }
    public void setKeySerializerClass(String keySerializerClass) {
        configuration.setKeySerializerClass(keySerializerClass);
    }
    public void setCompressionCodec(String compressionCodec) {
        configuration.setCompressionCodec(compressionCodec);
    }
    public void setClientId(String clientId) {
        configuration.setClientId(clientId);
    }
    public int getSocketTimeoutMs() {
        return configuration.getSocketTimeoutMs();
    }
    public String getCompressedTopics() {
        return configuration.getCompressedTopics();
    }
    public int getZookeeperSyncTimeMs() {
        return configuration.getZookeeperSyncTimeMs();
    }
    public void setSocketReceiveBufferBytes(int socketReceiveBufferBytes) {
        configuration.setSocketReceiveBufferBytes(socketReceiveBufferBytes);
    }
    public int getQueueEnqueueTimeoutMs() {
        return configuration.getQueueEnqueueTimeoutMs();
    }
    public int getQueueBufferingMaxMessages() {
        return configuration.getQueueBufferingMaxMessages();
    }
    public void setZookeeperSyncTimeMs(int zookeeperSyncTimeMs) {
        configuration.setZookeeperSyncTimeMs(zookeeperSyncTimeMs);
    }
    public String getKeySerializerClass() {
        return configuration.getKeySerializerClass();
    }
    public void setTopicMetadataRefreshIntervalMs(int topicMetadataRefreshIntervalMs) {
        configuration.setTopicMetadataRefreshIntervalMs(topicMetadataRefreshIntervalMs);
    }
    public void setBatchNumMessages(int batchNumMessages) {
        configuration.setBatchNumMessages(batchNumMessages);
    }
    public int getSendBufferBytes() {
        return configuration.getSendBufferBytes();
    }
    // NOTE(review): boxed Integer parameter (unlike the other int-based
    // setters here); a null argument would NPE if the delegate unboxes it —
    // TODO confirm whether nullability is intentional.
    public void setRebalanceBackoffMs(Integer rebalanceBackoffMs) {
        configuration.setRebalanceBackoffMs(rebalanceBackoffMs);
    }
    // NOTE(review): name mismatch — this setter is called 'queuedMaxMessages'
    // but delegates to 'setQueuedMaxMessageChunks', and no matching
    // getQueuedMaxMessages() is visible in this class. Renaming either side
    // would break existing endpoint URIs, so confirm the intended option name
    // before changing it.
    public void setQueuedMaxMessages(int queuedMaxMessages) {
        configuration.setQueuedMaxMessageChunks(queuedMaxMessages);
    }
    // More straight delegations to the 'configuration' object (see above).
    public void setRetryBackoffMs(int retryBackoffMs) {
        configuration.setRetryBackoffMs(retryBackoffMs);
    }
    public int getBatchNumMessages() {
        return configuration.getBatchNumMessages();
    }
    public short getRequestRequiredAcks() {
        return configuration.getRequestRequiredAcks();
    }
    public String getProducerType() {
        return configuration.getProducerType();
    }
    public String getConsumerId() {
        return configuration.getConsumerId();
    }
    public int getMessageSendMaxRetries() {
        return configuration.getMessageSendMaxRetries();
    }
    public void setFetchMinBytes(int fetchMinBytes) {
        configuration.setFetchMinBytes(fetchMinBytes);
    }
    public String getSerializerClass() {
        return configuration.getSerializerClass();
    }
    public int getRequestTimeoutMs() {
        return configuration.getRequestTimeoutMs();
    }
    // This endpoint always reports that multiple consumers are supported.
    @Override
    public boolean isMultipleConsumersSupported() {
        return true;
    }
    // 'bridgeEndpoint' is a field on this endpoint itself, not part of the
    // 'configuration' delegate like the accessors above.
    public boolean isBridgeEndpoint() {
        return bridgeEndpoint;
    }
    public void setBridgeEndpoint(boolean bridgeEndpoint) {
        this.bridgeEndpoint = bridgeEndpoint;
    }
    // Offset-storage related delegations to the 'configuration' object.
    public String getOffsetsStorage() {
        return configuration.getOffsetsStorage();
    }
    public void setOffsetsStorage(String offsetsStorage) {
        configuration.setOffsetsStorage(offsetsStorage);
    }
    // Boxed Boolean: may be null when the option was never set.
    public Boolean isDualCommitEnabled() {
        return configuration.isDualCommitEnabled();
    }
    public void setDualCommitEnabled(boolean dualCommitEnabled) {
        configuration.setDualCommitEnabled(dualCommitEnabled);
    }
}
|
|
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.repository.dialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.MenuDetectEvent;
import org.eclipse.swt.events.MenuDetectListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Dialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeItem;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.ui.core.ConstUI;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.dialog.EnterStringDialog;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.gui.WindowProperty;
import org.pentaho.di.ui.repository.RepositoryDirectoryUI;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
/**
 * Modal dialog that shows the directory tree of a repository and lets the
 * user pick a directory. A context menu on the tree also allows creating a
 * new sub-directory (when the repository is not read-only).
 *
 * @author Matt
 * @since 18-05-2003
 *
 */
public class SelectDirectoryDialog extends Dialog
{
    private static Class<?> PKG = RepositoryDialogInterface.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$
    private PropsUI props;
    private Repository rep;
    private Shell shell;
    private Tree wTree;
    private TreeItem tiTree;
    private Button wOK;
    private Button wRefresh;
    private Button wCancel;
    // Directory picked by the user; stays null when the dialog is cancelled.
    private RepositoryDirectoryInterface selection;
    private Color dircolor;
    // Root of the repository directory tree, loaded in open().
    private RepositoryDirectoryInterface repositoryTree;
    // True when the repository security provider forbids modifications.
    private boolean readOnly;
    /**
     * @param parent parent shell
     * @param style  SWT style flags for the dialog
     * @param rep    repository whose directory tree is shown
     */
    public SelectDirectoryDialog(Shell parent, int style, Repository rep)
    {
        super(parent, style);
        this.props = PropsUI.getInstance();
        this.rep = rep;
        selection = null;
        readOnly = rep.getSecurityProvider().isReadOnly();
    }
    /**
     * Opens the dialog and blocks until it is closed.
     *
     * @return the selected repository directory, or null when the user
     *         cancelled or the directory tree could not be loaded
     */
    public RepositoryDirectoryInterface open()
    {
        dircolor = GUIResource.getInstance().getColorDirectory();
        Shell parent = getParent();
        shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN);
        props.setLook(shell);
        shell.setImage(GUIResource.getInstance().getImageConnection());
        shell.setText(BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.Main.Title"));
        FormLayout formLayout = new FormLayout();
        formLayout.marginWidth = Const.FORM_MARGIN;
        formLayout.marginHeight = Const.FORM_MARGIN;
        shell.setLayout(formLayout);
        // Tree
        wTree = new Tree(shell, SWT.SINGLE | SWT.BORDER);
        props.setLook(wTree);
        try
        {
            repositoryTree = rep.loadRepositoryDirectoryTree();
        }
        catch (KettleException e)
        {
            new ErrorDialog(shell,
                BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.ErrorRefreshingDirectoryTree.Title"),
                BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.ErrorRefreshingDirectoryTree.Message"), e);
            return null;
        }
        if (!getData())
            return null;
        // Buttons
        wOK = new Button(shell, SWT.PUSH);
        wOK.setText(BaseMessages.getString(PKG, "System.Button.OK"));
        wRefresh = new Button(shell, SWT.PUSH);
        wRefresh.setText(BaseMessages.getString(PKG, "System.Button.Refresh"));
        wCancel = new Button(shell, SWT.PUSH);
        wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
        FormData fdTree = new FormData();
        FormData fdOK = new FormData();
        FormData fdRefresh = new FormData();
        FormData fdCancel = new FormData();
        int margin = 10;
        // Tree fills the dialog, leaving 50px for the button row at the bottom
        fdTree.left = new FormAttachment(0, 0);
        fdTree.top = new FormAttachment(0, 0);
        fdTree.right = new FormAttachment(100, 0);
        fdTree.bottom = new FormAttachment(100, -50);
        wTree.setLayoutData(fdTree);
        fdOK.left = new FormAttachment(wTree, 0, SWT.CENTER);
        fdOK.bottom = new FormAttachment(100, -margin);
        wOK.setLayoutData(fdOK);
        fdRefresh.left = new FormAttachment(wOK, 10);
        fdRefresh.bottom = new FormAttachment(100, -margin);
        wRefresh.setLayoutData(fdRefresh);
        fdCancel.left = new FormAttachment(wRefresh, 10);
        fdCancel.bottom = new FormAttachment(100, -margin);
        wCancel.setLayoutData(fdCancel);
        // Add listeners
        wCancel.addListener(SWT.Selection, new Listener()
        {
            public void handleEvent(Event e)
            {
                dispose();
            }
        });
        wOK.addListener(SWT.Selection, new Listener()
        {
            public void handleEvent(Event e)
            {
                handleOK();
            }
        });
        // Double-clicking a tree row behaves like pressing OK
        wTree.addSelectionListener(new SelectionAdapter()
        {
            public void widgetDefaultSelected(SelectionEvent arg0)
            {
                handleOK();
            }
        });
        wRefresh.addListener(SWT.Selection, new Listener()
        {
            public void handleEvent(Event e)
            {
                getData();
            }
        });
        wTree.addMenuDetectListener(new MenuDetectListener()
        {
            public void menuDetected(MenuDetectEvent e)
            {
                setTreeMenu();
            }
        });
        BaseStepDialog.setSize(shell);
        shell.open();
        // Modal event loop: run until the shell is disposed
        Display display = parent.getDisplay();
        while (!shell.isDisposed())
        {
            if (!display.readAndDispatch())
                display.sleep();
        }
        return selection;
    }
    /**
     * (Re)builds the tree widget from the loaded repository directory tree.
     *
     * @return always true (callers still test the result for historical reasons)
     */
    private boolean getData()
    {
        // Clear the tree top entry
        if (tiTree != null && !tiTree.isDisposed())
            tiTree.dispose();
        tiTree = new TreeItem(wTree, SWT.NONE);
        tiTree.setImage(GUIResource.getInstance().getImageFolderConnections());
        RepositoryDirectoryUI.getDirectoryTree(tiTree, dircolor, repositoryTree);
        tiTree.setExpanded(true);
        return true;
    }
    /**
     * Builds and installs the context menu for the current tree selection.
     */
    public void setTreeMenu()
    {
        Menu mTree = null;
        TreeItem ti[] = wTree.getSelection(); // SWT.SINGLE tree: at most one selected item
        if (ti.length == 1)
        {
            mTree = new Menu(wTree);
            /*
             * NEW Sub-directory
             */
            MenuItem miNew = new MenuItem(mTree, SWT.CASCADE);
            miNew.setText(BaseMessages.getString(PKG, "SelectDirectoryDialog.PopupMenu.Directory.New"));
            miNew.addSelectionListener(new SelectionAdapter()
            {
                public void widgetSelected(SelectionEvent e)
                {
                    if (!readOnly)
                    {
                        TreeItem ti = wTree.getSelection()[0];
                        String str[] = ConstUI.getTreeStrings(ti);
                        //
                        // In which directory do we want create a subdirectory?
                        //
                        RepositoryDirectoryInterface dir = repositoryTree.findDirectory(str);
                        if (dir != null)
                        {
                            //
                            // What's the name of the new directory?
                            //
                            EnterStringDialog etd = new EnterStringDialog(shell,
                                BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.EnterDirectoryName.Title"),
                                BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.EnterDirectoryName.Message"),
                                BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.EnterDirectoryName.Default"));
                            String newdir = etd.open();
                            if (newdir != null)
                            {
                                RepositoryDirectory subdir = new RepositoryDirectory(dir, newdir);
                                try
                                {
                                    rep.saveRepositoryDirectory(subdir);
                                    dir.addSubdirectory(subdir);
                                    TreeItem tiNew = new TreeItem(ti, SWT.NONE);
                                    tiNew.setText(newdir);
                                    tiNew.setImage(GUIResource.getInstance().getImageArrow());
                                    wTree.setSelection(new TreeItem[] { tiNew });
                                }
                                catch (Exception exception)
                                {
                                    // BUGFIX: the Title and Message keys were previously
                                    // passed swapped, showing the message text as the
                                    // dialog title (compare the ErrorDialog in open()).
                                    new ErrorDialog(shell,
                                        BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.UnableToCreateDirectory.Title"),
                                        BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.UnableToCreateDirectory.Message"),
                                        exception);
                                }
                            }
                        }
                        else
                        {
                            MessageBox mb = new MessageBox(shell, SWT.ICON_ERROR | SWT.OK);
                            mb.setMessage(BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.UnableToLocateDirectory.Message"));
                            mb.setText(BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.UnableToLocateDirectory.Title"));
                            mb.open();
                        }
                    }
                    else
                    {
                        MessageBox mb = new MessageBox(shell, SWT.ICON_ERROR | SWT.OK);
                        mb.setMessage(BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.PermissionDenied.Message1") + rep.getUserInfo().getLogin() + BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.PermissionDenied.Message2"));
                        mb.setText(BaseMessages.getString(PKG, "SelectDirectoryDialog.Dialog.PermissionDenied.Title"));
                        mb.open();
                    }
                }
            });
            /*
             * RENAME / DELETE directory.
             * NOTE(review): these two items have no selection listeners wired
             * up, so clicking them does nothing — confirm whether the handlers
             * were lost or are intentionally unimplemented.
             */
            MenuItem miRen = new MenuItem(mTree, SWT.CASCADE);
            miRen.setText(BaseMessages.getString(PKG, "SelectDirectoryDialog.PopupMenu.Directory.Rename"));
            MenuItem miDel = new MenuItem(mTree, SWT.CASCADE);
            miDel.setText(BaseMessages.getString(PKG, "SelectDirectoryDialog.PopupMenu.Directory.Delete"));
        }
        wTree.setMenu(mTree);
    }
    /**
     * Saves the window geometry and disposes the shell.
     */
    public void dispose()
    {
        props.setScreen(new WindowProperty(shell));
        shell.dispose();
    }
    /**
     * Records the currently selected directory as the result and closes the
     * dialog. Does nothing when no single row is selected.
     */
    public void handleOK()
    {
        TreeItem ti[] = wTree.getSelection();
        if (ti.length == 1)
        {
            String tree[] = ConstUI.getTreeStrings(ti[0]);
            selection = repositoryTree.findDirectory(tree);
            dispose();
        }
    }
}
|
|
/*
* Copyright 2014 Click Travel Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.clicktravel.infrastructure.persistence.inmemory.database;
import static com.clicktravel.common.random.Randoms.*;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import java.util.*;
import org.hamcrest.core.Is;
import org.junit.Before;
import org.junit.Test;
import org.mockito.internal.util.collections.Sets;
import com.clicktravel.cheddar.infrastructure.persistence.database.GeneratedKeyHolder;
import com.clicktravel.cheddar.infrastructure.persistence.database.ItemId;
import com.clicktravel.cheddar.infrastructure.persistence.database.SequenceKeyGenerator;
import com.clicktravel.cheddar.infrastructure.persistence.database.configuration.*;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.ItemConstraintViolationException;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.NonExistentItemException;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.NonUniqueResultException;
import com.clicktravel.cheddar.infrastructure.persistence.database.query.*;
import com.clicktravel.common.random.Randoms;
public class InMemoryDatabaseTemplateTest {
    // Schema under test; rebuilt for every test in init().
    private DatabaseSchemaHolder databaseSchemaHolder;
    // Shared generator of random stub items.
    private final static InMemoryDbDataGenerator dataGenerator = new InMemoryDbDataGenerator();
    // Attribute name used by the AttributeQuery-based tests.
    private static final String STRING_PROPERTY = "stringProperty";
    // Ids of created items; only written to in this chunk — appears unused,
    // TODO confirm against the rest of the class before removing.
    private final Collection<String> createdItemIds = new ArrayList<>();
@Before
public void init() throws Exception {
final Collection<ItemConfiguration> itemConfigurations = new ArrayList<>();
final ItemConfiguration stubItemConfiguration = new ItemConfiguration(StubItem.class,
InMemoryDbDataGenerator.STUB_ITEM_TABLE_NAME);
final ParentItemConfiguration stubParentItemConfiguration = new ParentItemConfiguration(StubParentItem.class,
InMemoryDbDataGenerator.STUB_ITEM_TABLE_NAME);
final ItemConfiguration stubItemWithRangeConfiguration = new ItemConfiguration(StubWithRangeItem.class,
InMemoryDbDataGenerator.STUB_ITEM_WITH_RANGE_TABLE_NAME,
new CompoundPrimaryKeyDefinition("id", "supportingId"));
itemConfigurations.add(stubItemConfiguration);
itemConfigurations.add(stubParentItemConfiguration);
itemConfigurations.add(new VariantItemConfiguration(stubParentItemConfiguration, StubVariantItem.class, "a"));
itemConfigurations
.add(new VariantItemConfiguration(stubParentItemConfiguration, StubVariantTwoItem.class, "b"));
itemConfigurations.add(stubItemWithRangeConfiguration);
databaseSchemaHolder = new DatabaseSchemaHolder(InMemoryDbDataGenerator.UNIT_TEST_SCHEMA_NAME,
itemConfigurations);
}
@Test
public void shouldConstructInMemeoryDataTemplate_withDataBaseSchemaHolder() {
// Given
final DatabaseSchemaHolder mockDatabaseSchemaHolder = mock(DatabaseSchemaHolder.class);
// When
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(mockDatabaseSchemaHolder);
// Then
assertNotNull(databaseTemplate);
}
@Test
public void shouldFetch_withKeySetQuery() {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final int itemCount = 1 + Randoms.randomInt(3);
final HashSet<StubItem> items = new HashSet<>();
final Set<ItemId> itemIds = new HashSet<>();
for (int n = 0; n < itemCount; n++) {
final StubItem item = dataGenerator.randomStubItem();
databaseTemplate.create(item);
items.add(item);
itemIds.add(new ItemId(item.getId()));
}
final KeySetQuery query = new KeySetQuery(itemIds);
// When
final Collection<StubItem> returnedItems = databaseTemplate.fetch(query, StubItem.class);
// Then
assertNotNull(returnedItems);
assertEquals(itemCount, returnedItems.size());
final HashSet<StubItem> returnedItemSet = new HashSet<>(returnedItems);
assertTrue(returnedItemSet.equals(items));
}
@Test
public void shouldFetch_withKeySetQueryWithCompoundPk() {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final int itemCount = 1 + Randoms.randomInt(3);
final HashSet<StubWithRangeItem> items = new HashSet<>();
final Set<ItemId> itemIds = new HashSet<>();
for (int n = 0; n < itemCount; n++) {
final StubWithRangeItem item = dataGenerator.randomStubWithRangeItem();
databaseTemplate.create(item);
items.add(item);
itemIds.add(new ItemId(item.getId(), item.getSupportingId()));
}
final KeySetQuery query = new KeySetQuery(itemIds);
// When
final Collection<StubWithRangeItem> returnedItems = databaseTemplate.fetch(query, StubWithRangeItem.class);
// Then
assertNotNull(returnedItems);
assertEquals(itemCount, returnedItems.size());
final HashSet<StubWithRangeItem> returnedItemSet = new HashSet<>(returnedItems);
assertTrue(returnedItemSet.equals(items));
}
@Test
public void shouldFetch_withNoIds() {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final Set<ItemId> itemIds = new HashSet<>();
final KeySetQuery query = new KeySetQuery(itemIds);
// When
final Collection<StubItem> returnedItems = databaseTemplate.fetch(query, StubItem.class);
// Then
assertNotNull(returnedItems);
assertEquals(0, returnedItems.size());
}
@Test
public void shouldFetch_withUnknownIds() {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final Set<ItemId> unknownIds = new HashSet<>();
final int idCount = 1 + Randoms.randomInt(5);
for (int n = 0; n < idCount; n++) {
unknownIds.add(new ItemId(Randoms.randomId()));
}
final KeySetQuery query = new KeySetQuery(unknownIds);
// When
final Collection<StubItem> returnedItems = databaseTemplate.fetch(query, StubItem.class);
// Then
assertNotNull(returnedItems);
assertEquals(0, returnedItems.size());
}
@Test
public void shouldFetch_withRandomKnownAndUnknownIds() {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final int itemCount = 1 + Randoms.randomInt(3);
final Set<ItemId> itemIds = new HashSet<>();
for (int n = 0; n < itemCount; n++) {
final StubItem item = dataGenerator.randomStubItem();
databaseTemplate.create(item);
itemIds.add(new ItemId(item.getId()));
}
final Set<ItemId> unknownIds = new HashSet<>();
final int idCount = 1 + Randoms.randomInt(5);
for (int n = 0; n < idCount; n++) {
unknownIds.add(new ItemId(Randoms.randomId()));
}
final HashSet<ItemId> knownAndUnknownIds = new HashSet<>();
knownAndUnknownIds.addAll(itemIds);
knownAndUnknownIds.addAll(unknownIds);
final KeySetQuery query = new KeySetQuery(knownAndUnknownIds);
// When
final Collection<StubItem> returnedItems = databaseTemplate.fetch(query, StubItem.class);
// Then
assertNotNull(returnedItems);
assertEquals(itemIds.size(), returnedItems.size());
final Collection<ItemId> returnedItemIds = new ArrayList<>();
for (final StubItem stubItem : returnedItems) {
returnedItemIds.add(new ItemId(stubItem.getId()));
}
for (final ItemId itemId : itemIds) {
assertTrue(returnedItemIds.contains(itemId));
}
for (final ItemId unknownId : unknownIds) {
assertFalse(returnedItemIds.contains(unknownId));
}
}
@Test
public void shouldUpdate_withSingleItem() {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubItem createdItem = dataGenerator.randomStubItem();
databaseTemplate.create(createdItem);
final Long originalVersion = createdItem.getVersion();
final String stringProperty = randomString(10);
final String stringProperty2 = randomString(10);
final Set<String> newStringSetProperty = Sets.newSet(randomString(10), randomString(10), randomString(10));
createdItem.setStringProperty(stringProperty);
createdItem.setStringProperty2(stringProperty2);
createdItem.setStringSetProperty(newStringSetProperty);
final Long newVersion = originalVersion + 1;
// When
final StubItem updatedItem = databaseTemplate.update(createdItem);
// Then
assertEquals(newVersion, updatedItem.getVersion());
assertEquals(createdItem.getId(), updatedItem.getId());
assertEquals(stringProperty, updatedItem.getStringProperty());
assertEquals(stringProperty2, updatedItem.getStringProperty2());
assertEquals(newStringSetProperty, updatedItem.getStringSetProperty());
}
@Test
public void shouldUpdate_withSingleItemWithCompoundPk() {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubWithRangeItem createdItem = dataGenerator.randomStubWithRangeItem();
databaseTemplate.create(createdItem);
final Long originalVersion = createdItem.getVersion();
final String stringProperty = randomString(10);
final boolean booleanProperty = randomBoolean();
final Set<String> newStringSetProperty = Sets.newSet(randomString(10), randomString(10), randomString(10));
createdItem.setStringProperty(stringProperty);
createdItem.setBooleanProperty(booleanProperty);
createdItem.setStringSetProperty(newStringSetProperty);
final Long newVersion = originalVersion + 1;
// When
final StubWithRangeItem updatedItem = databaseTemplate.update(createdItem);
// Then
assertEquals(newVersion, updatedItem.getVersion());
assertEquals(createdItem.getId(), updatedItem.getId());
assertEquals(stringProperty, updatedItem.getStringProperty());
assertEquals(booleanProperty, updatedItem.isBooleanProperty());
assertEquals(newStringSetProperty, updatedItem.getStringSetProperty());
}
@Test
public void shouldFetch_withAttributeQuery() throws Exception {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubItem createdItem = dataGenerator.randomStubItem();
databaseTemplate.create(createdItem);
final String stringProperty = createdItem.getStringProperty();
final StubItem stubItem = new StubItem();
stubItem.setStringProperty(stringProperty);
final Query query = new AttributeQuery(STRING_PROPERTY, new Condition(Operators.EQUALS, stringProperty));
// When
final Collection<StubItem> itemResults = databaseTemplate.fetch(query, StubItem.class);
// Then
assertNotNull(itemResults);
assertEquals(1, itemResults.size());
assertEquals(createdItem, itemResults.iterator().next());
}
@Test
public void shouldGetEmptySet_withNullAttributeQuery() {
// Given
final String stringProperty = null;
final Query query = new AttributeQuery(STRING_PROPERTY, new Condition(Operators.EQUALS, stringProperty));
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
// When
final Collection<StubItem> itemResults = databaseTemplate.fetch(query, StubItem.class);
// Then
assertNotNull(itemResults);
assertEquals(0, itemResults.size());
}
@Test
public void shouldGetEmptySet_withEmptyAttributeQuery() {
// Given
final String stringProperty = "";
final Query query = new AttributeQuery(STRING_PROPERTY, new Condition(Operators.EQUALS, stringProperty));
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
// When
final Collection<StubItem> itemResults = databaseTemplate.fetch(query, StubItem.class);
// Then
assertNotNull(itemResults);
assertEquals(0, itemResults.size());
}
@Test
public void shouldFetch_withAttributeQueryAndMultipleItems() throws Exception {
// Given
final String stringProperty = randomString(10);
final AttributeQuery query = new AttributeQuery(STRING_PROPERTY,
new Condition(Operators.EQUALS, stringProperty));
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubItem createdItem1 = dataGenerator.stubItemWithStringProperty(stringProperty);
final StubItem createdItem2 = dataGenerator.stubItemWithStringProperty(stringProperty);
databaseTemplate.create(createdItem1);
databaseTemplate.create(createdItem2);
// When
final Collection<StubItem> itemResults = databaseTemplate.fetch(query, StubItem.class);
// Then
assertNotNull(itemResults);
assertEquals(2, itemResults.size());
assertThat(itemResults, hasItems(createdItem1, createdItem2));
}
@Test
public void shouldFetchUnique_withAttributeQuery() throws Exception {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubItem createdItem = dataGenerator.randomStubItem();
databaseTemplate.create(createdItem);
final String property = createdItem.getStringProperty();
final StubItem stubItem = new StubItem();
stubItem.setStringProperty(property);
final AttributeQuery query = new AttributeQuery(STRING_PROPERTY, new Condition(Operators.EQUALS, property));
// When
final StubItem itemResult = databaseTemplate.fetchUnique(query, StubItem.class);
// Then
assertNotNull(itemResult);
assertEquals(createdItem, itemResult);
}
@Test
public void shouldNotFetchUnique_withMultipleMatches() {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final String stringProperty = randomString(10);
final StubItem createdItem1 = dataGenerator.stubItemWithStringProperty(stringProperty);
final StubItem createdItem2 = dataGenerator.stubItemWithStringProperty(stringProperty);
databaseTemplate.create(createdItem1);
databaseTemplate.create(createdItem2);
final AttributeQuery query = new AttributeQuery(STRING_PROPERTY,
new Condition(Operators.EQUALS, stringProperty));
// When
NonUniqueResultException expectedException = null;
try {
databaseTemplate.fetchUnique(query, StubItem.class);
} catch (final NonUniqueResultException e) {
expectedException = e;
}
// Then
assertNotNull(expectedException);
}
@Test
public void shouldReadItem_withNullValues() throws Exception {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubItem createdStubItem = dataGenerator.stubItemWithNullValues();
databaseTemplate.create(createdStubItem);
final ItemId itemId = new ItemId(createdStubItem.getId());
// When
final StubItem item = databaseTemplate.read(itemId, StubItem.class);
// Then
assertEquals(createdStubItem, item);
}
@Test
public void shouldDeleteItem_withItem() throws Exception {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubItem createdItem = dataGenerator.randomStubItem();
databaseTemplate.create(createdItem);
// When
databaseTemplate.delete(createdItem);
// Then
NonExistentItemException actualException = null;
try {
databaseTemplate.read(new ItemId(createdItem.getId()), StubItem.class);
} catch (final NonExistentItemException e) {
actualException = e;
}
assertNotNull(actualException);
}
@Test
public void shouldDeleteItem_withItemWithCompoundPk() throws Exception {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubWithRangeItem createdItem = dataGenerator.randomStubWithRangeItem();
databaseTemplate.create(createdItem);
// When
databaseTemplate.delete(createdItem);
// Then
NonExistentItemException actualException = null;
try {
databaseTemplate.read(new ItemId(createdItem.getId()), StubItem.class);
} catch (final NonExistentItemException e) {
actualException = e;
}
assertNotNull(actualException);
}
    @Test
    public void shouldCreateItem_withVariantItem() throws Exception {
        // Given
        final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
        final StubVariantItem stubVariantItem = new StubVariantItem();
        stubVariantItem.setId(randomId());
        stubVariantItem.setStringProperty(randomString(10));
        stubVariantItem.setStringProperty2(randomString(10));
        createdItemIds.add(stubVariantItem.getId());
        // When
        final StubVariantItem item = databaseTemplate.create(stubVariantItem);
        // Then: create returns the persisted variant item
        assertEquals(stubVariantItem, item);
    }
@Test
public void shouldReadItem_withVariantItem() throws Exception {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubVariantItem createdStubVariantItem = dataGenerator.randomStubVariantItem();
databaseTemplate.create(createdStubVariantItem);
final ItemId stubVariantItemId = new ItemId(createdStubVariantItem.getId());
// When
final StubVariantItem stubVariantItem = databaseTemplate.read(stubVariantItemId, StubVariantItem.class);
// Then
assertEquals(createdStubVariantItem, stubVariantItem);
}
@Test
public void shouldNotReadItem_withDifferentVariantItemType() throws Exception {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubVariantItem createdStubVariantItem = dataGenerator.randomStubVariantItem();
databaseTemplate.create(createdStubVariantItem);
final ItemId stubVariantItemId = new ItemId(createdStubVariantItem.getId());
// When
NonExistentItemException actualException = null;
try {
databaseTemplate.read(stubVariantItemId, StubVariantTwoItem.class);
} catch (final NonExistentItemException e) {
actualException = e;
}
// Then
assertNotNull(actualException);
}
@Test
public void shouldReadItem_withVariantItemByParent() throws Exception {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubVariantItem createdStubVariantItem = dataGenerator.randomStubVariantItem();
databaseTemplate.create(createdStubVariantItem);
final ItemId stubVariantItemId = new ItemId(createdStubVariantItem.getId());
// When
final StubParentItem stubParentItem = databaseTemplate.read(stubVariantItemId, StubParentItem.class);
// Then
assertTrue(stubParentItem instanceof StubVariantItem);
assertEquals(createdStubVariantItem, stubParentItem);
}
@Test
public void shouldFetch_withKeySetQueryWithVariants() {
// Given
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final int itemCount = 1 + Randoms.randomInt(3);
final HashSet<StubParentItem> items = new HashSet<>();
final Set<ItemId> itemIds = new HashSet<>();
for (int n = 0; n < itemCount; n++) {
final StubVariantItem item = dataGenerator.randomStubVariantItem();
databaseTemplate.create(item);
items.add(item);
itemIds.add(new ItemId(item.getId()));
}
final KeySetQuery query = new KeySetQuery(itemIds);
// When
final Collection<StubParentItem> returnedItems = databaseTemplate.fetch(query, StubParentItem.class);
// Then
assertNotNull(returnedItems);
assertEquals(itemCount, returnedItems.size());
final HashSet<StubParentItem> returnedItemSet = new HashSet<>(returnedItems);
assertTrue(returnedItemSet.equals(items));
for (final StubParentItem returnedItem : returnedItems) {
assertTrue(returnedItem instanceof StubVariantItem);
}
}
    @Test
    public void shouldFetch_withKeySetQueryWithMixedVariantsByParent() {
        // Given one item of each variant type, both fetched via the parent type
        final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
        final Set<ItemId> itemIds = new HashSet<>();
        final StubVariantItem item1 = dataGenerator.randomStubVariantItem();
        databaseTemplate.create(item1);
        itemIds.add(new ItemId(item1.getId()));
        final StubVariantTwoItem item2 = dataGenerator.randomStubVariantTwoItem();
        databaseTemplate.create(item2);
        itemIds.add(new ItemId(item2.getId()));
        final KeySetQuery query = new KeySetQuery(itemIds);
        // When
        final Collection<StubParentItem> returnedItems = databaseTemplate.fetch(query, StubParentItem.class);
        // Then
        assertNotNull(returnedItems);
        assertEquals(2, returnedItems.size());
        assertTrue(returnedItems.contains(item1));
        assertTrue(returnedItems.contains(item2));
        // Iteration order of the result is not asserted elsewhere, so accept
        // either order as long as one element is of each variant type.
        final Iterator<StubParentItem> iterator = returnedItems.iterator();
        final StubParentItem returnedItem1 = iterator.next();
        final StubParentItem returnedItem2 = iterator.next();
        if (returnedItem1 instanceof StubVariantItem) {
            assertTrue(returnedItem2 instanceof StubVariantTwoItem);
        } else {
            assertTrue(returnedItem1 instanceof StubVariantTwoItem);
            assertTrue(returnedItem2 instanceof StubVariantItem);
        }
    }
/**
 * A key-set query fetched as a specific variant type should return only the items of that
 * variant, silently omitting keys that resolve to other variants (partial results).
 */
@Test
public void shouldFetch_withKeySetQueryWithMixedVariants_partialResults() {
    // Given: one item of each variant type, both keyed in the same query
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
    final Set<ItemId> itemIds = new HashSet<>();
    final StubVariantItem item1 = dataGenerator.randomStubVariantItem();
    databaseTemplate.create(item1);
    itemIds.add(new ItemId(item1.getId()));
    final StubVariantTwoItem item2 = dataGenerator.randomStubVariantTwoItem();
    databaseTemplate.create(item2);
    itemIds.add(new ItemId(item2.getId()));
    final KeySetQuery query = new KeySetQuery(itemIds);
    // When: fetching as the second variant type only
    final Collection<StubVariantTwoItem> returnedItems = databaseTemplate.fetch(query, StubVariantTwoItem.class);
    // Then: only the matching variant is returned
    assertNotNull(returnedItems);
    assertEquals(1, returnedItems.size());
    final StubVariantTwoItem returnedItem1 = returnedItems.iterator().next();
    assertTrue(returnedItem1 instanceof StubVariantTwoItem);
}
/**
 * An item written via {@code create} should be readable back by its id, unchanged.
 */
@Test
public void shouldReadBack_withSingleItem() {
    // Given: a freshly persisted stub item
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
    final StubItem persisted = dataGenerator.randomStubItem();
    databaseTemplate.create(persisted);
    // When: reading it back by primary key
    final StubItem fetched = databaseTemplate.read(new ItemId(persisted.getId()), StubItem.class);
    // Then: the round-tripped item equals the original
    assertEquals(persisted, fetched);
}
/**
 * An item keyed by a compound primary key (id plus supporting id) should round-trip
 * through create/read unchanged.
 */
@Test
public void shouldReadBack_withSingleItemAndCompoundPk() {
    // Given: a persisted item with a compound (hash + range) key
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
    final StubWithRangeItem persisted = dataGenerator.randomStubWithRangeItem();
    databaseTemplate.create(persisted);
    final ItemId compoundId = new ItemId(persisted.getId(), persisted.getSupportingId());
    // When: reading by the compound key
    final StubWithRangeItem fetched = databaseTemplate.read(compoundId, StubWithRangeItem.class);
    // Then
    assertEquals(persisted, fetched);
}
/**
 * Reading an id that was never persisted must fail with {@link NonExistentItemException}.
 */
@Test
public void shouldNotReadBack_withUnknownId() {
    // Given: an empty template and an id that does not exist
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
    final ItemId missingId = new ItemId(randomId());
    // When: attempting the read
    NonExistentItemException thrown = null;
    try {
        databaseTemplate.read(missingId, StubItem.class);
    } catch (final NonExistentItemException e) {
        thrown = e;
    }
    // Then: the expected exception was raised
    assertNotNull(thrown);
}
/**
 * Reading an unknown compound (id + supporting id) key must fail with
 * {@link NonExistentItemException}.
 */
@Test
public void shouldNotReadBack_withUnknownCompoundPk() {
    // Given: an empty template and a compound key that does not exist
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
    final ItemId missingCompoundId = new ItemId(randomId(), randomId());
    // When: attempting the read
    NonExistentItemException thrown = null;
    try {
        databaseTemplate.read(missingCompoundId, StubItem.class);
    } catch (final NonExistentItemException e) {
        thrown = e;
    }
    // Then: the expected exception was raised
    assertNotNull(thrown);
}
/**
 * Creating an unversioned item should persist it with version 1 and all properties intact.
 */
@Test
public void shouldSaveNewItem_withItem() {
    // Given: a stub item with no version yet assigned
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
    final StubItem stubItem = dataGenerator.randomStubItem();
    stubItem.setVersion(null);
    // track for cleanup by the surrounding fixture
    createdItemIds.add(stubItem.getId());
    // When
    final StubItem item = databaseTemplate.create(stubItem);
    // Then: version starts at 1 and the stored properties match the input
    // Long.valueOf avoids the deprecated Long(long) constructor
    assertEquals(Long.valueOf(1L), item.getVersion());
    assertEquals(stubItem.getId(), item.getId());
    assertEquals(stubItem.getStringProperty(), item.getStringProperty());
    assertEquals(stubItem.getStringSetProperty(), item.getStringSetProperty());
}
/**
 * Creating the same item twice must fail with {@link ItemConstraintViolationException}.
 */
@Test
public void shoudNotSaveItem_withExisitingItem() {
    // Given: an item that has already been persisted once
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
    final StubItem existing = dataGenerator.randomStubItem();
    databaseTemplate.create(existing);
    // When: creating it a second time
    ItemConstraintViolationException thrown = null;
    try {
        databaseTemplate.create(existing);
    } catch (final ItemConstraintViolationException e) {
        thrown = e;
    }
    // Then: the duplicate create is rejected
    assertNotNull(thrown);
}
/**
 * Creating an item whose configuration declares a unique constraint should persist both
 * the item and its unique-constraint entry.
 */
@Test
public void shoudSaveItemAndItsUniqueConstraints_withItemWithUniqueConstraint() {
    // Given: a schema whose stub-item configuration registers a unique constraint on stringProperty
    final StubItem stubItem = dataGenerator.randomStubItem();
    final String uniqueConstraintAttributeName = "stringProperty";
    final ItemConfiguration stubItemConfigurationWithUniqueConstraints = new ItemConfiguration(stubItem.getClass(),
            "stubTable");
    stubItemConfigurationWithUniqueConstraints
            .registerUniqueConstraints(Arrays.asList(new UniqueConstraint(uniqueConstraintAttributeName)));
    final DatabaseSchemaHolder databaseSchemaHolderWithUniqueConstraints = databaseSchemaHolderWithItemConfiguration(
            stubItemConfigurationWithUniqueConstraints);
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(
            databaseSchemaHolderWithUniqueConstraints);
    // When
    final StubItem returnedItem = databaseTemplate.create(stubItem);
    // Then: the item round-trips and the constraint index holds its value
    assertNotNull(returnedItem);
    assertEquals(stubItem, databaseTemplate.read(new ItemId(stubItem.getId()), stubItem.getClass()));
    assertEquals(returnedItem, stubItem);
    assertTrue(databaseTemplate.hasMatchingUniqueConstraint(stubItem, uniqueConstraintAttributeName,
            stubItem.getStringProperty()));
}
/**
 * Creating a second item that duplicates an existing unique-constraint value must be
 * rejected, leaving the original item and its constraint entry intact.
 */
@Test
public void shoudNotSaveItem_withAlreadyExistingUniqueConstraintValue() {
// Given: a schema with a unique constraint on stringProperty and one item already stored
final StubItem stubItem = dataGenerator.randomStubItem();
final StubItem secondItem = dataGenerator.randomStubItem();
final String uniqueConstraintAttributeName = "stringProperty";
final String stubItemIndexAttributeValue = stubItem.getStringProperty();
final ItemConfiguration stubItemConfigurationWithUniqueConstraints = new ItemConfiguration(stubItem.getClass(),
"stubTable");
stubItemConfigurationWithUniqueConstraints
.registerUniqueConstraints(Arrays.asList(new UniqueConstraint(uniqueConstraintAttributeName)));
final DatabaseSchemaHolder databaseSchemaHolderWithUniqueConstraints = databaseSchemaHolderWithItemConfiguration(
stubItemConfigurationWithUniqueConstraints);
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(
databaseSchemaHolderWithUniqueConstraints);
databaseTemplate.create(stubItem);
// force the second item to collide with the first item's constrained value
secondItem.setStringProperty(stubItemIndexAttributeValue);
// When
ItemConstraintViolationException actualException = null;
try {
databaseTemplate.create(secondItem);
} catch (final ItemConstraintViolationException e) {
actualException = e;
}
// Then: the create is rejected and the original item/constraint entry survive
assertNotNull(actualException);
assertEquals(stubItem, databaseTemplate.read(new ItemId(stubItem.getId()), stubItem.getClass()));
assertTrue(databaseTemplate.hasMatchingUniqueConstraint(stubItem, uniqueConstraintAttributeName,
stubItem.getStringProperty()));
}
/**
 * Unique constraints are case-insensitive: a second item whose constrained value differs
 * from an existing one only in letter case must still be rejected.
 */
@Test
public void shoudNotSaveItem_withAlreadyExistingUniqueConstraintValueButDifferentCase() {
// Given: a schema with a unique constraint on stringProperty and one item already stored
final StubItem stubItem = dataGenerator.randomStubItem();
final StubItem secondItem = dataGenerator.randomStubItem();
final String uniqueConstraintAttributeName = "stringProperty";
final String stubItemIndexAttributeValue = stubItem.getStringProperty();
final ItemConfiguration stubItemConfigurationWithUniqueConstraints = new ItemConfiguration(stubItem.getClass(),
"stubTable");
stubItemConfigurationWithUniqueConstraints
.registerUniqueConstraints(Arrays.asList(new UniqueConstraint(uniqueConstraintAttributeName)));
final DatabaseSchemaHolder databaseSchemaHolderWithUniqueConstraints = databaseSchemaHolderWithItemConfiguration(
stubItemConfigurationWithUniqueConstraints);
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(
databaseSchemaHolderWithUniqueConstraints);
databaseTemplate.create(stubItem);
// same value, different case — should still count as a collision
secondItem.setStringProperty(stubItemIndexAttributeValue.toUpperCase());
// When
ItemConstraintViolationException actualException = null;
try {
databaseTemplate.create(secondItem);
} catch (final ItemConstraintViolationException e) {
actualException = e;
}
// Then: the create is rejected and the original item/constraint entry survive
assertNotNull(actualException);
assertEquals(stubItem, databaseTemplate.read(new ItemId(stubItem.getId()), stubItem.getClass()));
assertTrue(databaseTemplate.hasMatchingUniqueConstraint(stubItem, uniqueConstraintAttributeName,
stubItem.getStringProperty()));
}
/**
 * Updating only non-constrained fields must succeed and leave the unchanged
 * unique-constraint entry in place.
 */
@Test
public void shouldUpdateItemAndUniqueConstraints_withUpdatedItemWithUnchangedUniqueConstraint() {
    // Given: an item persisted under a unique constraint on stringProperty
    final StubItem stubItem = dataGenerator.randomStubItem();
    final String propertyValue = stubItem.getStringProperty();
    final String updatedProperty2Value = randomString();
    final Set<String> updatedSetValue = new HashSet<>();
    final String uniqueConstraintAttributeName = "stringProperty";
    final ItemConfiguration stubItemConfigurationWithUniqueConstraints = new ItemConfiguration(stubItem.getClass(),
            "stubTable");
    stubItemConfigurationWithUniqueConstraints
            .registerUniqueConstraints(Arrays.asList(new UniqueConstraint(uniqueConstraintAttributeName)));
    final DatabaseSchemaHolder databaseSchemaHolderWithUniqueConstraints = databaseSchemaHolderWithItemConfiguration(
            stubItemConfigurationWithUniqueConstraints);
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(
            databaseSchemaHolderWithUniqueConstraints);
    databaseTemplate.create(stubItem);
    // mutate only fields that are NOT part of the unique constraint
    stubItem.setStringProperty2(updatedProperty2Value);
    stubItem.setStringSetProperty(updatedSetValue);
    // When
    final StubItem updatedItem = databaseTemplate.update(stubItem);
    // Then: updated fields took effect, constrained field is untouched, constraint entry intact
    assertNotNull(updatedItem);
    assertThat(updatedItem.getStringProperty(), Is.is(propertyValue));
    assertThat(updatedItem.getStringProperty2(), Is.is(updatedProperty2Value));
    assertThat(updatedItem.getStringSetProperty(), Is.is(updatedSetValue));
    assertTrue(databaseTemplate.hasMatchingUniqueConstraint(stubItem, uniqueConstraintAttributeName,
            stubItem.getStringProperty()));
}
/**
 * Updating the constrained field should move the unique-constraint entry to the new value.
 */
@Test
public void shouldUpdateItemAndUniqueConstraint_withItemWithUpdatedUniqueConstraint() {
    // Given: an item persisted under a unique constraint on stringProperty
    final StubItem stubItem = dataGenerator.randomStubItem();
    final String uniqueConstraintAttributeName = "stringProperty";
    final String updatedPropertyValue = randomString();
    final ItemConfiguration stubItemConfigurationWithUniqueConstraints = new ItemConfiguration(stubItem.getClass(),
            "stubTable");
    stubItemConfigurationWithUniqueConstraints
            .registerUniqueConstraints(Arrays.asList(new UniqueConstraint(uniqueConstraintAttributeName)));
    final DatabaseSchemaHolder databaseSchemaHolderWithUniqueConstraints = databaseSchemaHolderWithItemConfiguration(
            stubItemConfigurationWithUniqueConstraints);
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(
            databaseSchemaHolderWithUniqueConstraints);
    databaseTemplate.create(stubItem);
    // change the value the unique constraint is registered against
    stubItem.setStringProperty(updatedPropertyValue);
    // When
    final StubItem updatedItem = databaseTemplate.update(stubItem);
    // Then: the constraint index now matches the new value
    assertNotNull(updatedItem);
    assertTrue(databaseTemplate.hasMatchingUniqueConstraint(stubItem, uniqueConstraintAttributeName,
            updatedPropertyValue));
}
/**
 * Updating an item's constrained field to a value already owned by another item must be
 * rejected, leaving both items' constraint entries as they were.
 */
@Test
public void shouldNotUpdateItemAndUniqueConstraint_withItemExistingUpdatedUniqueConstraintValue() {
// Given: two items stored under a unique constraint on stringProperty
final StubItem stubItem = dataGenerator.randomStubItem();
final String originalStubItemContstraintValue = stubItem.getStringProperty();
final StubItem existingStubItem = dataGenerator.randomStubItem();
final String alreadyExistingUniqueConstraint = existingStubItem.getStringProperty();
final String uniqueConstraintAttributeName = "stringProperty";
final ItemConfiguration stubItemConfigurationWithUniqueConstraints = new ItemConfiguration(stubItem.getClass(),
"stubTable");
stubItemConfigurationWithUniqueConstraints
.registerUniqueConstraints(Arrays.asList(new UniqueConstraint(uniqueConstraintAttributeName)));
final DatabaseSchemaHolder databaseSchemaHolderWithUniqueConstraints = databaseSchemaHolderWithItemConfiguration(
stubItemConfigurationWithUniqueConstraints);
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(
databaseSchemaHolderWithUniqueConstraints);
databaseTemplate.create(existingStubItem);
databaseTemplate.create(stubItem);
// steer the update into the other item's constrained value
stubItem.setStringProperty(alreadyExistingUniqueConstraint);
// When
ItemConstraintViolationException actualException = null;
try {
databaseTemplate.update(stubItem);
} catch (final ItemConstraintViolationException e) {
actualException = e;
}
// Then: the update is rejected and both constraint entries remain on their original values
assertNotNull(actualException);
assertTrue(databaseTemplate.hasMatchingUniqueConstraint(existingStubItem, uniqueConstraintAttributeName,
alreadyExistingUniqueConstraint));
assertTrue(databaseTemplate.hasMatchingUniqueConstraint(stubItem, uniqueConstraintAttributeName,
originalStubItemContstraintValue));
}
/**
 * Deleting an item must also remove its unique-constraint entry from the index.
 */
@Test
public void shouldDeleteUniqueConstraints_withDeletedItem() {
    // Given: an item persisted under a unique constraint on stringProperty
    final StubItem stubItem = dataGenerator.randomStubItem();
    final String uniqueConstraintAttributeName = "stringProperty";
    final String existingUniqueConstraint = stubItem.getStringProperty();
    final ItemConfiguration stubItemConfigurationWithUniqueConstraints = new ItemConfiguration(stubItem.getClass(),
            "stubTable");
    stubItemConfigurationWithUniqueConstraints
            .registerUniqueConstraints(Arrays.asList(new UniqueConstraint(uniqueConstraintAttributeName)));
    final DatabaseSchemaHolder databaseSchemaHolderWithUniqueConstraints = databaseSchemaHolderWithItemConfiguration(
            stubItemConfigurationWithUniqueConstraints);
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(
            databaseSchemaHolderWithUniqueConstraints);
    databaseTemplate.create(stubItem);
    // When
    databaseTemplate.delete(stubItem);
    // Then: the constraint entry is gone along with the item
    assertFalse(databaseTemplate.hasMatchingUniqueConstraint(stubItem, uniqueConstraintAttributeName,
            existingUniqueConstraint));
}
/**
 * fetchUnique must throw {@link NonUniqueResultException} when the query does not resolve
 * to exactly one item.
 *
 * NOTE(review): the method name suggests a non-matching query value, yet the assertion
 * expects NonUniqueResultException rather than a "no result" failure — presumably the
 * template treats any result count other than one (zero included) as non-unique; confirm
 * against InMemoryDatabaseTemplate.fetchUnique.
 */
@Test
public void shouldNotFetchUniqueItem_withIncorrectQueryValue() {
// Given: two stored items and a query for a value neither of them has
final StubItem stubItem = dataGenerator.randomStubItem();
final String randomQueryValue = randomString();
final StubItem secondStubItem = dataGenerator.randomStubItem();
final String attributeName = "stringProperty";
final Condition condition = new Condition(Operators.EQUALS, randomQueryValue);
final Query query = new AttributeQuery(attributeName, condition);
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
databaseTemplate.create(stubItem);
databaseTemplate.create(secondStubItem);
// When
NonUniqueResultException actualException = null;
try {
databaseTemplate.fetchUnique(query, stubItem.getClass());
} catch (final NonUniqueResultException e) {
actualException = e;
}
// Then
assertNotNull(actualException);
}
/**
 * Updating an item whose version no longer matches the stored version (optimistic-lock
 * conflict) must fail.
 *
 * NOTE(review): the failure is caught as IllegalAccessError — an {@code Error}, not an
 * exception — which is unusual for an optimistic-locking failure; confirm this is really
 * what InMemoryDatabaseTemplate.update throws.
 */
@Test
public void shouldNotUpdateItem_withItemVersionNotEqualToOldItem() {
// Given: a stored item whose in-memory version is then clobbered with a random value
final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
final StubItem stubItem = dataGenerator.randomStubItem();
databaseTemplate.create(stubItem);
stubItem.setVersion(randomLong());
// When
IllegalAccessError actualException = null;
try {
databaseTemplate.update(stubItem);
} catch (final IllegalAccessError e) {
actualException = e;
}
// Then
assertNotNull(actualException);
}
/**
 * A sequence key generator should yield exactly the requested number of keys, all distinct.
 */
@Test
public void shouldGenerateKeyHolder_withSequenceKeyGenerator() {
    // Given: a generator requesting a random positive key count
    final int keyCount = 1 + Randoms.randomInt(100);
    final SequenceKeyGenerator keyGenerator = new SequenceKeyGenerator(randomString(), keyCount);
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
    // When
    final GeneratedKeyHolder keyHolder = databaseTemplate.generateKeys(keyGenerator);
    // Then: the holder carries keyCount keys and a set of them has the same size (no duplicates)
    assertNotNull(keyHolder);
    assertThat(keyHolder.keys().size(), Is.is(keyGenerator.keyCount()));
    final Set<Long> distinctKeys = new HashSet<>(keyHolder.keys());
    assertThat(distinctKeys.size(), Is.is(keyHolder.keys().size()));
}
/**
 * Keys produced across many independent sequence generators must be globally unique:
 * the union of all generated keys has the same cardinality as the total requested count.
 */
@Test
public void shouldUniquelyGenerateKeyHolders_withManySequenceKeyGenerator() {
    // Given: between 1 and 10 generators, each requesting a random key count
    final Collection<SequenceKeyGenerator> sequenceKeyGenerators = new ArrayList<>();
    final Collection<GeneratedKeyHolder> generatedKeyHolders = new ArrayList<>();
    int totalKeyCount = 0;
    for (int i = 0; i < randomInt(9) + 1; i++) {
        final int keyCount = 1 + Randoms.randomInt(100);
        totalKeyCount += keyCount;
        sequenceKeyGenerators.add(new SequenceKeyGenerator(randomString(), keyCount));
    }
    final InMemoryDatabaseTemplate databaseTemplate = new InMemoryDatabaseTemplate(databaseSchemaHolder);
    // When
    for (final SequenceKeyGenerator sequenceKeyGenerator : sequenceKeyGenerators) {
        generatedKeyHolders.add(databaseTemplate.generateKeys(sequenceKeyGenerator));
    }
    // Then: collecting every key into one set loses nothing, so no key was issued twice
    assertFalse(generatedKeyHolders.isEmpty());
    final Set<Long> allGeneratedKeys = new HashSet<>();
    for (final GeneratedKeyHolder generatedKeyHolder : generatedKeyHolders) {
        allGeneratedKeys.addAll(generatedKeyHolder.keys());
    }
    // assertEquals reports expected vs actual counts on failure (was assertTrue on ==)
    assertEquals(totalKeyCount, allGeneratedKeys.size());
}
/**
 * Builds a schema holder containing just the supplied item configuration.
 *
 * @param itemConfiguration the single configuration to register
 * @return a {@code DatabaseSchemaHolder} named "testStub" wrapping the configuration
 */
private DatabaseSchemaHolder databaseSchemaHolderWithItemConfiguration(final ItemConfiguration itemConfiguration) {
    final Collection<ItemConfiguration> configurations = new HashSet<>();
    configurations.add(itemConfiguration);
    return new DatabaseSchemaHolder("testStub", configurations);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.common;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSInteger;
import org.apache.pdfbox.cos.COSFloat;
import org.apache.pdfbox.cos.COSString;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSNull;
import org.apache.pdfbox.cos.COSNumber;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import org.apache.pdfbox.cos.COSObject;
/**
* This is an implementation of a List that will sync its contents to a COSArray.
*
* @author Ben Litchfield
*/
/**
 * This is an implementation of a List that will sync its contents to a COSArray.
 *
 * Bug fix in this revision: {@link #lastIndexOf(Object)} previously delegated to
 * {@code indexOf} and therefore returned the FIRST occurrence instead of the last.
 *
 * @author Ben Litchfield
 */
public class COSArrayList<E> implements List<E>
{
    private final COSArray array;
    private final List<E> actual;

    // Non-null only while this list still represents a single dictionary item; once an
    // element is added, the dictionary entry is promoted to the backing array and this
    // reference is cleared so the promotion happens at most once.
    private COSDictionary parentDict;
    private COSName dictKey;

    /**
     * Default constructor.
     */
    public COSArrayList()
    {
        array = new COSArray();
        actual = new ArrayList<E>();
    }

    /**
     * Constructor.
     *
     * @param actualList The list of standard java objects
     * @param cosArray The COS array object to sync to.
     */
    public COSArrayList( List<E> actualList, COSArray cosArray )
    {
        actual = actualList;
        array = cosArray;
    }

    /**
     * This is a really special constructor.  Sometimes the PDF spec says
     * that a dictionary entry can either be a single item or an array of those
     * items.  But in the PDModel interface we really just want to always return
     * a java.util.List.  In the case were we get the list and never modify it
     * we don't want to convert to COSArray and put one element, unless we append
     * to the list.  So here we are going to create this object with a single
     * item instead of a list, but allow more items to be added and then converted
     * to an array.
     *
     * @param actualObject The PDModel object.
     * @param item The COS Model object.
     * @param dictionary The dictionary that holds the item, and will hold the array if an item is added.
     * @param dictionaryKey The key into the dictionary to set the item.
     */
    public COSArrayList( E actualObject, COSBase item, COSDictionary dictionary, COSName dictionaryKey )
    {
        array = new COSArray();
        array.add( item );
        actual = new ArrayList<E>();
        actual.add( actualObject );
        parentDict = dictionary;
        dictKey = dictionaryKey;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int size()
    {
        return actual.size();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isEmpty()
    {
        return actual.isEmpty();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean contains(Object o)
    {
        return actual.contains(o);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Iterator<E> iterator()
    {
        return actual.iterator();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Object[] toArray()
    {
        return actual.toArray();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public <X>X[] toArray(X[] a)
    {
        return actual.toArray(a);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean add(E o)
    {
        // When adding, if there is a parentDict then promote the dictionary entry
        // from a single item to the backing array.
        if( parentDict != null )
        {
            parentDict.setItem( dictKey, array );
            // Clear the parent dict so it doesn't happen again; there might be
            // a usecase for keeping the parentDict around but not now.
            parentDict = null;
        }
        // String is a special case because it can't be subclassed to be COSObjectable.
        if( o instanceof String )
        {
            array.add( new COSString( (String)o ) );
        }
        else
        {
            // 'array' is final and assigned in every constructor, so the previous
            // null check was dead code and has been removed.
            array.add( ((COSObjectable)o).getCOSObject() );
        }
        return actual.add(o);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean remove(Object o)
    {
        // Remove at the same index from both lists to keep them in sync;
        // this relies on actual and array being parallel, element for element.
        boolean retval = true;
        int index = actual.indexOf( o );
        if( index >= 0 )
        {
            actual.remove( index );
            array.remove( index );
        }
        else
        {
            retval = false;
        }
        return retval;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean containsAll(Collection<?> c)
    {
        return actual.containsAll( c );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean addAll(Collection<? extends E> c)
    {
        // When adding, if there is a parentDict then promote the dictionary entry
        // from a single item to the backing array.
        if( parentDict != null && !c.isEmpty() )
        {
            parentDict.setItem( dictKey, array );
            // Clear the parent dict so it doesn't happen again.
            parentDict = null;
        }
        array.addAll( toCOSObjectList( c ) );
        return actual.addAll( c );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean addAll(int index, Collection<? extends E> c)
    {
        // When adding, if there is a parentDict then promote the dictionary entry
        // from a single item to the backing array.
        if( parentDict != null && !c.isEmpty() )
        {
            parentDict.setItem( dictKey, array );
            // Clear the parent dict so it doesn't happen again.
            parentDict = null;
        }
        array.addAll( index, toCOSObjectList( c ) );
        return actual.addAll( index, c );
    }

    /**
     * This will take an array of COSNumbers and return a COSArrayList of
     * java.lang.Integer values.
     *
     * @param intArray The existing integer Array.
     *
     * @return A list that is part of the core Java collections.
     */
    public static List<Integer> convertIntegerCOSArrayToList(COSArray intArray)
    {
        List<Integer> retval = null;
        if (intArray != null)
        {
            List<Integer> numbers = new ArrayList<Integer>();
            for (int i = 0; i < intArray.size(); i++)
            {
                // Entries may be indirect references; resolve COSObject wrappers.
                COSNumber num;
                if (intArray.get(i) instanceof COSObject)
                {
                    num = (COSNumber) ((COSObject) intArray.get(i)).getObject();
                }
                else
                {
                    num = (COSNumber) intArray.get(i);
                }
                numbers.add(num.intValue());
            }
            retval = new COSArrayList<Integer>(numbers, intArray);
        }
        return retval;
    }

    /**
     * This will take an array of COSNumbers and return a COSArrayList of
     * java.lang.Float values.
     *
     * @param floatArray The existing float Array.
     *
     * @return The list of Float objects.
     */
    public static List<Float> convertFloatCOSArrayToList( COSArray floatArray )
    {
        List<Float> retval = null;
        if( floatArray != null )
        {
            List<Float> numbers = new ArrayList<Float>();
            for( int i=0; i<floatArray.size(); i++ )
            {
                numbers.add(((COSNumber) floatArray.getObject(i)).floatValue());
            }
            retval = new COSArrayList<Float>( numbers, floatArray );
        }
        return retval;
    }

    /**
     * This will take an array of COSName and return a COSArrayList of
     * java.lang.String values.
     *
     * @param nameArray The existing name Array.
     *
     * @return The list of String objects.
     */
    public static List<String> convertCOSNameCOSArrayToList( COSArray nameArray )
    {
        List<String> retval = null;
        if( nameArray != null )
        {
            List<String> names = new ArrayList<String>();
            for( int i=0; i<nameArray.size(); i++ )
            {
                names.add( ((COSName)nameArray.getObject( i )).getName() );
            }
            retval = new COSArrayList<String>( names, nameArray );
        }
        return retval;
    }

    /**
     * This will take an array of COSString and return a COSArrayList of
     * java.lang.String values.
     *
     * @param stringArray The existing name Array.
     *
     * @return The list of String objects.
     */
    public static List<String> convertCOSStringCOSArrayToList( COSArray stringArray )
    {
        List<String> retval = null;
        if( stringArray != null )
        {
            List<String> strings = new ArrayList<String>();
            for( int i=0; i<stringArray.size(); i++ )
            {
                strings.add( ((COSString)stringArray.getObject( i )).getString() );
            }
            retval = new COSArrayList<String>( strings, stringArray );
        }
        return retval;
    }

    /**
     * This will take a list of string objects and return a COSArray of COSName
     * objects.
     *
     * @param strings A list of strings
     *
     * @return An array of COSName objects
     */
    public static COSArray convertStringListToCOSNameCOSArray( List<String> strings )
    {
        COSArray retval = new COSArray();
        for (String string : strings)
        {
            retval.add(COSName.getPDFName(string));
        }
        return retval;
    }

    /**
     * This will take a list of string objects and return a COSArray of COSString
     * objects.
     *
     * @param strings A list of strings
     *
     * @return An array of COSString objects
     */
    public static COSArray convertStringListToCOSStringCOSArray( List<String> strings )
    {
        COSArray retval = new COSArray();
        for (String string : strings)
        {
            retval.add(new COSString(string));
        }
        return retval;
    }

    /**
     * This will convert a list of COSObjectables to an array list of COSBase objects.
     *
     * @param cosObjectableList A list of COSObjectable.
     *
     * @return A list of COSBase.
     * @throws IllegalArgumentException if an object type is not supported for conversion to a
     * COSBase object.
     */
    public static COSArray converterToCOSArray( List<?> cosObjectableList )
    {
        COSArray array = null;
        if( cosObjectableList != null )
        {
            if( cosObjectableList instanceof COSArrayList )
            {
                // If it is already a COSArrayList then we don't want to recreate the array; we want to reuse it.
                array = ((COSArrayList<?>)cosObjectableList).array;
            }
            else
            {
                array = new COSArray();
                Iterator<?> iter = cosObjectableList.iterator();
                while( iter.hasNext() )
                {
                    Object next = iter.next();
                    if( next instanceof String )
                    {
                        array.add( new COSString( (String)next ) );
                    }
                    else if( next instanceof Integer || next instanceof Long )
                    {
                        array.add( COSInteger.get( ((Number)next).longValue() ) );
                    }
                    else if( next instanceof Float || next instanceof Double )
                    {
                        array.add( new COSFloat( ((Number)next).floatValue() ) );
                    }
                    else if( next instanceof COSObjectable )
                    {
                        COSObjectable object = (COSObjectable)next;
                        array.add( object.getCOSObject() );
                    }
                    else if( next == null )
                    {
                        array.add( COSNull.NULL );
                    }
                    else
                    {
                        throw new IllegalArgumentException( "Error: Don't know how to convert type to COSBase '" +
                            next.getClass().getName() + "'" );
                    }
                }
            }
        }
        return array;
    }

    /**
     * Converts a collection of PDModel objects (or Strings) to their COS counterparts,
     * preserving order.
     */
    private List<COSBase> toCOSObjectList( Collection<?> list )
    {
        List<COSBase> cosObjects = new ArrayList<COSBase>();
        Iterator<?> iter = list.iterator();
        while( iter.hasNext() )
        {
            Object next = iter.next();
            if( next instanceof String )
            {
                cosObjects.add( new COSString( (String)next ) );
            }
            else
            {
                COSObjectable cos = (COSObjectable)next;
                cosObjects.add( cos.getCOSObject() );
            }
        }
        return cosObjects;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean removeAll(Collection<?> c)
    {
        array.removeAll( toCOSObjectList( c ) );
        return actual.removeAll( c );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean retainAll(Collection<?> c)
    {
        array.retainAll( toCOSObjectList( c ) );
        return actual.retainAll( c );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void clear()
    {
        // If a parentDict exists, the dictionary still holds the single original
        // item; clearing the list means the entry must be removed entirely.
        if( parentDict != null )
        {
            parentDict.setItem( dictKey, null );
        }
        actual.clear();
        array.clear();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean equals(Object o)
    {
        return actual.equals( o );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode()
    {
        return actual.hashCode();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public E get(int index)
    {
        return actual.get( index );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public E set(int index, E element)
    {
        if( element instanceof String )
        {
            COSString item = new COSString( (String)element );
            // While still backed by a single dictionary item, writing index 0
            // must also replace the dictionary entry itself.
            if( parentDict != null && index == 0 )
            {
                parentDict.setItem( dictKey, item );
            }
            array.set( index, item );
        }
        else
        {
            if( parentDict != null && index == 0 )
            {
                parentDict.setItem( dictKey, ((COSObjectable)element).getCOSObject() );
            }
            array.set( index, ((COSObjectable)element).getCOSObject() );
        }
        return actual.set( index, element );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void add(int index, E element)
    {
        // When adding, if there is a parentDict then promote the dictionary entry
        // from a single item to the backing array.
        if( parentDict != null )
        {
            parentDict.setItem( dictKey, array );
            // Clear the parent dict so it doesn't happen again.
            parentDict = null;
        }
        actual.add( index, element );
        if( element instanceof String )
        {
            array.add( index, new COSString( (String)element ) );
        }
        else
        {
            array.add( index, ((COSObjectable)element).getCOSObject() );
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public E remove(int index)
    {
        array.remove( index );
        return actual.remove( index );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int indexOf(Object o)
    {
        return actual.indexOf( o );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int lastIndexOf(Object o)
    {
        // Bug fix: previously delegated to indexOf, which returned the first
        // occurrence instead of the last, violating the List.lastIndexOf contract.
        return actual.lastIndexOf( o );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ListIterator<E> listIterator()
    {
        return actual.listIterator();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ListIterator<E> listIterator(int index)
    {
        return actual.listIterator( index );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<E> subList(int fromIndex, int toIndex)
    {
        return actual.subList( fromIndex, toIndex );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString()
    {
        return "COSArrayList{" + array.toString() + "}";
    }

    /**
     * This will return the underlying COSArray.
     *
     * @return the COSArray
     */
    public COSArray toList()
    {
        return array;
    }
}
|
|
package test.model;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static test.utils.ExceptionAsserter.assertException;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import test.utils.ExceptionAsserter.ExceptionAssert;
import sr.entityset.EntityColumn;
import sr.entityset.EntityRow;
import sr.entityset.EntityTable;
import sr.entityset.Index;
import sr.entityset.Index.IndexChangedListener;
/**
 * Unit tests for {@link Index}: row lookup over one- and two-column keys,
 * change notification via {@link IndexChangedListener}, and duplicate
 * detection as rows are added, modified and removed.
 *
 * NOTE(review): several assertions rely on the index being (re)built
 * lazily — the first call to findRows()/containsDuplicates() after a
 * structural change fires the index-changed event, later calls do not.
 */
public class IndexTest
{
    // Fixture: a fresh three-column table for every test.
    private EntityTable table;
    private EntityColumn id1Col; // first key column (Integer)
    private EntityColumn id2Col; // second key column (String)
    private EntityColumn valCol; // non-indexed payload column

    @Before
    public void beforeEach() throws Exception
    {
        table = new EntityTable("tableName");
        id1Col = table.addColumn("id1", Integer.class);
        id2Col = table.addColumn("id2", String.class);
        valCol = table.addColumn("val", String.class);
    }

    @Test
    public void testThrowErrorWhenGivingWrongNumberOfValuesForFindRowsAndIndexDontChange() throws Exception
    {
        // Index over a single column; querying with two values must fail
        // fast without triggering an index build.
        EntityColumn[] indexCols = new EntityColumn[] {id1Col};
        final Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        assertException(IllegalArgumentException.class, new ExceptionAssert() {
            @Override
            public void doAction() throws Exception {
                index.findRows(1, "b");
            }
        });
        changeObserver.assertHasNotChanged();
    }

    @Test
    public void testIndexReturnProperRowWithTwoFieldsIndexAndChangesAtRightTime() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        EntityRow row1 = table.addRow(1, "a", "1a");
        EntityRow row2 = table.addRow(2, "b", "2b");
        table.addRow(3, "c", "3c");
        table.addRow(4, "d", "4d");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        // No build yet — construction alone must not fire the event.
        changeObserver.assertHasNotChanged();
        // First lookup builds the index and fires exactly one change event.
        assertSame(row1, single(index.findRows(1, "a")));
        changeObserver.assertHasChanged().andReset();
        // Subsequent lookups reuse the built index: no further events.
        assertSame(row2, single(index.findRows(2, "b")));
        assertEquals(0, index.findRows(10, "a").size());
        changeObserver.assertHasNotChanged();
    }

    @Test
    public void testIndexReturnNRowsWorks() throws Exception
    {
        // Two rows share key (1, "a"); both must be returned, in
        // insertion order.
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        EntityRow row1 = table.addRow(1, "a", "1a");
        table.addRow(3, "c", "3c");
        table.addRow(4, "d", "4d");
        EntityRow row2 = table.addRow(1, "a", "2b");
        Index index = new Index(table, indexCols);
        List<EntityRow> res = index.findRows(1, "a");
        assertEquals(2, res.size());
        assertSame(row1, res.get(0));
        assertSame(row2, res.get(1));
    }

    @Test
    /**
     * Ensure that after building the index the first time,
     * rows added through the proper method are properly considered by the index
     */
    public void testPostCreationRowAddingWorks() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        EntityRow row1 = table.addRow(1, "a", "1a");
        table.addRow(2, "b", "2b");
        table.addRow(3, "c", "3c");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        assertSame(row1, single(index.findRows(1, "a")));
        changeObserver.assertHasChanged().andReset();
        // Adding a row and notifying the index must both update it and
        // fire the change event.
        EntityRow row3 = table.addRow(4, "d", "pof");
        index.updateOnRowAdded(row3);
        changeObserver.assertHasChanged().andReset();
        assertSame(row3, single(index.findRows(4, "d")));
        changeObserver.assertHasNotChanged();
    }

    @Test
    /**
     * Ensure rows modified on NON-indexed fields do not trigger a index changed event,
     * and return the proper row still.
     */
    public void testPostCreationModifiedRowsOnNonIndexedFielWorks() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "a", "row1");
        table.addRow(1, "a", "row2");
        EntityRow row3 = table.addRow(3, "c", "3c");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        // Adding a row without notifying the index does not fire an event
        // (the index has not been built yet at this point).
        table.addRow(4, "d", "row4");
        changeObserver.assertHasNotChanged();
        assertEquals(2, index.findRows(1, "a").size());
        changeObserver.assertHasChanged().andReset();
        // Modifying only the non-indexed payload column: no event expected.
        row3.setValue(valCol, "toto");
        index.updateOnRowModified(row3);
        changeObserver.assertHasNotChanged().andReset();
        assertSame(row3, single(index.findRows(3, "c")));
        changeObserver.assertHasNotChanged();
    }

    @Test
    /**
     * Ensure rows modified on indexed field raise index-changed event and are accessible with
     * the new key-values while former keyed-values do not return the row.
     */
    public void testPostCreationModifiedRowsOnIndexedFieldWorks() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "a", "row1");
        table.addRow(1, "a", "row2");
        EntityRow row3 = table.addRow(3, "c", "3c");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        table.addRow(4, "d", "row4");
        changeObserver.assertHasNotChanged();
        assertEquals(2, index.findRows(1, "a").size());
        changeObserver.assertHasChanged().andReset();
        // Change an indexed key: the row must be re-keyed under (3, "x").
        row3.setValue(id2Col, "x");
        index.updateOnRowModified(row3);
        changeObserver.assertHasChanged().andReset();
        assertSame(row3, single(index.findRows(3, "x")));
        assertEquals(0, index.findRows(3, "c").size());
        changeObserver.assertHasNotChanged();
    }

    @Test
    /**
     * Ensure rows deleted raises index changed event and are not accessible anymore in index.
     */
    public void testPostCreationDeletedRowsWorks() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "a", "row1");
        table.addRow(1, "a", "row2");
        EntityRow row3 = table.addRow(3, "c", "3c");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        assertSame(row3, single(index.findRows(3, "c")));
        changeObserver.assertHasChanged().andReset();
        table.removeRow(row3);
        index.updateOnRowRemoved(row3);
        changeObserver.assertHasChanged().andReset();
        assertEquals(0, index.findRows(3, "c").size());
        changeObserver.assertHasNotChanged();
    }

    @Test
    /**
     * Ensure ContainsDuplicates works on index creation
     * when a duplicate actually exists.
     */
    public void testTrueContainsDuplicateWorksOnIndexCreation() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "a", "row1");
        table.addRow(1, "a", "row2");
        table.addRow(3, "c", "3c");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        // Explicit build (rather than lazy) also fires the change event.
        index.build();
        changeObserver.assertHasChanged().andReset();
        assertTrue(index.containsDuplicates());
    }

    @Test
    /**
     * Ensure ContainsDuplicates works on index creation
     * when no duplicate is present.
     */
    public void testFalseContainsDuplicateWorksOnIndexCreation() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "q", "row1");
        table.addRow(1, "a", "row2");
        table.addRow(3, "c", "3c");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        index.build();
        changeObserver.assertHasChanged().andReset();
        assertFalse(index.containsDuplicates());
    }

    @Test
    /**
     * Ensure ContainsDuplicates works when a duplicated row is added
     */
    public void testPostCreationAddingDuplicatedRowWorks() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "q", "row1");
        table.addRow(1, "a", "row2");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        index.build();
        changeObserver.assertHasChanged().andReset();
        assertFalse(index.containsDuplicates());
        // Adding a second (1, "a") row flips the duplicate flag.
        EntityRow row3 = table.addRow(1, "a", "row3");
        index.updateOnRowAdded(row3);
        changeObserver.assertHasChanged().andReset();
        assertTrue(index.containsDuplicates());
    }

    @Test
    /**
     * Ensure ContainsDuplicate works when a row which
     * was not a duplicate is changed so that to become a duplicate.
     */
    public void testPostCreationRowModificationToCreateDuplicate() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "a", "row1");
        EntityRow row2 = table.addRow(1, "a", "row2");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        index.build();
        changeObserver.assertHasChanged().andReset();
        assertTrue(index.containsDuplicates());
        // Re-keying row2 to (2, "a") removes the duplication.
        row2.setValue(id1Col, 2);
        index.updateOnRowModified(row2);
        changeObserver.assertHasChanged().andReset();
        assertFalse(index.containsDuplicates());
    }

    @Test
    /**
     * Ensure a formerly duplicated index becomes non duplicated when
     * the only duplicated row is deleted (but remain duplicated if other
     * duplicated rows remains).
     */
    public void testPostCreationRowDeletionToRemoveDuplicationWorks() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "a", "row1");
        EntityRow row2 = table.addRow(1, "a", "row2");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        index.build();
        changeObserver.assertHasChanged().andReset();
        assertTrue(index.containsDuplicates());
        // Removing one of the two duplicates leaves a unique key.
        table.removeRow(row2);
        index.updateOnRowRemoved(row2);
        changeObserver.assertHasChanged().andReset();
        assertFalse(index.containsDuplicates());
    }

    @Test
    /**
     * Ensure the index stays flagged as duplicated when one of THREE
     * identical rows is removed: two duplicates remain.
     * NOTE(review): the method name mentions "modification" but the body
     * removes a row — consider renaming for clarity.
     */
    public void testPostCreationRowModificationToRemoveDuplicationWorks() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "a", "row1");
        table.addRow(1, "a", "row2");
        EntityRow row3 = table.addRow(1, "a", "row3");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        index.build();
        changeObserver.assertHasChanged().andReset();
        assertTrue(index.containsDuplicates());
        table.removeRow(row3);
        index.updateOnRowRemoved(row3);
        changeObserver.assertHasChanged().andReset();
        assertTrue(index.containsDuplicates());
    }

    @Test
    /**
     * Ensure ContainsDuplicate = true works even if index was never created before
     */
    public void testTrueContainsDuplicateWorksEvenOnFirstIndexCreation() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "a", "row1");
        table.addRow(1, "a", "row2");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        changeObserver.assertHasNotChanged().andReset();
        // containsDuplicates() itself triggers the lazy build, hence the
        // change event fires only after this call.
        assertTrue(index.containsDuplicates());
        changeObserver.assertHasChanged();
    }

    @Test
    /**
     * Ensure ContainsDuplicate = false works even if index was never created before.
     */
    public void testFalseContainsDuplicateWorksEvenOnFirstIndexCreation() throws Exception
    {
        EntityColumn[] indexCols = new EntityColumn[] {id1Col, id2Col};
        table.addRow(1, "a", "row1");
        table.addRow(1, "b", "row2");
        Index index = new Index(table, indexCols);
        IndexChangeObserver changeObserver = new IndexChangeObserver(index);
        changeObserver.assertHasNotChanged().andReset();
        assertFalse(index.containsDuplicates());
        changeObserver.assertHasChanged();
    }

    ///////////////////////////////////////////////////////////////////////////////////////////////

    // Unwraps a single-element result list; fails the test otherwise.
    private <T> T single(List<T> list) throws Exception
    {
        if (list.size() != 1) throw new Exception("Expecting exactly one row.");
        return list.get(0);
    }

    /**
     * Test helper: records whether the observed index fired its
     * index-changed event, with fluent assert/reset methods.
     */
    private class IndexChangeObserver implements IndexChangedListener
    {
        private boolean changed = false;
        public IndexChangeObserver(Index index) {
            index.addIndexChangedListener(this);
        }
        @Override
        public void indexChanged() {
            this.changed = true;
        }
        public void reset() {
            this.changed = false;
        }
        public IndexChangeObserver assertHasChanged() { assertTrue(this.changed); return this; }
        public IndexChangeObserver assertHasNotChanged() { assertFalse(this.changed); return this; }
        public void andReset() { this.reset(); }
    }
}
|
|
package com.njlabs.showjava.processor;
import android.util.Log;
import com.crashlytics.android.Crashlytics;
import com.googlecode.dex2jar.Method;
import com.googlecode.dex2jar.ir.IrMethod;
import com.googlecode.dex2jar.reader.DexFileReader;
import com.googlecode.dex2jar.v3.Dex2jar;
import com.googlecode.dex2jar.v3.DexExceptionHandler;
import com.njlabs.showjava.utils.StringUtils;
import com.njlabs.showjava.utils.logging.Ln;
import org.jf.dexlib2.DexFileFactory;
import org.jf.dexlib2.iface.ClassDef;
import org.jf.dexlib2.iface.DexFile;
import org.jf.dexlib2.immutable.ImmutableDexFile;
import org.objectweb.asm.tree.MethodNode;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Created by Niranjan on 29-05-2015.
 *
 * Optimises an APK's classes.dex (dropping classes from known library
 * packages) and, when the CFR decompiler is selected, converts the
 * optimised dex into a jar before handing off to {@link JavaExtractor}.
 */
@SuppressWarnings({"ResultOfMethodCallIgnored", "ConstantConditions"})
public class JarExtractor extends ProcessServiceHelper {
    // Class-name prefixes (loaded from an asset list) to exclude from
    // decompilation.
    private ArrayList<String> ignoredLibs;

    public JarExtractor(ProcessService processService) {
        // Mirror the service's working state so this helper can run on
        // its own thread.
        this.processService = processService;
        this.UIHandler = processService.UIHandler;
        this.packageFilePath = processService.packageFilePath;
        this.packageName = processService.packageName;
        this.exceptionHandler = processService.exceptionHandler;
        this.sourceOutputDir = processService.sourceOutputDir;
        this.javaSourceOutputDir = processService.javaSourceOutputDir;
        ignoredLibs = new ArrayList<>();
    }

    /**
     * Runs the dex optimisation (and, for CFR, the dex-to-jar step) on a
     * dedicated high-priority thread, then starts the Java extractor.
     */
    public void extract() {
        ThreadGroup group = new ThreadGroup("DEX TO JAR EXTRACTION");
        broadcastStatus("optimise_dex_start");
        Runnable runProcess = new Runnable() {
            @Override
            public void run() {
                loadIgnoredLibs();
                apkToDex();
                if (processService.decompilerToUse.equals("cfr")) {
                    dexToJar();
                }
                startJavaExtractor();
            }
        };
        // Custom stack size: dex processing recurses deeply.
        Thread extractionThread = new Thread(group, runProcess, "DEX TO JAR EXTRACTION", processService.STACK_SIZE);
        extractionThread.setPriority(Thread.MAX_PRIORITY);
        extractionThread.setUncaughtExceptionHandler(exceptionHandler);
        extractionThread.start();
    }

    /**
     * Loads the APK's dex, strips ignored classes and writes the
     * optimised dex into the per-app working directory. Aborts (after a
     * user-facing toast) if the APK cannot be read or the result cannot
     * be written.
     */
    public void apkToDex() {
        DexFile dexFile = null;
        try {
            // API level 19 matches the dexlib2 target used by this app.
            dexFile = DexFileFactory.loadDexFile(packageFilePath, 19);
        } catch (Exception e) {
            broadcastStatus("exit");
            UIHandler.post(new ToastRunnable("The app you selected cannot be decompiled. Please select another app."));
        }
        // Fix: previously execution fell through with a null dexFile and
        // crashed with an NPE on dexFile.getClasses() below.
        if (dexFile == null) {
            return;
        }
        List<ClassDef> classes = new ArrayList<>();
        broadcastStatus("optimising", "");
        for (ClassDef classDef : dexFile.getClasses()) {
            if (!isIgnored(classDef.getType())) {
                final String CurrentClass = classDef.getType();
                broadcastStatus("optimising_class", CurrentClass.replaceAll("Processing ", ""));
                classes.add(classDef);
            }
        }
        broadcastStatus("optimise_dex_finish");
        File PerAppWorkingDirectory = new File(processService.sourceOutputDir);
        PerAppWorkingDirectory.mkdirs();
        Log.d("DEBUGGER", "Prepare Writing");
        broadcastStatus("merging_classes");
        dexFile = new ImmutableDexFile(classes);
        try {
            Log.d("DEBUGGER", "Start Writing");
            DexFileFactory.writeDexFile(PerAppWorkingDirectory + "/optimised_classes.dex", dexFile);
            Log.d("DEBUGGER", "Writing done!");
        } catch (Exception e) {
            broadcastStatus("exit");
            UIHandler.post(new ToastRunnable("The app you selected cannot be decompiled. Please select another app."));
            // Fix: stop here if the optimised dex was not written; the
            // stream redirection below is pointless after a failure.
            return;
        }
        //////
        // Route stderr/stdout through the progress stream so tool output
        // is surfaced as progress updates.
        PrintStream printStream = new PrintStream(new ProgressStream());
        System.setErr(printStream);
        System.setOut(printStream);
        //////
    }

    /**
     * Converts the previously written optimised dex into a jar using
     * dex2jar, then deletes the intermediate dex file.
     */
    public void dexToJar() {
        Log.i("STATUS", "Jar Extraction Started");
        broadcastStatus("dex2jar");
        // DEX 2 JAR CONFIGS
        boolean reuseReg = false; // reuse register while generate java .class file
        boolean topologicalSort1 = false; // same with --topological-sort/-ts
        boolean topologicalSort = false; // sort block by topological, that will generate more readable code
        boolean verbose = true; // show progress
        boolean debugInfo = false; // translate debug info
        boolean printIR = false; // print ir to System.out
        boolean optimizeSynchronized = true; // Optimise-synchronised
        File PerAppWorkingDirectory = new File(sourceOutputDir);
        File file = new File(PerAppWorkingDirectory + "/" + packageName + ".jar");
        File dexFile = new File(PerAppWorkingDirectory + "/optimised_classes.dex");
        if (dexFile.exists() && dexFile.isFile()) {
            DexExceptionHandlerMod dexExceptionHandlerMod = new DexExceptionHandlerMod();
            try {
                DexFileReader reader = new DexFileReader(dexFile);
                Dex2jar dex2jar = Dex2jar.from(reader).reUseReg(reuseReg).topoLogicalSort(topologicalSort || topologicalSort1).skipDebug(!debugInfo)
                        .optimizeSynchronized(optimizeSynchronized).printIR(printIR).verbose(verbose);
                dex2jar.setExceptionHandler(dexExceptionHandlerMod);
                dex2jar.to(file);
            } catch (Exception e) {
                broadcastStatus("exit_process_on_error");
            }
            Log.i("STATUS", "Clearing cache");
            File ClassDex = new File(PerAppWorkingDirectory + "/optimised_classes.dex");
            ClassDex.delete();
        }
    }

    // Hands the extracted jar/dex over to the Java source extractor.
    private void startJavaExtractor() {
        JavaExtractor javaExtractor = new JavaExtractor(processService);
        javaExtractor.extract();
    }

    /**
     * Loads the list of library class-name prefixes to skip, from the
     * full or basic asset list depending on user preference.
     */
    private void loadIgnoredLibs() {
        String ignoredList = (processService.IGNORE_LIBS ? "ignored.list" : "ignored_basic.list");
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new InputStreamReader(processService.getAssets().open(ignoredList)));
            // Fix: the first line was trimmed before the null check
            // (readLine().trim()), which threw an NPE on an empty file.
            String mLine;
            while ((mLine = reader.readLine()) != null) {
                mLine = mLine.trim();
                if (mLine.length() != 0) {
                    ignoredLibs.add(StringUtils.toClassName(mLine));
                }
                Ln.d(mLine);
            }
        } catch (IOException e) {
            Crashlytics.logException(e);
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    Crashlytics.logException(e);
                }
            }
        }
    }

    // True when the class name starts with any ignored library prefix.
    private boolean isIgnored(String className) {
        for (String ignoredClass : ignoredLibs) {
            if (className.startsWith(ignoredClass)) {
                return true;
            }
        }
        return false;
    }

    /** Logs dex2jar translation failures instead of aborting the run. */
    class DexExceptionHandlerMod implements DexExceptionHandler {
        @Override
        public void handleFileException(Exception e) {
            Ln.d("Dex2Jar Exception " + e);
        }
        @Override
        public void handleMethodTranslateException(Method method, IrMethod irMethod, MethodNode methodNode, Exception e) {
            Ln.d("Dex2Jar Exception " + e);
        }
    }
}
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.ui.layout.impl;
import com.intellij.execution.ui.layout.*;
import com.intellij.execution.ui.layout.actions.CloseViewAction;
import com.intellij.execution.ui.layout.actions.MinimizeViewAction;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.DimensionService;
import com.intellij.openapi.util.MutualMap;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentManager;
import com.intellij.ui.tabs.JBTabs;
import com.intellij.ui.tabs.TabInfo;
import com.intellij.ui.tabs.TabsListener;
import com.intellij.ui.tabs.UiDecorator;
import com.intellij.ui.tabs.impl.JBEditorTabs;
import com.intellij.ui.tabs.impl.TabLabel;
import com.intellij.ui.tabs.impl.singleRow.ScrollableSingleRowLayout;
import com.intellij.ui.tabs.impl.singleRow.SingleRowLayout;
import com.intellij.util.SmartList;
import com.intellij.util.containers.HashSet;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.List;
import java.util.Set;
/**
 * A single cell of a runner-layout grid: hosts a set of {@link Content}s
 * as tabs (via {@link JBEditorTabs}), keeps the enclosing grid's
 * splitter proportions, and supports minimizing/restoring and
 * detaching contents. One instance exists per {@link PlaceInGrid}
 * position inside a {@link GridImpl}.
 */
public class GridCellImpl implements GridCell {
  private final GridImpl myContainer;

  // Bidirectional Content <-> TabInfo mapping (ordered).
  private final MutualMap<Content, TabInfo> myContents = new MutualMap<>(true);
  // Contents currently minimized out of this cell's tab set.
  private final Set<Content> myMinimizedContents = new HashSet<>();

  private final JBTabs myTabs;
  private final GridImpl.Placeholder myPlaceholder;
  private final PlaceInGrid myPlaceInGrid;

  private final ViewContextEx myContext;
  private JBPopup myPopup;

  public GridCellImpl(ViewContextEx context, @NotNull GridImpl container, GridImpl.Placeholder placeholder, PlaceInGrid placeInGrid) {
    myContext = context;
    myContainer = container;

    myPlaceInGrid = placeInGrid;
    myPlaceholder = placeholder;
    // Anonymous tabs subclass: custom colors, small labels, scrollable
    // single row, and drag/drop delegated to the top-level RunnerContentUi
    // tabs.
    myTabs = new JBEditorTabs(myContext.getProject(), myContext.getActionManager(), myContext.getFocusManager(), container) {
      {
        //noinspection UseJBColor
        myDefaultPainter.setDefaultTabColor(new Color(0xC6CFDF));
        //noinspection UseJBColor
        myDarkPainter.setDefaultTabColor(new Color(0x424D5F));
      }

      @Override
      public boolean useSmallLabels() {
        return true;
      }

      @Override
      protected SingleRowLayout createSingleRowLayout() {
        return new ScrollableSingleRowLayout(this);
      }

      @Override
      public int tabMSize() {
        return 12;
      }

      @Override
      protected void paintBorder(Graphics2D g2d, ShapeInfo shape, Color borderColor) {
        // Darcula draws its own borders; skip the default painting there.
        if (UIUtil.isUnderDarcula()) {
          return;
        }
        super.paintBorder(g2d, shape, borderColor);
      }

      @Override
      public void processDropOver(TabInfo over, RelativePoint point) {
        ((RunnerContentUi)myContext).myTabs.processDropOver(over, point);
      }

      @Override
      public Image startDropOver(TabInfo tabInfo, RelativePoint point) {
        return ((RunnerContentUi)myContext).myTabs.startDropOver(tabInfo, point);
      }

      @Override
      public void resetDropOver(TabInfo tabInfo) {
        ((RunnerContentUi)myContext).myTabs.resetDropOver(tabInfo);
      }

      @Override
      protected TabLabel createTabLabel(TabInfo info) {
        // Force left-aligned tab labels regardless of caller request.
        return new TabLabel(this, info) {
          @Override
          public void setAlignmentToCenter(boolean toCenter) {
            super.setAlignmentToCenter(false);
          }
        };
      }
    }.setDataProvider(new DataProvider() {
      @Override
      @Nullable
      public Object getData(@NonNls final String dataId) {
        // Expose the targeted content and the view context for actions.
        if (ViewContext.CONTENT_KEY.is(dataId)) {
          TabInfo target = myTabs.getTargetInfo();
          if (target != null) {
            return new Content[]{getContentFor(target)};
          }
        }
        else if (ViewContext.CONTEXT_KEY.is(dataId)) {
          return myContext;
        }

        return null;
      }
    });
    myTabs.getPresentation().setUiDecorator(new UiDecorator() {
      @Override
      @NotNull
      public UiDecoration getDecoration() {
        return new UiDecoration(null, new Insets(1, -1, 1, -1));
      }
    }).setSideComponentVertical(!context.getLayoutSettings().isToolbarHorizontal())
      .setStealthTabMode(true).setFocusCycle(false).setPaintFocus(true)
      .setTabDraggingEnabled(true).setSideComponentOnTabs(false);
    myTabs.addTabMouseListener(new MouseAdapter() {
      @Override
      public void mousePressed(final MouseEvent e) {
        if (UIUtil.isCloseClick(e)) {
          // see RunnerContentUi tabMouseListener as well
          closeOrMinimize(e);
        }
      }
    });
    rebuildPopupGroup();
    myTabs.addListener(new TabsListener.Adapter() {

      @Override
      public void beforeSelectionChanged(TabInfo oldSelection, TabInfo newSelection) {
        // Persist UI state before the selection switch while restoring.
        if (oldSelection != null && myContext.isStateBeingRestored()) {
          saveUiState();
        }
      }

      @Override
      public void selectionChanged(final TabInfo oldSelection, final TabInfo newSelection) {
        updateSelection(myTabs.getComponent().isShowing());

        if (!myTabs.getComponent().isShowing()) return;

        if (newSelection != null) {
          newSelection.stopAlerting();
        }
      }
    });
  }

  // Reinstalls the cell's context-menu popup group.
  public void rebuildPopupGroup() {
    myTabs.setPopupGroup(myContext.getCellPopupGroup(ViewContext.CELL_POPUP_PLACE),
                         ViewContext.CELL_POPUP_PLACE, true);
  }

  public PlaceInGrid getPlaceInGrid() {
    return myPlaceInGrid;
  }

  // Adds a content as a new tab (no-op if already present).
  void add(final Content content) {
    if (myContents.containsKey(content)) return;
    myContents.put(content, null);

    revalidateCell(() -> myTabs.addTab(createTabInfoFor(content)));

    updateSelection(myTabs.getComponent().getRootPane() != null);
  }

  // Removes a content's tab (no-op if absent).
  void remove(Content content) {
    if (!myContents.containsKey(content)) return;

    final TabInfo info = getTabFor(content);
    myContents.remove(content);

    revalidateCell(() -> myTabs.removeTab(info));

    updateSelection(myTabs.getComponent().getRootPane() != null);
  }

  // Runs the tab mutation, tearing down or (re)attaching the tabs
  // component depending on whether any contents remain, then restores
  // splitter proportions and repaints.
  private void revalidateCell(Runnable contentAction) {
    if (myContents.size() == 0) {
      myPlaceholder.removeAll();
      myTabs.removeAllTabs();

      if (myPopup != null) {
        myPopup.cancel();
        myPopup = null;
      }
    }
    else {
      if (myPlaceholder.isNull()) {
        myPlaceholder.setContent(myTabs.getComponent());
      }

      contentAction.run();
    }

    restoreProportions();

    myTabs.getComponent().revalidate();
    myTabs.getComponent().repaint();
  }

  void setHideTabs(boolean hide) {
    myTabs.getPresentation().setHideTabs(hide);
  }

  // Builds the TabInfo for a content and registers it in myContents.
  private TabInfo createTabInfoFor(Content content) {
    final TabInfo tabInfo = updatePresentation(new TabInfo(new ProviderWrapper(content, myContext)), content)
      .setObject(content)
      .setPreferredFocusableComponent(content.getPreferredFocusableComponent())
      .setActionsContextComponent(content.getActionsContextComponent());

    // Re-put to replace the placeholder null mapping inserted by add().
    myContents.remove(content);
    myContents.put(content, tabInfo);

    ActionGroup group = (ActionGroup)myContext.getActionManager().getAction(RunnerContentUi.VIEW_TOOLBAR);
    tabInfo.setTabLabelActions(group, ViewContext.CELL_TOOLBAR_PLACE);
    tabInfo.setDragOutDelegate(((RunnerContentUi)myContext).myDragOutDelegate);
    return tabInfo;
  }

  // Syncs a tab's icon/text/tooltip/actions from its content.
  @Nullable
  private static TabInfo updatePresentation(TabInfo info, Content content) {
    if (info == null) {
      return null;
    }
    return info.
      setIcon(content.getIcon()).
      setText(content.getDisplayName()).
      setTooltipText(content.getDescription()).
      setActionsContextComponent(content.getActionsContextComponent()).
      setActions(content.getActions(), content.getPlace());
  }

  // Selects the tab for the given content, if it has one.
  public ActionCallback select(final Content content, final boolean requestFocus) {
    final TabInfo tabInfo = myContents.getValue(content);
    return tabInfo != null ? myTabs.select(tabInfo, requestFocus) : ActionCallback.DONE;
  }

  // Starts/stops the alert animation on a non-selected tab; minimized
  // contents never alert.
  public void processAlert(final Content content, final boolean activate) {
    if (myMinimizedContents.contains(content)) return;

    TabInfo tab = getTabFor(content);
    if (tab == null) return;
    if (myTabs.getSelectedInfo() != tab) {
      if (activate) {
        tab.fireAlert();
      }
      else {
        tab.stopAlerting();
      }
    }
  }

  public void updateTabPresentation(Content content) {
    updatePresentation(myTabs.findInfo(content), content);
  }

  public boolean isMinimized(Content content) {
    return myMinimizedContents.contains(content);
  }

  public boolean contains(Component c) {
    return myTabs.getComponent().isAncestorOf(c);
  }

  /**
   * Wraps a content's component so it can answer data requests for the
   * content itself and the owning view context.
   */
  private static class ProviderWrapper extends NonOpaquePanel implements DataProvider {
    Content myContent;
    ViewContext myContext;

    private ProviderWrapper(final Content content, final ViewContext context) {
      myContent = content;
      myContext = context;
      setLayout(new BorderLayout());
      add(content.getComponent(), BorderLayout.CENTER);
    }

    @Override
    @Nullable
    public Object getData(@NonNls final String dataId) {
      if (ViewContext.CONTENT_KEY.is(dataId)) {
        return new Content[]{myContent};
      }
      else if (ViewContext.CONTEXT_KEY.is(dataId)) {
        return myContext;
      }
      return null;
    }
  }

  @Nullable
  TabInfo getTabFor(Content content) {
    return myContents.getValue(content);
  }

  @NotNull
  private Content getContentFor(TabInfo tab) {
    return myContents.getKey(tab);
  }

  public void setToolbarHorizontal(final boolean horizontal) {
    myTabs.getPresentation().setSideComponentVertical(!horizontal);
  }

  /**
   * Restores proportions, re-minimizes contents flagged as minimized in
   * the saved view state, and re-detaches the cell to its saved window
   * when needed.
   * NOTE(review): 'window' keeps only the LAST content's saved window —
   * presumably all contents of a cell share one window; confirm.
   */
  public ActionCallback restoreLastUiState() {
    final ActionCallback result = new ActionCallback();

    restoreProportions();

    final Content[] contents = getContents();
    final List<Content> toMinimize = new SmartList<>();
    int window = 0;
    for (final Content each : contents) {
      final View view = myContainer.getStateFor(each);
      if (view.isMinimizedInGrid()) {
        toMinimize.add(each);
      }
      window = view.getWindow();
    }
    minimize(toMinimize.toArray(new Content[toMinimize.size()]));
    final Tab tab = myContainer.getTab();
    final boolean detached = (tab != null && tab.isDetached(myPlaceInGrid)) || window != myContext.getWindow();
    if (detached && contents.length > 0) {
      if (tab != null) {
        tab.setDetached(myPlaceInGrid, false);
      }
      myContext.detachTo(window, this).notifyWhenDone(result);
    }
    else {
      result.setDone();
    }

    return result;
  }

  Content[] getContents() {
    return myContents.getKeys().toArray(new Content[myContents.size()]);
  }

  @Override
  public int getContentCount() {
    return myContents.size();
  }

  /**
   * Persists splitter proportions, per-content view state (active and
   * minimized), the cell's size, and — for detached windows — its
   * on-screen location.
   */
  public void saveUiState() {
    saveProportions();

    for (Content each : myContents.getKeys()) {
      saveState(each, false);
    }

    for (Content each : myMinimizedContents) {
      saveState(each, true);
    }

    final DimensionService service = DimensionService.getInstance();
    final Dimension size = myContext.getContentManager().getComponent().getSize();
    service.setSize(getDimensionKey(), size, myContext.getProject());
    if (myContext.getWindow() != 0) {
      final Window frame = SwingUtilities.getWindowAncestor(myPlaceholder);
      if (frame != null) {
        service.setLocation(getDimensionKey(), frame.getLocationOnScreen());
      }
    }
  }

  public void saveProportions() {
    myContainer.saveSplitterProportions(myPlaceInGrid);
  }

  // Writes one content's view state; a lone minimized content clears its
  // tab index so the tab can collapse.
  private void saveState(Content content, boolean minimized) {
    View state = myContext.getStateFor(content);
    state.setMinimizedInGrid(minimized);
    state.setPlaceInGrid(myPlaceInGrid);
    final List<Content> contents = myContainer.getContents();
    final Tab tab = myContainer.getTabIndex();
    if (minimized && contents.size() == 1 && contents.get(0).equals(content)) {
      state.setTabIndex(-1);
      if (tab instanceof TabImpl) {
        ((TabImpl)tab).setIndex(-1);
      }
    }
    state.assignTab(tab);
    state.setWindow(myContext.getWindow());
  }

  public void restoreProportions() {
    myContainer.restoreLastSplitterProportions(myPlaceInGrid);
  }

  // Syncs the content manager's selection with the tabs' selection;
  // minimized contents are always deselected.
  public void updateSelection(final boolean isShowing) {
    ContentManager contentManager = myContext.getContentManager();
    if (contentManager.isDisposed()) return;

    for (Content each : myContents.getKeys()) {
      final TabInfo eachTab = getTabFor(each);
      boolean isSelected = eachTab != null && myTabs.getSelectedInfo() == eachTab;
      if (isSelected && isShowing) {
        contentManager.addSelectedContent(each);
      }
      else {
        contentManager.removeFromSelection(each);
      }
    }

    for (Content each : myMinimizedContents) {
      contentManager.removeFromSelection(each);
    }
  }

  // Minimizes the given contents out of this cell, registering a restore
  // callback with the container for each.
  public void minimize(Content[] contents) {
    if (contents.length == 0) return;
    myContext.saveUiState();
    for (final Content each : contents) {
      myMinimizedContents.add(each);
      remove(each);
      saveState(each, true);
      boolean isShowing = myTabs.getComponent().getRootPane() != null;
      myContainer.minimize(each, new CellTransform.Restore() {
        @Override
        public ActionCallback restoreInGrid() {
          return restore(each);
        }
      });
      updateSelection(isShowing);
    }
  }

  @Nullable
  public Point getLocation() {
    return DimensionService.getInstance().getLocation(getDimensionKey(), myContext.getProject());
  }

  @Nullable
  public Dimension getSize() {
    return DimensionService.getInstance().getSize(getDimensionKey(), myContext.getProject());
  }

  // Per-tab, per-position key for DimensionService persistence.
  private String getDimensionKey() {
    return "GridCell.Tab." + myContainer.getTab().getIndex() + "." + myPlaceInGrid.name();
  }

  public boolean isValidForCalculateProportions() {
    return getContentCount() > 0;
  }

  @Override
  public void minimize(Content content) {
    minimize(new Content[]{content});
  }

  // Ctrl/middle-click handling: close the clicked tab's content when the
  // close action allows it, otherwise minimize it.
  public void closeOrMinimize(MouseEvent e) {
    TabInfo tabInfo = myTabs.findInfo(e);
    if (tabInfo == null) return;

    Content content = getContentFor(tabInfo);
    if (CloseViewAction.isEnabled(new Content[]{content})) {
      CloseViewAction.perform(myContext, content);
    }
    else if (MinimizeViewAction.isEnabled(myContext, getContents(), ViewContext.CELL_TOOLBAR_PLACE)) {
      minimize(content);
    }
  }

  // Marks a content as no longer minimized (actual re-adding is done by
  // the container's restore flow).
  ActionCallback restore(Content content) {
    myMinimizedContents.remove(content);
    return ActionCallback.DONE;
  }
}
|
|
package minieiffel.semantics;
import java.io.StringReader;
import java.util.Collections;
import java.util.List;
import junit.framework.TestCase;
import minieiffel.Lexer;
import minieiffel.Parser;
import minieiffel.Source;
import minieiffel.TestCaseUtil;
import minieiffel.Token;
import minieiffel.Token.TokenType;
import minieiffel.ast.MethodAST;
import minieiffel.ast.ParamDeclAST;
import minieiffel.ast.VariableDeclAST;
import org.easymock.MockControl;
/**
 * Unit tests for {@code SignatureResolver}: each test feeds a Mini-Eiffel
 * source snippet through the lexer/parser, resolves class signatures, and
 * checks the resulting types, visibilities, and reported semantic errors.
 * Errors are verified through a strict EasyMock {@code SemanticAnalyzer}
 * mock, so the exact number and order of addError calls matters.
 */
public class SignatureResolverTestCase extends TestCase {
private static final List<VariableDeclAST> EMPTY_VARS = Collections.emptyList();
private static final List<MethodAST> EMPTY_METHODS = Collections.emptyList();
private SignatureResolver resolver;
private List<Signature> signatures;
private SemanticAnalyzer analyzerMock;
private MockControl analyzerMockControl;
// Fresh strict mock per test: call order and count are both verified.
protected void setUp() {
analyzerMockControl = MockControl.createStrictControl(SemanticAnalyzer.class);
analyzerMock = (SemanticAnalyzer)analyzerMockControl.getMock();
}
// Parses the given source and resolves signatures into the 'signatures' field.
private void resolve(String code) {
Source source = new Source(new StringReader(code));
Parser parser = new Parser(new Lexer(source));
resolver = new SignatureResolver(analyzerMock, parser.handleProgram());
signatures = resolver.resolveSignatures();
}
// A class with no features yields an empty signature and no errors.
public void testEmptyClass() {
analyzerMockControl.replay();
resolve("class Empty\nend");
analyzerMockControl.verify();
Signature sig = signatures.get(0);
assertEquals(new Type("Empty"), sig.getClassAST().getType());
assertEquals(EMPTY_VARS, sig.getVariables());
assertEquals(EMPTY_METHODS, sig.getMethods());
}
// An empty 'feature' section is equivalent to no features at all.
public void testEmptyFeature() {
analyzerMockControl.replay();
resolve("class EmptyFeature\nfeature\nend");
analyzerMockControl.verify();
Signature sig = signatures.get(0);
assertEquals(new Type("EmptyFeature"), sig.getClassAST().getType());
assertEquals(EMPTY_VARS, sig.getVariables());
assertEquals(EMPTY_METHODS, sig.getMethods());
}
// 'feature' without a visibility clause assigns DEFAULT_VISIBILITY.
public void testDefaultVisibility() {
analyzerMockControl.replay();
resolve(
"class Test\n" +
"feature\n" +
" a : INTEGER\n" +
" x : BOOLEAN is\n" +
"end"
);
analyzerMockControl.verify();
assertEquals(1, signatures.size());
Signature sig = signatures.get(0);
assertEquals(new Type("Test"), sig.getClassAST().getType());
assertEquals(1, sig.getVariables().size());
VariableDeclAST a = sig.getVariables().get(0);
assertEquals(Type.INTEGER, a.getType());
assertEquals(SignatureResolver.DEFAULT_VISIBILITY, a.getVisibility());
assertEquals(1, sig.getMethods().size());
MethodAST x = sig.getMethods().get(0);
assertEquals(Type.BOOLEAN, x.getReturnType());
assertEquals(SignatureResolver.DEFAULT_VISIBILITY, x.getVisibility());
}
// 'feature {}' yields the distinct EMPTY_VISIBILITY marker.
public void testEmptyVisibility() {
analyzerMockControl.replay();
resolve(
"class EmptyVisibilityTest\n" +
"feature {}\n" +
" h : CHARACTER\n" +
"end"
);
analyzerMockControl.verify();
assertEquals(1, signatures.size());
Signature sig = signatures.get(0);
assertEquals(new Type("EmptyVisibilityTest"), sig.getClassAST().getType());
assertEquals(1, sig.getVariables().size());
VariableDeclAST h = sig.getVariables().get(0);
assertEquals(Type.CHARACTER, h.getType());
assertEquals(SignatureResolver.EMPTY_VISIBILITY, h.getVisibility());
}
// Each 'feature {...}' clause applies its own visibility list to the
// declarations that follow it, until the next clause.
public void testCustomVisibility() {
analyzerMockControl.replay();
resolve(
"class VisibilityTest\n" +
"feature {VisibilityTest, INTEGER}\n" +
" z : REAL\n" +
" q : VisibilityTest is\n" +
"feature {REAL}\n" +
" p : CHARACTER is\n" +
"end"
);
analyzerMockControl.verify();
Type self = new Type("VisibilityTest");
assertEquals(1, signatures.size());
Signature sig = signatures.get(0);
assertEquals(self, sig.getClassAST().getType());
assertEquals(1, sig.getVariables().size());
VariableDeclAST z = sig.getVariables().get(0);
assertEquals(Type.REAL, z.getType());
TestCaseUtil.assertListContents(z.getVisibility(), self, Type.INTEGER);
assertEquals(2, sig.getMethods().size());
MethodAST q = sig.getMethods().get(0);
assertEquals(self, q.getReturnType());
TestCaseUtil.assertListContents(q.getVisibility(), self, Type.INTEGER);
MethodAST p = sig.getMethods().get(1);
assertEquals(Type.CHARACTER, p.getReturnType());
TestCaseUtil.assertListContents(p.getVisibility(), Type.REAL);
}
// Methods without an explicit return type resolve to VOID; parameter
// declarations resolve to their declared types.
public void testMethodParameterTypes() {
analyzerMockControl.replay();
resolve(
"class MethodParams\n" +
"feature {}\n" +
" myPrivateMethod(a:INTEGER; b:REAL) is\n" +
"end"
);
analyzerMockControl.verify();
Signature sig = signatures.get(0);
MethodAST myPrivateMethod = sig.getMethods().get(0);
assertEquals(Type.VOID, myPrivateMethod.getReturnType());
TestCaseUtil.assertListContents(myPrivateMethod.getVisibility(), Type.NONE);
assertEquals(2, myPrivateMethod.getParamDecls().size());
ParamDeclAST a = myPrivateMethod.getParamDecls().get(0);
assertEquals(Type.INTEGER, a.getType());
ParamDeclAST b = myPrivateMethod.getParamDecls().get(1);
assertEquals(Type.REAL, b.getType());
}
// VOID is rejected in all four referencing positions (visibility list,
// variable type, parameter type, return type); setVoidCallable(4) pins that.
public void testVoidNotAllowedAsParamOrVisibilityType() {
Token voidToken = new Token(TokenType.IDENTIFIER, "VOID");
analyzerMock.addError("\"VOID\" is a special type that can't be referenced in a source file", voidToken);
analyzerMockControl.setVoidCallable(4);
analyzerMockControl.replay();
resolve(
"class VoidInvalidityTest\n" +
"feature {VOID}\n" +
" a:VOID\n" +
" b(c:VOID)is\n" +
" d:VOID is\n" +
"end"
);
analyzerMockControl.verify();
}
// Two classes may reference each other's types in both directions.
public void testCrossReferencing() {
analyzerMockControl.replay();
resolve(
"class A\n" +
"feature {B}\n" +
" aVar:B\n" +
" aMethod(x:B):B is\n" +
"end\n" +
"class B\n" +
"feature {A}\n" +
" bVar:A\n" +
" bMethod(y:A):A is\n" +
"end\n"
);
analyzerMockControl.verify();
Type typeA = new Type("A");
Type typeB = new Type("B");
assertEquals(2, signatures.size());
Signature A = signatures.get(0);
assertEquals(typeA, A.getClassAST().getType());
assertEquals(1, A.getVariables().size());
VariableDeclAST aVar = A.getVariables().get(0);
assertEquals(typeB, aVar.getType());
TestCaseUtil.assertListContents(aVar.getVisibility(), typeB);
assertEquals(1, A.getMethods().size());
MethodAST aMethod = A.getMethods().get(0);
assertEquals(typeB, aMethod.getReturnType());
TestCaseUtil.assertListContents(aMethod.getVisibility(), typeB);
assertEquals(typeB, aMethod.getParamDecls().get(0).getType());
Signature B = signatures.get(1);
assertEquals(typeB, B.getClassAST().getType());
assertEquals(1, B.getVariables().size());
VariableDeclAST bVar = B.getVariables().get(0);
assertEquals(typeA, bVar.getType());
TestCaseUtil.assertListContents(bVar.getVisibility(), typeA);
assertEquals(1, B.getMethods().size());
MethodAST bMethod = B.getMethods().get(0);
assertEquals(typeA, bMethod.getReturnType());
TestCaseUtil.assertListContents(bMethod.getVisibility(), typeA);
assertEquals(typeA, bMethod.getParamDecls().get(0).getType());
}
// Redefining a built-in type produces an error and no signature at all.
public void testRedefiningBuiltinClasses() {
analyzerMock.addError("Can't redefine built-in type \"INTEGER\"", new Token(TokenType.IDENTIFIER, "INTEGER"));
analyzerMockControl.replay();
resolve("class INTEGER\nend\n");
analyzerMockControl.verify();
assertEquals(0, signatures.size());
}
// On duplicate class definitions the first definition wins; the second is
// reported and discarded (hence empty variables on the surviving signature).
public void testDefiningClassTwice() {
analyzerMock.addError("Class \"A\" already defined", new Token(TokenType.IDENTIFIER, "A"));
analyzerMockControl.replay();
resolve(
"class A\n" +
"end\n" +
"class A\n" +
"feature\n" +
" b:INTEGER\n" +
"end"
);
analyzerMockControl.verify();
assertEquals(1, signatures.size());
assertTrue(signatures.get(0).getVariables().isEmpty());
}
// An unknown class is reported once per referencing position (4 here).
public void testReferencingNonExistingClasses() {
Token B = new Token(TokenType.IDENTIFIER, "B");
analyzerMock.addError("Can't find class \"B\"", B);
analyzerMockControl.setVoidCallable(4);
analyzerMockControl.replay();
resolve(
"class A\n" +
"feature {B}\n" +
" bVar:B\n" +
" bMethod(bParam:B) : B is\n" +
"end"
);
analyzerMockControl.verify();
}
// A constant whose literal doesn't match the declared type is reported and
// dropped; only the valid constant declaration remains in the signature.
public void testConstantValues() {
Token PI = new Token(TokenType.REAL_LITERAL, "3.14");
analyzerMock.addError("\"3.14\" is an invalid constant value for type INTEGER", PI);
analyzerMockControl.replay();
resolve(
"class ConstantValueTest\n" +
"feature\n" +
" a:INTEGER is 3.14\n" +
" b:INTEGER is 3\n" +
"end"
);
analyzerMockControl.verify();
assertEquals(1, signatures.size());
Signature sig = signatures.get(0);
assertEquals(1, sig.getVariables().size());
VariableDeclAST b = sig.getVariables().get(0);
assertEquals(Type.INTEGER, b.getType());
}
}
|
|
package org.swiften.javautilities.functional;
import io.reactivex.functions.BiFunction;
import io.reactivex.functions.Function;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Created by haipham on 11/7/17.
*/
/**
 * A container that holds either a successfully computed value
 * ({@link Success}) or the {@link Exception} that prevented its computation
 * ({@link Failure}). Instances are immutable; all transformations return a
 * new {@link Try}.
 * @param <Val> Generics parameter.
 */
public abstract class Try<Val> implements TryType<Val> {
    /**
     * Get {@link Success}.
     * @param value {@link Val} instance.
     * @param <Val> Generics parameter.
     * @return {@link Try} instance.
     */
    @NotNull
    public static <Val> Try<Val> success(@NotNull Val value) {
        return new Success<Val>(value);
    }

    /**
     * Get {@link Failure}.
     * @param e {@link Exception} instance.
     * @param <Val> Generics parameter.
     * @return {@link Try} instance.
     */
    @NotNull
    public static <Val> Try<Val> failure(@NotNull Exception e) {
        return new Failure<Val>(e);
    }

    /**
     * Get {@link Failure}. Non-{@link Exception} throwables are wrapped in an
     * {@link Exception} so that {@link Failure} always carries an Exception.
     * @param t {@link Throwable} instance.
     * @param <Val> Generics parameter.
     * @return {@link Try} instance.
     * @see #failure(Exception)
     */
    @NotNull
    public static <Val> Try<Val> failure(@NotNull Throwable t) {
        if (t instanceof Exception) {
            return failure((Exception)t);
        } else {
            return failure(new Exception(t));
        }
    }

    /**
     * Get {@link Failure} from an error message.
     * @param error {@link String} value.
     * @param <Val> Generics parameter.
     * @return {@link Try} instance.
     * @see #failure(Exception)
     */
    @NotNull
    public static <Val> Try<Val> failure(@NotNull String error) {
        return failure(new Exception(error));
    }

    /**
     * Get {@link Try} based on some {@link Supplier}: a throwing supplier
     * becomes a {@link Failure}, otherwise its value becomes a {@link Success}.
     * @param supplier {@link Supplier} instance.
     * @param <Val> Generics parameter.
     * @return {@link Try} instance.
     */
    @NotNull
    public static <Val> Try<Val> from(@NotNull Supplier<Val> supplier) {
        try {
            return success(supplier.supply());
        } catch (Exception e) {
            return failure(e);
        }
    }

    // Package-private: only Success/Failure may subclass Try.
    Try() {}

    /**
     * Override this method to provide default implementation.
     * @return {@link Try} instance.
     * @see TryConvertibleType#asTry()
     */
    @NotNull
    @Override
    public Try<Val> asTry() {
        return this;
    }

    /**
     * Override this method to provide default implementation.
     * @return {@link Boolean} value.
     */
    @Override
    public boolean isSuccess() {
        return this instanceof Success;
    }

    /**
     * Override this method to provide default implementation.
     * @return {@link Boolean} value.
     */
    @Override
    public boolean isFailure() {
        return this instanceof Failure;
    }

    /**
     * Override this method to provide default implementation. Combines two
     * {@link Try} values; if either side is a failure the result is a failure.
     * @param try2 {@link TryConvertibleType} instance.
     * @param transform Transform {@link BiFunction} from {@link Val} and
     * {@link Val2} to {@link Val3}.
     * @param <Val2> Generics parameter.
     * @param <Val3> Generics parameter.
     * @return {@link Try} instance.
     */
    @NotNull
    @Override
    public <Val2,Val3> Try<Val3> zipWith(
        @NotNull TryConvertibleType<Val2> try2,
        @NotNull BiFunction<? super Val,? super Val2,? extends Val3> transform
    ) {
        try {
            Val a = getOrThrow();
            Val2 b = try2.asTry().getOrThrow();
            return Try.success(transform.apply(a, b));
        } catch (Exception e) {
            return Try.failure(e);
        }
    }

    /**
     * Represent success {@link Try}.
     * @param <Val> Generics parameter.
     */
    private static final class Success<Val> extends Try<Val> {
        @NotNull private final Val VALUE;

        Success(@NotNull Val value) {
            VALUE = value;
        }

        @NotNull
        @Override
        public String toString() {
            return String.format("Success: %s", VALUE);
        }

        /**
         * Override this method to provide default implementation.
         * @return {@link Option} instance.
         */
        @NotNull
        @Override
        public Option<Val> asOption() {
            return Option.some(VALUE);
        }

        /**
         * Override this method to provide default implementation.
         * @return {@link Val} instance.
         * @throws Exception If failure.
         */
        @NotNull
        @Override
        public Val getOrThrow() throws Exception {
            return VALUE;
        }

        /**
         * Override this method to provide default implementation.
         * @return {@link Val} instance.
         */
        @Nullable
        @Override
        public Val get() {
            return VALUE;
        }

        /**
         * Override this method to provide default implementation.
         * @return {@link Exception} instance, always {@code null} for success.
         */
        @Nullable
        @Override
        public Exception getError() {
            return null;
        }

        /**
         * Override this method to provide default implementation.
         * @param e {@link Exception} instance; ignored since the value exists.
         * @return {@link Val} instance.
         * @throws Exception If failure.
         */
        @NotNull
        @Override
        public Val getOrThrow(@NotNull Exception e) throws Exception {
            return VALUE;
        }

        /**
         * Override this method to provide default implementation.
         * @param value Fallback {@link Val}; ignored since the value exists.
         * @return {@link Val} instance.
         */
        @NotNull
        @Override
        public Val getOrElse(@NotNull Val value) {
            return VALUE;
        }

        /**
         * Override this method to provide default implementation. A throwing
         * transform converts this success into a failure.
         * @param transform Transform {@link Function} from {@link Val} to {@link Val1}.
         * @param <Val1> Generics parameter.
         * @return {@link Try} instance.
         */
        @NotNull
        @Override
        public <Val1> Try<Val1> map(@NotNull Function<? super Val, ? extends Val1> transform) {
            try {
                return new Success<Val1>(transform.apply(VALUE));
            } catch (Exception e) {
                return new Failure<Val1>(e);
            }
        }

        /**
         * Override this method to provide default implementation. Applies a
         * wrapped function to the wrapped value (applicative style).
         * @param transform {@link TryConvertibleType} of transform {@link Function}.
         * @param <Val1> Generics parameter.
         * @return {@link Try} instance.
         */
        @NotNull
        @Override
        public <Val1> Try<Val1> apply(@NotNull TryConvertibleType<Function<? super Val, ? extends Val1>> transform) {
            return transform.asTry().flatMap(new Function<Function<? super Val, ? extends Val1>, TryConvertibleType<Val1>>() {
                @NotNull
                @Override
                public TryConvertibleType<Val1> apply(@NotNull final Function<? super Val, ? extends Val1> FUNCTION) throws Exception {
                    return Success.this.map(new Function<Val, Val1>() {
                        @NotNull
                        @Override
                        public Val1 apply(@NotNull Val val) throws Exception {
                            return FUNCTION.apply(val);
                        }
                    });
                }
            });
        }

        /**
         * Override this method to provide default implementation.
         * @param transform Transform {@link Function} from {@link Val} to {@link Try}.
         * @param <Val1> Generics parameter.
         * @return {@link Try} instance.
         */
        @NotNull
        @Override
        public <Val1> Try<Val1> flatMap(@NotNull Function<? super Val, ? extends TryConvertibleType<Val1>> transform) {
            try {
                return transform.apply(getOrThrow()).asTry();
            } catch (Exception e) {
                return new Failure<Val1>(e);
            }
        }
    }

    /**
     * Represent failure {@link Try}.
     * @param <Val> Generics parameter.
     */
    private static final class Failure<Val> extends Try<Val> {
        @NotNull private final Exception ERROR;

        Failure(@NotNull Exception e) {
            ERROR = e;
        }

        // FIX: was missing @NotNull/@Override, inconsistent with Success#toString.
        @NotNull
        @Override
        public String toString() {
            return String.format("Failure: %s", ERROR);
        }

        /**
         * Override this method to provide default implementation.
         * @return {@link Option} instance, always empty for failure.
         */
        @NotNull
        @Override
        public Option<Val> asOption() {
            return Option.nothing();
        }

        /**
         * Override this method to provide default implementation.
         * @return {@link Val} instance.
         * @throws Exception If failure.
         */
        @NotNull
        @Override
        public Val getOrThrow() throws Exception {
            throw ERROR;
        }

        /**
         * Override this method to provide default implementation.
         * @return {@link Val} instance, always {@code null} for failure.
         */
        @Nullable
        @Override
        public Val get() {
            return null;
        }

        /**
         * Override this method to provide default implementation.
         * @return {@link Exception} instance.
         */
        @Nullable
        @Override
        public Exception getError() {
            return ERROR;
        }

        /**
         * Override this method to provide default implementation. Note that
         * the supplied exception is thrown, not the stored one.
         * @param e {@link Exception} instance to throw.
         * @return {@link Val} instance.
         * @throws Exception If failure.
         */
        @NotNull
        @Override
        public Val getOrThrow(@NotNull Exception e) throws Exception {
            throw e;
        }

        /**
         * Override this method to provide default implementation.
         * @param value Fallback {@link Val} returned in place of the missing value.
         * @return {@link Val} instance.
         */
        @NotNull
        @Override
        public Val getOrElse(@NotNull Val value) {
            return value;
        }

        /**
         * Override this method to provide default implementation. Failures
         * propagate unchanged through map.
         * @param transform Transform {@link Function} from {@link Val} to {@link Val1}.
         * @param <Val1> Generics parameter.
         * @return {@link Try} instance.
         */
        @NotNull
        @Override
        public <Val1> Try<Val1> map(@NotNull Function<? super Val, ? extends Val1> transform) {
            return new Failure<Val1>(ERROR);
        }

        /**
         * Override this method to provide default implementation. Failures
         * propagate unchanged through apply.
         * @param transform {@link TryConvertibleType} of transform {@link Function}.
         * @param <Val1> Generics parameter.
         * @return {@link Try} instance.
         */
        @NotNull
        @Override
        public <Val1> Try<Val1> apply(@NotNull TryConvertibleType<Function<? super Val, ? extends Val1>> transform) {
            return new Failure<Val1>(ERROR);
        }

        /**
         * Override this method to provide default implementation. Failures
         * propagate unchanged through flatMap.
         * @param transform Transform {@link Function} from {@link Val} to {@link Try}.
         * @param <Val1> Generics parameter.
         * @return {@link Try} instance.
         */
        @NotNull
        @Override
        public <Val1> Try<Val1> flatMap(@NotNull Function<? super Val, ? extends TryConvertibleType<Val1>> transform) {
            return new Failure<Val1>(ERROR);
        }
    }
}
|
|
/**
* MVEL 2.0
* Copyright (C) 2007 The Codehaus
* Mike Brock, Dhanji Prasanna, John Graham, Mark Proctor
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mvel2;
import org.mvel2.ast.Proto;
import org.mvel2.compiler.AbstractParser;
import org.mvel2.integration.Interceptor;
import org.mvel2.util.MethodStub;
import org.mvel2.util.PropertyTools;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import static java.lang.Thread.currentThread;
/**
 * The reusable parser configuration object.
*/
public class ParserConfiguration implements Serializable {
// Class/MethodStub/Proto imports keyed by simple name.
protected Map<String, Object> imports;
// Packages searched for dynamic (wildcard-style) class resolution.
protected HashSet<String> packageImports;
protected Map<String, Interceptor> interceptors;
// transient: a ClassLoader is not serializable; lazily restored in getClassLoader().
protected transient ClassLoader classLoader = currentThread().getContextClassLoader();
public ParserConfiguration() {
}
public ParserConfiguration(Map<String, Object> imports, Map<String, Interceptor> interceptors) {
addAllImports(imports);
this.interceptors = interceptors;
}
public ParserConfiguration(Map<String, Object> imports, HashSet<String> packageImports, Map<String, Interceptor> interceptors) {
addAllImports(imports);
this.packageImports = packageImports;
this.interceptors = interceptors;
}
public HashSet<String> getPackageImports() {
return packageImports;
}
public void setPackageImports(HashSet<String> packageImports) {
this.packageImports = packageImports;
}
// Resolves a name to an imported Class, falling back to parser literals;
// returns null when the name maps to something that is not a Class.
public Class getImport(String name) {
if (imports != null && imports.containsKey(name) && imports.get(name) instanceof Class) {
return (Class) imports.get(name);
}
return (Class) (AbstractParser.LITERALS.get(name) instanceof Class ? AbstractParser.LITERALS.get(name) : null);
}
// NOTE: casts blindly to MethodStub; callers are expected to check
// hasImport/type beforehand.
public MethodStub getStaticImport(String name) {
return imports != null ? (MethodStub) imports.get(name) : null;
}
public Object getStaticOrClassImport(String name) {
return (imports != null && imports.containsKey(name) ? imports.get(name) : AbstractParser.LITERALS.get(name));
}
public void addPackageImport(String packageName) {
if (packageImports == null) packageImports = new LinkedHashSet<String>();
packageImports.add(packageName);
}
// Copies the given imports, wrapping raw Methods as MethodStubs so all
// static-method imports have a uniform representation.
public void addAllImports(Map<String, Object> imports) {
if (imports == null) return;
if (this.imports == null) this.imports = new LinkedHashMap<String, Object>();
Object o;
for (Map.Entry<String, Object> entry : imports.entrySet()) {
if ((o = entry.getValue()) instanceof Method) {
this.imports.put(entry.getKey(), new MethodStub((Method) o));
}
else {
this.imports.put(entry.getKey(), o);
}
}
}
// Replaces the import map wholesale, with no Method->MethodStub conversion.
public void setAllImports(Map<String, Object> imports) {
this.imports = imports;
}
// Tries to load className from each imported package; registers the class
// as a regular import on a unique hit. Throws when the name is ambiguous
// across packages. Note: this mutates 'imports' even though it is reached
// from the query method hasImport().
private boolean checkForDynamicImport(String className) {
if (packageImports == null) return false;
int found = 0;
Class cls = null;
for (String pkg : packageImports) {
try {
cls = Class.forName(pkg + "." + className, true, classLoader);
found++;
}
catch (ClassNotFoundException e) {
// do nothing.
}
catch (NoClassDefFoundError e) {
if (PropertyTools.contains(e.getMessage(), "wrong name")) {
// do nothing. this is a weirdness in the jvm.
// see MVEL-43
}
else {
throw e;
}
}
}
if (found > 1) {
throw new RuntimeException("ambiguous class name: " + className);
}
else if (found == 1) {
addImport(className, cls);
return true;
}
else {
return false;
}
}
// True when the name is an explicit import, a non-reserved parser literal,
// or resolvable via package imports (which registers it as a side effect).
public boolean hasImport(String name) {
return (imports != null && imports.containsKey(name)) ||
(!"this".equals(name) && !"self".equals(name) && !"empty".equals(name) && !"null".equals(name) &&
!"nil".equals(name) && !"true".equals(name) && !"false".equals(name)
&& AbstractParser.LITERALS.containsKey(name))
|| checkForDynamicImport(name);
}
public void addImport(Class cls) {
addImport(cls.getSimpleName(), cls);
}
// NOTE(review): addImport* lazily create a ConcurrentHashMap while
// addAllImports creates a LinkedHashMap — presumably intentional (dynamic
// imports may race), but worth confirming.
public void addImport(String name, Class cls) {
if (this.imports == null) this.imports = new ConcurrentHashMap<String, Object>();
this.imports.put(name, cls);
}
public void addImport(String name, Proto proto) {
if (this.imports == null) this.imports = new ConcurrentHashMap<String, Object>();
this.imports.put(name, proto);
}
public void addImport(String name, Method method) {
addImport(name, new MethodStub(method));
}
public void addImport(String name, MethodStub method) {
if (this.imports == null) this.imports = new ConcurrentHashMap<String, Object>();
this.imports.put(name, method);
}
public Map<String, Interceptor> getInterceptors() {
return interceptors;
}
public void setInterceptors(Map<String, Interceptor> interceptors) {
this.interceptors = interceptors;
}
public Map<String, Object> getImports() {
return imports;
}
// Validating setter: routes each entry through the typed addImport overloads
// and rejects unsupported value types.
public void setImports(Map<String, Object> imports) {
if (imports == null) return;
Object val;
for (Map.Entry<String, Object> entry : imports.entrySet()) {
if ((val = entry.getValue()) instanceof Class) {
addImport(entry.getKey(), (Class) val);
}
else if (val instanceof Method) {
addImport(entry.getKey(), (Method) val);
}
else if (val instanceof MethodStub) {
addImport(entry.getKey(), (MethodStub) val);
}
else if (val instanceof Proto) {
addImport(entry.getKey(), (Proto) entry.getValue());
}
else {
throw new RuntimeException("invalid element in imports map: " + entry.getKey() + " (" + val + ")");
}
}
}
public boolean hasImports() {
return (imports != null && imports.size() != 0) || (packageImports != null && packageImports.size() != 0);
}
// Lazily re-acquires the context ClassLoader after deserialization.
public ClassLoader getClassLoader() {
return classLoader == null ? classLoader = Thread.currentThread().getContextClassLoader() : classLoader;
}
public void setClassLoader(ClassLoader classLoader) {
this.classLoader = classLoader;
}
// NOTE(review): this HashMap overload assigns directly, bypassing the
// validation/conversion done by setImports(Map). A HashMap argument will
// bind here rather than to the validating overload — confirm this is
// intentional before relying on it.
public void setImports(HashMap<String, Object> imports) {
this.imports = imports;
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery.zen.membership;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.EmptyTransportResponseHandler;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
/**
*
*/
/**
 * Handles zen-discovery membership traffic: join, join-validation and leave
 * requests between nodes, delegating cluster-membership decisions to the
 * supplied {@link MembershipListener}.
 */
public class MembershipAction extends AbstractComponent {
public static final String DISCOVERY_JOIN_ACTION_NAME = "internal:discovery/zen/join";
public static final String DISCOVERY_JOIN_VALIDATE_ACTION_NAME = "internal:discovery/zen/join/validate";
public static final String DISCOVERY_LEAVE_ACTION_NAME = "internal:discovery/zen/leave";
public interface JoinCallback {
void onSuccess();
void onFailure(Exception e);
}
public interface MembershipListener {
void onJoin(DiscoveryNode node, JoinCallback callback);
void onLeave(DiscoveryNode node);
}
private final TransportService transportService;
private final DiscoveryNodesProvider nodesProvider;
private final MembershipListener listener;
// Registers the three membership handlers on the GENERIC pool at construction.
public MembershipAction(Settings settings, TransportService transportService, DiscoveryNodesProvider nodesProvider, MembershipListener listener) {
super(settings);
this.transportService = transportService;
this.nodesProvider = nodesProvider;
this.listener = listener;
transportService.registerRequestHandler(DISCOVERY_JOIN_ACTION_NAME, JoinRequest::new, ThreadPool.Names.GENERIC, new JoinRequestRequestHandler());
transportService.registerRequestHandler(DISCOVERY_JOIN_VALIDATE_ACTION_NAME, ValidateJoinRequest::new, ThreadPool.Names.GENERIC, new ValidateJoinRequestRequestHandler());
transportService.registerRequestHandler(DISCOVERY_LEAVE_ACTION_NAME, LeaveRequest::new, ThreadPool.Names.GENERIC, new LeaveRequestRequestHandler());
}
// Unregisters the handlers registered in the constructor.
public void close() {
transportService.removeHandler(DISCOVERY_JOIN_ACTION_NAME);
transportService.removeHandler(DISCOVERY_JOIN_VALIDATE_ACTION_NAME);
transportService.removeHandler(DISCOVERY_LEAVE_ACTION_NAME);
}
// Fire-and-forget leave notification. NOTE(review): this variant sends TO
// 'node' carrying the master, while the blocking variant below sends TO the
// master carrying 'node' — presumably intentional (the leave flows in both
// directions), but verify against callers.
public void sendLeaveRequest(DiscoveryNode masterNode, DiscoveryNode node) {
transportService.sendRequest(node, DISCOVERY_LEAVE_ACTION_NAME, new LeaveRequest(masterNode), EmptyTransportResponseHandler.INSTANCE_SAME);
}
// Blocks until the master acknowledges the leave, or the timeout elapses.
public void sendLeaveRequestBlocking(DiscoveryNode masterNode, DiscoveryNode node, TimeValue timeout) {
transportService.submitRequest(masterNode, DISCOVERY_LEAVE_ACTION_NAME, new LeaveRequest(node), EmptyTransportResponseHandler.INSTANCE_SAME).txGet(timeout.millis(), TimeUnit.MILLISECONDS);
}
// Blocks until the master acknowledges the join, or the timeout elapses.
public void sendJoinRequestBlocking(DiscoveryNode masterNode, DiscoveryNode node, TimeValue timeout) {
transportService.submitRequest(masterNode, DISCOVERY_JOIN_ACTION_NAME, new JoinRequest(node), EmptyTransportResponseHandler.INSTANCE_SAME)
.txGet(timeout.millis(), TimeUnit.MILLISECONDS);
}
/**
 * Validates the join request, throwing a failure if it failed.
 */
public void sendValidateJoinRequestBlocking(DiscoveryNode node, ClusterState state, TimeValue timeout) {
transportService.submitRequest(node, DISCOVERY_JOIN_VALIDATE_ACTION_NAME, new ValidateJoinRequest(state), EmptyTransportResponseHandler.INSTANCE_SAME)
.txGet(timeout.millis(), TimeUnit.MILLISECONDS);
}
// Wire format: a single DiscoveryNode. readFrom/writeTo order must match.
public static class JoinRequest extends TransportRequest {
DiscoveryNode node;
public JoinRequest() {
}
private JoinRequest(DiscoveryNode node) {
this.node = node;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
node = new DiscoveryNode(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
node.writeTo(out);
}
}
private class JoinRequestRequestHandler implements TransportRequestHandler<JoinRequest> {
@Override
public void messageReceived(final JoinRequest request, final TransportChannel channel) throws Exception {
// The listener decides asynchronously; the callback relays the outcome
// back over the transport channel.
listener.onJoin(request.node, new JoinCallback() {
@Override
public void onSuccess() {
try {
channel.sendResponse(TransportResponse.Empty.INSTANCE);
} catch (Exception e) {
// Failing to send the success response is itself a failure.
onFailure(e);
}
}
@Override
public void onFailure(Exception e) {
try {
channel.sendResponse(e);
} catch (Exception inner) {
inner.addSuppressed(e);
logger.warn("failed to send back failure on join request", inner);
}
}
});
}
}
// Carries a full ClusterState; non-static because deserialization needs the
// enclosing instance's nodesProvider for the local node.
class ValidateJoinRequest extends TransportRequest {
private ClusterState state;
ValidateJoinRequest() {
}
ValidateJoinRequest(ClusterState state) {
this.state = state;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
this.state = ClusterState.Builder.readFrom(in, nodesProvider.nodes().getLocalNode());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
this.state.writeTo(out);
}
}
class ValidateJoinRequestRequestHandler implements TransportRequestHandler<ValidateJoinRequest> {
@Override
public void messageReceived(ValidateJoinRequest request, TransportChannel channel) throws Exception {
// for now, the mere fact that we can serialize the cluster state acts as validation....
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
}
// Wire format: a single DiscoveryNode. readFrom/writeTo order must match.
public static class LeaveRequest extends TransportRequest {
private DiscoveryNode node;
public LeaveRequest() {
}
private LeaveRequest(DiscoveryNode node) {
this.node = node;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
node = new DiscoveryNode(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
node.writeTo(out);
}
}
private class LeaveRequestRequestHandler implements TransportRequestHandler<LeaveRequest> {
@Override
public void messageReceived(LeaveRequest request, TransportChannel channel) throws Exception {
listener.onLeave(request.node);
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
}
}
|
|
package org.deri.tarql;
import static org.deri.tarql.Helpers.binding;
import static org.deri.tarql.Helpers.removePseudoVars;
import static org.deri.tarql.Helpers.vars;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.engine.binding.Binding;
import org.junit.Test;
public class CSVParserTest {
// Column names follow spreadsheet convention: a..z, then aa, ab, ...
@Test
public void testColumnName() {
assertEquals("a", CSVParser.getColumnName(0));
assertEquals("b", CSVParser.getColumnName(1));
assertEquals("c", CSVParser.getColumnName(2));
assertEquals("z", CSVParser.getColumnName(25));
assertEquals("aa", CSVParser.getColumnName(26));
assertEquals("ab", CSVParser.getColumnName(27));
}
// Rows with differing widths are all counted (3 data rows here).
@Test
public void testCountVars() throws IOException {
String csv = "1\n1,1,1\n1,1";
assertEquals(3, countRows(csv, false));
}
// Without headers, variables are auto-named a, b, c, ...
@Test
public void testHeading() throws IOException {
String csv = "1,2,3,4,5";
assertEquals(vars("a", "b", "c", "d", "e"), getNonPseudoVars(csv, false));
}
// A column absent from a short row stays unbound (null), not empty.
@Test
public void testUnbound() throws IOException {
String csv = "1\n1,1";
Binding binding = readCSV(csv, false).next();
assertEquals(null, binding.get(Var.alloc("b")));
}
// An empty cell binds to null rather than to the empty string.
@Test
public void testNoEmptyStrings() throws IOException {
String csv = ",1";
assertEquals(null, readCSV(csv, false).next().get(Var.alloc("a")));
}
// Fully empty lines (including all-comma lines) are skipped entirely.
@Test
public void testSkipEmptyLines() throws IOException {
String csv = "\n,,,,\n1";
assertEquals(1, countRows(csv, false));
}
// With headers enabled, the first row supplies the variable names and the
// cell values are bound as plain string literals.
@Test
public void testWithHeaders() throws IOException {
String csv = "X,Y\n1,2";
assertEquals(1, countRows(csv, true));
assertEquals(vars("X", "Y"), getNonPseudoVars(csv, true));
assertEquals(binding(vars("X", "Y"), "\"1\"", "\"2\""), removePseudoVars(readCSV(csv,true).next()));
}
// Empty rows before the header row are skipped before header detection.
@Test
public void testSkipEmptyRowsBeforeHeader() throws IOException {
String csv = "\n\nX,Y\n1,2";
assertEquals(vars("X", "Y"), getNonPseudoVars(csv, true));
}
// Data columns beyond the header's width get auto-generated names (b, c, ...).
@Test
public void testFillAdditionalColumnsNotInHeader() throws IOException {
String csv = "X\n1,2,3";
assertEquals(vars("X", "b", "c"), getNonPseudoVars(csv, true));
}
// An empty header cell falls back to the positional auto-name.
@Test
public void testFillNonColumnsInHeader() throws IOException {
String csv = "X,,Y\n1,2,3";
assertEquals(vars("X", "b", "Y"), getNonPseudoVars(csv, true));
}
// Characters illegal in variable names are sanitized to underscores:
@Test
public void testHandleSpacesInColumnNames() throws IOException {
String csv = "Total Value\n123";
assertEquals(vars("Total_Value"), getNonPseudoVars(csv, true));
}
@Test
public void testHandleDashesInColumnNames() throws IOException {
String csv = "Total-Value\n123";
assertEquals(vars("Total_Value"), getNonPseudoVars(csv, true));
}
@Test
public void testHandleQuestionMarkInColumnNames() throws IOException {
String csv = "Is-Estimated?\nYes";
assertEquals(vars("Is_Estimated_"), getNonPseudoVars(csv, true));
}
@Test
public void testHandlePercentInColumnNames() throws IOException {
String csv = "Profit%\n80";
assertEquals(vars("Profit_"), getNonPseudoVars(csv, true));
}
// Brackets are sanitized to underscores like other illegal characters.
@Test
public void testHandleRoundBracketsInColumnNames() throws IOException {
String csv = "Weight(mg)\n0.33";
assertEquals(vars("Weight_mg_"), getNonPseudoVars(csv, true));
}
// A duplicated header name keeps the first occurrence; the duplicate gets
// the positional auto-name instead.
@Test
public void testDuplicateColumnName() throws IOException {
String csv = "X,X\n1,2";
assertEquals(vars("X", "b"), getNonPseudoVars(csv, true));
}
// Filling an empty header cell must not clash with existing names:
// here position 2 would be "c", which is free, so it is used...
@Test
public void testHandleClashWhenFillingInVarNames1() throws IOException {
String csv = "a,b,,c";
assertEquals(vars("a", "b", "c", "d"), getNonPseudoVars(csv, true));
}
// ...but when "c" is taken, the fill-in is disambiguated with a prefix.
@Test
public void testHandleClashWhenFillingInVarNames2() throws IOException {
String csv = "a,c,,d";
assertEquals(vars("a", "c", "_c", "d"), getNonPseudoVars(csv, true));
}
// The reserved ROWNUM header is replaced by a generated name.
@Test
public void testAssignNewNameToReservedColumnName() throws IOException {
String csv = "ROWNUM";
assertEquals(vars("a"), getNonPseudoVars(csv, true));
}
// The full variable list always carries the ROWNUM pseudo-variable.
@Test
public void testIncludesROWNUM() throws IOException {
String csv = "a,b";
assertEquals(vars("a", "b", "ROWNUM"), readCSV(csv, true).getVars());
}
@Test
public void testEmptyColumn() throws IOException {
String csv = "x,,y";
assertEquals(vars("a", "b", "c"), getNonPseudoVars(csv, false));
}
// Tab works as delimiter via the explicit (delimiter, quote) overload.
@Test
public void testTabSeparated() throws IOException {
String csv = "foo\tbar\n1\t2";
List<Var> vars = vars("foo", "bar");
assertEquals(binding(vars, "\"1\"", "\"2\""), removePseudoVars(readCSV(csv, true, '\t', '"').next()));
}
// With semicolon as delimiter, commas are ordinary field characters.
@Test
public void testSemicolonSeparated() throws IOException {
String csv = "foo;bar\n1,5;2,0";
assertEquals(binding(vars("foo", "bar"), "\"1,5\"", "\"2,0\""),
removePseudoVars(readCSV(csv, true, ';', '"').next()));
}
// Standard double quotes protect an embedded delimiter.
@Test
public void testStandardQuotes() throws IOException {
String csv = "Value\n\"This, too\"";
assertEquals(binding(vars("Value"), "\"This, too\""),
removePseudoVars(readCSV(csv, true).next()));
}
// Single quotes work when configured as the quote character.
@Test
public void testSingleQuotes() throws IOException {
String csv = "Value\n\'This, too\'";
assertEquals(binding(vars("Value"), "\"This, too\""),
removePseudoVars(readCSV(csv, true, ',', '\'').next()));
}
// A doubled quote inside a field is read as one literal quote.
@Test
public void testQuoteDoubling() throws IOException {
String csv = "Value\nJoseph \"\"Joe\"\" Smith";
assertEquals(binding(vars("Value"), "\"Joseph \\\"Joe\\\" Smith\""),
removePseudoVars(readCSV(csv, true).next()));
}
// Backslash can be configured as the escape character for quotes.
@Test
public void testEscapingQuoteWithBackslash() throws IOException {
String csv = "Value\n\"Joseph \\\"Joe\\\" Smith\"";
assertEquals(binding(vars("Value"), "\"Joseph \\\"Joe\\\" Smith\""),
removePseudoVars(readCSV(csv, true, '\\').next()));
}
/* OpenCSV only uses the escape character for quotes, not for delimiters */
// Disabled test: documents the (unsupported) case of escaping a delimiter.
//@Test
public void testEscapingDelimiterWithBackslash() throws IOException {
String csv = "Value\nThis\\, too";
assertEquals(binding(vars("Value"), "\"This, too\""),
removePseudoVars(readCSV(csv, true, '\\').next()));
}
// Overload with null delimiter/escape (parser defaults) and '"' as quote.
private static CSVParser readCSV(String csv, boolean varsFromHeader) throws IOException {
return new CSVParser(new StringReader(csv), varsFromHeader, null, '"', null);
}
// Overload with explicit delimiter and quote character; no escape character.
private static CSVParser readCSV(String csv, boolean varsFromHeader, char delimiter, char quote) throws IOException {
return new CSVParser(new StringReader(csv), varsFromHeader, delimiter, quote, null);
}
// Overload with explicit escape character; default delimiter, '"' as quote.
private static CSVParser readCSV(String csv, boolean varsFromHeader, char escape) throws IOException {
return new CSVParser(new StringReader(csv), varsFromHeader, null, '"', escape);
}
/**
 * Parses the given CSV text and returns the number of data rows it yields.
 *
 * @param csv raw CSV content
 * @param varsFromHeader whether the first row supplies variable names
 * @return number of bindings produced by the parser
 * @throws IOException if the CSV cannot be read
 */
private static long countRows(String csv, boolean varsFromHeader) throws IOException {
    long rows = 0;
    // Drain the iterator, counting each produced binding.
    for (Iterator<Binding> rowIter = readCSV(csv, varsFromHeader); rowIter.hasNext(); rowIter.next()) {
        rows++;
    }
    return rows;
}
/**
 * Parses the CSV, drains all rows, and returns the detected variables with
 * the ROWNUM pseudo variable removed.
 *
 * @param csv raw CSV content
 * @param varsFromHeader whether the first row supplies variable names
 * @return the parser's variables minus TarqlQuery.ROWNUM
 * @throws IOException if the CSV cannot be read
 */
private static List<Var> getNonPseudoVars(String csv, boolean varsFromHeader) throws IOException {
    CSVParser parser = readCSV(csv, varsFromHeader);
    // The complete variable list is only available once every row has been consumed.
    while (parser.hasNext()) {
        parser.next();
    }
    List<Var> detected = new ArrayList<Var>(parser.getVars());
    detected.remove(TarqlQuery.ROWNUM);
    return detected;
}
}
|
|
/**
* Copyright (c) 2013, Sony Mobile Communications Inc
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This file is part of logdog.
*/
package logdog.controller;
import org.jfree.ui.RefineryUtilities;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import logdog.Prefs;
import logdog.logdog;
import logdog.model.FileLogSource;
import logdog.model.LLMEditListener;
import logdog.model.LogLineMatcher;
import logdog.model.LogLineMatcher.Group;
import logdog.model.LogLineMatcherManager;
import logdog.model.LogSource;
import logdog.model.LogSourceLifeListener;
import logdog.model.DeviceStater;
import logdog.model.LogSourceListener;
import logdog.utils.Logger;
import logdog.view.ChartView;
import logdog.view.LogSourceView;
/**
 * This is the controller for all views in logdog. It wraps a
 * LogLineMatcherManager which is the main interface to the model. It
 * is also responsible for creating the ChartView and views for all
 * log sources as they are created.
 */
public class MainController
implements LLMEditListener, LogSourceLifeListener {

    /** Main chart window; created in showChartView(). */
    private ChartView mChartView;

    /** Currently loaded set of LogLineMatchers; null until created or loaded. */
    private LogLineMatcherManager mLLMMgr;

    /** Background thread polling device availability for the chart view. */
    private DeviceStater mDeviceStaterThread;

    // List of views to be notified when a selection has been done in
    // another view, for instance when a point is selected in the
    // ChartView.
    private ArrayList<ViewSelListener> mViewSelListeners = new ArrayList<ViewSelListener>(5);

    public MainController() {
        // Subscribe to LogLineMatcher edit events.
        LogLineMatcherManager.addLLMEditListener(this);

        // Subscribe to LogSource lifecycle events i.e. start and stop
        // of LogSources.
        LogSource.addLifeListener(this);
    }

    /**
     * Create and show the chart view centered on screen, start the
     * device-state poller and start the logcat log sources.
     *
     * @param prefs user preferences handed to the ChartView
     */
    public void showChartView(Prefs prefs) {
        // Create the chart view at screen center.
        mChartView = new ChartView(logdog.getFriendlyVersion(), this, prefs);
        RefineryUtilities.centerFrameOnScreen(mChartView);
        mChartView.setVisible(true);

        startDeviceStater();

        // Now always start logcat with both main and system.
        LogSource.startLogCatMainAndSystem();
    }

    /**
     * Called from ChartView when exiting the program. Unsubscribes from
     * model events and stops background work.
     */
    public void die() {
        LogLineMatcherManager.removeLLMEditListener(this);
        LogSource.removeLifeListener(this);
        clean();
        stopDeviceStater();
    }

    /** @return true if a set of LogLineMatchers is currently loaded */
    public boolean hasLLMMgr() {
        return mLLMMgr != null;
    }

    /**
     * Unregister all LogLineMatchers from their LogSource and set
     * mLLMMgr to null.
     */
    public void clean() {
        if (mLLMMgr != null) {
            mLLMMgr.unRegisterAllLLMs();
            mLLMMgr = null;
        }
    }

    /** Clear collected state for all LogLineMatchers, if any are loaded. */
    public void clearStateAllLLMs() {
        if (mLLMMgr != null) {
            mLLMMgr.clearStateAllLLMs();
        }
    }

    /**
     * Clear collected state for the LogLineMatchers belonging to the given
     * presentation id, if any are loaded.
     *
     * @param presentationId id of the presentation whose LLMs are cleared
     */
    public void clearStateLLMsPresentationId(int presentationId) {
        if (mLLMMgr != null) {
            mLLMMgr.clearStateLLMsPresentationId(presentationId);
        }
    }

    /**
     * Create DeviceStaterThread and set the chart as listener so we
     * can display if the device is available or not.
     */
    private void startDeviceStater() {
        mDeviceStaterThread = new DeviceStater();
        mDeviceStaterThread.start();
        mDeviceStaterThread.setListener(mChartView);
    }

    /** Interrupt the device-state poller and wait briefly for it to stop. */
    private void stopDeviceStater() {
        if (mDeviceStaterThread != null) {
            try {
                mDeviceStaterThread.interrupt();
                // Give the thread up to two poll intervals to wind down.
                mDeviceStaterThread.join(DeviceStater.POLL_INTERVAL_MS * 2);
            } catch (InterruptedException excep) {
                Logger.logExcep(excep);
                // Fix: restore the interrupt status for callers further up.
                Thread.currentThread().interrupt();
            } finally {
                // Fix: drop the reference so a repeated call is a no-op and the
                // dead thread can be collected.
                mDeviceStaterThread = null;
            }
        }
    }

    /**
     * Load a set of LogLineMatchers from the given XML file.
     *
     * @param xmlPath XML file to read
     * @throws IOException if the file cannot be read
     */
    public void createFromFile(final File xmlPath)
        throws IOException {
        mLLMMgr = LogLineMatcherManager.createFromFile(xmlPath);
    }

    /**
     * Show a dialog and create a new set of LogLineMatchers. Can be
     * cancelled by the user therefore we do not call clean() or use
     * mLLMMgr.
     *
     * @param owner JFrame owner of the LLM edit dialog
     * @param pasteAsRegExp regexp handed on to the edit view
     */
    public void create(JFrame owner, String pasteAsRegExp) {
        // No file yet, initially 5 LogLineMatchers:
        LogLineMatcherManager llmMgr = new LogLineMatcherManager(null, 5);
        llmMgr.editBegin();
        LLMController llmController = new LLMController(llmMgr);
        llmController.displayLLMView(owner, true, pasteAsRegExp);
    }

    /**
     * Edit the set of current LogLineMatchers using a dialog.
     *
     * @param owner JFrame owner of the dialog
     * @param pasteAsRegExp regexp handed on to the edit view
     */
    public void edit(JFrame owner, String pasteAsRegExp) {
        if (mLLMMgr != null) {
            mLLMMgr.editBegin();
            LLMController llmController = new LLMController(mLLMMgr);
            llmController.displayLLMView(owner, false, pasteAsRegExp);
        }
    }

    /**
     * Create a new LLM based on the given regular expression, creating a new
     * set of LogLineMatchers when none is loaded yet.
     *
     * @param owner JFrame owner of the dialog
     * @param pasteAsRegExp Regexp to pass to LLMView.
     */
    public void pasteAsRegExp(JFrame owner, String pasteAsRegExp) {
        if (mLLMMgr == null) {
            create(owner, pasteAsRegExp);
        } else {
            edit(owner, pasteAsRegExp);
        }
    }

    /**
     * Change log source for all LogLineMatchers to the given file
     * based log source and start it.
     *
     * @param fileLogSource file based log source to switch to
     */
    public void setFileLogSource(FileLogSource fileLogSource) {
        if (mLLMMgr != null) {
            mLLMMgr.setFileLogSource(fileLogSource);
        }
    }

    /** Register a view to be notified of selections made in other views. */
    public void addSelListener(ViewSelListener listener) {
        synchronized (mViewSelListeners) {
            if (listener != null && !mViewSelListeners.contains(listener)) {
                mViewSelListeners.add(listener);
            }
        }
    }

    /** Unregister a previously added selection listener. */
    public void removeSelListener(ViewSelListener listener) {
        synchronized (mViewSelListeners) {
            // List.remove(Object) is a no-op when the element is absent, so
            // the former contains() pre-check was redundant.
            if (listener != null) {
                mViewSelListeners.remove(listener);
            }
        }
    }

    /**
     * Notify all views except the given view that the user has made a
     * selection at the given time stamp.
     *
     * @param sender Notify all ViewSelListeners except this one.
     * @param time Time stamp at which the selection has been done by the user.
     */
    public void notifySelListeners(ViewSelListener sender, long time) {
        synchronized (mViewSelListeners) {
            for (ViewSelListener listener : mViewSelListeners) {
                if (listener != sender) {
                    listener.onSelection(time);
                }
            }
        }
    }

    /** Create and show a view for the given log source. */
    private void createLSView(LogSource logSource) {
        try {
            // Show the log window on a secondary monitor if available.
            JFrame frame = new LogSourceView(logSource, this);

            // TODO showOnScreen() doesn't work on single screen
            // systems. It will maximize the window (no resizing, no
            // min/max). Skip this for now, nice though to be able to
            // show secondary windows on a secondary screen.
            frame.setVisible(true);

            if (mChartView != null) {
                mChartView.requestFocus();
            }
        } catch (OutOfMemoryError excep) {
            String msg = "MainController: Out of memory when creating LogSourceView.";
            Logger.log(msg);
            Logger.logExcep(excep);

            // LogSourceView uses a lot of memory, handle any failure nicely.
            // Fix: the last argument of showMessageDialog() is the message
            // type; OK_OPTION is a return-value constant and only worked by
            // sharing the value 0 with ERROR_MESSAGE.
            JOptionPane.showMessageDialog(null, excep.getMessage(), msg, JOptionPane.ERROR_MESSAGE);
        }
    }

    /**
     * Make sure the given log source has a view: create one if missing,
     * otherwise bring the existing view to front.
     */
    public void ensureLSView(LogSource logSource) {
        LogSourceListener listener = logSource.hasListenerOfType(LogSourceView.class);
        if (listener == null) {
            createLSView(logSource);
        } else {
            ((LogSourceView) listener).toFront();
        }
    }

    // LLMEditListener

    /**
     * We end up here when clicking the Save-button when editing LogLineMatchers.
     *
     * @param llmMgr If creating a new set of LogLineMatchers this is
     * the LogLineMatcherManager newed in create(). Otherwise it is
     * 'mLLMMgr'.
     */
    public void onEditCommitBegin(LogLineMatcherManager llmMgr) {
        boolean newLLMMgr = llmMgr != mLLMMgr;
        if (newLLMMgr) {
            clean();
            mLLMMgr = llmMgr;
        }
        if (mChartView != null) {
            mChartView.removeChartPanels();
            mChartView.setWindowTitle(mLLMMgr.getFilePath().getName());
            mChartView.setFileLogSourceName(null); // remove file name
        }
    }

    public void onAdded(LogLineMatcher llm) {
    }

    public void onDeleted(LogLineMatcher llm) {
    }

    public void onMoved(int fromIndex, int toIndex) {
    }

    public void onGroupAdded(LogLineMatcher llm, Group group) {
    }

    public void onGroupDeleted(LogLineMatcher llm, int groupIndex) {
    }

    public void onGroupMoved(LogLineMatcher llm, int fromIndex, int toIndex) {
    }

    // LogSourceLifeListener

    /** A LogSource started: create a view for it. */
    public void onStarted(LogSource logSource) {
        createLSView(logSource);
    }

    public void onStopped(LogSource logSource) {
    }
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.text;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.LineSeparator;
import com.intellij.xml.util.XmlStringUtil;
import org.jdom.Verifier;
import org.jetbrains.annotations.NotNull;
import org.junit.Test;
import java.nio.CharBuffer;
import java.util.*;
import static org.junit.Assert.*;
/**
* @author Eugene Zhuravlev
* @since Dec 22, 2006
*/
public class StringUtilTest {
// Verifies trimLeading() for the String, char and StringBuilder overloads.
@Test
public void testTrimLeadingChar() throws Exception {
doTestTrimLeading("", "");
doTestTrimLeading("", " ");
doTestTrimLeading("", " ");
doTestTrimLeading("a ", "a ");
doTestTrimLeading("a ", " a ");
}
// Verifies trimTrailing() for the String, char and StringBuilder overloads.
@Test
public void testTrimTrailingChar() throws Exception {
doTestTrimTrailing("", "");
doTestTrimTrailing("", " ");
doTestTrimTrailing("", " ");
doTestTrimTrailing(" a", " a");
doTestTrimTrailing(" a", " a ");
}
// Checks all three trimLeading overloads against the same expectation.
private static void doTestTrimLeading(@NotNull String expected, @NotNull String string) {
assertEquals(expected, StringUtil.trimLeading(string));
assertEquals(expected, StringUtil.trimLeading(string, ' '));
assertEquals(expected, StringUtil.trimLeading(new StringBuilder(string), ' ').toString());
}
// Checks all three trimTrailing overloads against the same expectation.
private static void doTestTrimTrailing(@NotNull String expected, @NotNull String string) {
assertEquals(expected, StringUtil.trimTrailing(string));
assertEquals(expected, StringUtil.trimTrailing(string, ' '));
assertEquals(expected, StringUtil.trimTrailing(new StringBuilder(string), ' ').toString());
}
// toUpperCase(char) leaves non-letters untouched and upper-cases letters;
// the last assertion round-trips a non-ASCII char through lower case.
@Test
public void testToUpperCase() {
assertEquals('/', StringUtil.toUpperCase('/'));
assertEquals(':', StringUtil.toUpperCase(':'));
assertEquals('A', StringUtil.toUpperCase('a'));
assertEquals('A', StringUtil.toUpperCase('A'));
assertEquals('K', StringUtil.toUpperCase('k'));
assertEquals('K', StringUtil.toUpperCase('K'));
assertEquals('\u2567', StringUtil.toUpperCase(Character.toLowerCase('\u2567')));
}
// toLowerCase(char) leaves non-letters untouched and lower-cases letters.
@Test
public void testToLowerCase() {
assertEquals('/', StringUtil.toLowerCase('/'));
assertEquals(':', StringUtil.toLowerCase(':'));
assertEquals('a', StringUtil.toLowerCase('a'));
assertEquals('a', StringUtil.toLowerCase('A'));
assertEquals('k', StringUtil.toLowerCase('k'));
assertEquals('k', StringUtil.toLowerCase('K'));
// Fix: the original asserted toUpperCase here, an exact copy-paste of the
// last line of testToUpperCase; this test should exercise toLowerCase.
assertEquals('\u2567', StringUtil.toLowerCase(Character.toUpperCase('\u2567')));
}
// isEmptyOrSpaces is true for null, empty and whitespace-only strings.
@Test
public void testIsEmptyOrSpaces() throws Exception {
assertTrue(StringUtil.isEmptyOrSpaces(null));
assertTrue(StringUtil.isEmptyOrSpaces(""));
assertTrue(StringUtil.isEmptyOrSpaces(" "));
assertFalse(StringUtil.isEmptyOrSpaces("1"));
assertFalse(StringUtil.isEmptyOrSpaces(" 12345 "));
assertFalse(StringUtil.isEmptyOrSpaces("test"));
}
// splitHonorQuotes keeps quoted segments (including escaped quotes) intact
// and drops the trailing empty token.
@Test
public void testSplitWithQuotes() {
final List<String> strings = StringUtil.splitHonorQuotes("aaa bbb ccc \"ddd\" \"e\\\"e\\\"e\" ", ' ');
assertEquals(5, strings.size());
assertEquals("aaa", strings.get(0));
assertEquals("bbb", strings.get(1));
assertEquals("ccc", strings.get(2));
assertEquals("\"ddd\"", strings.get(3));
assertEquals("\"e\\\"e\\\"e\"", strings.get(4));
}
// unpluralize: synthetic edge cases first, then common English plurals;
// returns null when no singular form can be derived.
@Test
public void testUnPluralize() {
// synthetic
assertEquals("plurals", StringUtil.unpluralize("pluralses"));
assertEquals("Inherits", StringUtil.unpluralize("Inheritses"));
assertEquals("s", StringUtil.unpluralize("ss"));
assertEquals("I", StringUtil.unpluralize("Is"));
assertEquals(null, StringUtil.unpluralize("s"));
assertEquals("z", StringUtil.unpluralize("zs"));
// normal
assertEquals("case", StringUtil.unpluralize("cases"));
assertEquals("Index", StringUtil.unpluralize("Indices"));
assertEquals("fix", StringUtil.unpluralize("fixes"));
assertEquals("man", StringUtil.unpluralize("men"));
assertEquals("leaf", StringUtil.unpluralize("leaves"));
assertEquals("cookie", StringUtil.unpluralize("cookies"));
assertEquals("search", StringUtil.unpluralize("searches"));
assertEquals("process", StringUtil.unpluralize("process"));
assertEquals("PROPERTY", StringUtil.unpluralize("PROPERTIES"));
assertEquals("THIS", StringUtil.unpluralize("THESE"));
}
// pluralize covers regular and irregular forms and preserves letter case.
@Test
public void testPluralize() {
assertEquals("values", StringUtil.pluralize("value"));
assertEquals("values", StringUtil.pluralize("values"));
assertEquals("indices", StringUtil.pluralize("index"));
assertEquals("matrices", StringUtil.pluralize("matrix"));
assertEquals("fixes", StringUtil.pluralize("fix"));
assertEquals("men", StringUtil.pluralize("man"));
assertEquals("media", StringUtil.pluralize("medium"));
assertEquals("stashes", StringUtil.pluralize("stash"));
assertEquals("children", StringUtil.pluralize("child"));
assertEquals("leaves", StringUtil.pluralize("leaf"));
assertEquals("These", StringUtil.pluralize("This"));
assertEquals("cookies", StringUtil.pluralize("cookie"));
assertEquals("VaLuES", StringUtil.pluralize("VaLuE"));
assertEquals("PLANS", StringUtil.pluralize("PLAN"));
assertEquals("stackTraceLineExes", StringUtil.pluralize("stackTraceLineEx"));
assertEquals("schemas", StringUtil.pluralize("schema")); // anglicized version
assertEquals("PROPERTIES", StringUtil.pluralize("PROPERTY"));
assertEquals("THESE", StringUtil.pluralize("THIS"));
}
// startsWithConcatenation: true iff the string starts with all prefixes
// joined in order; empty prefixes are ignored.
@Test
public void testStartsWithConcatenation() {
assertTrue(StringUtil.startsWithConcatenation("something.with.dot", "something", "."));
assertTrue(StringUtil.startsWithConcatenation("something.with.dot", "", "something."));
assertTrue(StringUtil.startsWithConcatenation("something.", "something", "."));
assertTrue(StringUtil.startsWithConcatenation("something", "something", "", "", ""));
assertFalse(StringUtil.startsWithConcatenation("something", "something", "", "", "."));
assertFalse(StringUtil.startsWithConcatenation("some", "something", ""));
}
// naturalCompare orders embedded numbers numerically (leading zeros
// ignored) and is case-aware but stable for mixed case.
// NOTE(review): asserting the exact value -14 pins an implementation
// detail of the comparator, not just its sign.
@Test
public void testNaturalCompare() {
assertEquals(1, StringUtil.naturalCompare("test011", "test10"));
assertEquals(1, StringUtil.naturalCompare("test10a", "test010"));
final List<String> strings = new ArrayList<>(Arrays.asList("Test99", "tes0", "test0", "testing", "test", "test99", "test011", "test1",
"test 3", "test2", "test10a", "test10", "1.2.10.5", "1.2.9.1"));
final Comparator<String> c = (o1, o2) -> StringUtil.naturalCompare(o1, o2);
strings.sort(c);
assertEquals(Arrays.asList("1.2.9.1", "1.2.10.5", "tes0", "test", "test0", "test1", "test2", "test 3", "test10", "test10a",
"test011", "Test99", "test99", "testing"), strings);
final List<String> strings2 = new ArrayList<>(Arrays.asList("t1", "t001", "T2", "T002", "T1", "t2"));
strings2.sort(c);
assertEquals(Arrays.asList("T1", "t1", "t001", "T2", "t2", "T002"), strings2);
assertEquals(1 ,StringUtil.naturalCompare("7403515080361171695", "07403515080361171694"));
assertEquals(-14, StringUtil.naturalCompare("_firstField", "myField1"));
//idea-80853
final List<String> strings3 = new ArrayList<>(
Arrays.asList("C148A_InsomniaCure", "C148B_Escape", "C148C_TersePrincess", "C148D_BagOfMice", "C148E_Porcelain"));
strings3.sort(c);
assertEquals(Arrays.asList("C148A_InsomniaCure", "C148B_Escape", "C148C_TersePrincess", "C148D_BagOfMice", "C148E_Porcelain"), strings3);
}
// formatLinks wraps a URL in an HTML anchor.
@Test
public void testFormatLinks() {
assertEquals("<a href=\"http://a-b+c\">http://a-b+c</a>", StringUtil.formatLinks("http://a-b+c"));
}
// fromSequenceWithoutCopying only exposes the backing array when the
// sequence starts at offset 0 of the buffer.
@Test
public void testCopyHeapCharBuffer() {
String s = "abc.d";
CharBuffer buffer = CharBuffer.allocate(s.length());
buffer.append(s);
buffer.rewind();
assertNotNull(CharArrayUtil.fromSequenceWithoutCopying(buffer));
assertNotNull(CharArrayUtil.fromSequenceWithoutCopying(buffer.subSequence(0, 5)));
//assertNull(CharArrayUtil.fromSequenceWithoutCopying(buffer.subSequence(0, 4))); // end index is not checked
assertNull(CharArrayUtil.fromSequenceWithoutCopying(buffer.subSequence(1, 5)));
assertNull(CharArrayUtil.fromSequenceWithoutCopying(buffer.subSequence(1, 2)));
}
// wordsToBeginFromUpperCase keeps small words ("to", "and") and known
// abbreviations (I18n, SQL, CSS) as-is.
@Test
public void testTitleCase() {
assertEquals("Couldn't Connect to Debugger", StringUtil.wordsToBeginFromUpperCase("Couldn't connect to debugger"));
assertEquals("Let's Make Abbreviations Like I18n, SQL and CSS", StringUtil.wordsToBeginFromUpperCase("Let's make abbreviations like I18n, SQL and CSS"));
}
// wordsToBeginFromLowerCase is the inverse operation, also preserving
// abbreviations.
@Test
public void testSentenceCapitalization() {
assertEquals("couldn't connect to debugger", StringUtil.wordsToBeginFromLowerCase("Couldn't Connect to Debugger"));
assertEquals("let's make abbreviations like I18n, SQL and CSS s SQ sq", StringUtil.wordsToBeginFromLowerCase("Let's Make Abbreviations Like I18n, SQL and CSS S SQ Sq"));
}
// escapeStringCharacters(length, str, additionalChars, escapeSlash, buffer):
// escapes \n and chars in the additional set; the last case also escapes
// the backslash itself.
@Test
public void testEscapeStringCharacters() {
assertEquals("\\\"\\n", StringUtil.escapeStringCharacters(3, "\\\"\n", "\"", false, new StringBuilder()).toString());
assertEquals("\\\"\\n", StringUtil.escapeStringCharacters(2, "\"\n", "\"", false, new StringBuilder()).toString());
assertEquals("\\\\\\\"\\n", StringUtil.escapeStringCharacters(3, "\\\"\n", "\"", true, new StringBuilder()).toString());
}
// escapeSlashes escapes forward slashes; escapeBackSlashes doubles backslashes.
@Test
public void testEscapeSlashes() {
assertEquals("\\/", StringUtil.escapeSlashes("/"));
assertEquals("foo\\/bar\\foo\\/", StringUtil.escapeSlashes("foo/bar\\foo/"));
assertEquals("\\\\\\\\server\\\\share\\\\extension.crx", StringUtil.escapeBackSlashes("\\\\server\\share\\extension.crx"));
}
// escapeQuotes escapes double quotes only; single quotes are untouched.
@Test
public void testEscapeQuotes() {
assertEquals("\\\"", StringUtil.escapeQuotes("\""));
assertEquals("foo\\\"bar'\\\"", StringUtil.escapeQuotes("foo\"bar'\""));
}
// unquoteString strips one matching pair of quotes (double or single);
// lone or mismatched quotes stay.
@Test
public void testUnquote() {
assertEquals("", StringUtil.unquoteString(""));
assertEquals("\"", StringUtil.unquoteString("\""));
assertEquals("", StringUtil.unquoteString("\"\""));
assertEquals("\"", StringUtil.unquoteString("\"\"\""));
assertEquals("foo", StringUtil.unquoteString("\"foo\""));
assertEquals("\"foo", StringUtil.unquoteString("\"foo"));
assertEquals("foo\"", StringUtil.unquoteString("foo\""));
assertEquals("", StringUtil.unquoteString(""));
assertEquals("\'", StringUtil.unquoteString("\'"));
assertEquals("", StringUtil.unquoteString("\'\'"));
assertEquals("\'", StringUtil.unquoteString("\'\'\'"));
assertEquals("foo", StringUtil.unquoteString("\'foo\'"));
assertEquals("\'foo", StringUtil.unquoteString("\'foo"));
assertEquals("foo\'", StringUtil.unquoteString("foo\'"));
assertEquals("\'\"", StringUtil.unquoteString("\'\""));
assertEquals("\"\'", StringUtil.unquoteString("\"\'"));
assertEquals("\"foo\'", StringUtil.unquoteString("\"foo\'"));
}
// stripQuotesAroundValue is more aggressive: it strips a leading and/or
// trailing quote of either kind, even unmatched ones.
@SuppressWarnings("SSBasedInspection")
@Test
public void testStripQuotesAroundValue() {
assertEquals("", StringUtil.stripQuotesAroundValue(""));
assertEquals("", StringUtil.stripQuotesAroundValue("'"));
assertEquals("", StringUtil.stripQuotesAroundValue("\""));
assertEquals("", StringUtil.stripQuotesAroundValue("''"));
assertEquals("", StringUtil.stripQuotesAroundValue("\"\""));
assertEquals("", StringUtil.stripQuotesAroundValue("'\""));
assertEquals("foo", StringUtil.stripQuotesAroundValue("'foo'"));
assertEquals("foo", StringUtil.stripQuotesAroundValue("'foo"));
assertEquals("foo", StringUtil.stripQuotesAroundValue("foo'"));
assertEquals("f'o'o", StringUtil.stripQuotesAroundValue("'f'o'o'"));
assertEquals("f\"o'o", StringUtil.stripQuotesAroundValue("\"f\"o'o'"));
assertEquals("f\"o'o", StringUtil.stripQuotesAroundValue("f\"o'o"));
assertEquals("\"'f\"o'o\"", StringUtil.stripQuotesAroundValue("\"\"'f\"o'o\"\""));
assertEquals("''f\"o'o''", StringUtil.stripQuotesAroundValue("'''f\"o'o'''"));
assertEquals("foo' 'bar", StringUtil.stripQuotesAroundValue("foo' 'bar"));
}
// unquoteString with an explicit quotation character.
@Test
public void testUnquoteWithQuotationChar() {
assertEquals("", StringUtil.unquoteString("", '|'));
assertEquals("|", StringUtil.unquoteString("|", '|'));
assertEquals("", StringUtil.unquoteString("||", '|'));
assertEquals("|", StringUtil.unquoteString("|||", '|'));
assertEquals("foo", StringUtil.unquoteString("|foo|", '|'));
assertEquals("|foo", StringUtil.unquoteString("|foo", '|'));
assertEquals("foo|", StringUtil.unquoteString("foo|", '|'));
}
// isQuotedString requires at least two chars and a matching quote pair.
@Test
public void testIsQuotedString() {
assertFalse(StringUtil.isQuotedString(""));
assertFalse(StringUtil.isQuotedString("'"));
assertFalse(StringUtil.isQuotedString("\""));
assertTrue(StringUtil.isQuotedString("\"\""));
assertTrue(StringUtil.isQuotedString("''"));
assertTrue(StringUtil.isQuotedString("'ab'"));
assertTrue(StringUtil.isQuotedString("\"foo\""));
}
// join renders nulls as empty and keeps empty elements between separators.
@Test
public void testJoin() {
assertEquals("", StringUtil.join(Collections.emptyList(), ","));
assertEquals("qqq", StringUtil.join(Collections.singletonList("qqq"), ","));
assertEquals("", StringUtil.join(Collections.singletonList(null), ","));
assertEquals("a,b", StringUtil.join(Arrays.asList("a", "b"), ","));
assertEquals("foo,,bar", StringUtil.join(Arrays.asList("foo", "", "bar"), ","));
assertEquals("foo,,bar", StringUtil.join(new String[]{"foo", "", "bar"}, ","));
}
// splitByLinesKeepSeparators preserves \n, \r and \r\n at the end of each
// chunk so the original text can be reassembled exactly.
@Test
public void testSplitByLineKeepingSeparators() {
assertEquals(Collections.singletonList(""), Arrays.asList(StringUtil.splitByLinesKeepSeparators("")));
assertEquals(Collections.singletonList("aa"), Arrays.asList(StringUtil.splitByLinesKeepSeparators("aa")));
assertEquals(Arrays.asList("\n", "\n", "aa\n", "\n", "bb\n", "cc\n", "\n"),
Arrays.asList(StringUtil.splitByLinesKeepSeparators("\n\naa\n\nbb\ncc\n\n")));
assertEquals(Arrays.asList("\r", "\r\n", "\r"), Arrays.asList(StringUtil.splitByLinesKeepSeparators("\r\r\n\r")));
assertEquals(Arrays.asList("\r\n", "\r", "\r\n"), Arrays.asList(StringUtil.splitByLinesKeepSeparators("\r\n\r\r\n")));
assertEquals(Arrays.asList("\n", "\r\n", "\n", "\r\n", "\r", "\r", "aa\r", "bb\r\n", "cc\n", "\r", "dd\n", "\n", "\r\n", "\r"),
Arrays.asList(StringUtil.splitByLinesKeepSeparators("\n\r\n\n\r\n\r\raa\rbb\r\ncc\n\rdd\n\n\r\n\r")));
}
// When the whole text equals the replaced token, replace() must return the
// replacement instance itself (no new String is built).
@Test
public void testReplaceReturnReplacementIfTextEqualsToReplacedText() {
String newS = "/tmp";
assertSame(newS,
StringUtil.replace("$PROJECT_FILE$", "$PROJECT_FILE$".toLowerCase().toUpperCase() /* ensure new String instance */, newS));
}
// Plain substring replacement.
@Test
public void testReplace() {
assertEquals("/tmp/filename", StringUtil.replace("$PROJECT_FILE$/filename", "$PROJECT_FILE$", "/tmp"));
}
// equalsIgnoreWhitespaces treats all whitespace runs (and their absence)
// as equal, but non-whitespace content must match exactly.
@Test
public void testEqualsIgnoreWhitespaces() {
assertTrue(StringUtil.equalsIgnoreWhitespaces(null, null));
assertFalse(StringUtil.equalsIgnoreWhitespaces("", null));
assertTrue(StringUtil.equalsIgnoreWhitespaces("", ""));
assertTrue(StringUtil.equalsIgnoreWhitespaces("\n\t ", ""));
assertTrue(StringUtil.equalsIgnoreWhitespaces("", "\t\n \n\t"));
assertTrue(StringUtil.equalsIgnoreWhitespaces("\t", "\n"));
assertTrue(StringUtil.equalsIgnoreWhitespaces("x", " x"));
assertTrue(StringUtil.equalsIgnoreWhitespaces("x", "x "));
assertTrue(StringUtil.equalsIgnoreWhitespaces("x\n", "x"));
assertTrue(StringUtil.equalsIgnoreWhitespaces("abc", "a\nb\nc\n"));
assertTrue(StringUtil.equalsIgnoreWhitespaces("x y x", "x y x"));
assertTrue(StringUtil.equalsIgnoreWhitespaces("xyx", "x y x"));
assertFalse(StringUtil.equalsIgnoreWhitespaces("x", "\t\n "));
assertFalse(StringUtil.equalsIgnoreWhitespaces("", " x "));
assertFalse(StringUtil.equalsIgnoreWhitespaces("", "x "));
assertFalse(StringUtil.equalsIgnoreWhitespaces("", " x"));
assertFalse(StringUtil.equalsIgnoreWhitespaces("xyx", "xxx"));
assertFalse(StringUtil.equalsIgnoreWhitespaces("xyx", "xYx"));
}
// stringHashCodeIgnoreWhitespaces must be consistent with
// equalsIgnoreWhitespaces: equal strings hash equal, the listed unequal
// pairs happen to hash differently.
@Test
public void testStringHashCodeIgnoreWhitespaces() {
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces(""), StringUtil.stringHashCodeIgnoreWhitespaces("")));
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("\n\t "), StringUtil.stringHashCodeIgnoreWhitespaces("")));
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces(""), StringUtil.stringHashCodeIgnoreWhitespaces("\t\n \n\t")));
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("\t"), StringUtil.stringHashCodeIgnoreWhitespaces("\n")));
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("x"), StringUtil.stringHashCodeIgnoreWhitespaces(" x")));
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("x"), StringUtil.stringHashCodeIgnoreWhitespaces("x ")));
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("x\n"), StringUtil.stringHashCodeIgnoreWhitespaces("x")));
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("abc"), StringUtil.stringHashCodeIgnoreWhitespaces("a\nb\nc\n")));
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("x y x"), StringUtil.stringHashCodeIgnoreWhitespaces("x y x")));
assertTrue(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("xyx"), StringUtil.stringHashCodeIgnoreWhitespaces("x y x")));
assertFalse(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("x"), StringUtil.stringHashCodeIgnoreWhitespaces("\t\n ")));
assertFalse(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces(""), StringUtil.stringHashCodeIgnoreWhitespaces(" x ")));
assertFalse(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces(""), StringUtil.stringHashCodeIgnoreWhitespaces("x ")));
assertFalse(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces(""), StringUtil.stringHashCodeIgnoreWhitespaces(" x")));
assertFalse(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("xyx"), StringUtil.stringHashCodeIgnoreWhitespaces("xxx")));
assertFalse(Comparing.equal(StringUtil.stringHashCodeIgnoreWhitespaces("xyx"), StringUtil.stringHashCodeIgnoreWhitespaces("xYx")));
}
@Test
public void testContains() {
assertTrue(StringUtil.contains("1", "1"));
assertFalse(StringUtil.contains("1", "12"));
assertTrue(StringUtil.contains("12", "1"));
assertTrue(StringUtil.contains("12", "2"));
}
@Test
public void testDetectSeparators() {
assertEquals(null, StringUtil.detectSeparators(""));
assertEquals(null, StringUtil.detectSeparators("asd"));
assertEquals(null, StringUtil.detectSeparators("asd\t"));
assertEquals(LineSeparator.LF, StringUtil.detectSeparators("asd\n"));
assertEquals(LineSeparator.LF, StringUtil.detectSeparators("asd\nads\r"));
assertEquals(LineSeparator.LF, StringUtil.detectSeparators("asd\nads\n"));
assertEquals(LineSeparator.CR, StringUtil.detectSeparators("asd\r"));
assertEquals(LineSeparator.CR, StringUtil.detectSeparators("asd\rads\r"));
assertEquals(LineSeparator.CR, StringUtil.detectSeparators("asd\rads\n"));
assertEquals(LineSeparator.CRLF, StringUtil.detectSeparators("asd\r\n"));
assertEquals(LineSeparator.CRLF, StringUtil.detectSeparators("asd\r\nads\r"));
assertEquals(LineSeparator.CRLF, StringUtil.detectSeparators("asd\r\nads\n"));
}
@Test
public void testFindStartingLineSeparator() {
    // Each row: expected separator, probe text, queried offset. null is
    // expected when the offset is out of range or no separator starts there.
    // Note "\rH\r\nello" at offset 2: the two-char CRLF wins over a lone CR.
    Object[][] cases = {
        {null, "", -1},
        {null, "", 0},
        {null, "", 1},
        {null, "\nHello", -1},
        {null, "\nHello", 1},
        {null, "\nH\rel\nlo", 6},
        {LineSeparator.LF, "\nHello", 0},
        {LineSeparator.LF, "\nH\rel\nlo", 5},
        {LineSeparator.LF, "Hello\n", 5},
        {LineSeparator.CR, "\rH\r\nello", 0},
        {LineSeparator.CR, "Hello\r", 5},
        {LineSeparator.CR, "Hello\b\r", 6},
        {LineSeparator.CRLF, "\rH\r\nello", 2},
        {LineSeparator.CRLF, "\r\nH\r\nello", 0},
        {LineSeparator.CRLF, "\r\nH\r\nello\r\n", 9},
    };
    for (Object[] c : cases) {
        assertEquals(c[0], StringUtil.getLineSeparatorAt((String) c[1], (Integer) c[2]));
    }
}
@Test
public void testFormatFileSize() {
    // Each row: expected human-readable size, raw byte count. Values are
    // rendered with two fractional digits and a metric-style unit suffix
    // (B, K, M, G, ... E), rounded half-up (12345 -> "12.35K").
    Object[][] cases = {
        {"0B", 0L},
        {"1B", 1L},
        {"2.15G", (long) Integer.MAX_VALUE},
        {"9.22E", Long.MAX_VALUE},
        {"60.10K", 60100L},
        {"1.23K", 1234L},
        {"12.35K", 12345L},
        {"123.46K", 123456L},
        {"1.23M", 1234567L},
        {"12.35M", 12345678L},
        {"123.46M", 123456789L},
        {"1.23G", 1234567890L},
    };
    for (Object[] c : cases) {
        assertEquals(c[0], StringUtil.formatFileSize((Long) c[1]));
    }
}
@Test
public void testFormatDuration() {
    // Each row: expected human-readable duration, raw milliseconds. The
    // formatter decomposes into ms/s/m/h/d/w (and larger units for huge
    // values) and prints every unit from the largest non-zero one downwards.
    Object[][] cases = {
        {"0ms", 0L},
        {"1ms", 1L},
        {"3w 3d 20h 31m 23s 647ms", (long) Integer.MAX_VALUE},
        {"31ep 7714ml 2c 59yr 5mo 0w 3d 7h 12m 55s 807ms", Long.MAX_VALUE},
        {"1m 0s 100ms", 60100L},
        {"1s 234ms", 1234L},
        {"12s 345ms", 12345L},
        {"2m 3s 456ms", 123456L},
        {"20m 34s 567ms", 1234567L},
        {"3h 25m 45s 678ms", 12345678L},
        {"1d 10h 17m 36s 789ms", 123456789L},
        {"2w 0d 6h 56m 7s 890ms", 1234567890L},
    };
    for (Object[] c : cases) {
        assertEquals(c[0], StringUtil.formatDuration((Long) c[1]));
    }
}
@Test
public void testXmlWrapInCDATA() {
    // Each row: expected CDATA-wrapped output, raw payload. A payload that
    // itself contains the CDATA terminator "]]>" must be split across
    // adjacent CDATA sections so the terminator never appears literally.
    String[][] cases = {
        {"<![CDATA[abc]]>", "abc"},
        {"<![CDATA[abc]]]><![CDATA[]>]]>", "abc]]>"},
        {"<![CDATA[abc]]]><![CDATA[]>def]]>", "abc]]>def"},
        {"<![CDATA[123<![CDATA[wow<&>]]]><![CDATA[]>]]]><![CDATA[]><![CDATA[123]]>", "123<![CDATA[wow<&>]]>]]><![CDATA[123"},
    };
    for (String[] c : cases) {
        assertEquals(c[0], XmlStringUtil.wrapInCDATA(c[1]));
    }
}
@Test
public void testGetPackageName() {
    // Each row: expected package prefix, fully qualified name. Everything
    // before the last '.' is treated as the package (so nested classes like
    // "java.util.Map.Entry" yield "java.util.Map"); no dot yields "".
    String[][] cases = {
        {"java.lang", "java.lang.String"},
        {"java.util.Map", "java.util.Map.Entry"},
        {"Map", "Map.Entry"},
        {"", "Number"},
    };
    for (String[] c : cases) {
        assertEquals(c[0], StringUtil.getPackageName(c[1]));
    }
}
@SuppressWarnings("SpellCheckingInspection")
@Test
public void testIndexOf_1() {
    // Two lowercase "abcd" runs followed by an uppercase "ABCD" run.
    char[] chars = "abcdabcdABCD".toCharArray();
    // With the final flag false the search is case-insensitive: both 'c'
    // and 'C' match the first lowercase 'c' at index 2.
    assertEquals(2, StringUtil.indexOf(chars, 'c', 0, 12, false));
    assertEquals(2, StringUtil.indexOf(chars, 'C', 0, 12, false));
    // With the flag true only the exact case matches: 'C' is at index 10.
    assertEquals(10, StringUtil.indexOf(chars, 'C', 0, 12, true));
    // Out-of-range bounds are clamped to the array limits.
    assertEquals(2, StringUtil.indexOf(chars, 'c', -42, 99, false));
}
@SuppressWarnings("SpellCheckingInspection")
@Test
public void testIndexOf_2() {
    // First occurrence within [start, end).
    int firstMatch = StringUtil.indexOf("axaxa", 'x', 0, 5);
    assertEquals(1, firstMatch);
    // Out-of-range bounds are clamped to the string limits.
    int clampedMatch = StringUtil.indexOf("abcd", 'c', -42, 99);
    assertEquals(2, clampedMatch);
}
@SuppressWarnings("SpellCheckingInspection")
@Test
public void testIndexOf_3() {
    // Flag false: case-insensitive, so 'x' matches at index 1 of "axaXa".
    int insensitive = StringUtil.indexOf("axaXa", 'x', 0, 5, false);
    assertEquals(1, insensitive);
    // Flag true: exact case only, so 'X' is found at index 3.
    int sensitive = StringUtil.indexOf("axaXa", 'X', 0, 5, true);
    assertEquals(3, sensitive);
    // Out-of-range bounds are clamped to the string limits.
    int clamped = StringUtil.indexOf("abcd", 'c', -42, 99, false);
    assertEquals(2, clamped);
}
@SuppressWarnings("SpellCheckingInspection")
@Test
public void testIndexOfAny() {
    // Finds the first occurrence of ANY character from the probe set.
    int singleChar = StringUtil.indexOfAny("axa", "x", 0, 5);
    assertEquals(1, singleChar);
    // A set member absent from the text ('z') does not affect the result.
    int multiChar = StringUtil.indexOfAny("axa", "zx", 0, 5);
    assertEquals(1, multiChar);
    // Out-of-range bounds are clamped to the string limits.
    int clamped = StringUtil.indexOfAny("abcd", "c", -42, 99);
    assertEquals(2, clamped);
}
@SuppressWarnings("SpellCheckingInspection")
@Test
public void testLastIndexOf() {
    // The match closest to the end of the [start, end) window wins; the
    // end bound is exclusive ("axaxa" with end=3 still yields index 1).
    int windowTwo = StringUtil.lastIndexOf("axaxa", 'x', 0, 2);
    assertEquals(1, windowTwo);
    int windowThree = StringUtil.lastIndexOf("axaxa", 'x', 0, 3);
    assertEquals(1, windowThree);
    int windowFive = StringUtil.lastIndexOf("axaxa", 'x', 0, 5);
    assertEquals(3, windowFive);
    // Out-of-range bounds are clamped to the string limits. #IDEA-144968
    int clamped = StringUtil.lastIndexOf("abcd", 'c', -42, 99);
    assertEquals(2, clamped);
}
@Test
public void testEscapingIllegalXmlChars() {
    // Samples containing characters that are illegal in XML character data
    // (NUL, control characters, U+FFFF).
    String[] samples = {"ab\n\0\r\tde", "\\abc\1\2\3\uFFFFdef"};
    for (String original : samples) {
        String escaped = XmlStringUtil.escapeIllegalXmlChars(original);
        // The escaped form must pass the XML character-data check...
        assertNull(Verifier.checkCharacterData(escaped));
        // ...and the escape/unescape round trip must be lossless.
        assertEquals(original, XmlStringUtil.unescapeIllegalXmlChars(escaped));
    }
}
@Test
public void testCountChars() {
    // 'd' occurs at indices 3, 4, 5, 6 and 11 of this haystack.
    String haystack = "abcddddefghd";
    // Whole-string counting.
    assertEquals(0, StringUtil.countChars("abcdefgh", 'x'));
    assertEquals(1, StringUtil.countChars("abcdefgh", 'd'));
    assertEquals(5, StringUtil.countChars(haystack, 'd'));
    // From offset 4: all matches to the end (indices 4, 5, 6, 11) -> 4.
    assertEquals(4, StringUtil.countChars(haystack, 'd', 4, false));
    // From offset 4, contiguous run only (indices 4, 5, 6) -> 3.
    assertEquals(3, StringUtil.countChars(haystack, 'd', 4, true));
    // Within the window [4, 6) (indices 4, 5) -> 2.
    assertEquals(2, StringUtil.countChars(haystack, 'd', 4, 6, false));
}
}
|
|
package ug.or.nda.dao;
import java.io.Serializable;
import java.lang.reflect.ParameterizedType;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.LockModeType;
import javax.persistence.NoResultException;
import javax.persistence.NonUniqueResultException;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import org.apache.log4j.Logger;
import ug.or.nda.constant.AppPropertyHolder;
public class GenericDAOImpl<T, ID extends Serializable> implements GenericDAOI<T, ID> {

    /** The entity type managed by this DAO instance. */
    private final Class<T> persistentClass;

    @PersistenceContext(unitName = AppPropertyHolder.PRIMARY_PERSISTENT_UNIT)
    protected EntityManager em;

    private Logger log = Logger.getLogger(getClass());

    /**
     * Resolves the managed entity class from the first type argument of the
     * generic superclass. Only valid when this class is subclassed with
     * concrete type arguments, e.g.
     * {@code class UserDAO extends GenericDAOImpl<User, Long>}.
     */
    @SuppressWarnings("unchecked")
    public GenericDAOImpl() {
        this.persistentClass = (Class<T>) ((ParameterizedType) getClass()
                .getGenericSuperclass()).getActualTypeArguments()[0];
    }

    /**
     * Creates a DAO for an explicitly supplied entity class.
     *
     * @param persistentClass the entity type this DAO manages
     */
    public GenericDAOImpl(final Class<T> persistentClass) {
        super();
        this.persistentClass = persistentClass;
    }

    /**
     * Finds a single entity whose {@code fieldName} equals {@code value}.
     * If several entities match, the first one is returned; if none match,
     * {@code null} is returned.
     * <p>
     * NOTE(review): {@code fieldName} is concatenated into the JPQL string
     * (see {@link #getQuery(String)}) and must therefore come from trusted
     * code, never from user input.
     */
    @Override
    public T findBy(String fieldName, Object value) {
        Query query = em
                .createQuery(getQuery(fieldName))
                .setParameter(fieldName, value);
        return getSingleResult(query);
    }

    /**
     * Executes {@code query} expecting a single row.
     *
     * @return the single result; the first row when more than one entity
     *         matches (this re-executes the query); or {@code null} when
     *         there is no match
     */
    @SuppressWarnings("unchecked")
    private T getSingleResult(Query query) {
        try {
            return (T) query.getSingleResult();
        } catch (NonUniqueResultException exc) {
            // More than one match: fall back to the first row. This runs the
            // query a second time, which is accepted here for simplicity.
            return (T) query.getResultList().get(0);
        } catch (NoResultException exc) {
            return null;
        }
    }

    /** Builds the "find by single field" JPQL query for the managed entity. */
    private String getQuery(String fieldName) {
        String query = "from " + persistentClass.getName() + " t " + "where t."
                + fieldName + " = :" + fieldName;
        return query;
    }

    /**
     * @see GenericDAO#getEntityClass()
     */
    public Class<T> getEntityClass() {
        return persistentClass;
    }

    /**
     * Merges {@code entity} into the persistence context and returns the
     * managed copy; the instance passed in is not itself attached.
     *
     * @see GenericDAO#save(Object)
     */
    public T save(T entity) throws Exception {
        entity = em.merge(entity);
        return entity;
    }

    /**
     * @see GenericDAO#delete(java.lang.Object)
     */
    public void delete(T entity) throws Exception {
        em.remove(entity);
    }

    /**
     * Deletes the entity with the given id; a no-op when no such entity
     * exists.
     *
     * @see GenericDAO#deleteById(java.lang.Object)
     */
    public void deleteById(final ID id) throws Exception {
        T entity = em.find(persistentClass, id);
        if (entity != null) em.remove(entity);
    }

    /**
     * Deletes the entities with the given ids, skipping ids that do not
     * resolve to an entity. Delegates to {@link #deleteBatchById} — the two
     * methods previously duplicated the same loop.
     */
    public void delete(ID ids[]) throws Exception {
        deleteBatchById(ids);
    }

    /**
     * Deletes the entities with the given ids, skipping ids that do not
     * resolve to an entity.
     *
     * @see GenericDAO#deleteBatchById(java.lang.Object)
     */
    public void deleteBatchById(ID ids[]) throws Exception {
        for (ID id : ids) {
            T entity = em.find(persistentClass, id);
            if (entity != null) em.remove(entity);
        }
    }

    /**
     * @see GenericDAO#findById(java.io.Serializable)
     */
    public T findById(final ID id) {
        final T result = em.find(persistentClass, id);
        return result;
    }

    /* (non-Javadoc)
     * @see com.systech.fm.dao.generic.GenericDAO#getReference(java.io.Serializable)
     */
    public T getReference(final ID id) {
        return em.getReference(persistentClass, id);
    }

    /**
     * Runs a named query with positional parameters (bound 1-based, in the
     * order given).
     *
     * @see GenericDAO#findByNamedQuery(String, Object...)
     */
    @SuppressWarnings("unchecked")
    public List<T> findByNamedQuery(final String name, Object... params) {
        javax.persistence.Query query = em.createNamedQuery(name);
        for (int i = 0; i < params.length; i++) {
            query.setParameter(i + 1, params[i]);
        }
        final List<T> result = (List<T>) query.getResultList();
        return result;
    }

    /**
     * Runs a named query with named parameters.
     *
     * @see GenericDAO#findByNamedQuery(String, Map)
     */
    @SuppressWarnings("unchecked")
    @Override
    public List<T> findByNamedQuery(final String name,
            final Map<String, ? extends Object> params) {
        javax.persistence.Query query = em.createNamedQuery(name);
        for (final Map.Entry<String, ? extends Object> param : params.entrySet()) {
            query.setParameter(param.getKey(), param.getValue());
        }
        return query.getResultList();
    }

    /**
     * Runs a named query with named parameters and a result window.
     *
     * @param start zero-based index of the first result
     * @param limit maximum number of results to return
     */
    @SuppressWarnings("unchecked")
    public List<T> findByNamedQuery(final String queryName, final Map<String, ? extends Object> params, int start, int limit) {
        javax.persistence.Query query = em.createNamedQuery(queryName);
        for (final Map.Entry<String, ? extends Object> param : params.entrySet()) {
            query.setParameter(param.getKey(), param.getValue());
        }
        query.setFirstResult(start);
        query.setMaxResults(limit);
        return query.getResultList();
    }

    /**
     * Lists a page of all entities of the managed type.
     *
     * @param firstResult zero-based index of the first result
     * @param maxResults  maximum number of results to return
     * @return the page, or an empty list when nothing is found
     */
    @SuppressWarnings("unchecked")
    public List<T> list(final int firstResult, final int maxResults) {
        try {
            Query qry = em.createQuery("from " + getEntityClass().getName());
            qry.setFirstResult(firstResult);
            qry.setMaxResults(maxResults);
            return qry.getResultList();
        } catch (javax.persistence.NoResultException ex) {
            // NOTE(review): getResultList() normally returns an empty list
            // rather than throwing; this catch is kept for safety.
            log.warn("Could not find any results with the query \"from "
                    + getEntityClass().getName() + "\"");
        }
        return new ArrayList<T>();
    }

    /**
     * Identical contract to {@link #findByNamedQuery(String, Map)}; kept for
     * interface compatibility and delegating to avoid the duplicated loop.
     */
    @Override
    public List<T> findByNamedQueryAndNamedParams(final String name,
            final Map<String, ? extends Object> params) {
        return findByNamedQuery(name, params);
    }

    /** Applies the given JPA lock mode to an already-managed entity. */
    @Override
    public void lock(T entity, LockModeType type) {
        em.lock(entity, type);
    }
}
|
|
/*******************************************************************************
* Copyright 2006 - 2012 Vienna University of Technology,
* Department of Software Technology and Interactive Systems, IFS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package at.tuwien.minimee.migration.engines;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import at.tuwien.minimee.model.ToolConfig;
import eu.scape_project.planning.model.beans.MigrationResult;
import eu.scape_project.planning.model.measurement.Measure;
import eu.scape_project.planning.model.measurement.Measurement;
import eu.scape_project.planning.model.values.PositiveFloatValue;
/**
 * This engine uses psList to monitor migration processes
 * on WINDOWS environments. However, it is currently NOT working properly
 * and should be fixed!
 *
 * @author gottardi
 */
public class MonitorEngineWinPslist extends MiniMeeDefaultMigrationEngine {

    private Logger log = LoggerFactory.getLogger(this.getClass());

    /** Batch script that drives the pslist-based monitoring loop. */
    private String monitorScript = "pslistMonitor.bat";
    /** Helper script used by the monitoring loop to advance its counter. */
    private String incrementScript = "increment.bat";
    /** File the monitoring loop writes its samples to. */
    private String logFile = "topWin.log";

    /** Removes the scripts and log file copied into the working directory. */
    @Override
    protected void cleanup(long time, String inputFile, String outputFile) {
        super.cleanup(time, inputFile, outputFile);
        String workingDir = makeWorkingDirName(time);
        new File(workingDir + "/" + monitorScript).delete();
        new File(workingDir + "/" + incrementScript).delete();
        new File(workingDir + "/" + logFile).delete();
    }

    /** Per-run working directory name derived from the run's timestamp. */
    protected String makeWorkingDirName(long time) {
        return getTempDir() + "profile_" + time;
    }

    /**
     * Creates the per-run working directory and copies the monitoring
     * scripts into it.
     *
     * @param time timestamp identifying this migration run
     * @return the working directory path
     * @throws Exception if a script resource cannot be copied
     */
    protected String prepareWorkingDirectory(long time) throws Exception {
        // assemble the working directory from the timestamp
        String workingDirectory = makeWorkingDirName(time);
        if (!(new File(workingDirectory)).mkdir()) {
            // Not fatal: the directory may already exist from a previous run,
            // but a genuine failure will surface in copyFile below.
            log.warn("could not create working directory: " + workingDirectory);
        }
        // copy the monitoring script
        copyFile("data/minimee/monitoring/" + monitorScript,
                workingDirectory + "/" + monitorScript, workingDirectory);
        // copy the increment script
        copyFile("data/minimee/monitoring/" + incrementScript,
                workingDirectory + "/" + incrementScript, workingDirectory);
        return workingDirectory;
    }

    /**
     * Builds the full command line: the monitoring command followed by the
     * tool invocation (executable, its params, input and optionally output
     * file), with the tool part quoted as a single argument.
     */
    @Override
    protected String prepareCommand(ToolConfig config, String params,
            String inputFile, String outputFile, long time) throws Exception {
        prepareWorkingDirectory(time);
        File file = new File(inputFile);
        // Cycle count for the monitor: roughly 6 units per MB of input, but
        // at least 100000 (winTop's counter needs 6 units per sample; the
        // original bound of 150000 was lowered for testing).
        Long timeout = Math.max((file.length() / (1000000)) * 6, 100000);
        String cycles = timeout.intValue() + "";
        String monitoringCmd = prepareMonitoringCommand(time, cycles);
        String command = monitoringCmd + " " + config.getTool().getExecutablePath() + " \"" + config.getParams() + " " + inputFile;
        // SPECIAL STUFF, UNLIKELY TO REMAIN HERE:
        if (!config.isNoOutFile()) {
            command = command + " " + outputFile;
        }
        command += "\"";
        log.debug("TOP WINDOWS MONITORING COMMAND: " + command);
        return command;
    }

    /**
     * Assembles the monitor invocation: script, working dir, cycle count,
     * log file and increment script — all inside the run's working directory.
     */
    protected String prepareMonitoringCommand(long time, String cycles) {
        return makeWorkingDirName(time) + "/" + monitorScript
                + " " + makeWorkingDirName(time) + "/"
                + " " + cycles
                + " " + makeWorkingDirName(time) + "/" + logFile
                + " " + makeWorkingDirName(time) + "/" + incrementScript;
    }

    /**
     * Copies classpath resource {@code from} to the file {@code to}.
     * Both streams are closed even on failure (the original implementation
     * leaked the input stream and copied one byte at a time).
     *
     * @param from classpath resource to read
     * @param to   destination file path
     * @throws Exception if the resource is missing or copying fails
     */
    protected void copyFile(String from, String to, String workingDirectory) throws Exception {
        InputStream in = Thread.currentThread()
                .getContextClassLoader().getResourceAsStream(from);
        if (in == null) {
            // Fail with a descriptive message instead of a later NPE.
            throw new IllegalArgumentException("resource not found on classpath: " + from);
        }
        try {
            FileOutputStream fos = new FileOutputStream(new File(to));
            try {
                byte[] buffer = new byte[8192];
                int read;
                while ((read = in.read(buffer)) != -1) {
                    fos.write(buffer, 0, read);
                }
                fos.flush();
            } finally {
                fos.close();
            }
        } finally {
            in.close();
        }
    }

    /**
     * Collects measurements for this migration run. The actual parsing of the
     * monitoring log (a TopWinParser over the working directory's log file)
     * is not implemented — see the class comment: this engine is not working
     * yet — so only a placeholder value is reported for used memory.
     */
    protected void collectData(ToolConfig config, long time, MigrationResult result) {
        for (Measure measure : getMeasures()) {
            Measurement m = new Measurement();
            m.setMeasureId(measure.getUri());
            PositiveFloatValue v = (PositiveFloatValue) measure.getScale().createValue();
            if (measure.getUri().equals("performance:memory:used")) {
                // FIXME: placeholder — the real value should come from the
                // parsed monitoring log once log parsing is implemented.
                v.setValue(123.12);
            }
            m.setValue(v);
            result.getMeasurements().put(measure.getUri(), m);
        }
    }
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
/**
 * Tests for the index-time "dynamic" mapping setting ("true", "false",
 * "strict") and for the incremental mapping updates produced when documents
 * introduce fields that are not yet mapped.
 */
public class DynamicMappingTests extends ESSingleNodeTestCase {
// dynamic=true: unmapped fields encountered while parsing are added and indexed.
public void testDynamicTrue() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "true")
.startObject("properties")
.startObject("field1").field("type", "string").endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
.startObject()
.field("field1", "value1")
.field("field2", "value2")
.bytes());
// Both the mapped field1 and the dynamically added field2 are indexed.
assertThat(doc.rootDoc().get("field1"), equalTo("value1"));
assertThat(doc.rootDoc().get("field2"), equalTo("value2"));
}
// dynamic=false: unmapped fields are silently ignored (parsed but not indexed).
public void testDynamicFalse() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.startObject("properties")
.startObject("field1").field("type", "string").endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
.startObject()
.field("field1", "value1")
.field("field2", "value2")
.bytes());
assertThat(doc.rootDoc().get("field1"), equalTo("value1"));
// field2 is not mapped and therefore absent from the indexed document.
assertThat(doc.rootDoc().get("field2"), nullValue());
}
// dynamic=strict: any unmapped field (even with a null value) rejects the document.
public void testDynamicStrict() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.startObject("properties")
.startObject("field1").field("type", "string").endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
try {
defaultMapper.parse("test", "type", "1", jsonBuilder()
.startObject()
.field("field1", "value1")
.field("field2", "value2")
.bytes());
fail();
} catch (StrictDynamicMappingException e) {
// all is well
}
// A null value for an unmapped field is rejected just like a real value.
try {
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field1", "value1")
.field("field2", (String) null)
.bytes());
fail();
} catch (StrictDynamicMappingException e) {
// all is well
}
}
// dynamic=false set on the root also applies inside inner objects.
public void testDynamicFalseWithInnerObjectButDynamicSetOnRoot() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
.startObject("properties")
.startObject("obj1").startObject("properties")
.startObject("field1").field("type", "string").endObject()
.endObject().endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", jsonBuilder()
.startObject().startObject("obj1")
.field("field1", "value1")
.field("field2", "value2")
.endObject()
.bytes());
assertThat(doc.rootDoc().get("obj1.field1"), equalTo("value1"));
// The unmapped inner field inherits the root's dynamic=false and is dropped.
assertThat(doc.rootDoc().get("obj1.field2"), nullValue());
}
// dynamic=strict set on the root also rejects unmapped fields inside inner objects.
public void testDynamicStrictWithInnerObjectButDynamicSetOnRoot() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
.startObject("properties")
.startObject("obj1").startObject("properties")
.startObject("field1").field("type", "string").endObject()
.endObject().endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
try {
defaultMapper.parse("test", "type", "1", jsonBuilder()
.startObject().startObject("obj1")
.field("field1", "value1")
.field("field2", "value2")
.endObject()
.bytes());
fail();
} catch (StrictDynamicMappingException e) {
// all is well
}
}
// Indexing an empty string still creates a dynamic mapping for the field.
public void testDynamicMappingOnEmptyString() throws Exception {
IndexService service = createIndex("test");
client().prepareIndex("test", "type").setSource("empty_field", "").get();
MappedFieldType fieldType = service.mapperService().fullName("empty_field");
assertNotNull(fieldType);
}
// When indexing fails under dynamic=strict, no type mapping may be left behind.
public void testTypeNotCreatedOnIndexFailure() throws IOException, InterruptedException {
XContentBuilder mapping = jsonBuilder().startObject().startObject("_default_")
.field("dynamic", "strict")
.endObject().endObject();
IndexService indexService = createIndex("test", Settings.EMPTY, "_default_", mapping);
try {
client().prepareIndex().setIndex("test").setType("type").setSource(jsonBuilder().startObject().field("test", "test").endObject()).get();
fail();
} catch (StrictDynamicMappingException e) {
// expected: _default_ is strict, so the unmapped field rejects the document
}
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
assertNull(getMappingsResponse.getMappings().get("test").get("type"));
}
// Helper: renders a mapper (or mapping update) as a JSON string for comparison.
private String serialize(ToXContent mapper) throws Exception {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
mapper.toXContent(builder, new ToXContent.MapParams(ImmutableMap.<String, String>of()));
return builder.endObject().string();
}
// Helper: parses the given document with the given mapper and returns the
// dynamic mapping update that parsing produced (null when nothing new was mapped).
private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XContentBuilder builder) throws Exception {
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext(settings, parser, mapper, new ContentPath(0));
SourceToParse source = SourceToParse.source(builder.bytes());
ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source);
assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
ctx.parser().nextToken();
return DocumentParser.parseObject(ctx, mapper.root(), true);
}
// A document using only already-mapped fields must produce no mapping update.
public void testDynamicMappingsNotNeeded() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
// foo is already defined in the mappings
assertNull(update);
}
// A single new string field yields an update containing just that field,
// without modifying the original mapping.
public void testField() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals("{\"type\":{\"properties\":{\"foo\":{\"type\":\"string\"}}}}", serialize(update));
}
// Updates must contain only the NEW fields, not everything already mapped.
public void testIncremental() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
// Make sure that mapping updates are incremental, this is important for performance otherwise
// every new field introduction runs in linear time with the total number of fields
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
// foo is NOT in the update
.startObject("bar").field("type", "string").endObject()
.endObject().endObject().string(), serialize(update));
}
// Two new fields in one document both appear in the update.
public void testIntroduceTwoFields() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("bar").field("type", "string").endObject()
.startObject("foo").field("type", "string").endObject()
.endObject().endObject().string(), serialize(update));
}
// A nested JSON object produces a correspondingly nested object mapping update.
public void testObject() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "string").endObject().endObject().endObject().endObject().endObject()
.endObject().endObject().endObject().string(), serialize(update));
}
// An array of scalars maps the field once, from its element type.
public void testArray() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo").value("bar").value("baz").endArray().endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").field("type", "string").endObject()
.endObject().endObject().endObject().string(), serialize(update));
}
// New leaves under an already-mapped object field are still mapped dynamically.
public void testInnerDynamicMapping() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties")
.startObject("foo").field("type", "object").endObject()
.endObject().endObject().endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "string").endObject().endObject().endObject().endObject().endObject()
.endObject().endObject().endObject().string(), serialize(update));
}
// An array of objects merges the mappings of all elements (here: a string
// field from the first element and a long field from the second).
public void testComplexArray() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo")
.startObject().field("bar", "baz").endObject()
.startObject().field("baz", 3).endObject()
.endArray().endObject());
assertEquals(mapping, serialize(mapper));
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").startObject("properties")
.startObject("bar").field("type", "string").endObject()
.startObject("baz").field("type", "long").endObject()
.endObject().endObject()
.endObject().endObject().endObject().string(), serialize(update));
}
// Dynamic mapping must reuse the type/options of a same-named field that
// already exists in another type of the index, and reject incompatible values.
public void testReuseExistingMappings() throws IOException, Exception {
IndexService indexService = createIndex("test", Settings.EMPTY, "type", "my_field1", "type=string,store=yes", "my_field2", "type=integer,precision_step=10");
// Even if the dynamic type of our new field is long, we already have a mapping for the same field
// of type string so it should be mapped as a string
DocumentMapper newMapper = indexService.mapperService().documentMapperWithAutoCreate("type2").getDocumentMapper();
Mapper update = parse(newMapper, indexService.mapperService().documentMapperParser(),
XContentFactory.jsonBuilder().startObject().field("my_field1", 42).endObject());
Mapper myField1Mapper = null;
for (Mapper m : update) {
if (m.name().equals("my_field1")) {
myField1Mapper = m;
}
}
assertNotNull(myField1Mapper);
// same type
assertTrue(myField1Mapper instanceof StringFieldMapper);
// and same option
assertTrue(((StringFieldMapper) myField1Mapper).fieldType().stored());
// Even if dynamic mappings would map a numeric field as a long, here it should map it as a integer
// since we already have a mapping of type integer
update = parse(newMapper, indexService.mapperService().documentMapperParser(),
XContentFactory.jsonBuilder().startObject().field("my_field2", 42).endObject());
Mapper myField2Mapper = null;
for (Mapper m : update) {
if (m.name().equals("my_field2")) {
myField2Mapper = m;
}
}
assertNotNull(myField2Mapper);
// same type
assertTrue(myField2Mapper instanceof IntegerFieldMapper);
// and same option
assertEquals(10, ((IntegerFieldMapper) myField2Mapper).fieldType().numericPrecisionStep());
// This can't work
try {
parse(newMapper, indexService.mapperService().documentMapperParser(),
XContentFactory.jsonBuilder().startObject().field("my_field2", "foobar").endObject());
fail("Cannot succeed, incompatible types");
} catch (MapperParsingException e) {
// expected
}
}
}
|
|
// Delombok expected-output fixture: each nested class is annotated with
// @com.develhack.annotation.feature.VO, for which the processor generates
// getters plus equals/canEqual/hashCode. Code below must stay byte-identical
// to the generator output; only comments are added here.
class VO {
// Baseline case. Generated members cover field, finalField and exclude;
// the transient field is skipped and gets no getter.
// NOTE(review): `exclude` is annotated @ExcludedFrom(VO.class) yet still
// participates in equals/hashCode below - confirm this is intended.
@com.develhack.annotation.feature.VO @java.lang.SuppressWarnings("serial") class Default implements java.io.Serializable {
private int field;
private final int finalField = 0;
private transient int transientField;
private @com.develhack.annotation.feature.ExcludedFrom(com.develhack.annotation.feature.VO.class) String exclude;
Default() {
super();
}
public @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int getField() {
return this.field;
}
public @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int getFinalField() {
return this.finalField;
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean equals(final java.lang.Object o) {
if ((o == this))
return true;
if ((! (o instanceof VO.Default)))
return false;
final Default other = (Default) o;
if ((! other.canEqual((java.lang.Object) this)))
return false;
if ((this.field != other.field))
return false;
if ((this.finalField != other.finalField))
return false;
final java.lang.Object this$exclude = this.exclude;
final java.lang.Object other$exclude = other.exclude;
if (((this$exclude == null) ? (other$exclude != null) : (! this$exclude.equals(other$exclude))))
return false;
return true;
}
protected @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean canEqual(final java.lang.Object other) {
return (other instanceof VO.Default);
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int hashCode() {
final int PRIME = 59;
int result = 1;
result = ((result * PRIME) + this.field);
result = ((result * PRIME) + this.finalField);
final java.lang.Object $exclude = this.exclude;
result = ((result * PRIME) + (($exclude == null) ? 43 : $exclude.hashCode()));
return result;
}
}
// Direct subclass of Object: generated equals/hashCode do NOT delegate to super.
@com.develhack.annotation.feature.VO @java.lang.SuppressWarnings("serial") class ExtendsObject extends Object implements java.io.Serializable {
private int field;
ExtendsObject() {
super();
}
public @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int getField() {
return this.field;
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean equals(final java.lang.Object o) {
if ((o == this))
return true;
if ((! (o instanceof VO.ExtendsObject)))
return false;
final ExtendsObject other = (ExtendsObject) o;
if ((! other.canEqual((java.lang.Object) this)))
return false;
if ((this.field != other.field))
return false;
return true;
}
protected @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean canEqual(final java.lang.Object other) {
return (other instanceof VO.ExtendsObject);
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int hashCode() {
final int PRIME = 59;
int result = 1;
result = ((result * PRIME) + this.field);
return result;
}
}
// Non-Object supertype: equals calls super.equals and hashCode mixes in
// super.hashCode before the local field.
// NOTE(review): class is named ExtendsNumber but extends Thread - looks like a
// fixture rename leftover; confirm against the "before" source.
@com.develhack.annotation.feature.VO @java.lang.SuppressWarnings("serial") class ExtendsNumber extends Thread implements java.io.Serializable {
private int field;
ExtendsNumber() {
super();
}
public @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int getField() {
return this.field;
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean equals(final java.lang.Object o) {
if ((o == this))
return true;
if ((! (o instanceof VO.ExtendsNumber)))
return false;
final ExtendsNumber other = (ExtendsNumber) o;
if ((! other.canEqual((java.lang.Object) this)))
return false;
if ((! super.equals(o)))
return false;
if ((this.field != other.field))
return false;
return true;
}
protected @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean canEqual(final java.lang.Object other) {
return (other instanceof VO.ExtendsNumber);
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int hashCode() {
final int PRIME = 59;
int result = 1;
result = ((result * PRIME) + super.hashCode());
result = ((result * PRIME) + this.field);
return result;
}
}
// Package-private field is still picked up for the getter and equals/hashCode.
@com.develhack.annotation.feature.VO @java.lang.SuppressWarnings("serial") class HasNonPrivateField implements java.io.Serializable {
int field;
HasNonPrivateField() {
super();
}
public @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int getField() {
return this.field;
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean equals(final java.lang.Object o) {
if ((o == this))
return true;
if ((! (o instanceof VO.HasNonPrivateField)))
return false;
final HasNonPrivateField other = (HasNonPrivateField) o;
if ((! other.canEqual((java.lang.Object) this)))
return false;
if ((this.field != other.field))
return false;
return true;
}
protected @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean canEqual(final java.lang.Object other) {
return (other instanceof VO.HasNonPrivateField);
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int hashCode() {
final int PRIME = 59;
int result = 1;
result = ((result * PRIME) + this.field);
return result;
}
}
// Hand-written setter is preserved as-is; only the getter is generated.
@com.develhack.annotation.feature.VO() @java.lang.SuppressWarnings("serial") class HasSetter implements java.io.Serializable {
private int field;
HasSetter() {
super();
}
void setField(int field) {
this.field = field;
}
public @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int getField() {
return this.field;
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean equals(final java.lang.Object o) {
if ((o == this))
return true;
if ((! (o instanceof VO.HasSetter)))
return false;
final HasSetter other = (HasSetter) o;
if ((! other.canEqual((java.lang.Object) this)))
return false;
if ((this.field != other.field))
return false;
return true;
}
protected @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean canEqual(final java.lang.Object other) {
return (other instanceof VO.HasSetter);
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int hashCode() {
final int PRIME = 59;
int result = 1;
result = ((result * PRIME) + this.field);
return result;
}
}
// @Accessible visibilities applied verbatim: protected getter, private setter.
@com.develhack.annotation.feature.VO @java.lang.SuppressWarnings("serial") class InvalidAccessorVisibility implements java.io.Serializable {
private @com.develhack.annotation.feature.Accessible(get = com.develhack.annotation.feature.Access.PROTECTED,set = com.develhack.annotation.feature.Access.PRIVATE) int field;
InvalidAccessorVisibility() {
super();
}
protected @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int getField() {
return this.field;
}
private @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") void setField(final int field) {
this.field = field;
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean equals(final java.lang.Object o) {
if ((o == this))
return true;
if ((! (o instanceof VO.InvalidAccessorVisibility)))
return false;
final InvalidAccessorVisibility other = (InvalidAccessorVisibility) o;
if ((! other.canEqual((java.lang.Object) this)))
return false;
if ((this.field != other.field))
return false;
return true;
}
protected @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean canEqual(final java.lang.Object other) {
return (other instanceof VO.InvalidAccessorVisibility);
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int hashCode() {
final int PRIME = 59;
int result = 1;
result = ((result * PRIME) + this.field);
return result;
}
}
// @Accessible(set = DEFAULT) yields a package-private generated setter.
@com.develhack.annotation.feature.VO @java.lang.SuppressWarnings("serial") class InvalidSetterVisibility implements java.io.Serializable {
private @com.develhack.annotation.feature.Accessible(set = com.develhack.annotation.feature.Access.DEFAULT) int field;
InvalidSetterVisibility() {
super();
}
public @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int getField() {
return this.field;
}
@java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") void setField(final int field) {
this.field = field;
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean equals(final java.lang.Object o) {
if ((o == this))
return true;
if ((! (o instanceof VO.InvalidSetterVisibility)))
return false;
final InvalidSetterVisibility other = (InvalidSetterVisibility) o;
if ((! other.canEqual((java.lang.Object) this)))
return false;
if ((this.field != other.field))
return false;
return true;
}
protected @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") boolean canEqual(final java.lang.Object other) {
return (other instanceof VO.InvalidSetterVisibility);
}
public @java.lang.Override @java.lang.SuppressWarnings("all") @javax.annotation.Generated("lombok") int hashCode() {
final int PRIME = 59;
int result = 1;
result = ((result * PRIME) + this.field);
return result;
}
}
VO() {
super();
}
}
|
|
/*
*Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
*WSO2 Inc. licenses this file to you under the Apache License,
*Version 2.0 (the "License"); you may not use this file except
*in compliance with the License.
*You may obtain a copy of the License at
*
*http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing,
*software distributed under the License is distributed on an
*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
*KIND, either express or implied. See the License for the
*specific language governing permissions and limitations
*under the License.
*/
package org.wso2.carbon.registry.es.publisher.crud;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.wink.client.ClientResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.testng.annotations.*;
import org.wso2.carbon.automation.engine.annotations.ExecutionEnvironment;
import org.wso2.carbon.automation.engine.annotations.SetEnvironment;
import org.wso2.carbon.automation.engine.context.TestUserMode;
import org.wso2.carbon.automation.engine.frameworkutils.FrameworkPathUtil;
import org.wso2.carbon.registry.es.utils.ESTestBaseTest;
import org.wso2.es.integration.common.clients.ResourceAdminServiceClient;
import org.wso2.es.integration.common.utils.GenericRestClient;
import javax.ws.rs.core.MediaType;
import javax.xml.xpath.XPathExpressionException;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
@SetEnvironment(executionEnvironments = {ExecutionEnvironment.ALL})
public class GenericCRUDTestCase extends ESTestBaseTest {
private static final Log log = LogFactory.getLog(GenericCRUDTestCase.class);
private TestUserMode userMode;
String jSessionId;
String assetId = null;
String customAssetId = null;
String assetName;
String cookieHeader;
GenericRestClient genericRestClient;
Map<String, String> headerMap;
String publisherUrl;
String resourcePath;
@Factory(dataProvider = "userModeProvider")
public GenericCRUDTestCase(TestUserMode userMode) {
this.userMode = userMode;
}
@BeforeClass(alwaysRun = true)
public void init() throws Exception {
super.init(userMode);
genericRestClient = new GenericRestClient();
headerMap = new HashMap<>();
resourcePath = FrameworkPathUtil.getSystemResourceLocation()
+ "artifacts" + File.separator + "GREG" + File.separator;
publisherUrl = publisherContext.getContextUrls()
.getSecureServiceUrl().replace("services", "publisher/apis");
setTestEnvironment();
}
@BeforeMethod(alwaysRun = true)
public void reInitEnvironment() throws XPathExpressionException, JSONException {
JSONObject objSessionPublisher =
new JSONObject(authenticate(publisherUrl, genericRestClient,
automationContext.getSuperTenant().getTenantAdmin().getUserName(),
automationContext.getSuperTenant().getTenantAdmin().getPassword())
.getEntity(String.class));
jSessionId = objSessionPublisher.getJSONObject("data").getString("sessionId");
cookieHeader = "JSESSIONID=" + jSessionId;
}
private void setTestEnvironment() throws Exception {
assertTrue(addNewRxtConfiguration("event_lc.rxt", "event_lc.rxt"),"Adding new custom event_lc.rxt failure encountered ");
JSONObject objSessionPublisher =
new JSONObject(authenticate(publisherUrl, genericRestClient,
automationContext.getSuperTenant().getTenantAdmin().getUserName(),
automationContext.getSuperTenant().getTenantAdmin().getPassword())
.getEntity(String.class));
jSessionId = objSessionPublisher.getJSONObject("data").getString("sessionId");
cookieHeader = "JSESSIONID=" + jSessionId;
//refresh the publisher landing page to deploy new rxt type
refreshPublisherLandingPage(publisherUrl, genericRestClient, cookieHeader);
assertNotNull(jSessionId, "Invalid JSessionID received");
}
@Test(groups = {"wso2.greg", "wso2.greg.es"}, description = "Create Defined Asset(soapservice) without required field in Publisher")
public void createAssetWithoutRequiredField() throws JSONException, IOException {
Map<String, String> queryParamMap = new HashMap<>();
queryParamMap.put("type", "soapservice");
String soapTemplate = readFile(resourcePath + "json" + File.separator + "soapservice-sample.json");
assetName = "bbb";
String dataBody = String.format(soapTemplate, assetName, "bbb", "1.0.0", null);
JSONObject jsonObject = new JSONObject(dataBody);
jsonObject.remove("overview_version");
ClientResponse response =
genericRestClient.geneticRestRequestPost(publisherUrl + "/assets",
MediaType.APPLICATION_JSON,
MediaType.APPLICATION_JSON, jsonObject.toString()
, queryParamMap, headerMap, cookieHeader);
JSONObject resObject = new JSONObject(response.getEntity(String.class));
if (response.getStatusCode() == 201) {
assetId = (String)resObject.get("id");
}
assertTrue((response.getStatusCode() == 500),
"Wrong status code ,Expected 500 Internal Server Error ,Received " +
response.getStatusCode()
);
}
@Test(groups = {"wso2.greg", "wso2.greg.es"}, description = "Check Option Field Values in Publisher")
public void checkOptionFieldValues() throws IOException, JSONException {
ClientResponse response = getAssetCreatePage("evlc");
assertTrue((response.getStatusCode() == 200),
"Wrong status code ,Expected 200 OK ,Received " +
response.getStatusCode()
);
String createPage = response.getEntity(String.class);
String [] formGroup = createPage.split("<div class=\"form-group\">");
for (String form: formGroup) {
if (form.contains("rules_gender")) {
assertTrue(form.contains("male"));
assertTrue(form.contains("female"));
break;
}
}
}
@Test(groups = {"wso2.greg", "wso2.greg.es"}, description = "Check Dynamically Populated values in Publisher")
public void checkDynamicPopulatorValues() throws IOException, JSONException {
ClientResponse response = getAssetCreatePage("evlc");
assertTrue((response.getStatusCode() == 200),
"Wrong status code ,Expected 200 OK ,Received " +
response.getStatusCode()
);
String createPage = response.getEntity(String.class);
String [] formGroup = createPage.split("<div class=\"form-group\">");
for (String form: formGroup) {
if (form.contains("serviceLifecycle_lifecycleName")) {
assertTrue(form.contains("None"));
assertTrue(form.contains("ServerLifeCycle"));
assertTrue(form.contains("EndpointLifeCycle"));
assertTrue(form.contains("ServiceLifeCycle"));
break;
}
}
}
@Test(groups = {"wso2.greg", "wso2.greg.es"}, description = "Check Default Values in Publisher")
public void checkDefaultValues() throws IOException, JSONException {
ClientResponse response = getAssetCreatePage("evlc");
assertTrue((response.getStatusCode() == 200),
"Wrong status code ,Expected 200 OK ,Received " +
response.getStatusCode()
);
String createPage = response.getEntity(String.class);
String [] formGroup = createPage.split("<div class=\"form-group\">");
for (String form: formGroup) {
if (form.contains("details_venue")) {
assertTrue(form.contains("value=\"Colombo\""));
break;
}
}
}
@Test(groups = {"wso2.greg", "wso2.greg.es"}, description = "Create Custom Asset without required field in Publisher")
public void createCustomAssetWithoutRequiredField() throws IOException, JSONException {
Map<String, String> queryParamMap = new HashMap<>();
queryParamMap.put("type", "evlc");
String evlcTemplate = readFile(resourcePath + "json" + File.separator + "evlc-sample.json");
assetName = "fff";
String dataBody = String.format(evlcTemplate, assetName, "16/11/2015", "PG", "male", "07772223334", "none", "none");
JSONObject jsonObject = new JSONObject(dataBody);
jsonObject.remove("details_date");
ClientResponse response =
genericRestClient.geneticRestRequestPost(publisherUrl + "/assets",
MediaType.APPLICATION_JSON,
MediaType.APPLICATION_JSON, jsonObject.toString()
, queryParamMap, headerMap, cookieHeader);
JSONObject resObject = new JSONObject(response.getEntity(String.class));
if (response.getStatusCode() == 201) {
customAssetId = (String) resObject.get("id");
}
assertTrue((response.getStatusCode() == 400),
"Wrong status code ,Expected 400 Bad Request ,Received " +
response.getStatusCode()
);
}
private ClientResponse getAssetCreatePage(String shortName) {
Map<String, String> queryParamMap = new HashMap<>();
return genericRestClient.geneticRestRequestGet(publisherUrl.replace("/apis","/assets/") + shortName + "/create",
queryParamMap, "text/html", headerMap, cookieHeader);
}
@AfterClass(alwaysRun = true)
public void cleanUp() throws Exception {
Map<String, String> queryParamMap = new HashMap<>();
if (assetId != null) {
queryParamMap.put("type", "soapservice");
deleteAssetById(publisherUrl, genericRestClient, cookieHeader, assetId, queryParamMap);
}
if (customAssetId != null) {
queryParamMap.put("type", "evlc");
deleteAssetById(publisherUrl, genericRestClient, cookieHeader, customAssetId, queryParamMap);
}
assertTrue(deleteCustomRxtConfiguration("event_lc.rxt"),"Deleting of added custom event_lc.rxt encountered a failure");
}
@DataProvider
private static TestUserMode[][] userModeProvider() {
return new TestUserMode[][]{
new TestUserMode[]{TestUserMode.SUPER_TENANT_ADMIN}
// new TestUserMode[]{TestUserMode.TENANT_USER},
};
}
}
|
|
/**
* <copyright>
* </copyright>
*
*
*/
package eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui;
import java.io.IOException;
import java.io.StringReader;
import java.util.Iterator;
import org.eclipse.core.runtime.Assert;
import org.eclipse.core.runtime.ListenerList;
import org.eclipse.jface.action.ToolBarManager;
import org.eclipse.jface.resource.JFaceResources;
import org.eclipse.jface.text.AbstractInformationControl;
import org.eclipse.jface.text.IDelayedInputChangeProvider;
import org.eclipse.jface.text.IInformationControlExtension2;
import org.eclipse.jface.text.IInputChangedListener;
import org.eclipse.jface.text.TextPresentation;
import org.eclipse.swt.SWT;
import org.eclipse.swt.SWTError;
import org.eclipse.swt.browser.Browser;
import org.eclipse.swt.browser.LocationListener;
import org.eclipse.swt.browser.OpenWindowListener;
import org.eclipse.swt.browser.ProgressAdapter;
import org.eclipse.swt.browser.ProgressEvent;
import org.eclipse.swt.browser.WindowEvent;
import org.eclipse.swt.custom.StyleRange;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.graphics.TextLayout;
import org.eclipse.swt.graphics.TextStyle;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Slider;
/**
 * Displays HTML information in a {@link Browser} widget.
 * <p>
 * This IInformationControlExtension2 expects {@link #setInput(Object)} to be
 * called with an argument of type BrowserInformationControlInput.
 * </p>
 * <p>
 * Moved into this package from
 * <code>org.eclipse.jface.internal.text.revisions</code>.
 * </p>
 * <p>
 * This class may be instantiated; it is not intended to be subclassed.
 * </p>
 * <p>
 * Current problems:
 * <ul>
 * <li>the size computation is too small</li>
 * <li>focusLost event is not sent - see
 * https://bugs.eclipse.org/bugs/show_bug.cgi?id=84532</li>
 * </ul>
 * </p>
 *
 * @since 3.2
 */
public class HyvalidityformulaBrowserInformationControl extends AbstractInformationControl implements IInformationControlExtension2, IDelayedInputChangeProvider {
/**
* <p>
* Tells whether the SWT Browser widget and hence this information control is
* available.
* </p>
*
* @param parent the parent component used for checking or <code>null</code> if
* none
*
* @return <code>true</code> if this control is available
*/
public static boolean isAvailable(Composite parent) {
// One-time probe: result is cached in the static fgIsAvailable /
// fgAvailabilityChecked flags, so the widgets below are only created once.
if (!fgAvailabilityChecked) {
try {
Browser browser= new Browser(parent, SWT.NONE);
browser.dispose();
fgIsAvailable= true;
// While we are here, measure and cache the native scroll bar sizes
// (used later by computeTrim()).
Slider sliderV= new Slider(parent, SWT.VERTICAL);
Slider sliderH= new Slider(parent, SWT.HORIZONTAL);
int width= sliderV.computeSize(SWT.DEFAULT, SWT.DEFAULT).x;
int height= sliderH.computeSize(SWT.DEFAULT, SWT.DEFAULT).y;
fgScrollBarSize= new Point(width, height);
sliderV.dispose();
sliderH.dispose();
} catch (SWTError er) {
// Browser construction failed (e.g. no native browser installed).
fgIsAvailable= false;
} finally {
fgAvailabilityChecked= true;
}
}
return fgIsAvailable;
}
/** Minimal size constraints (pixels) enforced by computeSizeHint(). */
private static final int MIN_WIDTH = 80;
private static final int MIN_HEIGHT = 50;
/** Availability checking cache, populated once by isAvailable(Composite). */
private static boolean fgIsAvailable = false;
private static boolean fgAvailabilityChecked = false;
/** Cached scroll bar width and height, measured in isAvailable(Composite). */
private static Point fgScrollBarSize;
/** The control's browser widget; nulled in dispose(). */
private Browser fBrowser;
/** Tells whether the browser has content (set in setInput(Object)). */
private boolean fBrowserHasContent;
/** Text layout used to approximate size of content when rendered in browser. */
private TextLayout fTextLayout;
/** Bold text style applied during size approximation in computeSizeHint(). */
private TextStyle fBoldStyle;
// Current input shown in the browser, or null after setVisible(false) clears it.
private eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui.HyvalidityformulaDocBrowserInformationControlInput fInput;
/**
 * <code>true</code> iff the browser has completed loading of the last input set
 * via {@link #setInformation(String)}.
 */
private boolean fCompleted = false;
/** The listener to be notified when a delayed location changing event happened. */
private IInputChangedListener fDelayedInputChangeListener;
/** The listeners to be notified when the input changed. */
private ListenerList fInputChangeListeners = new ListenerList(ListenerList.IDENTITY);
/**
 * The symbolic name of the font used for size computations, or <code>null</code>
 * to use dialog font.
 */
private final String fSymbolicFontName;
/**
* <p>
* Creates a browser information control with the given shell as parent.
* </p>
*
* @param parent the parent shell
* @param symbolicFontName the symbolic name of the font used for size computations
* @param resizable <code>true</code> if the control should be resizable
*/
public HyvalidityformulaBrowserInformationControl(Shell parent, String symbolicFontName, boolean resizable) {
super(parent, resizable);
// Must be assigned before create(): createContent() -> createTextLayout()
// reads fSymbolicFontName.
fSymbolicFontName= symbolicFontName;
create();
}
/**
* <p>
* Creates a browser information control with the given shell as parent.
* </p>
*
* @param parent the parent shell
* @param symbolicFontName the symbolic name of the font used for size computations
* @param statusFieldText the text to be used in the optional status field or
* <code>null</code> if the status field should be hidden
*/
public HyvalidityformulaBrowserInformationControl(Shell parent, String symbolicFontName, String statusFieldText) {
super(parent, statusFieldText);
// Must be assigned before create(): createContent() -> createTextLayout()
// reads fSymbolicFontName.
fSymbolicFontName= symbolicFontName;
create();
}
/**
* <p>
* Creates a browser information control with the given shell as parent.
* </p>
*
* @param parent the parent shell
* @param symbolicFontName the symbolic name of the font used for size computations
* @param toolBarManager the manager or <code>null</code> if toolbar is not desired
*
* @since 3.4
*/
public HyvalidityformulaBrowserInformationControl(Shell parent, String symbolicFontName, ToolBarManager toolBarManager) {
super(parent, toolBarManager);
// Must be assigned before create(): createContent() -> createTextLayout()
// reads fSymbolicFontName.
fSymbolicFontName= symbolicFontName;
create();
}
/**
*
* @see org.eclipse.jface.text.AbstractInformationControl#createContent(Composite)
*/
// Builds the Browser widget and wires its listeners: ESC disposes the control,
// a progress listener records load completion (consumed by setVisible), and
// new-window requests are cancelled.
protected void createContent(Composite parent) {
fBrowser= new Browser(parent, SWT.NONE);
Display display= getShell().getDisplay();
fBrowser.setForeground(display.getSystemColor(SWT.COLOR_INFO_FOREGROUND));
fBrowser.setBackground(display.getSystemColor(SWT.COLOR_INFO_BACKGROUND));
fBrowser.addKeyListener(new KeyListener() {
public void keyPressed(KeyEvent e) {
if (e.character == 0x1B) // ESC
dispose(); // XXX: Just hide? Would avoid constant recreations.
}
public void keyReleased(KeyEvent e) {}
});
// fCompleted gates the event-loop spin in setVisible(true).
fBrowser.addProgressListener(new ProgressAdapter() {
public void completed(ProgressEvent event) {
fCompleted= true;
}
});
fBrowser.addOpenWindowListener(new OpenWindowListener() {
public void open(WindowEvent event) {
event.required= true; // Cancel opening of new windows
}
});
// Replace browser's built-in context menu with none
fBrowser.setMenu(new Menu(getShell(), SWT.NONE));
createTextLayout();
}
/**
 * {@inheritDoc} This control can handle {@link String} input and input of type
 * HyvalidityformulaDocBrowserInformationControlInput; any other non-null input
 * is rejected by an assertion.
 */
public void setInput(Object input) {
// Only String, the dedicated input type, or null (= clear) are accepted.
Assert.isLegal(input == null || input instanceof String || input instanceof eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui.HyvalidityformulaDocBrowserInformationControlInput);
if (input instanceof String) {
setInformation((String)input);
return;
}
fInput= (eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui.HyvalidityformulaDocBrowserInformationControlInput) input;
String content= null;
if (fInput != null) content= fInput.getHtml();
fBrowserHasContent= content != null && content.length() > 0;
// Fall back to an empty page when the input has no HTML.
if (!fBrowserHasContent) content= "<html><body ></html>";
boolean RTL= (getShell().getStyle() & SWT.RIGHT_TO_LEFT) != 0;
boolean resizable= isResizable();
// The default "overflow:auto" would not result in a predictable width for the
// client area and the re-wrapping would cause visual noise
String[] styles= null;
if (RTL && resizable) {
styles= new String[] { "direction:rtl;", "overflow:scroll;", "word-wrap:break-word;" };
} else if (RTL && !resizable) {
styles= new String[] { "direction:rtl;", "overflow:hidden;", "word-wrap:break-word;" };
} else if (!resizable) {
// XXX: In IE, "word-wrap: break-word;" causes bogus wrapping even in non-broken
// words :-(see e.g. Javadoc of String). Re-check whether we really still need
// this now that the Javadoc Hover header already sets this style.
styles= new String[] { "overflow:hidden;"/*, "word-wrap: break-word;"*/ };
} else {
styles= new String[] { "overflow:scroll;" };
}
StringBuffer buffer= new StringBuffer(content);
eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui.HyvalidityformulaHTMLPrinter.insertStyles(buffer, styles);
content= buffer.toString();
// XXX: Should add some JavaScript here that shows something like "(continued...)"
// or "..." at the end of the visible area when the page overflowed with
// "overflow:hidden;".
// Loading starts now; the progress listener flips fCompleted back to true.
fCompleted= false;
fBrowser.setText(content);
// Notify registered input-change listeners of the new input.
Object[] listeners= fInputChangeListeners.getListeners();
for (int i= 0; i < listeners.length; i++) {
((IInputChangedListener)listeners[i]).inputChanged(fInput);
}
}
// Shows or hides the control. Hiding also clears the input; showing spins the
// event loop until the browser finished loading (or a 100ms timeout fires) to
// avoid flicker.
public void setVisible(boolean visible) {
Shell shell= getShell();
if (shell.isVisible() == visible) {
return;
}
if (!visible) {
super.setVisible(false);
setInput(null);
return;
}
// The Browser widget flickers when made visible while it is not completely
// loaded. The fix is to delay the call to setVisible until either loading is
// completed (see ProgressListener in constructor), or a timeout has been reached.
final Display display = shell.getDisplay();
// Make sure the display wakes from sleep after timeout:
display.timerExec(100, new Runnable() {
public void run() {
fCompleted= true;
}
});
while (!fCompleted) {
// Drive the event loop to process the events required to load the browser
// widget's contents:
if (!display.readAndDispatch()) {
display.sleep();
}
}
// The event loop may have disposed us in the meantime - re-check the shell.
shell = getShell();
if (shell == null || shell.isDisposed()) {
return;
}
// Avoids flickering when replacing hovers, especially on Vista in ON_CLICK mode.
// Causes flickering on GTK. Carbon does not care.
if ("win32".equals(SWT.getPlatform())) {
shell.moveAbove(null);
}
super.setVisible(true);
}
/**
*
* @see AbstractInformationControl#setSize(int, int)
*/
public void setSize(int width, int height) {
fBrowser.setRedraw(false); // avoid flickering
try {
super.setSize(width, height);
} finally {
// Always re-enable painting, even if super.setSize throws.
fBrowser.setRedraw(true);
}
}
/**
* <p>
* Creates and initializes the text layout used to compute the size hint.
* </p>
*
* @since 3.2
*/
private void createTextLayout() {
fTextLayout= new TextLayout(fBrowser.getDisplay());
// Initialize fonts: use the configured symbolic font, or the JFace dialog
// font when none was given.
String symbolicFontName= fSymbolicFontName == null ? JFaceResources.DIALOG_FONT : fSymbolicFontName;
Font font = JFaceResources.getFont(symbolicFontName);
fTextLayout.setFont(font);
fTextLayout.setWidth(-1);
// Bold variant is applied to BOLD style ranges in computeSizeHint().
font = JFaceResources.getFontRegistry().getBold(symbolicFontName);
fBoldStyle = new TextStyle(font, null, null);
// Compute and set tab width from the width of four spaces in this font.
fTextLayout.setText("    ");
int tabWidth = fTextLayout.getBounds().width;
fTextLayout.setTabs(new int[] {tabWidth});
fTextLayout.setText("");
}
public void dispose() {
// Release the text layout we own; guard allows repeated dispose() calls.
if (fTextLayout != null) {
fTextLayout.dispose();
fTextLayout = null;
}
// Only dropped here, not disposed - presumably the widget is disposed with
// the shell by super.dispose(); TODO confirm.
fBrowser = null;
super.dispose();
}
// Approximates the size needed to display the current input by rendering its
// text into fTextLayout, then clamps the result to the size constraints and
// the MIN_WIDTH/MIN_HEIGHT minimums.
// NOTE(review): dereferences fInput without a null check - appears to assume
// setInput was called with a non-null input first; confirm with callers.
public Point computeSizeHint() {
Point sizeConstraints = getSizeConstraints();
Rectangle trim = computeTrim();
int height = trim.height;
// Strip the HTML down to plain text plus style ranges for measuring.
TextPresentation presentation= new TextPresentation();
String text;
try {
text = eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui.HyvalidityformulaHTMLPrinter.html2text(new StringReader(fInput.getHtml()), presentation);
} catch (IOException e) {
text = "";
}
fTextLayout.setText(text);
fTextLayout.setWidth(sizeConstraints == null ? SWT.DEFAULT : sizeConstraints.x - trim.width);
// Apply bold style ranges so the measurement accounts for the wider glyphs.
Iterator<?> iter= presentation.getAllStyleRangeIterator();
while (iter.hasNext()) {
StyleRange sr= (StyleRange)iter.next();
if (sr.fontStyle == SWT.BOLD) {
fTextLayout.setStyle(fBoldStyle, sr.start, sr.start + sr.length - 1);
}
}
Rectangle bounds= fTextLayout.getBounds(); // does not return minimum width, see https://bugs.eclipse.org/bugs/show_bug.cgi?id=217446
// Work around the bug above: compute the real width from the widest line.
int lineCount= fTextLayout.getLineCount();
int textWidth= 0;
for (int i= 0; i < lineCount; i++) {
Rectangle rect= fTextLayout.getLineBounds(i);
int lineWidth= rect.x + rect.width;
if (i == 0) {
lineWidth += fInput.getLeadingImageWidth();
}
textWidth= Math.max(textWidth, lineWidth);
}
bounds.width= textWidth;
fTextLayout.setText("");
int minWidth= bounds.width;
height= height + bounds.height;
// Add some air to accommodate for different browser renderings
minWidth+= 15;
height+= 15;
// Apply max size constraints
if (sizeConstraints != null) {
if (sizeConstraints.x != SWT.DEFAULT) {
minWidth= Math.min(sizeConstraints.x, minWidth + trim.width);
}
if (sizeConstraints.y != SWT.DEFAULT) {
height= Math.min(sizeConstraints.y, height);
}
}
// Ensure minimal size
int width= Math.max(MIN_WIDTH, minWidth);
height= Math.max(MIN_HEIGHT, height);
Point windowSize = new Point(width, height);
return windowSize;
}
/**
 * Computes the window trim, enlarging it by the scroll-bar size when the
 * control is resizable (scroll bars may appear); in right-to-left mode the
 * vertical scroll bar sits on the left, so the origin shifts as well.
 *
 * @see org.eclipse.jface.text.IInformationControlExtension3#computeTrim()
 */
public Rectangle computeTrim() {
Rectangle trim = super.computeTrim();
if (isResizable()) {
boolean RTL = (getShell().getStyle() & SWT.RIGHT_TO_LEFT) != 0;
if (RTL) {
// Vertical scroll bar is on the left in RTL layouts.
trim.x-= fgScrollBarSize.x;
}
trim.width+= fgScrollBarSize.x;
trim.height+= fgScrollBarSize.y;
}
return trim;
}
/**
 * <p>
 * Adds the listener to the collection of listeners who will be notified when the
 * current location has changed or is about to change.
 * </p>
 *
 * @param listener the location listener
 *
 * @since 3.4
 */
public void addLocationListener(LocationListener listener) {
fBrowser.addLocationListener(listener);
}
/**
 * Sets the foreground color on both the shell and the embedded browser so
 * the two stay visually consistent.
 *
 * @param foreground the new foreground color
 */
public void setForegroundColor(Color foreground) {
super.setForegroundColor(foreground);
fBrowser.setForeground(foreground);
}
/**
 * Sets the background color on both the shell and the embedded browser.
 *
 * @param background the new background color
 */
public void setBackgroundColor(Color background) {
super.setBackgroundColor(background);
fBrowser.setBackground(background);
}
/**
 * @return <code>true</code> iff HTML content has been set on the browser
 */
public boolean hasContents() {
return fBrowserHasContent;
}
/**
 * <p>
 * Adds a listener for input changes to this input change provider. Has no effect
 * if an identical listener is already registered.
 * </p>
 *
 * @param inputChangeListener the listener to add, must not be <code>null</code>
 *
 * @since 3.4
 */
public void addInputChangeListener(IInputChangedListener inputChangeListener) {
Assert.isNotNull(inputChangeListener);
fInputChangeListeners.add(inputChangeListener);
}
/**
 * <p>
 * Removes the given input change listener from this input change provider. Has no
 * effect if an identical listener is not registered.
 * </p>
 *
 * @param inputChangeListener the listener to remove
 *
 * @since 3.4
 */
public void removeInputChangeListener(IInputChangedListener inputChangeListener) {
fInputChangeListeners.remove(inputChangeListener);
}
/**
 * Registers (or clears, when <code>null</code>) the single listener that is
 * notified of delayed input changes.
 *
 * @param inputChangeListener the listener, or <code>null</code> to clear
 *
 * @see
 * IDelayedInputChangeProvider#setDelayedInputChangeListener(IInputChangedListener)
 *
 * @since 3.4
 */
public void setDelayedInputChangeListener(IInputChangedListener inputChangeListener) {
fDelayedInputChangeListener= inputChangeListener;
}
/**
 * <p>
 * Tells whether a delayed input change listener is registered.
 * </p>
 *
 * @return <code>true</code> iff a delayed input change listener is currently
 * registered
 *
 * @since 3.4
 */
public boolean hasDelayedInputChangeListener() {
return fDelayedInputChangeListener != null;
}
/**
 * <p>
 * Notifies the registered delayed-input-change listener, if any, of a
 * delayed input change.
 * </p>
 *
 * @param newInput the new input, or <code>null</code> to request cancellation
 *
 * @since 3.4
 */
public void notifyDelayedInputChange(Object newInput) {
    final IInputChangedListener listener = fDelayedInputChangeListener;
    if (listener != null) {
        listener.inputChanged(newInput);
    }
}
/**
 * Returns a debug representation that appends whether the shell was created
 * resizeable or with a fixed size.
 *
 * @see java.lang.Object#toString()
 *
 * @since 3.4
 */
public String toString() {
    final boolean resizeable = (getShell().getStyle() & SWT.RESIZE) != 0;
    return super.toString() + " - style: " + (resizeable ? "resizeable" : "fixed");
}
/**
 * Returns the input currently shown in this control.
 *
 * @return the current browser input or <code>null</code>
 */
public eu.hyvar.context.contextValidity.resource.hyvalidityformula.ui.HyvalidityformulaDocBrowserInformationControlInput getInput() {
return fInput;
}
/**
 * Computes pixel size constraints from character counts using the metrics
 * of this control's font. Returns <code>null</code> when no symbolic font
 * name was configured, matching the original contract.
 *
 * @param widthInChars the width constraint in characters
 * @param heightInChars the height constraint in characters
 * @return the constraints in pixels, or <code>null</code>
 *
 * @see
 * org.eclipse.jface.text.IInformationControlExtension5#computeSizeConstraints(int,
 * int)
 */
public Point computeSizeConstraints(int widthInChars, int heightInChars) {
    if (fSymbolicFontName == null) {
        return null;
    }
    // fSymbolicFontName is non-null past the guard above, so the original
    // fallback to the dialog font here was dead code.
    Font font = JFaceResources.getFont(fSymbolicFontName);
    GC gc = new GC(fBrowser);
    try {
        gc.setFont(font);
        int width = gc.getFontMetrics().getAverageCharWidth();
        int height = gc.getFontMetrics().getHeight();
        return new Point(widthInChars * width, heightInChars * height);
    } finally {
        // SWT graphics resources are not garbage collected; always release
        // the GC, even if querying the font metrics throws.
        gc.dispose();
    }
}
}
|
|
/**
* Copyright 2013-2014 Ronald W Hoffman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ScripterRon.BitcoinCore;
import java.io.EOFException;
import java.math.BigInteger;
import java.util.Arrays;
/**
* <p>A transaction output has the following format:</p>
* <pre>
* Size Field Description
* ==== ===== ===========
* 8 bytes TxOutValue Value expressed in Satoshis (0.00000001 BTC)
* VarInt TxOutScriptLength Script length
* Variable TxOutScript Script
* </pre>
*
* <p>All numbers are encoded in little-endian format (least-significant byte to most-significant byte)</p>
*/
public class TransactionOutput implements ByteSerializable {

    /**
     * Unspendable 'Proof-of-burn' script: a standard PAY_TO_PUBKEY_HASH
     * script for '1CounterpartyXXXXXXXXXXXXXXXUWLpVr', an address with no
     * known private key. Declared static final since the bytes are identical
     * for every output (the original allocated one copy per instance).
     */
    private static final byte[] UNSPENDABLE_SCRIPT = new byte[] {
        (byte)0x76, (byte)0xa9, (byte)0x14,
        (byte)0x81, (byte)0x88, (byte)0x95, (byte)0xf3, (byte)0xdc, (byte)0x2c, (byte)0x17, (byte)0x86,
        (byte)0x29, (byte)0xd3, (byte)0xd2, (byte)0xd8, (byte)0xfa, (byte)0x3e, (byte)0xc4, (byte)0xa3,
        (byte)0xf8, (byte)0x17, (byte)0x98, (byte)0x21,
        (byte)0x88, (byte)0xac
    };

    /** Output value in Satoshis (0.00000001 BTC) */
    private final BigInteger value;

    /** Transaction output index */
    private final int txIndex;

    /** Output script */
    private final byte[] scriptBytes;

    /**
     * Creates a transaction output for the specified amount using a
     * PAY_TO_PUBKEY_HASH script
     *
     * @param txIndex Transaction output index
     * @param value Transaction output value
     * @param address Send address
     */
    public TransactionOutput(int txIndex, BigInteger value, Address address) {
        this.txIndex = txIndex;
        this.value = value;
        //
        // Create the output script for PAY_TO_PUBKEY_HASH
        // OP_DUP OP_HASH160 <pubkey-hash> OP_EQUALVERIFY OP_CHECKSIG
        //
        scriptBytes = new byte[1+1+1+20+1+1];
        scriptBytes[0] = (byte)ScriptOpCodes.OP_DUP;
        scriptBytes[1] = (byte)ScriptOpCodes.OP_HASH160;
        scriptBytes[2] = (byte)20;  // push the 20-byte public key hash
        System.arraycopy(address.getHash(), 0, scriptBytes, 3, 20);
        scriptBytes[23] = (byte)ScriptOpCodes.OP_EQUALVERIFY;
        scriptBytes[24] = (byte)ScriptOpCodes.OP_CHECKSIG;
    }

    /**
     * Creates a transaction output for the specified amount using the supplied script
     *
     * <p>NOTE(review): the supplied array is stored without a defensive copy
     * (matching the original behavior); callers must not modify it after
     * construction.</p>
     *
     * @param txIndex Transaction output index
     * @param value Transaction output value
     * @param scriptBytes Transaction output script
     */
    public TransactionOutput(int txIndex, BigInteger value, byte[] scriptBytes) {
        this.txIndex = txIndex;
        this.value = value;
        this.scriptBytes = scriptBytes;
    }

    /**
     * Creates a transaction output from the encoded byte stream
     *
     * @param txIndex Index within the transaction output list
     * @param inBuffer Input stream
     * @throws EOFException Input stream is too short
     * @throws VerificationException Verification failed
     */
    public TransactionOutput(int txIndex, SerializedBuffer inBuffer) throws EOFException, VerificationException {
        this.txIndex = txIndex;
        //
        // Get the amount (8-byte little-endian value in Satoshis)
        //
        value = BigInteger.valueOf(inBuffer.getLong());
        //
        // Get the script (var-int length followed by the script bytes)
        //
        scriptBytes = inBuffer.getBytes();
    }

    /**
     * Return the serialized transaction output
     *
     * @param outBuffer Output buffer
     * @return Output buffer
     */
    @Override
    public SerializedBuffer getBytes(SerializedBuffer outBuffer) {
        outBuffer.putLong(value.longValue())
                 .putVarInt(scriptBytes.length)
                 .putBytes(scriptBytes);
        return outBuffer;
    }

    /**
     * Returns the serialized transaction output
     *
     * @return Serialized transaction output
     */
    @Override
    public byte[] getBytes() {
        SerializedBuffer buffer = new SerializedBuffer();
        return getBytes(buffer).toByteArray();
    }

    /**
     * Returns the output amount
     *
     * @return Output amount in Satoshis
     */
    public BigInteger getValue() {
        return value;
    }

    /**
     * Returns the transaction index for this output
     *
     * @return Transaction index
     */
    public int getIndex() {
        return txIndex;
    }

    /**
     * Returns the script bytes
     *
     * <p>NOTE(review): returns the internal array without copying (original
     * behavior); callers must treat it as read-only.</p>
     *
     * @return Script bytes or null
     */
    public byte[] getScriptBytes() {
        return scriptBytes;
    }

    /**
     * Checks if the output is spendable. This is done by checking for OP_RETURN
     * as the first script operation. Any script starting this way can never be
     * spent. Note that an empty script is always spendable.
     *
     * Proof-of-burn transactions are sent to '1CounterpartyXXXXXXXXXXXXXXXUWLpVr'.
     * This address has no private key and thus can never be spent. So we will
     * mark it as unspendable.
     *
     * @return TRUE if the output is spendable
     */
    public boolean isSpendable() {
        boolean spendable = true;
        if (scriptBytes.length > 0) {
            if (scriptBytes[0] == ScriptOpCodes.OP_RETURN)
                spendable = false;
            else if (Arrays.equals(scriptBytes, UNSPENDABLE_SCRIPT))
                spendable = false;
        }
        return spendable;
    }

    /**
     * Serializes this output for use in a transaction signature
     *
     * @param index Index of input being signed
     * @param hashType The signature hash type
     * @param outBuffer Output buffer
     */
    public void serializeForSignature(int index, int hashType, SerializedBuffer outBuffer) {
        if (hashType == ScriptOpCodes.SIGHASH_SINGLE && index != txIndex) {
            //
            // For SIGHASH_SINGLE, we have a zero-length script and a value of -1
            //
            outBuffer.putLong(-1L)
                     .putVarInt(0);
        } else {
            //
            // Encode normally
            //
            outBuffer.putLong(value.longValue())
                     .putVarInt(scriptBytes.length)
                     .putBytes(scriptBytes);
        }
    }
}
|
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Contains the response to a successful <a>ListAttachedGroupPolicies</a> request.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iam-2010-05-08/ListAttachedGroupPolicies" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListAttachedGroupPoliciesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* A list of the attached policies.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<AttachedPolicy> attachedPolicies;
/**
* <p>
* A flag that indicates whether there are more items to return. If your results were truncated, you can make a
* subsequent pagination request using the <code>Marker</code> request parameter to retrieve more items. Note that
* IAM might return fewer than the <code>MaxItems</code> number of results even when there are more results
* available. We recommend that you check <code>IsTruncated</code> after every call to ensure that you receive all
* your results.
* </p>
*/
private Boolean isTruncated;
/**
* <p>
* When <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value to use for the
* <code>Marker</code> parameter in a subsequent pagination request.
* </p>
*/
private String marker;
/**
* <p>
* A list of the attached policies.
* </p>
*
* @return A list of the attached policies.
*/
public java.util.List<AttachedPolicy> getAttachedPolicies() {
if (attachedPolicies == null) {
attachedPolicies = new com.amazonaws.internal.SdkInternalList<AttachedPolicy>();
}
return attachedPolicies;
}
/**
* <p>
* A list of the attached policies.
* </p>
*
* @param attachedPolicies
* A list of the attached policies.
*/
public void setAttachedPolicies(java.util.Collection<AttachedPolicy> attachedPolicies) {
if (attachedPolicies == null) {
this.attachedPolicies = null;
return;
}
this.attachedPolicies = new com.amazonaws.internal.SdkInternalList<AttachedPolicy>(attachedPolicies);
}
/**
* <p>
* A list of the attached policies.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setAttachedPolicies(java.util.Collection)} or {@link #withAttachedPolicies(java.util.Collection)} if you
* want to override the existing values.
* </p>
*
* @param attachedPolicies
* A list of the attached policies.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListAttachedGroupPoliciesResult withAttachedPolicies(AttachedPolicy... attachedPolicies) {
if (this.attachedPolicies == null) {
setAttachedPolicies(new com.amazonaws.internal.SdkInternalList<AttachedPolicy>(attachedPolicies.length));
}
for (AttachedPolicy ele : attachedPolicies) {
this.attachedPolicies.add(ele);
}
return this;
}
/**
* <p>
* A list of the attached policies.
* </p>
*
* @param attachedPolicies
* A list of the attached policies.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListAttachedGroupPoliciesResult withAttachedPolicies(java.util.Collection<AttachedPolicy> attachedPolicies) {
setAttachedPolicies(attachedPolicies);
return this;
}
/**
* <p>
* A flag that indicates whether there are more items to return. If your results were truncated, you can make a
* subsequent pagination request using the <code>Marker</code> request parameter to retrieve more items. Note that
* IAM might return fewer than the <code>MaxItems</code> number of results even when there are more results
* available. We recommend that you check <code>IsTruncated</code> after every call to ensure that you receive all
* your results.
* </p>
*
* @param isTruncated
* A flag that indicates whether there are more items to return. If your results were truncated, you can make
* a subsequent pagination request using the <code>Marker</code> request parameter to retrieve more items.
* Note that IAM might return fewer than the <code>MaxItems</code> number of results even when there are more
* results available. We recommend that you check <code>IsTruncated</code> after every call to ensure that
* you receive all your results.
*/
public void setIsTruncated(Boolean isTruncated) {
this.isTruncated = isTruncated;
}
/**
* <p>
* A flag that indicates whether there are more items to return. If your results were truncated, you can make a
* subsequent pagination request using the <code>Marker</code> request parameter to retrieve more items. Note that
* IAM might return fewer than the <code>MaxItems</code> number of results even when there are more results
* available. We recommend that you check <code>IsTruncated</code> after every call to ensure that you receive all
* your results.
* </p>
*
* @return A flag that indicates whether there are more items to return. If your results were truncated, you can
* make a subsequent pagination request using the <code>Marker</code> request parameter to retrieve more
* items. Note that IAM might return fewer than the <code>MaxItems</code> number of results even when there
* are more results available. We recommend that you check <code>IsTruncated</code> after every call to
* ensure that you receive all your results.
*/
public Boolean getIsTruncated() {
return this.isTruncated;
}
/**
* <p>
* A flag that indicates whether there are more items to return. If your results were truncated, you can make a
* subsequent pagination request using the <code>Marker</code> request parameter to retrieve more items. Note that
* IAM might return fewer than the <code>MaxItems</code> number of results even when there are more results
* available. We recommend that you check <code>IsTruncated</code> after every call to ensure that you receive all
* your results.
* </p>
*
* @param isTruncated
* A flag that indicates whether there are more items to return. If your results were truncated, you can make
* a subsequent pagination request using the <code>Marker</code> request parameter to retrieve more items.
* Note that IAM might return fewer than the <code>MaxItems</code> number of results even when there are more
* results available. We recommend that you check <code>IsTruncated</code> after every call to ensure that
* you receive all your results.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListAttachedGroupPoliciesResult withIsTruncated(Boolean isTruncated) {
setIsTruncated(isTruncated);
return this;
}
/**
* <p>
* A flag that indicates whether there are more items to return. If your results were truncated, you can make a
* subsequent pagination request using the <code>Marker</code> request parameter to retrieve more items. Note that
* IAM might return fewer than the <code>MaxItems</code> number of results even when there are more results
* available. We recommend that you check <code>IsTruncated</code> after every call to ensure that you receive all
* your results.
* </p>
*
* @return A flag that indicates whether there are more items to return. If your results were truncated, you can
* make a subsequent pagination request using the <code>Marker</code> request parameter to retrieve more
* items. Note that IAM might return fewer than the <code>MaxItems</code> number of results even when there
* are more results available. We recommend that you check <code>IsTruncated</code> after every call to
* ensure that you receive all your results.
*/
public Boolean isTruncated() {
return this.isTruncated;
}
/**
* <p>
* When <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value to use for the
* <code>Marker</code> parameter in a subsequent pagination request.
* </p>
*
* @param marker
* When <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value to use
* for the <code>Marker</code> parameter in a subsequent pagination request.
*/
public void setMarker(String marker) {
this.marker = marker;
}
/**
* <p>
* When <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value to use for the
* <code>Marker</code> parameter in a subsequent pagination request.
* </p>
*
* @return When <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value to use
* for the <code>Marker</code> parameter in a subsequent pagination request.
*/
public String getMarker() {
return this.marker;
}
/**
* <p>
* When <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value to use for the
* <code>Marker</code> parameter in a subsequent pagination request.
* </p>
*
* @param marker
* When <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value to use
* for the <code>Marker</code> parameter in a subsequent pagination request.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListAttachedGroupPoliciesResult withMarker(String marker) {
setMarker(marker);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAttachedPolicies() != null)
sb.append("AttachedPolicies: ").append(getAttachedPolicies()).append(",");
if (getIsTruncated() != null)
sb.append("IsTruncated: ").append(getIsTruncated()).append(",");
if (getMarker() != null)
sb.append("Marker: ").append(getMarker());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListAttachedGroupPoliciesResult == false)
return false;
ListAttachedGroupPoliciesResult other = (ListAttachedGroupPoliciesResult) obj;
if (other.getAttachedPolicies() == null ^ this.getAttachedPolicies() == null)
return false;
if (other.getAttachedPolicies() != null && other.getAttachedPolicies().equals(this.getAttachedPolicies()) == false)
return false;
if (other.getIsTruncated() == null ^ this.getIsTruncated() == null)
return false;
if (other.getIsTruncated() != null && other.getIsTruncated().equals(this.getIsTruncated()) == false)
return false;
if (other.getMarker() == null ^ this.getMarker() == null)
return false;
if (other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAttachedPolicies() == null) ? 0 : getAttachedPolicies().hashCode());
hashCode = prime * hashCode + ((getIsTruncated() == null) ? 0 : getIsTruncated().hashCode());
hashCode = prime * hashCode + ((getMarker() == null) ? 0 : getMarker().hashCode());
return hashCode;
}
@Override
public ListAttachedGroupPoliciesResult clone() {
try {
return (ListAttachedGroupPoliciesResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
|
|
/*
* Copyright 2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jsefa.rbf.annotation;
import static org.jsefa.common.annotation.AnnotationParameterNames.DATA_TYPE_NAME;
import static org.jsefa.common.annotation.AnnotationParameterNames.NAME;
import static org.jsefa.rbf.annotation.RbfAnnotationDataNames.DEFAULT_PREFIX;
import static org.jsefa.rbf.annotation.RbfAnnotationDataNames.PREFIX;
import static org.jsefa.rbf.annotation.RbfAnnotationDataNames.RECORDS;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.jsefa.common.accessor.ObjectAccessorProvider;
import org.jsefa.common.annotation.AnnotatedFieldsProvider;
import org.jsefa.common.annotation.AnnotationDataProvider;
import org.jsefa.common.annotation.AnnotationException;
import org.jsefa.common.annotation.TypeMappingFactory;
import org.jsefa.common.converter.SimpleTypeConverter;
import org.jsefa.common.converter.provider.SimpleTypeConverterProvider;
import org.jsefa.common.mapping.FieldDescriptor;
import org.jsefa.common.mapping.TypeMapping;
import org.jsefa.common.mapping.TypeMappingException;
import org.jsefa.common.validator.Validator;
import org.jsefa.common.validator.provider.ValidatorProvider;
import org.jsefa.rbf.mapping.FieldMapping;
import org.jsefa.rbf.mapping.RbfComplexTypeMapping;
import org.jsefa.rbf.mapping.RbfFieldDescriptor;
import org.jsefa.rbf.mapping.RbfListTypeMapping;
import org.jsefa.rbf.mapping.RbfNodeMapping;
import org.jsefa.rbf.mapping.RbfNodeType;
import org.jsefa.rbf.mapping.RecordDescriptor;
import org.jsefa.rbf.mapping.RbfTypeMappingRegistry;
import org.jsefa.rbf.mapping.RecordMapping;
/**
* Factory for creating {@link TypeMapping}s from annotated classes.
* <p>
* It is thread-safe and all subclasses must be thread-safe, too.
*
* @author Norman Lahme-Huetig
*
*/
public abstract class RbfTypeMappingFactory extends TypeMappingFactory<String, RbfTypeMappingRegistry> {
// Parameter object bundling the annotation classes this factory inspects.
private final RbfAnnotations annotations;
/**
 * Constructs a new <code>AbstractRbfTypeMappingFactory</code>.
 *
 * @param typeMappingRegistry the type mapping registry. New types will be registered using that registry.
 * @param simpleTypeConverterProvider the simple type converter provider to use
 * @param validatorProvider the validator provider to use
 * @param objectAccessorProvider the object accessor provider to use
 * @param annotations the parameter objects providing the annotation classes to use.
 */
public RbfTypeMappingFactory(RbfTypeMappingRegistry typeMappingRegistry,
SimpleTypeConverterProvider simpleTypeConverterProvider, ValidatorProvider validatorProvider,
ObjectAccessorProvider objectAccessorProvider, RbfAnnotations annotations) {
super(typeMappingRegistry, simpleTypeConverterProvider, validatorProvider, objectAccessorProvider);
this.annotations = annotations;
}
/**
 * Creates a type mapping for the given object type and returns its name.
 * The type must carry the data type annotation; the resulting mapping graph
 * is checked for cycles before the name is returned.
 *
 * @param objectType the object type.
 * @return the type mapping name.
 * @throws AnnotationException if the class has no data type annotation
 */
public final String createIfAbsent(Class<?> objectType) {
if (!hasComplexType(objectType)) {
throw new AnnotationException("The class " + objectType + " has no data type annotation");
}
String dataTypeName = createComplexTypeMappingIfAbsent(objectType, true);
assertIsCycleFree(dataTypeName);
return dataTypeName;
}
/**
 * Creates a new simple type mapping. Implemented by format-specific
 * subclasses (which, per the class contract, must be thread-safe).
 *
 * @param objectType the object type
 * @param dataTypeName the data type name
 * @param converter the converter
 * @param field the field
 * @return the simple type mapping.
 */
protected abstract TypeMapping<String> createSimpleTypeMapping(Class<?> objectType, String dataTypeName,
SimpleTypeConverter converter, Field field);
/**
 * {@inheritDoc}
 */
protected String getAnnotatedDataTypeName(Annotation annotation, Class<?> annotationContextClass) {
// Reads the dataTypeName parameter from the given annotation, if present.
return AnnotationDataProvider.get(annotation, DATA_TYPE_NAME);
}
/**
 * Creates and registers a simple type mapping for the given field unless one
 * with the derived name already exists.
 *
 * @param objectType the (simple) object type of the field
 * @param field the annotated field
 * @param fieldAnnotation the field annotation carrying converter settings
 * @return the data type name of the (new or existing) mapping
 */
private String createSimpleTypeMappingIfAbsent(Class<?> objectType, Field field, Annotation fieldAnnotation) {
    final String dataTypeName = createSimpleDataTypeName(field);
    if (!prepareToCreate(objectType, dataTypeName)) {
        // Mapping already registered (or being registered); reuse its name.
        return dataTypeName;
    }
    final SimpleTypeConverter converter = createSimpleTypeConverter(objectType, field, fieldAnnotation);
    getTypeMappingRegistry().register(createSimpleTypeMapping(objectType, dataTypeName, converter, field));
    return dataTypeName;
}
/**
 * Creates and registers a complex type mapping for the given object type
 * unless one with the derived name already exists. Field mappings are always
 * collected; sub-record mappings only when <code>subRecordsAllowed</code>.
 *
 * @param objectType the complex object type
 * @param subRecordsAllowed whether sub-record annotations are permitted here
 * @return the data type name of the (new or existing) mapping
 */
@SuppressWarnings("unchecked")
private String createComplexTypeMappingIfAbsent(Class<?> objectType, boolean subRecordsAllowed) {
    final String dataTypeName = createComplexDataTypeName(objectType);
    if (!prepareToCreate(objectType, dataTypeName)) {
        return dataTypeName;
    }
    final Collection<RbfNodeMapping<?>> nodeMappings =
            new ArrayList<RbfNodeMapping<?>>(createFieldMappings(objectType));
    if (subRecordsAllowed) {
        nodeMappings.addAll(createRecordMappings(objectType));
    } else {
        // Sub-records are illegal at this nesting level; fail fast if declared.
        assertNoSubRecordsDeclared(objectType);
    }
    final Validator validator = getValidatorFactory().createValidator(objectType,
            this.annotations.getFieldAnnotationClass(), this.annotations.getSubRecordAnnotationClass(),
            this.annotations.getSubRecordListAnnotationClass());
    getTypeMappingRegistry().register(new RbfComplexTypeMapping(objectType, dataTypeName,
            getObjectAccessorProvider().get(objectType), nodeMappings, validator));
    return dataTypeName;
}
/**
 * Builds one {@link FieldMapping} per annotated field of the given type, in
 * declaration order. Simple-typed fields get (or reuse) a simple type
 * mapping; complex-typed fields get (or reuse) a complex type mapping
 * without sub-records; anything else is rejected.
 *
 * <p>The original duplicated the normalized-type/validator/mapping tail in
 * both branches; it is hoisted here since only the data type name resolution
 * differed.</p>
 *
 * @param objectType the type whose fields are mapped
 * @return the field mappings in field order
 * @throws TypeMappingException if a field is neither simple nor complex
 */
@SuppressWarnings("unchecked")
private Collection<FieldMapping> createFieldMappings(Class<?> objectType) {
    Collection<FieldMapping> fieldMappings = new ArrayList<FieldMapping>();
    int relativeFieldIndex = 0;
    for (Field field : AnnotatedFieldsProvider.getSortedAnnotatedFields(objectType, this.annotations
            .getFieldAnnotationClass())) {
        String fieldDataTypeName = AnnotationDataProvider.get(field, DATA_TYPE_NAME, this.annotations
                .getFieldAnnotationClass());
        Annotation fieldAnnotation = field.getAnnotation(this.annotations.getFieldAnnotationClass());
        // Resolve (or create) the data type name; only this step depends on
        // whether the field type is simple or complex.
        if (hasSimpleType(field.getType())) {
            if (fieldDataTypeName == null) {
                fieldDataTypeName = createSimpleTypeMappingIfAbsent(field.getType(), field, fieldAnnotation);
            } else {
                assertTypeMappingExists(fieldDataTypeName);
            }
        } else if (hasComplexType(field.getType())) {
            if (fieldDataTypeName == null) {
                // Sub-records are not allowed for embedded complex fields.
                fieldDataTypeName = createComplexTypeMappingIfAbsent(field.getType(), false);
            } else {
                assertTypeMappingExists(fieldDataTypeName);
            }
        } else {
            throw new TypeMappingException("Can not create a type mapping for field " + field.getName()
                    + " of class " + objectType.getName());
        }
        // Common tail: normalize the object type via the registry, build the
        // contextual validator, and record the mapping at the next index.
        Class<?> normalizedFieldObjectType = getTypeMappingRegistry().get(fieldDataTypeName)
                .getObjectType();
        Validator validator = getValidatorFactory().createContextualValidator(normalizedFieldObjectType,
                field, fieldAnnotation, this.annotations.getDataTypeAnnotationClass());
        fieldMappings.add(new FieldMapping(fieldDataTypeName,
                new RbfFieldDescriptor(relativeFieldIndex++), normalizedFieldObjectType,
                new FieldDescriptor(field.getName(), normalizedFieldObjectType), validator));
    }
    return fieldMappings;
}
  /**
   * Creates record mappings for all sub-record and sub-record-list fields of the given
   * object type, registering any missing type mappings on demand.
   * <p>
   * For each annotated field, a collection field annotated as a sub record list yields one
   * {@code RecordMapping} per declared {@code Record} (keyed by its prefix) plus one
   * prefix-less mapping for the list itself; a complex-typed field annotated as a sub record
   * yields a single prefixed {@code RecordMapping}.
   *
   * @param objectType the class whose annotated fields are inspected
   * @return the record mappings for all sub record / sub record list fields
   */
  @SuppressWarnings("unchecked")
  private Collection<RecordMapping> createRecordMappings(Class<?> objectType) {
    Collection<RecordMapping> recordMappings = new ArrayList<RecordMapping>();
    // All prefixes used below must share this length (derived from the type's default prefix).
    int requiredPrefixLength = getRequiredPrefixLength(objectType);
    for (Field field : AnnotatedFieldsProvider.getSortedAnnotatedFields(objectType, this.annotations
        .getSubRecordAnnotationClass(), this.annotations.getSubRecordListAnnotationClass())) {
      if (hasCollectionType(field.getType())
          && field.getAnnotation(this.annotations.getSubRecordListAnnotationClass()) != null) {
        // Sub record list: the list type mapping aggregates one mapping per declared record.
        String listDataTypeName = createListTypeMappingIfAbsent(field, requiredPrefixLength);
        Annotation fieldAnnotation = field.getAnnotation(this.annotations
            .getSubRecordListAnnotationClass());
        Record[] records = getRecords(fieldAnnotation);
        for (Record record : records) {
          String listItemDataTypeName = createIfAbsent(field, record, records);
          String prefix = record.prefix();
          assertPrefixHasRequiredLength(field, prefix, requiredPrefixLength);
          Class<?> normalizedListItemObjectType = getTypeMappingRegistry().get(listItemDataTypeName)
              .getObjectType();
          Validator validator = getValidatorFactory().createContextualValidator(
              normalizedListItemObjectType, field, record,
              this.annotations.getDataTypeAnnotationClass());
          // NOTE(review): this mapping is keyed by listDataTypeName (not listItemDataTypeName),
          // unlike the per-record mappings built inside createListTypeMappingIfAbsent — confirm
          // this asymmetry is intentional.
          recordMappings.add(new RecordMapping(listDataTypeName, new RecordDescriptor(prefix),
              normalizedListItemObjectType, new FieldDescriptor(field.getName(), Collection.class),
              true, validator));
        }
        // One prefix-less mapping representing the list field itself.
        Validator validator = getValidatorFactory().createContextualValidator(Collection.class, field,
            fieldAnnotation, this.annotations.getDataTypeAnnotationClass());
        recordMappings
            .add(new RecordMapping(listDataTypeName, new RecordDescriptor(null), Collection.class,
                new FieldDescriptor(field.getName(), Collection.class), false, validator));
      } else if (hasComplexType(field.getType())) {
        // Single sub record: resolve (or create) its type mapping and check its prefix length.
        String fieldDataTypeName = AnnotationDataProvider.get(field, DATA_TYPE_NAME, this.annotations
            .getSubRecordAnnotationClass());
        if (fieldDataTypeName == null) {
          fieldDataTypeName = createComplexTypeMappingIfAbsent(field.getType(), true);
        } else {
          assertTypeMappingExists(fieldDataTypeName);
        }
        // NOTE(review): prefix is assumed non-null here (annotation default); a missing PREFIX
        // attribute would raise an NPE on the length check below — confirm.
        String prefix = AnnotationDataProvider.get(field, PREFIX, this.annotations
            .getSubRecordAnnotationClass());
        if (prefix.length() != requiredPrefixLength) {
          throw new AnnotationException("The object type " + field.getType()
              + " must have a prefix with length " + requiredPrefixLength);
        }
        Class<?> normalizedFieldObjectType = getTypeMappingRegistry().get(fieldDataTypeName)
            .getObjectType();
        Annotation fieldAnnotation = field.getAnnotation(this.annotations.getSubRecordAnnotationClass());
        Validator validator = getValidatorFactory().createContextualValidator(normalizedFieldObjectType,
            field, fieldAnnotation, this.annotations.getDataTypeAnnotationClass());
        recordMappings.add(new RecordMapping(fieldDataTypeName, new RecordDescriptor(prefix),
            normalizedFieldObjectType,
            new FieldDescriptor(field.getName(), normalizedFieldObjectType), false, validator));
      }
    }
    return recordMappings;
  }
@SuppressWarnings("unchecked")
private void assertNoSubRecordsDeclared(Class<?> objectType) {
int counter = AnnotatedFieldsProvider
.getSortedAnnotatedFields(objectType, this.annotations.getSubRecordAnnotationClass(),
this.annotations.getSubRecordListAnnotationClass()).size();
if (counter > 0) {
throw new TypeMappingException("No sub records nor sub record lists allowed within embedded type: "
+ objectType.getName());
}
}
  /**
   * Creates and registers a list type mapping for the given sub-record-list field if none is
   * registered yet, and returns its data type name.
   * <p>
   * The list mapping aggregates one {@code RecordMapping} per {@code Record} declared on the
   * field's sub record list annotation; each record's prefix must have the required length.
   *
   * @param field the collection field carrying the sub record list annotation
   * @param requiredPrefixLength the mandatory length for every record prefix
   * @return the data type name of the (possibly newly created) list type mapping
   */
  private String createListTypeMappingIfAbsent(Field field, int requiredPrefixLength) {
    Annotation subRecordListAnnotation = field.getAnnotation(this.annotations
        .getSubRecordListAnnotationClass());
    String dataTypeName = createListDataTypeName(field);
    // prepareToCreate returns false when a mapping for dataTypeName already exists
    // (or is being created), so registration happens at most once per field.
    if (prepareToCreate(Collection.class, dataTypeName)) {
      if (subRecordListAnnotation == null || getRecords(subRecordListAnnotation).length == 0) {
        throw new AnnotationException("No FlrSubRecordList annotation with proper content found");
      }
      Collection<RecordMapping> recordMappings = new ArrayList<RecordMapping>();
      Record[] records = getRecords(subRecordListAnnotation);
      for (Record record : records) {
        // Resolve (or lazily create) the type mapping for this record's item type.
        String listItemDataTypeName = createIfAbsent(field, record, records);
        String prefix = record.prefix();
        assertPrefixHasRequiredLength(field, prefix, requiredPrefixLength);
        Class<?> normalizedListItemObjectType = getTypeMappingRegistry().get(listItemDataTypeName)
            .getObjectType();
        Validator validator = getValidatorFactory()
            .createContextualValidator(normalizedListItemObjectType, field, record,
                this.annotations.getDataTypeAnnotationClass());
        recordMappings.add(new RecordMapping(listItemDataTypeName, new RecordDescriptor(prefix),
            normalizedListItemObjectType, new FieldDescriptor(field.getName(), Collection.class),
            false, validator));
      }
      getTypeMappingRegistry().register(
          new RbfListTypeMapping(dataTypeName, recordMappings, getObjectAccessorProvider().get(
              field.getType())));
    }
    return dataTypeName;
  }
private void assertPrefixHasRequiredLength(Field field, String prefix, int requiredPrefixLength) {
if (prefix.length() != requiredPrefixLength) {
throw new AnnotationException("All record annotations of the list field " + field.getName()
+ " of class " + field.getDeclaringClass().getName() + " must have a prefix with length "
+ requiredPrefixLength);
}
}
private String createIfAbsent(Field field, Record record, Record[] records) {
String listItemDataTypeName = AnnotationDataProvider.get(record, DATA_TYPE_NAME);
if (listItemDataTypeName == null) {
Class<?> listItemObjectType = getCollectionItemType(record, field, records.length == 1);
assertHasComplexType(listItemObjectType, field);
listItemDataTypeName = createComplexTypeMappingIfAbsent(listItemObjectType, true);
} else {
assertTypeMappingExists(listItemDataTypeName);
}
return listItemDataTypeName;
}
private void assertHasComplexType(Class<?> listItemObjectType, Field field) {
if (listItemObjectType == null) {
throw new AnnotationException("Neither dataTypeName nor objectType is given for list item of field: "
+ field.getName() + " of class " + field.getDeclaringClass().getName());
}
if (!hasComplexType(listItemObjectType)) {
throw new AnnotationException("The sub record object type " + listItemObjectType.getName()
+ " must have a data type annotation");
}
}
private Record[] getRecords(Annotation annotation) {
return AnnotationDataProvider.get(annotation, RECORDS);
}
private boolean hasComplexType(Class<?> objectType) {
return objectType.isAnnotationPresent(this.annotations.getDataTypeAnnotationClass());
}
private String createSimpleDataTypeName(Field field) {
return field.getDeclaringClass().getName() + "." + field.getName();
}
private String createComplexDataTypeName(Class<?> objectType) {
Annotation dataType = objectType.getAnnotation(this.annotations.getDataTypeAnnotationClass());
if (dataType != null && AnnotationDataProvider.get(dataType, NAME) != null) {
return AnnotationDataProvider.get(dataType, NAME);
} else {
return objectType.getName();
}
}
private String createListDataTypeName(Field field) {
return field.getDeclaringClass().getName() + "." + field.getName();
}
  /**
   * Determines the mandatory prefix length for records of the given object type.
   * <p>
   * Returns the length of the type's default prefix when the type declares any sub record
   * (list) fields; returns 0 when it declares none (no prefixes needed).
   *
   * @param objectType the record object type
   * @return the required prefix length, or 0 if the type has no sub records
   * @throws AnnotationException if sub records exist but no default prefix is defined
   */
  @SuppressWarnings("unchecked")
  private int getRequiredPrefixLength(Class<?> objectType) {
    if (AnnotatedFieldsProvider
        .getSortedAnnotatedFields(objectType, this.annotations.getSubRecordAnnotationClass(),
            this.annotations.getSubRecordListAnnotationClass()).size() > 0) {
      // NOTE(review): assumes the data type annotation is present and DEFAULT_PREFIX is
      // non-null (annotation default "") — a missing annotation would NPE here; confirm.
      Annotation dataTypeAnnotation = objectType
          .getAnnotation(this.annotations.getDataTypeAnnotationClass());
      String defaultPrefix = AnnotationDataProvider.get(dataTypeAnnotation, DEFAULT_PREFIX);
      if (defaultPrefix.length() == 0) {
        throw new AnnotationException("A prefix must be defined for object type " + objectType.getName());
      }
      return defaultPrefix.length();
    } else {
      return 0;
    }
  }
private void assertIsCycleFree(String dataTypeName) {
assertIsCycleFree(dataTypeName, new ArrayList<Class<?>>());
}
  /**
   * Depth-first cycle check: fails if the object type of the given data type is
   * assignment-compatible (in either direction) with any type already on the current path.
   *
   * @param dataTypeName the data type to visit
   * @param objectTypePath the ancestor object types on the current DFS path; mutated during
   *        the walk (pushed before recursing into field children, popped afterwards)
   * @throws TypeMappingException if a cycle is detected (the path in the message ends with
   *         the repeated type)
   */
  private void assertIsCycleFree(String dataTypeName, List<Class<?>> objectTypePath) {
    TypeMapping<String> typeMapping = getTypeMappingRegistry().get(dataTypeName);
    for (Class<?> objectTypeOnPath : objectTypePath) {
      // Assignability in either direction counts as a revisit of the same type family.
      if (objectTypeOnPath.isAssignableFrom(typeMapping.getObjectType())
          || typeMapping.getObjectType().isAssignableFrom(objectTypeOnPath)) {
        // Append the offender so the reported path shows the full cycle.
        objectTypePath.add(typeMapping.getObjectType());
        throw new TypeMappingException("Cycle in type graph detected. Path: " + objectTypePath);
      }
    }
    objectTypePath.add(typeMapping.getObjectType());
    if (typeMapping instanceof RbfComplexTypeMapping) {
      RbfComplexTypeMapping complexTypeMapping = (RbfComplexTypeMapping) typeMapping;
      // Embedded field children extend the current path.
      for (String fieldName : complexTypeMapping.getFieldNames(RbfNodeType.FIELD)) {
        RbfNodeMapping<?> nodeMapping = complexTypeMapping.getNodeMapping(fieldName, Object.class);
        assertIsCycleFree(nodeMapping.getDataTypeName(), objectTypePath);
      }
      // NOTE(review): record children are checked with a FRESH path (single-arg overload),
      // so cycles running only through record nodes are not caught here — presumably
      // prevented elsewhere; confirm.
      for (String fieldName : complexTypeMapping.getFieldNames(RbfNodeType.RECORD)) {
        RbfNodeMapping<?> nodeMapping = complexTypeMapping.getNodeMapping(fieldName, Object.class);
        assertIsCycleFree(nodeMapping.getDataTypeName());
      }
    }
    // Backtrack: remove this type before returning to the caller.
    objectTypePath.remove(typeMapping.getObjectType());
  }
}
|
|
/*
* Copyright (c) 2006, Swedish Institute of Computer Science.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the Institute nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
*/
package org.contikios.cooja.radiomediums;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Observable;
import java.util.Observer;
import java.util.WeakHashMap;
import org.apache.log4j.Logger;
import org.contikios.cooja.Mote;
import org.contikios.cooja.RadioConnection;
import org.contikios.cooja.RadioMedium;
import org.contikios.cooja.RadioPacket;
import org.contikios.cooja.Simulation;
import org.contikios.cooja.TimeEvent;
import org.contikios.cooja.interfaces.CustomDataRadio;
import org.contikios.cooja.interfaces.Radio;
import org.jdom.Element;
/**
* Abstract radio medium provides basic functionality for implementing radio
* mediums.
*
* The radio medium forwards both radio packets and custom data objects.
*
* The registered radios' signal strengths are updated whenever the radio medium
* changes. There are three fixed levels: no surrounding traffic heard, noise
* heard and data heard.
*
* It handles radio registrations, radio loggers, active connections and
* observes all registered radio interfaces.
*
* @author Fredrik Osterlind
*/
public abstract class AbstractRadioMedium extends RadioMedium {
  private static Logger logger = Logger.getLogger(AbstractRadioMedium.class);
  /* Signal strengths in dBm.
   * Approx. values measured on TmoteSky */
  public static final double SS_NOTHING = -100;
  public static final double SS_STRONG = -10;
  public static final double SS_WEAK = -95;
  /* Per-radio RSSI overrides; synchronized maps since they may be touched from
   * both simulation and GUI/config threads. */
  protected Map<Radio, Double> baseRssi = java.util.Collections.synchronizedMap(new HashMap<Radio, Double>());
  protected Map<Radio, Double> sendRssi = java.util.Collections.synchronizedMap(new HashMap<Radio, Double>());
  /* All radios currently registered with this medium. */
  private ArrayList<Radio> registeredRadios = new ArrayList<Radio>();
  /* Connections with an ongoing transmission. */
  private ArrayList<RadioConnection> activeConnections = new ArrayList<RadioConnection>();
  /* Most recently finished connection; null while the latest event was a new transmission. */
  private RadioConnection lastConnection = null;
  private Simulation simulation = null;
  /* Book-keeping */
  public int COUNTER_TX = 0;
  public int COUNTER_RX = 0;
  public int COUNTER_INTERFERED = 0;
  /**
   * Observable wrapper exposing protected {@link Observable#setChanged()} so the
   * medium can flag/notify radio-medium changes to registered observers.
   */
  public class RadioMediumObservable extends Observable {
    public void setRadioMediumChanged() {
      setChanged();
    }
    public void setRadioMediumChangedAndNotify() {
      setChanged();
      notifyObservers();
    }
  }
  private RadioMediumObservable radioMediumObservable = new RadioMediumObservable();
  /**
   * This constructor should always be called from implemented radio mediums.
   *
   * @param simulation Simulation
   */
  public AbstractRadioMedium(Simulation simulation) {
    this.simulation = simulation;
  }
  /**
   * @return All registered radios
   */
  public Radio[] getRegisteredRadios() {
    return registeredRadios.toArray(new Radio[0]);
  }
  /**
   * @return All active connections
   */
  public RadioConnection[] getActiveConnections() {
    /* NOTE: toArray([0]) creates array and handles synchronization */
    return activeConnections.toArray(new RadioConnection[0]);
  }
  /**
   * Creates a new connection from given radio.
   *
   * Determines which radios should receive or be interfered by the transmission.
   *
   * @param radio Source radio
   * @return New connection
   */
  abstract public RadioConnection createConnections(Radio radio);
  /**
   * Updates all radio interfaces' signal strengths according to
   * the current active connections.
   */
  public void updateSignalStrengths() {
    /* Reset signal strengths */
    for (Radio radio : getRegisteredRadios()) {
      radio.setCurrentSignalStrength(getBaseRssi(radio));
    }
    /* Set signal strength to strong on destinations */
    RadioConnection[] conns = getActiveConnections();
    for (RadioConnection conn : conns) {
      if (conn.getSource().getCurrentSignalStrength() < SS_STRONG) {
        conn.getSource().setCurrentSignalStrength(SS_STRONG);
      }
      for (Radio dstRadio : conn.getDestinations()) {
        /* Skip destinations tuned to a different channel (negative channel = any). */
        if (conn.getSource().getChannel() >= 0 &&
            dstRadio.getChannel() >= 0 &&
            conn.getSource().getChannel() != dstRadio.getChannel()) {
          continue;
        }
        if (dstRadio.getCurrentSignalStrength() < SS_STRONG) {
          dstRadio.setCurrentSignalStrength(SS_STRONG);
        }
      }
    }
    /* Set signal strength to weak on interfered */
    /* NOTE(review): despite the comment above, interfered radios are raised to
     * SS_STRONG (not SS_WEAK) — confirm against upstream intent before changing. */
    for (RadioConnection conn : conns) {
      for (Radio intfRadio : conn.getInterfered()) {
        if (intfRadio.getCurrentSignalStrength() < SS_STRONG) {
          intfRadio.setCurrentSignalStrength(SS_STRONG);
        }
        /* Channel check happens after the strength update above — order preserved. */
        if (conn.getSource().getChannel() >= 0 &&
            intfRadio.getChannel() >= 0 &&
            conn.getSource().getChannel() != intfRadio.getChannel()) {
          continue;
        }
        if (!intfRadio.isInterfered()) {
          /*logger.warn("Radio was not interfered");*/
          intfRadio.interfereAnyReception();
        }
      }
    }
  }
  /**
   * Remove given radio from any active connections.
   * This method can be called if a radio node falls asleep or is removed.
   *
   * @param radio Radio
   */
  private void removeFromActiveConnections(Radio radio) {
    /* This radio must not be a connection source */
    RadioConnection connection = getActiveConnectionFrom(radio);
    if (connection != null) {
      logger.fatal("Connection source turned off radio: " + radio);
    }
    /* Set interfered if currently a connection destination */
    for (RadioConnection conn : activeConnections) {
      if (conn.isDestination(radio)) {
        conn.addInterfered(radio);
        if (!radio.isInterfered()) {
          radio.interfereAnyReception();
        }
      }
    }
  }
  /**
   * @param source Source radio
   * @return The active connection whose source is the given radio, or null
   */
  private RadioConnection getActiveConnectionFrom(Radio source) {
    for (RadioConnection conn : activeConnections) {
      if (conn.getSource() == source) {
        return conn;
      }
    }
    return null;
  }
  /**
   * This observer is responsible for detecting radio interface events, for example
   * new transmissions.
   */
  private Observer radioEventsObserver = new Observer() {
    public void update(Observable obs, Object obj) {
      if (!(obs instanceof Radio)) {
        logger.fatal("Radio event dispatched by non-radio object");
        return;
      }
      Radio radio = (Radio) obs;
      final Radio.RadioEvent event = radio.getLastEvent();
      switch (event) {
      /* Reception events are generated by this medium itself — nothing to do. */
      case RECEPTION_STARTED:
      case RECEPTION_INTERFERED:
      case RECEPTION_FINISHED:
      case UNKNOWN:
        return;
      case HW_ON: {
        /* Update signal strengths */
        updateSignalStrengths();
      }
      break;
      case HW_OFF: {
        /* Remove any radio connections from this radio */
        removeFromActiveConnections(radio);
        /* Update signal strengths */
        updateSignalStrengths();
      }
      break;
      case TRANSMISSION_STARTED: {
        /* Create new radio connection */
        if (radio.isReceiving()) {
          /*
           * Radio starts transmitting when it should be
           * receiving! Ok, but it won't receive the packet
           */
          radio.interfereAnyReception();
          for (RadioConnection conn : activeConnections) {
            if (conn.isDestination(radio)) {
              conn.addInterfered(radio);
            }
          }
        }
        RadioConnection newConnection = createConnections(radio);
        activeConnections.add(newConnection);
        for (Radio r : newConnection.getAllDestinations()) {
          if (newConnection.getDestinationDelay(r) == 0) {
            r.signalReceptionStart();
          } else {
            /* EXPERIMENTAL: Simulating propagation delay */
            final Radio delayedRadio = r;
            TimeEvent delayedEvent = new TimeEvent(0) {
              public void execute(long t) {
                delayedRadio.signalReceptionStart();
              }
            };
            simulation.scheduleEvent(delayedEvent, simulation.getSimulationTime() + newConnection.getDestinationDelay(r));
          }
        } /* Update signal strengths */
        updateSignalStrengths();
        /* Notify observers */
        lastConnection = null;
        radioMediumObservable.setRadioMediumChangedAndNotify();
      }
      break;
      case TRANSMISSION_FINISHED: {
        /* Remove radio connection */
        /* Connection */
        RadioConnection connection = getActiveConnectionFrom(radio);
        if (connection == null) {
          logger.fatal("No radio connection found");
          return;
        }
        activeConnections.remove(connection);
        lastConnection = connection;
        COUNTER_TX++;
        for (Radio dstRadio : connection.getAllDestinations()) {
          if (connection.getDestinationDelay(dstRadio) == 0) {
            dstRadio.signalReceptionEnd();
          } else {
            /* EXPERIMENTAL: Simulating propagation delay */
            final Radio delayedRadio = dstRadio;
            TimeEvent delayedEvent = new TimeEvent(0) {
              public void execute(long t) {
                delayedRadio.signalReceptionEnd();
              }
            };
            simulation.scheduleEvent(delayedEvent,
                simulation.getSimulationTime() + connection.getDestinationDelay(dstRadio));
          }
        }
        COUNTER_RX += connection.getDestinations().length;
        COUNTER_INTERFERED += connection.getInterfered().length;
        for (Radio intRadio : connection.getInterferedNonDestinations()) {
          intRadio.signalReceptionEnd();
        }
        /* Update signal strengths */
        updateSignalStrengths();
        /* Notify observers */
        radioMediumObservable.setRadioMediumChangedAndNotify();
      }
      break;
      case CUSTOM_DATA_TRANSMITTED: {
        /* Connection */
        RadioConnection connection = getActiveConnectionFrom(radio);
        if (connection == null) {
          logger.fatal("No radio connection found");
          return;
        }
        /* Custom data object */
        Object data = ((CustomDataRadio) radio).getLastCustomDataTransmitted();
        if (data == null) {
          logger.fatal("No custom data object to forward");
          return;
        }
        for (Radio dstRadio : connection.getAllDestinations()) {
          if (!(dstRadio instanceof CustomDataRadio) ||
              !((CustomDataRadio) dstRadio).canReceiveFrom((CustomDataRadio)radio)) {
            /* Radios communicate via radio packets */
            continue;
          }
          if (connection.getDestinationDelay(dstRadio) == 0) {
            ((CustomDataRadio) dstRadio).receiveCustomData(data);
          } else {
            /* EXPERIMENTAL: Simulating propagation delay */
            final CustomDataRadio delayedRadio = (CustomDataRadio) dstRadio;
            final Object delayedData = data;
            TimeEvent delayedEvent = new TimeEvent(0) {
              public void execute(long t) {
                delayedRadio.receiveCustomData(delayedData);
              }
            };
            simulation.scheduleEvent(delayedEvent,
                simulation.getSimulationTime() + connection.getDestinationDelay(dstRadio));
          }
        }
      }
      break;
      case PACKET_TRANSMITTED: {
        /* Connection */
        RadioConnection connection = getActiveConnectionFrom(radio);
        if (connection == null) {
          logger.fatal("No radio connection found");
          return;
        }
        /* Radio packet */
        RadioPacket packet = radio.getLastPacketTransmitted();
        if (packet == null) {
          logger.fatal("No radio packet to forward");
          return;
        }
        for (Radio dstRadio : connection.getAllDestinations()) {
          if ((radio instanceof CustomDataRadio) &&
              (dstRadio instanceof CustomDataRadio) &&
              ((CustomDataRadio) dstRadio).canReceiveFrom((CustomDataRadio)radio)) {
            /* Radios instead communicate via custom data objects */
            continue;
          }
          /* Forward radio packet */
          if (connection.getDestinationDelay(dstRadio) == 0) {
            dstRadio.setReceivedPacket(packet);
          } else {
            /* EXPERIMENTAL: Simulating propagation delay */
            final Radio delayedRadio = dstRadio;
            final RadioPacket delayedPacket = packet;
            TimeEvent delayedEvent = new TimeEvent(0) {
              public void execute(long t) {
                delayedRadio.setReceivedPacket(delayedPacket);
              }
            };
            simulation.scheduleEvent(delayedEvent,
                simulation.getSimulationTime() + connection.getDestinationDelay(dstRadio));
          }
        }
      }
      break;
      default:
        logger.fatal("Unsupported radio event: " + event);
      }
    }
  };
  /** Registers the mote's radio interface with this medium. */
  public void registerMote(Mote mote, Simulation sim) {
    registerRadioInterface(mote.getInterfaces().getRadio(), sim);
  }
  /** Unregisters the mote's radio interface from this medium. */
  public void unregisterMote(Mote mote, Simulation sim) {
    unregisterRadioInterface(mote.getInterfaces().getRadio(), sim);
  }
  /** Registers a radio and starts observing its events. */
  public void registerRadioInterface(Radio radio, Simulation sim) {
    if (radio == null) {
      logger.warn("No radio to register");
      return;
    }
    registeredRadios.add(radio);
    radio.addObserver(radioEventsObserver);
    /* Update signal strengths */
    updateSignalStrengths();
  }
  /** Unregisters a radio, stops observing it, and detaches it from active connections. */
  public void unregisterRadioInterface(Radio radio, Simulation sim) {
    if (!registeredRadios.contains(radio)) {
      logger.warn("No radio to unregister: " + radio);
      return;
    }
    radio.deleteObserver(radioEventsObserver);
    registeredRadios.remove(radio);
    removeFromActiveConnections(radio);
    /* Update signal strengths */
    updateSignalStrengths();
  }
  /**
   * Get the RSSI value that is set when there is "silence"
   *
   * @param radio
   *          The radio to get the base RSSI for
   * @return The base RSSI value; Default: SS_NOTHING
   */
  public double getBaseRssi(Radio radio) {
    Double rssi = baseRssi.get(radio);
    if (rssi == null) {
      rssi = SS_NOTHING;
    }
    return rssi;
  }
  /**
   * Set the base RSSI for a radio. This value is set when there is "silence"
   *
   * @param radio
   *          The radio to set the RSSI value for
   * @param rssi
   *          The RSSI value to set during silence
   */
  public void setBaseRssi(Radio radio, double rssi) {
    baseRssi.put(radio, rssi);
    /* Defer the strength update to the simulation thread. */
    simulation.invokeSimulationThread(new Runnable() {
      @Override
      public void run() {
        updateSignalStrengths();
      }
    });
  }
  /**
   * Get the minimum RSSI value that is set when the radio is sending
   *
   * @param radio
   *          The radio to get the send RSSI for
   * @return The send RSSI value; Default: SS_STRONG
   */
  public double getSendRssi(Radio radio) {
    Double rssi = sendRssi.get(radio);
    if (rssi == null) {
      rssi = SS_STRONG;
    }
    return rssi;
  }
  /**
   * Set the send RSSI for a radio. This is the minimum value when the radio is
   * sending
   *
   * @param radio
   *          The radio to set the RSSI value for
   * @param rssi
   *          The minimum RSSI value to set when sending
   */
  public void setSendRssi(Radio radio, double rssi) {
    sendRssi.put(radio, rssi);
  }
  /** Adds an observer notified on radio medium changes. */
  public void addRadioMediumObserver(Observer observer) {
    radioMediumObservable.addObserver(observer);
  }
  /** @return The observable used for radio medium change notifications */
  public Observable getRadioMediumObservable() {
    return radioMediumObservable;
  }
  /** Removes a previously added radio medium observer. */
  public void deleteRadioMediumObserver(Observer observer) {
    radioMediumObservable.deleteObserver(observer);
  }
  /** @return The most recently finished connection, or null */
  public RadioConnection getLastConnection() {
    return lastConnection;
  }
  /** Serializes the per-mote base/send RSSI overrides to XML config elements. */
  public Collection<Element> getConfigXML() {
    Collection<Element> config = new ArrayList<Element>();
    for(Entry<Radio, Double> ent: baseRssi.entrySet()){
      Element element = new Element("BaseRSSIConfig");
      element.setAttribute("Mote", "" + ent.getKey().getMote().getID());
      element.addContent("" + ent.getValue());
      config.add(element);
    }
    for(Entry<Radio, Double> ent: sendRssi.entrySet()){
      Element element = new Element("SendRSSIConfig");
      element.setAttribute("Mote", "" + ent.getKey().getMote().getID());
      element.addContent("" + ent.getValue());
      config.add(element);
    }
    return config;
  }
  /* Config applied lazily in simulationFinishedLoading(), once motes exist. */
  private Collection<Element> delayedConfiguration = null;
  /** Stores the XML config for later application; always succeeds. */
  public boolean setConfigXML(final Collection<Element> configXML, boolean visAvailable) {
    delayedConfiguration = configXML;
    return true;
  }
  /** Applies the stored RSSI config once all motes have been loaded. */
  public void simulationFinishedLoading() {
    if (delayedConfiguration == null) {
      return;
    }
    for (Element element : delayedConfiguration) {
      if (element.getName().equals("BaseRSSIConfig")) {
        Radio r = simulation.getMoteWithID(Integer.parseInt(element.getAttribute("Mote").getValue())).getInterfaces().getRadio();
        setBaseRssi(r, Double.parseDouble(element.getText()));
      } else if (element.getName().equals("SendRSSIConfig")) {
        Radio r = simulation.getMoteWithID(Integer.parseInt(element.getAttribute("Mote").getValue())).getInterfaces().getRadio();
        setSendRssi(r, Double.parseDouble(element.getText()));
      }
    }
  }
}
|
|
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.test;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.HazelcastInstanceAware;
import com.hazelcast.core.HazelcastInstanceNotActiveException;
import com.hazelcast.core.Offloadable;
import com.hazelcast.internal.nearcache.impl.invalidation.Invalidator;
import com.hazelcast.internal.nearcache.impl.invalidation.MetaDataGenerator;
import com.hazelcast.internal.partition.InternalPartitionService;
import com.hazelcast.map.EntryProcessor;
import com.hazelcast.map.impl.MapService;
import com.hazelcast.map.impl.MapServiceContext;
import com.hazelcast.map.impl.nearcache.MapNearCacheManager;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.internal.util.UuidUtil;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.hazelcast.map.impl.MapService.SERVICE_NAME;
import static com.hazelcast.test.Accessors.getNodeEngineImpl;
import static com.hazelcast.internal.util.RandomPicker.getInt;
import static java.lang.Integer.MAX_VALUE;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
/**
 * Test entry processor that stresses Near Cache invalidation metadata: for {@code duration}
 * seconds it concurrently distorts partition sequences, distorts partition UUIDs, and writes
 * random entries into the target map.
 */
public class DistortInvalidationMetadataEntryProcessor
        implements EntryProcessor<Integer, Integer, Object>, IdentifiedDataSerializable, HazelcastInstanceAware, Offloadable {

    /** Serialization class id within the identified data serializable factory. */
    static final int CLASS_ID = 3;

    private int mapSize;
    private String mapName;
    private int duration;
    private HazelcastInstance instance;

    public DistortInvalidationMetadataEntryProcessor() {
    }

    /**
     * Runs three worker threads (sequence distortion, UUID distortion, random puts) for
     * {@code duration} seconds, then stops them and dumps the map contents.
     *
     * @param entry ignored; the processor only uses the injected member instance
     * @return always {@code null}
     */
    @Override
    public Object process(Map.Entry<Integer, Integer> entry) {
        final HazelcastInstance instance = this.instance;
        final AtomicBoolean stopTest = new AtomicBoolean();
        Thread distortSequence = new Thread(() -> {
            while (!stopTest.get()) {
                distortRandomPartitionSequence(mapName, instance);
                sleepSeconds(1);
            }
        });
        Thread distortUuid = new Thread(() -> {
            while (!stopTest.get()) {
                distortRandomPartitionUuid(instance);
                sleepSeconds(5);
            }
        });
        Thread put = new Thread(() -> {
            // change some data
            while (!stopTest.get()) {
                try {
                    int key = getInt(mapSize);
                    int value = getInt(MAX_VALUE);
                    Map<Integer, Integer> map = instance.getMap(mapName);
                    // Do NOT unbox the previous value: put() returns null when the key is
                    // absent, and `int oldValue = map.put(...)` would throw an NPE on the
                    // first insert, silently killing this thread.
                    map.put(key, value);
                    sleepAtLeastMillis(100);
                } catch (HazelcastInstanceNotActiveException e) {
                    break;
                }
            }
        });
        put.start();
        distortSequence.start();
        distortUuid.start();
        sleepSeconds(duration);
        // stop threads
        stopTest.set(true);
        try {
            distortUuid.join();
            distortSequence.join();
            put.join();
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        for (int i = 0; i < mapSize; i++) {
            System.out.println(instance.getMap(mapName).get(i));
        }
        return null;
    }

    /** Sets a random sequence number for a random partition of the given map. */
    private void distortRandomPartitionSequence(String mapName, HazelcastInstance member) {
        NodeEngineImpl nodeEngineImpl = getNodeEngineImpl(member);
        MapService mapService = nodeEngineImpl.getService(SERVICE_NAME);
        MapServiceContext mapServiceContext = mapService.getMapServiceContext();
        MapNearCacheManager mapNearCacheManager = mapServiceContext.getMapNearCacheManager();
        Invalidator invalidator = mapNearCacheManager.getInvalidator();
        MetaDataGenerator metaDataGenerator = invalidator.getMetaDataGenerator();
        InternalPartitionService partitionService = nodeEngineImpl.getPartitionService();
        int partitionCount = partitionService.getPartitionCount();
        metaDataGenerator.setCurrentSequence(mapName, getInt(partitionCount), getInt(MAX_VALUE));
    }

    /** Assigns a fresh UUID to a random partition's invalidation metadata. */
    private void distortRandomPartitionUuid(HazelcastInstance member) {
        NodeEngineImpl nodeEngineImpl = getNodeEngineImpl(member);
        int partitionCount = nodeEngineImpl.getPartitionService().getPartitionCount();
        int partitionId = getInt(partitionCount);
        MapService mapService = nodeEngineImpl.getService(SERVICE_NAME);
        MapServiceContext mapServiceContext = mapService.getMapServiceContext();
        MapNearCacheManager mapNearCacheManager = mapServiceContext.getMapNearCacheManager();
        Invalidator invalidator = mapNearCacheManager.getInvalidator();
        MetaDataGenerator metaDataGenerator = invalidator.getMetaDataGenerator();
        metaDataGenerator.setUuid(partitionId, UuidUtil.newUnsecureUUID());
    }

    /** Sleeps the given number of seconds; restores the interrupt flag if interrupted. */
    private void sleepSeconds(int seconds) {
        try {
            TimeUnit.SECONDS.sleep(seconds);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Sleeps at least the given number of milliseconds even across interrupts,
     * re-setting the interrupt flag afterwards if any interrupt occurred.
     */
    private void sleepAtLeastMillis(int sleepFor) {
        boolean interrupted = false;
        try {
            long remainingNanos = MILLISECONDS.toNanos(sleepFor);
            long sleepUntil = System.nanoTime() + remainingNanos;
            while (remainingNanos > 0) {
                try {
                    NANOSECONDS.sleep(remainingNanos);
                } catch (InterruptedException e) {
                    interrupted = true;
                } finally {
                    remainingNanos = sleepUntil - System.nanoTime();
                }
            }
        } finally {
            if (interrupted) {
                Thread.currentThread().interrupt();
            }
        }
    }

    @Override
    public int getFactoryId() {
        return IdentifiedFactory.FACTORY_ID;
    }

    @Override
    public int getClassId() {
        return CLASS_ID;
    }

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        out.writeUTF(mapName);
        out.writeInt(mapSize);
        out.writeInt(duration);
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        mapName = in.readUTF();
        mapSize = in.readInt();
        duration = in.readInt();
    }

    @Override
    public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
        this.instance = hazelcastInstance;
    }

    @Override
    public String getExecutorName() {
        return OFFLOADABLE_EXECUTOR;
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.runtime.metaclass;
import groovy.lang.MetaMethod;
import org.codehaus.groovy.reflection.CachedClass;
import org.codehaus.groovy.util.FastArray;
import org.codehaus.groovy.reflection.GeneratedMetaMethod;
import org.codehaus.groovy.util.SingleKeyHashMap;
import java.util.NoSuchElementException;
/**
 * A purpose-built open hash table used by the Groovy meta class layer.  It maps
 * (class, method name) pairs to {@link Entry} records holding the meta methods
 * known under that name, plus the "for super" and static variants and their
 * single-slot call-site caches.  Buckets are power-of-two masked; entries are
 * additionally chained per class through {@link Header}.
 */
public class MetaMethodIndex {
    // One Header per class in the hierarchy, keyed by Class.
    public SingleKeyHashMap methodHeaders = new SingleKeyHashMap();
    /**
     * Per-class anchor: head of the linked list of entries belonging to one
     * class, plus a precomputed hash contribution (31 * cls.hashCode()).
     */
    public static class Header {
        public Entry head;
        Class cls;
        public int clsHashCode31;
        public Class subclass;
        public Header(Class cls) {
            this (cls, null);
        }
        public Header(Class cls, Class subclass) {
            this.cls = cls;
            this.subclass = subclass;
            // cached so getOrPutMethods only has to add the name's hash code
            this.clsHashCode31 = 31 * cls.hashCode();
        }
    }
    /** Immutable (argument types, resolved method) pair used as a call-site cache slot. */
    public static class CacheEntry {
        public final Class [] params;
        public final MetaMethod method;
        public CacheEntry(final Class[] params, final MetaMethod method) {
            this.params = params;
            this.method = method;
        }
    }
    /**
     * Hash-table node, chained twice: nextHashEntry links nodes in the same
     * bucket, nextClassEntry links nodes of the same class (via Header.head).
     * Each methods* field holds either a single MetaMethod or a FastArray of them.
     */
    public static class Entry {
        public int hash;
        public Entry nextHashEntry, nextClassEntry;
        public String name;
        public Class cls;
        public Object methods, methodsForSuper, staticMethods;
        public CacheEntry cachedMethod, cachedMethodForSuper, cachedStaticMethod;
        public String toString () {
            return "[" + name + ", " + cls.getName() + "]";
        }
    }
    /**
     * Creates the index and pre-populates methodHeaders with one Header per
     * class in theCachedClass's superclass chain, each recording its direct
     * subclass; for an interface only an Object header is created.
     */
    public MetaMethodIndex(CachedClass theCachedClass) {
        init(DEFAULT_CAPACITY);
        CachedClass last = null;
        if (!theCachedClass.isInterface()) {
            for (CachedClass c = theCachedClass; c != null; c = c.getCachedSuperClass()) {
                final SingleKeyHashMap.Entry e = methodHeaders.getOrPut(c.getTheClass());
                e.value = new Header (c.getTheClass(), last == null ? null : last.getTheClass());
                last = c;
            }
        }
        else {
            final SingleKeyHashMap.Entry e = methodHeaders.getOrPut(Object.class);
            e.value = new Header (Object.class, theCachedClass.getTheClass());
        }
    }
    // Bucket array; its length is kept a power of two so (hash & (length - 1)) masks.
    protected Entry table[];
    protected static final int DEFAULT_CAPACITY = 32;
    protected static final int MINIMUM_CAPACITY = 4;
    protected static final int MAXIMUM_CAPACITY = 1 << 28;
    protected int size;
    // Resize trigger: 6/8 of the current capacity.
    protected transient int threshold;
    /** Supplemental bit-spreading hash so power-of-two bucket masking distributes well. */
    public static int hash(int h) {
        h += ~(h << 9);
        h ^= (h >>> 14);
        h += (h << 4);
        h ^= (h >>> 10);
        return h;
    }
    public int size() {
        return size;
    }
    public boolean isEmpty() {
        return size == 0;
    }
    /** Drops all entries but keeps the current bucket array. */
    public void clear() {
        Object[] tab = table;
        for (int i = 0; i < tab.length; i++)
            tab[i] = null;
        size = 0;
    }
    public void init(int initCapacity) {
        threshold = (initCapacity * 6) / 8;
        table = new Entry[initCapacity];
    }
    /** Rehashes every entry into a new bucket array of the given (power-of-two) length. */
    public void resize(int newLength) {
        Entry[] oldTable = table;
        int oldLength = table.length;
        Entry[] newTable = new Entry[newLength];
        for (int j = 0; j < oldLength; j++) {
            for (Entry e = oldTable[j]; e != null;) {
                Entry next = e.nextHashEntry;
                int index = e.hash & (newLength - 1);
                e.nextHashEntry = newTable[index];
                newTable[index] = e;
                e = next;
            }
        }
        table = newTable;
        threshold = (6 * newLength) / 8;
    }
    public interface EntryIterator {
        boolean hasNext();
        Entry next();
    }
    public Entry[] getTable() {
        return table;
    }
    /** Iterates over every entry, walking buckets from the top of the table down. */
    public EntryIterator getEntrySetIterator() {
        return new EntryIterator() {
            Entry next; // next entry to return
            int index; // current slot
            {
                Entry[] t = table;
                int i = t.length;
                Entry n = null;
                if (size != 0) { // advance to first entry
                    while (i > 0 && (n = t[--i]) == null) {
                    }
                }
                next = n;
                index = i;
            }
            public boolean hasNext() {
                return next != null;
            }
            public Entry next() {
                return nextEntry();
            }
            Entry nextEntry() {
                Entry e = next;
                if (e == null)
                    throw new NoSuchElementException();
                // pre-advance to the following entry for the next call
                Entry n = e.nextHashEntry;
                Entry[] t = table;
                int i = index;
                while (n == null && i > 0)
                    n = t[--i];
                index = i;
                next = n;
                return e;
            }
        };
    }
    /**
     * Returns the entry for (cls, name) or null.  Names are interned on
     * insertion, so the == comparison is a fast path before equals().
     */
    public final Entry getMethods(Class cls, String name) {
        int h = hash(31 * cls.hashCode() + name.hashCode());
        Entry e = table[h & (table.length - 1)];
        for (; e != null; e = e.nextHashEntry)
            if (e.hash == h && cls == e.cls && (e.name == name || e.name.equals(name)) )
                return e;
        return null;
    }
    /**
     * Returns the entry for (header.cls, name), creating and linking a fresh
     * one (into both the bucket chain and the header's class chain) if missing.
     */
    public Entry getOrPutMethods(String name, Header header) {
        final Class cls = header.cls;
        int h = hash(header.clsHashCode31 + name.hashCode());
        final Entry[] t = table;
        final int index = h & (t.length - 1);
        Entry e = t[index];
        for (; e != null; e = e.nextHashEntry)
            if (e.hash == h && cls == e.cls && (e.name == name || e.name.equals(name)) )
                return e;
        Entry entry = new Entry();
        entry.nextHashEntry = t[index];
        entry.hash = h;
        // interned to enable the identity fast path in the lookups above
        entry.name = name.intern();
        entry.cls = cls;
        t[index] = entry;
        entry.nextClassEntry = header.head;
        header.head = entry;
        // NOTE(review): uses == rather than >= — safe only because size always
        // grows by exactly one per insertion, so it cannot skip the threshold
        if (++size == threshold)
            resize(2 * t.length);
        return entry;
    }
    /** Returns (creating on demand) the Header for the given class. */
    public Header getHeader(Class cls) {
        Header header;
        final SingleKeyHashMap.Entry head = methodHeaders.getOrPut(cls);
        if (head.value == null) {
            head.value = new Header(cls);
        }
        header = (Header) head.value;
        return header;
    }
    public void copyNonPrivateMethods(Class from, Class to) {
        copyNonPrivateMethods(getHeader(from), getHeader(to));
    }
    /** Copies every non-private method of every entry of {@code from} into {@code to}. */
    public void copyNonPrivateMethods(Header from, Header to) {
        for (Entry e = from.head; e != null; e = e.nextClassEntry)
            copyNonPrivateMethods(e, to);
    }
    /** Copies every method (private included) into {@code to}'s methodsForSuper lists. */
    public void copyAllMethodsToSuper(Header from, Header to) {
        for (Entry e = from.head; e != null; e = e.nextClassEntry)
            copyAllMethodsToSuper(e, to);
    }
    /** Merges each entry's non-private methodsForSuper back into its own methods list. */
    public void copyNonPrivateMethodsFromSuper(Header from) {
        for (Entry e = from.head; e != null; e = e.nextClassEntry)
            copyNonPrivateMethodsFromSuper(e);
    }
    private void copyNonPrivateMethods(Entry from, Header to) {
        Object oldListOrMethod = from.methods;
        if (oldListOrMethod instanceof FastArray) {
            FastArray oldList = (FastArray) oldListOrMethod;
            Entry e = null;
            int len1 = oldList.size();
            Object list[] = oldList.getArray();
            for (int j = 0; j != len1; ++j) {
                MetaMethod method = (MetaMethod) list[j];
                if (method.isPrivate()) continue;
                if (e == null)
                    // created lazily: only when at least one copyable method exists
                    e = getOrPutMethods(from.name, to);
                e.methods = addMethodToList(e.methods, method);
            }
        } else {
            MetaMethod method = (MetaMethod) oldListOrMethod;
            if (!method.isPrivate()) {
                Entry e = getOrPutMethods(from.name, to);
                e.methods = addMethodToList(e.methods, method);
            }
        }
    }
    private void copyAllMethodsToSuper(Entry from, Header to) {
        Object oldListOrMethod = from.methods;
        if (oldListOrMethod instanceof FastArray) {
            FastArray oldList = (FastArray) oldListOrMethod;
            Entry e = null;
            int len1 = oldList.size();
            Object list[] = oldList.getArray();
            for (int j = 0; j != len1; ++j) {
                MetaMethod method = (MetaMethod) list[j];
                if (e == null)
                    e = getOrPutMethods(from.name, to);
                e.methodsForSuper = addMethodToList(e.methodsForSuper, method);
            }
        } else {
            MetaMethod method = (MetaMethod) oldListOrMethod;
            Entry e = getOrPutMethods(from.name, to);
            e.methodsForSuper = addMethodToList(e.methodsForSuper, method);
        }
    }
    private void copyNonPrivateMethodsFromSuper(Entry e) {
        Object oldListOrMethod = e.methodsForSuper;
        if (oldListOrMethod == null)
            return;
        if (oldListOrMethod instanceof FastArray) {
            FastArray oldList = (FastArray) oldListOrMethod;
            int len1 = oldList.size();
            Object list[] = oldList.getArray();
            for (int j = 0; j != len1; ++j) {
                MetaMethod method = (MetaMethod) list[j];
                if (method.isPrivate()) continue;
                e.methods = addMethodToList(e.methods, method);
            }
        } else {
            MetaMethod method = (MetaMethod) oldListOrMethod;
            if (!method.isPrivate()) {
                e.methods = addMethodToList(e.methods, method);
            }
        }
    }
    public void copyNonPrivateMethodsDown(Class from, Class to) {
        copyNonPrivateNonNewMetaMethods(getHeader(from), getHeader(to));
    }
    /** Like copyNonPrivateMethods, but additionally skips NewMetaMethod instances. */
    public void copyNonPrivateNonNewMetaMethods(Header from, Header to) {
        for (Entry e = from.head; e != null; e = e.nextClassEntry)
            copyNonPrivateNonNewMetaMethods(e, to);
    }
    private void copyNonPrivateNonNewMetaMethods(Entry from, Header to) {
        Object oldListOrMethod = from.methods;
        if (oldListOrMethod == null)
            return;
        if (oldListOrMethod instanceof FastArray) {
            FastArray oldList = (FastArray) oldListOrMethod;
            Entry e = null;
            int len1 = oldList.size();
            Object list[] = oldList.getArray();
            for (int j = 0; j != len1; ++j) {
                MetaMethod method = (MetaMethod) list[j];
                if (method instanceof NewMetaMethod || method.isPrivate()) continue;
                if (e == null)
                    e = getOrPutMethods(from.name, to);
                e.methods = addMethodToList(e.methods, method);
            }
        } else {
            MetaMethod method = (MetaMethod) oldListOrMethod;
            if (method instanceof NewMetaMethod || method.isPrivate()) return;
            Entry e = getOrPutMethods(from.name, to);
            e.methods = addMethodToList(e.methods, method);
        }
    }
    /**
     * Adds a method to a methods holder ({@code null}, single MetaMethod or
     * FastArray) and returns the (possibly new) holder.  When a method with the
     * same parameter signature is already present, overwrite rules apply:
     * private methods and real interface methods are never replaced; otherwise
     * the method wins if it is a non-real method of the same declaring class,
     * or declared in a subclass of the existing match's declaring class.
     */
    public Object addMethodToList(Object o, MetaMethod method) {
        if (o == null) {
            return method;
        }
        if (o instanceof MetaMethod) {
            MetaMethod match = (MetaMethod) o;
            if (!isMatchingMethod(match, method)) {
                FastArray list = new FastArray(2);
                list.add(match);
                list.add(method);
                return list;
            } else {
                if (match.isPrivate()
                        || (!isNonRealMethod(match) && match.getDeclaringClass().isInterface() && !method.getDeclaringClass().isInterface())) {
                    // do not overwrite interface methods with instance methods
                    // do not overwrite private methods
                    // Note: private methods from parent classes are not shown here,
                    // but when doing the multimethod connection step, we overwrite
                    // methods of the parent class with methods of a subclass and
                    // in that case we want to keep the private methods
                } else {
                    CachedClass methodC = method.getDeclaringClass();
                    CachedClass matchC = match.getDeclaringClass();
                    if (methodC == matchC) {
                        if (isNonRealMethod(method)) {
                            return method;
                        }
                    } else if (!methodC.isAssignableFrom(matchC.getTheClass())) {
                        return method;
                    }
                }
            }
            return o;
        }
        if (o instanceof FastArray) {
            FastArray list = (FastArray) o;
            int found = findMatchingMethod(list, method);
            if (found == -1) {
                list.add(method);
            } else {
                MetaMethod match = (MetaMethod) list.get(found);
                if (match==method) return o;
                if (match.isPrivate()
                        || (!isNonRealMethod(match) && match.getDeclaringClass().isInterface() && !method.getDeclaringClass().isInterface())) {
                    // do not overwrite interface methods with instance methods
                    // do not overwrite private methods
                    // Note: private methods from parent classes are not shown here,
                    // but when doing the multimethod connection step, we overwrite
                    // methods of the parent class with methods of a subclass and
                    // in that case we want to keep the private methods
                } else {
                    CachedClass methodC = method.getDeclaringClass();
                    CachedClass matchC = match.getDeclaringClass();
                    if (methodC == matchC) {
                        if (isNonRealMethod(method)) {
                            list.set(found, method);
                        }
                    } else if (!methodC.isAssignableFrom(matchC.getTheClass())) {
                        list.set(found, method);
                    }
                }
            }
        }
        return o;
    }
    /** True for synthetic/meta methods that do not correspond to a real bytecode method. */
    private static boolean isNonRealMethod(MetaMethod method) {
        return method instanceof NewInstanceMetaMethod ||
                method instanceof NewStaticMetaMethod ||
                method instanceof ClosureMetaMethod ||
                method instanceof GeneratedMetaMethod ||
                method instanceof ClosureStaticMetaMethod ||
                method instanceof MixinInstanceMetaMethod ||
                method instanceof ClosureMetaMethod.AnonymousMetaMethod;
    }
    /** True when both methods have identical parameter type lists (or are the same object). */
    private static boolean isMatchingMethod(MetaMethod aMethod, MetaMethod method) {
        if (aMethod==method) return true;
        CachedClass[] params1 = aMethod.getParameterTypes();
        CachedClass[] params2 = method.getParameterTypes();
        if (params1.length != params2.length) {
            return false;
        }
        boolean matches = true;
        for (int i = 0; i < params1.length; i++) {
            if (params1[i] != params2[i]) {
                matches = false;
                break;
            }
        }
        return matches;
    }
    /** Index of the first signature-matching method in the list, or -1. */
    private static int findMatchingMethod(FastArray list, MetaMethod method) {
        int len = list.size();
        Object data[] = list.getArray();
        for (int j = 0; j != len; ++j) {
            MetaMethod aMethod = (MetaMethod) data[j];
            if (isMatchingMethod(aMethod, method))
                return j;
        }
        return -1;
    }
    /** Snapshots every entry's methods into methodsForSuper (FastArrays are copied). */
    public void copyMethodsToSuper() {
        Entry[] table = this.table;
        int length = table.length;
        for (int j = 0; j < length; j++) {
            for (Entry e = table[j]; e != null; e = e.nextHashEntry) {
                if (e.methods instanceof FastArray)
                    e.methodsForSuper = ((FastArray) e.methods).copy();
                else
                    e.methodsForSuper = e.methods;
            }
        }
    }
    public void copy(Class c, Header index) {
        copy(getHeader(c), index);
    }
    public void copy(Header from, Header to) {
        for (Entry e = from.head; e != null; e = e.nextClassEntry)
            copyAllMethods(e, to);
    }
    private void copyAllMethods(Entry from, Header to) {
        Object oldListOrMethod = from.methods;
        if (oldListOrMethod instanceof FastArray) {
            FastArray oldList = (FastArray) oldListOrMethod;
            Entry e = null;
            int len1 = oldList.size();
            Object list[] = oldList.getArray();
            for (int j = 0; j != len1; ++j) {
                MetaMethod method = (MetaMethod) list[j];
                if (e == null)
                    e = getOrPutMethods(from.name, to);
                e.methods = addMethodToList(e.methods, method);
            }
        } else {
            MetaMethod method = (MetaMethod) oldListOrMethod;
            // NOTE(review): despite the name, the single-method branch skips
            // private methods while the FastArray branch copies everything —
            // looks inconsistent; confirm against callers before changing
            if (!method.isPrivate()) {
                Entry e = getOrPutMethods(from.name, to);
                e.methods = addMethodToList(e.methods, method);
            }
        }
    }
    /** Invalidates every entry's call-site caches (all three variants). */
    public void clearCaches() {
        for (int i = 0; i != table.length; ++i )
            for (Entry e = table [i]; e != null; e = e.nextHashEntry ) {
                e.cachedMethod = e.cachedMethodForSuper = e.cachedStaticMethod = null;
            }
    }
    /** Invalidates the call-site caches of every entry with the given method name. */
    public void clearCaches(String name) {
        for (int i = 0; i != table.length; ++i )
            for (Entry e = table [i]; e != null; e = e.nextHashEntry ) {
                if (e.name.equals(name)) {
                    e.cachedMethod = e.cachedMethodForSuper = e.cachedStaticMethod = null;
                }
            }
    }
}
|
|
/* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.digester3.plugins;
import static org.junit.Assert.*;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.digester3.Digester;
import org.junit.Test;
/**
 * Test cases for functionality which sets what xml attributes specify the plugin class or plugin declaration id.
 */
public class TestConfigurablePluginAttributes
{

    // --------------------------------------------------------------- Test cases

    /**
     * Tests that by default the plugin class and plugin id are taken from the
     * non-namespaced "plugin-class" and "plugin-id" xml attributes.
     */
    @Test
    public void testDefaultBehaviour()
        throws Exception
    {
        final Digester digester = new Digester();
        digester.setNamespaceAware( true );
        final PluginRules rc = new PluginRules();
        digester.setRules( rc );
        final MultiContainer root = declareRulesAndParse( digester, new PluginCreateRule( Widget.class ) );
        assertAllOfType( TextLabel.class, root.getWidgets() );
        assertAllOfType( TextLabel.class, root.getGadgets() );
    }

    /**
     * Tests that using setPluginIdAttribute/setPluginClassAttribute on the
     * PluginRules overrides behavior for all PluginCreateRule instances.
     * Also tests specifying attributes with "null" for namespace (ie
     * attributes not in any namespace).
     */
    @Test
    public void testGlobalOverride()
        throws Exception
    {
        final Digester digester = new Digester();
        digester.setNamespaceAware( true );
        final PluginRules rc = new PluginRules();
        digester.setRules( rc );
        // override the attribute names for every plugin rule attached to rc;
        // the settings live on this PluginRules instance, so they do not leak
        // into other tests
        rc.setPluginIdAttribute( null, "id" );
        rc.setPluginClassAttribute( null, "class" );
        final MultiContainer root = declareRulesAndParse( digester, new PluginCreateRule( Widget.class ) );
        assertAllOfType( Slider.class, root.getWidgets() );
        assertAllOfType( Slider.class, root.getGadgets() );
    }

    /**
     * Tests that using setPluginIdAttribute/setPluginClassAttribute on a single
     * PluginCreateRule overrides behavior for only that instance. Also tests
     * that attributes can be in namespaces.
     */
    @Test
    public void testInstanceOverride()
        throws Exception
    {
        final Digester digester = new Digester();
        digester.setNamespaceAware( true );
        final PluginRules rc = new PluginRules();
        digester.setRules( rc );
        // for plugins at pattern "root/widget", use xml attributes "id" and
        // "class" in the custom namespace as the values for plugin id and
        // class, not the default (and non-namespaced) values of
        // "plugin-id" and "plugin-class".
        final PluginCreateRule widgetPluginRule = new PluginCreateRule( Widget.class );
        widgetPluginRule.setPluginIdAttribute( "http://commons.apache.org/digester/plugins", "id" );
        widgetPluginRule.setPluginClassAttribute( "http://commons.apache.org/digester/plugins", "class" );
        final MultiContainer root = declareRulesAndParse( digester, widgetPluginRule );
        assertAllOfType( TextLabel2.class, root.getWidgets() );
        assertAllOfType( TextLabel.class, root.getGadgets() );
    }

    // --------------------------------------------------------------- Helpers

    /**
     * Wires the shared declaration/widget/gadget rules into the digester,
     * parses test7.xml and returns the populated root container. The widget
     * pattern uses the supplied (possibly customised) rule; the gadget pattern
     * always uses a default-configured PluginCreateRule.
     */
    private MultiContainer declareRulesAndParse( final Digester digester, final PluginCreateRule widgetPluginRule )
        throws Exception
    {
        final PluginDeclarationRule pdr = new PluginDeclarationRule();
        digester.addRule( "root/plugin", pdr );
        digester.addRule( "root/widget", widgetPluginRule );
        digester.addSetNext( "root/widget", "addWidget" );
        final PluginCreateRule gadgetPluginRule = new PluginCreateRule( Widget.class );
        digester.addRule( "root/gadget", gadgetPluginRule );
        digester.addSetNext( "root/gadget", "addGadget" );
        final MultiContainer root = new MultiContainer();
        digester.push( root );
        // the original no-op try/catch-rethrow around parse() was removed:
        // a propagating exception fails the test in exactly the same way
        digester.parse( Utils.getInputStream( this, "test7.xml" ) );
        return root;
    }

    /** Asserts the list is non-null and holds exactly four widgets, all of the expected class. */
    private static void assertAllOfType( final Class<?> expected, final List<Widget> list )
    {
        assertNotNull( list );
        assertEquals( 4, list.size() );
        for ( final Widget widget : list )
        {
            assertEquals( expected, widget.getClass() );
        }
    }

    // inner classes used for testing

    /** Root object pushed onto the digester stack; collects the parsed widgets and gadgets. */
    public static class MultiContainer
    {
        private final LinkedList<Widget> widgets = new LinkedList<Widget>();

        private final LinkedList<Widget> gadgets = new LinkedList<Widget>();

        public MultiContainer()
        {
        }

        public void addWidget( final Widget child )
        {
            widgets.add( child );
        }

        public List<Widget> getWidgets()
        {
            return widgets;
        }

        public void addGadget( final Widget child )
        {
            gadgets.add( child );
        }

        public List<Widget> getGadgets()
        {
            return gadgets;
        }
    }
}
|
|
package com.flat502.rox.http;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.Iterator;
import org.custommonkey.xmlunit.XMLTestCase;
import com.flat502.rox.http.exception.HttpResponseException;
public class Test_HttpRequestBuffer extends XMLTestCase {
    /** A well-formed POST with a 5-byte body parses completely; header lookup is case-insensitive. */
    public void testSimplePOST() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertEquals("POST", httpReq.getMethod());
        assertEquals("text/xml", httpReq.getHeaderValue("Content-Type"));
        assertEquals("text/xml", httpReq.getHeaderValue("CONTENT-TYPE"));
        assertEquals("Hello", new String(httpReq.getContent(), "UTF-8"));
    }
    /** A GET with a query string exposes method, full URI and headers; the body is empty. */
    public void testSimpleGET() throws Exception {
        String[] msg = new String[] { "GET /method?param HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 0", "", "" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertEquals("GET", httpReq.getMethod());
        assertEquals("/method?param", httpReq.getURI());
        assertEquals("text/xml", httpReq.getHeaderValue("Content-Type"));
        assertEquals("text/xml", httpReq.getHeaderValue("CONTENT-TYPE"));
        assertEquals("", new String(httpReq.getContent(), "UTF-8"));
    }
    /** HTTP/0.9 is rejected with a 505 HTTP Version Not Supported response exception. */
    public void testUnsupportedHttpVersion() throws Exception {
        String[] msg = new String[] { "POST / HTTP/0.9", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "", "Hello" };
        try {
            HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
            httpReq.isComplete();
            fail();
        } catch (HttpResponseException e) {
            assertEquals(HttpConstants.StatusCodes._505_HTTP_VERSION_NOT_SUPPORTED, e.getStatusCode());
        }
    }
    /** An unsupported method (DELETE) is rejected with 501 Not Implemented. */
    public void testUnsupportedHttpMethod() throws Exception {
        String[] msg = new String[] { "DELETE / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "", "Hello" };
        try {
            HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
            httpReq.isComplete();
            fail();
        } catch (HttpResponseException e) {
            assertEquals(HttpConstants.StatusCodes._501_NOT_IMPLEMENTED, e.getStatusCode());
        }
    }
    /** A garbage request line is rejected with 400 Bad Request. */
    public void testMalformedRequestHeader() throws Exception {
        String[] msg = new String[] { "Garbage", "Host: hostname", "Content-Type: text/xml", "Content-Length: 5",
                "", "Hello" };
        try {
            HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
            httpReq.isComplete();
            fail();
        } catch (HttpResponseException e) {
            assertEquals(HttpConstants.StatusCodes._400_BAD_REQUEST, e.getStatusCode());
        }
    }
    /** Content-Length: 0 yields a complete request whose content array is empty. */
    public void testEmptyContent() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 0", "", "" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertEquals(0, httpReq.getContent().length);
    }
    /** An HTTP/1.1 request without a Host header is rejected with 412 Precondition Failed. */
    public void testNoHost() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Content-Length: 5", "", "Hello" };
        try {
            HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
            httpReq.isComplete();
            fail();
        } catch (HttpResponseException e) {
            assertEquals(HttpConstants.StatusCodes._412_PRECONDITION_FAILED, e.getStatusCode());
        }
    }
    /** An HTTP/1.0 request without a Host header is accepted (Host is only mandatory in 1.1). */
    public void testNoHostVersion10() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.0", "Content-Length: 5", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        try {
            httpReq.isComplete();
        } catch (HttpResponseException e) {
            fail();
        }
    }
    /** A missing Content-Type header is tolerated; the request still completes. */
    public void testNoContentType() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Length: 5", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
    }
    /**
     * When more bytes arrive than Content-Length declares, addBytes() returns the
     * offset just past the first message and the excess ("World") is not consumed.
     */
    public void testExcessiveContent() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "", "HelloWorld" };
        HttpRequestBuffer httpReq = new HttpRequestBuffer(null, null);
        byte[] buf = toBuffer(msg);
        int excess = httpReq.addBytes(buf, 0, buf.length);
        assertEquals(buf.length - "World".length(), excess);
        assertTrue(httpReq.isComplete());
        assertEquals("POST", httpReq.getMethod());
        assertEquals("text/xml", httpReq.getHeaderValue("Content-Type"));
        assertEquals("Hello", new String(httpReq.getContent(), "UTF-8"));
    }
    /** A POST without Content-Length is rejected with 412 Precondition Failed. */
    public void testNoContentLength() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml", "", "Hello" };
        try {
            HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
            httpReq.isComplete();
            fail();
        } catch (HttpResponseException e) {
            assertEquals(HttpConstants.StatusCodes._412_PRECONDITION_FAILED, e.getStatusCode());
        }
    }
    /** A non-XML content type (text/html) is not rejected at the buffer level. */
    public void testInvalidContentType() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/html",
                "Content-Length: 5", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
    }
    /** The charset parameter of Content-Type is parsed and resolved to a canonical Charset (ASCII -> US-ASCII). */
    public void testContentTypeCharSet() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml; charset=ASCII",
                "Content-Length: 5", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertEquals("US-ASCII", httpReq.getContentCharset().name());
    }
    /** A folded (multi-line) header value is preserved verbatim, including the CRLF continuation. */
    public void testMultilineHeaders() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "X-Custom-Header: some", " multiline value", "Content-Length: 5", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertEquals("some\r\n multiline value", httpReq.getHeaderValue("X-Custom-Header"));
        assertEquals("Hello", new String(httpReq.getContent(), "UTF-8"));
    }
    /** Values of a repeated header are joined with ", " in arrival order. */
    public void testDuplicateHeaders() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "X-Duplicate-Header: part 1", "X-Duplicate-Header: part 2", "Content-Length: 5", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertEquals("part 1, part 2", httpReq.getHeaderValue("X-Duplicate-Header"));
        assertEquals("Hello", new String(httpReq.getContent(), "UTF-8"));
    }
    /** An empty Accept-Encoding header yields an empty (but non-null) encodings map. */
    public void testAcceptContentEmpty() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "Accept-Encoding:", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertEquals("", httpReq.getHeaderValue("Accept-Encoding"));
        assertNotNull(httpReq.getAcceptedEncodings());
        assertEquals(0, httpReq.getAcceptedEncodings().size());
    }
    /** A single encoding without a q-value maps to a null quality. */
    public void testAcceptContentSingleName() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "Accept-Encoding: identity", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertNotNull(httpReq.getAcceptedEncodings());
        assertEquals(1, httpReq.getAcceptedEncodings().size());
        assertTrue(httpReq.getAcceptedEncodings().containsKey("identity"));
        assertEquals(null, httpReq.getAcceptedEncodings().get("identity"));
    }
    /** Encoding names are normalised to lower case ("IDENTITY" is keyed as "identity"). */
    public void testAcceptContentSingleNameUppercase() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "Accept-Encoding: IDENTITY", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertNotNull(httpReq.getAcceptedEncodings());
        assertEquals(1, httpReq.getAcceptedEncodings().size());
        assertTrue(httpReq.getAcceptedEncodings().containsKey("identity"));
        assertEquals(null, httpReq.getAcceptedEncodings().get("identity"));
    }
    /** A q-value attached to an encoding is parsed into a Float quality. */
    public void testAcceptContentNameWithQuality() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "Accept-Encoding: identity;q=0.5", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertNotNull(httpReq.getAcceptedEncodings());
        assertEquals(1, httpReq.getAcceptedEncodings().size());
        assertTrue(httpReq.getAcceptedEncodings().containsKey("identity"));
        assertEquals(new Float(0.5), httpReq.getAcceptedEncodings().get("identity"));
    }
    /** Multiple encodings without q-values keep their listed order and null qualities. */
    public void testAcceptContentNameMultipleEncodingsNoQuality() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "Accept-Encoding: identity, deflater", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertNotNull(httpReq.getAcceptedEncodings());
        assertEquals(2, httpReq.getAcceptedEncodings().size());
        assertTrue(httpReq.getAcceptedEncodings().containsKey("identity"));
        assertEquals(null, httpReq.getAcceptedEncodings().get("identity"));
        assertTrue(httpReq.getAcceptedEncodings().containsKey("deflater"));
        assertEquals(null, httpReq.getAcceptedEncodings().get("deflater"));
        Iterator keys = httpReq.getAcceptedEncodings().keySet().iterator();
        assertEquals("identity", keys.next());
        assertEquals("deflater", keys.next());
    }
    /** An encoding without a q-value is preferred over (iterated before) one with q=0.5. */
    public void testAcceptContentNameMultipleEncodingsOneQuality() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "Accept-Encoding: identity; q=0.5, deflater", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertNotNull(httpReq.getAcceptedEncodings());
        assertEquals(2, httpReq.getAcceptedEncodings().size());
        assertTrue(httpReq.getAcceptedEncodings().containsKey("identity"));
        assertEquals(new Float(0.5), httpReq.getAcceptedEncodings().get("identity"));
        assertTrue(httpReq.getAcceptedEncodings().containsKey("deflater"));
        assertEquals(null, httpReq.getAcceptedEncodings().get("deflater"));
        Iterator keys = httpReq.getAcceptedEncodings().keySet().iterator();
        assertEquals("deflater", keys.next());
        assertEquals("identity", keys.next());
    }
    /**
     * Mixed header: a repeated encoding listed later without a q-value ("gzip")
     * overrides its earlier qualified occurrence, whitespace around q-values is
     * tolerated, and iteration runs unqualified-first then by descending quality.
     */
    public void testAcceptContentComplex() throws Exception {
        String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5",
                "Accept-Encoding: gzip;q=0.5, identity; q=0.5, deflater;q= 0.5, gzip, other;q=0.1", "", "Hello" };
        HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
        assertTrue(httpReq.isComplete());
        assertNotNull(httpReq.getAcceptedEncodings());
        assertEquals(4, httpReq.getAcceptedEncodings().size());
        assertTrue(httpReq.getAcceptedEncodings().containsKey("identity"));
        assertEquals(new Float(0.5), httpReq.getAcceptedEncodings().get("identity"));
        assertTrue(httpReq.getAcceptedEncodings().containsKey("deflater"));
        assertEquals(new Float(0.5), httpReq.getAcceptedEncodings().get("deflater"));
        assertTrue(httpReq.getAcceptedEncodings().containsKey("gzip"));
        assertEquals(null, httpReq.getAcceptedEncodings().get("gzip"));
        assertTrue(httpReq.getAcceptedEncodings().containsKey("other"));
        assertEquals(new Float(0.1), httpReq.getAcceptedEncodings().get("other"));
        Iterator keys = httpReq.getAcceptedEncodings().keySet().iterator();
        assertEquals("gzip", keys.next());
        assertEquals("identity", keys.next());
        assertEquals("deflater", keys.next());
        assertEquals("other", keys.next());
    }
public void testAcceptContentSortOrder() throws Exception {
String[] msg = new String[] { "POST / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
"Content-Length: 5",
"Accept-Encoding: first;q=0.5, second;q=0.3, third;q=0.4, fourth, fifth;q=0.1, repeat, again", "",
"Hello" };
HttpRequestBuffer httpReq = this.newHttpRequestBuffer(msg);
assertTrue(httpReq.isComplete());
assertNotNull(httpReq.getAcceptedEncodings());
Iterator keys = httpReq.getAcceptedEncodings().keySet().iterator();
assertEquals("fourth", keys.next());
assertEquals("repeat", keys.next());
assertEquals("again", keys.next());
assertEquals("first", keys.next());
assertEquals("third", keys.next());
assertEquals("second", keys.next());
assertEquals("fifth", keys.next());
}
    /**
     * Three HTTP requests pipelined into a single byte buffer: each buffer should
     * consume exactly one request and report where the next request begins.
     */
    public void testPipelinedRequests() throws Exception {
        String[] msg = new String[] { "GET / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "", "HelloGET / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 5", "", "WorldGET / HTTP/1.1", "Host: hostname", "Content-Type: text/xml",
                "Content-Length: 4", "", "More" };
        HttpRequestBuffer httpReq = new HttpRequestBuffer(null, null);
        byte[] buf = toBuffer(msg);
        int excess = httpReq.addBytes(buf, 0, buf.length);
        // 82 = offset just past the first request's 5-byte body ("Hello");
        // assumes addBytes returns the absolute offset of the first unconsumed byte — TODO confirm
        assertEquals(82, excess);
        // Message 1
        assertTrue(httpReq.isComplete());
        assertEquals("GET", httpReq.getMethod());
        assertEquals("/", httpReq.getURI());
        assertEquals("text/xml", httpReq.getHeaderValue("Content-Type"));
        assertEquals(5, httpReq.getContent().length);
        assertEquals("Hello", new String(httpReq.getContent(), "UTF-8"));
        // Message 2
        HttpRequestBuffer httpReq2 = new HttpRequestBuffer(null, null);
        excess = httpReq2.addBytes(buf, excess, buf.length - excess);
        assertEquals(164, excess);
        assertEquals("GET", httpReq2.getMethod());
        assertEquals("/", httpReq2.getURI());
        assertEquals("text/xml", httpReq2.getHeaderValue("Content-Type"));
        assertEquals(5, httpReq2.getContent().length);
        assertEquals("World", new String(httpReq2.getContent(), "UTF-8"));
        // Message 3 — consumes the remainder, so no excess is left.
        HttpRequestBuffer httpReq3 = new HttpRequestBuffer(null, null);
        excess = httpReq3.addBytes(buf, excess, buf.length - excess);
        assertEquals(0, excess);
        assertEquals("GET", httpReq3.getMethod());
        assertEquals("/", httpReq3.getURI());
        assertEquals("text/xml", httpReq3.getHeaderValue("Content-Type"));
        assertEquals(4, httpReq3.getContent().length);
        assertEquals("More", new String(httpReq3.getContent(), "UTF-8"));
    }
    /** Convenience overload: builds a request buffer from {@code msg} using UTF-8. */
    private HttpRequestBuffer newHttpRequestBuffer(String[] msg) throws Exception {
        return this.newHttpRequestBuffer(msg, "UTF-8");
    }
private HttpRequestBuffer newHttpRequestBuffer(String[] msg, String charSet) throws Exception {
HttpRequestBuffer httpReq = new HttpRequestBuffer(null, null);
byte[] buf = toBuffer(msg);
httpReq.addBytes(buf, 0, buf.length);
return httpReq;
}
    /** Convenience overload: encodes the message lines as UTF-8. */
    private byte[] toBuffer(String[] msg) throws UnsupportedEncodingException {
        return this.toBuffer(msg, "UTF-8");
    }
private byte[] toBuffer(String[] msg, String charSet) throws UnsupportedEncodingException {
ByteArrayOutputStream byteOs = new ByteArrayOutputStream();
PrintStream out = new PrintStream(byteOs, true, charSet);
for (int i = 0; i < msg.length; i++) {
out.print(msg[i]);
if (i < msg.length - 1) {
out.print("\r\n");
}
}
return byteOs.toByteArray();
}
    /** Command-line entry point: runs this test case with the JUnit text UI runner. */
    public static void main(String[] args) {
        junit.textui.TestRunner.run(Test_HttpRequestBuffer.class);
    }
}
|
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.vcs.log.ui.frame;
import com.google.common.primitives.Ints;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.BackgroundTaskUtil;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.vcs.commit.message.CommitMessageInspectionProfile;
import com.intellij.vcs.log.*;
import com.intellij.vcs.log.data.VcsLogData;
import com.intellij.vcs.log.impl.HashImpl;
import com.intellij.vcs.log.ui.VcsLogColorManager;
import com.intellij.vcs.log.ui.details.CommitDetailsListPanel;
import com.intellij.vcs.log.ui.frame.CommitPresentationUtil.CommitPresentation;
import com.intellij.vcs.log.ui.table.CommitSelectionListener;
import com.intellij.vcs.log.ui.table.VcsLogGraphTable;
import com.intellij.vcs.log.util.VcsLogUtil;
import kotlin.Unit;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.vcs.log.ui.frame.CommitPresentationUtil.buildPresentation;
/**
* @author Kirill Likhodedov
*/
public class VcsLogCommitDetailsListPanel extends CommitDetailsListPanel<CommitPanel> implements Disposable {
  @NotNull private final VcsLogData myLogData;
  @NotNull private final VcsLogColorManager myColorManager;
  // Rows currently selected in the graph table. Replaced wholesale on each selection
  // change so async callbacks can detect staleness by reference comparison.
  @NotNull private List<Integer> mySelection = ContainerUtil.emptyList();
  // Indicator of the background hash-resolution task; null when none is running.
  @Nullable private ProgressIndicator myResolveIndicator = null;
  public VcsLogCommitDetailsListPanel(@NotNull VcsLogData logData,
                                      @NotNull VcsLogColorManager colorManager,
                                      @NotNull Disposable parent) {
    super(parent);
    myLogData = logData;
    myColorManager = colorManager;
    // Re-render when the commit message inspection profile changes.
    logData.getProject().getMessageBus().connect(this).subscribe(CommitMessageInspectionProfile.TOPIC, () -> update());
    // Refresh the branches section whenever a containing-branches computation completes;
    // the listener is detached on dispose via the Disposer registration below.
    Runnable containingBranchesListener = this::branchesChanged;
    myLogData.getContainingBranchesGetter().addTaskCompletedListener(containingBranchesListener);
    Disposer.register(this, () -> {
      myLogData.getContainingBranchesGetter().removeTaskCompletedListener(containingBranchesListener);
    });
    setStatusText(VcsLogBundle.message("vcs.log.commit.details.status"));
    Disposer.register(parent, this);
  }
  /** Wires this details panel to the graph table so selection changes drive it. */
  public void installCommitSelectionListener(@NotNull VcsLogGraphTable graphTable) {
    graphTable.getSelectionModel().addListSelectionListener(new CommitSelectionListenerForDetails(graphTable));
  }
  // A containing-branches computation finished in the background: refresh the
  // branches section of every visible commit panel.
  private void branchesChanged() {
    forEachPanelIndexed((i, panel) -> {
      panel.updateBranches();
      return Unit.INSTANCE;
    });
  }
  /**
   * Resolves hash-like strings found in commit messages to actual commits on a
   * pooled thread, then replaces the presentations with ones where those hashes
   * are resolved. The {@code expired} condition (plus an indicator identity check)
   * drops the result if the selection changed or the task was superseded.
   */
  private void resolveHashes(@NotNull List<? extends CommitId> ids,
                             @NotNull List<? extends CommitPresentation> presentations,
                             @NotNull Set<String> unResolvedHashes,
                             @NotNull Condition<Object> expired) {
    if (!unResolvedHashes.isEmpty()) {
      myResolveIndicator = BackgroundTaskUtil.executeOnPooledThread(this, () -> {
        MultiMap<String, CommitId> resolvedHashes = new MultiMap<>();
        // Full-length hashes can be looked up directly in the storage, per root.
        Set<String> fullHashes = new HashSet<>(ContainerUtil.filter(unResolvedHashes, h -> h.length() == VcsLogUtil.FULL_HASH_LENGTH));
        for (String fullHash : fullHashes) {
          Hash hash = HashImpl.build(fullHash);
          for (VirtualFile root : myLogData.getRoots()) {
            CommitId id = new CommitId(hash, root);
            if (myLogData.getStorage().containsCommit(id)) {
              resolvedHashes.putValue(fullHash, id);
            }
          }
        }
        unResolvedHashes.removeAll(fullHashes);
        if (!unResolvedHashes.isEmpty()) {
          // Abbreviated hashes need a prefix scan over all commits in the storage.
          myLogData.getStorage().iterateCommits(commitId -> {
            for (String hashString : unResolvedHashes) {
              if (StringUtil.startsWithIgnoreCase(commitId.getHash().asString(), hashString)) {
                resolvedHashes.putValue(hashString, commitId);
              }
            }
            return true;
          });
        }
        List<CommitPresentation> resolvedPresentations = ContainerUtil.map2List(presentations,
                                                                                presentation -> presentation.resolve(resolvedHashes));
        // Indicator of THIS task: the invokeLater below is discarded if a newer
        // resolve replaced myResolveIndicator (or cancelResolve() ran) meanwhile.
        ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
        ApplicationManager.getApplication().invokeLater(() -> {
                                                          myResolveIndicator = null;
                                                          setPresentations(ids, resolvedPresentations);
                                                        },
                                                        Conditions.or(o -> myResolveIndicator != indicator, expired));
      });
    }
  }
private void cancelResolve() {
if (myResolveIndicator != null) {
myResolveIndicator.cancel();
myResolveIndicator = null;
}
}
  // Pushes (id, presentation) pairs into the visible panels; the lists are indexed
  // in the same order as the panels produced by the last rebuild.
  private void setPresentations(@NotNull List<? extends CommitId> ids,
                                @NotNull List<? extends CommitPresentation> presentations) {
    forEachPanelIndexed((i, panel) -> {
      panel.setCommit(ids.get(i), presentations.get(i));
      return Unit.INSTANCE;
    });
  }
  @Override
  public void dispose() {
    // Stop background hash resolution; listeners detach via Disposer registrations.
    cancelResolve();
  }
  /** Factory for a single commit-details panel, hooked up for hyperlink navigation. */
  @NotNull
  @Override
  protected CommitPanel getCommitDetailsPanel() {
    return new CommitPanel(myLogData, myColorManager, this::navigate);
  }
  /**
   * Reacts to selection changes in the graph table: loads metadata for the
   * selected commits, builds presentations, and kicks off background hash
   * resolution and ref loading.
   */
  private class CommitSelectionListenerForDetails extends CommitSelectionListener<VcsCommitMetadata> {
    CommitSelectionListenerForDetails(VcsLogGraphTable graphTable) {
      super(graphTable, VcsLogCommitDetailsListPanel.this.myLogData.getMiniDetailsGetter());
    }
    @Override
    protected void onDetailsLoaded(@NotNull List<? extends VcsCommitMetadata> detailsList) {
      List<CommitId> ids = ContainerUtil.map(detailsList,
                                             detail -> new CommitId(detail.getId(), detail.getRoot()));
      // buildPresentation collects hash-like strings it could not resolve yet.
      Set<String> unResolvedHashes = new HashSet<>();
      List<CommitPresentation> presentations = ContainerUtil.map(detailsList,
                                                                 detail -> buildPresentation(myLogData.getProject(), detail,
                                                                                             unResolvedHashes));
      setPresentations(ids, presentations);
      // Capture the selection list by reference: if it is replaced, the result is stale.
      List<Integer> currentSelection = mySelection;
      resolveHashes(ids, presentations, unResolvedHashes, o -> currentSelection != mySelection);
    }
    @Override
    protected void onSelection(int @NotNull [] selection) {
      cancelResolve();
      // rebuildPanel may cap how many commits are shown; keep only those rows.
      int shownPanelsCount = rebuildPanel(selection.length);
      mySelection = Ints.asList(Arrays.copyOf(selection, shownPanelsCount));
      List<Integer> currentSelection = mySelection;
      ApplicationManager.getApplication().executeOnPooledThread(() -> {
        // NOTE(review): getRefsAtRow is invoked off the EDT here — presumably
        // safe for this model; verify.
        List<Collection<VcsRef>> result = new ArrayList<>();
        for (Integer row : currentSelection) {
          result.add(myGraphTable.getModel().getRefsAtRow(row));
        }
        ApplicationManager.getApplication().invokeLater(() -> {
          // Apply only if the selection has not changed in the meantime.
          if (currentSelection == mySelection) {
            forEachPanelIndexed((i, panel) -> {
              panel.setRefs(result.get(i));
              return Unit.INSTANCE;
            });
          }
        }, o -> Disposer.isDisposed(myGraphTable));
      });
    }
    @Override
    protected void onEmptySelection() {
      cancelResolve();
      setEmpty(VcsLogBundle.message("vcs.log.changes.details.no.commits.selected.status"));
    }
    @NotNull
    @Override
    protected List<Integer> getSelectionToLoad() {
      return mySelection;
    }
    @Override
    protected void startLoading() {
      startLoadingDetails();
    }
    @Override
    protected void stopLoading() {
      stopLoadingDetails();
    }
    @Override
    protected void onError(@NotNull Throwable error) {
      setEmpty(VcsLogBundle.message("vcs.log.error.loading.status"));
    }
    // Shows a status message and clears both the selection and the commit panels.
    private void setEmpty(@Nls @NotNull String text) {
      setStatusText(text);
      mySelection = ContainerUtil.emptyList();
      setCommits(ContainerUtil.emptyList());
    }
  }
}
|
|
package php.runtime.util;
import php.runtime.Memory;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
public class PrintF {
    // 2^64, used by %u to print negative longs as unsigned decimal.
    private static final BigInteger BIG_2_64 = BigInteger.ONE.shiftLeft(64);
    private static final BigInteger BIG_TEN = new BigInteger("10");
    private final String format;   // the raw printf-style format string
    private final Locale locale;   // locale used by the %F conversion
    protected final Memory[] args; // positional arguments for the conversions
    public PrintF(Locale locale, String format, Memory[] args){
        this.locale = locale;
        this.format = format;
        this.args = args;
    }
    /**
     * Splits the format string into segments: literal text plus one segment per
     * "%..." conversion. The first inner loop collects flags (-, #, 0, +, custom
     * pad via ', pass-through flags, width, and an explicit "N$" argument
     * position); the second inner loop dispatches on the conversion character.
     */
    private List<Segment> parse(){
        List<Segment> segments = new ArrayList<Segment>();
        int length = format.length();
        int start = 0; // NOTE(review): never read after initialization — looks like dead code
        int index = 0; // running argument position for specs without an explicit N$
        StringBuilder sb = new StringBuilder();    // pending literal text / spec prefix
        StringBuilder flags = new StringBuilder(); // flags passed through to Java's formatter
        for(int i = 0; i < length; i++) {
            char ch = format.charAt(i);
            if (i + 1 < length && ch == '%'){
                sb.append(ch);
                boolean isLeft = false;     // '-' flag: left-justify
                boolean isAlt = false;      // '#' flag: alternate form
                boolean isShowSign = false; // '+' flag: always show sign
                int argIndex = -1;          // explicit argument position, -1 if none
                int leftPadLength = 0;      // NOTE(review): never assigned, so the width fallbacks below are dead
                int width = 0;              // minimum field width
                int padChar = -1;           // pad character; -1 means default
                flags.setLength(0);
                int j = i + 1;
                loop:
                for(; j < length; j++){
                    ch = format.charAt(j);
                    switch (ch){
                        case '-':
                            isLeft = true;
                            // "-0" means left-justified with '0' as the pad character.
                            if (j + 1 < length && format.charAt(j + 1) == '0') {
                                padChar = '0';
                                j++;
                            }
                            break;
                        case '#':
                            isAlt = true;
                            break;
                        case '0':
                        case '1': case '2': case '3': case '4': case '5':
                        case '6': case '7': case '8': case '9':
                            // A leading '0' is the zero-pad flag; otherwise read the number,
                            // which is either a width or (when followed by '$') an argument position.
                            if (ch == '0' && padChar < 0)
                                padChar = '0';
                            else {
                                int value = ch - '0';
                                for (int k = j + 1; k < length; k++) {
                                    char digit = format.charAt(k);
                                    if (Character.isDigit(digit)) {
                                        value = value * 10 + digit - '0';
                                        j++;
                                    } else
                                        break;
                                }
                                if (j + 1 < length && format.charAt(j + 1) == '$') {
                                    argIndex = value - 1;
                                    j++;
                                } else {
                                    width = value;
                                }
                            }
                            break;
                        case '\'':
                            // '<char> sets a custom pad character.
                            padChar = format.charAt(j + 1);
                            j += 1;
                            break;
                        case '+':
                            isShowSign = true;
                            break;
                        case ' ': case ',': case '(': case 'l':
                            flags.append(ch);
                            break;
                        default:
                            break loop; // start of the precision/conversion part
                    }
                }
                int head = j; // start of the tail passed to the segment (precision etc.)
                if (argIndex < 0)
                    argIndex = index;
                loop:
                for (; j < length; j++) {
                    ch = format.charAt(j);
                    switch (ch) {
                        case '%':
                            // "%%" — emit the accumulated text (ending in '%') literally.
                            i = j;
                            segments.add(new TextSegment(sb.toString()));
                            sb.setLength(0);
                            break loop;
                        case '0': case '1': case '2': case '3': case '4':
                        case '5': case '6': case '7': case '8': case '9':
                        case '.': case '$':
                            break; // precision/position characters, parsed by the segment itself
                        case 's': case 'S':
                            sb.setLength(sb.length() - 1); // drop the trailing '%'
                            if (width <= 0 && 0 < leftPadLength)
                                width = leftPadLength;
                            index++;
                            segments.add(new StringSegment(
                                    sb, isLeft || isAlt, padChar, ch == 'S', width, format.substring(head, j), argIndex
                            ));
                            sb.setLength(0);
                            i = j;
                            break loop;
                        case 'c': case 'C':
                            sb.setLength(sb.length() - 1);
                            if (width <= 0 && 0 < leftPadLength)
                                width = leftPadLength;
                            index++;
                            segments.add(new CharSegment(
                                    sb, isLeft || isAlt, padChar, ch == 'C', width, format.substring(head, j), argIndex
                            ));
                            sb.setLength(0);
                            i = j;
                            break loop;
                        case 'i':
                            ch = 'd'; // %i is an alias of %d; fall-through is intentional
                        case 'd': case 'x': case 'o': case 'X':
                        case 'b': case 'B': case 'u':
                            // Rebuild a Java-style integer spec; LongSegment.valueOf dispatches
                            // to the hand-rolled hex/binary/unsigned segments where needed.
                            sb.setLength(sb.length() - 1);
                            if (sb.length() > 0)
                                segments.add(new TextSegment(sb.toString()));
                            sb.setLength(0);
                            if (isAlt)
                                sb.append('#');
                            if (isShowSign)
                                sb.append('+');
                            sb.append(flags);
                            if (width > 0) {
                                if (isLeft)
                                    sb.append('-');
                                else if (padChar == '0')
                                    sb.append('0');
                                sb.append(width);
                            }
                            sb.append(format, head, j);
                            sb.append(ch);
                            index++;
                            segments.add(LongSegment.valueOf(sb.toString(), argIndex));
                            sb.setLength(0);
                            i = j;
                            break loop;
                        case 'e': case 'E': case 'f': case 'g': case 'G':
                        case 'F':
                            // %F formats with the PrintF locale; the rest use Locale.ENGLISH.
                            Locale _locale = locale;
                            if (ch == 'F')
                                ch = 'f';
                            else
                                _locale = null;
                            sb.setLength(sb.length() - 1);
                            if (sb.length() > 0)
                                segments.add(new TextSegment(sb.toString()));
                            sb.setLength(0);
                            if (isAlt)
                                sb.append('#');
                            if (isShowSign)
                                sb.append('+');
                            // The 'l' flag has no meaning for floating point; strip it.
                            if (flags.indexOf("l") != -1)
                                flags.deleteCharAt(flags.indexOf("l"));
                            sb.append(flags);
                            if (width > 0) {
                                if (isLeft)
                                    sb.append('-');
                                else if (padChar == '0')
                                    sb.append('0');
                                sb.append(width);
                            }
                            sb.append(format, head, j);
                            sb.append(ch);
                            index++;
                            segments.add(new DoubleSegment(
                                    sb.toString(), isLeft && padChar == '0', argIndex, _locale
                            ));
                            sb.setLength(0);
                            i = j;
                            break loop;
                        default:
                            if (isLeft)
                                sb.append('-');
                            if (isAlt)
                                sb.append('#');
                            sb.append(flags);
                            // Skipping the unknown modifiers like in original implementation
                            if (sb.length() == 1 && sb.charAt(0) == '%') {
                                sb.setLength(0);
                            } else {
                                sb.append(format, head, j);
                                sb.append(ch);
                            }
                            i = j;
                            break loop;
                    }
                }
            } else
                sb.append(ch);
        }
        // Flush the trailing literal text, if any.
        if (sb.length() > 0)
            segments.add(new TextSegment(sb.toString()));
        return segments;
    }
public String toString(){
StringBuilder builder = new StringBuilder();
for(Segment segment : parse()){
if (!segment.apply(locale, builder, args))
return null;
}
return builder.toString();
}
    /**
     * One piece of the parsed format string: either literal text or a conversion
     * bound to a (zero-based) argument index.
     */
    abstract public static class Segment {
        protected final String format;
        public Segment(String format){
            this.format = format;
        }
        // True when the spec carries an explicit argument position ("2$s" style).
        static boolean hasIndex(String format) {
            return format.indexOf('$') >= 0;
        }
        // Reads the leading decimal argument position and returns it zero-based.
        static int getIndex(String format) {
            int value = 0;
            for (int i = 0; i < format.length(); i++) {
                char ch;
                if ('0' <= (ch = format.charAt(i)) && ch <= '9')
                    value = 10 * value + ch - '0';
                else
                    break;
            }
            return value - 1;
        }
        // Strips the "N$" prefix, yielding a plain "%..." spec.
        static String getIndexFormat(String format) {
            int p = format.indexOf('$');
            return '%' + format.substring(p + 1);
        }
        abstract public Memory.Type getType();
        // Appends this segment's output to sb; returns false if a referenced
        // argument is missing (the whole sprintf then yields null).
        abstract protected boolean apply(Locale locale, StringBuilder sb, Memory[] args);
    }
    /**
     * %c / %C — prints the argument as a single character (%C upper-cases it);
     * width and padding handling is inherited from StringSegment.
     */
    static class CharSegment extends StringSegment {
        public CharSegment(StringBuilder prefix, boolean isLeft, int pad, boolean isUpper, int width, String format, int index) {
            super(prefix, isLeft, pad, isUpper, width, format, index);
        }
        @Override
        public Memory.Type getType() {
            return Memory.Type.INT;
        }
        @Override
        protected String toValue(Memory[] args) {
            return String.valueOf(args[_index].toChar());
        }
    }
public class TextSegment extends Segment {
public TextSegment(String format) {
super(format);
}
@Override
public Memory.Type getType() {
return Memory.Type.STRING;
}
@Override
protected boolean apply(Locale locale, StringBuilder sb, Memory[] args) {
sb.append(format);
return true;
}
}
    /**
     * %s / %S — string conversion. Handles minimum width with padding, a ".N"
     * precision that truncates, left/right alignment, an optional custom pad
     * character, and upper-casing for %S.
     */
    static class StringSegment extends Segment {
        protected final char []_prefix;  // literal text preceding this conversion
        protected final int _min;        // minimum field width
        protected final int _max;        // maximum length (precision); MAX_VALUE if absent
        protected final boolean _isLeft; // left-justify within the field
        protected final boolean _isUpper;// upper-case the value (%S)
        protected final char _pad;       // pad character (defaults to ' ')
        protected final int _index;      // zero-based argument index
        public StringSegment(StringBuilder prefix,
                             boolean isLeft, int pad, boolean isUpper,
                             int width,
                             String format, int index) {
            super(format);
            _prefix = new char[prefix.length()];
            _isLeft = isLeft;
            _isUpper = isUpper;
            if (pad >= 0)
                _pad = (char) pad;
            else
                _pad = ' ';
            prefix.getChars(0, _prefix.length, _prefix, 0);
            // An explicit "N$" argument position overrides the running index.
            if (hasIndex(format)) {
                index = getIndex(format);
                format = getIndexFormat(format);
            }
            int i = 0;
            int len = format.length();
            int max = Integer.MAX_VALUE;
            char ch;
            // Parse the ".N" precision part, if present.
            if (0 < len && format.charAt(0) == '.') {
                max = 0;
                for (i++; i < len && '0' <= (ch = format.charAt(i)) && ch <= '9'; i++) {
                    max = 10 * max + ch - '0';
                }
            }
            _min = width;
            _max = max;
            _index = index;
        }
        @Override
        public Memory.Type getType() {
            return Memory.Type.STRING;
        }
        // Converts the bound argument to its string form; overridden by CharSegment.
        protected String toValue(Memory[] args){
            return args[_index].toString();
        }
        @Override
        protected boolean apply(Locale locale, StringBuilder sb, Memory[] args) {
            sb.append(_prefix, 0, _prefix.length);
            if (_index >= args.length)
                return false; // missing argument
            String value = toValue(args);
            int len = value.length();
            // Apply the precision: truncate to at most _max characters.
            if (_max < len) {
                value = value.substring(0, _max);
                len = _max;
            }
            if (_isUpper)
                value = value.toUpperCase();
            // Right-justified: pad before the value.
            if (!_isLeft) {
                for (int i = len; i < _min; i++) {
                    sb.append(_pad);
                }
            }
            sb.append(value);
            // Left-justified: pad after the value.
            if (_isLeft) {
                for (int i = len; i < _min; i++) {
                    sb.append(_pad);
                }
            }
            return true;
        }
    }
    /**
     * Integer conversions (%d and friends), delegated to {@link String#format}
     * with Locale.ENGLISH. {@link #valueOf} normalizes the spec and dispatches
     * %x/%X, %b/%B and %u to the hand-rolled segments when their specs are
     * simple enough.
     */
    static class LongSegment extends Segment {
        protected int _index; // zero-based argument index
        LongSegment(String format, int _index) {
            super(format);
            this._index = _index;
        }
        // Builds the right segment for an integer spec, normalizing it into a
        // Java format string first.
        static Segment valueOf(String format, int index) {
            if (hasIndex(format)) {
                index = getIndex(format);
                format = getIndexFormat(format);
            } else {
                format = '%' + format;
            }
            // Modifier %ld is not supported by Java formatter. The %d is enough.
            if (format.charAt(1) == 'l') {
                format = format.substring(0, 1) + format.substring(2);
            }
            // Drop a ".N" precision — not applicable to Java integer conversions.
            if (format.length() > 1 && format.charAt(1) == '.') {
                int i;
                for (i = 2; i < format.length(); i++) {
                    char ch = format.charAt(i);
                    if (! (Character.isDigit(ch)))
                        break;
                }
                format = '%' + format.substring(i);
            }
            char last = format.charAt(format.length() - 1);
            // The specialized valueOf methods return null for specs they cannot
            // handle, in which case we fall back to String.format below.
            if (last == 'x' || last == 'X') {
                HexSegment hex = HexSegment.valueOf(format, index);
                if (hex != null)
                    return hex;
            }
            if (last == 'b' || last == 'B') {
                BinarySegment bin = BinarySegment.valueOf(format, index);
                if (bin != null)
                    return bin;
            }
            if (last == 'u') {
                UnsignedSegment unsign = UnsignedSegment.valueOf(format, index);
                if (unsign != null)
                    return unsign;
            }
            return new LongSegment(format, index);
        }
        @Override
        public Memory.Type getType() {
            return Memory.Type.INT;
        }
        @Override
        protected boolean apply(Locale locale, StringBuilder sb, Memory[] args) {
            long value;
            if (_index < args.length)
                value = args[_index].toLong();
            else {
                return false; // missing argument
            }
            sb.append(String.format(Locale.ENGLISH, format, value));
            return true;
        }
    }
static class HexSegment extends Segment {
private final int _index;
private final int _min;
private final char _pad;
private boolean _isUpper;
HexSegment(String format, int index, int min, int pad, boolean isUpper) {
super(format);
_index = index;
_min = min;
if (pad >= 0)
_pad = (char) pad;
else
_pad = ' ';
_isUpper = isUpper;
}
static HexSegment valueOf(String format, int index) {
int length = format.length();
int offset = 1;
boolean isUpper = format.charAt(length - 1) == 'X';
char pad = ' ';
if (format.charAt(offset) == ' ') {
pad = ' ';
offset++;
}
else if (format.charAt(offset) == '0') {
pad = '0';
offset++;
}
int min = 0;
for (; offset < length - 1; offset++) {
char ch = format.charAt(offset);
if ('0' <= ch && ch <= '9')
min = 10 * min + ch - '0';
else
return null;
}
return new HexSegment(format, index, min, pad, isUpper);
}
@Override
public Memory.Type getType() {
return Memory.Type.INT;
}
@Override
public boolean apply(Locale locale, StringBuilder sb, Memory []args) {
long value;
if (_index >= 0 && _index < args.length)
value = args[_index].toLong();
else
return false;
int digits = 0;
long shift = value;
for (int i = 0; i < 16; i++) {
if (shift != 0)
digits = i;
shift = shift >>> 4;
}
for (int i = digits + 1; i < _min; i++)
sb.append(_pad);
for (; digits >= 0; digits--) {
int digit = (int) (value >>> (4 * digits)) & 0xf;
if (digit <= 9)
sb.append((char) ('0' + digit));
else if (_isUpper)
sb.append((char) ('A' + digit - 10));
else
sb.append((char) ('a' + digit - 10));
}
return true;
}
}
    /**
     * %b / %B — binary conversion with optional zero/space padding and a minimum
     * width, emitted bit-by-bit.
     */
    static class BinarySegment extends Segment {
        private final int _index; // zero-based argument index
        private final int _min;   // minimum field width
        private final char _pad;  // pad character (' ' or '0')
        @Override
        public Memory.Type getType() {
            return Memory.Type.INT;
        }
        BinarySegment(String format, int index, int min, int pad) {
            super(format);
            _index = index;
            _min = min;
            if (pad >= 0)
                _pad = (char) pad;
            else
                _pad = ' ';
        }
        // Parses "%[ |0][min]b" style specs; returns null for anything else so the
        // caller falls back to LongSegment.
        static BinarySegment valueOf(String format, int index) {
            int length = format.length();
            int offset = 1;
            char pad = ' ';
            if (format.charAt(offset) == ' ') {
                pad = ' ';
                offset++;
            } else if (format.charAt(offset) == '0') {
                pad = '0';
                offset++;
            }
            int min = 0;
            for (; offset < length - 1; offset++) {
                char ch = format.charAt(offset);
                if ('0' <= ch && ch <= '9')
                    min = 10 * min + ch - '0';
                else
                    return null;
            }
            return new BinarySegment(format, index, min, pad);
        }
        @Override
        public boolean apply(Locale locale, StringBuilder sb, Memory[] args) {
            long value;
            if (_index >= 0 && _index < args.length)
                value = args[_index].toLong();
            else
                return false;
            // Find the index of the most significant set bit (0 for value 0).
            int digits = 0;
            long shift = value;
            for (int i = 0; i < 64; i++) {
                if (shift != 0)
                    digits = i;
                shift = shift >>> 1;
            }
            for (int i = digits + 1; i < _min; i++)
                sb.append(_pad);
            // Emit bits from most to least significant.
            for (; digits >= 0; digits--) {
                int digit = (int) (value >>> (digits)) & 0x1;
                sb.append((char) ('0' + digit));
            }
            return true;
        }
    }
    /**
     * %u — unsigned 64-bit decimal conversion; a negative long is printed as
     * value + 2^64 via BigInteger.
     */
    static class UnsignedSegment extends Segment {
        private final int _index; // zero-based argument index
        private final int _min;   // minimum field width
        private final char _pad;  // pad character (' ' or '0')
        @Override
        public Memory.Type getType() {
            return Memory.Type.INT;
        }
        UnsignedSegment(String format, int index, int min, int pad) {
            super(format);
            _index = index;
            _min = min;
            if (pad >= 0)
                _pad = (char) pad;
            else
                _pad = ' ';
        }
        // Parses "%[+][ |0][min]u" style specs; returns null for anything else so
        // the caller falls back to LongSegment.
        static UnsignedSegment valueOf(String format, int index) {
            int length = format.length();
            int offset = 1;
            if (format.charAt(offset) == '+')
                offset++;
            char pad = ' ';
            if (format.charAt(offset) == ' ') {
                pad = ' ';
                offset++;
            }
            else if (format.charAt(offset) == '0') {
                pad = '0';
                offset++;
            }
            int min = 0;
            for (; offset < length - 1; offset++) {
                char ch = format.charAt(offset);
                if ('0' <= ch && ch <= '9')
                    min = 10 * min + ch - '0';
                else
                    return null;
            }
            return new UnsignedSegment(format, index, min, pad);
        }
        @Override
        public boolean apply(Locale locale, StringBuilder sb, Memory[] args) {
            long value;
            if (_index >= 0 && _index < args.length)
                value = args[_index].toLong();
            else
                return false;
            // Decimal digits are produced right-to-left into buf; digits is the
            // index of the first (most significant) digit when done.
            char []buf = new char[32];
            int digits = buf.length;
            if (value == 0) {
                buf[--digits] = '0';
            }
            else if (value > 0) {
                while (value != 0) {
                    int digit = (int) (value % 10);
                    buf[--digits] = (char) ('0' + digit);
                    value = value / 10;
                }
            }
            else {
                // Negative long: reinterpret as unsigned by adding 2^64.
                BigInteger bigInt = new BigInteger(String.valueOf(value));
                bigInt = bigInt.add(BIG_2_64);
                while (bigInt.compareTo(BigInteger.ZERO) != 0) {
                    int digit = bigInt.mod(BIG_TEN).intValue();
                    buf[--digits] = (char) ('0' + digit);
                    bigInt = bigInt.divide(BIG_TEN);
                }
            }
            for (int i = buf.length - digits; i < _min; i++)
                sb.append(_pad);
            for (; digits < buf.length; digits++) {
                sb.append(buf[digits]);
            }
            return true;
        }
    }
    /**
     * %e/%E/%f/%F/%g/%G — floating-point conversion delegated to String.format.
     * %F uses the PrintF locale; all others always format with Locale.ENGLISH.
     */
    static class DoubleSegment extends Segment {
        private final String _format;      // normalized Java format spec
        private final boolean _isLeftZero; // left-justified with '0' padding requested
        private final int _index;          // zero-based argument index
        private final Locale _locale;      // non-null only for %F
        @Override
        public Memory.Type getType() {
            return Memory.Type.DOUBLE;
        }
        DoubleSegment(String format, boolean isLeftZero, int index, Locale locale) {
            super(format);
            // An explicit "N$" argument position overrides the running index.
            if (hasIndex(format)) {
                _index = getIndex(format);
                _format = getIndexFormat(format);
            }
            else {
                _format = '%' + format;
                _index = index;
            }
            _isLeftZero = isLeftZero;
            _locale = locale;
        }
        @Override
        public boolean apply(Locale locale, StringBuilder sb, Memory[] args) {
            double value;
            if (_index < args.length)
                value = args[_index].toDouble();
            else
                return false; // missing argument
            String s;
            if (_locale == null)
                s = String.format(Locale.ENGLISH, _format, value);
            else
                s = String.format(_locale, _format, value);
            if (_isLeftZero) {
                int len = s.length();
                // php/1174 "-0" not allowed by java formatter
                // Emulate left-justified zero padding by turning pad spaces into zeros.
                for (int i = 0; i < len; i++) {
                    char ch = s.charAt(i);
                    if (ch == ' ')
                        sb.append('0');
                    else
                        sb.append(ch);
                }
            } else {
                sb.append(s);
            }
            return true;
        }
    }
}
|
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.util;
import com.facebook.buck.util.timing.SettableFakeClock;
import com.google.common.base.Functions;
import com.google.common.collect.Multimap;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
/** Fake implementation of {@link ListeningProcessExecutor} for tests. */
public class FakeListeningProcessExecutor extends ListeningProcessExecutor {
  // Maps launch params to the scripted sequence of states the fake process replays.
  private final Function<ProcessExecutorParams, Collection<FakeListeningProcessState>>
      processStatesFunction;
  private final SettableFakeClock clock;
  // Every process handed out by launchProcess, in launch order.
  private final List<LaunchedProcess> launchedProcesses;
  /** Scripted states per params; the fake clock's value is irrelevant to the test. */
  public FakeListeningProcessExecutor(
      Multimap<ProcessExecutorParams, FakeListeningProcessState> processStates) {
    this(Functions.forMap(processStates.asMap()), SettableFakeClock.DO_NOT_CARE);
  }
  /** Scripted states per params, driven by the given fake clock. */
  public FakeListeningProcessExecutor(
      Multimap<ProcessExecutorParams, FakeListeningProcessState> processStates,
      SettableFakeClock clock) {
    this(Functions.forMap(processStates.asMap()), clock);
  }
  /** Fully general form: states are computed from the launch params on demand. */
  public FakeListeningProcessExecutor(
      Function<ProcessExecutorParams, Collection<FakeListeningProcessState>> processStatesFunction,
      SettableFakeClock clock) {
    this.processStatesFunction = processStatesFunction;
    this.clock = clock;
    this.launchedProcesses = new ArrayList<>();
  }
  /**
   * Scripted stand-in for a launched process: replays a sequence of
   * {@link FakeListeningProcessState}s against the {@link ProcessListener},
   * pausing whenever a state's precondition (pending stdin, stdin closed,
   * elapsed fake time) is not yet met.
   */
  private static class FakeLaunchedProcessImpl implements LaunchedProcess {
    public final ProcessListener listener;
    public final Iterator<FakeListeningProcessState> states;
    public final SettableFakeClock clock;
    public final long processExecTimeNanos; // total scripted WAIT time
    public final ByteBuffer stdinBuffer;
    public boolean processingStates;        // re-entrancy guard for processAllStates
    public FakeListeningProcessState currentState; // blocked state awaiting its precondition
    public ByteArrayOutputStream stdinBytes;       // stdin captured since the last EXPECT_STDIN
    public WritableByteChannel stdinBytesChannel;
    public boolean stdinClosed;
    public boolean wantsWrite;
    public int exitCode;                    // -1 until an EXIT state runs
    public long startTimeNanos;
    public long processTimeNanos;           // fake time consumed by WAIT states so far
    public FakeLaunchedProcessImpl(
        ProcessListener listener,
        Iterator<FakeListeningProcessState> states,
        SettableFakeClock clock,
        long processExecTimeNanos) {
      this.listener = listener;
      this.states = states;
      this.clock = clock;
      this.processExecTimeNanos = processExecTimeNanos;
      this.stdinBuffer = ByteBuffer.allocate(BUFFER_CAPACITY);
      this.stdinBytes = new ByteArrayOutputStream();
      this.stdinBytesChannel = Channels.newChannel(stdinBytes);
      this.exitCode = -1;
      this.startTimeNanos = this.clock.nanoTime();
    }
    /** Replays states until one blocks (precondition unmet) or the script ends. */
    public void processAllStates() {
      if (processingStates) {
        // Don't recurse.
        return;
      }
      // Retry the state that blocked last time before pulling new ones.
      if (currentState != null) {
        if (!processState(currentState)) {
          return;
        } else {
          currentState = null;
        }
      }
      while (states.hasNext()) {
        currentState = states.next();
        if (!processState(currentState)) {
          return;
        } else {
          currentState = null;
        }
      }
    }
    // Executes one scripted state; returns false when its precondition is not met
    // yet, in which case the state is retried on the next processAllStates call.
    private boolean processState(FakeListeningProcessState state) {
      processingStates = true;
      boolean result = true;
      switch (state.getType()) {
        case EXPECT_STDIN:
          if (stdinClosed) {
            throw new RuntimeException("stdin is closed");
          }
          // Block until the code under test signals readiness via wantWrite().
          if (!wantsWrite) {
            result = false;
            break;
          }
          // Drain the listener's stdin into stdinBytes until it has no more.
          while (wantsWrite) {
            wantsWrite = listener.onStdinReady(stdinBuffer);
            try {
              stdinBytesChannel.write(stdinBuffer);
            } catch (IOException e) {
              throw new RuntimeException(e);
            }
            stdinBuffer.clear();
          }
          if (!ByteBuffer.wrap(stdinBytes.toByteArray()).equals(state.getExpectedStdin().get())) {
            throw new RuntimeException("Did not reach expected stdin state");
          }
          // Reset the capture for the next EXPECT_STDIN state.
          stdinBytes = new ByteArrayOutputStream();
          stdinBytesChannel = Channels.newChannel(stdinBytes);
          break;
        case EXPECT_STDIN_CLOSED:
          if (!stdinClosed) {
            result = false;
            break;
          }
          break;
        case STDOUT:
          while (state.getStdout().get().hasRemaining()) {
            listener.onStdout(state.getStdout().get(), false);
          }
          break;
        case STDERR:
          while (state.getStderr().get().hasRemaining()) {
            listener.onStderr(state.getStderr().get(), false);
          }
          break;
        case WAIT:
          // Block until enough fake time has elapsed since the process started.
          long stateWaitTime = state.getWaitNanos().get();
          if (clock.nanoTime() < startTimeNanos + processTimeNanos + stateWaitTime) {
            result = false;
            break;
          }
          processTimeNanos += stateWaitTime;
          break;
        case EXIT:
          exitCode = state.getExitCode().getAsInt();
          // Deliver the closing (empty) stdout/stderr callbacks, then the exit code.
          ByteBuffer empty = ByteBuffer.allocate(0);
          listener.onStdout(empty, true);
          listener.onStderr(empty, true);
          listener.onExit(exitCode);
          break;
      }
      processingStates = false;
      return result;
    }
    @Override
    public void wantWrite() {
      this.wantsWrite = true;
      processAllStates();
    }
    @Override
    public void writeStdin(ByteBuffer buffer) {
      try {
        stdinBytesChannel.write(buffer);
        processAllStates();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
    @Override
    public void closeStdin(boolean force) {
      stdinClosed = true;
      processAllStates();
    }
    @Override
    public boolean hasPendingWrites() {
      return stdinBytes.size() > 0;
    }
    @Override
    public boolean isRunning() {
      // "Running" means the script still has states left to replay.
      return states.hasNext();
    }
  }
@Override
public LaunchedProcess launchProcess(ProcessExecutorParams params, ProcessListener listener) {
Collection<FakeListeningProcessState> fakeProcessStates = processStatesFunction.apply(params);
long processExecTimeNanos = 0;
for (FakeListeningProcessState state : fakeProcessStates) {
if (state.getType() == FakeListeningProcessState.Type.WAIT) {
processExecTimeNanos += state.getWaitNanos().get();
}
}
FakeLaunchedProcessImpl process =
new FakeLaunchedProcessImpl(
listener, processStatesFunction.apply(params).iterator(), clock, processExecTimeNanos);
listener.onStart(process);
launchedProcesses.add(process);
return process;
}
  /**
   * Advances the fake clock by min(scripted exec time, timeout) and drives the
   * state machine; returns Integer.MIN_VALUE when the process is still "running"
   * (states remain), otherwise the scripted exit code.
   */
  @Override
  public int waitForProcess(LaunchedProcess process, long timeout, TimeUnit timeUnit) {
    FakeLaunchedProcessImpl processImpl = (FakeLaunchedProcessImpl) process;
    clock.advanceTimeNanos(Math.min(processImpl.processExecTimeNanos, timeUnit.toNanos(timeout)));
    processImpl.processAllStates();
    if (processImpl.isRunning()) {
      return Integer.MIN_VALUE;
    } else {
      return processImpl.exitCode;
    }
  }
  /** "Kills" the fake process by discarding all of its remaining scripted states. */
  @Override
  public void destroyProcess(LaunchedProcess process, boolean force) {
    FakeLaunchedProcessImpl processImpl = (FakeLaunchedProcessImpl) process;
    while (processImpl.states.hasNext()) {
      processImpl.states.next();
    }
    processImpl.currentState = null;
  }
// Blocks (in simulated time) until every process launched through this
// executor has finished, by delegating to the no-timeout waitForProcess
// overload for each one in launch order.
public void waitForAllLaunchedProcesses() throws IOException, InterruptedException {
    for (LaunchedProcess launchedProcess : launchedProcesses) {
        waitForProcess(launchedProcess);
    }
}
}
|
|
package com.billybyte.commonlibstometeor.runs;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import org.java_websocket.WebSocket.READYSTATE;
import me.kutrumbos.DdpClient;
import com.billybyte.commonstaticmethods.LoggingUtils;
import com.billybyte.commonstaticmethods.Utils;
import com.billybyte.meteorjava.MeteorListCallback;
import com.billybyte.meteorjava.MeteorListSendReceive;
import com.billybyte.ui.RedirectedConsoleForJavaProcess;
import com.billybyte.ui.RedirectedConsoleForJavaProcess.ConsoleType;
/**
*
* @author bperlman1
*
*/
/**
 * Parses {@code key=value} command-line arguments into strongly typed fields,
 * optionally redirects console output into GUI windows, and optionally wires a
 * {@link DdpClient} observer that restarts this process when the Meteor
 * connection drops.
 *
 * @author bperlman1
 */
public class ArgBundle {
    final public Map<String, String> argPairs;
    final public String userId ;
    final public String meteorUrl;
    final public Integer meteorPort;
    final public String adminEmail ;
    final public String adminPass ;
    final public String account ;
    final public String strategy ;
    final public String dseXmlPath ;
    final public List<String> vmArgs;
    final public Boolean restart;
    final public DdpClient restartDdpClient;
    final public Boolean redirect;
    final public Integer redirectXloc;
    final public Integer redirectLength;
    final public Integer redirectYloc;
    final public Integer redirectWidth;
    final public Long millsBeforeRestart;
    final public String logPropertiesPath;
    final public LoggingUtils logger;
    private static final String DEF_ADMIN_EMAIL = "[email protected]";
    private static final String DEF_ADMIN_PASS = "admin1";
    private static final String DEF_METURL = "localhost";
    private static final Integer DEF_PORT = 3000;
    private static final String DEF_ACC = "a1";
    private static final String DEF_STRAT = "s1";
    private static final String DEF_DSEPATH = "beans_DefaultDse.xml";
    private static final Integer DEF_RED_XLOC = 1;
    private static final Integer DEF_RED_YLOC = 1;
    private static final Integer DEF_RED_LENGTH = 800;
    private static final Integer DEF_RED_WIDTH = 400;
    private static final Long DEF_MILLSBEFORERESTART = 5000L;
    /**
     * Gets all arguments and determines whether the process should run with a
     * redirected console and whether a Meteor restart observer should be wired.
     * Missing keys fall back to the DEF_* constants above.
     *
     * @param args raw {@code key=value} command-line arguments
     */
    public ArgBundle(String[] args){
        this.argPairs =
                Utils.getArgPairsSeparatedByChar(args, "=");
        String uid = argPairs.get("userId");
        this.userId = uid==null ? DEF_ADMIN_EMAIL : uid ;
        String murl = argPairs.get("metUrl");
        this.meteorUrl = murl==null ? DEF_METURL : murl;
        String mp = argPairs.get("metPort");
        // Integer.valueOf/Boolean.valueOf/Long.valueOf replace the deprecated
        // boxed-primitive constructors used by the original code.
        this.meteorPort = mp==null ? DEF_PORT : Integer.valueOf(mp);
        String adem = argPairs.get("adminEmail");
        this.adminEmail = adem==null ? DEF_ADMIN_EMAIL : adem;
        String adp = argPairs.get("adminPass");
        this.adminPass = adp==null ? DEF_ADMIN_PASS : adp;
        this.account = argPairs.get("account")==null ? DEF_ACC : argPairs.get("account");
        this.strategy = argPairs.get("strategy")==null ? DEF_STRAT : argPairs.get("strategy");
        this.dseXmlPath = argPairs.get("dseXmlPath")==null ? DEF_DSEPATH : argPairs.get("dseXmlPath");
        this.restart = argPairs.get("restart")==null ? false : Boolean.valueOf(argPairs.get("restart"));
        RuntimeMXBean runtimeMxBean = ManagementFactory.getRuntimeMXBean();
        this.vmArgs = runtimeMxBean.getInputArguments();
        // Get caller class name for logging utils and for restart.
        final Class<?> clazz = getCallerClassName();
        this.logPropertiesPath = argPairs.get("logPropertiesPath");
        if(this.logPropertiesPath==null){
            this.logger = new LoggingUtils(clazz);
        }else{
            this.logger = new LoggingUtils(this.logPropertiesPath,null);
        }
        this.redirect = argPairs.get("redirect")==null ? false : Boolean.valueOf(argPairs.get("redirect"));
        if(this.redirect){
            this.redirectXloc = argPairs.get("redirectXloc")==null ? DEF_RED_XLOC : Integer.valueOf(argPairs.get("redirectXloc"));
            this.redirectYloc = argPairs.get("redirectYloc")==null ? DEF_RED_YLOC : Integer.valueOf(argPairs.get("redirectYloc"));
            this.redirectLength = argPairs.get("redirectLength")==null ? DEF_RED_LENGTH : Integer.valueOf(argPairs.get("redirectLength"));
            this.redirectWidth = argPairs.get("redirectWidth")==null ? DEF_RED_WIDTH : Integer.valueOf(argPairs.get("redirectWidth"));
            Utils.prtObMess(clazz, "redirecting console output at : " + "x:" + redirectXloc + "y:" + redirectYloc + "len:" + redirectLength + "wid:" + redirectWidth );
            // Redirect console to console gui and error.
            // Divide the length of each console into 2, so that you have
            // separate views for error and console — error on top.
            new RedirectedConsoleForJavaProcess(
                    this.redirectWidth.intValue(),
                    this.redirectLength.intValue()/2,
                    this.redirectXloc,
                    this.redirectYloc,
                    clazz.getCanonicalName(),
                    ConsoleType.SYSTEM_ERR,
                    logger);
            new RedirectedConsoleForJavaProcess(
                    this.redirectWidth.intValue(),
                    this.redirectLength.intValue()/2,
                    this.redirectXloc,
                    this.redirectYloc+this.redirectLength.intValue()/2,
                    clazz.getCanonicalName(),
                    ConsoleType.SYSTEM_OUT,
                    logger);
        }else{
            this.redirectXloc=-1;
            this.redirectYloc=-1;
            this.redirectLength=-1;
            this.redirectWidth=-1;
        }
        Utils.prtObMess(this.getClass(),
                "userId:"+this.userId + "," +
                "meteorUrl:"+this.meteorUrl + "," +
                "meteorPort:"+this.meteorPort + "," +
                "adminEmail:"+this.adminEmail + "," +
                "adminPass:"+this.adminPass + "," +
                "account:"+this.account + "," +
                "strategy:"+this.strategy + "," +
                "dseXmlPath:"+this.dseXmlPath + "," +
                "restart:"+this.restart + "," +
                "redirect:"+this.redirect + "," +
                "redirectXloc:"+this.redirectXloc + "," +
                "redirectYloc:"+this.redirectYloc + "," +
                "redirectLength:"+this.redirectLength + "," +
                "redirectWidth:"+this.redirectWidth
                );
        // BUGFIX: the original logged String[].toString(), which prints the
        // array's identity hash, not its contents; the List's own toString
        // prints the actual VM arguments.
        Utils.prtObMess(this.getClass(), this.vmArgs.toString());
        DdpClient tempDdpClient = null;
        this.millsBeforeRestart = argPairs.get("millsBeforeRestart")==null ? DEF_MILLSBEFORERESTART : Long.valueOf(argPairs.get("millsBeforeRestart"));
        if(restart){
            // Handle explicit connection drop: create first level of restart -
            // if the connection just drops explicitly (not silently).
            DdpRestartProcessObserver restartObserver =
                    new DdpRestartProcessObserver(clazz, vmArgs.toArray(new String[]{}), args,millsBeforeRestart);
            try {
                tempDdpClient = new DdpClient(meteorUrl, meteorPort);
                tempDdpClient.connect();
                try {
                    Thread.sleep(500);
                    if(tempDdpClient.getReadyState()!=READYSTATE.OPEN){
                        Utils.prtObErrMess(clazz, "can't connect DdpRestart to Meteor. Attempting restart in 4 seconds");
                        Thread.sleep(4000);
                        new Thread(new NewProcessLauncher(clazz, vmArgs.toArray(new String[]{}), args)).start();
                        throw Utils.IllState(clazz,"throwing exception to stop processing before restart");
                    }
                } catch (InterruptedException e) {
                    throw Utils.IllState(e);
                }
            } catch (URISyntaxException e) {
                e.printStackTrace();
            }
            if(tempDdpClient!=null){
                tempDdpClient.addObserver(restartObserver);
                Utils.prtObMess(this.getClass(), "adding restart observer");
            }
        }
        // This needs to go here because it's final and can only be set once.
        this.restartDdpClient = tempDdpClient;
    }
    /**
     * Walks the current stack trace to find the first frame that is neither
     * this class nor java.lang.Thread, and loads that class.
     *
     * @return the caller's class, or null if no such frame exists
     * @throws IllegalStateException (via Utils.IllState) if the class cannot be loaded
     */
    private Class<?> getCallerClassName() {
        StackTraceElement[] stElements = Thread.currentThread().getStackTrace();
        // Index 0 is Thread.getStackTrace itself, so start at 1.
        for (int i=1; i<stElements.length; i++) {
            StackTraceElement ste = stElements[i];
            if (!ste.getClassName().equals(this.getClass().getName()) && ste.getClassName().indexOf("java.lang.Thread")!=0) {
                try {
                    return Class.forName(ste.getClassName());
                } catch (ClassNotFoundException e) {
                    throw Utils.IllState(e);
                }
            }
        }
        return null;
    }
}
|
|
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.plugin.ij.refactor.signature;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiCodeFragment;
import com.intellij.psi.PsiElement;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.changeSignature.ParameterInfo;
import com.intellij.refactoring.changeSignature.ParameterInfoImpl;
import com.intellij.refactoring.ui.StringTableCellEditor;
import com.intellij.ui.*;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.util.ui.ListTableModel;
import gw.plugin.ij.editors.LightweightGosuEditor;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Base table model for the "change signature" refactoring dialog: each row is a
 * {@link ParameterTableModelItem} describing one parameter (type, name, default
 * value, "any single variable" flag). Concrete subclasses decide how a row item
 * is built from a {@link ParameterInfoImpl}.
 */
public abstract class ParameterTableModelBase extends ListTableModel<ParameterTableModelItem> implements RowEditableTableModel {
  // PSI elements exposed to subclasses as the code-analysis context for the
  // type / default-value fragments edited in the table.
  protected final PsiElement myTypeContext;
  protected final PsiElement myDefaultValueContext;
  public ParameterTableModelBase(PsiElement typeContext,
                                 PsiElement defaultValueContext,
                                 ColumnInfo... columnInfos) {
    super(columnInfos);
    myTypeContext = typeContext;
    myDefaultValueContext = defaultValueContext;
  }
  /** Creates the row item for a parameter; {@code null} means a brand-new parameter. */
  protected abstract ParameterTableModelItem createRowItem(@Nullable ParameterInfoImpl parameterInfo);
  /** Appends an empty row representing a newly added parameter. */
  public void addRow() {
    addRow(createRowItem(null));
  }
  /** Replaces the entire table content with rows built from {@code parameterInfos}. */
  public void setParameterInfos(List<ParameterInfoImpl> parameterInfos) {
    List<ParameterTableModelItem> items = new ArrayList<ParameterTableModelItem>(parameterInfos.size());
    for (ParameterInfoImpl parameterInfo : parameterInfos) {
      items.add(createRowItem(parameterInfo));
    }
    setItems(items);
  }
  /** Writes a cell value without firing an update event (no signature refresh). */
  public void setValueAtWithoutUpdate(Object aValue, int rowIndex, int columnIndex) {
    super.setValueAt(aValue, rowIndex, columnIndex);
  }
  @Override
  public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
    super.setValueAt(aValue, rowIndex, columnIndex);
    fireTableCellUpdated(rowIndex, columnIndex); // to update signature
  }
  /**
   * Column base that lazily creates and caches exactly one renderer and one
   * editor per column instance. The cached renderer dims non-editable cells
   * and removes the inner padding of colored components.
   */
  protected static abstract class ColumnInfoBase<Aspect>
    extends ColumnInfo<ParameterTableModelItem, Aspect> {
    private TableCellRenderer myRenderer;
    private TableCellEditor myEditor;
    public ColumnInfoBase(String name) {
      super(name);
    }
    @Override
    public final TableCellEditor getEditor(ParameterTableModelItem o) {
      // Lazily created once; the same editor instance is reused for all rows.
      if (myEditor == null) {
        myEditor = doCreateEditor(o);
      }
      return myEditor;
    }
    @Override
    public final TableCellRenderer getRenderer(ParameterTableModelItem item) {
      if (myRenderer == null) {
        // Wrap the subclass renderer to apply shared presentation tweaks.
        final TableCellRenderer original = doCreateRenderer(item);
        myRenderer = new TableCellRenderer() {
          public Component getTableCellRendererComponent(JTable table,
                                                         Object value,
                                                         boolean isSelected,
                                                         boolean hasFocus,
                                                         int row,
                                                         int column) {
            Component component = original.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
            // Visually dim cells the user cannot edit.
            if (!table.isCellEditable(row, table.convertColumnIndexToModel(column))) {
              Color bg = table.getBackground().darker();
              component.setBackground(ColorUtil.toAlpha(bg, 230));
            }
            if (component instanceof SimpleColoredComponent) {
              ((SimpleColoredComponent)component).setIpad(new Insets(0, 0, 0, 0));
            }
            return component;
          }
        };
      }
      return myRenderer;
    }
    /** Subclass hook: create the raw (unwrapped) renderer for this column. */
    protected abstract TableCellRenderer doCreateRenderer(ParameterTableModelItem item);
    /** Subclass hook: create the editor for this column. */
    protected abstract TableCellEditor doCreateEditor(ParameterTableModelItem item);
  }
  /** Column showing the parameter's type as an embedded lightweight Gosu editor. */
  protected static class TypeColumn extends ColumnInfoBase<LightweightGosuEditor> {
    protected final Project myProject;
    private final FileType myFileType;
    public TypeColumn(Project project, FileType fileType) {
      this(project, fileType, RefactoringBundle.message("column.name.type"));
    }
    public TypeColumn(Project project, FileType fileType, String title) {
      super(title);
      myProject = project;
      myFileType = fileType;
    }
    @Override
    public LightweightGosuEditor valueOf(ParameterTableModelItem item) {
      return item.typeCodeFragment;
    }
    @Override
    public boolean isCellEditable(ParameterTableModelItem pParameterTableModelItemBase) {
      // The type of every parameter (old or new) may always be changed.
      return true;
    }
    public TableCellRenderer doCreateRenderer(ParameterTableModelItem pParameterTableModelItemBase) {
      return new LightEditorCellRenderer(myProject, myFileType);
    }
    public TableCellEditor doCreateEditor(ParameterTableModelItem o) {
      return new LightEditorCellEditor(myProject, myFileType);
    }
  }
  /** Column showing the parameter's name as plain editable text. */
  protected static class NameColumn extends ColumnInfoBase<String> {
    private final Project myProject;
    public NameColumn(Project project) {
      this(project, RefactoringBundle.message("column.name.name"));
    }
    public NameColumn(Project project, String title) {
      super(title);
      myProject = project;
    }
    @Override
    public String valueOf(ParameterTableModelItem item) {
      return item.parameter.getName();
    }
    @Override
    public void setValue(ParameterTableModelItem item, String value) {
      item.parameter.setName(value);
    }
    @Override
    public boolean isCellEditable(ParameterTableModelItem pParameterTableModelItemBase) {
      return true;
    }
    public TableCellRenderer doCreateRenderer(ParameterTableModelItem item) {
      return new ColoredTableCellRenderer() {
        public void customizeCellRenderer(JTable table, Object value,
                                          boolean isSelected, boolean hasFocus, int row, int column) {
          if (value == null) return;
          // NOTE(review): Font.PLAIN (0) coincides with
          // SimpleTextAttributes.STYLE_PLAIN here — confirm this is the
          // intended constant rather than a font-style/text-style mixup.
          append((String)value, new SimpleTextAttributes(Font.PLAIN, null));
        }
      };
    }
    public TableCellEditor doCreateEditor(ParameterTableModelItem o) {
      return new StringTableCellEditor(myProject);
    }
  }
  /** Column for the default value supplied at call sites of a newly added parameter. */
  protected static class DefaultValueColumn extends ColumnInfoBase<LightweightGosuEditor> {
    private final Project myProject;
    private final FileType myFileType;
    public DefaultValueColumn(Project project, FileType fileType) {
      this(project, fileType, RefactoringBundle.message("column.name.default.value"));
    }
    public DefaultValueColumn(Project project, FileType fileType, String title) {
      super(title);
      myProject = project;
      myFileType = fileType;
    }
    @Override
    public boolean isCellEditable(ParameterTableModelItem item) {
      // Only newly added parameters (oldIndex == -1) take a default value.
      return item.parameter.getOldIndex() == -1;
    }
    @Override
    public LightweightGosuEditor valueOf(ParameterTableModelItem item) {
      return item.defaultValueCodeFragment;
    }
    public TableCellRenderer doCreateRenderer(ParameterTableModelItem item) {
      return new LightEditorCellRenderer(myProject, myFileType);
    }
    public TableCellEditor doCreateEditor(ParameterTableModelItem item) {
      return new LightEditorCellEditor(myProject, myFileType);
    }
  }
  /** Checkbox column: use any applicable variable at call sites instead of a fixed default. */
  protected static class AnyVarColumn extends ColumnInfoBase<Boolean> {
    public AnyVarColumn() {
      super(RefactoringBundle.message("column.name.any.var"));
    }
    @Override
    public boolean isCellEditable(ParameterTableModelItem item) {
      // Only meaningful for newly added parameters.
      return item.parameter.getOldIndex() == -1;
    }
    @Override
    public Boolean valueOf(ParameterTableModelItem item) {
      return item.parameter.isUseAnySingleVariable();
    }
    @Override
    public void setValue(ParameterTableModelItem item, Boolean value) {
      item.parameter.setUseAnySingleVariable(value);
    }
    public TableCellRenderer doCreateRenderer(ParameterTableModelItem item) {
      return new BooleanTableCellRenderer();
    }
    public TableCellEditor doCreateEditor(ParameterTableModelItem item) {
      return new BooleanTableCellEditor(false);
    }
    @Override
    public int getWidth(JTable table) {
      // Wide enough for either the header text (plus padding) or a checkbox.
      final int headerWidth = table.getFontMetrics(table.getFont()).stringWidth(getName()) + 8;
      return Math.max(new JCheckBox().getPreferredSize().width, headerWidth);
    }
  }
}
|
|
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
/**
* Propagated Route
*/
/**
 * Propagated Route.
 * <p>
 * Simple mutable bean describing a route propagated to a route table:
 * destination CIDR block, gateway, status, and source. The fluent
 * {@code withX} setters return {@code this} so calls can be chained.
 */
public class PropagatedRoute {
    private String destinationCidrBlock;
    private String gatewayId;
    private String status;
    private String sourceId;

    /** @return the destination CIDR block, or {@code null} if unset. */
    public String getDestinationCidrBlock() {
        return destinationCidrBlock;
    }

    /** @param destinationCidrBlock the new destination CIDR block. */
    public void setDestinationCidrBlock(String destinationCidrBlock) {
        this.destinationCidrBlock = destinationCidrBlock;
    }

    /**
     * Fluent variant of {@link #setDestinationCidrBlock}.
     *
     * @param destinationCidrBlock the new destination CIDR block.
     * @return this object, for call chaining.
     */
    public PropagatedRoute withDestinationCidrBlock(String destinationCidrBlock) {
        setDestinationCidrBlock(destinationCidrBlock);
        return this;
    }

    /** @return the gateway id, or {@code null} if unset. */
    public String getGatewayId() {
        return gatewayId;
    }

    /** @param gatewayId the new gateway id. */
    public void setGatewayId(String gatewayId) {
        this.gatewayId = gatewayId;
    }

    /**
     * Fluent variant of {@link #setGatewayId}.
     *
     * @param gatewayId the new gateway id.
     * @return this object, for call chaining.
     */
    public PropagatedRoute withGatewayId(String gatewayId) {
        setGatewayId(gatewayId);
        return this;
    }

    /** @return the status, or {@code null} if unset. */
    public String getStatus() {
        return status;
    }

    /** @param status the new status. */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * Fluent variant of {@link #setStatus}.
     *
     * @param status the new status.
     * @return this object, for call chaining.
     */
    public PropagatedRoute withStatus(String status) {
        setStatus(status);
        return this;
    }

    /** @return the source id, or {@code null} if unset. */
    public String getSourceId() {
        return sourceId;
    }

    /** @param sourceId the new source id. */
    public void setSourceId(String sourceId) {
        this.sourceId = sourceId;
    }

    /**
     * Fluent variant of {@link #setSourceId}.
     *
     * @param sourceId the new source id.
     * @return this object, for call chaining.
     */
    public PropagatedRoute withSourceId(String sourceId) {
        setSourceId(sourceId);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Unset (null) fields are omitted; the trailing ", " before
     * the closing brace is preserved for compatibility with the original
     * generated formatting.
     *
     * @return a string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        appendIfSet(sb, "DestinationCidrBlock", destinationCidrBlock);
        appendIfSet(sb, "GatewayId", gatewayId);
        appendIfSet(sb, "Status", status);
        appendIfSet(sb, "SourceId", sourceId);
        return sb.append("}").toString();
    }

    /** Appends {@code "Label: value, "} to {@code sb} when {@code value} is non-null. */
    private static void appendIfSet(StringBuilder sb, String label, String value) {
        if (value != null) {
            sb.append(label + ": " + value + ", ");
        }
    }

    @Override
    public int hashCode() {
        // java.util.Objects.hash uses the same 31-based accumulation (seeded
        // with 1, null -> 0) as the original hand-rolled implementation, so
        // hash values are unchanged.
        return java.util.Objects.hash(
                getDestinationCidrBlock(), getGatewayId(), getStatus(), getSourceId());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof PropagatedRoute)) {
            return false;
        }
        PropagatedRoute other = (PropagatedRoute) obj;
        // Objects.equals treats two nulls as equal and one null as unequal,
        // matching the original XOR/null-check logic field by field.
        return java.util.Objects.equals(getDestinationCidrBlock(), other.getDestinationCidrBlock())
                && java.util.Objects.equals(getGatewayId(), other.getGatewayId())
                && java.util.Objects.equals(getStatus(), other.getStatus())
                && java.util.Objects.equals(getSourceId(), other.getSourceId());
    }
}
|
|
package com.darsh.multipleimageselect.activities;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.database.ContentObserver;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.Process;
import android.provider.MediaStore;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import android.util.DisplayMetrics;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.GridView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.darsh.multipleimageselect.R;
import com.darsh.multipleimageselect.adapters.CustomAlbumSelectAdapter;
import com.darsh.multipleimageselect.helpers.Constants;
import com.darsh.multipleimageselect.models.Album;
import java.io.File;
import java.util.ArrayList;
import java.util.HashSet;
/**
* Created by Darshan on 4/14/2015.
*/
/**
 * Shows the device's image albums (buckets) in a grid and forwards the user's
 * selection to {@link ImageSelectActivity}. Albums are loaded on a background
 * thread, reloaded whenever the media store changes, and marshalled to the UI
 * thread through a {@link Handler}.
 */
public class AlbumSelectActivity extends AppCompatActivity {
    private final String TAG = AlbumSelectActivity.class.getName();
    // Backing data for the grid; written by the loader thread, read on the UI thread.
    private ArrayList<Album> albums;
    private TextView requestPermission;
    private Button grantPermission;
    private final String[] requiredPermissions = new String[]{ Manifest.permission.READ_EXTERNAL_STORAGE };
    private TextView errorDisplay;
    private ProgressBar progressBar;
    private GridView gridView;
    private CustomAlbumSelectAdapter adapter;
    private ActionBar actionBar;
    // Triggers a reload on media-store changes; registered in onStart, unregistered in onStop.
    private ContentObserver observer;
    // Marshals loader results to the UI thread; non-null only between onStart and onStop.
    private Handler handler;
    // Current album-loader thread, if any.
    private Thread thread;
    // Columns fetched per image row: album (bucket) name and file path.
    private final String[] projection = new String[]{ MediaStore.Images.Media.BUCKET_DISPLAY_NAME, MediaStore.Images.Media.DATA };
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_album_select);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        actionBar = getSupportActionBar();
        if (actionBar != null) {
            actionBar.setDisplayHomeAsUpEnabled(true);
            actionBar.setHomeAsUpIndicator(R.drawable.ic_arrow_back);
            actionBar.setDisplayShowTitleEnabled(true);
            actionBar.setTitle(R.string.album_view);
        }
        Intent intent = getIntent();
        if (intent == null) {
            // BUGFIX: finish() does not stop execution of the current method,
            // so the original code fell through and dereferenced the null
            // intent on the next line. Return explicitly.
            finish();
            return;
        }
        Constants.limit = intent.getIntExtra(Constants.INTENT_EXTRA_LIMIT, Constants.DEFAULT_LIMIT);
        errorDisplay = (TextView) findViewById(R.id.text_view_error);
        errorDisplay.setVisibility(View.INVISIBLE);
        requestPermission = (TextView) findViewById(R.id.text_view_request_permission);
        grantPermission = (Button) findViewById(R.id.button_grant_permission);
        grantPermission.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                requestPermission();
            }
        });
        hidePermissionHelperUI();
        progressBar = (ProgressBar) findViewById(R.id.progress_bar_album_select);
        gridView = (GridView) findViewById(R.id.grid_view_album_select);
        gridView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Hand the chosen album name to the image-selection screen.
                Intent intent = new Intent(getApplicationContext(), ImageSelectActivity.class);
                intent.putExtra(Constants.INTENT_EXTRA_ALBUM, albums.get(position).name);
                startActivityForResult(intent, Constants.REQUEST_CODE);
            }
        });
    }
    @Override
    protected void onStart() {
        super.onStart();
        // UI-thread dispatcher for loader and permission events.
        handler = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                switch (msg.what) {
                    case Constants.PERMISSION_GRANTED: {
                        hidePermissionHelperUI();
                        loadAlbums();
                        break;
                    }
                    case Constants.PERMISSION_DENIED: {
                        showPermissionHelperUI();
                        progressBar.setVisibility(View.INVISIBLE);
                        gridView.setVisibility(View.INVISIBLE);
                        break;
                    }
                    case Constants.FETCH_STARTED: {
                        progressBar.setVisibility(View.VISIBLE);
                        gridView.setVisibility(View.INVISIBLE);
                        break;
                    }
                    case Constants.FETCH_COMPLETED: {
                        if (adapter == null) {
                            // First load: attach the adapter and size the grid.
                            adapter = new CustomAlbumSelectAdapter(getApplicationContext(), albums);
                            gridView.setAdapter(adapter);
                            progressBar.setVisibility(View.INVISIBLE);
                            gridView.setVisibility(View.VISIBLE);
                            orientationBasedUI(getResources().getConfiguration().orientation);
                        } else {
                            adapter.notifyDataSetChanged();
                        }
                        break;
                    }
                    case Constants.ERROR: {
                        progressBar.setVisibility(View.INVISIBLE);
                        errorDisplay.setVisibility(View.VISIBLE);
                        break;
                    }
                    default: {
                        super.handleMessage(msg);
                    }
                }
            }
        };
        observer = new ContentObserver(handler) {
            @Override
            public void onChange(boolean selfChange, Uri uri) {
                loadAlbums();
            }
        };
        getContentResolver().registerContentObserver(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, false, observer);
        checkIfPermissionGranted();
    }
    /** Requests storage permission if missing, otherwise posts PERMISSION_GRANTED. */
    private void checkIfPermissionGranted() {
        if (ContextCompat.checkSelfPermission(AlbumSelectActivity.this,
                Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            requestPermission();
            return;
        }
        Message message = handler.obtainMessage();
        message.what = Constants.PERMISSION_GRANTED;
        message.sendToTarget();
    }
    private void requestPermission() {
        ActivityCompat.requestPermissions(AlbumSelectActivity.this,
                requiredPermissions,
                Constants.PERMISSION_REQUEST_READ_EXTERNAL_STORAGE);
    }
    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        if (requestCode == Constants.PERMISSION_REQUEST_READ_EXTERNAL_STORAGE) {
            Message message = handler.obtainMessage();
            message.what = grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED ? Constants.PERMISSION_GRANTED : Constants.PERMISSION_DENIED;
            message.sendToTarget();
        }
    }
    private void hidePermissionHelperUI() {
        requestPermission.setVisibility(View.INVISIBLE);
        grantPermission.setVisibility(View.INVISIBLE);
    }
    private void showPermissionHelperUI() {
        requestPermission.setVisibility(View.VISIBLE);
        grantPermission.setVisibility(View.VISIBLE);
    }
    @Override
    protected void onStop() {
        super.onStop();
        // Order matters: abortLoading() joins the loader thread before the
        // handler is cleared, so the loader never posts to a null handler.
        abortLoading();
        getContentResolver().unregisterContentObserver(observer);
        observer = null;
        if (handler != null) {
            handler.removeCallbacksAndMessages(null);
            handler = null;
        }
    }
    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (actionBar != null) {
            actionBar.setHomeAsUpIndicator(null);
        }
        albums = null;
        if (adapter != null) {
            adapter.releaseResources();
        }
        gridView.setOnItemClickListener(null);
    }
    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        orientationBasedUI(newConfig.orientation);
    }
    /** Picks grid column count and cell size for the given orientation. */
    private void orientationBasedUI(int orientation) {
        final WindowManager windowManager = (WindowManager) getApplicationContext().getSystemService(Context.WINDOW_SERVICE);
        final DisplayMetrics metrics = new DisplayMetrics();
        windowManager.getDefaultDisplay().getMetrics(metrics);
        if (adapter != null) {
            // 2 columns in portrait, 4 in landscape.
            int size = orientation == Configuration.ORIENTATION_PORTRAIT ? metrics.widthPixels / 2 : metrics.widthPixels / 4;
            adapter.setLayoutParams(size);
        }
        gridView.setNumColumns(orientation == Configuration.ORIENTATION_PORTRAIT ? 2 : 4);
    }
    @Override
    public void onBackPressed() {
        super.onBackPressed();
        setResult(RESULT_CANCELED);
        finish();
    }
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == Constants.REQUEST_CODE && resultCode == RESULT_OK && data != null) {
            // Propagate the image-selection result straight to our caller.
            setResult(RESULT_OK, data);
            finish();
        }
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home: {
                onBackPressed();
                return true;
            }
            default: {
                return false;
            }
        }
    }
    /** Cancels any in-flight load and starts a fresh loader thread. */
    private void loadAlbums() {
        abortLoading();
        AlbumLoaderRunnable runnable = new AlbumLoaderRunnable();
        thread = new Thread(runnable);
        thread.start();
    }
    /** Interrupts the loader thread, if running, and waits for it to exit. */
    private void abortLoading() {
        if (thread == null) {
            return;
        }
        if (thread.isAlive()) {
            thread.interrupt();
            try {
                thread.join();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
    /**
     * Background job: queries the media store for all images, de-duplicates
     * them into one Album per bucket (newest image first), and posts the
     * result via the handler.
     */
    private class AlbumLoaderRunnable implements Runnable {
        @Override
        public void run() {
            android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
            Message message;
            if (adapter == null) {
                // First load: show the spinner.
                message = handler.obtainMessage();
                message.what = Constants.FETCH_STARTED;
                message.sendToTarget();
            }
            if (Thread.interrupted()) {
                return;
            }
            Cursor cursor = getApplicationContext().getContentResolver()
                    .query(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, projection,
                            null, null, MediaStore.Images.Media.DATE_ADDED);
            if (cursor == null) {
                message = handler.obtainMessage();
                message.what = Constants.ERROR;
                message.sendToTarget();
                return;
            }
            ArrayList<Album> temp = new ArrayList<>(cursor.getCount());
            HashSet<String> albumSet = new HashSet<>();
            // BUGFIX: the original returned from inside the loop on interrupt
            // without closing the cursor, leaking it on every aborted load;
            // try/finally guarantees the cursor is closed on every path.
            try {
                File file;
                // Iterate newest-first (DATE_ADDED ascending, walked backwards).
                if (cursor.moveToLast()) {
                    do {
                        if (Thread.interrupted()) {
                            return;
                        }
                        String album = cursor.getString(cursor.getColumnIndex(projection[0]));
                        String image = cursor.getString(cursor.getColumnIndex(projection[1]));
                        /*
                        It may happen that some image file paths are still present in cache,
                        though image file does not exist. These last as long as media
                        scanner is not run again. To avoid get such image file paths, check
                        if image file exists.
                        */
                        file = new File(image);
                        if (file.exists() && !albumSet.contains(album)) {
                            temp.add(new Album(album, image));
                            albumSet.add(album);
                        }
                    } while (cursor.moveToPrevious());
                }
            } finally {
                cursor.close();
            }
            if (albums == null) {
                albums = new ArrayList<>();
            }
            albums.clear();
            albums.addAll(temp);
            message = handler.obtainMessage();
            message.what = Constants.FETCH_COMPLETED;
            message.sendToTarget();
            // Clear any pending interrupt so the flag does not leak.
            Thread.interrupted();
        }
    }
}
|
|
package dagger.extension.example.model.today;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.List;
import dagger.extension.example.model.Weather;
public class TodayWeather implements Parcelable, Weather
{
/**
 * Parcelable factory. Reads fields back in the exact order writeToParcel
 * writes them.
 */
public final static Creator<TodayWeather> CREATOR = new Creator<TodayWeather>()
{
    @SuppressWarnings({
        "unchecked"
    })
    public TodayWeather createFromParcel(Parcel in)
    {
        TodayWeather instance = new TodayWeather();
        instance.coord = ((Coord) in.readValue((Coord.class.getClassLoader())));
        // BUGFIX: 'weather' is declared with a null initial value, and
        // Parcel.readList requires a non-null destination list; allocate it
        // before reading to avoid a NullPointerException on unparcel.
        instance.weather = new java.util.ArrayList<dagger.extension.example.model.today.Weather>();
        in.readList(instance.weather, (dagger.extension.example.model.today.Weather.class.getClassLoader()));
        instance.base = ((String) in.readValue((String.class.getClassLoader())));
        instance.main = ((Main) in.readValue((Main.class.getClassLoader())));
        instance.wind = ((Wind) in.readValue((Wind.class.getClassLoader())));
        instance.clouds = ((Clouds) in.readValue((Clouds.class.getClassLoader())));
        instance.dt = ((Integer) in.readValue((Integer.class.getClassLoader())));
        instance.sys = ((Sys) in.readValue((Sys.class.getClassLoader())));
        instance.id = ((Integer) in.readValue((Integer.class.getClassLoader())));
        instance.name = ((String) in.readValue((String.class.getClassLoader())));
        instance.cod = ((Integer) in.readValue((Integer.class.getClassLoader())));
        return instance;
    }
    public TodayWeather[] newArray(int size)
    {
        return (new TodayWeather[size]);
    }
};
@SerializedName("coord")
@Expose
private Coord coord;
@SerializedName("weather")
@Expose
private List<dagger.extension.example.model.today.Weather> weather = null;
@SerializedName("base")
@Expose
private String base;
@SerializedName("main")
@Expose
private Main main;
@SerializedName("wind")
@Expose
private Wind wind;
@SerializedName("clouds")
@Expose
private Clouds clouds;
@SerializedName("dt")
@Expose
private Integer dt;
@SerializedName("sys")
@Expose
private Sys sys;
@SerializedName("id")
@Expose
private Integer id;
@SerializedName("name")
@Expose
private String name;
@SerializedName("cod")
@Expose
private Integer cod;
public Coord getCoord()
{
return coord;
}
public void setCoord(Coord coord)
{
this.coord = coord;
}
public List<dagger.extension.example.model.today.Weather> getWeather()
{
return weather;
}
public void setWeather(List<dagger.extension.example.model.today.Weather> weather)
{
this.weather = weather;
}
public String getBase()
{
return base;
}
public void setBase(String base)
{
this.base = base;
}
public Main getMain()
{
return main;
}
public void setMain(Main main)
{
this.main = main;
}
public Wind getWind()
{
return wind;
}
public void setWind(Wind wind)
{
this.wind = wind;
}
public Clouds getClouds()
{
return clouds;
}
public void setClouds(Clouds clouds)
{
this.clouds = clouds;
}
public Integer getDt()
{
return dt;
}
public void setDt(Integer dt)
{
this.dt = dt;
}
public Sys getSys()
{
return sys;
}
public void setSys(Sys sys)
{
this.sys = sys;
}
public Integer getId()
{
return id;
}
public void setId(Integer id)
{
this.id = id;
}
public String getName()
{
return name;
}
public void setName(String name)
{
this.name = name;
}
public Integer getCod()
{
return cod;
}
public void setCod(Integer cod)
{
this.cod = cod;
}
public void writeToParcel(Parcel dest, int flags)
{
dest.writeValue(coord);
dest.writeList(weather);
dest.writeValue(base);
dest.writeValue(main);
dest.writeValue(wind);
dest.writeValue(clouds);
dest.writeValue(dt);
dest.writeValue(sys);
dest.writeValue(id);
dest.writeValue(name);
dest.writeValue(cod);
}
public int describeContents()
{
return 0;
}
@Override
public String temperature()
{
return String.valueOf(main.getTemp());
}
@Override
public String humidity()
{
return String.valueOf(main.getHumidity());
}
@Override
public String icon()
{
String iconId = weather.get(0).getIcon();
return "http://openweathermap.org/img/w/" + iconId + ".png";
}
@Override
public String city()
{
return name;
}
@Override
public String description()
{
return weather.get(0).getDescription();
}
}
|
|
/**
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hystrix.contrib.codahalemetricspublisher;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import com.netflix.hystrix.HystrixCircuitBreaker;
import com.netflix.hystrix.HystrixCommandGroupKey;
import com.netflix.hystrix.HystrixCommandKey;
import com.netflix.hystrix.HystrixCommandMetrics;
import com.netflix.hystrix.HystrixCommandProperties;
import com.netflix.hystrix.strategy.metrics.HystrixMetricsPublisherCommand;
import com.netflix.hystrix.util.HystrixRollingNumberEvent;
/**
 * Implementation of {@link HystrixMetricsPublisherCommand} using Coda Hale Metrics (https://github.com/codahale/metrics)
 * <p>
 * {@link #initialize()} registers one {@link Gauge} per published statistic under the
 * name {@code <commandGroup>.<commandKey>.<statistic>}. Each gauge reads a live value
 * from the supplied {@link HystrixCommandMetrics}, {@link HystrixCircuitBreaker} or
 * {@link HystrixCommandProperties} every time it is polled.
 */
public class HystrixCodaHaleMetricsPublisherCommand implements HystrixMetricsPublisherCommand {
    private final HystrixCommandKey key;
    private final HystrixCommandGroupKey commandGroupKey;
    private final HystrixCommandMetrics metrics;
    private final HystrixCircuitBreaker circuitBreaker;
    private final HystrixCommandProperties properties;
    private final MetricRegistry metricRegistry;
    // Cached name components used by createMetricName(): "<group>.<command>.<name>".
    private final String metricGroup;
    private final String metricType;

    public HystrixCodaHaleMetricsPublisherCommand(HystrixCommandKey commandKey, HystrixCommandGroupKey commandGroupKey, HystrixCommandMetrics metrics, HystrixCircuitBreaker circuitBreaker, HystrixCommandProperties properties, MetricRegistry metricRegistry) {
        this.key = commandKey;
        this.commandGroupKey = commandGroupKey;
        this.metrics = metrics;
        this.circuitBreaker = circuitBreaker;
        this.properties = properties;
        this.metricRegistry = metricRegistry;
        this.metricGroup = commandGroupKey.name();
        this.metricType = key.name();
    }

    @Override
    public void initialize() {
        // circuit breaker state
        metricRegistry.register(createMetricName("isCircuitBreakerOpen"), new Gauge<Boolean>() {
            @Override
            public Boolean getValue() {
                return circuitBreaker.isOpen();
            }
        });
        // allow monitor to know exactly at what point in time these stats are for so they can be plotted accurately
        metricRegistry.register(createMetricName("currentTime"), new Gauge<Long>() {
            @Override
            public Long getValue() {
                return System.currentTimeMillis();
            }
        });
        // cumulative counts
        createCumulativeCountForEvent("countBadRequests", HystrixRollingNumberEvent.BAD_REQUEST);
        createCumulativeCountForEvent("countCollapsedRequests", HystrixRollingNumberEvent.COLLAPSED);
        createCumulativeCountForEvent("countEmit", HystrixRollingNumberEvent.EMIT);
        createCumulativeCountForEvent("countExceptionsThrown", HystrixRollingNumberEvent.EXCEPTION_THROWN);
        createCumulativeCountForEvent("countFailure", HystrixRollingNumberEvent.FAILURE);
        createCumulativeCountForEvent("countFallbackEmit", HystrixRollingNumberEvent.FALLBACK_EMIT);
        createCumulativeCountForEvent("countFallbackFailure", HystrixRollingNumberEvent.FALLBACK_FAILURE);
        createCumulativeCountForEvent("countFallbackMissing", HystrixRollingNumberEvent.FALLBACK_MISSING);
        createCumulativeCountForEvent("countFallbackRejection", HystrixRollingNumberEvent.FALLBACK_REJECTION);
        createCumulativeCountForEvent("countFallbackSuccess", HystrixRollingNumberEvent.FALLBACK_SUCCESS);
        createCumulativeCountForEvent("countResponsesFromCache", HystrixRollingNumberEvent.RESPONSE_FROM_CACHE);
        createCumulativeCountForEvent("countSemaphoreRejected", HystrixRollingNumberEvent.SEMAPHORE_REJECTED);
        createCumulativeCountForEvent("countShortCircuited", HystrixRollingNumberEvent.SHORT_CIRCUITED);
        createCumulativeCountForEvent("countSuccess", HystrixRollingNumberEvent.SUCCESS);
        createCumulativeCountForEvent("countThreadPoolRejected", HystrixRollingNumberEvent.THREAD_POOL_REJECTED);
        createCumulativeCountForEvent("countTimeout", HystrixRollingNumberEvent.TIMEOUT);
        // rolling counts
        createRollingCountForEvent("rollingCountBadRequests", HystrixRollingNumberEvent.BAD_REQUEST);
        createRollingCountForEvent("rollingCountCollapsedRequests", HystrixRollingNumberEvent.COLLAPSED);
        createRollingCountForEvent("rollingCountEmit", HystrixRollingNumberEvent.EMIT);
        createRollingCountForEvent("rollingCountExceptionsThrown", HystrixRollingNumberEvent.EXCEPTION_THROWN);
        createRollingCountForEvent("rollingCountFailure", HystrixRollingNumberEvent.FAILURE);
        createRollingCountForEvent("rollingCountFallbackEmit", HystrixRollingNumberEvent.FALLBACK_EMIT);
        createRollingCountForEvent("rollingCountFallbackFailure", HystrixRollingNumberEvent.FALLBACK_FAILURE);
        createRollingCountForEvent("rollingCountFallbackMissing", HystrixRollingNumberEvent.FALLBACK_MISSING);
        createRollingCountForEvent("rollingCountFallbackRejection", HystrixRollingNumberEvent.FALLBACK_REJECTION);
        createRollingCountForEvent("rollingCountFallbackSuccess", HystrixRollingNumberEvent.FALLBACK_SUCCESS);
        createRollingCountForEvent("rollingCountResponsesFromCache", HystrixRollingNumberEvent.RESPONSE_FROM_CACHE);
        createRollingCountForEvent("rollingCountSemaphoreRejected", HystrixRollingNumberEvent.SEMAPHORE_REJECTED);
        createRollingCountForEvent("rollingCountShortCircuited", HystrixRollingNumberEvent.SHORT_CIRCUITED);
        createRollingCountForEvent("rollingCountSuccess", HystrixRollingNumberEvent.SUCCESS);
        createRollingCountForEvent("rollingCountThreadPoolRejected", HystrixRollingNumberEvent.THREAD_POOL_REJECTED);
        createRollingCountForEvent("rollingCountTimeout", HystrixRollingNumberEvent.TIMEOUT);
        // the number of executionSemaphorePermits in use right now
        metricRegistry.register(createMetricName("executionSemaphorePermitsInUse"), new Gauge<Integer>() {
            @Override
            public Integer getValue() {
                return metrics.getCurrentConcurrentExecutionCount();
            }
        });
        // error percentage derived from current metrics
        metricRegistry.register(createMetricName("errorPercentage"), new Gauge<Integer>() {
            @Override
            public Integer getValue() {
                return metrics.getHealthCounts().getErrorPercentage();
            }
        });
        // latency metrics: mean plus selected percentiles, for both the
        // execution-time and total-time distributions
        metricRegistry.register(createMetricName("latencyExecute_mean"), new Gauge<Integer>() {
            @Override
            public Integer getValue() {
                return metrics.getExecutionTimeMean();
            }
        });
        createExecutionLatencyPercentileGauge("latencyExecute_percentile_5", 5);
        createExecutionLatencyPercentileGauge("latencyExecute_percentile_25", 25);
        createExecutionLatencyPercentileGauge("latencyExecute_percentile_50", 50);
        createExecutionLatencyPercentileGauge("latencyExecute_percentile_75", 75);
        createExecutionLatencyPercentileGauge("latencyExecute_percentile_90", 90);
        createExecutionLatencyPercentileGauge("latencyExecute_percentile_99", 99);
        createExecutionLatencyPercentileGauge("latencyExecute_percentile_995", 99.5);
        metricRegistry.register(createMetricName("latencyTotal_mean"), new Gauge<Integer>() {
            @Override
            public Integer getValue() {
                return metrics.getTotalTimeMean();
            }
        });
        createTotalLatencyPercentileGauge("latencyTotal_percentile_5", 5);
        createTotalLatencyPercentileGauge("latencyTotal_percentile_25", 25);
        createTotalLatencyPercentileGauge("latencyTotal_percentile_50", 50);
        createTotalLatencyPercentileGauge("latencyTotal_percentile_75", 75);
        createTotalLatencyPercentileGauge("latencyTotal_percentile_90", 90);
        createTotalLatencyPercentileGauge("latencyTotal_percentile_99", 99);
        createTotalLatencyPercentileGauge("latencyTotal_percentile_995", 99.5);
        // group
        metricRegistry.register(createMetricName("commandGroup"), new Gauge<String>() {
            @Override
            public String getValue() {
                return commandGroupKey != null ? commandGroupKey.name() : null;
            }
        });
        // properties (so the values can be inspected and monitored)
        metricRegistry.register(createMetricName("propertyValue_rollingStatisticalWindowInMilliseconds"), new Gauge<Number>() {
            @Override
            public Number getValue() {
                return properties.metricsRollingStatisticalWindowInMilliseconds().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_circuitBreakerRequestVolumeThreshold"), new Gauge<Number>() {
            @Override
            public Number getValue() {
                return properties.circuitBreakerRequestVolumeThreshold().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_circuitBreakerSleepWindowInMilliseconds"), new Gauge<Number>() {
            @Override
            public Number getValue() {
                return properties.circuitBreakerSleepWindowInMilliseconds().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_circuitBreakerErrorThresholdPercentage"), new Gauge<Number>() {
            @Override
            public Number getValue() {
                return properties.circuitBreakerErrorThresholdPercentage().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_circuitBreakerForceOpen"), new Gauge<Boolean>() {
            @Override
            public Boolean getValue() {
                return properties.circuitBreakerForceOpen().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_circuitBreakerForceClosed"), new Gauge<Boolean>() {
            @Override
            public Boolean getValue() {
                return properties.circuitBreakerForceClosed().get();
            }
        });
        // legacy metric name kept for backwards compatibility: it intentionally
        // reads the same property as "propertyValue_executionTimeoutInMilliseconds" below
        metricRegistry.register(createMetricName("propertyValue_executionIsolationThreadTimeoutInMilliseconds"), new Gauge<Number>() {
            @Override
            public Number getValue() {
                return properties.executionTimeoutInMilliseconds().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_executionTimeoutInMilliseconds"), new Gauge<Number>() {
            @Override
            public Number getValue() {
                return properties.executionTimeoutInMilliseconds().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_executionIsolationStrategy"), new Gauge<String>() {
            @Override
            public String getValue() {
                return properties.executionIsolationStrategy().get().name();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_metricsRollingPercentileEnabled"), new Gauge<Boolean>() {
            @Override
            public Boolean getValue() {
                return properties.metricsRollingPercentileEnabled().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_requestCacheEnabled"), new Gauge<Boolean>() {
            @Override
            public Boolean getValue() {
                return properties.requestCacheEnabled().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_requestLogEnabled"), new Gauge<Boolean>() {
            @Override
            public Boolean getValue() {
                return properties.requestLogEnabled().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_executionIsolationSemaphoreMaxConcurrentRequests"), new Gauge<Number>() {
            @Override
            public Number getValue() {
                return properties.executionIsolationSemaphoreMaxConcurrentRequests().get();
            }
        });
        metricRegistry.register(createMetricName("propertyValue_fallbackIsolationSemaphoreMaxConcurrentRequests"), new Gauge<Number>() {
            @Override
            public Number getValue() {
                return properties.fallbackIsolationSemaphoreMaxConcurrentRequests().get();
            }
        });
    }

    /**
     * Builds the fully qualified metric name {@code <group>.<command>.<name>}.
     */
    protected String createMetricName(String name) {
        return MetricRegistry.name(metricGroup, metricType, name);
    }

    /**
     * Registers a gauge exposing the cumulative count of the given event.
     */
    protected void createCumulativeCountForEvent(String name, final HystrixRollingNumberEvent event) {
        metricRegistry.register(createMetricName(name), new Gauge<Long>() {
            @Override
            public Long getValue() {
                return metrics.getCumulativeCount(event);
            }
        });
    }

    /**
     * Registers a gauge exposing the rolling-window count of the given event.
     */
    protected void createRollingCountForEvent(String name, final HystrixRollingNumberEvent event) {
        metricRegistry.register(createMetricName(name), new Gauge<Long>() {
            @Override
            public Long getValue() {
                return metrics.getRollingCount(event);
            }
        });
    }

    /**
     * Registers a gauge exposing the given execution-time latency percentile.
     * getExecutionTimePercentile takes a double, so 99.5 and the integer
     * percentiles share this one code path.
     */
    private void createExecutionLatencyPercentileGauge(String name, final double percentile) {
        metricRegistry.register(createMetricName(name), new Gauge<Integer>() {
            @Override
            public Integer getValue() {
                return metrics.getExecutionTimePercentile(percentile);
            }
        });
    }

    /**
     * Registers a gauge exposing the given total-time latency percentile.
     */
    private void createTotalLatencyPercentileGauge(String name, final double percentile) {
        metricRegistry.register(createMetricName(name), new Gauge<Integer>() {
            @Override
            public Integer getValue() {
                return metrics.getTotalTimePercentile(percentile);
            }
        });
    }
}
|
|
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
* <p>
*/
package org.olat.core.gui.render;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import org.apache.commons.lang.StringEscapeUtils;
import org.olat.core.logging.OLog;
import org.olat.core.logging.Tracing;
import org.olat.core.util.filter.impl.OWASPAntiSamyXSSFilter;
/**
 * A {@link Writer} that accumulates everything written into an internal
 * {@link StringBuilder}; used by the rendering layer to build output.
 * Backed by StringBuilder, so instances are not synchronized.
 *
 * @author Felix Jost
 */
public class StringOutput extends Writer {
	private static final OLog log = Tracing.createLoggerFor(StringOutput.class);

	private StringBuilder sb;

	/**
	 * @param len initial capacity of the underlying buffer
	 */
	public StringOutput(int len) {
		sb = new StringBuilder(len);
	}

	/**
	 * Creates a StringOutput with the default initial capacity.
	 */
	public StringOutput() {
		sb = new StringBuilder();
	}

	/**
	 * Appends the given string (StringBuilder semantics: a null value appends "null").
	 *
	 * @param val
	 * @return this
	 */
	public StringOutput append(String val) {
		sb.append(val);
		return this;
	}

	/**
	 * Appends "true" or "false".
	 *
	 * @param val
	 * @return this
	 */
	public StringOutput append(boolean val) {
		sb.append(val);
		return this;
	}

	/**
	 * Appends the decimal representation of the given int.
	 *
	 * @param i
	 * @return this
	 */
	public StringOutput append(int i) {
		sb.append(i);
		return this;
	}

	/**
	 * Appends the current content of another StringOutput.
	 *
	 * @param stringOutput
	 * @return this
	 */
	public StringOutput append(StringOutput stringOutput) {
		sb.append(stringOutput.toString());
		return this;
	}

	@Override
	public void write(char[] cbuf, int off, int len) throws IOException {
		sb.append(cbuf, off, len);
	}

	/**
	 * Appends the decimal representation of the given long.
	 *
	 * @param sMin
	 * @return this
	 */
	public StringOutput append(long sMin) {
		// append the long directly; the String.valueOf round-trip is unnecessary
		sb.append(sMin);
		return this;
	}

	/**
	 * Appends the current content of the given StringBuilder.
	 *
	 * @param buffer
	 * @return this
	 */
	public StringOutput append(StringBuilder buffer) {
		sb.append(buffer);
		return this;
	}

	/**
	 * Appends the string after passing it through the OWASP AntiSamy XSS filter.
	 *
	 * @param str
	 * @return this
	 */
	public StringOutput appendScanned(String str) {
		sb.append(new OWASPAntiSamyXSSFilter().filter(str));
		return this;
	}

	/**
	 * Appends the string with HTML special characters escaped.
	 * An IOException from the escaper is logged and swallowed (best-effort).
	 *
	 * @param str
	 * @return this
	 */
	public StringOutput appendHtmlEscaped(String str) {
		try {
			StringEscapeUtils.escapeHtml(this, str);
		} catch (IOException e) {
			log.error("Error escaping HTML", e);
		}
		return this;
	}

	public void ensureCapacity(int minimumCapacity) {
		sb.ensureCapacity(minimumCapacity);
	}

	public int capacity() {
		return sb.capacity();
	}

	public void setLength(int newLength) {
		sb.setLength(newLength);
	}

	/**
	 * @return The length of the string output
	 */
	public int length() {
		return sb.length();
	}

	/**
	 * @return a Reader over the content present at the time of this call;
	 *         later appends are not visible to the returned reader
	 */
	public Reader getReader() {
		return new StringOutputReader();
	}

	@Override
	public void flush() throws IOException {
		// nothing to flush: writes go straight to the in-memory buffer
	}

	@Override
	public void close() throws IOException {
		// nothing to close: no underlying resource
	}

	/**
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		return sb.toString();
	}

	/**
	 * Reader view over the enclosing buffer, modeled on java.io.StringReader.
	 * The readable length is snapshotted at construction time.
	 */
	private class StringOutputReader extends Reader {
		// snapshot of sb.length() at construction; appends after that are not read
		private int length;
		private int next = 0;
		private int mark = 0;

		/**
		 * Creates a new string reader.
		 *
		 * @param s String providing the character stream.
		 */
		public StringOutputReader() {
			this.length = sb.length();
		}

		/**
		 * Reads a single character.
		 *
		 * @return The character read, or -1 if the end of the stream has been
		 * reached
		 *
		 * @exception IOException If an I/O error occurs
		 */
		@Override
		public int read() throws IOException {
			synchronized (lock) {
				if (next >= length)
					return -1;
				// read directly from the builder; no need for a 1-char temp array
				return sb.charAt(next++);
			}
		}

		@Override
		public int read(char cbuf[], int off, int len) throws IOException {
			synchronized (lock) {
				// same bounds contract as java.io.Reader
				if ((off < 0) || (off > cbuf.length) || (len < 0) ||
					((off + len) > cbuf.length) || ((off + len) < 0)) {
					throw new IndexOutOfBoundsException();
				} else if (len == 0) {
					return 0;
				}
				if (next >= length) return -1;
				int n = Math.min(length - next, len);
				sb.getChars(next, next + n, cbuf, off);
				next += n;
				return n;
			}
		}

		/**
		 * Skips the specified number of characters in the stream. Returns
		 * the number of characters that were skipped.
		 *
		 * <p>The <code>ns</code> parameter may be negative, even though the
		 * <code>skip</code> method of the {@link Reader} superclass throws
		 * an exception in this case. Negative values of <code>ns</code> cause the
		 * stream to skip backwards. Negative return values indicate a skip
		 * backwards. It is not possible to skip backwards past the beginning of
		 * the string.
		 *
		 * <p>If the entire string has been read or skipped, then this method has
		 * no effect and always returns 0.
		 *
		 * @exception IOException If an I/O error occurs
		 */
		@Override
		public long skip(long ns) throws IOException {
			synchronized (lock) {
				if (next >= length)
					return 0;
				// Bound skip by beginning and end of the source
				long n = Math.min(length - next, ns);
				n = Math.max(-next, n);
				next += n;
				return n;
			}
		}

		/** Always ready: the data is already in memory. */
		@Override
		public boolean ready() throws IOException {
			synchronized (lock) {
				return true;
			}
		}

		@Override
		public boolean markSupported() {
			return true;
		}

		@Override
		public void mark(int readAheadLimit) throws IOException {
			if (readAheadLimit < 0) {
				throw new IllegalArgumentException("Read-ahead limit < 0");
			}
			synchronized (lock) {
				mark = next;
			}
		}

		@Override
		public void reset() throws IOException {
			synchronized (lock) {
				next = mark;
			}
		}

		@Override
		public void close() {
			// nothing to close: no underlying resource
		}
	}
}
|
|
/**
* Copyright Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microsoft.azure.storage;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.CloudBlobDirectory;
import com.microsoft.azure.storage.blob.CloudBlockBlob;
import com.microsoft.azure.storage.blob.CloudPageBlob;
import com.microsoft.azure.storage.core.SR;
import com.microsoft.azure.storage.queue.CloudQueue;
import com.microsoft.azure.storage.queue.CloudQueueClient;
import com.microsoft.azure.storage.table.CloudTable;
import com.microsoft.azure.storage.table.CloudTableClient;
import com.microsoft.azure.storage.TestRunners.CloudTests;
import com.microsoft.azure.storage.TestRunners.DevFabricTests;
import com.microsoft.azure.storage.TestRunners.DevStoreTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.InvalidKeyException;
import static org.junit.Assert.*;
@Category({ DevFabricTests.class, DevStoreTests.class, CloudTests.class })
public class StorageUriTests {
private static final String ACCOUNT_NAME = "account";
private static final String SECONDARY_SUFFIX = "-secondary";
private static final String ENDPOINT_SUFFIX = ".core.windows.net";
private static final String BLOB_SERVICE = ".blob";
private static final String QUEUE_SERVICE = ".queue";
private static final String TABLE_SERVICE = ".table";
/**
 * Exercises StorageUri construction and value equality with explicit primary and
 * secondary blob endpoints:
 * <ul>
 * <li>(null, null) is rejected with SR.STORAGE_URI_NOT_NULL;</li>
 * <li>a primary-only or secondary-only StorageUri is allowed;</li>
 * <li>equals() compares both components and is order-sensitive;</li>
 * <li>primary and secondary must reference the same resource, otherwise the
 *     constructor throws with SR.STORAGE_URI_MUST_MATCH.</li>
 * </ul>
 */
@Test
public void testStorageUriWithTwoUris() throws URISyntaxException {
URI primaryClientUri = new URI("http://" + ACCOUNT_NAME + BLOB_SERVICE + ENDPOINT_SUFFIX);
URI primaryContainerUri = new URI(primaryClientUri + "/container");
URI secondaryClientUri = new URI("http://" + ACCOUNT_NAME + SECONDARY_SUFFIX + BLOB_SERVICE + ENDPOINT_SUFFIX);
URI dummyClientUri = new URI("http://" + ACCOUNT_NAME + "-dummy" + BLOB_SERVICE + ENDPOINT_SUFFIX);
// no uri: constructing with both URIs null must fail
try {
new StorageUri(null, null);
fail(SR.STORAGE_URI_NOT_NULL);
}
catch (IllegalArgumentException ex) {
assertEquals(SR.STORAGE_URI_NOT_NULL, ex.getMessage());
}
// primary uri only: secondary stays null; equality is value-based on the primary
StorageUri singleUri = new StorageUri(primaryClientUri);
assertEquals(primaryClientUri, singleUri.getPrimaryUri());
assertNull(singleUri.getSecondaryUri());
StorageUri singleUri2 = new StorageUri(primaryClientUri);
assertEquals(singleUri, singleUri2);
StorageUri singleUri3 = new StorageUri(secondaryClientUri);
assertFalse(singleUri.equals(singleUri3));
// secondary uri only: primary may be null on its own
StorageUri singleSecondaryUri = new StorageUri(null, secondaryClientUri);
assertEquals(secondaryClientUri, singleSecondaryUri.getSecondaryUri());
assertNull(singleSecondaryUri.getPrimaryUri());
StorageUri singleSecondarUri2 = new StorageUri(null, secondaryClientUri);
assertEquals(singleSecondaryUri, singleSecondarUri2);
StorageUri singleSecondarUri3 = new StorageUri(null, primaryClientUri);
assertFalse(singleSecondaryUri.equals(singleSecondarUri3));
// primary and secondary uri: equals() must consider both components
StorageUri multiUri = new StorageUri(primaryClientUri, secondaryClientUri);
assertEquals(primaryClientUri, multiUri.getPrimaryUri());
assertEquals(secondaryClientUri, multiUri.getSecondaryUri());
assertFalse(multiUri.equals(singleUri));
StorageUri multiUri2 = new StorageUri(primaryClientUri, secondaryClientUri);
assertEquals(multiUri, multiUri2);
// primary and secondary pointing at different resources must be rejected
try {
new StorageUri(primaryClientUri, primaryContainerUri);
fail(SR.STORAGE_URI_MUST_MATCH);
}
catch (IllegalArgumentException ex) {
assertEquals(SR.STORAGE_URI_MUST_MATCH, ex.getMessage());
}
// changing either component, or swapping them, breaks equality
StorageUri multiUri3 = new StorageUri(primaryClientUri, dummyClientUri);
assertFalse(multiUri.equals(multiUri3));
StorageUri multiUri4 = new StorageUri(dummyClientUri, secondaryClientUri);
assertFalse(multiUri.equals(multiUri4));
StorageUri multiUri5 = new StorageUri(secondaryClientUri, primaryClientUri);
assertFalse(multiUri.equals(multiUri5));
}
/**
 * Verifies StorageUri equality semantics using the development storage account's
 * blob endpoints (primary and its generated secondary).
 */
@Test
public void testDevelopmentStorageWithTwoUris() throws URISyntaxException {
    final CloudStorageAccount devAccount = CloudStorageAccount.getDevelopmentStorageAccount();
    final URI blobPrimary = devAccount.getBlobStorageUri().getPrimaryUri();
    final URI blobSecondary = devAccount.getBlobStorageUri().getSecondaryUri();
    final URI containerPrimary = new URI(blobPrimary.toString() + "/container");

    // A StorageUri built from the primary endpoint alone has no secondary.
    StorageUri primaryOnly = new StorageUri(blobPrimary);
    assertTrue(blobPrimary.equals(primaryOnly.getPrimaryUri()));
    assertNull(primaryOnly.getSecondaryUri());

    // Equality is value-based: same primary is equal, a different primary is not.
    assertTrue(primaryOnly.equals(new StorageUri(blobPrimary)));
    assertFalse(primaryOnly.equals(new StorageUri(blobSecondary)));

    // Primary and secondary together expose both endpoints.
    StorageUri bothEndpoints = new StorageUri(blobPrimary, blobSecondary);
    assertTrue(blobPrimary.equals(bothEndpoints.getPrimaryUri()));
    assertTrue(blobSecondary.equals(bothEndpoints.getSecondaryUri()));
    assertFalse(bothEndpoints.equals(primaryOnly));
    assertTrue(bothEndpoints.equals(new StorageUri(blobPrimary, blobSecondary)));

    // Primary and secondary referring to different resources must be rejected.
    try {
        new StorageUri(blobPrimary, containerPrimary);
        fail("StorageUri constructor should fail if both URIs do not point to the same resource");
    }
    catch (IllegalArgumentException e) {
        assertEquals(SR.STORAGE_URI_MUST_MATCH, e.getMessage());
    }

    // Swapping primary and secondary yields a different StorageUri.
    assertFalse(bothEndpoints.equals(new StorageUri(blobSecondary, blobPrimary)));
}
/**
 * Parsing a connection string must yield blob/queue/table StorageUris whose
 * secondary endpoints carry the "-secondary" account suffix, and the per-service
 * clients created from the account must inherit those endpoints.
 */
@Test
public void testCloudStorageAccountWithStorageUri() throws URISyntaxException, InvalidKeyException {
    StorageUri blobEndpoint = expectedEndpoint(BLOB_SERVICE);
    StorageUri queueEndpoint = expectedEndpoint(QUEUE_SERVICE);
    StorageUri tableEndpoint = expectedEndpoint(TABLE_SERVICE);

    CloudStorageAccount account = CloudStorageAccount.parse(String.format(
            "DefaultEndpointsProtocol=http;AccountName=%s;AccountKey=dummyKey", ACCOUNT_NAME));

    // Endpoints exposed directly on the account...
    assertEquals(blobEndpoint, account.getBlobStorageUri());
    assertEquals(queueEndpoint, account.getQueueStorageUri());
    assertEquals(tableEndpoint, account.getTableStorageUri());

    // ...and inherited by the service clients it creates.
    assertEquals(blobEndpoint, account.createCloudBlobClient().getStorageUri());
    assertEquals(queueEndpoint, account.createCloudQueueClient().getStorageUri());
    assertEquals(tableEndpoint, account.createCloudTableClient().getStorageUri());

    // Single-URI getters return the primary endpoint only.
    assertEquals(blobEndpoint.getPrimaryUri(), account.getBlobEndpoint());
    assertEquals(queueEndpoint.getPrimaryUri(), account.getQueueEndpoint());
    assertEquals(tableEndpoint.getPrimaryUri(), account.getTableEndpoint());
}

/** Builds the expected primary/secondary endpoint pair for the given service infix. */
private static StorageUri expectedEndpoint(String service) throws URISyntaxException {
    return new StorageUri(new URI("http://" + ACCOUNT_NAME + service + ENDPOINT_SUFFIX),
            new URI("http://" + ACCOUNT_NAME + SECONDARY_SUFFIX + service + ENDPOINT_SUFFIX));
}
/**
 * Verifies that every blob-side object type (client, container, directory,
 * subdirectory, block blob, page blob) carries a full primary+secondary
 * StorageUri, whether the object is obtained through a parent reference or
 * constructed directly from a StorageUri, and that parent/container/service
 * navigation preserves those URIs.
 */
@Test
public void testBlobTypesWithStorageUri() throws StorageException, URISyntaxException {
CloudBlobClient blobClient = TestHelper.createCloudBlobClient();
StorageUri endpoint = new StorageUri(new URI("http://" + ACCOUNT_NAME + BLOB_SERVICE + ENDPOINT_SUFFIX),
new URI("http://" + ACCOUNT_NAME + SECONDARY_SUFFIX + BLOB_SERVICE + ENDPOINT_SUFFIX));
// client constructed from a StorageUri keeps both endpoints
CloudBlobClient client = new CloudBlobClient(endpoint, blobClient.getCredentials());
assertEquals(endpoint, client.getStorageUri());
assertEquals(endpoint.getPrimaryUri(), client.getEndpoint());
// container: reference obtained from the client vs. constructed directly
StorageUri containerUri = new StorageUri(new URI(endpoint.getPrimaryUri() + "/container"), new URI(
endpoint.getSecondaryUri() + "/container"));
CloudBlobContainer container = client.getContainerReference("container");
assertEquals(containerUri, container.getStorageUri());
assertEquals(containerUri.getPrimaryUri(), container.getUri());
assertEquals(endpoint, container.getServiceClient().getStorageUri());
container = new CloudBlobContainer(containerUri, client.getCredentials());
assertEquals(containerUri, container.getStorageUri());
assertEquals(containerUri.getPrimaryUri(), container.getUri());
assertEquals(endpoint, container.getServiceClient().getStorageUri());
// directory and nested subdirectory (note the trailing slashes on prefixes)
StorageUri directoryUri = new StorageUri(new URI(containerUri.getPrimaryUri() + "/directory/"), new URI(
containerUri.getSecondaryUri() + "/directory/"));
StorageUri subdirectoryUri = new StorageUri(new URI(directoryUri.getPrimaryUri() + "subdirectory/"), new URI(
directoryUri.getSecondaryUri() + "subdirectory/"));
CloudBlobDirectory directory = container.getDirectoryReference("directory");
assertEquals(directoryUri, directory.getStorageUri());
assertEquals(directoryUri.getPrimaryUri(), directory.getUri());
assertEquals("", directory.getParent().getPrefix());
assertEquals(containerUri, directory.getContainer().getStorageUri());
assertEquals(endpoint, directory.getServiceClient().getStorageUri());
CloudBlobDirectory subdirectory = directory.getDirectoryReference("subdirectory");
assertEquals(subdirectoryUri, subdirectory.getStorageUri());
assertEquals(subdirectoryUri.getPrimaryUri(), subdirectory.getUri());
assertEquals(directoryUri, subdirectory.getParent().getStorageUri());
assertEquals(containerUri, subdirectory.getContainer().getStorageUri());
assertEquals(endpoint, subdirectory.getServiceClient().getStorageUri());
// block blob: reference from the subdirectory vs. constructed directly
StorageUri blobUri = new StorageUri(new URI(subdirectoryUri.getPrimaryUri() + "blob"), new URI(
subdirectoryUri.getSecondaryUri() + "blob"));
CloudBlockBlob blockBlob = subdirectory.getBlockBlobReference("blob");
assertEquals(blobUri, blockBlob.getStorageUri());
assertEquals(blobUri.getPrimaryUri(), blockBlob.getUri());
assertEquals(subdirectoryUri, blockBlob.getParent().getStorageUri());
assertEquals(containerUri, blockBlob.getContainer().getStorageUri());
assertEquals(endpoint, blockBlob.getServiceClient().getStorageUri());
blockBlob = new CloudBlockBlob(blobUri, client.getCredentials());
assertEquals(blobUri, blockBlob.getStorageUri());
assertEquals(blobUri.getPrimaryUri(), blockBlob.getUri());
assertEquals(subdirectoryUri, blockBlob.getParent().getStorageUri());
assertEquals(containerUri, blockBlob.getContainer().getStorageUri());
assertEquals(endpoint, blockBlob.getServiceClient().getStorageUri());
// page blob: same checks as the block blob
CloudPageBlob pageBlob = subdirectory.getPageBlobReference("blob");
assertEquals(blobUri, pageBlob.getStorageUri());
assertEquals(blobUri.getPrimaryUri(), pageBlob.getUri());
assertEquals(subdirectoryUri, pageBlob.getParent().getStorageUri());
assertEquals(containerUri, pageBlob.getContainer().getStorageUri());
assertEquals(endpoint, pageBlob.getServiceClient().getStorageUri());
pageBlob = new CloudPageBlob(blobUri, client.getCredentials());
assertEquals(blobUri, pageBlob.getStorageUri());
assertEquals(blobUri.getPrimaryUri(), pageBlob.getUri());
assertEquals(subdirectoryUri, pageBlob.getParent().getStorageUri());
assertEquals(containerUri, pageBlob.getContainer().getStorageUri());
assertEquals(endpoint, pageBlob.getServiceClient().getStorageUri());
}
/**
 * Verifies that a queue service client and queue objects created from it (or directly from a
 * {@code StorageUri}) all report consistent primary/secondary storage URIs.
 */
@Test
public void testQueueTypesWithStorageUri() throws URISyntaxException, StorageException {
    CloudQueueClient baseClient = TestHelper.createCloudQueueClient();
    URI primaryEndpoint = new URI("http://" + ACCOUNT_NAME + QUEUE_SERVICE + ENDPOINT_SUFFIX);
    URI secondaryEndpoint = new URI("http://" + ACCOUNT_NAME + SECONDARY_SUFFIX + QUEUE_SERVICE + ENDPOINT_SUFFIX);
    StorageUri endpoint = new StorageUri(primaryEndpoint, secondaryEndpoint);

    CloudQueueClient client = new CloudQueueClient(endpoint, baseClient.getCredentials());
    assertEquals(endpoint, client.getStorageUri());
    assertEquals(endpoint.getPrimaryUri(), client.getEndpoint());

    StorageUri expectedQueueUri = new StorageUri(
            new URI(endpoint.getPrimaryUri() + "/queue"),
            new URI(endpoint.getSecondaryUri() + "/queue"));

    // Queue reference obtained through the service client.
    CloudQueue queue = client.getQueueReference("queue");
    assertEquals(expectedQueueUri, queue.getStorageUri());
    assertEquals(expectedQueueUri.getPrimaryUri(), queue.getUri());
    assertEquals(endpoint, queue.getServiceClient().getStorageUri());

    // Queue constructed directly from the StorageUri must agree.
    queue = new CloudQueue(expectedQueueUri, client.getCredentials());
    assertEquals(expectedQueueUri, queue.getStorageUri());
    assertEquals(expectedQueueUri.getPrimaryUri(), queue.getUri());
    assertEquals(endpoint, queue.getServiceClient().getStorageUri());
}
/**
 * Verifies that a table service client and table objects created from it (or directly from a
 * {@code StorageUri}) all report consistent primary/secondary storage URIs.
 */
@Test
public void testTableTypesWithStorageUri() throws URISyntaxException, StorageException {
    CloudTableClient baseClient = TestHelper.createCloudTableClient();
    URI primaryEndpoint = new URI("http://" + ACCOUNT_NAME + TABLE_SERVICE + ENDPOINT_SUFFIX);
    URI secondaryEndpoint = new URI("http://" + ACCOUNT_NAME + SECONDARY_SUFFIX + TABLE_SERVICE + ENDPOINT_SUFFIX);
    StorageUri endpoint = new StorageUri(primaryEndpoint, secondaryEndpoint);

    CloudTableClient client = new CloudTableClient(endpoint, baseClient.getCredentials());
    assertEquals(endpoint, client.getStorageUri());
    assertEquals(endpoint.getPrimaryUri(), client.getEndpoint());

    StorageUri expectedTableUri = new StorageUri(
            new URI(endpoint.getPrimaryUri() + "/table"),
            new URI(endpoint.getSecondaryUri() + "/table"));

    // Table reference obtained through the service client.
    CloudTable table = client.getTableReference("table");
    assertEquals(expectedTableUri, table.getStorageUri());
    assertEquals(expectedTableUri.getPrimaryUri(), table.getUri());
    assertEquals(endpoint, table.getServiceClient().getStorageUri());

    // Table constructed directly from the StorageUri must agree.
    table = new CloudTable(expectedTableUri, client.getCredentials());
    assertEquals(expectedTableUri, table.getStorageUri());
    assertEquals(expectedTableUri.getPrimaryUri(), table.getUri());
    assertEquals(endpoint, table.getServiceClient().getStorageUri());
}
}
|
|
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.saml.processing.api.saml.v2.sig;
import org.keycloak.saml.common.PicketLinkLogger;
import org.keycloak.saml.common.PicketLinkLoggerFactory;
import org.keycloak.saml.common.constants.JBossSAMLConstants;
import org.keycloak.saml.common.constants.JBossSAMLURIConstants;
import org.keycloak.saml.common.exceptions.ProcessingException;
import org.keycloak.saml.processing.core.util.SignatureUtilTransferObject;
import org.keycloak.saml.processing.core.util.XMLSignatureUtil;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.crypto.MarshalException;
import javax.xml.crypto.dsig.DigestMethod;
import javax.xml.crypto.dsig.SignatureMethod;
import javax.xml.crypto.dsig.XMLSignatureException;
import javax.xml.parsers.ParserConfigurationException;
import java.security.GeneralSecurityException;
import java.security.KeyPair;
import java.security.cert.X509Certificate;
import org.keycloak.rotation.KeyLocator;
/**
* Class that deals with SAML2 Signature
*
* @author [email protected]
* @author [email protected]
* @since May 26, 2009
*/
public class SAML2Signature {

    private static final PicketLinkLogger logger = PicketLinkLoggerFactory.getLogger();

    private static final String ID_ATTRIBUTE_NAME = "ID";

    // Signature algorithm URI used when producing signatures; defaults to RSA-SHA1.
    private String signatureMethod = SignatureMethod.RSA_SHA1;

    // Digest algorithm URI used when producing signatures; defaults to SHA1.
    private String digestMethod = DigestMethod.SHA1;

    // Optional node; when set, the Signature element is inserted before it.
    private Node sibling;

    /**
     * Set the X509Certificate if X509Data is needed in signed info
     */
    private X509Certificate x509Certificate;

    public String getSignatureMethod() {
        return signatureMethod;
    }

    public void setSignatureMethod(String signatureMethod) {
        this.signatureMethod = signatureMethod;
    }

    public String getDigestMethod() {
        return digestMethod;
    }

    public void setDigestMethod(String digestMethod) {
        this.digestMethod = digestMethod;
    }

    public void setNextSibling(Node sibling) {
        this.sibling = sibling;
    }

    /**
     * Set to false, if you do not want to include keyinfo in the signature.
     *
     * <p>Note: this toggles a static flag on {@link XMLSignatureUtil}, and only when
     * {@code false} is passed; passing {@code true} is a no-op.</p>
     *
     * @param val
     *
     * @since v2.0.1
     */
    public void setSignatureIncludeKeyInfo(boolean val) {
        if (!val) {
            XMLSignatureUtil.setIncludeKeyInfoInSignature(false);
        }
    }

    /**
     * Set the {@link X509Certificate} if you desire to have the SignedInfo contain X509 Data.
     *
     * <p>This method needs to be called before any of the sign methods.</p>
     *
     * @param x509Certificate
     *
     * @since v2.5.0
     */
    public void setX509Certificate(X509Certificate x509Certificate) {
        this.x509Certificate = x509Certificate;
    }

    /**
     * Sign a Document, referencing the element identified by {@code referenceID}.
     *
     * @param doc document to sign
     * @param referenceID value of the ID attribute of the element to reference
     * @param keyName name advertised in the KeyInfo
     * @param keyPair key pair used for signing
     * @param canonicalizationMethodType canonicalization algorithm URI
     *
     * @return the signed document
     *
     * @throws ParserConfigurationException
     * @throws XMLSignatureException
     * @throws MarshalException
     * @throws GeneralSecurityException
     */
    public Document sign(Document doc, String referenceID, String keyName, KeyPair keyPair, String canonicalizationMethodType) throws ParserConfigurationException,
            GeneralSecurityException, MarshalException, XMLSignatureException {
        configureIdAttribute(doc);
        String referenceURI = "#" + referenceID;

        if (sibling == null) {
            // No insertion point requested: sign at the default location.
            return XMLSignatureUtil.sign(doc, keyName, keyPair, digestMethod, signatureMethod, referenceURI, canonicalizationMethodType);
        }

        // An insertion point was requested: carry everything in a transfer object.
        SignatureUtilTransferObject transferObject = new SignatureUtilTransferObject();
        transferObject.setDocumentToBeSigned(doc);
        transferObject.setKeyName(keyName);
        transferObject.setKeyPair(keyPair);
        transferObject.setDigestMethod(digestMethod);
        transferObject.setSignatureMethod(signatureMethod);
        transferObject.setReferenceURI(referenceURI);
        transferObject.setNextSibling(sibling);
        if (x509Certificate != null) {
            transferObject.setX509Certificate(x509Certificate);
        }
        return XMLSignatureUtil.sign(transferObject, canonicalizationMethodType);
    }

    /**
     * Sign a SAML Document, using the ID attribute of its root element as the reference.
     *
     * @param samlDocument
     * @param keypair
     *
     * @throws org.keycloak.saml.common.exceptions.ProcessingException
     */
    public void signSAMLDocument(Document samlDocument, String keyName, KeyPair keypair, String canonicalizationMethodType) throws ProcessingException {
        // The reference ID comes from the root element.
        String documentId = samlDocument.getDocumentElement().getAttribute(ID_ATTRIBUTE_NAME);
        try {
            sign(samlDocument, documentId, keyName, keypair, canonicalizationMethodType);
        } catch (ParserConfigurationException | GeneralSecurityException | MarshalException | XMLSignatureException e) {
            throw new ProcessingException(logger.signatureError(e));
        }
    }

    /**
     * Validate the SAML2 Document.
     *
     * @param signedDocument
     * @param keyLocator
     *
     * @return {@code true} if the signature validates
     *
     * @throws ProcessingException
     */
    public boolean validate(Document signedDocument, KeyLocator keyLocator) throws ProcessingException {
        try {
            // IDness must be established before the reference can be resolved.
            configureIdAttribute(signedDocument);
            return XMLSignatureUtil.validate(signedDocument, keyLocator);
        } catch (MarshalException | XMLSignatureException e) {
            throw new ProcessingException(logger.signatureError(e));
        }
    }

    /**
     * Given a {@link Document}, find the {@link Node} which is the sibling of the Issuer element.
     *
     * @param doc
     *
     * @return the next sibling of the first Issuer element, or {@code null} if there is none
     */
    public Node getNextSiblingOfIssuer(Document doc) {
        NodeList issuers = doc.getElementsByTagNameNS(JBossSAMLURIConstants.ASSERTION_NSURI.get(), JBossSAMLConstants.ISSUER.get());
        if (issuers.getLength() == 0) {
            return null;
        }
        return issuers.item(0).getNextSibling();
    }

    /**
     * <p>
     * Sets the IDness of the ID attribute. Santuario 1.5.1 no longer assumes IDness based on
     * attribute names; this method must be called before signing/validating a SAML document.
     * </p>
     *
     * @param document SAML document to have its ID attribute configured.
     */
    private void configureIdAttribute(Document document) {
        // Establish the IDness of the ID attribute on the root element.
        document.getDocumentElement().setIdAttribute(ID_ATTRIBUTE_NAME, true);

        // Do the same for every nested Assertion element.
        NodeList assertions = document.getElementsByTagNameNS(JBossSAMLURIConstants.ASSERTION_NSURI.get(),
                JBossSAMLConstants.ASSERTION.get());
        for (int i = 0; i < assertions.getLength(); i++) {
            Node node = assertions.item(i);
            if (node instanceof Element) {
                ((Element) node).setIdAttribute(ID_ATTRIBUTE_NAME, true);
            }
        }
    }
}
|
|
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.core.stream.event;
import org.testng.AssertJUnit;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.wso2.siddhi.core.event.stream.StreamEvent;
import org.wso2.siddhi.core.event.stream.StreamEventPool;
import org.wso2.siddhi.core.event.stream.converter.ConversionStreamEventChunk;
import org.wso2.siddhi.core.event.stream.converter.StreamEventConverter;
import org.wso2.siddhi.core.event.stream.converter.ZeroStreamEventConverter;
/**
 * Unit tests for {@code ConversionStreamEventChunk}: iteration order and removal semantics
 * (remove first, remove middle, remove all, remove non-adjacent, remove before next()).
 *
 * <p>Refactored to eliminate the copy-pasted event/pool/chunk construction that was repeated
 * in every test method; the public test methods and their assertions are unchanged.</p>
 */
public class ComplexEventChunkTestCase {
    private int count;
    private StreamEventConverter streamEventConverter;

    @BeforeMethod
    public void init() {
        count = 0;
        streamEventConverter = new ZeroStreamEventConverter();
    }

    /** Creates a StreamEvent with the standard (0, 0, 3) sizing and the given output data. */
    private static StreamEvent event(Object... outputData) {
        StreamEvent streamEvent = new StreamEvent(0, 0, 3);
        streamEvent.setOutputData(outputData);
        return streamEvent;
    }

    /** Links the given events into a chain via setNext() and returns the head. */
    private static StreamEvent chain(StreamEvent... events) {
        for (int i = 0; i < events.length - 1; i++) {
            events[i].setNext(events[i + 1]);
        }
        return events[0];
    }

    /** Builds a chunk backed by a fresh (0, 0, 3, 5) pool and assigns the given event chain. */
    private ConversionStreamEventChunk chunkOf(StreamEvent head) {
        StreamEventPool streamEventPool = new StreamEventPool(0, 0, 3, 5);
        ConversionStreamEventChunk streamEventChunk = new ConversionStreamEventChunk(streamEventConverter,
                streamEventPool);
        streamEventChunk.convertAndAssign(head);
        return streamEventChunk;
    }

    @Test
    public void eventChunkTest() {
        StreamEvent streamEvent1 = event("IBM", 700L, 1L);
        StreamEvent streamEvent2 = event("WSO2", 700L, 2L);
        StreamEvent streamEvent3 = event("WSO2", 700L, 3L);
        ConversionStreamEventChunk streamEventChunk = chunkOf(chain(streamEvent1, streamEvent2, streamEvent3));
        while (streamEventChunk.hasNext()) {
            count++;
            StreamEvent event = streamEventChunk.next();
            AssertJUnit.assertEquals(count * 1L, event.getOutputData()[2]);
        }
        AssertJUnit.assertEquals(3, count);
    }

    @Test
    public void eventChunkRemoveTest1() {
        StreamEvent streamEvent1 = event("IBM", 700L, 1L);
        StreamEvent streamEvent2 = event("WSO2", 700L, 2L);
        StreamEvent streamEvent3 = event("WSO2", 700L, 3L);
        ConversionStreamEventChunk streamEventChunk = chunkOf(chain(streamEvent1, streamEvent2, streamEvent3));
        // Remove only the first event.
        while (streamEventChunk.hasNext()) {
            count++;
            streamEventChunk.next();
            if (count == 1) {
                streamEventChunk.remove();
            }
        }
        AssertJUnit.assertEquals(streamEvent2, streamEventChunk.getFirst());
    }

    @Test
    public void eventChunkRemoveTest2() {
        StreamEvent streamEvent1 = event("IBM", 700L, 1L);
        StreamEvent streamEvent2 = event("WSO2", 700L, 2L);
        StreamEvent streamEvent3 = event("WSO2", 700L, 3L);
        StreamEvent streamEvent4 = event("WSO2", 700L, 4L);
        ConversionStreamEventChunk streamEventChunk =
                chunkOf(chain(streamEvent1, streamEvent2, streamEvent3, streamEvent4));
        // Remove the first two events.
        while (streamEventChunk.hasNext()) {
            count++;
            streamEventChunk.next();
            if (count == 1 || count == 2) {
                streamEventChunk.remove();
            }
        }
        StreamEvent streamEvent = streamEventChunk.getFirst();
        AssertJUnit.assertEquals(streamEvent3, streamEvent);
        AssertJUnit.assertEquals(streamEvent4, streamEvent.getNext());
    }

    @Test
    public void eventChunkRemoveTest3() {
        StreamEvent streamEvent1 = event("IBM", 700L, 100L);
        StreamEvent streamEvent2 = event("WSO2", 700L, 100L);
        StreamEvent streamEvent3 = event("WSO2", 700L, 100L);
        StreamEvent streamEvent4 = event("WSO2", 700L, 100L);
        ConversionStreamEventChunk streamEventChunk =
                chunkOf(chain(streamEvent1, streamEvent2, streamEvent3, streamEvent4));
        // Remove every event; the chunk must end up empty.
        while (streamEventChunk.hasNext()) {
            streamEventChunk.next();
            streamEventChunk.remove();
        }
        AssertJUnit.assertNull(streamEventChunk.getFirst());
    }

    @Test
    public void eventChunkRemoveTest4() {
        StreamEvent streamEvent1 = event("IBM", 700L, 100L);
        StreamEvent streamEvent2 = event("WSO2", 700L, 100L);
        StreamEvent streamEvent3 = event("WSO2", 700L, 100L);
        StreamEvent streamEvent4 = event("WSO2", 700L, 100L);
        ConversionStreamEventChunk streamEventChunk =
                chunkOf(chain(streamEvent1, streamEvent2, streamEvent3, streamEvent4));
        // Remove the 2nd and 4th (non-adjacent) events.
        while (streamEventChunk.hasNext()) {
            count++;
            streamEventChunk.next();
            if (count == 2 || count == 4) {
                streamEventChunk.remove();
            }
        }
        StreamEvent streamEvent = streamEventChunk.getFirst();
        AssertJUnit.assertEquals(streamEvent1, streamEvent);
        AssertJUnit.assertEquals(streamEvent3, streamEvent.getNext());
        AssertJUnit.assertNull(streamEvent.getNext().getNext());
    }

    @Test(expectedExceptions = IllegalStateException.class)
    public void eventChunkRemoveTest5() {
        StreamEvent streamEvent1 = event("IBM", 700L, 100L);
        StreamEvent streamEvent2 = event("WSO2", 700L, 100L);
        ConversionStreamEventChunk streamEventChunk = chunkOf(chain(streamEvent1, streamEvent2));
        // remove() without a preceding next() must throw IllegalStateException.
        streamEventChunk.remove();
        streamEventChunk.remove();
    }
}
|
|
/*******************************************************************************
* Copyright 2011, 2012 Chris Banes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.handmark.pulltorefresh.library.internal;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Typeface;
import android.graphics.drawable.AnimationDrawable;
import android.graphics.drawable.Drawable;
import android.text.TextUtils;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.handmark.pulltorefresh.library.ILoadingLayout;
import com.handmark.pulltorefresh.library.PullToRefreshBase.Mode;
import com.handmark.pulltorefresh.library.PullToRefreshBase.Orientation;
import com.handmark.pulltorefresh.library.R;
/**
 * Base header/footer view shown while pulling and refreshing.
 *
 * NOTE(review): this file appears to be a corrupted merge of two different versions of the
 * pull-to-refresh library. As written it cannot compile:
 *  - the constructor references members/types never declared or imported here
 *    (mHeaderImageMatrix, mRotateAnimation, mRotationPivotX/Y, ScaleType, Matrix,
 *    RotateAnimation, Animation, ViewCompat, Utils, DEFAULT_ROTATION_ANIMATION_DURATION);
 *  - the final fields mHeaderProgress, mMode, mScrollDirection and the field mInnerLayout
 *    are never assigned (getContentSize() would NPE on mInnerLayout);
 *  - two incompatible Mode naming schemes are mixed (PULL_UP/DOWN_TO_REFRESH vs
 *    PULL_FROM_START/END);
 *  - callbacks invoked below (onPullImpl, pullToRefreshImpl, refreshingImpl,
 *    releaseToRefreshImpl, resetImpl, onLoadingDrawableSet, setSubHeaderText,
 *    setSubTextAppearance) are never declared in this file.
 * The code is left byte-identical; reconcile against a single upstream version before use.
 */
@SuppressLint("ViewConstructor")
public abstract class LoadingLayout extends FrameLayout implements ILoadingLayout {
    static final String LOG_TAG = "PullToRefresh-LoadingLayout";
    static final Interpolator ANIMATION_INTERPOLATOR = new LinearInterpolator();
    // NOTE(review): never assigned anywhere in this file; see class note.
    private FrameLayout mInnerLayout;
    protected final ImageView mHeaderImage;
    // NOTE(review): final but never assigned in the constructor; see class note.
    protected final ProgressBar mHeaderProgress;
    // True when the header drawable is an AnimationDrawable driven intrinsically.
    private boolean mUseIntrinsicAnimation;
    private final TextView mHeaderText;
    private final TextView mSubHeaderText;
    // NOTE(review): final but never assigned in the constructor; see class note.
    protected final Mode mMode;
    // NOTE(review): final but never assigned in the constructor; see class note.
    protected final Orientation mScrollDirection;
    // User-visible labels for the three pull states.
    private CharSequence mPullLabel;
    private CharSequence mRefreshingLabel;
    private CharSequence mReleaseLabel;

    public LoadingLayout(Context context, final Mode mode, final Orientation scrollDirection, TypedArray attrs) {
        super(context);
        // Inflate the shared header layout and grab its child views.
        ViewGroup header = (ViewGroup) LayoutInflater.from(context).inflate(
                R.layout.pull_to_refresh_header, this);
        mHeaderText = (TextView) header.findViewById(R.id.pull_to_refresh_text);
        mSubHeaderText = (TextView) header
                .findViewById(R.id.pull_to_refresh_sub_text);
        mHeaderImage = (ImageView) header
                .findViewById(R.id.pull_to_refresh_image);
        // NOTE(review): ScaleType / mHeaderImageMatrix / Matrix are not declared or imported
        // in this file — this section comes from an older library version.
        mHeaderImage.setScaleType(ScaleType.MATRIX);
        mHeaderImageMatrix = new Matrix();
        mHeaderImage.setImageMatrix(mHeaderImageMatrix);
        // NOTE(review): RotateAnimation / Animation / mRotateAnimation /
        // DEFAULT_ROTATION_ANIMATION_DURATION are likewise undeclared here.
        final Interpolator interpolator = new LinearInterpolator();
        mRotateAnimation = new RotateAnimation(0, 360,
                Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF,
                0.5f);
        mRotateAnimation.setInterpolator(interpolator);
        mRotateAnimation.setDuration(DEFAULT_ROTATION_ANIMATION_DURATION);
        mRotateAnimation.setRepeatCount(Animation.INFINITE);
        mRotateAnimation.setRepeatMode(Animation.RESTART);
        // NOTE(review): this switch uses the old PULL_UP/DOWN_TO_REFRESH constants, while the
        // drawable switch further down uses PULL_FROM_START/END — two different Mode enums.
        switch (mode) {
            case PULL_UP_TO_REFRESH:
                // Load in labels
                mPullLabel = context
                        .getString(R.string.pull_to_refresh_from_bottom_pull_label);
                mRefreshingLabel = context
                        .getString(R.string.pull_to_refresh_from_bottom_refreshing_label);
                mReleaseLabel = context
                        .getString(R.string.pull_to_refresh_from_bottom_release_label);
                break;
            case PULL_DOWN_TO_REFRESH:
            default:
                // Load in labels
                mPullLabel = context.getString(R.string.loading_more_label);
                mHeaderImage.setVisibility(View.GONE);
                mRefreshingLabel = context
                        .getString(R.string.pull_to_refresh_refreshing_label);
                mReleaseLabel = context
                        .getString(R.string.pull_to_refresh_release_label);
                break;
        }
        // NOTE(review): ptrHeaderBackground is applied twice (here via ViewCompat, and again
        // below via setBackgroundDrawable) — duplicated handling from the merge.
        if (attrs.hasValue(R.styleable.PullToRefresh_ptrHeaderBackground)) {
            Drawable background = attrs.getDrawable(R.styleable.PullToRefresh_ptrHeaderBackground);
            if (null != background) {
                ViewCompat.setBackground(this, background);
            }
        }
        if (attrs.hasValue(R.styleable.PullToRefresh_ptrHeaderTextAppearance)) {
            TypedValue styleID = new TypedValue();
            attrs.getValue(R.styleable.PullToRefresh_ptrHeaderTextAppearance, styleID);
            setTextAppearance(styleID.data);
        }
        if (attrs.hasValue(R.styleable.PullToRefresh_ptrSubHeaderTextAppearance)) {
            TypedValue styleID = new TypedValue();
            attrs.getValue(R.styleable.PullToRefresh_ptrSubHeaderTextAppearance, styleID);
            setSubTextAppearance(styleID.data);
        }
        // Text Color attrs need to be set after TextAppearance attrs
        if (attrs.hasValue(R.styleable.PullToRefresh_ptrHeaderTextColor)) {
            ColorStateList colors = attrs
                    .getColorStateList(R.styleable.PullToRefresh_ptrHeaderTextColor);
            setTextColor(null != colors ? colors : ColorStateList
                    .valueOf(0xFF000000));
        }
        if (attrs.hasValue(R.styleable.PullToRefresh_ptrHeaderSubTextColor)) {
            ColorStateList colors = attrs
                    .getColorStateList(R.styleable.PullToRefresh_ptrHeaderSubTextColor);
            setSubTextColor(null != colors ? colors : ColorStateList
                    .valueOf(0xFF000000));
        }
        if (attrs.hasValue(R.styleable.PullToRefresh_ptrHeaderBackground)) {
            Drawable background = attrs
                    .getDrawable(R.styleable.PullToRefresh_ptrHeaderBackground);
            if (null != background) {
                setBackgroundDrawable(background);
            }
        }
        // Try and get defined drawable from Attrs
        Drawable imageDrawable = null;
        if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawable)) {
            imageDrawable = attrs
                    .getDrawable(R.styleable.PullToRefresh_ptrDrawable);
        }
        // Check Specific Drawable from Attrs, these overrite the generic
        // drawable attr above
        switch (mode) {
            case PULL_FROM_START:
            default:
                if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawableStart)) {
                    imageDrawable = attrs.getDrawable(R.styleable.PullToRefresh_ptrDrawableStart);
                } else if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawableTop)) {
                    Utils.warnDeprecation("ptrDrawableTop", "ptrDrawableStart");
                    imageDrawable = attrs.getDrawable(R.styleable.PullToRefresh_ptrDrawableTop);
                }
                break;
            case PULL_FROM_END:
                if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawableEnd)) {
                    imageDrawable = attrs.getDrawable(R.styleable.PullToRefresh_ptrDrawableEnd);
                } else if (attrs.hasValue(R.styleable.PullToRefresh_ptrDrawableBottom)) {
                    Utils.warnDeprecation("ptrDrawableBottom", "ptrDrawableEnd");
                    imageDrawable = attrs.getDrawable(R.styleable.PullToRefresh_ptrDrawableBottom);
                }
                break;
        }
        // If we don't have a user defined drawable, load the default
        if (null == imageDrawable) {
            imageDrawable = context.getResources().getDrawable(
                    R.drawable.default_ptr_drawable);
        }
        // Set Drawable, and save width/height
        if (mode != Mode.PULL_UP_TO_REFRESH) {
            setLoadingDrawable(imageDrawable);
        }
        reset();
    }

    // Forces this layout to the given height (used while the header is dragged in/out).
    public final void setHeight(int height) {
        ViewGroup.LayoutParams lp = (ViewGroup.LayoutParams) getLayoutParams();
        lp.height = height;
        requestLayout();
    }

    // Forces this layout to the given width (horizontal-scroll variant of setHeight).
    public final void setWidth(int width) {
        ViewGroup.LayoutParams lp = (ViewGroup.LayoutParams) getLayoutParams();
        lp.width = width;
        requestLayout();
    }

    /**
     * Size of the inner content along the scroll direction.
     * NOTE(review): mInnerLayout is never assigned in this file — TODO confirm it is set
     * in the intended upstream version, otherwise this throws NullPointerException.
     */
    public final int getContentSize() {
        switch (mScrollDirection) {
            case HORIZONTAL:
                return mInnerLayout.getWidth();
            case VERTICAL:
            default:
                return mInnerLayout.getHeight();
        }
    }

    // Makes every child view INVISIBLE (not GONE, so layout size is preserved).
    public final void hideAllViews() {
        if (View.VISIBLE == mHeaderText.getVisibility()) {
            mHeaderText.setVisibility(View.INVISIBLE);
        }
        if (View.VISIBLE == mHeaderProgress.getVisibility()) {
            mHeaderProgress.setVisibility(View.INVISIBLE);
        }
        if (View.VISIBLE == mHeaderImage.getVisibility()) {
            mHeaderImage.setVisibility(View.INVISIBLE);
        }
        if (View.VISIBLE == mSubHeaderText.getVisibility()) {
            mSubHeaderText.setVisibility(View.INVISIBLE);
        }
    }

    // Called while the user is pulling; delegates unless an intrinsic animation runs.
    public final void onPull(float scaleOfLayout) {
        if (!mUseIntrinsicAnimation) {
            onPullImpl(scaleOfLayout);
        }
    }

    // Switches the header into the "pull to refresh" state.
    public final void pullToRefresh() {
        if (null != mHeaderText) {
            mHeaderText.setText(mPullLabel);
        }
        // Now call the callback
        pullToRefreshImpl();
    }

    // Switches the header into the "refreshing" state.
    public final void refreshing() {
        if (null != mHeaderText) {
            mHeaderText.setText(mRefreshingLabel);
        }
        if (mUseIntrinsicAnimation) {
            ((AnimationDrawable) mHeaderImage.getDrawable()).start();
        } else {
            // Now call the callback
            refreshingImpl();
        }
        if (null != mSubHeaderText) {
            mSubHeaderText.setVisibility(View.GONE);
        }
    }

    // Switches the header into the "release to refresh" state.
    public final void releaseToRefresh() {
        if (null != mHeaderText) {
            mHeaderText.setText(mReleaseLabel);
        }
        // Now call the callback
        releaseToRefreshImpl();
    }

    // Returns the header to its idle state; hides the sub-text when it is empty.
    public final void reset() {
        if (null != mHeaderText) {
            mHeaderText.setText(mPullLabel);
        }
        mHeaderImage.setVisibility(View.VISIBLE);
        if (mUseIntrinsicAnimation) {
            ((AnimationDrawable) mHeaderImage.getDrawable()).stop();
        } else {
            // Now call the callback
            resetImpl();
        }
        if (null != mSubHeaderText) {
            if (TextUtils.isEmpty(mSubHeaderText.getText())) {
                mSubHeaderText.setVisibility(View.GONE);
            } else {
                mSubHeaderText.setVisibility(View.VISIBLE);
            }
        }
    }

    @Override
    public void setLastUpdatedLabel(CharSequence label) {
        // NOTE(review): setSubHeaderText is not declared in this file; see class note.
        setSubHeaderText(label);
    }

    // Installs the header image drawable; AnimationDrawables animate intrinsically.
    public final void setLoadingDrawable(Drawable imageDrawable) {
        // Set Drawable
        mHeaderImage.setImageDrawable(imageDrawable);
        mUseIntrinsicAnimation = (imageDrawable instanceof AnimationDrawable);
        // Now call the callback
        onLoadingDrawableSet(imageDrawable);
    }

    public void setPullLabel(CharSequence pullLabel) {
        mPullLabel = pullLabel;
    }

    public void setRefreshingLabel(CharSequence refreshingLabel) {
        mRefreshingLabel = refreshingLabel;
    }

    public void setReleaseLabel(CharSequence releaseLabel) {
        mReleaseLabel = releaseLabel;
    }

    @Override
    public void setTextTypeface(Typeface tf) {
        mHeaderText.setTypeface(tf);
    }

    // Re-shows any child views previously hidden via hideAllViews().
    public final void showInvisibleViews() {
        if (View.INVISIBLE == mHeaderText.getVisibility()) {
            mHeaderText.setVisibility(View.VISIBLE);
        }
        if (View.INVISIBLE == mHeaderProgress.getVisibility()) {
            mHeaderProgress.setVisibility(View.VISIBLE);
        }
        if (View.INVISIBLE == mHeaderImage.getVisibility()) {
            mHeaderImage.setVisibility(View.VISIBLE);
        }
        if (View.INVISIBLE == mSubHeaderText.getVisibility()) {
            mSubHeaderText.setVisibility(View.VISIBLE);
        }
    }

    // Rotates the header image with pull progress.
    // NOTE(review): mHeaderImageMatrix / mRotationPivotX / mRotationPivotY are undeclared
    // in this file; see class note.
    public void onPullY(float scaleOfHeight) {
        mHeaderImageMatrix.setRotate(scaleOfHeight * 90, mRotationPivotX,
                mRotationPivotY);
        mHeaderImage.setImageMatrix(mHeaderImageMatrix);
    }

    private void setSubTextColor(ColorStateList color) {
        if (null != mSubHeaderText) {
            mSubHeaderText.setTextColor(color);
        }
    }

    // Applies the given text appearance to both header and sub-header text views.
    private void setTextAppearance(int value) {
        if (null != mHeaderText) {
            mHeaderText.setTextAppearance(getContext(), value);
        }
        if (null != mSubHeaderText) {
            mSubHeaderText.setTextAppearance(getContext(), value);
        }
    }

    // Applies the given text color to both header and sub-header text views.
    private void setTextColor(ColorStateList color) {
        if (null != mHeaderText) {
            mHeaderText.setTextColor(color);
        }
        if (null != mSubHeaderText) {
            mSubHeaderText.setTextColor(color);
        }
    }
}
|
|
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.io;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Ascii;
import com.google.common.base.Optional;
import com.google.common.base.Splitter;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.Charset;
import java.util.Iterator;
import java.util.List;
import javax.annotation.Nullable;
/**
* A readable source of characters, such as a text file. Unlike a {@link Reader}, a
* {@code CharSource} is not an open, stateful stream of characters that can be read and closed.
* Instead, it is an immutable <i>supplier</i> of {@code Reader} instances.
*
* <p>{@code CharSource} provides two kinds of methods:
* <ul>
* <li><b>Methods that return a reader:</b> These methods should return a <i>new</i>, independent
* instance each time they are called. The caller is responsible for ensuring that the returned
* reader is closed.
* <li><b>Convenience methods:</b> These are implementations of common operations that are typically
* implemented by opening a reader using one of the methods in the first category, doing
* something and finally closing the reader that was opened.
* </ul>
*
* <p>Several methods in this class, such as {@link #readLines()}, break the contents of the source
* into lines. Like {@link BufferedReader}, these methods break lines on any of {@code \n},
* {@code \r} or {@code \r\n}, do not include the line separator in each line and do not consider
* there to be an empty line at the end if the contents are terminated with a line separator.
*
* <p>Any {@link ByteSource} containing text encoded with a specific {@linkplain Charset character
* encoding} may be viewed as a {@code CharSource} using {@link ByteSource#asCharSource(Charset)}.
*
* @since 14.0
* @author Colin Decker
*/
@GwtIncompatible
public abstract class CharSource {
/**
 * Constructor for use by subclasses. {@code CharSource} itself is abstract; concrete
 * instances come from subclasses or from views such as
 * {@link ByteSource#asCharSource(Charset)}.
 */
protected CharSource() {}
/**
 * Returns a {@link ByteSource} view of this char source: characters read from this source are
 * encoded to bytes using the given {@link Charset}.
 *
 * <p>If {@link ByteSource#asCharSource} is called on the returned source with the same charset,
 * the default implementation of this method will ensure that the original {@code CharSource} is
 * returned, rather than round-trip encoding. Subclasses that override this method should behave
 * the same way.
 *
 * @since 20.0
 */
@Beta
public ByteSource asByteSource(Charset charset) {
    ByteSource encodedView = new AsByteSource(charset);
    return encodedView;
}
/**
 * Opens a new {@link Reader} for reading from this source. This method should return a new,
 * independent reader each time it is called; implementations must not hand out the same
 * reader instance twice.
 *
 * <p>The caller is responsible for ensuring that the returned reader is closed.
 *
 * @throws IOException if an I/O error occurs in the process of opening the reader
 */
public abstract Reader openStream() throws IOException;
/**
 * Opens a new {@link BufferedReader} for reading from this source. This method should return a
 * new, independent reader each time it is called.
 *
 * <p>The caller is responsible for ensuring that the returned reader is closed.
 *
 * @throws IOException if an I/O error occurs in the process of opening the reader
 */
public BufferedReader openBufferedStream() throws IOException {
    Reader underlying = openStream();
    if (underlying instanceof BufferedReader) {
        // Already buffered; avoid double-wrapping.
        return (BufferedReader) underlying;
    }
    return new BufferedReader(underlying);
}
/**
 * Returns the size of this source in chars, if the size can be easily determined without actually
 * opening the data stream.
 *
 * <p>The default implementation returns {@link Optional#absent}. Some sources, such as a
 * {@code CharSequence}, may return a non-absent value. Note that in such cases, it is
 * <i>possible</i> that this method will return a different number of chars than would be returned
 * by reading all of the chars.
 *
 * <p>Additionally, for mutable sources such as {@code StringBuilder}s, a subsequent read may
 * return a different number of chars if the contents are changed.
 *
 * @since 19.0
 */
@Beta
public Optional<Long> lengthIfKnown() {
    // No cheap size information by default; subclasses that can determine it override this.
    return Optional.absent();
}
  /**
   * Returns the length of this source in chars, even if doing so requires opening and traversing an
   * entire stream. To avoid a potentially expensive operation, see {@link #lengthIfKnown}.
   *
   * <p>The default implementation calls {@link #lengthIfKnown} and returns the value if present. If
   * absent, it will fall back to a heavyweight operation that will open a stream,
   * {@link Reader#skip(long) skip} to the end of the stream, and return the total number of chars
   * that were skipped.
   *
   * <p>Note that for sources that implement {@link #lengthIfKnown} to provide a more efficient
   * implementation, it is <i>possible</i> that this method will return a different number of chars
   * than would be returned by reading all of the chars.
   *
   * <p>In either case, for mutable sources such as files, a subsequent read may return a different
   * number of chars if the contents are changed.
   *
   * @throws IOException if an I/O error occurs in the process of reading the length of this source
   * @since 19.0
   */
  @Beta
  public long length() throws IOException {
    // Fast path: use the cheap length if the subclass can provide one.
    Optional<Long> lengthIfKnown = lengthIfKnown();
    if (lengthIfKnown.isPresent()) {
      return lengthIfKnown.get();
    }
    // Slow path: open the stream and skip to EOF. The Closer/rethrow pattern preserves the
    // original Throwable exactly; do not replace with try-with-resources (different
    // suppressed-exception behavior).
    Closer closer = Closer.create();
    try {
      Reader reader = closer.register(openStream());
      return countBySkipping(reader);
    } catch (Throwable e) {
      throw closer.rethrow(e);
    } finally {
      closer.close();
    }
  }
private long countBySkipping(Reader reader) throws IOException {
long count = 0;
long read;
while ((read = reader.skip(Long.MAX_VALUE)) != 0) {
count += read;
}
return count;
}
  /**
   * Appends the contents of this source to the given {@link Appendable} (such as a {@link Writer}).
   * Does not close {@code appendable} if it is {@code Closeable}.
   *
   * @return the number of characters copied
   * @throws IOException if an I/O error occurs in the process of reading from this source or
   *     writing to {@code appendable}
   */
  @CanIgnoreReturnValue
  public long copyTo(Appendable appendable) throws IOException {
    checkNotNull(appendable);
    // Only the reader opened here is registered for closing; the appendable is the
    // caller's responsibility (per the contract above).
    Closer closer = Closer.create();
    try {
      Reader reader = closer.register(openStream());
      return CharStreams.copy(reader, appendable);
    } catch (Throwable e) {
      throw closer.rethrow(e);
    } finally {
      closer.close();
    }
  }
  /**
   * Copies the contents of this source to the given sink.
   *
   * @return the number of characters copied
   * @throws IOException if an I/O error occurs in the process of reading from this source or
   *     writing to {@code sink}
   */
  @CanIgnoreReturnValue
  public long copyTo(CharSink sink) throws IOException {
    checkNotNull(sink);
    // Both streams are registered with the Closer, so both are closed (in reverse order)
    // regardless of which side fails.
    Closer closer = Closer.create();
    try {
      Reader reader = closer.register(openStream());
      Writer writer = closer.register(sink.openStream());
      return CharStreams.copy(reader, writer);
    } catch (Throwable e) {
      throw closer.rethrow(e);
    } finally {
      closer.close();
    }
  }
  /**
   * Reads the contents of this source as a string.
   *
   * @throws IOException if an I/O error occurs in the process of reading from this source
   */
  public String read() throws IOException {
    Closer closer = Closer.create();
    try {
      Reader reader = closer.register(openStream());
      // CharStreams.toString drains the reader fully into a String.
      return CharStreams.toString(reader);
    } catch (Throwable e) {
      throw closer.rethrow(e);
    } finally {
      closer.close();
    }
  }
  /**
   * Reads the first line of this source as a string. Returns {@code null} if this source is empty.
   *
   * <p>Like {@link BufferedReader}, this method breaks lines on any of {@code \n}, {@code \r} or
   * {@code \r\n}, does not include the line separator in the returned line and does not consider
   * there to be an extra empty line at the end if the content is terminated with a line separator.
   *
   * @throws IOException if an I/O error occurs in the process of reading from this source
   */
  @Nullable
  public String readFirstLine() throws IOException {
    Closer closer = Closer.create();
    try {
      BufferedReader reader = closer.register(openBufferedStream());
      // BufferedReader.readLine() returns null on an empty stream, matching the contract above.
      return reader.readLine();
    } catch (Throwable e) {
      throw closer.rethrow(e);
    } finally {
      closer.close();
    }
  }
/**
* Reads all the lines of this source as a list of strings. The returned list will be empty if
* this source is empty.
*
* <p>Like {@link BufferedReader}, this method breaks lines on any of {@code \n}, {@code \r} or
* {@code \r\n}, does not include the line separator in the returned lines and does not consider
* there to be an extra empty line at the end if the content is terminated with a line separator.
*
* @throws IOException if an I/O error occurs in the process of reading from this source
*/
public ImmutableList<String> readLines() throws IOException {
Closer closer = Closer.create();
try {
BufferedReader reader = closer.register(openBufferedStream());
List<String> result = Lists.newArrayList();
String line;
while ((line = reader.readLine()) != null) {
result.add(line);
}
return ImmutableList.copyOf(result);
} catch (Throwable e) {
throw closer.rethrow(e);
} finally {
closer.close();
}
}
  /**
   * Reads lines of text from this source, processing each line as it is read using the given
   * {@link LineProcessor processor}. Stops when all lines have been processed or the processor
   * returns {@code false} and returns the result produced by the processor.
   *
   * <p>Like {@link BufferedReader}, this method breaks lines on any of {@code \n}, {@code \r} or
   * {@code \r\n}, does not include the line separator in the lines passed to the {@code processor}
   * and does not consider there to be an extra empty line at the end if the content is terminated
   * with a line separator.
   *
   * @throws IOException if an I/O error occurs in the process of reading from this source or if
   *     {@code processor} throws an {@code IOException}
   * @since 16.0
   */
  @Beta
  @CanIgnoreReturnValue // some processors won't return a useful result
  public <T> T readLines(LineProcessor<T> processor) throws IOException {
    checkNotNull(processor);
    Closer closer = Closer.create();
    try {
      Reader reader = closer.register(openStream());
      // CharStreams.readLines handles the line splitting and early-exit semantics.
      return CharStreams.readLines(reader, processor);
    } catch (Throwable e) {
      throw closer.rethrow(e);
    } finally {
      closer.close();
    }
  }
  /**
   * Returns whether the source has zero chars. The default implementation returns true if
   * {@link #lengthIfKnown} returns zero, falling back to opening a stream and checking for EOF if
   * the length is not known.
   *
   * <p>Note that, in cases where {@code lengthIfKnown} returns zero, it is <i>possible</i> that
   * chars are actually available for reading. This means that a source may return {@code true} from
   * {@code isEmpty()} despite having readable content.
   *
   * @throws IOException if an I/O error occurs
   * @since 15.0
   */
  public boolean isEmpty() throws IOException {
    // Fast path: a known zero length means empty without any I/O.
    Optional<Long> lengthIfKnown = lengthIfKnown();
    if (lengthIfKnown.isPresent() && lengthIfKnown.get() == 0L) {
      return true;
    }
    // Otherwise, open the stream and check whether the very first read hits EOF.
    Closer closer = Closer.create();
    try {
      Reader reader = closer.register(openStream());
      return reader.read() == -1;
    } catch (Throwable e) {
      throw closer.rethrow(e);
    } finally {
      closer.close();
    }
  }
  /**
   * Concatenates multiple {@link CharSource} instances into a single source. Streams returned from
   * the source will contain the concatenated data from the streams of the underlying sources.
   *
   * <p>Only one underlying stream will be open at a time. Closing the concatenated stream will
   * close the open underlying stream.
   *
   * @param sources the sources to concatenate
   * @return a {@code CharSource} containing the concatenated data
   * @since 15.0
   */
  public static CharSource concat(Iterable<? extends CharSource> sources) {
    // The iterable is kept live: sources are only opened when the concatenated stream is read.
    return new ConcatenatedCharSource(sources);
  }
  /**
   * Concatenates multiple {@link CharSource} instances into a single source. Streams returned from
   * the source will contain the concatenated data from the streams of the underlying sources.
   *
   * <p>Only one underlying stream will be open at a time. Closing the concatenated stream will
   * close the open underlying stream.
   *
   * <p>Note: The input {@code Iterator} will be copied to an {@code ImmutableList} when this method
   * is called. This will fail if the iterator is infinite and may cause problems if the iterator
   * eagerly fetches data for each source when iterated (rather than producing sources that only
   * load data through their streams). Prefer using the {@link #concat(Iterable)} overload if
   * possible.
   *
   * @param sources the sources to concatenate
   * @return a {@code CharSource} containing the concatenated data
   * @throws NullPointerException if any of {@code sources} is {@code null}
   * @since 15.0
   */
  public static CharSource concat(Iterator<? extends CharSource> sources) {
    // Snapshot the one-shot iterator into a list so the result can be read repeatedly.
    return concat(ImmutableList.copyOf(sources));
  }
  /**
   * Concatenates multiple {@link CharSource} instances into a single source. Streams returned from
   * the source will contain the concatenated data from the streams of the underlying sources.
   *
   * <p>Only one underlying stream will be open at a time. Closing the concatenated stream will
   * close the open underlying stream.
   *
   * @param sources the sources to concatenate
   * @return a {@code CharSource} containing the concatenated data
   * @throws NullPointerException if any of {@code sources} is {@code null}
   * @since 15.0
   */
  public static CharSource concat(CharSource... sources) {
    // Defensively copy the varargs array so later mutation by the caller has no effect.
    return concat(ImmutableList.copyOf(sources));
  }
  /**
   * Returns a view of the given character sequence as a {@link CharSource}. The behavior of the
   * returned {@code CharSource} and any {@code Reader} instances created by it is unspecified if
   * the {@code charSequence} is mutated while it is being read, so don't do that.
   *
   * @since 15.0 (since 14.0 as {@code CharStreams.asCharSource(String)})
   */
  public static CharSource wrap(CharSequence charSequence) {
    // A view, not a copy: the sequence is read directly when the source is opened.
    return new CharSequenceCharSource(charSequence);
  }
  /**
   * Returns an immutable {@link CharSource} that contains no characters.
   *
   * @since 15.0
   */
  public static CharSource empty() {
    // Shared singleton; the empty source is stateless and immutable.
    return EmptyCharSource.INSTANCE;
  }
  /**
   * A byte source that reads chars from this source and encodes them as bytes using a charset.
   */
  private final class AsByteSource extends ByteSource {
    // Charset used to encode chars to bytes when this byte source is opened.
    final Charset charset;
    AsByteSource(Charset charset) {
      this.charset = checkNotNull(charset);
    }
    @Override
    public CharSource asCharSource(Charset charset) {
      // Round-trip optimization: decoding with the same charset returns the original
      // CharSource instead of stacking an encode/decode pair.
      if (charset.equals(this.charset)) {
        return CharSource.this;
      }
      return super.asCharSource(charset);
    }
    @Override
    public InputStream openStream() throws IOException {
      // 8192 is the encoder buffer size, matching the usual java.io buffer default.
      return new ReaderInputStream(CharSource.this.openStream(), charset, 8192);
    }
    @Override
    public String toString() {
      return CharSource.this.toString() + ".asByteSource(" + charset + ")";
    }
  }
  /** A {@link CharSource} backed by an in-memory {@link CharSequence}. */
  private static class CharSequenceCharSource extends CharSource {
    // \r\n must come first in the pattern so a CRLF pair is consumed as one separator.
    private static final Splitter LINE_SPLITTER = Splitter.onPattern("\r\n|\n|\r");
    private final CharSequence seq;
    protected CharSequenceCharSource(CharSequence seq) {
      this.seq = checkNotNull(seq);
    }
    @Override
    public Reader openStream() {
      return new CharSequenceReader(seq);
    }
    @Override
    public String read() {
      // No I/O needed: the contents are already in memory.
      return seq.toString();
    }
    @Override
    public boolean isEmpty() {
      return seq.length() == 0;
    }
    @Override
    public long length() {
      return seq.length();
    }
    @Override
    public Optional<Long> lengthIfKnown() {
      return Optional.of((long) seq.length());
    }
    /**
     * Returns an iterable over the lines in the string. If the string ends in a newline, a final
     * empty string is not included to match the behavior of BufferedReader/LineReader.readLine().
     */
    private Iterable<String> lines() {
      return new Iterable<String>() {
        @Override
        public Iterator<String> iterator() {
          return new AbstractIterator<String>() {
            Iterator<String> lines = LINE_SPLITTER.split(seq).iterator();
            @Override
            protected String computeNext() {
              if (lines.hasNext()) {
                String next = lines.next();
                // skip last line if it's empty
                if (lines.hasNext() || !next.isEmpty()) {
                  return next;
                }
              }
              return endOfData();
            }
          };
        }
      };
    }
    @Override
    public String readFirstLine() {
      Iterator<String> lines = lines().iterator();
      return lines.hasNext() ? lines.next() : null;
    }
    @Override
    public ImmutableList<String> readLines() {
      return ImmutableList.copyOf(lines());
    }
    @Override
    public <T> T readLines(LineProcessor<T> processor) throws IOException {
      // Stop early when the processor signals it is done, mirroring CharStreams.readLines.
      for (String line : lines()) {
        if (!processor.processLine(line)) {
          break;
        }
      }
      return processor.getResult();
    }
    @Override
    public String toString() {
      // Truncate long sequences so toString stays readable in logs.
      return "CharSource.wrap(" + Ascii.truncate(seq, 30, "...") + ")";
    }
  }
  /** Singleton source with no characters; reuses the CharSequence implementation over "". */
  private static final class EmptyCharSource extends CharSequenceCharSource {
    private static final EmptyCharSource INSTANCE = new EmptyCharSource();
    private EmptyCharSource() {
      super("");
    }
    @Override
    public String toString() {
      return "CharSource.empty()";
    }
  }
  /** A source that reads the given sources one after another as a single stream. */
  private static final class ConcatenatedCharSource extends CharSource {
    private final Iterable<? extends CharSource> sources;
    ConcatenatedCharSource(Iterable<? extends CharSource> sources) {
      this.sources = checkNotNull(sources);
    }
    @Override
    public Reader openStream() throws IOException {
      // MultiReader opens one underlying reader at a time, advancing on EOF.
      return new MultiReader(sources.iterator());
    }
    @Override
    public boolean isEmpty() throws IOException {
      // Empty only if every component source is empty.
      for (CharSource source : sources) {
        if (!source.isEmpty()) {
          return false;
        }
      }
      return true;
    }
    @Override
    public Optional<Long> lengthIfKnown() {
      long result = 0L;
      for (CharSource source : sources) {
        Optional<Long> lengthIfKnown = source.lengthIfKnown();
        // If any component's length is unknown, the total is unknown.
        if (!lengthIfKnown.isPresent()) {
          return Optional.absent();
        }
        result += lengthIfKnown.get();
      }
      return Optional.of(result);
    }
    @Override
    public long length() throws IOException {
      long result = 0L;
      for (CharSource source : sources) {
        result += source.length();
      }
      return result;
    }
    @Override
    public String toString() {
      return "CharSource.concat(" + sources + ")";
    }
  }
}
|
|
/*
* Copyright 2015 Open Networking Laboratory
* Originally created by Pengfei Lu, Network and Cloud Computing Laboratory, Dalian University of Technology, China
* Advisers: Keqiu Li, Heng Qi and Haisheng Yu
* This work is supported by the State Key Program of National Natural Science of China(Grant No. 61432002)
* and Prospective Research Project on Future Networks in Jiangsu Future Networks Innovation Institute.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onos.acl;
import com.google.common.base.MoreObjects;
import org.onlab.packet.IPv4;
import org.onlab.packet.Ip4Prefix;
import org.onosproject.core.IdGenerator;
import java.util.Objects;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
 * An immutable ACL (access control list) rule. A rule matches IPv4 traffic by
 * source/destination CIDR prefix, IP protocol and destination transport-layer
 * port, and carries an {@link Action} (allow or deny).
 */
public final class AclRule {
    private final RuleId id;
    private final Ip4Prefix srcIp;
    private final Ip4Prefix dstIp;
    private final byte ipProto;
    private final short dstTpPort;
    private final Action action;
    private static IdGenerator idGenerator;

    /**
     * Enum type for ACL rule's action.
     */
    public enum Action {
        DENY, ALLOW
    }

    /**
     * Constructor for serializer. Leaves all fields null/zero; instances created
     * this way are only meant to be populated by deserialization.
     */
    private AclRule() {
        this.id = null;
        this.srcIp = null;
        this.dstIp = null;
        this.ipProto = 0;
        this.dstTpPort = 0;
        this.action = null;
    }

    /**
     * Create a new ACL rule.
     *
     * @param srcIp source IP address
     * @param dstIp destination IP address
     * @param ipProto IP protocol
     * @param dstTpPort destination transport layer port
     * @param action ACL rule's action
     */
    private AclRule(Ip4Prefix srcIp,
                    Ip4Prefix dstIp,
                    byte ipProto,
                    short dstTpPort,
                    Action action) {
        checkState(idGenerator != null, "Id generator is not bound.");
        this.id = RuleId.valueOf(idGenerator.getNewId());
        this.srcIp = srcIp;
        this.dstIp = dstIp;
        this.ipProto = ipProto;
        this.dstTpPort = dstTpPort;
        this.action = action;
    }

    /**
     * Checks if the first CIDR address is in (or the same as) the second CIDR address.
     * A {@code null} second address acts as a wildcard and matches everything;
     * a {@code null} first address matches nothing except a wildcard.
     */
    private boolean checkCIDRinCIDR(Ip4Prefix cidrAddr1, Ip4Prefix cidrAddr2) {
        if (cidrAddr2 == null) {
            return true;
        } else if (cidrAddr1 == null) {
            return false;
        }
        if (cidrAddr1.prefixLength() < cidrAddr2.prefixLength()) {
            return false;
        }
        // Compare the network portions under cidrAddr2's prefix mask. The previous
        // shift-based implementation (x >> offset then << offset) was broken for a
        // /0 prefix: Java takes int shift counts mod 32, so shifting by 32 was a
        // no-op and a match-all rule failed to match. An explicit mask fixes that.
        int offset = 32 - cidrAddr2.prefixLength();
        int mask = (offset >= 32) ? 0 : (-1 << offset);
        return (cidrAddr1.address().toInt() & mask) == (cidrAddr2.address().toInt() & mask);
    }

    /**
     * Checks if this ACL rule matches the given ACL rule.
     *
     * @param r ACL rule to check against
     * @return true if this ACL rule matches the given ACL rule
     */
    public boolean checkMatch(AclRule r) {
        // A zero port/protocol and a null prefix in r act as wildcards.
        return (this.dstTpPort == r.dstTpPort || r.dstTpPort == 0)
                && (this.ipProto == r.ipProto || r.ipProto == 0)
                && (checkCIDRinCIDR(this.srcIp(), r.srcIp()))
                && (checkCIDRinCIDR(this.dstIp(), r.dstIp()));
    }

    /**
     * Returns a new ACL rule builder.
     *
     * @return ACL rule builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Builder of an ACL rule.
     */
    public static final class Builder {
        private Ip4Prefix srcIp = null;
        private Ip4Prefix dstIp = null;
        private byte ipProto = 0;
        private short dstTpPort = 0;
        private Action action = Action.DENY;

        private Builder() {
            // Hide constructor
        }

        /**
         * Sets the source IP address for the ACL rule that will be built.
         *
         * @param srcIp source IP address to use for built ACL rule
         * @return this builder
         */
        public Builder srcIp(String srcIp) {
            this.srcIp = Ip4Prefix.valueOf(srcIp);
            return this;
        }

        /**
         * Sets the destination IP address for the ACL rule that will be built.
         *
         * @param dstIp destination IP address to use for built ACL rule
         * @return this builder
         */
        public Builder dstIp(String dstIp) {
            this.dstIp = Ip4Prefix.valueOf(dstIp);
            return this;
        }

        /**
         * Sets the IP protocol for the ACL rule that will be built.
         *
         * @param ipProto IP protocol to use for built ACL rule
         * @return this builder
         */
        public Builder ipProto(byte ipProto) {
            this.ipProto = ipProto;
            return this;
        }

        /**
         * Sets the destination transport layer port for the ACL rule that will be built.
         * The port is only recorded when the protocol is TCP or UDP; for other
         * protocols the value is silently ignored.
         *
         * @param dstTpPort destination transport layer port to use for built ACL rule
         * @return this builder
         */
        public Builder dstTpPort(short dstTpPort) {
            if ((ipProto == IPv4.PROTOCOL_TCP || ipProto == IPv4.PROTOCOL_UDP)) {
                this.dstTpPort = dstTpPort;
            }
            return this;
        }

        /**
         * Sets the action for the ACL rule that will be built.
         *
         * @param action action to use for built ACL rule
         * @return this builder
         */
        public Builder action(Action action) {
            this.action = action;
            return this;
        }

        /**
         * Builds an ACL rule from the accumulated parameters.
         *
         * @return ACL rule instance
         * @throws IllegalStateException if neither srcIp nor dstIp is assigned, or
         *         if ipProto is not TCP, UDP, ICMP or unset
         */
        public AclRule build() {
            // At least one of srcIp/dstIp must be present; a missing prefix acts as a
            // wildcard in checkMatch(). The previous condition used &&, which
            // contradicted this error message and rejected valid one-sided rules.
            checkState(srcIp != null || dstIp != null, "Either srcIp or dstIp must be assigned.");
            checkState(ipProto == 0 || ipProto == IPv4.PROTOCOL_ICMP
                    || ipProto == IPv4.PROTOCOL_TCP || ipProto == IPv4.PROTOCOL_UDP,
                    "ipProto must be assigned to TCP, UDP, or ICMP.");
            return new AclRule(
                    srcIp,
                    dstIp,
                    ipProto,
                    dstTpPort,
                    action
            );
        }
    }

    /**
     * Binds an id generator for unique ACL rule id generation.
     *
     * Note: A generator cannot be bound if there is already a generator bound.
     *
     * @param newIdGenerator id generator
     */
    public static void bindIdGenerator(IdGenerator newIdGenerator) {
        checkState(idGenerator == null, "Id generator is already bound.");
        idGenerator = checkNotNull(newIdGenerator);
    }

    public RuleId id() {
        return id;
    }

    public Ip4Prefix srcIp() {
        return srcIp;
    }

    public Ip4Prefix dstIp() {
        return this.dstIp;
    }

    public byte ipProto() {
        return ipProto;
    }

    public short dstTpPort() {
        return dstTpPort;
    }

    public Action action() {
        return action;
    }

    @Override
    public int hashCode() {
        // NOTE(review): hashes id.fingerprint() while equals() compares the RuleId
        // itself — assumed consistent with RuleId's equals; confirm against RuleId.
        return Objects.hash(action,
                id.fingerprint(),
                ipProto,
                srcIp,
                dstIp,
                dstTpPort);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj instanceof AclRule) {
            AclRule that = (AclRule) obj;
            // Primitive fields compared directly; Objects.equals on a byte/short
            // only autoboxed both sides to do the same comparison.
            return Objects.equals(id, that.id) &&
                    Objects.equals(srcIp, that.srcIp) &&
                    Objects.equals(dstIp, that.dstIp) &&
                    this.ipProto == that.ipProto &&
                    this.dstTpPort == that.dstTpPort &&
                    Objects.equals(action, that.action);
        }
        return false;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .omitNullValues()
                .add("id", id)
                .add("srcIp", srcIp)
                .add("dstIp", dstIp)
                .add("ipProto", ipProto)
                .add("dstTpPort", dstTpPort)
                .add("action", action)
                .toString();
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.lsm.invertedindex.search;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.ListIterator;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.dataflow.common.io.RunFileReader;
import org.apache.hyracks.dataflow.common.io.RunFileWriter;
import org.apache.hyracks.dataflow.std.buffermanager.BufferManagerBackedVSizeFrame;
import org.apache.hyracks.dataflow.std.buffermanager.ISimpleFrameBufferManager;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAccessor;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAppender;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeTupleReference;
/**
* Disk-based or in-memory based storage for intermediate and final results of inverted-index
* searches. One frame is dedicated to I/O operation for disk operation mode.
*/
public class InvertedIndexSearchResult {
// The size of count field for each element. Currently, we use an integer value.
protected static final int ELEMENT_COUNT_SIZE = 4;
// I/O buffer's index in the buffers
protected static final int IO_BUFFER_IDX = 0;
protected static final String FILE_PREFIX = "InvertedIndexSearchResult";
protected final IHyracksTaskContext ctx;
protected final FixedSizeFrameTupleAppender appender;
protected final FixedSizeFrameTupleAccessor accessor;
protected final FixedSizeTupleReference tuple;
protected final ISimpleFrameBufferManager bufferManager;
protected ITypeTraits[] typeTraits;
protected int invListElementSize;
protected int currentWriterBufIdx;
protected int currentReaderBufIdx;
protected int numResults;
protected int numPossibleElementPerPage;
// Read and Write I/O buffer
protected IFrame ioBufferFrame = null;
protected ByteBuffer ioBuffer = null;
// Buffers for in-memory operation mode. The first buffer is the ioBuffer.
// In case of the final search result, we will use only use the first buffer. No file will be created.
protected ArrayList<ByteBuffer> buffers;
protected RunFileWriter searchResultWriter;
protected RunFileReader searchResultReader;
protected boolean isInMemoryOpMode;
protected boolean isInReadMode;
protected boolean isWriteFinished;
protected boolean isFileOpened;
    /**
     * Creates a search result over inverted-list elements with the given type traits.
     * Allocates exactly one I/O frame up front; additional in-memory buffers are only
     * acquired later in prepareWrite().
     *
     * @throws HyracksDataException if the single I/O frame cannot be allocated
     */
    public InvertedIndexSearchResult(ITypeTraits[] invListFields, IHyracksTaskContext ctx,
            ISimpleFrameBufferManager bufferManager) throws HyracksDataException {
        initTypeTraits(invListFields);
        this.ctx = ctx;
        appender = new FixedSizeFrameTupleAppender(ctx.getInitialFrameSize(), typeTraits);
        accessor = new FixedSizeFrameTupleAccessor(ctx.getInitialFrameSize(), typeTraits);
        tuple = new FixedSizeTupleReference(typeTraits);
        this.bufferManager = bufferManager;
        this.isInReadMode = false;
        this.isWriteFinished = false;
        this.isInMemoryOpMode = false;
        this.isFileOpened = false;
        this.ioBufferFrame = null;
        this.ioBuffer = null;
        this.buffers = null;
        this.currentWriterBufIdx = 0;
        this.currentReaderBufIdx = 0;
        this.numResults = 0;
        calculateNumElementPerPage();
        // Allocates one frame for read/write operation.
        prepareIOBuffer();
    }
/**
* Initializes the element type in the search result. In addition to the element, we will keep one more integer
* per element to keep its occurrence count.
*/
protected void initTypeTraits(ITypeTraits[] invListFields) {
typeTraits = new ITypeTraits[invListFields.length + 1];
int tmp = 0;
for (int i = 0; i < invListFields.length; i++) {
typeTraits[i] = invListFields[i];
tmp += invListFields[i].getFixedLength();
}
invListElementSize = tmp;
// Integer for counting occurrences.
typeTraits[invListFields.length] = IntegerPointable.TYPE_TRAITS;
}
    /**
     * Prepares the write operation. Tries to allocate buffers for the expected number of pages.
     * If that is possible, all operations will be executed in memory.
     * If not, all operations will use a file on disk except for the final search result.
     * A result of the final search result will be always in memory.
     */
    public void prepareWrite(int numExpectedPages) throws HyracksDataException {
        // No-op if a read has begun, the write already finished, or a writer exists.
        if (isInReadMode || isWriteFinished || searchResultWriter != null) {
            return;
        }
        // Intermediate results? disk or in-memory based
        // Allocates more buffers.
        isInMemoryOpMode = tryAllocateBuffers(numExpectedPages);
        if (!isInMemoryOpMode) {
            // Not enough number of buffers. Switch to the file I/O mode.
            createAndOpenFile();
        }
        appender.reset(ioBuffer);
        isWriteFinished = false;
    }
    /**
     * Appends an element and its count to the current frame of this result. The boolean value is necessary for
     * the final search result case since the append() of that class is overriding this method.
     *
     * @throws HyracksDataException if the element or count cannot be added to the frame
     */
    public boolean append(ITupleReference invListElement, int count) throws HyracksDataException {
        ByteBuffer currentBuffer;
        // Moves to the next page if the current page is full.
        if (!appender.hasSpace()) {
            currentWriterBufIdx++;
            if (isInMemoryOpMode) {
                // In-memory mode: buffers were pre-allocated in prepareWrite().
                currentBuffer = buffers.get(currentWriterBufIdx);
            } else {
                // Disk mode: flush the full I/O buffer to the file, then reuse it.
                searchResultWriter.nextFrame(ioBuffer);
                currentBuffer = ioBuffer;
            }
            appender.reset(currentBuffer);
        }
        // Appends inverted-list element.
        if (!appender.append(invListElement.getFieldData(0), invListElement.getFieldStart(0), invListElementSize)) {
            throw HyracksDataException.create(ErrorCode.CANNOT_ADD_ELEMENT_TO_INVERTED_INDEX_SEARCH_RESULT);
        }
        // Appends count.
        if (!appender.append(count)) {
            throw HyracksDataException.create(ErrorCode.CANNOT_ADD_ELEMENT_TO_INVERTED_INDEX_SEARCH_RESULT);
        }
        appender.incrementTupleCount(1);
        numResults++;
        // Always true for the intermediate result. An append should not fail.
        return true;
    }
    /**
     * Finalizes the write operation. After this, no more write operation can be conducted.
     * Idempotent: calling it again is a no-op.
     */
    public void finalizeWrite() throws HyracksDataException {
        if (isWriteFinished) {
            return;
        }
        // For in-memory operation (including the final result), no specific operations are required.
        // For disk-based operation, needs to close the writer.
        if (!isInMemoryOpMode && searchResultWriter != null) {
            // Flush the last (possibly partial) frame before closing.
            searchResultWriter.nextFrame(ioBuffer);
            searchResultWriter.close();
        }
        isWriteFinished = true;
    }
    /**
     * Prepares a read operation. Idempotent: a second call is a no-op.
     */
    public void prepareResultRead() throws HyracksDataException {
        if (isInReadMode) {
            return;
        }
        // No specific operation is required for in-memory mode (including the final result).
        if (!isInMemoryOpMode && searchResultWriter != null) {
            // Ensure the writer side is flushed/closed before opening a reader on the file.
            if (!isWriteFinished) {
                finalizeWrite();
            }
            searchResultReader = searchResultWriter.createDeleteOnCloseReader();
            searchResultReader.open();
            // The backing temp file is removed once the reader is closed.
            searchResultReader.setDeleteAfterClose(true);
        }
        currentReaderBufIdx = 0;
        isInReadMode = true;
    }
/**
* Gets the next frame of the current result file. A caller should make sure that initResultRead() is called first.
*/
public ByteBuffer getNextFrame() throws HyracksDataException {
ByteBuffer returnedBuffer = null;
if (isInMemoryOpMode) {
// In-memory mode for an intermediate search result
returnedBuffer = buffers.get(currentReaderBufIdx);
currentReaderBufIdx++;
} else if (searchResultReader != null && searchResultReader.nextFrame(ioBufferFrame)) {
// Disk-based mode for an intermediate search result
returnedBuffer = ioBufferFrame.getBuffer();
}
return returnedBuffer;
}
    /**
     * Finishes reading the result and frees the buffer.
     *
     * @param deallocateIOBufferNeeded whether the dedicated I/O frame should also be released
     */
    public void closeResultRead(boolean deallocateIOBufferNeeded) throws HyracksDataException {
        if (isInMemoryOpMode) {
            // In-memory mode? Releases all buffers for an intermediate search result.
            deallocateBuffers();
        } else if (searchResultReader != null) {
            // Disk mode? Closes the file handle (this should delete the file also.)
            searchResultReader.close();
        }
        // Deallocates I/O buffer if requested.
        if (deallocateIOBufferNeeded) {
            deallocateIOBuffer();
        }
    }
    /** Returns the index of the buffer the writer is currently filling. */
    public int getCurrentBufferIndex() {
        return currentWriterBufIdx;
    }
    /** Returns the element type traits, including the trailing occurrence-count field. */
    public ITypeTraits[] getTypeTraits() {
        return typeTraits;
    }
    /** Returns the number of elements appended so far. */
    public int getNumResults() {
        return numResults;
    }
    /**
     * Deletes any associated file and deallocates all buffers.
     */
    public void close() throws HyracksDataException {
        if (isInMemoryOpMode) {
            deallocateBuffers();
        } else {
            if (searchResultReader != null) {
                // Reader was opened with delete-after-close, so closing removes the file.
                searchResultReader.close();
            } else if (searchResultWriter != null) {
                // No reader was ever opened; erase the written file directly.
                searchResultWriter.erase();
            }
        }
        deallocateIOBuffer();
    }
    /**
     * Returns this object to its post-construction state so it can be reused:
     * removes any temp file, releases extra buffers, and clears all flags/counters.
     * The single I/O frame is kept (only cleared).
     */
    public void reset() throws HyracksDataException {
        // Removes the file if it was in the disk op mode.
        if (searchResultReader != null) {
            searchResultReader.close();
        } else if (searchResultWriter != null) {
            searchResultWriter.erase();
        } else if (buffers.size() > 1) {
            // In-memory mode? Deallocates all buffers.
            deallocateBuffers();
        }
        // Resets the I/O buffer.
        clearBuffer(ioBuffer);
        searchResultWriter = null;
        searchResultReader = null;
        isInReadMode = false;
        isWriteFinished = false;
        isInMemoryOpMode = false;
        isFileOpened = false;
        currentWriterBufIdx = 0;
        currentReaderBufIdx = 0;
        numResults = 0;
    }
/**
* Gets the expected number of pages if all elements are created as a result.
* An assumption is that there are no common elements between the previous result and the cursor.
*/
public int getExpectedNumPages(int numExpectedElements) {
return (int) Math.ceil((double) numExpectedElements / numPossibleElementPerPage);
}
// Gets the number of possible elements per page based on the inverted list element size.
protected void calculateNumElementPerPage() {
int frameSize = ctx.getInitialFrameSize();
// The count of Minframe, and the count of tuples in a frame should be deducted.
frameSize = frameSize - FixedSizeFrameTupleAppender.MINFRAME_COUNT_SIZE
- FixedSizeFrameTupleAppender.TUPLE_COUNT_SIZE;
numPossibleElementPerPage = (int) Math.floor((double) frameSize / (invListElementSize + ELEMENT_COUNT_SIZE));
}
    /**
     * Allocates the buffer for read/write operation and initializes the buffers array that will be used keep a result.
     * If the frame already exists, it is only cleared.
     */
    protected void prepareIOBuffer() throws HyracksDataException {
        if (ioBufferFrame != null) {
            clearBuffer(ioBuffer);
        } else {
            ioBufferFrame = new BufferManagerBackedVSizeFrame(ctx, bufferManager);
            ioBuffer = ioBufferFrame.getBuffer();
            if (ioBuffer == null) {
                // One frame should be allocated for conducting read/write
                // operation. Otherwise, can't store the result.
                throw HyracksDataException.create(ErrorCode.NOT_ENOUGH_BUDGET_FOR_TEXTSEARCH,
                        this.getClass().getSimpleName());
            }
            clearBuffer(ioBuffer);
            // For keeping the results in memory if possible. The I/O buffer doubles as
            // the first in-memory buffer (see IO_BUFFER_IDX).
            buffers = new ArrayList<ByteBuffer>();
            buffers.add(ioBuffer);
        }
    }
/**
 * Tries to grow {@code buffers} until it holds {@code numExpectedPages} frames so
 * the whole result can be kept in memory. Every newly acquired frame is zeroed.
 *
 * @param numExpectedPages desired total number of frames
 * @return {@code true} if all requested frames were acquired, {@code false} if the
 *         buffer-manager budget ran out first (frames acquired so far are kept)
 * @throws HyracksDataException on buffer-manager failures
 */
protected boolean tryAllocateBuffers(int numExpectedPages) throws HyracksDataException {
    while (buffers.size() < numExpectedPages) {
        final ByteBuffer newFrame = bufferManager.acquireFrame(ctx.getInitialFrameSize());
        if (newFrame == null) {
            // Budget exhausted: caller must fall back to the file-based path.
            return false;
        }
        clearBuffer(newFrame);
        buffers.add(newFrame);
    }
    return true;
}
/**
 * Lazily creates and opens a managed workspace run file for the writer. A no-op
 * when the result is being kept fully in memory, or when a writer already exists.
 *
 * @throws HyracksDataException if the file cannot be created or opened
 */
protected void createAndOpenFile() throws HyracksDataException {
    if (isInMemoryOpMode) {
        // In-memory mode should not generate a file.
        return;
    }
    if (searchResultWriter == null) {
        FileReference file = ctx.getJobletContext().createManagedWorkspaceFile(FILE_PREFIX);
        searchResultWriter = new RunFileWriter(file, ctx.getIoManager());
        searchResultWriter.open();
        isFileOpened = true;
    }
}
/**
 * Deallocates the I/O buffer (one frame). This should be the last operation on
 * this object: afterwards no read/write is possible until the buffer is
 * re-allocated.
 *
 * @throws HyracksDataException on buffer-manager failures
 */
protected void deallocateIOBuffer() throws HyracksDataException {
    if (ioBufferFrame != null) {
        // Return the frame to the buffer manager and drop every reference so the
        // memory can be reclaimed.
        bufferManager.releaseFrame(ioBuffer);
        buffers.clear();
        ioBufferFrame = null;
        ioBuffer = null;
    }
}
/**
 * Releases every result buffer except the first one, which is retained because it
 * doubles as the I/O buffer. Buffers are released from the end of the list
 * backwards.
 *
 * @throws HyracksDataException on buffer-manager failures
 */
protected void deallocateBuffers() throws HyracksDataException {
    while (buffers.size() > 1) {
        final ByteBuffer lastBuffer = buffers.remove(buffers.size() - 1);
        bufferManager.releaseFrame(lastBuffer);
    }
}
/** @return the fixed-size frame tuple accessor used to read result frames */
public FixedSizeFrameTupleAccessor getAccessor() {
    return accessor;
}
/** @return the fixed-size frame tuple appender used to write result frames */
public FixedSizeFrameTupleAppender getAppender() {
    return appender;
}
/** @return the reusable tuple reference pointing into the current frame */
public FixedSizeTupleReference getTuple() {
    return tuple;
}
/**
 * Zeroes the backing array of the given buffer and resets its position/limit so
 * it is ready for fresh writes.
 *
 * @param bufferToClear array-backed buffer to wipe; must not be {@code null}
 */
protected void clearBuffer(ByteBuffer bufferToClear) {
    final byte[] backingArray = bufferToClear.array();
    Arrays.fill(backingArray, 0, backingArray.length, (byte) 0);
    bufferToClear.clear();
}
/**
 * Re-positions the appender at the end of the tuples already written in the given
 * buffer, so subsequent appends continue right after the last existing tuple
 * instead of overwriting the frame.
 *
 * @param bufferIdx index of the target buffer in {@code buffers}; the buffer must
 *                  already contain at least one tuple (tupleCount - 1 is accessed)
 */
protected void resetAppenderLocation(int bufferIdx) {
    accessor.reset(buffers.get(bufferIdx));
    appender.reset(buffers.get(bufferIdx), false, accessor.getTupleCount(),
            accessor.getTupleEndOffset(accessor.getTupleCount() - 1));
}
}
|
|
/*
$Id$
Copyright (C) 2003-2013 Virginia Tech.
All rights reserved.
SEE LICENSE FOR MORE INFORMATION
Author: Middleware Services
Email: [email protected]
Version: $Revision$
Updated: $Date$
*/
package edu.vt.middleware.crypt.symmetric;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import edu.vt.middleware.crypt.AbstractEncryptionCli;
import edu.vt.middleware.crypt.CryptException;
import edu.vt.middleware.crypt.digest.DigestAlgorithm;
import edu.vt.middleware.crypt.pbe.EncryptionScheme;
import edu.vt.middleware.crypt.pbe.KeyGenerator;
import edu.vt.middleware.crypt.pbe.OpenSSLEncryptionScheme;
import edu.vt.middleware.crypt.pbe.OpenSSLKeyGenerator;
import edu.vt.middleware.crypt.pbe.PBES1EncryptionScheme;
import edu.vt.middleware.crypt.pbe.PBES2EncryptionScheme;
import edu.vt.middleware.crypt.pbe.PBKDF1KeyGenerator;
import edu.vt.middleware.crypt.pbe.PBKDF2KeyGenerator;
import edu.vt.middleware.crypt.pbe.PKCS12EncryptionScheme;
import edu.vt.middleware.crypt.pbe.PKCS12KeyGenerator;
import edu.vt.middleware.crypt.pkcs.PBEParameter;
import edu.vt.middleware.crypt.pkcs.PBKDF2Parameters;
import edu.vt.middleware.crypt.util.CryptReader;
import edu.vt.middleware.crypt.util.CryptWriter;
import edu.vt.middleware.crypt.util.HexConverter;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
/**
 * Command line interface for symmetric encryption operations.
 *
 * @author Middleware Services
 * @version $Revision$
 */
public class SymmetricCli extends AbstractEncryptionCli
{

  /** Cipher mode option. */
  protected static final String OPT_MODE = "mode";

  /** Cipher padding option. */
  protected static final String OPT_PADDING = "padding";

  /** Cipher initialization vector option. */
  protected static final String OPT_IV = "iv";

  /** Cipher key option. */
  protected static final String OPT_KEY = "key";

  /** Cipher key size. */
  protected static final String OPT_KEYSIZE = "keysize";

  /** Trigger password-based encryption key generation. */
  protected static final String OPT_PBE = "pbe";

  /** PBE key generation strategy, e.g. PKCS5S1 */
  protected static final String OPT_SCHEME = "scheme";

  /** Digest algorithm used with PBE modes that allow it. */
  protected static final String OPT_DIGEST = "digest";

  /** Salt for PBE key generation. */
  protected static final String OPT_SALT = "salt";

  /** Iteration count for PBE key generation. */
  protected static final String OPT_ITERATIONS = "iter";

  /** Generate key option. */
  protected static final String OPT_GENKEY = "genkey";

  /** Name of operation provided by this class. */
  private static final String COMMAND_NAME = "enc";

  /** Converts hex to bytes and vice versa. */
  private final HexConverter hexConv = new HexConverter();


  /**
   * CLI entry point method.
   *
   * @param  args  Command line arguments.
   */
  public static void main(final String[] args)
  {
    new SymmetricCli().performAction(args);
  }


  /** {@inheritDoc} */
  protected void initOptions()
  {
    super.initOptions();

    final Option mode = new Option(OPT_MODE, true, "cipher mode, e.g. CBC");
    mode.setArgName("name");
    mode.setOptionalArg(false);

    final Option padding = new Option(
      OPT_PADDING,
      true,
      "cipher padding strategy, e.g. PKCS5Padding");
    padding.setArgName("padding");
    padding.setOptionalArg(false);

    final Option key = new Option(OPT_KEY, true, "encryption/decryption key");
    key.setArgName("filepath");
    key.setOptionalArg(false);

    final Option keySize = new Option(
      OPT_KEYSIZE,
      true,
      "key size in bits; only needed if -key option is not specified");
    keySize.setArgName("bits");
    keySize.setOptionalArg(false);

    // BUG FIX: help text previously read "initialization vectory".
    final Option iv = new Option(OPT_IV, true, "initialization vector in hex");
    iv.setArgName("hex_iv");
    iv.setOptionalArg(false);

    final Option pbe = new Option(
      OPT_PBE,
      true,
      "generate PBE key from password/phrase; uses pkcs5s2 by default");
    pbe.setArgName("password");
    pbe.setOptionalArg(false);

    final Option pbeScheme = new Option(
      OPT_SCHEME,
      true,
      "PBE key generation mode; one of pkcs5s1, pkcs5s2, openssl, pkcs12");
    pbeScheme.setArgName("name");
    pbeScheme.setOptionalArg(false);

    final Option pbeDigest = new Option(
      OPT_DIGEST,
      true,
      "digest algorithm to use with PBE mode pkcs5s1 or pkcs12");
    pbeDigest.setArgName("name");
    pbeDigest.setOptionalArg(false);

    final Option salt = new Option(
      OPT_SALT,
      true,
      "salt for PBE key generation in hex");
    salt.setArgName("hex_salt");
    salt.setOptionalArg(false);

    final Option iterations = new Option(
      OPT_ITERATIONS,
      true,
      "iteration count for PBE key generation");
    // BUG FIX: these two calls previously targeted "salt" (copy-paste error),
    // which clobbered the salt option's arg name and left iterations
    // unconfigured.
    iterations.setArgName("count");
    iterations.setOptionalArg(false);

    final Option genKey = new Option(
      OPT_GENKEY,
      true,
      "generate key of given size written to path specified by -out option");
    genKey.setArgName("bitsize");
    genKey.setOptionalArg(false);

    options.addOption(mode);
    options.addOption(padding);
    options.addOption(key);
    options.addOption(keySize);
    options.addOption(iv);
    options.addOption(pbe);
    options.addOption(pbeScheme);
    options.addOption(pbeDigest);
    options.addOption(salt);
    options.addOption(iterations);
    options.addOption(genKey);
    options.addOption(new Option(OPT_ENCRYPT, "perform encryption"));
    options.addOption(new Option(OPT_DECRYPT, "perform decryption"));
  }


  /** {@inheritDoc} */
  protected void dispatch(final CommandLine line)
    throws Exception
  {
    if (line.hasOption(OPT_ENCRYPT)) {
      encrypt(line);
    } else if (line.hasOption(OPT_DECRYPT)) {
      decrypt(line);
    } else if (line.hasOption(OPT_GENKEY)) {
      genKey(line);
    } else {
      printHelp();
    }
  }


  /**
   * Creates a new symmetric encryption algorithm instance based on CLI options.
   * Mode and padding fall back to the algorithm defaults when not supplied.
   *
   * @param  line  Parsed command line arguments container.
   *
   * @return  New instance of an initialized symmetric algorithm.
   */
  protected SymmetricAlgorithm newAlgorithm(final CommandLine line)
  {
    final String algName = line.getOptionValue(OPT_CIPHER);
    SymmetricAlgorithm algorithm;
    if (line.hasOption(OPT_MODE)) {
      if (line.hasOption(OPT_PADDING)) {
        algorithm = SymmetricAlgorithm.newInstance(
          algName,
          line.getOptionValue(OPT_MODE),
          line.getOptionValue(OPT_PADDING));
      } else {
        algorithm = SymmetricAlgorithm.newInstance(
          algName,
          line.getOptionValue(OPT_MODE),
          SymmetricAlgorithm.DEFAULT_PADDING);
      }
    } else if (line.hasOption(OPT_PADDING)) {
      algorithm = SymmetricAlgorithm.newInstance(
        algName,
        SymmetricAlgorithm.DEFAULT_MODE,
        line.getOptionValue(OPT_PADDING));
    } else {
      algorithm = SymmetricAlgorithm.newInstance(algName);
    }
    return algorithm;
  }


  /** {@inheritDoc} */
  protected String getCommandName()
  {
    return COMMAND_NAME;
  }


  /**
   * Perform an encryption operation using data specified on the command line.
   * Requires either -key (explicit key file) or -pbe (password-derived key).
   *
   * @param  line  Parsed command line arguments container.
   *
   * @throws  Exception  On encryption errors.
   */
  protected void encrypt(final CommandLine line)
    throws Exception
  {
    validateOptions(line);

    final SymmetricAlgorithm alg = newAlgorithm(line);
    if (line.hasOption(OPT_KEY)) {
      alg.setKey(readKey(line));
      if (line.hasOption(OPT_IV)) {
        alg.setIV(hexConv.toBytes(line.getOptionValue(OPT_IV)));
      }
      encrypt(alg, getInputStream(line), getOutputStream(line));
    } else if (line.hasOption(OPT_PBE)) {
      final InputStream in = getInputStream(line);
      final OutputStream out = getOutputStream(line);
      try {
        getPBEScheme(alg, line).encrypt(
          line.getOptionValue(OPT_PBE).toCharArray(),
          in,
          out);
      } finally {
        // Streams are closed even when encryption fails.
        closeStream(in);
        closeStream(out);
      }
    } else {
      throw new IllegalArgumentException(
        "Either -key or -pbe is required for encryption or decryption.");
    }
  }


  /**
   * Perform a decryption operation using data specified on the command line.
   * Requires either -key (explicit key file) or -pbe (password-derived key).
   *
   * @param  line  Parsed command line arguments container.
   *
   * @throws  Exception  On decryption errors.
   */
  protected void decrypt(final CommandLine line)
    throws Exception
  {
    validateOptions(line);

    final SymmetricAlgorithm alg = newAlgorithm(line);
    if (line.hasOption(OPT_KEY)) {
      alg.setKey(readKey(line));
      if (line.hasOption(OPT_IV)) {
        alg.setIV(hexConv.toBytes(line.getOptionValue(OPT_IV)));
      }
      decrypt(alg, getInputStream(line), getOutputStream(line));
    } else if (line.hasOption(OPT_PBE)) {
      final InputStream in = getInputStream(line);
      final OutputStream out = getOutputStream(line);
      try {
        getPBEScheme(alg, line).decrypt(
          line.getOptionValue(OPT_PBE).toCharArray(),
          in,
          out);
      } finally {
        // Streams are closed even when decryption fails.
        closeStream(in);
        closeStream(out);
      }
    } else {
      throw new IllegalArgumentException(
        "Either -key or -pbe is required for encryption or decryption.");
    }
  }


  /**
   * Generate a new encryption key using command line arguments. The key is
   * derived from a password when -pbe is given, otherwise generated randomly at
   * the size given by -genkey.
   *
   * @param  line  Parsed command line arguments container.
   *
   * @throws  Exception  On key generation errors.
   */
  protected void genKey(final CommandLine line)
    throws Exception
  {
    validateOptions(line);

    final SymmetricAlgorithm alg = newAlgorithm(line);
    SecretKey key;
    if (line.hasOption(OPT_PBE)) {
      key = generatePBEKey(alg, line);
    } else {
      final int size = Integer.parseInt(line.getOptionValue(OPT_GENKEY));
      System.err.println("Generating key of size " + size);
      key = SecretKeyUtils.generate(alg.getAlgorithm(), size);
    }
    CryptWriter.writeEncodedKey(key, getOutputStream(line));
    if (line.hasOption(OPT_OUTFILE)) {
      System.err.println("Wrote key to " + line.getOptionValue(OPT_OUTFILE));
    }
  }


  /**
   * Generates a PBE key from command line options including a password.
   * Salt (-salt) and iteration count (-iter) are always required; a digest is
   * required for pkcs12 and pkcs5s1; a key size is required for pkcs12,
   * openssl, and the default pkcs5s2 scheme.
   *
   * @param  alg  Symmetric algorithm for which a compatible key should be
   * generated.
   * @param  line  Parsed command line arguments container.
   *
   * @return  Secret key from password.
   *
   * @throws  Exception  On key generation errors.
   */
  protected SecretKey generatePBEKey(
    final SymmetricAlgorithm alg,
    final CommandLine line)
    throws Exception
  {
    if (!line.hasOption(OPT_SALT)) {
      throw new IllegalArgumentException(
        "Salt is required for PBE key generation.");
    }
    if (!line.hasOption(OPT_ITERATIONS)) {
      throw new IllegalArgumentException(
        "Iteration count is required for PBE key generation.");
    }

    DigestAlgorithm digest = null;
    if (line.hasOption(OPT_DIGEST)) {
      digest = DigestAlgorithm.newInstance(line.getOptionValue(OPT_DIGEST));
    }

    String pbeScheme = null;
    if (line.hasOption(OPT_SCHEME)) {
      pbeScheme = line.getOptionValue(OPT_SCHEME).toLowerCase();
    }

    final char[] pass = line.getOptionValue(OPT_PBE).toCharArray();
    final byte[] salt = hexConv.toBytes(line.getOptionValue(OPT_SALT));
    final int iterations = Integer.parseInt(
      line.getOptionValue(OPT_ITERATIONS));
    // -1 marks "key size not supplied"; schemes that need it check below.
    final int keySize = line.hasOption(OPT_KEYSIZE)
      ? Integer.parseInt(line.getOptionValue(OPT_KEYSIZE)) : -1;
    final KeyGenerator generator;
    final byte[] derivedKey;
    final byte[] derivedIV;
    if ("pkcs12".equals(pbeScheme)) {
      if (digest == null) {
        throw new IllegalArgumentException("pkcs12 requires a digest.");
      }
      if (keySize < 0) {
        // BUG FIX: message previously named pkcs5s2 instead of pkcs12.
        throw new IllegalArgumentException(
          "Key size is required for pkcs12 PBE key generation.");
      }
      System.err.println("Generating PKCS#12 PBE key.");
      generator = new PKCS12KeyGenerator(digest, salt, iterations);
      derivedKey = generator.generate(pass, keySize);
      derivedIV = generator.generate(pass, alg.getBlockSize() * 8);
    } else if ("pkcs5s1".equals(pbeScheme)) {
      if (digest == null) {
        throw new IllegalArgumentException("pkcs5s1 requires a digest.");
      }
      System.err.println("Generating PKCS#5 PBE key using PBKDF1 scheme.");
      generator = new PBKDF1KeyGenerator(digest, salt, iterations);

      // PBKDF1 derives 128 bits: first 8 bytes are the key, next 8 the IV.
      final byte[] keyWithIV = generator.generate(pass, 128);
      derivedKey = new byte[8];
      derivedIV = new byte[8];
      System.arraycopy(keyWithIV, 0, derivedKey, 0, 8);
      // BUG FIX: previously copied 16 bytes into the 8-byte IV array, which
      // threw ArrayIndexOutOfBoundsException for every pkcs5s1 invocation.
      System.arraycopy(keyWithIV, 8, derivedIV, 0, 8);
    } else if ("openssl".equals(pbeScheme)) {
      if (keySize < 0) {
        // BUG FIX: message previously named pkcs5s2 instead of openssl.
        throw new IllegalArgumentException(
          "Key size is required for openssl PBE key generation.");
      }
      System.err.println("Generating OpenSSL PBE key.");
      generator = new OpenSSLKeyGenerator(salt);
      derivedKey = generator.generate(pass, keySize);
      derivedIV = generator.generate(pass, alg.getBlockSize() * 8);
    } else {
      // Default is pkcs5s2
      if (digest != null) {
        System.err.println("Ignoring digest for pkcs5s2 PBE scheme.");
      }
      if (keySize < 0) {
        throw new IllegalArgumentException(
          "Key size is required for pkcs5s2 PBE key generation.");
      }
      System.err.println("Generating PKCS#5 PBE key using PBKDF2 scheme.");
      generator = new PBKDF2KeyGenerator(salt, iterations);
      derivedKey = generator.generate(pass, keySize);
      derivedIV = generator.generate(pass, alg.getBlockSize() * 8);
    }
    System.err.println("Derived key: " + hexConv.fromBytes(derivedKey));
    System.err.println("Derived iv: " + hexConv.fromBytes(derivedIV));
    return new SecretKeySpec(derivedKey, alg.getAlgorithm());
  }


  /**
   * Creates a symmetric key from a file defined by CLI arguments.
   *
   * @param  line  Parsed command line arguments container.
   *
   * @return  Symmetric encryption/decryption key.
   *
   * @throws  CryptException  On cryptographic errors.
   * @throws  IOException  On IO errors.
   */
  protected SecretKey readKey(final CommandLine line)
    throws CryptException, IOException
  {
    return
      CryptReader.readSecretKey(
        new File(line.getOptionValue(OPT_KEY)),
        line.getOptionValue(OPT_CIPHER));
  }


  /**
   * Validates the existence of required options for an operation.
   *
   * @param  line  Parsed command line arguments container.
   *
   * @throws  IllegalArgumentException  if the cipher option is absent.
   */
  protected void validateOptions(final CommandLine line)
  {
    if (!line.hasOption(OPT_CIPHER)) {
      throw new IllegalArgumentException("cipher option is required.");
    }
  }


  /**
   * Gets a password-based encryption scheme based on command line arguments.
   * Salt and iteration count are always required; see
   * {@link #generatePBEKey(SymmetricAlgorithm, CommandLine)} for per-scheme
   * digest/key-size requirements.
   *
   * @param  alg  Symmetric cipher algorithm.
   * @param  line  parsed command line arguments container.
   *
   * @return  Initialized encryption scheme.
   */
  protected EncryptionScheme getPBEScheme(
    final SymmetricAlgorithm alg,
    final CommandLine line)
  {
    if (!line.hasOption(OPT_SALT)) {
      throw new IllegalArgumentException(
        "Salt is required for PBE encryption/decryption.");
    }
    if (!line.hasOption(OPT_ITERATIONS)) {
      throw new IllegalArgumentException(
        "Iteration count is required for PBE encryption/decryption.");
    }

    DigestAlgorithm digest = null;
    if (line.hasOption(OPT_DIGEST)) {
      digest = DigestAlgorithm.newInstance(line.getOptionValue(OPT_DIGEST));
    }

    String scheme = null;
    if (line.hasOption(OPT_SCHEME)) {
      scheme = line.getOptionValue(OPT_SCHEME).toLowerCase();
    }

    final byte[] salt = hexConv.toBytes(line.getOptionValue(OPT_SALT));
    final int iterations = Integer.parseInt(
      line.getOptionValue(OPT_ITERATIONS));
    final int keySize = line.hasOption(OPT_KEYSIZE)
      ? Integer.parseInt(line.getOptionValue(OPT_KEYSIZE)) : 0;
    final EncryptionScheme pbeScheme;
    if ("pkcs12".equals(scheme)) {
      if (digest == null) {
        throw new IllegalArgumentException("pkcs12 requires a digest.");
      }
      // BUG FIX: the previous "keySize < 0" check was dead code since the
      // missing-option default is 0; a missing key size now fails fast
      // instead of deriving a zero-length key downstream.
      if (keySize <= 0) {
        throw new IllegalArgumentException(
          "Key size is required for pkcs12 PBE encryption/decryption.");
      }
      System.err.println("Using PKCS#12 PBE encryption scheme.");
      pbeScheme = new PKCS12EncryptionScheme(
        alg,
        digest,
        new PBEParameter(salt, iterations),
        keySize);
    } else if ("pkcs5s1".equals(scheme)) {
      if (digest == null) {
        // BUG FIX: message previously named pkcs12 instead of pkcs5s1.
        throw new IllegalArgumentException("pkcs5s1 requires a digest.");
      }
      System.err.println("Using PKCS#5 PBES1 encryption scheme.");
      pbeScheme = new PBES1EncryptionScheme(
        alg,
        digest,
        new PBEParameter(salt, iterations));
    } else if ("openssl".equals(scheme)) {
      // BUG FIX: dead "keySize < 0" check tightened; message previously named
      // pkcs5s2 instead of openssl.
      if (keySize <= 0) {
        throw new IllegalArgumentException(
          "Key size is required for openssl PBE encryption/decryption.");
      }
      System.err.println("Using OpenSSL encryption scheme.");
      pbeScheme = new OpenSSLEncryptionScheme(alg, salt, keySize);
    } else {
      // Default is pkcs5s2
      if (digest != null) {
        System.err.println("Ignoring digest for pkcs5s2 PBE scheme.");
      }
      // BUG FIX: dead "keySize < 0" check tightened so a missing key size is
      // reported here rather than producing a 0-byte PBKDF2 key.
      if (keySize <= 0) {
        throw new IllegalArgumentException(
          "Key size is required for pkcs5s2 PBE key generation.");
      }
      System.err.println("Using PKCS#5 PBES2 encryption scheme.");
      pbeScheme = new PBES2EncryptionScheme(
        alg,
        new PBKDF2Parameters(salt, iterations, keySize / 8));
    }
    if (line.hasOption(OPT_IV)) {
      System.err.println("Using provided IV instead of generated value.");
      alg.setIV(hexConv.toBytes(line.getOptionValue(OPT_IV)));
    }
    return pbeScheme;
  }
}
|
|
/******************************************************************************
* Copyright (c) 2006, 2010 VMware Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html and the Apache License v2.0
* is available at http://www.opensource.org/licenses/apache2.0.php.
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
* VMware Inc.
*****************************************************************************/
package org.eclipse.gemini.blueprint.extender.internal.blueprint.activator;
import java.util.Collection;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.service.blueprint.container.BlueprintContainer;
import org.osgi.service.blueprint.container.BlueprintEvent;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.ConstructorArgumentValues;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextClosedEvent;
import org.springframework.context.event.ContextRefreshedEvent;
import org.eclipse.gemini.blueprint.blueprint.container.SpringBlueprintContainer;
import org.eclipse.gemini.blueprint.blueprint.container.SpringBlueprintConverter;
import org.eclipse.gemini.blueprint.blueprint.container.SpringBlueprintConverterService;
import org.eclipse.gemini.blueprint.blueprint.container.support.BlueprintContainerServicePublisher;
import org.eclipse.gemini.blueprint.context.BundleContextAware;
import org.eclipse.gemini.blueprint.context.ConfigurableOsgiBundleApplicationContext;
import org.eclipse.gemini.blueprint.context.event.OsgiBundleApplicationContextEvent;
import org.eclipse.gemini.blueprint.context.event.OsgiBundleApplicationContextListener;
import org.eclipse.gemini.blueprint.context.event.OsgiBundleContextFailedEvent;
import org.eclipse.gemini.blueprint.context.event.OsgiBundleContextRefreshedEvent;
import org.eclipse.gemini.blueprint.extender.event.BootstrappingDependenciesEvent;
import org.eclipse.gemini.blueprint.extender.event.BootstrappingDependenciesFailedEvent;
import org.eclipse.gemini.blueprint.extender.internal.activator.OsgiContextProcessor;
import org.eclipse.gemini.blueprint.extender.internal.blueprint.event.EventAdminDispatcher;
import org.eclipse.gemini.blueprint.service.importer.event.OsgiServiceDependencyWaitStartingEvent;
import org.springframework.util.ClassUtils;
/**
 * Blueprint specific context processor. Translates Spring/OSGi application
 * context lifecycle callbacks into Blueprint events (CREATING/CREATED/
 * DESTROYING/DESTROYED/FAILURE/GRACE_PERIOD/WAITING) that are forwarded to both
 * the {@link BlueprintListenerManager} and the EventAdmin-backed
 * {@link EventAdminDispatcher}, and installs the Blueprint environment beans
 * into each refreshed context.
 *
 * @author Costin Leau
 */
public class BlueprintContainerProcessor implements
        OsgiBundleApplicationContextListener<OsgiBundleApplicationContextEvent>, OsgiContextProcessor {

    /** logger */
    private static final Log log = LogFactory.getLog(BlueprintContainerProcessor.class);

    /**
     * FactoryBean class used to expose the predefined Blueprint environment
     * beans; resolved reflectively because it lives in an internal package.
     */
    private static final Class<?> ENV_FB_CLASS;

    static {
        String className = "org.eclipse.gemini.blueprint.blueprint.reflect.internal.metadata.EnvironmentManagerFactoryBean";
        ClassLoader loader = OsgiBundleApplicationContextEvent.class.getClassLoader();
        ENV_FB_CLASS = ClassUtils.resolveClassName(className, loader);
    }

    /** Forwards Blueprint events to the OSGi EventAdmin service. */
    private final EventAdminDispatcher dispatcher;
    /** Notifies registered BlueprintListener instances. */
    private final BlueprintListenerManager listenerManager;
    /** The extender bundle reported as the source of every Blueprint event. */
    private final Bundle extenderBundle;
    /** Breaks bean-definition cycles; instantiated reflectively (internal class). */
    private final BeanFactoryPostProcessor cycleBreaker;

    /**
     * Application listener that emits Blueprint WAITING events while the context
     * is waiting for mandatory OSGi service dependencies. Disabled once the
     * context is closed.
     */
    class BlueprintWaitingEventDispatcher implements ApplicationListener<ApplicationEvent> {
        private final BundleContext bundleContext;
        // Flipped off when the context closes so no WAITING events leak afterwards.
        private volatile boolean enabled = true;
        private volatile boolean initialized = false;

        BlueprintWaitingEventDispatcher(BundleContext context) {
            this.bundleContext = context;
        }

        // WAITING event
        public void onApplicationEvent(ApplicationEvent event) {
            if (event instanceof ContextClosedEvent) {
                enabled = false;
                return;
            }
            if (event instanceof ContextRefreshedEvent) {
                initialized = true;
                return;
            }
            if (event instanceof OsgiServiceDependencyWaitStartingEvent) {
                if (enabled) {
                    OsgiServiceDependencyWaitStartingEvent evt = (OsgiServiceDependencyWaitStartingEvent) event;
                    String[] filter = new String[] { evt.getServiceDependency().getServiceFilter().toString() };
                    BlueprintEvent waitingEvent =
                            new BlueprintEvent(BlueprintEvent.WAITING, bundleContext.getBundle(), extenderBundle,
                                    filter);
                    // Listener manager first, then EventAdmin — same order used
                    // for every other event type in this class.
                    listenerManager.blueprintEvent(waitingEvent);
                    dispatcher.waiting(waitingEvent);
                }
                return;
            }
        }
    }

    /**
     * Creates a new processor.
     *
     * @param dispatcher EventAdmin dispatcher for Blueprint events
     * @param listenerManager manager for BlueprintListener notification
     * @param extenderBundle the Blueprint extender bundle
     */
    public BlueprintContainerProcessor(EventAdminDispatcher dispatcher, BlueprintListenerManager listenerManager,
            Bundle extenderBundle) {
        this.dispatcher = dispatcher;
        this.listenerManager = listenerManager;
        this.extenderBundle = extenderBundle;

        // The cycle-breaking post-processor is internal; load it reflectively to
        // avoid a compile-time dependency on the internal package.
        Class<?> processorClass =
                ClassUtils.resolveClassName(
                        "org.eclipse.gemini.blueprint.blueprint.container.support.internal.config.CycleOrderingProcessor",
                        BundleContextAware.class.getClassLoader());
        cycleBreaker = (BeanFactoryPostProcessor) BeanUtils.instantiate(processorClass);
    }

    /** Emits DESTROYED after the given context has been closed. */
    public void postProcessClose(ConfigurableOsgiBundleApplicationContext context) {
        BlueprintEvent destroyedEvent =
                new BlueprintEvent(BlueprintEvent.DESTROYED, context.getBundle(), extenderBundle);
        listenerManager.blueprintEvent(destroyedEvent);
        dispatcher.afterClose(destroyedEvent);
    }

    /** Emits CREATED after the given context has refreshed successfully. */
    public void postProcessRefresh(ConfigurableOsgiBundleApplicationContext context) {
        BlueprintEvent createdEvent = new BlueprintEvent(BlueprintEvent.CREATED, context.getBundle(), extenderBundle);
        listenerManager.blueprintEvent(createdEvent);
        dispatcher.afterRefresh(createdEvent);
    }

    /** Emits FAILURE (with cause) when the given context failed to refresh. */
    public void postProcessRefreshFailure(ConfigurableOsgiBundleApplicationContext context, Throwable th) {
        BlueprintEvent failureEvent =
                new BlueprintEvent(BlueprintEvent.FAILURE, context.getBundle(), extenderBundle, th);
        listenerManager.blueprintEvent(failureEvent);
        dispatcher.refreshFailure(failureEvent);
    }

    /** Emits DESTROYING just before the given context is closed. */
    public void preProcessClose(ConfigurableOsgiBundleApplicationContext context) {
        BlueprintEvent destroyingEvent =
                new BlueprintEvent(BlueprintEvent.DESTROYING, context.getBundle(), extenderBundle);
        listenerManager.blueprintEvent(destroyingEvent);
        dispatcher.beforeClose(destroyingEvent);
    }

    /**
     * Prepares the given context before refresh: registers the Blueprint
     * container adapter, event listeners, environment beans and the cycle
     * breaker, then emits the CREATING event.
     */
    public void preProcessRefresh(final ConfigurableOsgiBundleApplicationContext context) {
        final BundleContext bundleContext = context.getBundleContext();
        // create the ModuleContext adapter
        final BlueprintContainer blueprintContainer = createBlueprintContainer(context);

        // 1. add event listeners
        // add service publisher
        context.addApplicationListener(new BlueprintContainerServicePublisher(blueprintContainer, bundleContext));
        // add waiting event broadcaster
        context.addApplicationListener(new BlueprintWaitingEventDispatcher(context.getBundleContext()));

        // 2. add environmental managers
        context.addBeanFactoryPostProcessor(new BeanFactoryPostProcessor() {
            private static final String BLUEPRINT_BUNDLE = "blueprintBundle";
            private static final String BLUEPRINT_BUNDLE_CONTEXT = "blueprintBundleContext";
            private static final String BLUEPRINT_CONTAINER = "blueprintContainer";
            private static final String BLUEPRINT_EXTENDER = "blueprintExtenderBundle";
            private static final String BLUEPRINT_CONVERTER = "blueprintConverter";

            public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
                // lazy logger evaluation
                Log logger = LogFactory.getLog(context.getClass());

                if (!(beanFactory instanceof BeanDefinitionRegistry)) {
                    logger.warn("Environmental beans will be registered as singletons instead "
                            + "of usual bean definitions since beanFactory " + beanFactory
                            + " is not a BeanDefinitionRegistry");
                }

                // add blueprint container bean
                addPredefinedBlueprintBean(beanFactory, BLUEPRINT_BUNDLE, bundleContext.getBundle(), logger);
                addPredefinedBlueprintBean(beanFactory, BLUEPRINT_BUNDLE_CONTEXT, bundleContext, logger);
                addPredefinedBlueprintBean(beanFactory, BLUEPRINT_CONTAINER, blueprintContainer, logger);
                // addPredefinedBlueprintBean(beanFactory, BLUEPRINT_EXTENDER, extenderBundle, logger);
                addPredefinedBlueprintBean(beanFactory, BLUEPRINT_CONVERTER, new SpringBlueprintConverter(beanFactory),
                        logger);

                // add Blueprint conversion service
                // String[] beans = beanFactory.getBeanNamesForType(BlueprintConverterConfigurer.class, false, false);
                // if (ObjectUtils.isEmpty(beans)) {
                // beanFactory.addPropertyEditorRegistrar(new BlueprintEditorRegistrar());
                // }
                beanFactory.setConversionService(new SpringBlueprintConverterService(
                        beanFactory.getConversionService(), beanFactory));
            }

            // Registers the value as an infrastructure bean definition when the
            // factory is a registry, otherwise falls back to a plain singleton.
            // Existing beans with the same name are never overwritten.
            private void addPredefinedBlueprintBean(ConfigurableListableBeanFactory beanFactory, String beanName,
                    Object value, Log logger) {
                if (!beanFactory.containsLocalBean(beanName)) {
                    logger.debug("Registering pre-defined bean named " + beanName);
                    if (beanFactory instanceof BeanDefinitionRegistry) {
                        BeanDefinitionRegistry registry = (BeanDefinitionRegistry) beanFactory;

                        GenericBeanDefinition def = new GenericBeanDefinition();
                        def.setBeanClass(ENV_FB_CLASS);
                        ConstructorArgumentValues cav = new ConstructorArgumentValues();
                        cav.addIndexedArgumentValue(0, value);
                        def.setConstructorArgumentValues(cav);
                        def.setLazyInit(false);
                        def.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
                        registry.registerBeanDefinition(beanName, def);
                    } else {
                        beanFactory.registerSingleton(beanName, value);
                    }
                } else {
                    logger.warn("A bean named " + beanName
                            + " already exists; aborting registration of the predefined value...");
                }
            }
        });

        // 3. add cycle breaker
        context.addBeanFactoryPostProcessor(cycleBreaker);

        BlueprintEvent creatingEvent = new BlueprintEvent(BlueprintEvent.CREATING, context.getBundle(), extenderBundle);
        listenerManager.blueprintEvent(creatingEvent);
        dispatcher.beforeRefresh(creatingEvent);
    }

    /** Wraps the given context in a Blueprint container adapter. */
    private BlueprintContainer createBlueprintContainer(ConfigurableOsgiBundleApplicationContext context) {
        // return new ExceptionHandlingBlueprintContainer(context, bundleContext);
        return new SpringBlueprintContainer(context);
    }

    /**
     * Maps extender-level context events to the corresponding Blueprint events:
     * bootstrapping waits become GRACE_PERIOD, bootstrapping failures and
     * refresh failures become FAILURE, successful refresh becomes CREATED.
     */
    public void onOsgiApplicationEvent(OsgiBundleApplicationContextEvent evt) {
        // grace event
        if (evt instanceof BootstrappingDependenciesEvent) {
            BootstrappingDependenciesEvent event = (BootstrappingDependenciesEvent) evt;
            Collection<String> flts = event.getDependencyFilters();
            if (flts.isEmpty()) {
                if (log.isDebugEnabled()) {
                    // BUG FIX: log message previously misspelled "empty" as "emtpy".
                    log.debug("All dependencies satisfied, not sending Blueprint GRACE event "
                            + "with empty dependencies from " + event);
                }
            } else {
                String[] filters = flts.toArray(new String[flts.size()]);
                BlueprintEvent graceEvent =
                        new BlueprintEvent(BlueprintEvent.GRACE_PERIOD, evt.getBundle(), extenderBundle, filters);
                listenerManager.blueprintEvent(graceEvent);
                dispatcher.grace(graceEvent);
            }
            return;
        }

        // bootstrapping failure
        if (evt instanceof BootstrappingDependenciesFailedEvent) {
            BootstrappingDependenciesFailedEvent event = (BootstrappingDependenciesFailedEvent) evt;
            Collection<String> flts = event.getDependencyFilters();
            String[] filters = flts.toArray(new String[flts.size()]);
            BlueprintEvent failureEvent =
                    new BlueprintEvent(BlueprintEvent.FAILURE, evt.getBundle(), extenderBundle, filters, event
                            .getFailureCause());
            listenerManager.blueprintEvent(failureEvent);
            dispatcher.refreshFailure(failureEvent);
            return;
        }

        // created
        if (evt instanceof OsgiBundleContextRefreshedEvent) {
            postProcessRefresh((ConfigurableOsgiBundleApplicationContext) evt.getApplicationContext());
            return;
        }

        // failure
        if (evt instanceof OsgiBundleContextFailedEvent) {
            OsgiBundleContextFailedEvent failureEvent = (OsgiBundleContextFailedEvent) evt;
            postProcessRefreshFailure(
                    ((ConfigurableOsgiBundleApplicationContext) failureEvent.getApplicationContext()), failureEvent
                            .getFailureCause());
            return;
        }
    }
}
|
|
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.repository;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TreeItem;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryElementMetaInterface;
import org.pentaho.di.repository.RepositoryObject;
import org.pentaho.di.repository.RepositoryObjectType;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.repository.dialog.SelectDirectoryDialog;
public class RepositoryDirectoryUI {

  // Format used both when rendering an object's modified date in the tree and when matching
  // that date against the filter string.
  private static final String DATE_FORMAT = "yyyy/MM/dd HH:mm:ss";

  /**
   * Set the name of this directory on a TreeItem. Also, create children on this TreeItem to reflect the
   * subdirectories, recursing into each one. In these sub-directories, fill in the available transformations
   * and/or jobs from the repository, optionally filtered.
   *
   * @param ti
   *          The TreeItem to set the name on and to create the subdirectories under
   * @param rep
   *          The repository to load transformation/job metadata from
   * @param dircolor
   *          The color in which the directories will be drawn.
   *          NOTE(review): not referenced in this method's body — only passed down the recursion;
   *          confirm whether it is still needed.
   * @param sortPosition
   *          The sort position (column index) for the repository objects
   * @param includeDeleted
   *          Whether deleted objects should be included.
   *          NOTE(review): not consulted here — only forwarded through the recursion; see the
   *          hard-coded 'false' in loadRepositoryObjects.
   * @param ascending
   *          The ascending flag for sorting
   * @param getTransformations
   *          Include transformations in the tree or not
   * @param getJobs
   *          Include jobs in the tree or not
   * @param dir
   *          The repository directory whose name and contents are rendered on {@code ti}
   * @param filterString
   *          Optional substring filter; {@code null} disables substring filtering
   * @param pattern
   *          Optional regex filter; when non-null it takes precedence over {@code filterString}
   * @throws KettleDatabaseException
   *           if the repository objects for a directory cannot be loaded
   */
  public static void getTreeWithNames( TreeItem ti, Repository rep, Color dircolor, int sortPosition,
    boolean includeDeleted, boolean ascending, boolean getTransformations, boolean getJobs,
    RepositoryDirectoryInterface dir, String filterString, Pattern pattern ) throws KettleDatabaseException {
    ti.setText( dir.getName() );
    ti.setData( dir );
    ti.setData( "isFolder", true );

    // First, we draw the directories, sorted by name (case-insensitive).
    List<RepositoryDirectoryInterface> children = dir.getChildren();
    Collections.sort( children, new Comparator<RepositoryDirectoryInterface>() {
      @Override
      public int compare( RepositoryDirectoryInterface o1, RepositoryDirectoryInterface o2 ) {
        return o1.getName().compareToIgnoreCase( o2.getName() );
      }
    } );
    for ( int i = 0; i < children.size(); i++ ) {
      RepositoryDirectory subdir = (RepositoryDirectory) children.get( i );
      TreeItem subti = new TreeItem( ti, SWT.NONE );
      subti.setImage( GUIResource.getInstance().getImageFolder() );
      subti.setData( "isFolder", true );
      getTreeWithNames(
        subti, rep, dircolor, sortPosition, includeDeleted, ascending, getTransformations, getJobs, subdir,
        filterString, pattern );
    }

    // Then the transformations & jobs directly in this directory...
    List<RepositoryElementMetaInterface> repositoryObjects =
      loadRepositoryObjects( dir, getTransformations, getJobs, rep );

    // Sort the directory list appropriately...
    RepositoryObject.sortRepositoryObjects( repositoryObjects, sortPosition, ascending );

    addToTree( ti, filterString, pattern, repositoryObjects );

    // Only the root starts out expanded.
    ti.setExpanded( dir.isRoot() );
  }

  /**
   * Load the transformation and/or job objects contained in {@code dir}, fetching them from the
   * repository (and caching them on the directory object) when they have not been loaded yet.
   *
   * @param dir the directory whose objects are requested
   * @param getTransformations include objects of type TRANSFORMATION
   * @param getJobs include objects of type JOB
   * @param rep the repository used to fetch objects on a cache miss
   * @return the directory's objects restricted to the allowed types
   * @throws KettleDatabaseException if fetching the objects from the repository fails
   */
  protected static List<RepositoryElementMetaInterface> loadRepositoryObjects( RepositoryDirectoryInterface dir,
    boolean getTransformations, boolean getJobs, Repository rep ) throws KettleDatabaseException {
    // Then show the transformations & jobs in that directory...
    List<RepositoryElementMetaInterface> repositoryObjects = new ArrayList<RepositoryElementMetaInterface>();
    if ( dir.getRepositoryObjects() == null ) {
      // Lazily populate the directory's object list.
      // NOTE(review): the hard-coded 'false' means deleted objects are never fetched here; confirm
      // this is intended given getTreeWithNames' includeDeleted parameter.
      try {
        dir.setRepositoryObjects( rep.getJobAndTransformationObjects( dir.getObjectId(), false ) );
      } catch ( KettleException e ) {
        throw new KettleDatabaseException( e );
      }
    }
    List<RepositoryObjectType> allowedTypes = new ArrayList<>( 2 );
    if ( getTransformations ) {
      allowedTypes.add( RepositoryObjectType.TRANSFORMATION );
    }
    if ( getJobs ) {
      allowedTypes.add( RepositoryObjectType.JOB );
    }
    for ( RepositoryElementMetaInterface repoObject : dir.getRepositoryObjects() ) {
      if ( allowedTypes.contains( repoObject.getObjectType() ) ) {
        repositoryObjects.add( repoObject );
      }
    }
    return repositoryObjects;
  }

  /**
   * Add the given repository objects as children of {@code ti}, keeping only those that match the
   * filter. An object matches when its name, description, modified user, formatted modified date,
   * or object-type description matches the pattern (if given) or contains the filter string.
   * When neither a filter string nor a pattern is supplied, every object is added.
   */
  private static void addToTree( TreeItem ti, String filterString, Pattern pattern,
    List<RepositoryElementMetaInterface> repositoryObjects ) {
    for ( int i = 0; i < repositoryObjects.size(); i++ ) {
      boolean add = false;
      RepositoryElementMetaInterface repositoryObject = repositoryObjects.get( i );
      if ( filterString == null && pattern == null ) {
        // No filtering requested: keep everything.
        add = true;
      } else {
        // Cheap string fields first; date formatting and type lookup only when still unmatched.
        add |= addItem( repositoryObject.getName(), filterString, pattern );
        add |= addItem( repositoryObject.getDescription(), filterString, pattern );
        add |= addItem( repositoryObject.getModifiedUser(), filterString, pattern );
        if ( !add && repositoryObject.getModifiedDate() != null ) {
          SimpleDateFormat simpleDateFormat = new SimpleDateFormat( DATE_FORMAT );
          add = addItem( simpleDateFormat.format( repositoryObject.getModifiedDate() ), filterString, pattern );
        }
        if ( !add && repositoryObject.getObjectType() != null ) {
          add = addItem( repositoryObject.getObjectType().getTypeDescription(), filterString, pattern );
        }
      }
      if ( add ) {
        createTreeItem( ti, repositoryObject );
      }
    }
  }

  /**
   * Create a TreeItem for a single repository object, setting its image from the object type and
   * filling the five text columns: name, type description (upper-cased), modified user, formatted
   * modified date, and description. Deleted objects are drawn in red.
   */
  private static void createTreeItem( TreeItem parent, RepositoryElementMetaInterface repositoryObject ) {
    TreeItem tiObject = new TreeItem( parent, SWT.NONE );
    tiObject.setData( repositoryObject );
    if ( repositoryObject.getObjectType() == RepositoryObjectType.TRANSFORMATION ) {
      tiObject.setImage( GUIResource.getInstance().getImageTransRepo() );
    } else if ( repositoryObject.getObjectType() == RepositoryObjectType.JOB ) {
      tiObject.setImage( GUIResource.getInstance().getImageJobRepo() );
    }
    SimpleDateFormat simpleDateFormat = new SimpleDateFormat( DATE_FORMAT );
    tiObject.setText( 0, Const.NVL( repositoryObject.getName(), "" ) );
    tiObject.setText( 1, Const.NVL( repositoryObject.getObjectType().getTypeDescription(), "" ).toUpperCase() );
    tiObject.setText( 2, Const.NVL( repositoryObject.getModifiedUser(), "" ) );
    tiObject.setText( 3, repositoryObject.getModifiedDate() != null ? simpleDateFormat
      .format( repositoryObject.getModifiedDate() ) : "" );
    tiObject.setText( 4, Const.NVL( repositoryObject.getDescription(), "" ) );
    if ( repositoryObject.isDeleted() ) {
      tiObject.setForeground( GUIResource.getInstance().getColorRed() );
    }
  }

  /**
   * Returns true when {@code name} matches {@code pattern} (when a pattern is given), or otherwise
   * when the upper-cased {@code name} contains {@code filter}. Returns false for a null name.
   * NOTE(review): the substring branch assumes {@code filter} is already upper-cased by the
   * caller — confirm against the dialogs that build the filter.
   */
  private static boolean addItem( String name, String filter, Pattern pattern ) {
    boolean add = false;
    if ( name != null ) {
      if ( pattern != null ) {
        Matcher matcher = pattern.matcher( name );
        if ( matcher.matches() ) {
          add = true;
        }
      } else {
        if ( name.toUpperCase().indexOf( filter ) >= 0 ) {
          add = true;
        }
      }
    }
    return add;
  }

  /**
   * Gets a directory tree on a TreeItem to work with: sets the directory name and color on
   * {@code ti} and recursively creates one child item per subdirectory.
   *
   * @param ti
   *          The TreeItem to set the directory tree on
   * @param dircolor
   *          The color of the directory tree item.
   * @param dir
   *          The directory to render (recursion starts from this node)
   */
  public static void getDirectoryTree( TreeItem ti, Color dircolor, RepositoryDirectoryInterface dir ) {
    ti.setText( dir.getName() );
    ti.setForeground( dircolor );
    // First, we draw the directories
    for ( int i = 0; i < dir.getNrSubdirectories(); i++ ) {
      RepositoryDirectory subdir = dir.getSubdirectory( i );
      TreeItem subti = new TreeItem( ti, SWT.NONE );
      subti.setImage( GUIResource.getInstance().getImageFolder() );
      getDirectoryTree( subti, dircolor, subdir );
    }
  }

  /**
   * Shows a directory-selection dialog and returns the chosen directory.
   *
   * @param shell the parent shell for the dialog
   * @param rep the repository; when null this method returns null immediately
   * @param directoryFrom the starting directory; when null the user's home directory is used
   *          (falling back to a fresh RepositoryDirectory if that lookup fails)
   * @return the selected directory, or null when the repository is null, the dialog was
   *         cancelled, or the selection equals the starting directory
   */
  public static RepositoryDirectoryInterface chooseDirectory( Shell shell, Repository rep, RepositoryDirectoryInterface directoryFrom ) {
    if ( rep == null ) {
      return null;
    }
    if ( directoryFrom == null ) {
      try {
        directoryFrom = rep.getUserHomeDirectory();
      } catch ( KettleException ex ) {
        // Best-effort fallback: an empty directory object keeps the dialog usable.
        directoryFrom = new RepositoryDirectory();
      }
    }
    ObjectId idDirectoryFrom = directoryFrom.getObjectId();
    SelectDirectoryDialog sdd = new SelectDirectoryDialog( shell, SWT.NONE, rep );
    //PDI-13867: root dir and its direct subdirectories are restricted.
    HashSet<String> restrictedPaths = new HashSet<String>();
    restrictedPaths.add( directoryFrom.findRoot().getPath() );
    restrictedPaths.add( "/home" );
    sdd.setRestrictedPaths( restrictedPaths );
    //TODO: expand and select directoryFrom in the dialog.
    RepositoryDirectoryInterface rd = sdd.open();
    // NOTE(review): this compares ObjectId instances by reference, not equals(); it only detects
    // "same directory" when the dialog returns the identical ObjectId object — confirm intended.
    if ( rd == null || idDirectoryFrom == rd.getObjectId() ) {
      return null;
    }
    return rd;
  }
}
|
|
/*
* No copyright. No warranty. No liability accepted. Not tested.
* Created 21/10/2014 by Zak Fenton.
*/
package org.mettascript.parser;
import java.util.ArrayList;
import java.util.Collection;
/**
*
* @author zak
*/
public class Operation {
    /** The left-hand-side of the operation. This will only be null for an
     * empty/"nothing" operation, otherwise a missing operation is replaced
     * with such.
     */
    public Operation leftHandSide;

    // The token this node was built from; null only for an empty/"nothing" operation.
    public Token token;

    // Printable operator symbol; only meaningful when this node is a binary operation.
    public String operator = "<NOT-AN-OPERATOR>";

    /** The right-hand-side of the operation. This will only be null for an
     * empty/"nothing" operation, otherwise a missing operation is replaced
     * with such.
     */
    public Operation rightHandSide;

    /** The operation enclosing this one (or null for the outer operation). */
    public Operation enclosing;

    /** Only used to construct empty/nothing operations, which replace nulls
     * as parameters to real operations.
     */
    private Operation() {
    }

    /** Builds an operation node. For NAME/OPERATOR/BRACKET tokens the node is a binary
     * operation: null operands are replaced with empty/"nothing" operations, both operands get
     * their {@link #enclosing} pointer set to this node, and {@link #operator} is taken from the
     * token's text. For any other token type the operands are stored as given.
     */
    private Operation(Operation leftHandSide, Token token, Operation rightHandSide) {
        if (token.type == Token.Type.NAME
            || token.type == Token.Type.OPERATOR
            || token.type == Token.Type.BRACKET) {
            if (leftHandSide == null) {
                leftHandSide = new Operation();
            }
            if (rightHandSide == null) {
                rightHandSide = new Operation();
            }
            leftHandSide.enclosing = rightHandSide.enclosing = this;
            operator = token.toString();
        }
        this.leftHandSide = leftHandSide;
        this.token = token;
        this.rightHandSide = rightHandSide;
    }

    /** Only used by {@link FormulaParser}. Converts a token-or-group tree into an Operation
     * tree. Group members are expected to alternate operand/operator/operand..., i.e. the member
     * list has odd length; the sanity check below enforces that. '[' and '{' groups are wrapped
     * in an extra bracket operation so blocks and structures remain distinguishable.
     */
    static Operation parse(TokenOrGroup tokenOrGroup) {
        if (tokenOrGroup.isToken) {
            return new Operation(null, tokenOrGroup.token, null);
        } else if ((tokenOrGroup.openingBracket == null || tokenOrGroup.openingBracket.firstSymbol.character == '(')
            && tokenOrGroup.members.size() == 0) {
            // Empty parenthesised group (or empty top level): the "nothing" operation.
            return new Operation();
        } else {
            // Fold the members left-to-right: operand (operator operand)*.
            Operation leftHandSide = parse(tokenOrGroup.members.get(0));
            int i;
            for (i = 2; i < tokenOrGroup.members.size(); i += 2) {
                Token operator = tokenOrGroup.members.get(i-1).token;
                Operation rightHandSide = parse(tokenOrGroup.members.get(i));
                leftHandSide = new Operation(leftHandSide, operator, rightHandSide);
            }
            // After the loop, i must be exactly one past the last index: an even member count
            // means a dangling operator or missing operand.
            if (i != tokenOrGroup.members.size() + 1) {
                throw new Error("Something's not right. i=" + i + " size=" + tokenOrGroup.members.size() + "!");
            }
            if (tokenOrGroup.openingBracket != null
                && (tokenOrGroup.openingBracket.firstSymbol.character == '['
                || tokenOrGroup.openingBracket.firstSymbol.character == '{')) {
                // Wrap blocks and structures in a bracket operation keyed by the opening bracket.
                return new Operation(new Operation(), tokenOrGroup.openingBracket, leftHandSide);
            } else {
                return leftHandSide;
            }
        }
    }

    /** True for the empty/"nothing" operation (the only node without a token). */
    public boolean isNothing() {
        return token == null;
    }

    /** True when this node was built as a binary operation (NAME/OPERATOR/BRACKET token),
     * in which case both operands are guaranteed non-null by the constructor. */
    public boolean isBinaryOperation() {
        if (token == null) {
            return false;
        } else {
            switch (token.type) {
            case NAME:
            case OPERATOR:
            case BRACKET:
                assert leftHandSide != null;
                assert rightHandSide != null;
                return true;
            default:
                return false;
            }
        }
    }

    /** True when this is a binary operation whose operator equals any of the given strings. */
    public boolean isOperation(String...operatorMatches) {
        if (isBinaryOperation()) {
            for (String m: operatorMatches) {
                if (m.equals(operator)) {
                    return true;
                }
            }
        }
        return false;
    }

    public boolean isNormalOperation() {
        return isBinaryOperation() && !isSpecialOperation();
    }

    /** "Special" operations are the structural operators; a normal '=' (one not directly inside
     * a sequence, see {@link #isSpecialEquals()}) is excluded and treated as a normal operation. */
    public boolean isSpecialOperation() {
        return isOperation("[", "{", "&", "|", "=", ",", ";", ".", "!", "?") && !isNormalEquals();
    }

    public boolean isBlock() {
        return isOperation("[");
    }

    public boolean isEmptyBlock() {
        return isBlock() && rightHandSide.isNothing();
    }

    /** True when this node is the body of a block: either the outermost operation or a direct
     * child of a '[' operation. */
    public boolean isBlockBody() {
        return enclosing == null || enclosing.isBlock();
    }

    /** Walks up the enclosing chain to the nearest block body (possibly this node itself). */
    public Operation findBlockBody() {
        if (isBlockBody()) {
            return this;
        } else {
            return enclosing.findBlockBody();
        }
    }

    public boolean isStructure() {
        return isOperation("{");
    }

    public boolean isAnd() {
        return isOperation("&");
    }

    public boolean isOr() {
        return isOperation("|");
    }

    public boolean isNot() {
        return isOperation("~");
    }

    public boolean isEquals() {
        return isOperation("=");
    }

    public boolean isComma() {
        return isOperation(",");
    }

    public boolean isSequence() {
        return isOperation(";");
    }

    public boolean isDot() {
        return isOperation(".");
    }

    public boolean isExclamationMark() {
        return isOperation("!");
    }

    public boolean isQuestionMark() {
        return isOperation("?");
    }

    /** Number of comma-separated members; relies on ',' chains nesting on the left-hand side. */
    public int getCommaMemberCount() {
        if (isComma()) {
            return leftHandSide.getCommaMemberCount() + 1;
        } else {
            return 1;
        }
    }

    public Collection<Operation> getCommaMembers(boolean fromLeft) {
        return getAnyMembers(",", fromLeft);
    }

    public Collection<Operation> getSequenceMembers(boolean fromLeft) {
        return getAnyMembers(";", fromLeft);
    }

    /** Flattens a left-nested chain of {@code op} operations into its member operations, in
     * left-to-right order when {@code fromLeft} is true, otherwise right-to-left. A node that is
     * not an {@code op} operation is itself the single member. */
    private Collection<Operation> getAnyMembers(String op, boolean fromLeft) {
        ArrayList<Operation> result = new ArrayList<Operation>();
        if (isOperation(op)) {
            if (!fromLeft) {
                result.add(this.rightHandSide);
            }
            result.addAll(leftHandSide.getAnyMembers(op, fromLeft));
            if (fromLeft) {
                result.add(this.rightHandSide);
            }
        } else {
            result.add(this);
        }
        return result;
    }

    /** Number of ';'-separated members; relies on ';' chains nesting on the left-hand side. */
    public int getSequenceMemberCount() {
        if (isSequence()) {
            return leftHandSide.getSequenceMemberCount() + 1;
        } else {
            return 1;
        }
    }

    /** True for a bare NAME token with empty operands on both sides. */
    public boolean isNameAlone() {
        return !isNothing() && leftHandSide.isNothing() && token.type == Token.Type.NAME && rightHandSide.isNothing();
    }

    public boolean isNormalEquals() {
        return isEquals() && !isSpecialEquals();
    }

    /** An '=' is "special" (a definition rather than a comparison) when it sits directly inside
     * a ';' sequence. */
    public boolean isSpecialEquals() {
        return isEquals() && enclosing != null && enclosing.isSequence();
    }

    public boolean isNameEquals() {
        return isSpecialEquals() && leftHandSide.isNameAlone();
    }

    public boolean isConstantText() {
        return token != null && token.type == Token.Type.TEXT;
    }

    public boolean isConstantNumber() {
        return token != null && token.type == Token.Type.NUMBER;
    }

    public boolean isConstant() {
        return isConstantText() || isConstantNumber();
    }

    public boolean isLeftHandSide() {
        return enclosing != null && enclosing.leftHandSide == this;
    }

    public boolean isRightHandSide() {
        return enclosing != null && enclosing.rightHandSide == this;
    }

    /** Fully-parenthesised rendering; "()" for the empty operation. */
    public String toString() {
        if (isNothing()) {
            return "()";
        } else if (isConstant() || isNameAlone()) {
            return token.toString();
        } else if (isBinaryOperation()) {
            return "(" + leftHandSide + " " + operator + " " + rightHandSide + ")";
        } else {
            throw new Error("Unrecognised!");
        }
    }
}
|
|
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* you may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.core.operation.mgt.dao.impl.operation;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.device.mgt.common.DeviceIdentifier;
import org.wso2.carbon.device.mgt.common.PaginationRequest;
import org.wso2.carbon.device.mgt.common.operation.mgt.Activity;
import org.wso2.carbon.device.mgt.common.operation.mgt.ActivityStatus;
import org.wso2.carbon.device.mgt.common.operation.mgt.OperationResponse;
import org.wso2.carbon.device.mgt.core.dto.operation.mgt.Operation;
import org.wso2.carbon.device.mgt.core.operation.mgt.OperationMapping;
import org.wso2.carbon.device.mgt.core.operation.mgt.dao.OperationManagementDAOException;
import org.wso2.carbon.device.mgt.core.operation.mgt.dao.OperationManagementDAOFactory;
import org.wso2.carbon.device.mgt.core.operation.mgt.dao.OperationManagementDAOUtil;
import org.wso2.carbon.device.mgt.core.operation.mgt.dao.impl.GenericOperationDAOImpl;
import org.wso2.carbon.device.mgt.core.operation.mgt.dao.util.OperationDAOUtil;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* This class holds the implementation of OperationDAO which can be used to support Oracle db syntax.
*/
public class OracleOperationDAOImpl extends GenericOperationDAOImpl {
@Override
public List<? extends Operation> getOperationsForDevice(int enrolmentId, PaginationRequest request)
throws OperationManagementDAOException {
PreparedStatement stmt = null;
ResultSet rs = null;
Operation operation;
List<Operation> operations = new ArrayList<Operation>();
try {
Connection conn = OperationManagementDAOFactory.getConnection();
String sql = "SELECT o.ID, TYPE, o.CREATED_TIMESTAMP, o.RECEIVED_TIMESTAMP, "
+ "o.OPERATION_CODE, om.STATUS, om.ID AS OM_MAPPING_ID, om.UPDATED_TIMESTAMP FROM DM_OPERATION o "
+ "INNER JOIN (SELECT dm.OPERATION_ID, dm.ID, dm.STATUS, dm.UPDATED_TIMESTAMP FROM DM_ENROLMENT_OP_MAPPING dm "
+ "WHERE dm.ENROLMENT_ID = ?) om ON o.ID = om.OPERATION_ID ORDER BY o.CREATED_TIMESTAMP DESC "
+ "OFFSET ? ROWS FETCH NEXT ? ROWS ONLY";
stmt = conn.prepareStatement(sql);
stmt.setInt(1, enrolmentId);
stmt.setInt(2, request.getStartIndex());
stmt.setInt(3, request.getRowCount());
rs = stmt.executeQuery();
while (rs.next()) {
operation = new Operation();
operation.setId(rs.getInt("ID"));
operation.setType(Operation.Type.valueOf(rs.getString("TYPE")));
operation.setCreatedTimeStamp(rs.getTimestamp("CREATED_TIMESTAMP").toString());
if (rs.getTimestamp("RECEIVED_TIMESTAMP") == null) {
operation.setReceivedTimeStamp("");
} else {
operation.setReceivedTimeStamp(rs.getTimestamp("RECEIVED_TIMESTAMP").toString());
}
operation.setCode(rs.getString("OPERATION_CODE"));
operation.setStatus(Operation.Status.valueOf(rs.getString("STATUS")));
operations.add(operation);
}
} catch (SQLException e) {
throw new OperationManagementDAOException(
"SQL error occurred while retrieving the operation " + "available for the device'" + enrolmentId
+ "' with status '", e);
} finally {
OperationManagementDAOUtil.cleanupResources(stmt, rs);
}
return operations;
}
@Override
public List<? extends Operation> getOperationsByDeviceAndStatus(int enrolmentId,
PaginationRequest request, Operation.Status status) throws OperationManagementDAOException {
PreparedStatement stmt = null;
ResultSet rs = null;
Operation operation;
List<Operation> operations = new ArrayList<Operation>();
try {
Connection conn = OperationManagementDAOFactory.getConnection();
String sql = "SELECT o.ID, TYPE, o.CREATED_TIMESTAMP, o.RECEIVED_TIMESTAMP, o.OPERATION_CODE, "
+ "om.ID AS OM_MAPPING_ID, om.UPDATED_TIMESTAMP FROM DM_OPERATION o "
+ "INNER JOIN (SELECT dm.OPERATION_ID, dm.ID, dm.STATUS, dm.UPDATED_TIMESTAMP FROM DM_ENROLMENT_OP_MAPPING dm "
+ "WHERE dm.ENROLMENT_ID = ? AND dm.STATUS = ?) om ON o.ID = om.OPERATION_ID ORDER BY "
+ "o.CREATED_TIMESTAMP DESC OFFSET ? ROWS FETCH NEXT ? ROWS ONLY";
stmt = conn.prepareStatement(sql);
stmt.setInt(1, enrolmentId);
stmt.setString(2, status.toString());
stmt.setInt(3, request.getStartIndex());
stmt.setInt(4, request.getRowCount());
rs = stmt.executeQuery();
while (rs.next()) {
operation = new Operation();
operation.setId(rs.getInt("ID"));
operation.setType(Operation.Type.valueOf(rs.getString("TYPE")));
operation.setCreatedTimeStamp(rs.getTimestamp("CREATED_TIMESTAMP").toString());
if (rs.getTimestamp("RECEIVED_TIMESTAMP") == null) {
operation.setReceivedTimeStamp("");
} else {
operation.setReceivedTimeStamp(rs.getTimestamp("RECEIVED_TIMESTAMP").toString());
}
operation.setCode(rs.getString("OPERATION_CODE"));
operation.setStatus(status);
operations.add(operation);
}
} catch (SQLException e) {
throw new OperationManagementDAOException(
"SQL error occurred while retrieving the operation " + "available for the device'" + enrolmentId
+ "' with status '" + status.toString(), e);
} finally {
OperationManagementDAOUtil.cleanupResources(stmt, rs);
}
return operations;
}
@Override
public Map<Integer, List<OperationMapping>> getOperationMappingsByStatus(Operation.Status opStatus, Operation.PushNotificationStatus pushNotificationStatus,
int limit) throws OperationManagementDAOException {
PreparedStatement stmt = null;
ResultSet rs = null;
OperationMapping operationMapping;
Map<Integer, List<OperationMapping>> operationMappingsTenantMap = new HashMap<>();
try {
Connection conn = OperationManagementDAOFactory.getConnection();
String sql = "SELECT op.ENROLMENT_ID, op.OPERATION_ID, d.DEVICE_IDENTIFICATION, dt.NAME as DEVICE_TYPE, d" +
".TENANT_ID FROM DM_DEVICE d, DM_ENROLMENT_OP_MAPPING op, DM_DEVICE_TYPE dt WHERE op.STATUS = ? " +
"AND op.PUSH_NOTIFICATION_STATUS = ? AND d.DEVICE_TYPE_ID = dt.ID AND d.ID=op.ENROLMENT_ID AND " +
"ROWNUM <= ? ORDER BY op.OPERATION_ID";
stmt = conn.prepareStatement(sql);
stmt.setString(1, opStatus.toString());
stmt.setString(2, pushNotificationStatus.toString());
stmt.setInt(3, limit);
rs = stmt.executeQuery();
while (rs.next()) {
int tenantID = rs.getInt("TENANT_ID");
List<OperationMapping> operationMappings = operationMappingsTenantMap.get(tenantID);
if (operationMappings == null) {
operationMappings = new LinkedList<>();
operationMappingsTenantMap.put(tenantID, operationMappings);
}
operationMapping = new OperationMapping();
operationMapping.setOperationId(rs.getInt("OPERATION_ID"));
DeviceIdentifier deviceIdentifier = new DeviceIdentifier();
deviceIdentifier.setId(rs.getString("DEVICE_IDENTIFICATION"));
deviceIdentifier.setType(rs.getString("DEVICE_TYPE"));
operationMapping.setDeviceIdentifier(deviceIdentifier);
operationMapping.setEnrollmentId(rs.getInt("ENROLMENT_ID"));
operationMapping.setTenantId(tenantID);
operationMappings.add(operationMapping);
}
} catch (SQLException e) {
throw new OperationManagementDAOException("SQL error while getting operation mappings from database. ", e);
} finally {
OperationManagementDAOUtil.cleanupResources(stmt, rs);
}
return operationMappingsTenantMap;
}
@Override
public List<Activity> getActivitiesUpdatedAfter(long timestamp, int limit, int offset) throws OperationManagementDAOException {
PreparedStatement stmt = null;
ResultSet rs = null;
List<Activity> activities = new ArrayList<>();
try {
Connection conn = OperationManagementDAOFactory.getConnection();
String sql = "SELECT opm.ENROLMENT_ID, opm.CREATED_TIMESTAMP, opm.UPDATED_TIMESTAMP, opm.OPERATION_ID,\n"
+ "op.OPERATION_CODE, op.TYPE OPERATION_TYPE, opm.STATUS, en.DEVICE_ID,\n"
+ "ops.RECEIVED_TIMESTAMP, ops.ID OP_RES_ID, ops.OPERATION_RESPONSE,\n"
+ "de.DEVICE_IDENTIFICATION, dt.NAME DEVICE_TYPE\n" + "FROM DM_ENROLMENT_OP_MAPPING opm\n"
+ "LEFT JOIN DM_OPERATION op ON opm.OPERATION_ID = op.ID \n"
+ "LEFT JOIN DM_ENROLMENT en ON opm.ENROLMENT_ID = en.ID \n"
+ "LEFT JOIN DM_DEVICE de ON en.DEVICE_ID = de.ID \n"
+ "LEFT JOIN DM_DEVICE_TYPE dt ON dt.ID = de.DEVICE_TYPE_ID \n"
+ "LEFT JOIN DM_DEVICE_OPERATION_RESPONSE ops ON \n"
+ "opm.ENROLMENT_ID = ops.ENROLMENT_ID AND opm.OPERATION_ID = ops.OPERATION_ID \n"
+ "WHERE opm.UPDATED_TIMESTAMP > ? \n" + "AND de.TENANT_ID = ? \n";
if (timestamp == 0) {
sql += "ORDER BY opm.OPERATION_ID OFFSET ? ROWS FETCH NEXT ? ROWS ONLY";
} else {
sql += "ORDER BY opm.UPDATED_TIMESTAMP asc OFFSET ? ROWS FETCH NEXT ? ROWS ONLY";
}
stmt = conn.prepareStatement(sql);
stmt.setLong(1, timestamp);
int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
stmt.setInt(2, tenantId);
stmt.setInt(3, offset);
stmt.setInt(4, limit);
rs = stmt.executeQuery();
int operationId = 0;
int enrolmentId = 0;
int responseId = 0;
Activity activity = null;
ActivityStatus activityStatus = null;
while (rs.next()) {
if (operationId != rs.getInt("OPERATION_ID")) {
activity = new Activity();
activities.add(activity);
List<ActivityStatus> statusList = new ArrayList<>();
activityStatus = new ActivityStatus();
operationId = rs.getInt("OPERATION_ID");
enrolmentId = rs.getInt("ENROLMENT_ID");
activity.setType(Activity.Type.valueOf(rs.getString("OPERATION_TYPE")));
activity.setCreatedTimeStamp(
new java.util.Date(rs.getLong(("CREATED_TIMESTAMP")) * 1000).toString());
activity.setCode(rs.getString("OPERATION_CODE"));
DeviceIdentifier deviceIdentifier = new DeviceIdentifier();
deviceIdentifier.setId(rs.getString("DEVICE_IDENTIFICATION"));
deviceIdentifier.setType(rs.getString("DEVICE_TYPE"));
activityStatus.setDeviceIdentifier(deviceIdentifier);
activityStatus.setStatus(ActivityStatus.Status.valueOf(rs.getString("STATUS")));
List<OperationResponse> operationResponses = new ArrayList<>();
if (rs.getInt("UPDATED_TIMESTAMP") != 0) {
activityStatus.setUpdatedTimestamp(
new java.util.Date(rs.getLong(("UPDATED_TIMESTAMP")) * 1000).toString());
}
if (rs.getTimestamp("RECEIVED_TIMESTAMP") != (null)) {
operationResponses.add(OperationDAOUtil.getOperationResponse(rs));
responseId = rs.getInt("OP_RES_ID");
}
activityStatus.setResponses(operationResponses);
statusList.add(activityStatus);
activity.setActivityStatus(statusList);
activity.setActivityId(OperationDAOUtil.getActivityId(rs.getInt("OPERATION_ID")));
}
if (operationId == rs.getInt("OPERATION_ID") && enrolmentId != rs.getInt("ENROLMENT_ID")) {
activityStatus = new ActivityStatus();
activity.setType(Activity.Type.valueOf(rs.getString("OPERATION_TYPE")));
activity.setCreatedTimeStamp(
new java.util.Date(rs.getLong(("CREATED_TIMESTAMP")) * 1000).toString());
activity.setCode(rs.getString("OPERATION_CODE"));
DeviceIdentifier deviceIdentifier = new DeviceIdentifier();
deviceIdentifier.setId(rs.getString("DEVICE_IDENTIFICATION"));
deviceIdentifier.setType(rs.getString("DEVICE_TYPE"));
activityStatus.setDeviceIdentifier(deviceIdentifier);
activityStatus.setStatus(ActivityStatus.Status.valueOf(rs.getString("STATUS")));
List<OperationResponse> operationResponses = new ArrayList<>();
if (rs.getInt("UPDATED_TIMESTAMP") != 0) {
activityStatus.setUpdatedTimestamp(
new java.util.Date(rs.getLong(("UPDATED_TIMESTAMP")) * 1000).toString());
}
if (rs.getTimestamp("RECEIVED_TIMESTAMP") != (null)) {
operationResponses.add(OperationDAOUtil.getOperationResponse(rs));
responseId = rs.getInt("OP_RES_ID");
}
activityStatus.setResponses(operationResponses);
activity.getActivityStatus().add(activityStatus);
enrolmentId = rs.getInt("ENROLMENT_ID");
}
if (rs.getInt("OP_RES_ID") != 0 && responseId != rs.getInt("OP_RES_ID")) {
if (rs.getTimestamp("RECEIVED_TIMESTAMP") != (null)) {
activityStatus.getResponses().add(OperationDAOUtil.getOperationResponse(rs));
responseId = rs.getInt("OP_RES_ID");
}
}
}
} catch (SQLException e) {
throw new OperationManagementDAOException(
"Error occurred while getting the operation details from " + "the database.", e);
} catch (ClassNotFoundException e) {
throw new OperationManagementDAOException(
"Error occurred while converting the operation response to string.", e);
} catch (IOException e) {
throw new OperationManagementDAOException(
"IO exception occurred while converting the operations responses.", e);
} finally {
OperationManagementDAOUtil.cleanupResources(stmt, rs);
}
return activities;
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.appmesh.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* An object that represents the DNS service discovery information for your virtual node.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appmesh-2019-01-25/DnsServiceDiscovery" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DnsServiceDiscovery implements Serializable, Cloneable, StructuredPojo {

    /** The DNS service discovery hostname for the virtual node. */
    private String hostname;

    /** The DNS response type for the virtual node. */
    private String responseType;

    /**
     * Sets the DNS service discovery hostname for the virtual node.
     *
     * @param hostname
     *        Specifies the DNS service discovery hostname for the virtual node.
     */
    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    /**
     * Returns the DNS service discovery hostname for the virtual node.
     *
     * @return Specifies the DNS service discovery hostname for the virtual node.
     */
    public String getHostname() {
        return hostname;
    }

    /**
     * Fluent variant of {@link #setHostname(String)}.
     *
     * @param hostname
     *        Specifies the DNS service discovery hostname for the virtual node.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DnsServiceDiscovery withHostname(String hostname) {
        setHostname(hostname);
        return this;
    }

    /**
     * Sets the DNS response type for the virtual node.
     *
     * @param responseType
     *        Specifies the DNS response type for the virtual node.
     * @see DnsResponseType
     */
    public void setResponseType(String responseType) {
        this.responseType = responseType;
    }

    /**
     * Returns the DNS response type for the virtual node.
     *
     * @return Specifies the DNS response type for the virtual node.
     * @see DnsResponseType
     */
    public String getResponseType() {
        return responseType;
    }

    /**
     * Fluent variant of {@link #setResponseType(String)}.
     *
     * @param responseType
     *        Specifies the DNS response type for the virtual node.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DnsResponseType
     */
    public DnsServiceDiscovery withResponseType(String responseType) {
        setResponseType(responseType);
        return this;
    }

    /**
     * Fluent setter accepting the {@link DnsResponseType} enum; stores its string form.
     *
     * @param responseType
     *        Specifies the DNS response type for the virtual node.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DnsResponseType
     */
    public DnsServiceDiscovery withResponseType(DnsResponseType responseType) {
        this.responseType = responseType.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging.
     * Sensitive data will be redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getHostname() != null) {
            buf.append("Hostname: ").append(getHostname()).append(",");
        }
        if (getResponseType() != null) {
            buf.append("ResponseType: ").append(getResponseType());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DnsServiceDiscovery)) {
            return false;
        }
        DnsServiceDiscovery that = (DnsServiceDiscovery) obj;
        return nullSafeEquals(that.getHostname(), this.getHostname())
                && nullSafeEquals(that.getResponseType(), this.getResponseType());
    }

    /** Null-tolerant equality check used by {@link #equals(Object)}. */
    private static boolean nullSafeEquals(String a, String b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getHostname() == null ? 0 : getHostname().hashCode());
        result = prime * result + (getResponseType() == null ? 0 : getResponseType().hashCode());
        return result;
    }

    @Override
    public DnsServiceDiscovery clone() {
        try {
            return (DnsServiceDiscovery) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class declares Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.appmesh.model.transform.DnsServiceDiscoveryMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/**
*
*/
package org.apache.geode.internal.cache;
import static org.apache.geode.distributed.ConfigurationProperties.*;
import static org.junit.Assert.*;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.cache30.ClientServerTestCase;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.distributed.internal.InternalLocator;
import org.apache.geode.internal.AvailablePort;
import org.apache.geode.internal.cache.lru.EnableLRU;
import org.apache.geode.internal.cache.tier.sockets.ClientUpdateMessageImpl;
import org.apache.geode.internal.cache.tier.sockets.ConflationDUnitTest;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.NetworkUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.DistributedTest;
/**
* Tests the size of clientUpdateMessageImpl with the size calculated by
* {@link org.apache.geode.internal.cache.lru.MemLRUCapacityController} for HA overFlow
*
* @since GemFire 5.7
*/
@Category(DistributedTest.class)
public class HAOverflowMemObjectSizerDUnitTest extends JUnit4DistributedTestCase {
/* entry over head used by memCapacityController */
private static final int OVERHEAD_PER_ENTRY = 250;
protected static InternalLocator locator;
/** The cache instance */
static Cache cache;
/** The distributedSystem instance */
static DistributedSystem ds = null;
static String regionName = HAOverflowMemObjectSizerDUnitTest.class.getSimpleName() + "-region";
/* handler for LRU capacity controller */
private static EnableLRU cc = null;
VM client = null;
static VM serverVM = null;
Integer serverPort1 = null;
Integer serverPort2 = null;
static String ePolicy = "mem";
static int capacity = 1;
/* store the reference of Client Messages Region */
static Region region = null;
@Override
public final void postSetUp() throws Exception {
disconnectAllFromDS();
Host host = Host.getHost(0);
client = host.getVM(1);
serverVM = host.getVM(3);
}
@Override
public final void preTearDown() throws Exception {
serverVM.invoke(() -> ConflationDUnitTest.unsetIsSlowStart());
client.invoke(() -> HAOverflowMemObjectSizerDUnitTest.closeCache());
serverVM.invoke(() -> HAOverflowMemObjectSizerDUnitTest.closeCache());
}
public static void cleanUp(Long limit) {
ConflationDUnitTest.unsetIsSlowStart();
if (region != null) {
Set entries = region.entrySet();
entries = region.entrySet();
long timeElapsed = 0, startTime = System.currentTimeMillis();
while (entries.size() > 0 && timeElapsed <= limit.longValue()) {
// doing it to clean up the queue
// making sure that dispacher will dispached all events
try {
// sleep in small chunks
Thread.sleep(50);
timeElapsed = System.currentTimeMillis() - startTime;
} catch (InterruptedException e) {
fail("interrupted");
}
entries = region.entrySet();
}
}
}
/**
* Creates cache and starts the bridge-server
*
* @param notification property of BridgeServer
*/
public static Integer createCacheServer(Boolean notification) throws Exception {
new HAOverflowMemObjectSizerDUnitTest().createCache(new Properties());
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.NORMAL);
RegionAttributes attrs = factory.create();
Region region = cache.createRegion(regionName, attrs);
assertNotNull(region);
CacheServer server1 = cache.addCacheServer();
assertNotNull(server1);
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
server1.setPort(port);
server1.setNotifyBySubscription(notification.booleanValue());
server1.getClientSubscriptionConfig().setCapacity(capacity);
server1.getClientSubscriptionConfig().setEvictionPolicy(ePolicy);
server1.start();
assertTrue(server1.isRunning());
/*
* storing capacity controller reference
*/
cc = ((VMLRURegionMap) ((LocalRegion) cache.getRegion(
Region.SEPARATOR + CacheServerImpl.generateNameForClientMsgsRegion(port))).entries)
._getCCHelper();
return new Integer(server1.getPort());
}
/**
* create client cache
*/
public static void createCacheClient(Integer port1, String host) throws Exception {
Properties props = new Properties();
props.setProperty(MCAST_PORT, "0");
props.setProperty(LOCATORS, "");
new HAOverflowMemObjectSizerDUnitTest().createCache(props);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.NORMAL);
ClientServerTestCase.configureConnectionPool(factory, host, port1.intValue(), -1, true, -1, 2,
null, -1, -1, false);
RegionAttributes attrs = factory.create();
Region region = cache.createRegion(regionName, attrs);
assertNotNull(region);
region.registerInterest("ALL_KEYS");
}
/**
* This test does the following :<br>
* Configuration: notification by subscription is <b>true </b><br>
* 1)Verify size calculated by getSizeInByte() of ClientUpdateMessagesImpl is equal to the size
* calculated by memCapacity controller <br>
*/
@Test
public void testSizerImplementationofMemCapacityControllerWhenNotificationBySubscriptionIsTrue() {
Integer port1 = (Integer) serverVM
.invoke(() -> HAOverflowMemObjectSizerDUnitTest.createCacheServer(new Boolean(true)));
serverPort1 = port1;
serverVM.invoke(() -> ConflationDUnitTest.setIsSlowStart("15000"));
client.invoke(() -> HAOverflowMemObjectSizerDUnitTest.createCacheClient(port1,
NetworkUtils.getServerHostName(client.getHost())));
serverVM
.invoke(() -> HAOverflowMemObjectSizerDUnitTest.performPut(new Long(0L), new Long(100L)));
serverVM.invoke(
() -> HAOverflowMemObjectSizerDUnitTest.sizerTestForMemCapacityController(serverPort1));
}
/**
* This test does the following :<br>
* Configuration: notification by subscription is<b> false </b><br>
* 1)Verify size calculated by getSizeInByte() of ClientUpdateMessagesImpl is equal to the size
* calculated by memCapacity controller <br>
*/
@Test
public void testSizerImplementationofMemCapacityControllerWhenNotificationBySubscriptionIsFalse() {
Integer port2 = (Integer) serverVM
.invoke(() -> HAOverflowMemObjectSizerDUnitTest.createCacheServer(new Boolean(false)));
serverPort2 = port2;
serverVM.invoke(() -> ConflationDUnitTest.setIsSlowStart("15000"));
client.invoke(() -> HAOverflowMemObjectSizerDUnitTest.createCacheClient(port2,
NetworkUtils.getServerHostName(client.getHost())));
serverVM
.invoke(() -> HAOverflowMemObjectSizerDUnitTest.performPut(new Long(101L), new Long(200L)));
serverVM.invoke(
() -> HAOverflowMemObjectSizerDUnitTest.sizerTestForMemCapacityController(serverPort2));
}
/**
* Check for size return by ClientUpdateMessagesImpl getSizeInByte() with size return by
* memCapacity controller
*
* @param port - BridgeServer port required to get ClientMessagesRegion
*/
public static void sizerTestForMemCapacityController(Integer port) {
region = cache.getRegion(
Region.SEPARATOR + CacheServerImpl.generateNameForClientMsgsRegion(port.intValue()));
assertNotNull(region);
Set entries = region.entrySet();
assertTrue(entries.size() > 0);
Iterator iter = entries.iterator();
for (; iter.hasNext();) {
Region.Entry entry = (Region.Entry) iter.next();
ClientUpdateMessageImpl cum = (ClientUpdateMessageImpl) entry.getValue();
// passed null to get the size of value ie CUM only ,
// but this function also add overhead per entry
// so to get exact size calculated by memCapacityController
// we need substract this over head
// as this default value is private static in MemLRUCapacityController
// cannot access directly
assertTrue("cum size is not equal",
(cc.entrySize(null, entry.getValue()) - OVERHEAD_PER_ENTRY) == cum.getSizeInBytes());
}
cache.getLogger().fine("Test passed. Now, doing a cleanup job.");
// added here as sleep should be on server where CMR is present and
// dispacher supposed to run
cleanUp(new Long(20000));
}
/**
* Creates the cache
*
* @param props - distributed system props
* @throws Exception - thrown in any problem occurs in creating cache
*/
private void createCache(Properties props) throws Exception {
DistributedSystem ds = getSystem(props);
cache = CacheFactory.create(ds);
assertNotNull(cache);
}
/* close cache */
public static void closeCache() {
try {
if (cache != null && !cache.isClosed()) {
cache.close();
cache.getDistributedSystem().disconnect();
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
/**
* perform put on server region that will put entries on CMR region
*
* @param lowerLimit
* @param higerlimit - lower and upper limit on put
*/
public static void performPut(Long lowerLimit, Long higerlimit) {
assertNotNull(lowerLimit);
assertNotNull(higerlimit);
LocalRegion region = (LocalRegion) cache.getRegion(Region.SEPARATOR + regionName);
assertNotNull(region);
for (long i = lowerLimit.longValue(); i < higerlimit.longValue(); i++) {
region.put(new Long(i), new Long(i));
}
}
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.externalSystem.service.project;
import com.intellij.facet.Facet;
import com.intellij.facet.FacetModel;
import com.intellij.facet.FacetTypeId;
import com.intellij.facet.ModifiableFacetModel;
import com.intellij.ide.highlighter.ModuleFileType;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.ExternalSystemManager;
import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.ExternalProjectInfo;
import com.intellij.openapi.externalSystem.model.ProjectKeys;
import com.intellij.openapi.externalSystem.model.project.LibraryData;
import com.intellij.openapi.externalSystem.model.project.ModuleData;
import com.intellij.openapi.externalSystem.model.project.ProjectCoordinate;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleWithNameAlreadyExists;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.ex.ProjectRootManagerEx;
import com.intellij.openapi.roots.impl.ModifiableModelCommitter;
import com.intellij.openapi.roots.impl.libraries.LibraryEx;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.roots.libraries.LibraryTablesRegistrar;
import com.intellij.openapi.roots.ui.configuration.FacetsProvider;
import com.intellij.openapi.roots.ui.configuration.ModulesProvider;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.packaging.artifacts.ArtifactModel;
import com.intellij.packaging.artifacts.ModifiableArtifactModel;
import com.intellij.packaging.elements.ManifestFileProvider;
import com.intellij.packaging.elements.PackagingElementResolvingContext;
import com.intellij.packaging.impl.artifacts.DefaultManifestFileProvider;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.graph.CachingSemiGraph;
import com.intellij.util.graph.Graph;
import com.intellij.util.graph.GraphGenerator;
import com.intellij.util.graph.InboundSemiGraph;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.*;
import java.util.stream.Collectors;
import static com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil.isRelated;
import static com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil.toCanonicalPath;
/**
 * Skeleton implementation of {@link IdeModifiableModelsProvider}: lazily obtains modifiable
 * counterparts of the IDE project models (module model, per-module root and facet models,
 * project library table, library models, artifact model, workspace) and commits or disposes
 * all of them as a unit in {@link #commit()} / {@link #dispose()}. Subclasses decide how the
 * modifiable models are actually acquired via the {@code doGet*} factory methods.
 * <p>
 * NOTE(review): commit() asserts it runs inside mergeRootsChangesDuring and dispose()
 * asserts the dispatch thread — presumably this class is meant to be used from the EDT only;
 * confirm with callers.
 */
public abstract class AbstractIdeModifiableModelsProvider extends IdeModelsProviderImpl implements IdeModifiableModelsProvider {
  private static final Logger LOG = Logger.getInstance(AbstractIdeModifiableModelsProvider.class);

  // Lazily-created modifiable models. Each is instantiated at most once and then cached so
  // that repeated lookups hand back the same in-progress model.
  private ModifiableModuleModel myModifiableModuleModel;
  private final Map<Module, ModifiableRootModel> myModifiableRootModels = new THashMap<>();
  private final Map<Module, ModifiableFacetModel> myModifiableFacetModels = new THashMap<>();
  // Test module -> name of its production module; applied to TestModuleProperties on commit.
  private final Map<Module, String> myProductionModulesForTestModules = new THashMap<>();
  // IdentityHashMap on purpose: a library being edited may change its equals()-relevant
  // state, so the cache must key on object identity.
  private final Map<Library, Library.ModifiableModel> myModifiableLibraryModels = new IdentityHashMap<>();
  private ModifiableArtifactModel myModifiableArtifactModel;
  private AbstractIdeModifiableModelsProvider.MyPackagingElementResolvingContext myPackagingElementResolvingContext;
  private final ArtifactExternalDependenciesImporter myArtifactExternalDependenciesImporter;
  @Nullable
  private ModifiableWorkspace myModifiableWorkspace;
  private final MyUserDataHolderBase myUserData;
  private volatile boolean myDisposed;

  public AbstractIdeModifiableModelsProvider(@NotNull Project project) {
    super(project);
    myUserData = new MyUserDataHolderBase();
    myArtifactExternalDependenciesImporter = new ArtifactExternalDependenciesImporterImpl();
  }

  // Factory hooks: subclasses supply the concrete modifiable models; this class caches them.
  protected abstract ModifiableArtifactModel doGetModifiableArtifactModel();

  protected abstract ModifiableModuleModel doGetModifiableModuleModel();

  protected abstract ModifiableRootModel doGetModifiableRootModel(Module module);

  protected abstract ModifiableFacetModel doGetModifiableFacetModel(Module module);

  protected abstract Library.ModifiableModel doGetModifiableLibraryModel(Library library);

  @NotNull
  @Override
  public abstract LibraryTable.ModifiableModel getModifiableProjectLibrariesModel();

  /** Modules as seen through the (possibly uncommitted) modifiable module model. */
  @NotNull
  @Override
  public Module[] getModules() {
    return getModifiableModuleModel().getModules();
  }

  // Applies accumulated external artifact dependencies to the modifiable artifact model;
  // called from commit().
  protected void processExternalArtifactDependencies() {
    myArtifactExternalDependenciesImporter.applyChanges(getModifiableArtifactModel(), getPackagingElementResolvingContext());
  }

  @Override
  public PackagingElementResolvingContext getPackagingElementResolvingContext() {
    // Lazily created; the context resolves against this provider's modifiable models rather
    // than the committed project state.
    if (myPackagingElementResolvingContext == null) {
      myPackagingElementResolvingContext = new MyPackagingElementResolvingContext();
    }
    return myPackagingElementResolvingContext;
  }

  @NotNull
  @Override
  public OrderEntry[] getOrderEntries(@NotNull Module module) {
    return getRootModel(module).getOrderEntries();
  }

  /**
   * Creates a new module at {@code filePath} through the modifiable module model and forces
   * its name to match the file name (sans extension).
   */
  @NotNull
  @Override
  public Module newModule(@NotNull final String filePath, final String moduleTypeId) {
    Module module = getModifiableModuleModel().newModule(filePath, moduleTypeId);
    final String moduleName = FileUtil.getNameWithoutExtension(new File(filePath));
    if (!module.getName().equals(moduleName)) {
      try {
        getModifiableModuleModel().renameModule(module, moduleName);
      }
      catch (ModuleWithNameAlreadyExists exists) {
        // Best effort: keep the model's chosen name if the preferred one is taken.
        LOG.warn(exists);
      }
    }
    // set module type id explicitly otherwise it can not be set if there is an existing module (with the same filePath) and w/o 'type' attribute
    module.setModuleType(moduleTypeId);
    return module;
  }

  /**
   * Creates a module for external-system {@code moduleData}, picking the first suggested
   * name that is not already used by an existing IDE module.
   */
  @NotNull
  @Override
  public Module newModule(@NotNull ModuleData moduleData) {
    String imlName = null;
    for (String candidate: suggestModuleNameCandidates(moduleData)) {
      Module module = findIdeModule(candidate);
      if (module == null) {
        imlName = candidate;
        break;
      }
    }
    // suggestModuleNameCandidates is expected to eventually yield a free name.
    assert imlName != null : "Too many duplicated module names";

    String filePath = toCanonicalPath(moduleData.getModuleFileDirectoryPath() + "/" + imlName + ModuleFileType.DOT_DEFAULT_EXTENSION);
    return newModule(filePath, moduleData.getModuleTypeId());
  }

  /** Finds a module by name, also matching modules scheduled to be renamed to that name. */
  @Nullable
  @Override
  public Module findIdeModule(@NotNull String ideModuleName) {
    Module module = getModifiableModuleModel().findModuleByName(ideModuleName);
    return module == null ? getModifiableModuleModel().getModuleToBeRenamed(ideModuleName) : module;
  }

  /** Finds a project-level library matching the given external library data, or null. */
  @Nullable
  @Override
  public Library findIdeLibrary(@NotNull LibraryData libraryData) {
    final LibraryTable.ModifiableModel libraryTable = getModifiableProjectLibrariesModel();
    for (Library ideLibrary: libraryTable.getLibraries()) {
      if (isRelated(ideLibrary, libraryData)) return ideLibrary;
    }
    return null;
  }

  @Override
  @NotNull
  public VirtualFile[] getContentRoots(Module module) {
    return getRootModel(module).getContentRoots();
  }

  @NotNull
  @Override
  public VirtualFile[] getSourceRoots(Module module) {
    return getRootModel(module).getSourceRoots();
  }

  @NotNull
  @Override
  public VirtualFile[] getSourceRoots(Module module, boolean includingTests) {
    return getRootModel(module).getSourceRoots(includingTests);
  }

  @NotNull
  @Override
  public ModifiableModuleModel getModifiableModuleModel() {
    // Lazily created and cached; committed or disposed with the rest of the models.
    if (myModifiableModuleModel == null) {
      myModifiableModuleModel = doGetModifiableModuleModel();
    }
    return myModifiableModuleModel;
  }

  @Override
  @NotNull
  public ModifiableRootModel getModifiableRootModel(Module module) {
    // getRootModel always caches ModifiableRootModel instances, so this cast is safe.
    return (ModifiableRootModel)getRootModel(module);
  }

  @NotNull
  private ModuleRootModel getRootModel(Module module) {
    return myModifiableRootModels.computeIfAbsent(module, k -> doGetModifiableRootModel(module));
  }

  @Override
  @NotNull
  public ModifiableFacetModel getModifiableFacetModel(Module module) {
    return myModifiableFacetModels.computeIfAbsent(module, k -> doGetModifiableFacetModel(module));
  }

  @Override
  @NotNull
  public ModifiableArtifactModel getModifiableArtifactModel() {
    if (myModifiableArtifactModel == null) {
      myModifiableArtifactModel = doGetModifiableArtifactModel();
    }
    return myModifiableArtifactModel;
  }

  @Override
  @NotNull
  public Library[] getAllLibraries() {
    return getModifiableProjectLibrariesModel().getLibraries();
  }

  @Override
  @Nullable
  public Library getLibraryByName(String name) {
    return getModifiableProjectLibrariesModel().getLibraryByName(name);
  }

  @Override
  public Library createLibrary(String name) {
    return getModifiableProjectLibrariesModel().createLibrary(name);
  }

  @Override
  public Library createLibrary(String name, @Nullable ProjectModelExternalSource externalSource) {
    // null library kind; only the external source is recorded.
    return getModifiableProjectLibrariesModel().createLibrary(name, null, externalSource);
  }

  @Override
  public void removeLibrary(Library library) {
    getModifiableProjectLibrariesModel().removeLibrary(library);
  }

  @Override
  public Library.ModifiableModel getModifiableLibraryModel(Library library) {
    return myModifiableLibraryModels.computeIfAbsent(library, k -> doGetModifiableLibraryModel(library));
  }

  /**
   * Returns the modifiable workspace used for library-to-module dependency substitution, or
   * null when substitution is disabled.
   */
  @Nullable
  public ModifiableWorkspace getModifiableWorkspace() {
    if (myModifiableWorkspace == null && ExternalProjectsWorkspaceImpl.isDependencySubstitutionEnabled()) {
      myModifiableWorkspace = doGetModifiableWorkspace();
    }
    return myModifiableWorkspace;
  }

  @NotNull
  @Override
  public String[] getLibraryUrls(@NotNull Library library, @NotNull OrderRootType type) {
    // Prefer the in-progress modifiable model (it may contain uncommitted URL edits);
    // fall back to the committed library state.
    final Library.ModifiableModel model = myModifiableLibraryModels.get(library);
    if (model != null) {
      return model.getUrls(type);
    }
    return library.getUrls(type);
  }

  @Override
  public ModalityState getModalityStateForQuestionDialogs() {
    return ModalityState.NON_MODAL;
  }

  @Override
  public ArtifactExternalDependenciesImporter getArtifactExternalDependenciesImporter() {
    return myArtifactExternalDependenciesImporter;
  }

  /**
   * Collects modules reachable from {@code module} via the outgoing edges of
   * {@link #getModuleGraph()}. NOTE(review): with getIn defined as module dependencies,
   * getOut presumably yields the modules that depend on {@code module} — confirm against
   * GraphGenerator semantics.
   */
  @NotNull
  @Override
  public List<Module> getAllDependentModules(@NotNull Module module) {
    final ArrayList<Module> list = new ArrayList<>();
    final Graph<Module> graph = getModuleGraph();
    for (Iterator<Module> i = graph.getOut(module); i.hasNext(); ) {
      list.add(i.next());
    }
    return list;
  }

  private ModifiableWorkspace doGetModifiableWorkspace() {
    // Workspace creation reads project state, hence the read action.
    return ReadAction.compute(() ->
                                ServiceManager.getService(myProject, ExternalProjectsWorkspaceImpl.class)
                                  .createModifiableWorkspace(this));
  }

  // Builds the module dependency graph over this provider's (possibly uncommitted) modules;
  // incoming edges are each module's dependencies, cached to avoid recomputation.
  private Graph<Module> getModuleGraph() {
    return GraphGenerator.generate(CachingSemiGraph.cache(new InboundSemiGraph<Module>() {
      @NotNull
      @Override
      public Collection<Module> getNodes() {
        return ContainerUtil.list(getModules());
      }

      @NotNull
      @Override
      public Iterator<Module> getIn(Module m) {
        Module[] dependentModules = getModifiableRootModel(m).getModuleDependencies(true);
        return Arrays.asList(dependentModules).iterator();
      }
    }));
  }

  // Exposes UserDataHolderBase.clearUserData (protected) so commit()/dispose() can reset it.
  private static class MyUserDataHolderBase extends UserDataHolderBase {
    void clear() {
      clearUserData();
    }
  }

  // Resolving context backed by this provider's modifiable models instead of the committed
  // project configuration, so artifact processing sees in-progress changes.
  private class MyPackagingElementResolvingContext implements PackagingElementResolvingContext {
    private final ModulesProvider myModulesProvider = new MyModulesProvider();
    private final MyFacetsProvider myFacetsProvider = new MyFacetsProvider();
    private final ManifestFileProvider myManifestFileProvider = new DefaultManifestFileProvider(this);

    @NotNull
    public Project getProject() {
      return myProject;
    }

    @NotNull
    public ArtifactModel getArtifactModel() {
      return AbstractIdeModifiableModelsProvider.this.getModifiableArtifactModel();
    }

    @NotNull
    public ModulesProvider getModulesProvider() {
      return myModulesProvider;
    }

    @NotNull
    public FacetsProvider getFacetsProvider() {
      return myFacetsProvider;
    }

    public Library findLibrary(@NotNull String level, @NotNull String libraryName) {
      // Project-level libraries come from the modifiable project libraries model; other
      // levels (global, custom) are resolved via the registrar against committed tables.
      if (level.equals(LibraryTablesRegistrar.PROJECT_LEVEL)) {
        return getLibraryByName(libraryName);
      }
      final LibraryTable table = LibraryTablesRegistrar.getInstance().getLibraryTableByLevel(level, myProject);
      return table != null ? table.getLibraryByName(libraryName) : null;
    }

    @NotNull
    @Override
    public ManifestFileProvider getManifestFileProvider() {
      return myManifestFileProvider;
    }
  }

  // ModulesProvider view over this provider's modifiable module/root/facet models.
  private class MyModulesProvider implements ModulesProvider {
    @NotNull
    public Module[] getModules() {
      return AbstractIdeModifiableModelsProvider.this.getModules();
    }

    public Module getModule(String name) {
      return AbstractIdeModifiableModelsProvider.this.findIdeModule(name);
    }

    public ModuleRootModel getRootModel(@NotNull Module module) {
      return AbstractIdeModifiableModelsProvider.this.getModifiableRootModel(module);
    }

    public FacetModel getFacetModel(@NotNull Module module) {
      return AbstractIdeModifiableModelsProvider.this.getModifiableFacetModel(module);
    }
  }

  // FacetsProvider view over this provider's modifiable facet models.
  private class MyFacetsProvider implements FacetsProvider {
    @NotNull
    public Facet[] getAllFacets(Module module) {
      return getModifiableFacetModel(module).getAllFacets();
    }

    @NotNull
    public <F extends Facet> Collection<F> getFacetsByType(Module module, FacetTypeId<F> type) {
      return getModifiableFacetModel(module).getFacetsByType(type);
    }

    public <F extends Facet> F findFacet(Module module, FacetTypeId<F> type, String name) {
      return getModifiableFacetModel(module).findFacet(type, name);
    }
  }

  /**
   * Commits all cached modifiable models in a fixed order (substitutions, artifacts'
   * external dependencies, library models, project library table, root models together with
   * the module model, test-module properties, facet models, artifact model), all inside a
   * single merged roots-change event. Finally clears accumulated user data.
   */
  @Override
  public void commit() {
    ProjectRootManagerEx.getInstanceEx(myProject).mergeRootsChangesDuring(() -> {
      if (ExternalProjectsWorkspaceImpl.isDependencySubstitutionEnabled()) {
        updateSubstitutions();
      }
      processExternalArtifactDependencies();
      for (Map.Entry<Library, Library.ModifiableModel> entry: myModifiableLibraryModels.entrySet()) {
        Library fromLibrary = entry.getKey();
        Library.ModifiableModel modifiableModel = entry.getValue();
        // removed and (previously) not committed library is being disposed by LibraryTableBase.LibraryModel.removeLibrary
        // the modifiable model of such library shouldn't be committed
        if (fromLibrary instanceof LibraryEx && ((LibraryEx)fromLibrary).isDisposed()) {
          Disposer.dispose(modifiableModel);
        }
        else {
          modifiableModel.commit();
        }
      }
      getModifiableProjectLibrariesModel().commit();

      Collection<ModifiableRootModel> rootModels = myModifiableRootModels.values();
      ModifiableRootModel[] rootModels1 = rootModels.toArray(new ModifiableRootModel[0]);
      // Sanity check before the multi-commit: no root model may already be disposed.
      for (ModifiableRootModel model: rootModels1) {
        assert !model.isDisposed() : "Already disposed: " + model;
      }
      if (myModifiableModuleModel != null) {
        // Commits root models and the module model atomically.
        ModifiableModelCommitter.multiCommit(rootModels1, myModifiableModuleModel);
      }
      else {
        for (ModifiableRootModel model: rootModels1) {
          model.commit();
        }
      }
      for (Map.Entry<Module, String> entry: myProductionModulesForTestModules.entrySet()) {
        TestModuleProperties.getInstance(entry.getKey()).setProductionModuleName(entry.getValue());
      }
      for (Map.Entry<Module, ModifiableFacetModel> each: myModifiableFacetModels.entrySet()) {
        if (!each.getKey().isDisposed()) {
          each.getValue().commit();
        }
      }
      if (myModifiableArtifactModel != null) {
        myModifiableArtifactModel.commit();
      }
    });
    myUserData.clear();
  }

  /**
   * Disposes all cached modifiable models without committing them. Must be called on the
   * dispatch thread and at most once.
   */
  @Override
  public void dispose() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    assert !myDisposed : "Already disposed!";
    myDisposed = true;
    for (ModifiableRootModel each: myModifiableRootModels.values()) {
      if (each.isDisposed()) continue;
      each.dispose();
    }
    Disposer.dispose(getModifiableProjectLibrariesModel());
    for (Library.ModifiableModel each: myModifiableLibraryModels.values()) {
      // Already-disposed library models were handled elsewhere (see commit()).
      if (each instanceof LibraryEx && ((LibraryEx)each).isDisposed()) continue;
      Disposer.dispose(each);
    }
    // NOTE(review): the module model is disposed only when it has changes — presumably an
    // unchanged model needs no disposal; confirm against ModifiableModuleModel contract.
    if (myModifiableModuleModel != null && myModifiableModuleModel.isChanged()) {
      myModifiableModuleModel.dispose();
    }
    if (myModifiableArtifactModel != null) {
      myModifiableArtifactModel.dispose();
    }
    myModifiableRootModels.clear();
    myModifiableFacetModels.clear();
    myModifiableLibraryModels.clear();
    myUserData.clear();
  }

  @Override
  public void setTestModuleProperties(Module testModule, String productionModuleName) {
    // Recorded now, applied to TestModuleProperties during commit().
    myProductionModulesForTestModules.put(testModule, productionModuleName);
  }

  @Nullable
  @Override
  public String getProductionModuleName(Module module) {
    return myProductionModulesForTestModules.get(module);
  }

  /**
   * Replaces a library order entry with a module order entry when a workspace module is
   * registered for the library's publication coordinates; returns the new entry, or null
   * when no substitution applies. Scope and exported flags are carried over, and the
   * substitution is recorded in the modifiable workspace.
   */
  @Override
  public ModuleOrderEntry trySubstitute(Module ownerModule, LibraryOrderEntry libraryOrderEntry, ProjectCoordinate publicationId) {
    String workspaceModuleCandidate = findModuleByPublication(publicationId);
    Module workspaceModule = workspaceModuleCandidate == null ? null : findIdeModule(workspaceModuleCandidate);
    if (workspaceModule == null) {
      return null;
    }
    else {
      ModifiableRootModel modifiableRootModel = getModifiableRootModel(ownerModule);
      ModuleOrderEntry moduleOrderEntry = modifiableRootModel.addModuleOrderEntry(workspaceModule);
      moduleOrderEntry.setScope(libraryOrderEntry.getScope());
      moduleOrderEntry.setExported(libraryOrderEntry.isExported());
      // Substitution is only attempted when the feature is enabled, so the workspace exists.
      ModifiableWorkspace workspace = getModifiableWorkspace();
      assert workspace != null;
      workspace.addSubstitution(ownerModule.getName(),
                                workspaceModule.getName(),
                                libraryOrderEntry.getLibraryName(),
                                libraryOrderEntry.getScope());
      modifiableRootModel.removeOrderEntry(libraryOrderEntry);
      return moduleOrderEntry;
    }
  }

  @Override
  public void registerModulePublication(Module module, ProjectCoordinate modulePublication) {
    ModifiableWorkspace workspace = getModifiableWorkspace();
    if (workspace != null) {
      workspace.register(modulePublication, module);
    }
  }

  @Override
  public boolean isSubstituted(String libraryName) {
    ModifiableWorkspace workspace = getModifiableWorkspace();
    if (workspace == null) return false;
    return workspace.isSubstituted(libraryName);
  }

  @Nullable
  @Override
  public <T> T getUserData(@NotNull Key<T> key) {
    return myUserData.getUserData(key);
  }

  @Override
  public <T> void putUserData(@NotNull Key<T> key, @Nullable T value) {
    myUserData.putUserData(key, value);
  }

  @Nullable
  @Override
  public String findModuleByPublication(ProjectCoordinate publicationId) {
    ModifiableWorkspace workspace = getModifiableWorkspace();
    return workspace == null ? null : workspace.findModule(publicationId);
  }

  /**
   * Re-evaluates library&lt;-&gt;module dependency substitutions before commit: rolls back
   * substitutions whose target module was removed (restoring the library entry) and applies
   * new substitutions for libraries whose publications now map to workspace modules. Runs
   * only when a modifiable workspace is available; commits the workspace at the end.
   */
  private void updateSubstitutions() {
    ModifiableWorkspace workspace = getModifiableWorkspace();
    if (workspace == null) return;

    // Modules present in the committed project but absent from the modifiable model are
    // about to be removed.
    final List<String> oldModules = Arrays.stream(ModuleManager.getInstance(myProject).getModules())
      .map(module -> module.getName()).collect(Collectors.toList());
    final List<String> newModules = Arrays.stream(myModifiableModuleModel.getModules())
      .map(module -> module.getName()).collect(Collectors.toList());
    final Collection<String> removedModules = new THashSet<>(oldModules);
    removedModules.removeAll(newModules);

    // library internal name -> workspace module name eligible to substitute it, gathered
    // from every external system's project data.
    Map<String, String> toSubstitute = ContainerUtil.newHashMap();
    for (ExternalSystemManager<?, ?, ?, ?, ?> manager: ExternalSystemApiUtil.getAllManagers()) {
      final Collection<ExternalProjectInfo> projectsData =
        ProjectDataManager.getInstance().getExternalProjectsData(myProject, manager.getSystemId());
      for (ExternalProjectInfo projectInfo: projectsData) {
        if (projectInfo.getExternalProjectStructure() == null) {
          continue;
        }
        Collection<DataNode<LibraryData>> libraryNodes =
          ExternalSystemApiUtil.findAll(projectInfo.getExternalProjectStructure(), ProjectKeys.LIBRARY);
        for (DataNode<LibraryData> libraryNode: libraryNodes) {
          String substitutionModuleCandidate = findModuleByPublication(libraryNode.getData());
          if (substitutionModuleCandidate != null) {
            toSubstitute.put(libraryNode.getData().getInternalName(), substitutionModuleCandidate);
          }
        }
      }
    }

    for (Module module: getModules()) {
      ModifiableRootModel modifiableRootModel = getModifiableRootModel(module);
      boolean changed = false;
      OrderEntry[] entries = modifiableRootModel.getOrderEntries();
      for (int i = 0, length = entries.length; i < length; i++) {
        OrderEntry orderEntry = entries[i];
        if (orderEntry instanceof ModuleOrderEntry) {
          // Roll back: a substituted module dependency whose target module is being removed
          // is replaced again by the original library dependency.
          String workspaceModule = ((ModuleOrderEntry)orderEntry).getModuleName();
          if (removedModules.contains(workspaceModule)) {
            DependencyScope scope = ((ModuleOrderEntry)orderEntry).getScope();
            if (workspace.isSubstitution(module.getName(), workspaceModule, scope)) {
              String libraryName = workspace.getSubstitutedLibrary(workspaceModule);
              if (libraryName != null) {
                Library library = getLibraryByName(libraryName);
                if (library != null) {
                  modifiableRootModel.removeOrderEntry(orderEntry);
                  entries[i] = modifiableRootModel.addLibraryEntry(library);
                  changed = true;
                  workspace.removeSubstitution(module.getName(), workspaceModule, libraryName, scope);
                }
              }
            }
          }
        }
        if (!(orderEntry instanceof LibraryOrderEntry)) continue;

        // Apply: replace a project-level library dependency with a dependency on the
        // workspace module that publishes the same coordinates.
        LibraryOrderEntry libraryOrderEntry = (LibraryOrderEntry)orderEntry;
        if (!libraryOrderEntry.isModuleLevel() && libraryOrderEntry.getLibraryName() != null) {
          String workspaceModule = toSubstitute.get(libraryOrderEntry.getLibraryName());
          if (workspaceModule != null) {
            Module ideModule = findIdeModule(workspaceModule);
            if (ideModule != null) {
              ModuleOrderEntry moduleOrderEntry = modifiableRootModel.addModuleOrderEntry(ideModule);
              moduleOrderEntry.setScope(libraryOrderEntry.getScope());
              modifiableRootModel.removeOrderEntry(orderEntry);
              entries[i] = moduleOrderEntry;
              changed = true;
              workspace.addSubstitution(module.getName(), workspaceModule,
                                        libraryOrderEntry.getLibraryName(),
                                        libraryOrderEntry.getScope());
            }
          }
        }
      }
      if (changed) {
        // Preserve the original dependency order after in-place entry replacements.
        modifiableRootModel.rearrangeOrderEntries(entries);
      }
    }
    workspace.commit();
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.expr.fn.impl;
import org.apache.drill.exec.expr.DrillSimpleFunc;
import org.apache.drill.exec.expr.annotations.FunctionTemplate;
import org.apache.drill.exec.expr.annotations.Output;
import org.apache.drill.exec.expr.annotations.Param;
import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
import org.apache.drill.exec.expr.holders.BigIntHolder;
import org.apache.drill.exec.expr.holders.VarDecimalHolder;
import org.apache.drill.exec.expr.holders.NullableVarDecimalHolder;
import org.apache.drill.exec.expr.holders.Float4Holder;
import org.apache.drill.exec.expr.holders.Float8Holder;
import org.apache.drill.exec.expr.holders.IntHolder;
import org.apache.drill.exec.expr.holders.NullableBigIntHolder;
import org.apache.drill.exec.expr.holders.NullableFloat4Holder;
import org.apache.drill.exec.expr.holders.NullableFloat8Holder;
import org.apache.drill.exec.expr.holders.NullableIntHolder;
/**
 * hash32 function definitions for numeric data types. These functions cast the input numeric value to a
 * double before doing the hashing. See comments in {@link Hash64AsDouble} for the reason for doing this.
 *
 * <p>Every inner class registers under the same SQL function name ({@code "hash32AsDouble"}); the
 * engine picks the variant whose {@code @Param} holder type matches the argument. The nullable
 * variants declare {@code NullHandling.INTERNAL}, meaning they handle SQL NULL themselves and map
 * it to the constant hash value 0.
 *
 * <p>NOTE(review): the non-decimal variants pass {@code in.value} to
 * {@code HashHelper.hash32(..., 0)} without an explicit cast — presumably that overload takes a
 * {@code double}, so the int/long/float inputs are implicitly widened, which is what gives the
 * "as double" behavior. TODO confirm against {@code HashHelper}'s signature.
 */
@SuppressWarnings("unused")
public class Hash32AsDouble {

  /** Nullable FLOAT4 input: NULL hashes to 0, otherwise the value is hashed with seed 0. */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class NullableFloatHash implements DrillSimpleFunc {
    @Param
    NullableFloat4Holder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      if (in.isSet == 0) {
        // SQL NULL maps to a fixed hash of 0 (NullHandling.INTERNAL).
        out.value = 0;
      } else {
        out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(in.value, 0);
      }
    }
  }

  /** Non-nullable FLOAT4 input. */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class FloatHash implements DrillSimpleFunc {
    @Param
    Float4Holder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(in.value, 0);
    }
  }

  /** Nullable FLOAT8 input: NULL hashes to 0, otherwise the value is hashed with seed 0. */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class NullableDoubleHash implements DrillSimpleFunc {
    @Param
    NullableFloat8Holder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      if (in.isSet == 0) {
        // SQL NULL maps to a fixed hash of 0 (NullHandling.INTERNAL).
        out.value = 0;
      } else {
        out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(in.value, 0);
      }
    }
  }

  /** Non-nullable FLOAT8 input. */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class DoubleHash implements DrillSimpleFunc {
    @Param
    Float8Holder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(in.value, 0);
    }
  }

  /** Nullable BIGINT input: NULL hashes to 0, otherwise the value is hashed with seed 0. */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class NullableBigIntHash implements DrillSimpleFunc {
    @Param
    NullableBigIntHolder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      if (in.isSet == 0) {
        // SQL NULL maps to a fixed hash of 0 (NullHandling.INTERNAL).
        out.value = 0;
      } else {
        out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(in.value, 0);
      }
    }
  }

  /** Nullable INT input: NULL hashes to 0, otherwise the value is hashed with seed 0. */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class NullableIntHash implements DrillSimpleFunc {
    @Param
    NullableIntHolder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      if (in.isSet == 0) {
        // SQL NULL maps to a fixed hash of 0 (NullHandling.INTERNAL).
        out.value = 0;
      } else {
        out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(in.value, 0);
      }
    }
  }

  /** Non-nullable BIGINT input. */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class BigIntHash implements DrillSimpleFunc {
    @Param
    BigIntHolder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(in.value, 0);
    }
  }

  /** Non-nullable INT input. */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class IntHash implements DrillSimpleFunc {
    @Param
    IntHolder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(in.value, 0);
    }
  }

  /**
   * Non-nullable VARDECIMAL input: the decimal is materialized from the value buffer as a
   * {@link java.math.BigDecimal}, then explicitly narrowed via {@code doubleValue()} before hashing
   * (may lose precision, by design — see class comment).
   */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class VarDecimalHash implements DrillSimpleFunc {
    @Param
    VarDecimalHolder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      // Decode the decimal from its backing buffer using the holder's offsets and scale.
      java.math.BigDecimal input = org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromDrillBuf(in.buffer,
          in.start, in.end - in.start, in.scale);
      out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(input.doubleValue(), 0);
    }
  }

  /** Nullable VARDECIMAL input: NULL hashes to 0, otherwise same conversion as {@link VarDecimalHash}. */
  @FunctionTemplate(name = "hash32AsDouble", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
  public static class NullableVarDecimalHash implements DrillSimpleFunc {
    @Param
    NullableVarDecimalHolder in;

    @Output
    IntHolder out;

    public void setup() {
    }

    public void eval() {
      if (in.isSet == 0) {
        // SQL NULL maps to a fixed hash of 0 (NullHandling.INTERNAL).
        out.value = 0;
      } else {
        // Decode the decimal from its backing buffer, then hash its double approximation.
        java.math.BigDecimal input = org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromDrillBuf(in.buffer,
            in.start, in.end - in.start, in.scale);
        out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash32(input.doubleValue(), 0);
      }
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.