gt
stringclasses 1
value | context
stringlengths 2.05k
161k
|
---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.rex;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.OracleSqlOperatorTable;
import org.apache.calcite.sql.fun.SqlCaseOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Standard implementation of {@link RexSqlConvertletTable}.
*/
public class RexSqlStandardConvertletTable
    extends RexSqlReflectiveConvertletTable {
  //~ Constructors -----------------------------------------------------------
  public RexSqlStandardConvertletTable() {
    super();
    // Register convertlets. Most operators have structurally identical Rex
    // and SQL forms; CASE and CAST need special handling (see below).
    registerEquivOp(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL);
    registerEquivOp(SqlStdOperatorTable.GREATER_THAN);
    registerEquivOp(SqlStdOperatorTable.LESS_THAN_OR_EQUAL);
    registerEquivOp(SqlStdOperatorTable.LESS_THAN);
    registerEquivOp(SqlStdOperatorTable.EQUALS);
    registerEquivOp(SqlStdOperatorTable.NOT_EQUALS);
    registerEquivOp(SqlStdOperatorTable.AND);
    registerEquivOp(SqlStdOperatorTable.OR);
    registerEquivOp(SqlStdOperatorTable.NOT_IN);
    registerEquivOp(SqlStdOperatorTable.IN);
    registerEquivOp(SqlStdOperatorTable.LIKE);
    registerEquivOp(SqlStdOperatorTable.NOT_LIKE);
    registerEquivOp(SqlStdOperatorTable.SIMILAR_TO);
    registerEquivOp(SqlStdOperatorTable.NOT_SIMILAR_TO);
    registerEquivOp(SqlStdOperatorTable.PLUS);
    registerEquivOp(SqlStdOperatorTable.MINUS);
    registerEquivOp(SqlStdOperatorTable.MULTIPLY);
    registerEquivOp(SqlStdOperatorTable.DIVIDE);
    registerEquivOp(SqlStdOperatorTable.NOT);
    registerEquivOp(SqlStdOperatorTable.IS_NOT_NULL);
    registerEquivOp(SqlStdOperatorTable.IS_NULL);
    registerEquivOp(SqlStdOperatorTable.IS_NOT_TRUE);
    registerEquivOp(SqlStdOperatorTable.IS_TRUE);
    registerEquivOp(SqlStdOperatorTable.IS_NOT_FALSE);
    registerEquivOp(SqlStdOperatorTable.IS_FALSE);
    registerEquivOp(SqlStdOperatorTable.IS_NOT_UNKNOWN);
    registerEquivOp(SqlStdOperatorTable.IS_UNKNOWN);
    registerEquivOp(SqlStdOperatorTable.UNARY_MINUS);
    registerEquivOp(SqlStdOperatorTable.UNARY_PLUS);
    // CASE has different operand layouts in Rex vs. SQL form.
    registerCaseOp(SqlStdOperatorTable.CASE);
    registerEquivOp(SqlStdOperatorTable.CONCAT);
    registerEquivOp(SqlStdOperatorTable.BETWEEN);
    registerEquivOp(SqlStdOperatorTable.SYMMETRIC_BETWEEN);
    registerEquivOp(SqlStdOperatorTable.NOT_BETWEEN);
    registerEquivOp(SqlStdOperatorTable.SYMMETRIC_NOT_BETWEEN);
    registerEquivOp(SqlStdOperatorTable.IS_NOT_DISTINCT_FROM);
    registerEquivOp(SqlStdOperatorTable.IS_DISTINCT_FROM);
    registerEquivOp(SqlStdOperatorTable.MINUS_DATE);
    registerEquivOp(SqlStdOperatorTable.EXTRACT);
    registerEquivOp(SqlStdOperatorTable.SUBSTRING);
    registerEquivOp(SqlStdOperatorTable.CONVERT);
    registerEquivOp(SqlStdOperatorTable.TRANSLATE);
    registerEquivOp(SqlStdOperatorTable.OVERLAY);
    registerEquivOp(SqlStdOperatorTable.TRIM);
    registerEquivOp(OracleSqlOperatorTable.TRANSLATE3);
    registerEquivOp(SqlStdOperatorTable.POSITION);
    registerEquivOp(SqlStdOperatorTable.CHAR_LENGTH);
    registerEquivOp(SqlStdOperatorTable.CHARACTER_LENGTH);
    registerEquivOp(SqlStdOperatorTable.UPPER);
    registerEquivOp(SqlStdOperatorTable.LOWER);
    registerEquivOp(SqlStdOperatorTable.INITCAP);
    registerEquivOp(SqlStdOperatorTable.POWER);
    registerEquivOp(SqlStdOperatorTable.SQRT);
    registerEquivOp(SqlStdOperatorTable.MOD);
    registerEquivOp(SqlStdOperatorTable.LN);
    registerEquivOp(SqlStdOperatorTable.LOG10);
    registerEquivOp(SqlStdOperatorTable.ABS);
    registerEquivOp(SqlStdOperatorTable.EXP);
    registerEquivOp(SqlStdOperatorTable.FLOOR);
    registerEquivOp(SqlStdOperatorTable.CEIL);
    registerEquivOp(SqlStdOperatorTable.NULLIF);
    registerEquivOp(SqlStdOperatorTable.COALESCE);
    // CAST needs the target type appended as an extra SQL operand.
    registerTypeAppendOp(SqlStdOperatorTable.CAST);
  }
  //~ Methods ----------------------------------------------------------------
  /**
   * Converts a call to an operator into a {@link SqlCall} to the same
   * operator.
   *
   * <p>Called automatically via reflection.
   *
   * @param converter Converter
   * @param call Call
   * @return Sql call, or null if no convertlet is registered for the call's
   *   operator or if any operand cannot be converted
   */
  public SqlNode convertCall(
      RexToSqlNodeConverter converter,
      RexCall call) {
    if (get(call) == null) {
      // No convertlet registered for this operator; caller must handle null.
      return null;
    }
    final SqlOperator op = call.getOperator();
    final List<RexNode> operands = call.getOperands();
    final SqlNode[] exprs = convertExpressionList(converter, operands);
    if (exprs == null) {
      // At least one operand could not be converted; give up on the call.
      return null;
    }
    return new SqlBasicCall(
        op,
        exprs,
        SqlParserPos.ZERO);
  }
  /**
   * Converts a list of {@link RexNode} operands to an array of
   * {@link SqlNode}s.
   *
   * @param converter Converter
   * @param nodes Operands to convert
   * @return converted operands, or null if any single operand fails to
   *   convert (all-or-nothing semantics)
   */
  private SqlNode[] convertExpressionList(
      RexToSqlNodeConverter converter,
      List<RexNode> nodes) {
    final SqlNode[] exprs = new SqlNode[nodes.size()];
    for (int i = 0; i < nodes.size(); i++) {
      RexNode node = nodes.get(i);
      exprs[i] = converter.convertNode(node);
      if (exprs[i] == null) {
        // Propagate failure of any operand as failure of the whole list.
        return null;
      }
    }
    return exprs;
  }
  /**
   * Creates and registers a convertlet for an operator in which
   * the SQL and Rex representations are structurally equivalent.
   *
   * @param op operator instance
   */
  protected void registerEquivOp(SqlOperator op) {
    registerOp(op, new EquivConvertlet(op));
  }
  /**
   * Creates and registers a convertlet for an operator in which
   * the SQL representation needs the result type appended
   * as an extra argument (e.g. CAST).
   *
   * @param op operator instance
   */
  private void registerTypeAppendOp(final SqlOperator op) {
    registerOp(
        op,
        new RexSqlConvertlet() {
          public SqlNode convertCall(
              RexToSqlNodeConverter converter,
              RexCall call) {
            SqlNode[] operands =
                convertExpressionList(converter, call.operands);
            if (operands == null) {
              return null;
            }
            List<SqlNode> operandList =
                new ArrayList<SqlNode>(Arrays.asList(operands));
            // Append the call's result type as a trailing operand, which is
            // how SQL spells the target type of CAST.
            SqlDataTypeSpec typeSpec =
                SqlTypeUtil.convertTypeToSpec(call.getType());
            operandList.add(typeSpec);
            return new SqlBasicCall(
                op,
                operandList.toArray(new SqlNode[operandList.size()]),
                SqlParserPos.ZERO);
          }
        });
  }
  /**
   * Creates and registers a convertlet for the CASE operator,
   * which takes different forms for SQL vs Rex.
   *
   * @param op instance of CASE operator
   */
  private void registerCaseOp(final SqlOperator op) {
    registerOp(
        op,
        new RexSqlConvertlet() {
          public SqlNode convertCall(
              RexToSqlNodeConverter converter,
              RexCall call) {
            assert op instanceof SqlCaseOperator;
            SqlNode[] operands =
                convertExpressionList(converter, call.operands);
            if (operands == null) {
              return null;
            }
            // Rex CASE operands are laid out flat as
            // [when0, then0, when1, then1, ..., else]; the SQL form groups
            // them into a WHEN list, a THEN list and an ELSE expression.
            SqlNodeList whenList = new SqlNodeList(SqlParserPos.ZERO);
            SqlNodeList thenList = new SqlNodeList(SqlParserPos.ZERO);
            int i = 0;
            while (i < operands.length - 1) {
              whenList.add(operands[i]);
              ++i;
              thenList.add(operands[i]);
              ++i;
            }
            // The final operand is the ELSE expression.
            SqlNode elseExpr = operands[i];
            SqlNode[] newOperands = new SqlNode[3];
            newOperands[0] = whenList;
            newOperands[1] = thenList;
            newOperands[2] = elseExpr;
            return op.createCall(null, SqlParserPos.ZERO, newOperands);
          }
        });
  }
  /** Convertlet that converts a {@link RexCall} to a {@link SqlCall} of the
   * same operator. */
  private class EquivConvertlet implements RexSqlConvertlet {
    private final SqlOperator op;
    EquivConvertlet(SqlOperator op) {
      this.op = op;
    }
    public SqlNode convertCall(RexToSqlNodeConverter converter, RexCall call) {
      SqlNode[] operands = convertExpressionList(converter, call.operands);
      if (operands == null) {
        return null;
      }
      return new SqlBasicCall(op, operands, SqlParserPos.ZERO);
    }
  }
}
// End RexSqlStandardConvertletTable.java
|
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.vfs;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.io.BaseEncoding;
import com.google.devtools.build.lib.testutil.TestUtils;
import com.google.devtools.build.lib.unix.NativePosixFiles;
import com.google.devtools.build.lib.util.Fingerprint;
import com.google.devtools.build.lib.util.Preconditions;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This class handles the generic tests that any filesystem must pass.
*
* <p>Each filesystem-test should inherit from this class, thereby obtaining
* all the tests.
*/
public abstract class FileSystemTest {
private long savedTime;
protected FileSystem testFS;
protected boolean supportsSymlinks;
protected Path workingDir;
// Some useful examples of various kinds of files (mnemonic: "x" = "eXample")
protected Path xNothing;
protected Path xLink;
protected Path xFile;
protected Path xNonEmptyDirectory;
protected Path xNonEmptyDirectoryFoo;
protected Path xEmptyDirectory;
@Before
public final void createDirectories() throws Exception {
  // Hook for subclasses that must run code before the filesystem exists.
  executeBeforeCreatingDirectories();
  testFS = getFreshFileSystem();
  workingDir = testFS.getPath(getTestTmpDir());
  // Start from an empty working directory so tests are hermetic.
  cleanUpWorkingDirectory(workingDir);
  supportsSymlinks = testFS.supportsSymbolicLinksNatively();
  // % ls -lR
  // -rw-rw-r-- xFile
  // drwxrwxr-x xNonEmptyDirectory
  // -rw-rw-r-- xNonEmptyDirectory/foo
  // drwxrwxr-x xEmptyDirectory
  xNothing = absolutize("xNothing");
  xLink = absolutize("xLink");
  xFile = absolutize("xFile");
  xNonEmptyDirectory = absolutize("xNonEmptyDirectory");
  xNonEmptyDirectoryFoo = xNonEmptyDirectory.getChild("foo");
  xEmptyDirectory = absolutize("xEmptyDirectory");
  // xNothing and xLink are deliberately left uncreated.
  FileSystemUtils.createEmptyFile(xFile);
  xNonEmptyDirectory.createDirectory();
  FileSystemUtils.createEmptyFile(xNonEmptyDirectoryFoo);
  xEmptyDirectory.createDirectory();
}
// Intentionally empty; subclasses override to run setup code first.
protected void executeBeforeCreatingDirectories() throws Exception {
  // This method exists because LazyDigestFileSystemTest requires some code to be run before
  // createDirectories().
}
@After
public final void destroyFileSystem() throws Exception {
  // Delegates to the overridable single-argument variant.
  destroyFileSystem(testFS);
}
/**
* Returns an instance of the file system to test.
*/
protected abstract FileSystem getFreshFileSystem() throws IOException;
// Checks via native POSIX stat whether {@code file} is a symlink.
protected boolean isSymbolicLink(File file) {
  return NativePosixFiles.isSymbolicLink(file);
}
// Makes {@code file} writable using native POSIX chmod.
protected void setWritable(File file) throws IOException {
  NativePosixFiles.setWritable(file);
}
// Makes {@code file} executable using native POSIX chmod.
protected void setExecutable(File file) throws IOException {
  NativePosixFiles.setExecutable(file);
}
private static final Pattern STAT_SUBDIR_ERROR = Pattern.compile("(.*) \\(Not a directory\\)");
// Test that file is not present, using statIfFound. Base implementation throws an exception, but
// subclasses may override statIfFound to return null, in which case their tests should override
// this method.
@SuppressWarnings("unused") // Subclasses may throw.
protected void expectNotFound(Path path) throws IOException {
  try {
    assertNull(path.statIfFound());
  } catch (IOException e) {
    // May be because of a non-directory path component. Parse exception to check this.
    Matcher matcher = STAT_SUBDIR_ERROR.matcher(e.getMessage());
    if (!matcher.matches() || !path.getPathString().startsWith(matcher.group(1))) {
      // Throw if this doesn't match what an ENOTDIR error looks like.
      throw e;
    }
    // Otherwise swallow: an ENOTDIR on a path prefix also means "not found".
  }
}
/**
* Removes all stuff from the test filesystem.
*/
// Tears down {@code fileSystem}; it must be the one owning workingDir.
protected void destroyFileSystem(FileSystem fileSystem) throws IOException {
  Preconditions.checkArgument(fileSystem.equals(workingDir.getFileSystem()));
  cleanUpWorkingDirectory(workingDir);
}
/**
* Cleans up the working directory by removing everything.
*/
// Deletes everything under {@code workingPath}, then recreates it empty.
protected void cleanUpWorkingDirectory(Path workingPath)
    throws IOException {
  if (workingPath.exists()) {
    removeEntireDirectory(workingPath.getPathFile()); // uses java.io.File!
  }
  FileSystemUtils.createDirectoryAndParents(workingPath);
}
/**
* This function removes an entire directory and all of its contents.
* Much like rm -rf directoryToRemove
*/
// Recursively deletes {@code directoryToRemove} and all of its contents,
// like "rm -rf". Refuses to touch anything outside the test directory.
protected void removeEntireDirectory(File directoryToRemove)
    throws IOException {
  // make sure that we do not remove anything outside the test directory
  Path testDirPath = testFS.getPath(getTestTmpDir());
  if (!testFS.getPath(directoryToRemove.getAbsolutePath()).startsWith(testDirPath)) {
    throw new IOException("trying to remove files outside of the testdata directory");
  }
  // Some tests set the directories read-only and/or non-executable, so
  // override that:
  setWritable(directoryToRemove);
  setExecutable(directoryToRemove);
  File[] files = directoryToRemove.listFiles();
  if (files != null) {
    for (File currentFile : files) {
      boolean isSymbolicLink = isSymbolicLink(currentFile);
      if (!isSymbolicLink && currentFile.isDirectory()) {
        // Recurse into real subdirectories only; never follow symlinks.
        removeEntireDirectory(currentFile);
      } else {
        if (!isSymbolicLink) {
          // chmod on a symlink would affect its target, so skip links.
          setWritable(currentFile);
        }
        if (!currentFile.delete()) {
          throw new IOException("Failed to delete '" + currentFile + "'");
        }
      }
    }
  }
  // Finally remove the (now empty) directory itself.
  if (!directoryToRemove.delete()) {
    throw new IOException("Failed to delete '" + directoryToRemove + "'");
  }
}
/**
* Returns the directory to use as the FileSystem's working directory.
* Canonicalized to make tests hermetic against symbolic links in TEST_TMPDIR.
*/
// Returns the canonical path of the per-test scratch directory. Canonical
// form keeps the tests hermetic against symlinks in TEST_TMPDIR.
protected final String getTestTmpDir() throws IOException {
  String canonicalTmp = new File(TestUtils.tmpDir()).getCanonicalPath();
  return canonicalTmp + "/testdir";
}
/**
* Indirection to create links so we can test FileSystems that do not support
* link creation. For example, JavaFileSystemTest overrides this method
* and creates the link with an alternate FileSystem.
*/
// Convenience overload: links to {@code target} via its path fragment.
protected void createSymbolicLink(Path link, Path target) throws IOException {
  createSymbolicLink(link, target.asFragment());
}
/**
* Indirection to create links so we can test FileSystems that do not support
* link creation. For example, JavaFileSystemTest overrides this method
* and creates the link with an alternate FileSystem.
*/
// Overridable so subclasses can create links with an alternate FileSystem.
protected void createSymbolicLink(Path link, PathFragment target) throws IOException {
  link.createSymbolicLink(target);
}
/**
* Indirection to setReadOnly(false) on FileSystems that do not
* support setReadOnly(false). For example, JavaFileSystemTest overrides this
* method and makes the Path writable with an alternate FileSystem.
*/
// Overridable so subclasses can make paths writable via another FileSystem.
protected void makeWritable(Path target) throws IOException {
  target.setWritable(true);
}
/**
* Indirection to {@link Path#setExecutable(boolean)} on FileSystems that do
* not support setExecutable. For example, JavaFileSystemTest overrides this
* method and makes the Path executable with an alternate FileSystem.
*/
// Overridable so subclasses can set the executable bit via another FileSystem.
protected void setExecutable(Path target, boolean mode) throws IOException {
  target.setExecutable(mode);
}
// TODO(bazel-team): (2011) Put in a setLastModifiedTime into the various objects
// and clobber the current time of the object we're currently handling.
// Otherwise testing the thing might get a little hard, depending on the clock.
// Records a baseline mtime for later comparison with isLaterThanreferenceTime.
void storeReferenceTime(long timeToMark) {
  savedTime = timeToMark;
}
// Returns true if testTime is at or after the stored reference time (note the
// <=: equal times count as "later" because filesystem timestamp granularity
// can be coarse). NOTE(review): name has a lowercase 'r' and really means
// "not earlier than" — renaming would touch every caller, so kept as-is.
boolean isLaterThanreferenceTime(long testTime) {
  return (savedTime <= testTime);
}
// Resolves {@code relativePathName} against the test working directory.
protected Path absolutize(String relativePathName) {
  return workingDir.getRelative(relativePathName);
}
// Here the tests begin.
@Test
public void testIsFileForNonexistingPath() {
  // A path that was never created must not report itself as a regular file.
  Path missing = testFS.getPath("/something/strange");
  assertFalse(missing.isFile());
}
@Test
public void testIsDirectoryForNonexistingPath() {
  // A path that was never created must not report itself as a directory.
  Path missing = testFS.getPath("/something/strange");
  assertFalse(missing.isDirectory());
}
@Test
public void testIsLinkForNonexistingPath() {
  // A path that was never created must not report itself as a symlink.
  Path missing = testFS.getPath("/something/strange");
  assertFalse(missing.isSymbolicLink());
}
@Test
public void testExistsForNonexistingPath() throws Exception {
  // exists() and statIfFound() must both agree the path is absent.
  Path missing = testFS.getPath("/something/strange");
  assertFalse(missing.exists());
  expectNotFound(missing);
}
@Test
public void testBadPermissionsThrowsExceptionOnStatIfFound() throws Exception {
  // Create a file inside a directory, then revoke search permission on the
  // directory so the child can no longer be stat'ed.
  Path lockedDir = absolutize("inaccessible");
  lockedDir.createDirectory();
  Path hiddenChild = lockedDir.getChild("child");
  FileSystemUtils.createEmptyFile(hiddenChild);
  lockedDir.setExecutable(false);
  assertFalse(hiddenChild.exists());
  try {
    hiddenChild.statIfFound();
    fail();
  } catch (IOException expected) {
    // Expected: EACCES surfaces as IOException rather than "not found".
  }
}
@Test
public void testStatIfFoundReturnsNullForChildOfNonDir() throws Exception {
  // A child path under a regular file (ENOTDIR) is reported as not found.
  Path parentDir = absolutize("foo");
  parentDir.createDirectory();
  Path regularFile = parentDir.getRelative("bar");
  FileSystemUtils.createEmptyFile(regularFile);
  assertNull(regularFile.getRelative("file").statIfFound());
}
// The following tests check the handling of the current working directory.
@Test
public void testCreatePathRelativeToWorkingDirectory() {
  // absolutize() must be equivalent to resolving against workingDir directly.
  Path viaHelper = absolutize("some-file");
  Path viaWorkingDir = workingDir.getRelative(new PathFragment("some-file"));
  assertEquals(viaWorkingDir, viaHelper);
}
// The following tests check the handling of the root directory
@Test
public void testRootIsDirectory() {
  // The filesystem root must always be a directory.
  Path root = testFS.getPath("/");
  assertTrue(root.isDirectory());
}
@Test
public void testRootHasNoParent() {
  // The root is its own top; its parent must be null.
  Path root = testFS.getPath("/");
  assertNull(root.getParentDirectory());
}
// The following functions test the creation of files/links/directories.
@Test
public void testFileExists() throws Exception {
  // A freshly created empty file must exist and be stat-able.
  Path created = absolutize("some-file");
  FileSystemUtils.createEmptyFile(created);
  assertTrue(created.exists());
  assertNotNull(created.statIfFound());
}
@Test
public void testFileIsFile() throws Exception {
  // A freshly created empty file reports itself as a regular file.
  Path created = absolutize("some-file");
  FileSystemUtils.createEmptyFile(created);
  assertTrue(created.isFile());
}
@Test
public void testFileIsNotDirectory() throws Exception {
  // A regular file must not report itself as a directory.
  Path created = absolutize("some-file");
  FileSystemUtils.createEmptyFile(created);
  assertFalse(created.isDirectory());
}
@Test
public void testFileIsNotSymbolicLink() throws Exception {
  // A regular file must not report itself as a symlink.
  Path created = absolutize("some-file");
  FileSystemUtils.createEmptyFile(created);
  assertFalse(created.isSymbolicLink());
}
@Test
public void testDirectoryExists() throws Exception {
  // A freshly created directory must exist and be stat-able.
  Path created = absolutize("some-dir");
  created.createDirectory();
  assertTrue(created.exists());
  assertNotNull(created.statIfFound());
}
@Test
public void testDirectoryIsDirectory() throws Exception {
  // A freshly created directory reports itself as a directory.
  Path created = absolutize("some-dir");
  created.createDirectory();
  assertTrue(created.isDirectory());
}
@Test
public void testDirectoryIsNotFile() throws Exception {
  // A directory must not report itself as a regular file.
  Path created = absolutize("some-dir");
  created.createDirectory();
  assertFalse(created.isFile());
}
@Test
public void testDirectoryIsNotSymbolicLink() throws Exception {
  // A directory must not report itself as a symlink.
  Path created = absolutize("some-dir");
  created.createDirectory();
  assertFalse(created.isSymbolicLink());
}
@Test
public void testSymbolicFileLinkExists() throws Exception {
  // Skipped silently on filesystems without native symlink support.
  if (supportsSymlinks) {
    Path fileLink = absolutize("some-link");
    fileLink.createSymbolicLink(xFile);
    assertTrue(fileLink.exists());
    assertNotNull(fileLink.statIfFound());
  }
}
@Test
public void testSymbolicFileLinkIsSymbolicLink() throws Exception {
  // Skipped silently on filesystems without native symlink support.
  if (supportsSymlinks) {
    Path fileLink = absolutize("some-link");
    fileLink.createSymbolicLink(xFile);
    assertTrue(fileLink.isSymbolicLink());
  }
}
@Test
public void testSymbolicFileLinkIsFile() throws Exception {
  // isFile() follows links, so a link to a file is itself "a file".
  if (supportsSymlinks) {
    Path fileLink = absolutize("some-link");
    fileLink.createSymbolicLink(xFile);
    assertTrue(fileLink.isFile());
  }
}
@Test
public void testSymbolicFileLinkIsNotDirectory() throws Exception {
  // A link pointing at a file must not look like a directory.
  if (supportsSymlinks) {
    Path fileLink = absolutize("some-link");
    fileLink.createSymbolicLink(xFile);
    assertFalse(fileLink.isDirectory());
  }
}
@Test
public void testSymbolicDirLinkExists() throws Exception {
  // A link pointing at an existing directory must itself exist.
  if (supportsSymlinks) {
    Path dirLink = absolutize("some-link");
    dirLink.createSymbolicLink(xEmptyDirectory);
    assertTrue(dirLink.exists());
    assertNotNull(dirLink.statIfFound());
  }
}
@Test
public void testSymbolicDirLinkIsSymbolicLink() throws Exception {
  // A link pointing at a directory still reports itself as a symlink.
  if (supportsSymlinks) {
    Path dirLink = absolutize("some-link");
    dirLink.createSymbolicLink(xEmptyDirectory);
    assertTrue(dirLink.isSymbolicLink());
  }
}
@Test
public void testSymbolicDirLinkIsDirectory() throws Exception {
  // isDirectory() follows links, so a link to a directory is "a directory".
  if (supportsSymlinks) {
    Path dirLink = absolutize("some-link");
    dirLink.createSymbolicLink(xEmptyDirectory);
    assertTrue(dirLink.isDirectory());
  }
}
@Test
public void testSymbolicDirLinkIsNotFile() throws Exception {
  // A link pointing at a directory must not look like a regular file.
  if (supportsSymlinks) {
    Path dirLink = absolutize("some-link");
    dirLink.createSymbolicLink(xEmptyDirectory);
    assertFalse(dirLink.isFile());
  }
}
@Test
public void testChildOfNonDirectory() throws Exception {
  // A child of a regular file can never exist (ENOTDIR on lookup).
  Path regularFile = absolutize("file-name");
  FileSystemUtils.createEmptyFile(regularFile);
  Path bogusChild = regularFile.getChild("child");
  assertFalse(bogusChild.exists());
  expectNotFound(bogusChild);
}
@Test
public void testCreateDirectoryIsEmpty() throws Exception {
  // A freshly created directory must contain no entries.
  Path newPath = xEmptyDirectory.getChild("new-dir");
  newPath.createDirectory();
  // Fixed: JUnit assertEquals takes (expected, actual) — expected 0 first.
  assertEquals(0, newPath.getDirectoryEntries().size());
}
@Test
public void testCreateDirectoryIsOnlyChildInParent() throws Exception {
  // The new directory must be the sole entry of its parent.
  Path child = xEmptyDirectory.getChild("new-dir");
  child.createDirectory();
  assertThat(child.getParentDirectory().getDirectoryEntries()).hasSize(1);
  assertThat(child.getParentDirectory().getDirectoryEntries()).containsExactly(child);
}
@Test
public void testCreateDirectories() throws Exception {
  // createDirectoryAndParents returns true when it actually created dirs.
  Path deepDir = absolutize("new-dir/sub/directory");
  assertTrue(FileSystemUtils.createDirectoryAndParents(deepDir));
}
@Test
public void testCreateDirectoriesIsDirectory() throws Exception {
  // The deepest created path must be a directory.
  Path deepDir = absolutize("new-dir/sub/directory");
  FileSystemUtils.createDirectoryAndParents(deepDir);
  assertTrue(deepDir.isDirectory());
}
@Test
public void testCreateDirectoriesIsNotFile() throws Exception {
  // The deepest created path must not look like a regular file.
  Path deepDir = absolutize("new-dir/sub/directory");
  FileSystemUtils.createDirectoryAndParents(deepDir);
  assertFalse(deepDir.isFile());
}
@Test
public void testCreateDirectoriesIsNotSymbolicLink() throws Exception {
  // The deepest created path must not look like a symlink.
  Path deepDir = absolutize("new-dir/sub/directory");
  FileSystemUtils.createDirectoryAndParents(deepDir);
  assertFalse(deepDir.isSymbolicLink());
}
@Test
public void testCreateDirectoriesIsEmpty() throws Exception {
  // The deepest created directory must contain no entries.
  Path newPath = absolutize("new-dir/sub/directory");
  FileSystemUtils.createDirectoryAndParents(newPath);
  // Fixed: JUnit assertEquals takes (expected, actual) — expected 0 first.
  assertEquals(0, newPath.getDirectoryEntries().size());
}
@Test
public void testCreateDirectoriesIsOnlyChildInParent() throws Exception {
  // The leaf directory must be the sole entry of its parent.
  Path leaf = absolutize("new-dir/sub/directory");
  FileSystemUtils.createDirectoryAndParents(leaf);
  assertThat(leaf.getParentDirectory().getDirectoryEntries()).hasSize(1);
  assertThat(leaf.getParentDirectory().getDirectoryEntries()).containsExactly(leaf);
}
@Test
public void testCreateEmptyFileIsEmpty() throws Exception {
  // A freshly created empty file must have size zero.
  Path newPath = xEmptyDirectory.getChild("new-file");
  FileSystemUtils.createEmptyFile(newPath);
  // Fixed: JUnit assertEquals takes (expected, actual) — expected 0 first.
  assertEquals(0, newPath.getFileSize());
}
@Test
public void testCreateFileIsOnlyChildInParent() throws Exception {
  // The new file must be the sole entry of its parent directory.
  Path child = xEmptyDirectory.getChild("new-file");
  FileSystemUtils.createEmptyFile(child);
  assertThat(child.getParentDirectory().getDirectoryEntries()).hasSize(1);
  assertThat(child.getParentDirectory().getDirectoryEntries()).containsExactly(child);
}
// The following functions test the behavior if errors occur during the
// creation of files/links/directories.
@Test
public void testCreateDirectoryWhereDirectoryAlreadyExists() throws Exception {
  // createDirectory() on an existing directory returns false, not an error.
  assertFalse(xEmptyDirectory.createDirectory());
}
@Test
public void testCreateDirectoryWhereFileAlreadyExists() {
  try {
    // Creating a directory on top of an existing regular file must fail.
    xFile.createDirectory();
    fail();
  } catch (IOException e) {
    // EEXIST is reported with the path and "(File exists)".
    assertThat(e).hasMessage(xFile + " (File exists)");
  }
}
@Test
public void testCannotCreateDirectoryWithoutExistingParent() throws Exception {
  // /deep does not exist, so creating /deep/new-dir must fail with ENOENT.
  Path newPath = testFS.getPath("/deep/new-dir");
  try {
    newPath.createDirectory();
    fail();
  } catch (FileNotFoundException e) {
    assertThat(e.getMessage()).endsWith(" (No such file or directory)");
  }
}
@Test
public void testCannotCreateDirectoryWithReadOnlyParent() throws Exception {
  // A read-only parent directory must reject creation of children (EACCES).
  xEmptyDirectory.setWritable(false);
  Path xChildOfReadonlyDir = xEmptyDirectory.getChild("x");
  try {
    xChildOfReadonlyDir.createDirectory();
    fail();
  } catch (IOException e) {
    assertThat(e).hasMessage(xChildOfReadonlyDir + " (Permission denied)");
  }
}
@Test
public void testCannotCreateFileWithoutExistingParent() throws Exception {
  // The parent directory does not exist, so file creation must fail (ENOENT).
  Path newPath = testFS.getPath("/non-existing-dir/new-file");
  try {
    FileSystemUtils.createEmptyFile(newPath);
    fail();
  } catch (FileNotFoundException e) {
    assertThat(e.getMessage()).endsWith(" (No such file or directory)");
  }
}
@Test
public void testCannotCreateFileWithReadOnlyParent() throws Exception {
  // A read-only parent directory must reject creation of files (EACCES).
  xEmptyDirectory.setWritable(false);
  Path xChildOfReadonlyDir = xEmptyDirectory.getChild("x");
  try {
    FileSystemUtils.createEmptyFile(xChildOfReadonlyDir);
    fail();
  } catch (IOException e) {
    assertThat(e).hasMessage(xChildOfReadonlyDir + " (Permission denied)");
  }
}
@Test
public void testCannotCreateFileWithinFile() throws Exception {
  // Using a regular file as a path component must fail (ENOTDIR).
  Path newFilePath = absolutize("some-file");
  FileSystemUtils.createEmptyFile(newFilePath);
  Path wrongPath = absolutize("some-file/new-file");
  try {
    FileSystemUtils.createEmptyFile(wrongPath);
    fail();
  } catch (IOException e) {
    assertThat(e.getMessage()).endsWith(" (Not a directory)");
  }
}
@Test
public void testCannotCreateDirectoryWithinFile() throws Exception {
  // Using a regular file as a path component must fail (ENOTDIR).
  Path newFilePath = absolutize("some-file");
  FileSystemUtils.createEmptyFile(newFilePath);
  Path wrongPath = absolutize("some-file/new-file");
  try {
    wrongPath.createDirectory();
    fail();
  } catch (IOException e) {
    assertThat(e.getMessage()).endsWith(" (Not a directory)");
  }
}
// Test directory contents
@Test
public void testCreateMultipleChildren() throws Exception {
  // A directory listing must report exactly the files created in it.
  Path parent = absolutize("foo/");
  parent.createDirectory();
  Path childA = absolutize("foo/new-file-1");
  Path childB = absolutize("foo/new-file-2");
  Path childC = absolutize("foo/new-file-3");
  FileSystemUtils.createEmptyFile(childA);
  FileSystemUtils.createEmptyFile(childB);
  FileSystemUtils.createEmptyFile(childC);
  assertThat(parent.getDirectoryEntries()).containsExactly(childA, childB, childC);
}
@Test
public void testGetDirectoryEntriesThrowsExceptionWhenRunOnFile() throws Exception {
  // Listing a regular file must throw a plain IOException (ENOTDIR), and
  // specifically NOT the FileNotFoundException subclass.
  try {
    xFile.getDirectoryEntries();
    fail("No Exception thrown.");
  } catch (IOException ex) {
    if (ex instanceof FileNotFoundException) {
      fail("The method should throw an object of class IOException.");
    }
    assertThat(ex).hasMessage(xFile + " (Not a directory)");
  }
}
@Test
public void testGetDirectoryEntriesThrowsExceptionForNonexistingPath() {
  // Listing a nonexistent path must fail with ENOENT.
  Path somePath = testFS.getPath("/non-existing-path");
  try {
    somePath.getDirectoryEntries();
    fail("FileNotFoundException not thrown.");
  } catch (Exception x) {
    // NOTE(review): catch is broader than the expected FileNotFoundException;
    // narrowing it would tighten the test — confirm before changing.
    assertThat(x).hasMessage(somePath + " (No such file or directory)");
  }
}
// Test the removal of items
@Test
public void testDeleteDirectory() throws Exception {
  // Deleting an existing empty directory succeeds and reports true.
  assertTrue(xEmptyDirectory.delete());
}
@Test
public void testDeleteDirectoryIsNotDirectory() throws Exception {
  // After deletion the path must no longer look like a directory.
  xEmptyDirectory.delete();
  assertFalse(xEmptyDirectory.isDirectory());
}
@Test
public void testDeleteDirectoryParentSize() throws Exception {
  // Deleting a directory must shrink its parent's listing by exactly one.
  int parentSize = workingDir.getDirectoryEntries().size();
  xEmptyDirectory.delete();
  // Fixed: JUnit assertEquals takes (expected, actual) — expected first.
  assertEquals(parentSize - 1, workingDir.getDirectoryEntries().size());
}
@Test
public void testDeleteFile() throws Exception {
  // Deleting an existing file succeeds and reports true.
  assertTrue(xFile.delete());
}
@Test
public void testDeleteFileIsNotFile() throws Exception {
  // After deletion, the deleted path must no longer look like a file.
  xFile.delete();
  // Fixed: previously asserted on xEmptyDirectory (always false for a
  // directory, so the test was vacuous); the deleted file is what matters.
  assertFalse(xFile.isFile());
}
@Test
public void testDeleteFileParentSize() throws Exception {
  // Deleting a file must shrink its parent's listing by exactly one.
  int parentSize = workingDir.getDirectoryEntries().size();
  xFile.delete();
  // Fixed: JUnit assertEquals takes (expected, actual) — expected first.
  assertEquals(parentSize - 1, workingDir.getDirectoryEntries().size());
}
@Test
public void testDeleteRemovesCorrectFile() throws Exception {
  // Deleting one file must leave its siblings untouched.
  Path first = xEmptyDirectory.getChild("new-file-1");
  Path second = xEmptyDirectory.getChild("new-file-2");
  Path third = xEmptyDirectory.getChild("new-file-3");
  FileSystemUtils.createEmptyFile(first);
  FileSystemUtils.createEmptyFile(second);
  FileSystemUtils.createEmptyFile(third);
  assertTrue(second.delete());
  assertThat(xEmptyDirectory.getDirectoryEntries()).containsExactly(first, third);
}
@Test
public void testDeleteNonExistingDir() throws Exception {
  // Deleting a nonexistent path is a no-op that reports false.
  Path missingDir = xEmptyDirectory.getRelative("non-existing-dir");
  assertFalse(missingDir.delete());
}
@Test
public void testDeleteNotADirectoryPath() throws Exception {
  // Deleting a child of a regular file (ENOTDIR) reports false, not an error.
  Path bogusChild = xFile.getChild("new-file");
  assertFalse(bogusChild.delete());
}
// Here we test the situations where delete should throw exceptions.
@Test
public void testDeleteNonEmptyDirectoryThrowsException() throws Exception {
  // delete() is non-recursive: a non-empty directory must fail (ENOTEMPTY).
  try {
    xNonEmptyDirectory.delete();
    fail();
  } catch (IOException e) {
    assertThat(e).hasMessage(xNonEmptyDirectory + " (Directory not empty)");
  }
}
@Test
public void testDeleteNonEmptyDirectoryNotDeletedDirectory() throws Exception {
  // A failed delete must leave the directory itself intact.
  try {
    xNonEmptyDirectory.delete();
    fail();
  } catch (IOException e) {
    // Expected
  }
  assertTrue(xNonEmptyDirectory.isDirectory());
}
@Test
public void testDeleteNonEmptyDirectoryNotDeletedFile() throws Exception {
  // A failed delete must leave the directory's contents intact.
  try {
    xNonEmptyDirectory.delete();
    fail();
  } catch (IOException e) {
    // Expected
  }
  assertTrue(xNonEmptyDirectoryFoo.isFile());
}
@Test
public void testCannotRemoveRoot() {
  // Removing the filesystem root must fail; the exact errno varies by OS.
  Path rootDirectory = testFS.getRootDirectory();
  try {
    rootDirectory.delete();
    fail();
  } catch (IOException e) {
    String msg = e.getMessage();
    assertTrue(String.format("got %s want EBUSY or ENOTEMPTY", msg),
        msg.endsWith(" (Directory not empty)")
        || msg.endsWith(" (Device or resource busy)")
        || msg.endsWith(" (Is a directory)")); // Happens on OS X.
  }
}
// Test the date functions
@Test
public void testCreateFileChangesTimeOfDirectory() throws Exception {
  // Adding an entry must bump (or at least not rewind) the parent's mtime.
  storeReferenceTime(workingDir.getLastModifiedTime());
  Path addedFile = absolutize("new-file");
  FileSystemUtils.createEmptyFile(addedFile);
  assertTrue(isLaterThanreferenceTime(workingDir.getLastModifiedTime()));
}
@Test
public void testRemoveFileChangesTimeOfDirectory() throws Exception {
  // Removing an entry must bump (or at least not rewind) the parent's mtime.
  Path removedFile = absolutize("new-file");
  FileSystemUtils.createEmptyFile(removedFile);
  storeReferenceTime(workingDir.getLastModifiedTime());
  removedFile.delete();
  assertTrue(isLaterThanreferenceTime(workingDir.getLastModifiedTime()));
}
// This test is a little bit strange, as we cannot test the progression
// of the time directly. As the Java time and the OS time are slightly different.
// Therefore, we first create an unrelated file to get a notion
// of the current OS time and use that as a baseline.
@Test
public void testCreateFileTimestamp() throws Exception {
  // Use a sync file to sample the OS clock first (see comment above).
  Path syncFile = absolutize("sync-file");
  FileSystemUtils.createEmptyFile(syncFile);
  Path newFile = absolutize("new-file");
  storeReferenceTime(syncFile.getLastModifiedTime());
  FileSystemUtils.createEmptyFile(newFile);
  assertTrue(isLaterThanreferenceTime(newFile.getLastModifiedTime()));
}
// A freshly created directory must carry an mtime no earlier than the
// OS-time baseline taken from an unrelated sync file (see comment above).
@Test
public void testCreateDirectoryTimestamp() throws Exception {
  Path syncFile = absolutize("sync-file");
  FileSystemUtils.createEmptyFile(syncFile);
  Path newPath = absolutize("new-dir");
  storeReferenceTime(syncFile.getLastModifiedTime());
  assertTrue(newPath.createDirectory());
  assertTrue(isLaterThanreferenceTime(newPath.getLastModifiedTime()));
}
// Overwriting a file's content must advance the file's own mtime.
@Test
public void testWriteChangesModifiedTime() throws Exception {
  storeReferenceTime(xFile.getLastModifiedTime());
  FileSystemUtils.writeContentAsLatin1(xFile, "abc19");
  assertTrue(isLaterThanreferenceTime(xFile.getLastModifiedTime()));
}
// Querying the mtime of a nonexistent path must throw FileNotFoundException
// with the conventional "(No such file or directory)" message.
@Test
public void testGetLastModifiedTimeThrowsExceptionForNonexistingPath() throws Exception {
  Path newPath = testFS.getPath("/non-existing-dir");
  try {
    newPath.getLastModifiedTime();
    fail("FileNotFoundException not thrown!");
  } catch (FileNotFoundException x) {
    assertThat(x).hasMessage(newPath + " (No such file or directory)");
  }
}
// Test file size
// Querying the size of a nonexistent path must throw FileNotFoundException.
@Test
public void testFileSizeThrowsExceptionForNonexistingPath() throws Exception {
  Path newPath = testFS.getPath("/non-existing-file");
  try {
    newPath.getFileSize();
    fail("FileNotFoundException not thrown.");
  } catch (FileNotFoundException e) {
    assertThat(e).hasMessage(newPath + " (No such file or directory)");
  }
}
// After writing Latin-1 content, getFileSize() must report one byte per
// character written.
@Test
public void testFileSizeAfterWrite() throws Exception {
  String content = "abc19";
  FileSystemUtils.writeContentAsLatin1(xFile, content);
  assertEquals(content.length(), xFile.getFileSize());
}
// Testing the input/output routines
// Content written as Latin-1 must round-trip unchanged through the
// corresponding read routine.
@Test
public void testFileWriteAndReadAsLatin1() throws Exception {
  String written = "abc19";
  FileSystemUtils.writeContentAsLatin1(xFile, written);
  String roundTripped = new String(FileSystemUtils.readContentAsLatin1(xFile));
  assertEquals(written, roundTripped);
}
// Reading past the last written byte must yield -1 (end of stream).
@Test
public void testInputAndOutputStreamEOF() throws Exception {
  try (OutputStream out = xFile.getOutputStream()) {
    out.write(1);
  }
  try (InputStream in = xFile.getInputStream()) {
    in.read(); // consume the single byte written above
    assertEquals(-1, in.read());
  }
}
// Bytes 33..125 written through the output stream must be read back in the
// same order through the input stream.
@Test
public void testInputAndOutputStream() throws Exception {
  try (OutputStream out = xFile.getOutputStream()) {
    for (int b = 33; b <= 125; b++) {
      out.write(b);
    }
  }
  try (InputStream in = xFile.getInputStream()) {
    for (int b = 33; b <= 125; b++) {
      assertEquals(b, in.read());
    }
  }
}
// Bytes written through an appending stream (getOutputStream(true)) must
// follow the original content when the file is read back.
@Test
public void testInputAndOutputStreamAppend() throws Exception {
  try (OutputStream out = xFile.getOutputStream()) {
    for (int b = 33; b <= 125; b++) {
      out.write(b);
    }
  }
  try (OutputStream appendingOut = xFile.getOutputStream(true)) {
    for (int b = 126; b <= 154; b++) {
      appendingOut.write(b);
    }
  }
  try (InputStream in = xFile.getInputStream()) {
    for (int b = 33; b <= 154; b++) {
      assertEquals(b, in.read());
    }
  }
}
// Opening an output stream with append=false must truncate existing
// content, even if nothing is written through the new stream.
@Test
public void testInputAndOutputStreamNoAppend() throws Exception {
  try (OutputStream outStream = xFile.getOutputStream()) {
    outStream.write(1);
  }
  // Opening for overwrite truncates; the empty body is intentional.
  try (OutputStream noAppendOut = xFile.getOutputStream(false)) {
  }
  try (InputStream inStream = xFile.getInputStream()) {
    assertEquals(-1, inStream.read());
  }
}
// getOutputStream() on a nonexistent path must create the file.
@Test
public void testGetOutputStreamCreatesFile() throws Exception {
  Path newFile = absolutize("does_not_exist_yet.txt");
  try (OutputStream out = newFile.getOutputStream()) {
    out.write(42);
  }
  assertTrue(newFile.isFile());
}
// getOutputStream() on a directory must fail with "(Is a directory)".
@Test
public void testOutputStreamThrowExceptionOnDirectory() throws Exception {
  try {
    xEmptyDirectory.getOutputStream();
    fail("The Exception was not thrown!");
  } catch (IOException ex) {
    assertThat(ex).hasMessage(xEmptyDirectory + " (Is a directory)");
  }
}
// getInputStream() on a directory must fail with "(Is a directory)".
@Test
public void testInputStreamThrowExceptionOnDirectory() throws Exception {
  try {
    xEmptyDirectory.getInputStream();
    fail("The Exception was not thrown!");
  } catch (IOException ex) {
    assertThat(ex).hasMessage(xEmptyDirectory + " (Is a directory)");
  }
}
// Test renaming
// Renaming to an unused name must move the file: source gone, target a file.
@Test
public void testCanRenameToUnusedName() throws Exception {
  xFile.renameTo(xNothing);
  assertFalse(xFile.exists());
  assertTrue(xNothing.isFile());
}
// Renaming a file onto an existing file must succeed, replacing the target.
@Test
public void testCanRenameFileToExistingFile() throws Exception {
  Path otherFile = absolutize("otherFile");
  FileSystemUtils.createEmptyFile(otherFile);
  xFile.renameTo(otherFile); // succeeds
  assertFalse(xFile.exists());
  assertTrue(otherFile.isFile());
}
// Renaming a directory onto an existing EMPTY directory must succeed; the
// target then holds the source's entries.
@Test
public void testCanRenameDirToExistingEmptyDir() throws Exception {
  xNonEmptyDirectory.renameTo(xEmptyDirectory); // succeeds
  assertFalse(xNonEmptyDirectory.exists());
  assertTrue(xEmptyDirectory.isDirectory());
  assertThat(xEmptyDirectory.getDirectoryEntries()).isNotEmpty();
}
// Renaming onto a NON-empty directory must fail with "(Directory not empty)".
@Test
public void testCantRenameDirToExistingNonEmptyDir() throws Exception {
  try {
    xEmptyDirectory.renameTo(xNonEmptyDirectory);
    fail();
  } catch (IOException e) {
    assertThat(e.getMessage()).endsWith(" (Directory not empty)");
  }
}
// A failed rename onto a non-empty directory must leave both directories
// and their contents exactly as they were.
@Test
public void testCantRenameDirToExistingNonEmptyDirNothingChanged() throws Exception {
  try {
    xEmptyDirectory.renameTo(xNonEmptyDirectory);
    fail();
  } catch (IOException e) {
    // Expected
  }
  assertTrue(xNonEmptyDirectory.isDirectory());
  assertTrue(xEmptyDirectory.isDirectory());
  assertThat(xEmptyDirectory.getDirectoryEntries()).isEmpty();
  assertThat(xNonEmptyDirectory.getDirectoryEntries()).isNotEmpty();
}
// Renaming a directory onto an existing file must fail with
// "(Not a directory)" naming both paths.
@Test
public void testCantRenameDirToExistingFile() {
  try {
    xEmptyDirectory.renameTo(xFile);
    fail();
  } catch (IOException e) {
    assertThat(e).hasMessage(xEmptyDirectory + " -> " + xFile + " (Not a directory)");
  }
}
// A failed dir-onto-file rename must leave both paths unchanged.
@Test
public void testCantRenameDirToExistingFileNothingChanged() {
  try {
    xEmptyDirectory.renameTo(xFile);
    fail();
  } catch (IOException e) {
    // Expected
  }
  assertTrue(xEmptyDirectory.isDirectory());
  assertTrue(xFile.isFile());
}
// Renaming a file onto an existing directory must fail with
// "(Is a directory)" naming both paths.
@Test
public void testCantRenameFileToExistingDir() {
  try {
    xFile.renameTo(xEmptyDirectory);
    fail();
  } catch (IOException e) {
    assertThat(e).hasMessage(xFile + " -> " + xEmptyDirectory + " (Is a directory)");
  }
}
// A failed file-onto-dir rename must leave both paths unchanged.
@Test
public void testCantRenameFileToExistingDirNothingChanged() {
  try {
    xFile.renameTo(xEmptyDirectory);
    fail();
  } catch (IOException e) {
    // Expected
  }
  assertTrue(xEmptyDirectory.isDirectory());
  assertTrue(xFile.isFile());
}
// Renaming a nonexistent source must throw FileNotFoundException.
@Test
public void testMoveOnNonExistingFileThrowsException() throws Exception {
  Path nonExistingPath = absolutize("non-existing");
  Path targetPath = absolutize("does-not-matter");
  try {
    nonExistingPath.renameTo(targetPath);
    fail();
  } catch (FileNotFoundException e) {
    assertThat(e.getMessage()).endsWith(" (No such file or directory)");
  }
}
// Test the Paths
// getPath(String) must reject relative paths with IllegalArgumentException.
@Test
public void testGetPathOnlyAcceptsAbsolutePath() {
  try {
    testFS.getPath("not-absolute");
    fail("The expected Exception was not thrown.");
  } catch (IllegalArgumentException ex) {
    assertThat(ex).hasMessage("not-absolute (not an absolute path)");
  }
}
// getPath(PathFragment) must reject relative fragments the same way.
@Test
public void testGetPathOnlyAcceptsAbsolutePathFragment() {
  try {
    testFS.getPath(new PathFragment("not-absolute"));
    fail("The expected Exception was not thrown.");
  } catch (IllegalArgumentException ex) {
    assertThat(ex).hasMessage("not-absolute (not an absolute path)");
  }
}
// Test the access permissions
// Newly created files default to writable.
@Test
public void testNewFilesAreWritable() throws Exception {
  assertTrue(xFile.isWritable());
}
// Newly created files default to readable.
@Test
public void testNewFilesAreReadable() throws Exception {
  assertTrue(xFile.isReadable());
}
// Newly created directories default to writable.
@Test
public void testNewDirsAreWritable() throws Exception {
  assertTrue(xEmptyDirectory.isWritable());
}
// Newly created directories default to readable.
@Test
public void testNewDirsAreReadable() throws Exception {
  assertTrue(xEmptyDirectory.isReadable());
}
// Newly created directories default to executable (searchable).
@Test
public void testNewDirsAreExecutable() throws Exception {
  assertTrue(xEmptyDirectory.isExecutable());
}
// isExecutable() on a nonexistent path must throw FileNotFoundException.
@Test
public void testCannotGetExecutableOnNonexistingFile() throws Exception {
  try {
    xNothing.isExecutable();
    fail("No exception thrown.");
  } catch (FileNotFoundException ex) {
    assertThat(ex).hasMessage(xNothing + " (No such file or directory)");
  }
}
// setExecutable() on a nonexistent path must throw FileNotFoundException.
@Test
public void testCannotSetExecutableOnNonexistingFile() throws Exception {
  try {
    xNothing.setExecutable(true);
    fail("No exception thrown.");
  } catch (FileNotFoundException ex) {
    assertThat(ex).hasMessage(xNothing + " (No such file or directory)");
  }
}
// isWritable() on a nonexistent path must throw FileNotFoundException.
@Test
public void testCannotGetWritableOnNonexistingFile() throws Exception {
  try {
    xNothing.isWritable();
    fail("No exception thrown.");
  } catch (FileNotFoundException ex) {
    assertThat(ex).hasMessage(xNothing + " (No such file or directory)");
  }
}
// setWritable() on a nonexistent path must throw FileNotFoundException.
@Test
public void testCannotSetWritableOnNonexistingFile() throws Exception {
  try {
    xNothing.setWritable(false);
    fail("No exception thrown.");
  } catch (FileNotFoundException ex) {
    assertThat(ex).hasMessage(xNothing + " (No such file or directory)");
  }
}
// setReadable must toggle the readable bit in both directions.
@Test
public void testSetReadableOnFile() throws Exception {
  for (boolean readable : new boolean[] {false, true}) {
    xFile.setReadable(readable);
    assertEquals(readable, xFile.isReadable());
  }
}
// setWritable must toggle the writable bit in both directions.
@Test
public void testSetWritableOnFile() throws Exception {
  for (boolean writable : new boolean[] {false, true}) {
    xFile.setWritable(writable);
    assertEquals(writable, xFile.isWritable());
  }
}
// setExecutable must toggle the executable bit in both directions.
@Test
public void testSetExecutableOnFile() throws Exception {
  for (boolean executable : new boolean[] {true, false}) {
    xFile.setExecutable(executable);
    assertEquals(executable, xFile.isExecutable());
  }
}
// Stat'ing a child of a non-executable directory must be denied.
@Test
public void testSetExecutableOnDirectory() throws Exception {
  setExecutable(xNonEmptyDirectory, false);
  try {
    // We can't map names->inodes in a non-executable directory:
    xNonEmptyDirectoryFoo.isWritable(); // i.e. stat
    fail();
  } catch (IOException e) {
    assertThat(e.getMessage()).endsWith(" (Permission denied)");
  }
}
// Writing to a file whose writable bit is cleared must be denied.
@Test
public void testWritingToReadOnlyFileThrowsException() throws Exception {
  xFile.setWritable(false);
  try {
    FileSystemUtils.writeContent(xFile, "hello, world!".getBytes());
    fail("No exception thrown.");
  } catch (IOException e) {
    assertThat(e).hasMessage(xFile + " (Permission denied)");
  }
}
// Reading a file whose readable bit is cleared must be denied.
@Test
public void testReadingFromUnreadableFileThrowsException() throws Exception {
  FileSystemUtils.writeContent(xFile, "hello, world!".getBytes());
  xFile.setReadable(false);
  try {
    FileSystemUtils.readContent(xFile);
    fail("No exception thrown.");
  } catch (IOException e) {
    assertThat(e).hasMessage(xFile + " (Permission denied)");
  }
}
// Creating a file inside a read-only directory must be denied.
@Test
public void testCannotCreateFileInReadOnlyDirectory() throws Exception {
  Path xNonEmptyDirectoryBar = xNonEmptyDirectory.getChild("bar");
  xNonEmptyDirectory.setWritable(false);
  try {
    FileSystemUtils.createEmptyFile(xNonEmptyDirectoryBar);
    fail("No exception thrown.");
  } catch (IOException e) {
    assertThat(e).hasMessage(xNonEmptyDirectoryBar + " (Permission denied)");
  }
}
// Creating a subdirectory inside a read-only directory must be denied.
@Test
public void testCannotCreateDirectoryInReadOnlyDirectory() throws Exception {
  Path xNonEmptyDirectoryBar = xNonEmptyDirectory.getChild("bar");
  xNonEmptyDirectory.setWritable(false);
  try {
    xNonEmptyDirectoryBar.createDirectory();
    fail("No exception thrown.");
  } catch (IOException e) {
    assertThat(e).hasMessage(xNonEmptyDirectoryBar + " (Permission denied)");
  }
}
// Renaming a file INTO a read-only directory must be denied.
@Test
public void testCannotMoveIntoReadOnlyDirectory() throws Exception {
  Path xNonEmptyDirectoryBar = xNonEmptyDirectory.getChild("bar");
  xNonEmptyDirectory.setWritable(false);
  try {
    xFile.renameTo(xNonEmptyDirectoryBar);
    fail("No exception thrown.");
  } catch (IOException e) {
    assertThat(e.getMessage()).endsWith(" (Permission denied)");
  }
}
// Renaming a file OUT OF a read-only directory must be denied.
@Test
public void testCannotMoveFromReadOnlyDirectory() throws Exception {
  xNonEmptyDirectory.setWritable(false);
  try {
    xNonEmptyDirectoryFoo.renameTo(xNothing);
    fail("No exception thrown.");
  } catch (IOException e) {
    assertThat(e.getMessage()).endsWith(" (Permission denied)");
  }
}
// Deleting an entry of a read-only directory must be denied.
@Test
public void testCannotDeleteInReadOnlyDirectory() throws Exception {
  xNonEmptyDirectory.setWritable(false);
  try {
    xNonEmptyDirectoryFoo.delete();
    fail("No exception thrown.");
  } catch (IOException e) {
    assertThat(e).hasMessage(xNonEmptyDirectoryFoo + " (Permission denied)");
  }
}
// Creating a symlink inside a read-only directory must be denied; skipped
// on filesystems without symlink support.
// NOTE(review): "Creat" in the method name looks like a typo for "Create";
// safe to rename since nothing references test method names.
@Test
public void testCannotCreatSymbolicLinkInReadOnlyDirectory() throws Exception {
  Path xNonEmptyDirectoryBar = xNonEmptyDirectory.getChild("bar");
  xNonEmptyDirectory.setWritable(false);
  if (supportsSymlinks) {
    try {
      createSymbolicLink(xNonEmptyDirectoryBar, xNonEmptyDirectoryFoo);
      fail("No exception thrown.");
    } catch (IOException e) {
      assertThat(e).hasMessage(xNonEmptyDirectoryBar + " (Permission denied)");
    }
  }
}
// Verifies getMD5Digest() on a zero-length file against a Fingerprint of the
// empty byte sequence.
// Fix: JUnit's assertEquals takes (expected, actual); the original passed
// the filesystem digest as "expected" and the reference digest as "actual",
// which inverts the failure message. The reference digest is the expected
// value, so it goes first.
@Test
public void testGetMD5DigestForEmptyFile() throws Exception {
  Fingerprint fp = new Fingerprint();
  fp.addBytes(new byte[0]);
  assertEquals(fp.hexDigestAndReset(),
      BaseEncoding.base16().lowerCase().encode(xFile.getMD5Digest()));
}
// Verifies getMD5Digest() over 500 KB of constant data against a locally
// computed Fingerprint.
// Fix: expected/actual order in assertEquals was swapped (JUnit contract is
// (expected, actual)); the reference digest is the expected value.
@Test
public void testGetMD5Digest() throws Exception {
  byte[] buffer = new byte[500000];
  for (int i = 0; i < buffer.length; ++i) {
    buffer[i] = 1;
  }
  FileSystemUtils.writeContent(xFile, buffer);
  Fingerprint fp = new Fingerprint();
  fp.addBytes(buffer);
  assertEquals(fp.hexDigestAndReset(),
      BaseEncoding.base16().lowerCase().encode(xFile.getMD5Digest()));
}
// stat() on a nonexistent path must throw IOException rather than return a
// placeholder result.
@Test
public void testStatFailsFastOnNonExistingFiles() throws Exception {
  try {
    xNothing.stat();
    fail("Expected IOException");
  } catch (IOException expected) {
    // Exception is the success condition; nothing further to check.
  }
}
// statNullable() must signal a missing file by returning null, not throwing.
@Test
public void testStatNullableFailsFastOnNonExistingFiles() throws Exception {
  assertNull(xNothing.statNullable());
}
// resolveOneLink must return the link target as a fragment, and
// resolveSymbolicLinks the target path itself. Skipped when the
// filesystem does not support symlinks.
@Test
public void testResolveSymlinks() throws Exception {
  if (supportsSymlinks) {
    createSymbolicLink(xLink, xFile);
    FileSystemUtils.createEmptyFile(xFile);
    assertEquals(xFile.asFragment(), testFS.resolveOneLink(xLink));
    assertEquals(xFile, xLink.resolveSymbolicLinks());
  }
}
// A dangling symlink: resolveOneLink still yields the (missing) target,
// but full resolution must throw.
@Test
public void testResolveDanglingSymlinks() throws Exception {
  if (supportsSymlinks) {
    createSymbolicLink(xLink, xNothing);
    assertEquals(xNothing.asFragment(), testFS.resolveOneLink(xLink));
    try {
      xLink.resolveSymbolicLinks();
      fail();
    } catch (IOException expected) {
    }
  }
}
// A regular file is not a link: resolveOneLink returns null and full
// resolution is the identity.
@Test
public void testResolveNonSymlinks() throws Exception {
  if (supportsSymlinks) {
    assertNull(testFS.resolveOneLink(xFile));
    assertEquals(xFile, xFile.resolveSymbolicLinks());
  }
}
}
|
|
/*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.cloud.ServiceOptions.Clock;
import com.google.cloud.ServiceOptions.DefaultHttpTransportFactory;
import com.google.cloud.ServiceOptions.HttpTransportFactory;
import com.google.cloud.spi.ServiceRpcFactory;
import org.easymock.EasyMock;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Set;
@RunWith(JUnit4.class)
public class ServiceOptionsTest {
  // Fake service-account JSON key; the key material is dummy data, not a
  // usable credential.
  private static final String JSON_KEY =
      "{\n"
          + " \"private_key_id\": \"somekeyid\",\n"
          + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS"
          + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg"
          + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4"
          + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2"
          + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa"
          + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF"
          + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL"
          + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\"
          + "n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp"
          + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF"
          + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm"
          + "YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK"
          + "CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF"
          + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR"
          + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl"
          + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa"
          + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi"
          + "dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG"
          + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk"
          + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n"
          + " \"client_email\": \"[email protected]\",\n"
          + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n"
          + " \"type\": \"service_account\"\n"
          + "}";
  private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes());
  private static AuthCredentials authCredentials;
  // Parse the fake key once for all tests. fail() here aborts class
  // initialization if the fixture itself is broken. Note: this block must
  // run before OPTIONS below is initialized, since OPTIONS reads
  // authCredentials (field declaration order matters).
  static {
    try {
      authCredentials = AuthCredentials.createForJson(JSON_KEY_STREAM);
    } catch (IOException e) {
      fail("Couldn't create fake JSON credentials.");
    }
  }
  private static final HttpTransportFactory MOCK_HTTP_TRANSPORT_FACTORY =
      EasyMock.createMock(HttpTransportFactory.class);
  private static final Clock TEST_CLOCK = new TestClock();
  // Fully populated options, a defaults-only instance, and a builder
  // round-trip copy of the first; together they exercise builder, accessor,
  // default, and equality behavior.
  private static final TestServiceOptions OPTIONS =
      TestServiceOptions.builder()
          .authCredentials(authCredentials)
          .clock(TEST_CLOCK)
          .connectTimeout(1234)
          .host("host")
          .httpTransportFactory(MOCK_HTTP_TRANSPORT_FACTORY)
          .projectId("project-id")
          .readTimeout(5678)
          .retryParams(RetryParams.noRetries())
          .build();
  private static final TestServiceOptions DEFAULT_OPTIONS =
      TestServiceOptions.builder().projectId("project-id").build();
  private static final TestServiceOptions OPTIONS_COPY = OPTIONS.toBuilder().build();

  // Deterministic clock stub returning a fixed instant.
  private static class TestClock extends Clock {
    @Override
    public long millis() {
      return 123456789L;
    }
  }

  private interface TestService extends Service<TestServiceOptions> {}

  // Trivial service implementation created by the factory below.
  private static class TestServiceImpl
      extends BaseService<TestServiceOptions> implements TestService {
    private TestServiceImpl(TestServiceOptions options) {
      super(options);
    }
  }

  private interface TestServiceFactory extends ServiceFactory<TestService, TestServiceOptions> {}

  private static class DefaultTestServiceFactory implements TestServiceFactory {
    private static final TestServiceFactory INSTANCE = new DefaultTestServiceFactory();

    @Override
    public TestService create(TestServiceOptions options) {
      return new TestServiceImpl(options);
    }
  }

  private interface TestServiceRpcFactory
      extends ServiceRpcFactory<TestServiceRpc, TestServiceOptions> {}

  private static class DefaultTestServiceRpcFactory implements TestServiceRpcFactory {
    private static final TestServiceRpcFactory INSTANCE = new DefaultTestServiceRpcFactory();

    @Override
    public TestServiceRpc create(TestServiceOptions options) {
      return new DefaultTestServiceRpc(options);
    }
  }

  private interface TestServiceRpc {}

  private static class DefaultTestServiceRpc implements TestServiceRpc {
    DefaultTestServiceRpc(TestServiceOptions options) {}
  }

  // Minimal concrete ServiceOptions subclass wiring the fake service and
  // RPC factories above; equality/hashCode delegate to the base class.
  private static class TestServiceOptions
      extends ServiceOptions<TestService, TestServiceRpc, TestServiceOptions> {
    private static class Builder
        extends ServiceOptions.Builder<TestService, TestServiceRpc, TestServiceOptions, Builder> {
      private Builder() {}

      private Builder(TestServiceOptions options) {
        super(options);
      }

      @Override
      protected TestServiceOptions build() {
        return new TestServiceOptions(this);
      }
    }

    private TestServiceOptions(Builder builder) {
      super(TestServiceFactory.class, TestServiceRpcFactory.class, builder);
    }

    @Override
    protected TestServiceFactory defaultServiceFactory() {
      return DefaultTestServiceFactory.INSTANCE;
    }

    @Override
    protected TestServiceRpcFactory defaultRpcFactory() {
      return DefaultTestServiceRpcFactory.INSTANCE;
    }

    @Override
    protected Set<String> scopes() {
      return null;
    }

    @Override
    public Builder toBuilder() {
      return new Builder(this);
    }

    private static Builder builder() {
      return new Builder();
    }

    @Override
    public boolean equals(Object obj) {
      return obj instanceof TestServiceOptions && baseEquals((TestServiceOptions) obj);
    }

    @Override
    public int hashCode() {
      return baseHashCode();
    }
  }

  // Builder-set values must be returned by the accessors; unset values must
  // fall back to the library defaults checked against DEFAULT_OPTIONS.
  @Test
  public void testBuilder() {
    assertSame(authCredentials, OPTIONS.authCredentials());
    assertSame(TEST_CLOCK, OPTIONS.clock());
    assertEquals(1234, OPTIONS.connectTimeout());
    assertEquals("host", OPTIONS.host());
    assertSame(MOCK_HTTP_TRANSPORT_FACTORY, OPTIONS.httpTransportFactory());
    assertEquals("project-id", OPTIONS.projectId());
    assertEquals(5678, OPTIONS.readTimeout());
    assertSame(RetryParams.noRetries(), OPTIONS.retryParams());
    assertSame(Clock.defaultClock(), DEFAULT_OPTIONS.clock());
    assertEquals(-1, DEFAULT_OPTIONS.connectTimeout());
    assertEquals("https://www.googleapis.com", DEFAULT_OPTIONS.host());
    assertTrue(DEFAULT_OPTIONS.httpTransportFactory() instanceof DefaultHttpTransportFactory);
    assertEquals(-1, DEFAULT_OPTIONS.readTimeout());
    assertSame(RetryParams.defaultInstance(), DEFAULT_OPTIONS.retryParams());
  }

  // Options built with a project id must report the project id as required.
  @Test
  public void testGetProjectIdRequired() {
    assertTrue(OPTIONS.projectIdRequired());
  }

  // service() must go through the registered factory and produce our impl.
  @Test
  public void testService() {
    assertTrue(OPTIONS.service() instanceof TestServiceImpl);
  }

  // rpc() must go through the registered RPC factory.
  @Test
  public void testRpc() {
    assertTrue(OPTIONS.rpc() instanceof DefaultTestServiceRpc);
  }

  // A builder round-trip copy must be equal; differently configured options
  // must not be.
  @Test
  public void testBaseEquals() {
    assertEquals(OPTIONS, OPTIONS_COPY);
    assertNotEquals(DEFAULT_OPTIONS, OPTIONS);
  }

  // hashCode must agree with equals for the copy and differ for distinct
  // configurations.
  @Test
  public void testBaseHashCode() {
    assertEquals(OPTIONS.hashCode(), OPTIONS_COPY.hashCode());
    assertNotEquals(DEFAULT_OPTIONS.hashCode(), OPTIONS.hashCode());
  }
}
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.batch.fluent.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.batch.models.AllocationState;
import com.azure.resourcemanager.batch.models.ApplicationPackageReference;
import com.azure.resourcemanager.batch.models.AutoScaleRun;
import com.azure.resourcemanager.batch.models.CertificateReference;
import com.azure.resourcemanager.batch.models.DeploymentConfiguration;
import com.azure.resourcemanager.batch.models.InterNodeCommunicationState;
import com.azure.resourcemanager.batch.models.MetadataItem;
import com.azure.resourcemanager.batch.models.MountConfiguration;
import com.azure.resourcemanager.batch.models.NetworkConfiguration;
import com.azure.resourcemanager.batch.models.PoolProvisioningState;
import com.azure.resourcemanager.batch.models.ResizeOperationStatus;
import com.azure.resourcemanager.batch.models.ScaleSettings;
import com.azure.resourcemanager.batch.models.StartTask;
import com.azure.resourcemanager.batch.models.TaskSchedulingPolicy;
import com.azure.resourcemanager.batch.models.UserAccount;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.OffsetDateTime;
import java.util.List;
/** Pool properties. */
@Fluent
public final class PoolProperties {
@JsonIgnore private final ClientLogger logger = new ClientLogger(PoolProperties.class);
/*
* The display name for the pool. The display name need not be unique and
* can contain any Unicode characters up to a maximum length of 1024.
*/
@JsonProperty(value = "displayName")
private String displayName;
/*
* The last modified time of the pool. This is the last time at which the
* pool level data, such as the targetDedicatedNodes or autoScaleSettings,
* changed. It does not factor in node-level changes such as a compute node
* changing state.
*/
@JsonProperty(value = "lastModified", access = JsonProperty.Access.WRITE_ONLY)
private OffsetDateTime lastModified;
/*
* The creation time of the pool.
*/
@JsonProperty(value = "creationTime", access = JsonProperty.Access.WRITE_ONLY)
private OffsetDateTime creationTime;
/*
* The current state of the pool.
*/
@JsonProperty(value = "provisioningState", access = JsonProperty.Access.WRITE_ONLY)
private PoolProvisioningState provisioningState;
/*
* The time at which the pool entered its current state.
*/
@JsonProperty(value = "provisioningStateTransitionTime", access = JsonProperty.Access.WRITE_ONLY)
private OffsetDateTime provisioningStateTransitionTime;
/*
* Whether the pool is resizing.
*/
@JsonProperty(value = "allocationState", access = JsonProperty.Access.WRITE_ONLY)
private AllocationState allocationState;
/*
* The time at which the pool entered its current allocation state.
*/
@JsonProperty(value = "allocationStateTransitionTime", access = JsonProperty.Access.WRITE_ONLY)
private OffsetDateTime allocationStateTransitionTime;
/*
* The size of virtual machines in the pool. All VMs in a pool are the same
* size. For information about available sizes of virtual machines for
* Cloud Services pools (pools created with cloudServiceConfiguration), see
* Sizes for Cloud Services
* (https://azure.microsoft.com/documentation/articles/cloud-services-sizes-specs/).
* Batch supports all Cloud Services VM sizes except ExtraSmall. For
* information about available VM sizes for pools using images from the
* Virtual Machines Marketplace (pools created with
* virtualMachineConfiguration) see Sizes for Virtual Machines (Linux)
* (https://azure.microsoft.com/documentation/articles/virtual-machines-linux-sizes/)
* or Sizes for Virtual Machines (Windows)
* (https://azure.microsoft.com/documentation/articles/virtual-machines-windows-sizes/).
* Batch supports all Azure VM sizes except STANDARD_A0 and those with
* premium storage (STANDARD_GS, STANDARD_DS, and STANDARD_DSV2 series).
*/
@JsonProperty(value = "vmSize")
private String vmSize;
/*
* Deployment configuration properties. Using CloudServiceConfiguration
* specifies that the nodes should be creating using Azure Cloud Services
* (PaaS), while VirtualMachineConfiguration uses Azure Virtual Machines
* (IaaS).
*/
@JsonProperty(value = "deploymentConfiguration")
private DeploymentConfiguration deploymentConfiguration;
/*
* The number of compute nodes currently in the pool.
*/
@JsonProperty(value = "currentDedicatedNodes", access = JsonProperty.Access.WRITE_ONLY)
private Integer currentDedicatedNodes;
/*
* The number of low-priority compute nodes currently in the pool.
*/
@JsonProperty(value = "currentLowPriorityNodes", access = JsonProperty.Access.WRITE_ONLY)
private Integer currentLowPriorityNodes;
/*
* Scale settings for the pool Defines the desired size of the pool. This
* can either be 'fixedScale' where the requested targetDedicatedNodes is
* specified, or 'autoScale' which defines a formula which is periodically
* reevaluated. If this property is not specified, the pool will have a
* fixed scale with 0 targetDedicatedNodes.
*/
@JsonProperty(value = "scaleSettings")
private ScaleSettings scaleSettings;
/*
* The results and errors from an execution of a pool autoscale formula.
* This property is set only if the pool automatically scales, i.e.
* autoScaleSettings are used.
*/
@JsonProperty(value = "autoScaleRun", access = JsonProperty.Access.WRITE_ONLY)
private AutoScaleRun autoScaleRun;
/*
* Whether the pool permits direct communication between nodes. This
* imposes restrictions on which nodes can be assigned to the pool.
* Enabling this value can reduce the chance of the requested number of
* nodes to be allocated in the pool. If not specified, this value defaults
* to 'Disabled'.
*/
@JsonProperty(value = "interNodeCommunication")
private InterNodeCommunicationState interNodeCommunication;
/*
* The network configuration for a pool.
*/
@JsonProperty(value = "networkConfiguration")
private NetworkConfiguration networkConfiguration;
/*
* The number of task slots that can be used to run concurrent tasks on a
* single compute node in the pool. The default value is 1. The maximum
* value is the smaller of 4 times the number of cores of the vmSize of the
* pool or 256.
*/
@JsonProperty(value = "taskSlotsPerNode")
private Integer taskSlotsPerNode;
/*
* Specifies how tasks should be distributed across compute nodes. If not
* specified, the default is spread.
*/
@JsonProperty(value = "taskSchedulingPolicy")
private TaskSchedulingPolicy taskSchedulingPolicy;
/*
* The list of user accounts to be created on each node in the pool.
*/
@JsonProperty(value = "userAccounts")
private List<UserAccount> userAccounts;
/*
* A list of name-value pairs associated with the pool as metadata. The
* Batch service does not assign any meaning to metadata; it is solely for
* the use of user code.
*/
@JsonProperty(value = "metadata")
private List<MetadataItem> metadata;
/*
* A task which is run when a compute node joins a pool in the Azure Batch
* service, or when the compute node is rebooted or reimaged. In an PATCH
* (update) operation, this property can be set to an empty object to
* remove the start task from the pool.
*/
@JsonProperty(value = "startTask")
private StartTask startTask;
/*
* The list of certificates to be installed on each compute node in the
* pool. For Windows compute nodes, the Batch service installs the
* certificates to the specified certificate store and location. For Linux
* compute nodes, the certificates are stored in a directory inside the
* task working directory and an environment variable
* AZ_BATCH_CERTIFICATES_DIR is supplied to the task to query for this
* location. For certificates with visibility of 'remoteUser', a 'certs'
* directory is created in the user's home directory (e.g.,
* /home/{user-name}/certs) and certificates are placed in that directory.
*/
@JsonProperty(value = "certificates")
private List<CertificateReference> certificates;

/*
 * The list of application packages to be installed on each compute node in
 * the pool. Changes to application package references affect all new
 * compute nodes joining the pool, but do not affect compute nodes that are
 * already in the pool until they are rebooted or reimaged. There is a
 * maximum of 10 application package references on any given pool.
 */
@JsonProperty(value = "applicationPackages")
private List<ApplicationPackageReference> applicationPackages;

/*
 * The list of application licenses the Batch service will make available
 * on each compute node in the pool. The list of application licenses must
 * be a subset of available Batch service application licenses. If a
 * license is requested which is not supported, pool creation will fail.
 */
@JsonProperty(value = "applicationLicenses")
private List<String> applicationLicenses;

/*
 * Details about the current or last completed resize operation. Describes
 * either the current operation (if the pool AllocationState is Resizing)
 * or the previously completed operation (if the AllocationState is
 * Steady).
 *
 * Access.WRITE_ONLY: Jackson only populates this field when deserializing
 * a service response; it is never serialized back to the service, and no
 * "with" setter exists for it below.
 */
@JsonProperty(value = "resizeOperationStatus", access = JsonProperty.Access.WRITE_ONLY)
private ResizeOperationStatus resizeOperationStatus;

/*
 * A list of file systems to mount on each node in the pool. This supports
 * Azure Files, NFS, CIFS/SMB, and Blobfuse.
 */
@JsonProperty(value = "mountConfiguration")
private List<MountConfiguration> mountConfiguration;
/**
 * Returns the display name of the pool. A display name is not required to be
 * unique and may contain any Unicode characters up to a maximum length of 1024.
 *
 * @return the displayName value.
 */
public String displayName() {
    return displayName;
}

/**
 * Sets the display name of the pool. A display name is not required to be
 * unique and may contain any Unicode characters up to a maximum length of 1024.
 *
 * @param displayName the displayName value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withDisplayName(String displayName) {
    this.displayName = displayName;
    return this;
}
/**
 * Returns the time at which pool-level data (such as the targetDedicatedNodes
 * or autoScaleSettings) was last modified. Node-level changes, such as a
 * compute node changing state, are not reflected here.
 *
 * @return the lastModified value.
 */
public OffsetDateTime lastModified() {
    return lastModified;
}

/**
 * Returns the time at which the pool was created.
 *
 * @return the creationTime value.
 */
public OffsetDateTime creationTime() {
    return creationTime;
}

/**
 * Returns the current provisioning state of the pool.
 *
 * @return the provisioningState value.
 */
public PoolProvisioningState provisioningState() {
    return provisioningState;
}

/**
 * Returns the time at which the pool entered its current provisioning state.
 *
 * @return the provisioningStateTransitionTime value.
 */
public OffsetDateTime provisioningStateTransitionTime() {
    return provisioningStateTransitionTime;
}

/**
 * Returns whether the pool is resizing.
 *
 * @return the allocationState value.
 */
public AllocationState allocationState() {
    return allocationState;
}

/**
 * Returns the time at which the pool entered its current allocation state.
 *
 * @return the allocationStateTransitionTime value.
 */
public OffsetDateTime allocationStateTransitionTime() {
    return allocationStateTransitionTime;
}
/**
 * Returns the size of the virtual machines in the pool. All VMs in a pool are
 * the same size. For available sizes for Cloud Services pools (pools created
 * with cloudServiceConfiguration), see Sizes for Cloud Services
 * (https://azure.microsoft.com/documentation/articles/cloud-services-sizes-specs/);
 * all Cloud Services VM sizes except ExtraSmall are supported. For available
 * sizes for pools using Virtual Machines Marketplace images (pools created
 * with virtualMachineConfiguration), see Sizes for Virtual Machines (Linux)
 * (https://azure.microsoft.com/documentation/articles/virtual-machines-linux-sizes/)
 * or Sizes for Virtual Machines (Windows)
 * (https://azure.microsoft.com/documentation/articles/virtual-machines-windows-sizes/);
 * all Azure VM sizes are supported except STANDARD_A0 and those with premium
 * storage (STANDARD_GS, STANDARD_DS, and STANDARD_DSV2 series).
 *
 * @return the vmSize value.
 */
public String vmSize() {
    return vmSize;
}

/**
 * Sets the size of the virtual machines in the pool. All VMs in a pool are
 * the same size. See {@link #vmSize()} for the supported size families for
 * Cloud Services and Virtual Machine configuration pools.
 *
 * @param vmSize the vmSize value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withVmSize(String vmSize) {
    this.vmSize = vmSize;
    return this;
}
/**
 * Returns the deployment configuration for the pool. Using
 * CloudServiceConfiguration specifies that the nodes should be created using
 * Azure Cloud Services (PaaS), while VirtualMachineConfiguration uses Azure
 * Virtual Machines (IaaS).
 *
 * @return the deploymentConfiguration value.
 */
public DeploymentConfiguration deploymentConfiguration() {
    return deploymentConfiguration;
}

/**
 * Sets the deployment configuration for the pool. Using
 * CloudServiceConfiguration specifies that the nodes should be created using
 * Azure Cloud Services (PaaS), while VirtualMachineConfiguration uses Azure
 * Virtual Machines (IaaS).
 *
 * @param deploymentConfiguration the deploymentConfiguration value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withDeploymentConfiguration(DeploymentConfiguration deploymentConfiguration) {
    this.deploymentConfiguration = deploymentConfiguration;
    return this;
}
/**
 * Returns the number of dedicated compute nodes currently in the pool.
 *
 * @return the currentDedicatedNodes value.
 */
public Integer currentDedicatedNodes() {
    return currentDedicatedNodes;
}

/**
 * Returns the number of low-priority compute nodes currently in the pool.
 *
 * @return the currentLowPriorityNodes value.
 */
public Integer currentLowPriorityNodes() {
    return currentLowPriorityNodes;
}
/**
 * Returns the scale settings for the pool, which define its desired size.
 * This can either be 'fixedScale', where the requested targetDedicatedNodes
 * is specified, or 'autoScale', which defines a formula that is periodically
 * reevaluated. If this property is not specified, the pool has a fixed scale
 * with 0 targetDedicatedNodes.
 *
 * @return the scaleSettings value.
 */
public ScaleSettings scaleSettings() {
    return scaleSettings;
}

/**
 * Sets the scale settings for the pool, which define its desired size.
 * This can either be 'fixedScale', where the requested targetDedicatedNodes
 * is specified, or 'autoScale', which defines a formula that is periodically
 * reevaluated. If this property is not specified, the pool has a fixed scale
 * with 0 targetDedicatedNodes.
 *
 * @param scaleSettings the scaleSettings value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withScaleSettings(ScaleSettings scaleSettings) {
    this.scaleSettings = scaleSettings;
    return this;
}
/**
 * Returns the results and errors from the last execution of the pool's
 * autoscale formula. This property is only set when the pool automatically
 * scales, i.e. when autoScaleSettings are used.
 *
 * @return the autoScaleRun value.
 */
public AutoScaleRun autoScaleRun() {
    return autoScaleRun;
}
/**
 * Returns whether the pool permits direct communication between nodes.
 * Enabling this imposes restrictions on which nodes can be assigned to the
 * pool and can reduce the chance of the requested number of nodes being
 * allocated. If not specified, this value defaults to 'Disabled'.
 *
 * @return the interNodeCommunication value.
 */
public InterNodeCommunicationState interNodeCommunication() {
    return interNodeCommunication;
}

/**
 * Sets whether the pool permits direct communication between nodes.
 * Enabling this imposes restrictions on which nodes can be assigned to the
 * pool and can reduce the chance of the requested number of nodes being
 * allocated. If not specified, this value defaults to 'Disabled'.
 *
 * @param interNodeCommunication the interNodeCommunication value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withInterNodeCommunication(InterNodeCommunicationState interNodeCommunication) {
    this.interNodeCommunication = interNodeCommunication;
    return this;
}
/**
 * Returns the network configuration for the pool.
 *
 * @return the networkConfiguration value.
 */
public NetworkConfiguration networkConfiguration() {
    return networkConfiguration;
}

/**
 * Sets the network configuration for the pool.
 *
 * @param networkConfiguration the networkConfiguration value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withNetworkConfiguration(NetworkConfiguration networkConfiguration) {
    this.networkConfiguration = networkConfiguration;
    return this;
}
/**
 * Returns the number of task slots usable for concurrent tasks on a single
 * compute node in the pool. The default value is 1; the maximum is the
 * smaller of 4 times the number of cores of the pool's vmSize, or 256.
 *
 * @return the taskSlotsPerNode value.
 */
public Integer taskSlotsPerNode() {
    return taskSlotsPerNode;
}

/**
 * Sets the number of task slots usable for concurrent tasks on a single
 * compute node in the pool. The default value is 1; the maximum is the
 * smaller of 4 times the number of cores of the pool's vmSize, or 256.
 *
 * @param taskSlotsPerNode the taskSlotsPerNode value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withTaskSlotsPerNode(Integer taskSlotsPerNode) {
    this.taskSlotsPerNode = taskSlotsPerNode;
    return this;
}
/**
 * Returns the policy specifying how tasks are distributed across compute
 * nodes. If not specified, the default is spread.
 *
 * @return the taskSchedulingPolicy value.
 */
public TaskSchedulingPolicy taskSchedulingPolicy() {
    return taskSchedulingPolicy;
}

/**
 * Sets the policy specifying how tasks are distributed across compute
 * nodes. If not specified, the default is spread.
 *
 * @param taskSchedulingPolicy the taskSchedulingPolicy value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withTaskSchedulingPolicy(TaskSchedulingPolicy taskSchedulingPolicy) {
    this.taskSchedulingPolicy = taskSchedulingPolicy;
    return this;
}
/**
 * Returns the list of user accounts to be created on each node in the pool.
 *
 * @return the userAccounts value.
 */
public List<UserAccount> userAccounts() {
    return userAccounts;
}

/**
 * Sets the list of user accounts to be created on each node in the pool.
 *
 * @param userAccounts the userAccounts value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withUserAccounts(List<UserAccount> userAccounts) {
    this.userAccounts = userAccounts;
    return this;
}
/**
 * Returns the name-value pairs associated with the pool as metadata. The
 * Batch service assigns no meaning to metadata; it is solely for use by
 * user code.
 *
 * @return the metadata value.
 */
public List<MetadataItem> metadata() {
    return metadata;
}

/**
 * Sets the name-value pairs associated with the pool as metadata. The
 * Batch service assigns no meaning to metadata; it is solely for use by
 * user code.
 *
 * @param metadata the metadata value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withMetadata(List<MetadataItem> metadata) {
    this.metadata = metadata;
    return this;
}
/**
 * Returns the task run when a compute node joins a pool in the Azure Batch
 * service, or when the compute node is rebooted or reimaged. In a PATCH
 * (update) operation, this property can be set to an empty object to remove
 * the start task from the pool.
 *
 * @return the startTask value.
 */
public StartTask startTask() {
    return startTask;
}

/**
 * Sets the task run when a compute node joins a pool in the Azure Batch
 * service, or when the compute node is rebooted or reimaged. In a PATCH
 * (update) operation, this property can be set to an empty object to remove
 * the start task from the pool.
 *
 * @param startTask the startTask value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withStartTask(StartTask startTask) {
    this.startTask = startTask;
    return this;
}
/**
 * Returns the list of certificates to be installed on each compute node in
 * the pool. On Windows nodes, the Batch service installs the certificates to
 * the specified certificate store and location. On Linux nodes, the
 * certificates are stored in a directory inside the task working directory,
 * and the environment variable AZ_BATCH_CERTIFICATES_DIR is supplied to the
 * task to query for this location. For certificates with visibility of
 * 'remoteUser', a 'certs' directory is created in the user's home directory
 * (e.g., /home/{user-name}/certs) and certificates are placed there.
 *
 * @return the certificates value.
 */
public List<CertificateReference> certificates() {
    return certificates;
}

/**
 * Sets the list of certificates to be installed on each compute node in
 * the pool. See {@link #certificates()} for where the certificates are
 * installed on Windows and Linux nodes.
 *
 * @param certificates the certificates value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withCertificates(List<CertificateReference> certificates) {
    this.certificates = certificates;
    return this;
}
/**
 * Returns the list of application packages to be installed on each compute
 * node in the pool. Changes to application package references affect all new
 * compute nodes joining the pool, but do not affect compute nodes already in
 * the pool until they are rebooted or reimaged. A pool may have at most 10
 * application package references.
 *
 * @return the applicationPackages value.
 */
public List<ApplicationPackageReference> applicationPackages() {
    return applicationPackages;
}

/**
 * Sets the list of application packages to be installed on each compute
 * node in the pool. Changes to application package references affect all new
 * compute nodes joining the pool, but do not affect compute nodes already in
 * the pool until they are rebooted or reimaged. A pool may have at most 10
 * application package references.
 *
 * @param applicationPackages the applicationPackages value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withApplicationPackages(List<ApplicationPackageReference> applicationPackages) {
    this.applicationPackages = applicationPackages;
    return this;
}
/**
 * Returns the list of application licenses the Batch service will make
 * available on each compute node in the pool. This list must be a subset of
 * the available Batch service application licenses; requesting an
 * unsupported license causes pool creation to fail.
 *
 * @return the applicationLicenses value.
 */
public List<String> applicationLicenses() {
    return applicationLicenses;
}

/**
 * Sets the list of application licenses the Batch service will make
 * available on each compute node in the pool. This list must be a subset of
 * the available Batch service application licenses; requesting an
 * unsupported license causes pool creation to fail.
 *
 * @param applicationLicenses the applicationLicenses value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withApplicationLicenses(List<String> applicationLicenses) {
    this.applicationLicenses = applicationLicenses;
    return this;
}
/**
 * Returns details about the current or last completed resize operation:
 * the current operation if the pool AllocationState is Resizing, or the
 * previously completed operation if the AllocationState is Steady.
 *
 * @return the resizeOperationStatus value.
 */
public ResizeOperationStatus resizeOperationStatus() {
    return resizeOperationStatus;
}
/**
 * Returns the list of file systems to mount on each node in the pool.
 * Supported file systems are Azure Files, NFS, CIFS/SMB, and Blobfuse.
 *
 * @return the mountConfiguration value.
 */
public List<MountConfiguration> mountConfiguration() {
    return mountConfiguration;
}

/**
 * Sets the list of file systems to mount on each node in the pool.
 * Supported file systems are Azure Files, NFS, CIFS/SMB, and Blobfuse.
 *
 * @param mountConfiguration the mountConfiguration value to set.
 * @return the PoolProperties object itself.
 */
public PoolProperties withMountConfiguration(List<MountConfiguration> mountConfiguration) {
    this.mountConfiguration = mountConfiguration;
    return this;
}
/**
 * Validates the instance by recursively validating every nested model
 * property that is set. Properties that are null are skipped. The order of
 * checks matches the declaration order of the properties.
 *
 * @throws IllegalArgumentException thrown if the instance is not valid.
 */
public void validate() {
    DeploymentConfiguration deployment = deploymentConfiguration();
    if (deployment != null) {
        deployment.validate();
    }
    ScaleSettings scaling = scaleSettings();
    if (scaling != null) {
        scaling.validate();
    }
    AutoScaleRun autoScale = autoScaleRun();
    if (autoScale != null) {
        autoScale.validate();
    }
    NetworkConfiguration network = networkConfiguration();
    if (network != null) {
        network.validate();
    }
    TaskSchedulingPolicy scheduling = taskSchedulingPolicy();
    if (scheduling != null) {
        scheduling.validate();
    }
    if (userAccounts() != null) {
        userAccounts().forEach(UserAccount::validate);
    }
    if (metadata() != null) {
        metadata().forEach(MetadataItem::validate);
    }
    StartTask start = startTask();
    if (start != null) {
        start.validate();
    }
    if (certificates() != null) {
        certificates().forEach(CertificateReference::validate);
    }
    if (applicationPackages() != null) {
        applicationPackages().forEach(ApplicationPackageReference::validate);
    }
    ResizeOperationStatus resizeStatus = resizeOperationStatus();
    if (resizeStatus != null) {
        resizeStatus.validate();
    }
    if (mountConfiguration() != null) {
        mountConfiguration().forEach(MountConfiguration::validate);
    }
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.schema;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.util.SchemaUtil;
/**
*
* Schema for the bytes in a RowKey. For the RowKey, we use a null byte
* to terminate a variable length type, while for KeyValue bytes we
* write the length of the var char preceding the value. We can't do
* that for a RowKey because it would affect the sort order.
*
*
* @since 0.1
*/
public class RowKeySchema extends ValueSchema {
    // Shared immutable schema with zero fields; rowKeyOrderOptimizable is true.
    public static final RowKeySchema EMPTY_SCHEMA = new RowKeySchema(0,Collections.<Field>emptyList(), true);

    // No-arg constructor; presumably required for Writable/serialization use -- TODO confirm.
    public RowKeySchema() {
    }

    protected RowKeySchema(int minNullable, List<Field> fields, boolean rowKeyOrderOptimizable) {
        super(minNullable, fields, rowKeyOrderOptimizable);
    }

    /**
     * Builder for {@link RowKeySchema}. Fields are added in row key order;
     * {@link #build()} condenses them via the inherited buildFields().
     */
    public static class RowKeySchemaBuilder extends ValueSchemaBuilder {
        private boolean rowKeyOrderOptimizable = false;

        public RowKeySchemaBuilder(int maxFields) {
            super(maxFields);
            // NOTE(review): super(maxFields) followed by setMaxFields(maxFields)
            // looks redundant -- confirm against ValueSchemaBuilder whether the
            // superclass constructor already records maxFields.
            setMaxFields(maxFields);
        }

        @Override
        public RowKeySchemaBuilder addField(PDatum datum, boolean isNullable, SortOrder sortOrder) {
            super.addField(datum, isNullable, sortOrder);
            return this;
        }

        // Sets whether the resulting schema uses the optimized row key sort order.
        public RowKeySchemaBuilder rowKeyOrderOptimizable(boolean rowKeyOrderOptimizable) {
            this.rowKeyOrderOptimizable = rowKeyOrderOptimizable;
            return this;
        }

        @Override
        public RowKeySchema build() {
            List<Field> condensedFields = buildFields();
            return new RowKeySchema(this.minNullable, condensedFields, rowKeyOrderOptimizable);
        }
    }

    public boolean rowKeyOrderOptimizable() {
        return rowKeyOrderOptimizable;
    }

    // NOTE(review): intentionally returns getMinNullable() rather than a raw
    // field count -- presumably minNullable doubles as the max field count for
    // a row key schema; confirm against ValueSchema before relying on this.
    public int getMaxFields() {
        return this.getMinNullable();
    }

    // "iterator" initialization methods that initialize a bytes ptr with a row key for further navigation

    /**
     * Initializes {@code ptr} over the row key in {@code src} and advances it to
     * the field at {@code position}, optionally extending it over
     * {@code extraColumnSpan} additional fields.
     *
     * @return the result of the last next() call: true if that field had a value,
     *         false if it was null, and null if iteration ran off the end (or
     *         position is 0 and next() was never called).
     */
    @edu.umd.cs.findbugs.annotations.SuppressWarnings(
        value="NP_BOOLEAN_RETURN_NULL",
        justification="Designed to return null.")
    public Boolean iterator(byte[] src, int srcOffset, int srcLength, ImmutableBytesWritable ptr, int position,int extraColumnSpan) {
        Boolean hasValue = null;
        // Start with a zero-length ptr at the beginning of the key; next() moves it forward.
        ptr.set(src, srcOffset, 0);
        int maxOffset = srcOffset + srcLength;
        for (int i = 0; i < position; i++) {
            hasValue = next(ptr, i, maxOffset);
        }
        if(extraColumnSpan > 0) {
            readExtraFields(ptr, position, maxOffset, extraColumnSpan);
        }
        return hasValue;
    }

    public Boolean iterator(byte[] src, int srcOffset, int srcLength, ImmutableBytesWritable ptr, int position) {
        return iterator(src, srcOffset,srcLength, ptr, position,0);
    }

    public Boolean iterator(ImmutableBytesWritable srcPtr, ImmutableBytesWritable ptr, int position) {
        return iterator(srcPtr.get(), srcPtr.getOffset(), srcPtr.getLength(), ptr, position);
    }

    public Boolean iterator(byte[] src, ImmutableBytesWritable ptr, int position) {
        return iterator(src, 0, src.length, ptr, position);
    }

    // Positions ptr at the start of the row key (before the first field) and
    // returns the max offset usable for subsequent next() calls.
    public int iterator(byte[] src, int srcOffset, int srcLength, ImmutableBytesWritable ptr) {
        int maxOffset = srcOffset + srcLength;
        iterator(src, srcOffset, srcLength, ptr, 0);
        return maxOffset;
    }

    public int iterator(byte[] src, ImmutableBytesWritable ptr) {
        return iterator(src, 0, src.length, ptr);
    }

    public int iterator(ImmutableBytesWritable ptr) {
        return iterator(ptr.get(),ptr.getOffset(),ptr.getLength(), ptr);
    }

    // navigation methods that "select" different chunks of the row key held in a bytes ptr

    /**
     * Move the bytes ptr to the next position in the row key relative to its current position. You
     * must have a complete row key. Use @link {@link #position(ImmutableBytesWritable, int, int)}
     * if you have a partial row key.
     * @param ptr bytes pointer pointing to the value at the positional index provided.
     * @param position zero-based index of the next field in the value schema
     * @param maxOffset max possible offset value when iterating
     * @return true if a value was found and ptr was set, false if the value is null and ptr was not
     * set, and null if the value is null and there are no more values
     */
    public Boolean next(ImmutableBytesWritable ptr, int position, int maxOffset) {
        return next(ptr, position, maxOffset, false);
    }

    @edu.umd.cs.findbugs.annotations.SuppressWarnings(
        value="NP_BOOLEAN_RETURN_NULL",
        justification="Designed to return null.")
    private Boolean next(ImmutableBytesWritable ptr, int position, int maxOffset, boolean isFirst) {
        // Already at or past the end of the key: park ptr at maxOffset with zero length.
        if (ptr.getOffset() + ptr.getLength() >= maxOffset) {
            ptr.set(ptr.get(), maxOffset, 0);
            return null;
        }
        if (position >= getFieldCount()) {
            return null;
        }
        // Move the pointer past the current value and set length
        // to 0 to ensure you never set the ptr past the end of the
        // backing byte array.
        ptr.set(ptr.get(), ptr.getOffset() + ptr.getLength(), 0);
        // If positioned at SEPARATOR_BYTE, skip it.
        // Don't look back at previous fields if this is our first next call, as
        // we may have a partial key for RVCs that doesn't include the leading field.
        if (position > 0 && !isFirst && !getField(position-1).getDataType().isFixedWidth()) {
            ptr.set(ptr.get(), ptr.getOffset()+ptr.getLength()+1, 0);
        }
        Field field = this.getField(position);
        if (field.getDataType().isFixedWidth()) {
            // It is possible that the number of remaining row key bytes are less than the fixed
            // width size. See PHOENIX-3968.
            ptr.set(ptr.get(), ptr.getOffset(), Math.min(maxOffset - ptr.getOffset(), field.getByteSize()));
        } else {
            if (position+1 == getFieldCount() ) {
                // Last field has no terminator unless it's descending sort order
                int len = maxOffset - ptr.getOffset();
                ptr.set(ptr.get(), ptr.getOffset(), maxOffset - ptr.getOffset() - (SchemaUtil.getSeparatorByte(rowKeyOrderOptimizable, len == 0, field) == QueryConstants.DESC_SEPARATOR_BYTE ? 1 : 0));
            } else {
                byte[] buf = ptr.get();
                int offset = ptr.getOffset();
                // Scan forward to the field's separator byte to determine its length.
                // First byte
                if (offset < maxOffset && buf[offset] != QueryConstants.SEPARATOR_BYTE) {
                    byte sepByte = SchemaUtil.getSeparatorByte(rowKeyOrderOptimizable, false, field);
                    do {
                        offset++;
                    } while (offset < maxOffset && buf[offset] != sepByte);
                }
                ptr.set(buf, ptr.getOffset(), offset - ptr.getOffset());
            }
        }
        // Zero length means the value is null.
        return ptr.getLength() > 0;
    }

    /**
     * Like {@link #next(org.apache.hadoop.hbase.io.ImmutableBytesWritable, int, int)}, but also
     * includes the next {@code extraSpan} additional fields in the bytes ptr.
     * This allows multiple fields to be treated as one concatenated whole.
     * @param ptr bytes pointer pointing to the value at the positional index provided.
     * @param position zero-based index of the next field in the value schema
     * @param maxOffset max possible offset value when iterating
     * @param extraSpan the number of extra fields to expand the ptr to contain
     * @return true if a value was found and ptr was set, false if the value is null and ptr was not
     * set, and null if the value is null and there are no more values
     */
    public Boolean next(ImmutableBytesWritable ptr, int position, int maxOffset, int extraSpan) {
        Boolean returnValue = next(ptr, position, maxOffset);
        readExtraFields(ptr, position + 1, maxOffset, extraSpan);
        return returnValue;
    }

    /**
     * Moves ptr backwards to the field at {@code position}. Fixed-width prior
     * fields allow direct offset arithmetic; variable-width prior fields require
     * a backwards separator scan, or in the worst case a forward re-iteration
     * from {@code minOffset}.
     */
    @edu.umd.cs.findbugs.annotations.SuppressWarnings(
        value="NP_BOOLEAN_RETURN_NULL",
        justification="Designed to return null.")
    public Boolean previous(ImmutableBytesWritable ptr, int position, int minOffset) {
        if (position < 0) {
            return null;
        }
        Field field = this.getField(position);
        if (field.getDataType().isFixedWidth()) {
            ptr.set(ptr.get(), ptr.getOffset()-field.getByteSize(), field.getByteSize());
            return true;
        }
        // If ptr has length of zero, it is assumed that we're at the end of the row key
        int offsetAdjustment = position + 1 == this.getFieldCount() || ptr.getLength() == 0 ? 0 : 1;
        if (position == 0) {
            ptr.set(ptr.get(), minOffset, ptr.getOffset() - minOffset - offsetAdjustment);
            return true;
        }
        field = this.getField(position-1);
        // Field before the one we want to position at is variable length
        // In this case, we can search backwards for our separator byte
        // to determine the length
        if (!field.getDataType().isFixedWidth()) {
            byte[] buf = ptr.get();
            int offset = ptr.getOffset()-1-offsetAdjustment;
            // Separator always zero byte if zero length
            if (offset > minOffset && buf[offset] != QueryConstants.SEPARATOR_BYTE) {
                byte sepByte = SchemaUtil.getSeparatorByte(rowKeyOrderOptimizable, false, field);
                do {
                    offset--;
                } while (offset > minOffset && buf[offset] != sepByte);
            }
            if (offset == minOffset) { // shouldn't happen
                ptr.set(buf, minOffset, ptr.getOffset()-minOffset-1);
            } else {
                ptr.set(buf,offset+1,ptr.getOffset()-1-offsetAdjustment-offset); // Don't include null terminator in length
            }
            return true;
        }
        // Sum the widths of the contiguous run of fixed-width fields ending at position-1.
        int i,fixedOffset = field.getByteSize();
        for (i = position-2; i >= 0 && this.getField(i).getDataType().isFixedWidth(); i--) {
            fixedOffset += this.getField(i).getByteSize();
        }
        // All of the previous fields are fixed width, so we can calculate the offset
        // based on the total fixed offset
        if (i < 0) {
            int length = ptr.getOffset() - fixedOffset - minOffset - offsetAdjustment;
            ptr.set(ptr.get(),minOffset+fixedOffset, length);
            return true;
        }
        // Otherwise we're stuck with starting from the minOffset and working all the way forward,
        // because we can't infer the length of the previous position.
        return iterator(ptr.get(), minOffset, ptr.getOffset() - minOffset - offsetAdjustment, ptr, position+1);
    }

    /**
     * Repositions ptr from the field at {@code oldPosition} to the field at
     * {@code newPosition}, stepping forward with next() or backward with
     * previous(). When stepping backward, it may instead re-iterate from the
     * start if that would cross fewer variable-length fields.
     */
    @edu.umd.cs.findbugs.annotations.SuppressWarnings(
        value="NP_BOOLEAN_RETURN_NULL",
        justification="Designed to return null.")
    public Boolean reposition(ImmutableBytesWritable ptr, int oldPosition, int newPosition, int minOffset, int maxOffset) {
        if (newPosition == oldPosition) {
            return ptr.getLength() > 0;
        }
        Boolean hasValue = null;
        if (newPosition > oldPosition) {
            do {
                hasValue = next(ptr, ++oldPosition, maxOffset);
            } while (hasValue != null && oldPosition < newPosition) ;
        } else {
            // Count variable-length fields from the start up to newPosition...
            int nVarLengthFromBeginning = 0;
            for (int i = 0; i <= newPosition; i++) {
                if (!this.getField(i).getDataType().isFixedWidth()) {
                    nVarLengthFromBeginning++;
                }
            }
            // ...and between newPosition and oldPosition.
            int nVarLengthBetween = 0;
            for (int i = oldPosition - 1; i >= newPosition; i--) {
                if (!this.getField(i).getDataType().isFixedWidth()) {
                    nVarLengthBetween++;
                }
            }
            // Forward iteration from the beginning is cheaper when it crosses
            // fewer variable-length fields than walking backward would.
            if (nVarLengthBetween > nVarLengthFromBeginning) {
                return iterator(ptr.get(), minOffset, maxOffset-minOffset, ptr, newPosition+1);
            }
            do {
                hasValue = previous(ptr, --oldPosition, minOffset);
            } while (hasValue != null && oldPosition > newPosition);
        }
        return hasValue;
    }

    /**
     * Like {@link #reposition(org.apache.hadoop.hbase.io.ImmutableBytesWritable, int, int, int, int)},
     * but also includes the next {@code extraSpan} additional fields in the bytes ptr.
     * This allows multiple fields to be treated as one concatenated whole.
     * @param extraSpan the number of extra fields to expand the ptr to contain.
     */
    public Boolean reposition(ImmutableBytesWritable ptr, int oldPosition, int newPosition, int minOffset, int maxOffset, int extraSpan) {
        Boolean returnValue = reposition(ptr, oldPosition, newPosition, minOffset, maxOffset);
        readExtraFields(ptr, newPosition + 1, maxOffset, extraSpan);
        return returnValue;
    }

    /**
     * Positions ptr at the part of the row key for the field at endPosition,
     * starting from the field at position.
     * @param ptr bytes pointer that points to row key being traversed.
     * @param position the starting field position
     * @param endPosition the ending field position
     * @return true if the row key has a value at endPosition with ptr pointing to
     * that value and false otherwise with ptr not necessarily set.
     */
    public boolean position(ImmutableBytesWritable ptr, int position, int endPosition) {
        int maxOffset = ptr.getLength();
        this.iterator(ptr); // initialize for iteration
        boolean isFirst = true;
        while (position <= endPosition) {
            // isFirst=true on the first step so next() doesn't look back at a
            // field that may be absent from a partial (RVC) key.
            if (this.next(ptr, position++, maxOffset, isFirst) == null) {
                return false;
            }
            isFirst = false;
        }
        return true;
    }

    /**
     * Extends the boundaries of the {@code ptr} to contain the next {@code extraSpan} fields in the row key.
     * @param ptr bytes pointer pointing to the value at the positional index provided.
     * @param position row key position of the first extra key to read
     * @param maxOffset the maximum offset into the bytes pointer to allow
     * @param extraSpan the number of extra fields to expand the ptr to contain.
     */
    private void readExtraFields(ImmutableBytesWritable ptr, int position, int maxOffset, int extraSpan) {
        int initialOffset = ptr.getOffset();
        for(int i = 0; i < extraSpan; i++) {
            Boolean returnValue = next(ptr, position + i, maxOffset);
            // Stop early if we ran off the end of the key or the schema.
            if(returnValue == null) {
                break;
            }
        }
        // Re-anchor ptr at the original offset, spanning through the last field read.
        int finalLength = ptr.getOffset() - initialOffset + ptr.getLength();
        ptr.set(ptr.get(), initialOffset, finalLength);
    }
}
|
|
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.protobuf.contrib.j2cl.integration;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import com.google.protobuf.contrib.j2cl.protos.Accessor.TestProto;
import com.google.protobuf.contrib.j2cl.protos.Accessor.TestProto.NestedMessage;
import com.google.protobuf.contrib.j2cl.protos.Proto3Accessors.TestProto3;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Integration tests for generated message-typed field accessors on the J2CL
 * protobuf implementation: optional (singular) and repeated message fields,
 * for both proto2 ({@code TestProto}) and proto3 ({@code TestProto3}).
 */
@RunWith(JUnit4.class)
public final class MessageFieldsTest {
  // ---- Optional (singular) message fields ----
  @Test
  public void testOptionalField_defaultInstance() {
    // An unset message field reports absent and yields a default sub-message.
    assertThat(TestProto.newBuilder().hasOptionalMessage()).isFalse();
    assertThat(TestProto.getDefaultInstance().hasOptionalMessage()).isFalse();
    assertThat(TestProto.newBuilder().getOptionalMessage().hasPayload()).isFalse();
    assertThat(TestProto.getDefaultInstance().getOptionalMessage().hasPayload()).isFalse();
  }
  @Test
  public void testProto3MessageField_defaultInstance() {
    // proto3 message fields still have explicit presence (hazzers).
    assertThat(TestProto3.newBuilder().hasProto3Message()).isFalse();
    assertThat(TestProto3.getDefaultInstance().hasProto3Message()).isFalse();
    assertThat(TestProto3.newBuilder().getProto3Message().hasPayload()).isFalse();
    assertThat(TestProto3.getDefaultInstance().getProto3Message().hasPayload()).isFalse();
  }
  @Test
  public void testOptionalFieldNoDefault_setValue() {
    TestProto.Builder builder =
        TestProto.newBuilder()
            .setOptionalMessage(NestedMessage.newBuilder().setPayload("payload").build());
    assertThat(builder.hasOptionalMessage()).isTrue();
    assertThat(builder.getOptionalMessage().getPayload()).isEqualTo("payload");
    TestProto proto = builder.build();
    assertThat(proto.hasOptionalMessage()).isTrue();
    assertThat(proto.getOptionalMessage().getPayload()).isEqualTo("payload");
    TestProto3.Builder proto3Builder =
        TestProto3.newBuilder()
            .setProto3Message(TestProto3.NestedMessage.newBuilder().setPayload("payload").build());
    assertThat(proto3Builder.hasProto3Message()).isTrue();
    assertThat(proto3Builder.getProto3Message().getPayload()).isEqualTo("payload");
    TestProto3 proto3 = proto3Builder.build();
    assertThat(proto3.hasProto3Message()).isTrue();
    assertThat(proto3.getProto3Message().getPayload()).isEqualTo("payload");
  }
  @Test
  public void testOptionalField_toBuilder() {
    TestProto startProto =
        TestProto.newBuilder()
            .setOptionalMessage(NestedMessage.newBuilder().setPayload("payload").build())
            .build();
    TestProto.Builder builder = startProto.toBuilder();
    assertThat(builder.hasOptionalMessage()).isTrue();
    assertThat(builder.getOptionalMessage().getPayload()).isEqualTo("payload");
    builder.setOptionalMessage(NestedMessage.newBuilder().setPayload("payload_new").build());
    assertThat(builder.hasOptionalMessage()).isTrue();
    assertThat(builder.getOptionalMessage().getPayload()).isEqualTo("payload_new");
    TestProto proto = builder.build();
    assertThat(proto.hasOptionalMessage()).isTrue();
    assertThat(proto.getOptionalMessage().getPayload()).isEqualTo("payload_new");
    // Mutating via the builder must not affect the original message.
    assertThat(startProto.hasOptionalMessage()).isTrue();
    assertThat(startProto.getOptionalMessage().getPayload()).isEqualTo("payload");
  }
  @Test
  public void testOptionalField_clear() {
    TestProto.Builder builder =
        TestProto.newBuilder()
            .setOptionalMessage(NestedMessage.newBuilder().setPayload("payload").build());
    builder.clearOptionalMessage();
    // After clearing, the getter returns the default (empty-payload) message.
    assertThat(builder.hasOptionalMessage()).isFalse();
    assertThat(builder.getOptionalMessage().getPayload()).isEmpty();
    TestProto proto = builder.build();
    assertThat(proto.hasOptionalMessage()).isFalse();
    assertThat(proto.getOptionalMessage().getPayload()).isEmpty();
  }
  // ---- Repeated message fields ----
  @Test
  public void testRepeatedField_defaultValue() {
    TestProto proto = TestProto.newBuilder().build();
    assertThat(proto.getRepeatedMessageCount()).isEqualTo(0);
    assertThat(proto.getRepeatedMessageList()).isEmpty();
  }
  @Test
  public void testRepeatedField_add() {
    TestProto.Builder builder =
        TestProto.newBuilder()
            .addRepeatedMessage(NestedMessage.newBuilder().setPayload("one").build())
            .addRepeatedMessage(NestedMessage.newBuilder().setPayload("two").build());
    List<NestedMessage> nestedList = builder.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("one");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("two");
    assertThat(builder.getRepeatedMessage(0).getPayload()).isEqualTo("one");
    assertThat(builder.getRepeatedMessage(1).getPayload()).isEqualTo("two");
    TestProto proto = builder.build();
    nestedList = proto.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("one");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("two");
    assertThat(proto.getRepeatedMessage(0).getPayload()).isEqualTo("one");
    assertThat(proto.getRepeatedMessage(1).getPayload()).isEqualTo("two");
  }
  @Test
  public void testRepeatedField_addAll() {
    TestProto.Builder builder =
        TestProto.newBuilder()
            .addAllRepeatedMessage(
                Arrays.asList(
                    NestedMessage.newBuilder().setPayload("one").build(),
                    NestedMessage.newBuilder().setPayload("two").build()));
    List<NestedMessage> nestedList = builder.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("one");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("two");
    assertThat(builder.getRepeatedMessage(0).getPayload()).isEqualTo("one");
    assertThat(builder.getRepeatedMessage(1).getPayload()).isEqualTo("two");
    TestProto proto = builder.build();
    nestedList = proto.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("one");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("two");
    assertThat(proto.getRepeatedMessage(0).getPayload()).isEqualTo("one");
    assertThat(proto.getRepeatedMessage(1).getPayload()).isEqualTo("two");
  }
  @Test
  public void testRepeatedField_setValue() {
    TestProto startProto =
        TestProto.newBuilder()
            .addAllRepeatedMessage(
                Arrays.asList(
                    NestedMessage.newBuilder().setPayload("one").build(),
                    NestedMessage.newBuilder().setPayload("two").build()))
            .build();
    TestProto.Builder builder =
        startProto
            .toBuilder()
            .setRepeatedMessage(1, NestedMessage.newBuilder().setPayload("another two").build());
    List<NestedMessage> nestedList = builder.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("one");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("another two");
    assertThat(builder.getRepeatedMessage(0).getPayload()).isEqualTo("one");
    assertThat(builder.getRepeatedMessage(1).getPayload()).isEqualTo("another two");
    TestProto proto = builder.build();
    nestedList = proto.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("one");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("another two");
    assertThat(proto.getRepeatedMessage(0).getPayload()).isEqualTo("one");
    assertThat(proto.getRepeatedMessage(1).getPayload()).isEqualTo("another two");
  }
  @Test
  public void testRepeatedField_outOfBounds() {
    TestProto.Builder builder =
        TestProto.newBuilder()
            .addAllRepeatedMessage(
                Arrays.asList(
                    NestedMessage.newBuilder().setPayload("one").build(),
                    NestedMessage.newBuilder().setPayload("two").build()));
    TestProto proto = builder.build();
    // Index checking is only enforced when the runtime was compiled with
    // checks enabled, so guard the assertions accordingly.
    if (InternalChecks.isCheckIndex()) {
      assertThrows(
          Exception.class, () -> builder.setRepeatedMessage(2, NestedMessage.getDefaultInstance()));
      assertThrows(Exception.class, () -> builder.getRepeatedMessage(2));
      assertThrows(Exception.class, () -> proto.getRepeatedMessage(2));
    }
  }
  @Test
  public void testRepeatedField_clear() {
    TestProto startProto =
        TestProto.newBuilder()
            .addAllRepeatedMessage(
                Arrays.asList(
                    NestedMessage.newBuilder().setPayload("one").build(),
                    NestedMessage.newBuilder().setPayload("two").build()))
            .build();
    TestProto.Builder builder = startProto.toBuilder().clearRepeatedMessage();
    assertThat(builder.getRepeatedMessageCount()).isEqualTo(0);
    assertThat(builder.getRepeatedMessageList()).isEmpty();
    TestProto proto = builder.build();
    assertThat(proto.getRepeatedMessageCount()).isEqualTo(0);
    assertThat(proto.getRepeatedMessageList()).isEmpty();
  }
  @Test
  public void testRepeatedField_newBuilder() {
    // newBuilder(prototype) copies the repeated field contents.
    TestProto startProto =
        TestProto.newBuilder()
            .addRepeatedMessage(NestedMessage.newBuilder().setPayload("one").build())
            .addRepeatedMessage(NestedMessage.newBuilder().setPayload("two").build())
            .build();
    TestProto proto = TestProto.newBuilder(startProto).build();
    List<NestedMessage> nestedList = proto.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("one");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("two");
    assertThat(proto.getRepeatedMessage(0).getPayload()).isEqualTo("one");
    assertThat(proto.getRepeatedMessage(1).getPayload()).isEqualTo("two");
  }
  // ---- Builder-typed overloads of setters/adders ----
  @Test
  public void testOptionalField_setBuilder() {
    TestProto startProto =
        TestProto.newBuilder()
            .setOptionalMessage(NestedMessage.newBuilder().setPayload("one"))
            .build();
    assertThat(startProto.getOptionalMessage().getPayload()).isEqualTo("one");
  }
  @Test
  public void testRepeatedField_addWithBuilder() {
    TestProto.Builder builder =
        TestProto.newBuilder()
            .addRepeatedMessage(TestProto.NestedMessage.newBuilder().setPayload("one"))
            .addRepeatedMessage(TestProto.NestedMessage.newBuilder().setPayload("two"));
    List<NestedMessage> nestedList = builder.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("one");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("two");
    TestProto proto = builder.build();
    nestedList = proto.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("one");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("two");
  }
  @Test
  public void testRepeatedField_setWithBuilder() {
    TestProto.Builder builder =
        TestProto.newBuilder()
            .addRepeatedMessage(TestProto.NestedMessage.newBuilder().setPayload("one"))
            .addRepeatedMessage(TestProto.NestedMessage.newBuilder().setPayload("two"));
    builder.setRepeatedMessage(0, TestProto.NestedMessage.newBuilder().setPayload("three"));
    builder.setRepeatedMessage(1, TestProto.NestedMessage.newBuilder().setPayload("four"));
    List<NestedMessage> nestedList = builder.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("three");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("four");
    TestProto proto = builder.build();
    nestedList = proto.getRepeatedMessageList();
    assertThat(nestedList).hasSize(2);
    assertThat(nestedList.get(0).getPayload()).isEqualTo("three");
    assertThat(nestedList.get(1).getPayload()).isEqualTo("four");
  }
  @Test
  public void testRepeatedField_getReturnsImmutableList() {
    // The list view exposed by the builder must reject structural mutation.
    TestProto.Builder builder =
        TestProto.newBuilder()
            .addRepeatedMessage(TestProto.NestedMessage.newBuilder().setPayload("one"));
    List<NestedMessage> nestedList = builder.getRepeatedMessageList();
    assertThrows(
        Exception.class,
        () -> nestedList.add(TestProto.NestedMessage.newBuilder().setPayload("one").build()));
    assertThrows(Exception.class, () -> nestedList.remove(0));
  }
}
|
|
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.*;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
 * Fetches news articles from the Bing News API, parses their full content via
 * the Mercury parser service, and persists them to a local SQLite database
 * ({@code main.db}).
 */
public class ArticleFetcher {
    /** Retrieves and stores current articles for every supported language. */
    public static void retrieveAllArticles() {
        for (Language language : Language.values()) {
            retrieveArticles(language);
        }
    }

    /**
     * Retrieves the current news articles for the given language's market and
     * stores any article that is not already in the database.
     *
     * @param language the language whose Bing market should be queried
     */
    public static void retrieveArticles(Language language) {
        JSONObject articlesData;
        try {
            URL url = new URL("https://api.cognitive.microsoft.com/bing/v5.0/news?mkt=" + language.getMarketCode());
            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Ocp-Apim-Subscription-Key", Keys.getBingSearchKey());
            articlesData = new JSONObject(readResponse(connection));
        } catch (IOException e) {
            e.printStackTrace();
            return;
        }
        JSONArray retrievedArticles = articlesData.getJSONArray("value");
        for (int i = 0; i < retrievedArticles.length(); i++) {
            JSONObject retrievedArticle = retrievedArticles.getJSONObject(i);
            String urlString = retrievedArticle.getString("url");
            try {
                URL url = new URL(urlString);
                Article article = lookupArticle(url);
                if (article == null) {
                    article = parseArticle(url, language);
                    if (article == null) {
                        // Parsing failed (already logged); skip instead of
                        // dereferencing null below.
                        continue;
                    }
                    storeArticle(article);
                }
                System.out.println(article.title);
            } catch (MalformedURLException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Parses an article's content through the Mercury parser service.
     *
     * @param articleURL the article to parse
     * @param language the language the article is written in
     * @return the parsed article, or {@code null} if the HTTP request failed
     */
    public static Article parseArticle(URL articleURL, Language language) {
        JSONObject articleData;
        try {
            String encodedArticleURL = URLEncoder.encode(articleURL.toString(), "UTF-8");
            URL url = new URL("http://mercury.postlight.com/parser?url=" + encodedArticleURL);
            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Content-Type", "application/json");
            connection.setRequestProperty("x-api-key", Keys.getMercuryKey());
            articleData = new JSONObject(readResponse(connection));
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
        String title = articleData.getString("title");
        URL imageURL = null;
        try {
            imageURL = new URL(articleData.getString("lead_image_url"));
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        String content = articleData.getString("content");
        String author = null;
        if (!articleData.isNull("author")) {
            author = articleData.getString("author");
        }
        // Mercury reports dates as ISO-8601 with milliseconds, e.g.
        // "2017-01-01T12:00:00.000Z".
        DateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        Date publishedDate = null;
        if (!articleData.isNull("date_published")) {
            try {
                publishedDate = format.parse(articleData.getString("date_published"));
            } catch (ParseException e) {
                e.printStackTrace();
            }
        }
        return new Article(articleURL, title, imageURL, content, author, publishedDate, language);
    }

    /**
     * Looks up an article by URL in the local database.
     *
     * <p>TODO(review): this method only scans the matching rows and always
     * returns {@code null}, so callers currently re-parse and re-store every
     * article. Materialize an {@link Article} from the result set once the
     * row-to-object mapping is defined.
     *
     * @param url the canonical article URL
     * @return the stored article, or {@code null} if none was found
     */
    public static Article lookupArticle(URL url) {
        try {
            Class.forName("org.sqlite.JDBC");
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            return null;
        }
        // Parameterized query: never concatenate external values into SQL.
        String sql = "SELECT * FROM ARTICLES WHERE URL = ?";
        try (Connection c = DriverManager.getConnection("jdbc:sqlite:main.db");
             PreparedStatement stmt = c.prepareStatement(sql)) {
            stmt.setString(1, url.toString());
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    int id = rs.getInt("ID");
                    System.out.println(id);
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Inserts the article into the ARTICLES table. Optional columns (image
     * URL, content, author, publish date) are only written when present.
     *
     * @param article the article to persist
     */
    public static void storeArticle(Article article) {
        try {
            Class.forName("org.sqlite.JDBC");
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            return;
        }
        List<String> keys = new ArrayList<>();
        List<String> values = new ArrayList<>();
        keys.add("URL");
        values.add(article.url.toString());
        keys.add("TITLE");
        values.add(article.title);
        if (article.imageURL != null) {
            keys.add("IMAGEURL");
            values.add(article.imageURL.toString());
        }
        if (article.content != null) {
            keys.add("CONTENT");
            values.add(article.content);
        }
        if (article.author != null) {
            keys.add("AUTHOR");
            values.add(article.author);
        }
        if (article.publishedDate != null) {
            keys.add("PUBLISH_DATE");
            values.add(article.publishedDate.toString());
        }
        keys.add("LANGUAGE");
        values.add(article.language.getISO839_1());
        // Build "COL1, COL2, ..." and a matching "?, ?, ..." placeholder list;
        // values are bound as parameters so no manual quote-escaping is needed.
        StringBuilder keysString = new StringBuilder();
        StringBuilder placeholders = new StringBuilder();
        for (int i = 0; i < keys.size(); i++) {
            if (i > 0) {
                keysString.append(", ");
                placeholders.append(", ");
            }
            keysString.append(keys.get(i));
            placeholders.append("?");
        }
        String sql = "INSERT INTO ARTICLES (" + keysString + ") " +
                "VALUES (" + placeholders + ");";
        try (Connection c = DriverManager.getConnection("jdbc:sqlite:main.db");
             PreparedStatement stmt = c.prepareStatement(sql)) {
            for (int i = 0; i < values.size(); i++) {
                stmt.setString(i + 1, values.get(i));
            }
            stmt.executeUpdate();
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println("Attempted statement: " + sql);
        }
    }

    /**
     * Reads the connection's response body to a string.
     *
     * @throws IOException if the connection cannot be read
     */
    private static String readResponse(HttpURLConnection connection) throws IOException {
        StringBuilder response = new StringBuilder();
        try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                response.append(line);
            }
        }
        return response.toString();
    }
}
|
|
package jenkins.python.expoint;
import com.infradna.tool.bridge_method_injector.WithBridgeMethods;
import hudson.Extension;
import hudson.ExtensionPoint;
import hudson.FilePath;
import hudson.FileSystemProvisioner;
import hudson.Launcher;
import hudson.model.Descriptor.FormException;
import hudson.model.Queue.Task;
import hudson.model.labels.LabelAtom;
import hudson.model.queue.CauseOfBlockage;
import hudson.node_monitors.NodeMonitor;
import hudson.remoting.VirtualChannel;
import hudson.security.ACL;
import hudson.security.AccessControlled;
import hudson.security.Permission;
import hudson.slaves.ComputerListener;
import hudson.slaves.NodeDescriptor;
import hudson.slaves.NodeProperty;
import hudson.slaves.NodePropertyDescriptor;
import hudson.slaves.OfflineCause;
import hudson.util.ClockDifference;
import hudson.util.DescribableList;
import hudson.util.EnumConverter;
import hudson.util.TagCloud;
import hudson.util.TagCloud.WeightFunction;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.List;
import java.util.logging.Logger;
import javax.annotation.CheckForNull;
import jenkins.model.Jenkins;
import jenkins.util.io.OnMaster;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.BindInterceptor;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.export.ExportedBean;
import org.kohsuke.stapler.export.Exported;
import hudson.model.*;
import hudson.model.Node.*;
import hudson.search.SearchFactory;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import hudson.search.SearchableModelObject;
import hudson.search.Search;
import hudson.search.SearchIndexBuilder;
import hudson.search.SearchIndex;
import org.kohsuke.stapler.interceptor.RequirePOST;
import hudson.model.AbstractModelObject.*;
import jenkins.python.DataConvertor;
import jenkins.python.PythonExecutor;
/**
 * This class was automatically generated by the PWM tool on 2014/03/21.
 *
 * <p>Bridges {@link hudson.model.Node} to a Python implementation: each Java
 * method is forwarded to a correspondingly named Python function through a
 * lazily created {@link PythonExecutor}. Abstract {@code Node} methods must be
 * implemented on the Python side; overridable methods fall back to the Java
 * superclass when no Python override is registered.
 * @see hudson.model.Node
 */
public abstract class NodePW extends Node {
    // Bridge to the Python implementation; created lazily on first use and
    // transient so it is not persisted with the node configuration.
    private transient PythonExecutor pexec;
    /**
     * Creates and configures the Python executor on first call.
     * Registers (1) the abstract Java methods the Python side MUST implement
     * and (2) the optional overrides; the index of each entry in
     * {@code functions}/{@code argsCount} is the id later passed to
     * {@code pexec.isImplemented(i)} by the dispatch methods below, so the
     * array order must not change.
     */
    private void initPython() {
        if (pexec == null) {
            pexec = new PythonExecutor(this);
            // Abstract Java methods the Python delegate must implement.
            String[] jMethods = new String[13];
            jMethods[0] = "getNodeName";
            jMethods[1] = "setNodeName";
            jMethods[2] = "getNodeDescription";
            jMethods[3] = "createLauncher";
            jMethods[4] = "getNumExecutors";
            jMethods[5] = "getMode";
            jMethods[6] = "createComputer";
            jMethods[7] = "getLabelString";
            jMethods[8] = "getWorkspaceFor";
            jMethods[9] = "getRootPath";
            jMethods[10] = "getNodeProperties";
            jMethods[11] = "getDescriptor";
            jMethods[12] = "getClockDifference";
            // Python-side function names, same indices as jMethods.
            String[] pFuncs = new String[13];
            pFuncs[0] = "get_node_name";
            pFuncs[1] = "set_node_name";
            pFuncs[2] = "get_node_description";
            pFuncs[3] = "create_launcher";
            pFuncs[4] = "get_num_executors";
            pFuncs[5] = "get_mode";
            pFuncs[6] = "create_computer";
            pFuncs[7] = "get_label_string";
            pFuncs[8] = "get_workspace_for";
            pFuncs[9] = "get_root_path";
            pFuncs[10] = "get_node_properties";
            pFuncs[11] = "get_descriptor";
            pFuncs[12] = "get_clock_difference";
            // Parameter types of each abstract method (empty array = no args).
            Class[][] argTypes = new Class[13][];
            argTypes[0] = new Class[0];
            argTypes[1] = new Class[1];
            argTypes[1][0] = String.class;
            argTypes[2] = new Class[0];
            argTypes[3] = new Class[1];
            argTypes[3][0] = TaskListener.class;
            argTypes[4] = new Class[0];
            argTypes[5] = new Class[0];
            argTypes[6] = new Class[0];
            argTypes[7] = new Class[0];
            argTypes[8] = new Class[1];
            argTypes[8][0] = TopLevelItem.class;
            argTypes[9] = new Class[0];
            argTypes[10] = new Class[0];
            argTypes[11] = new Class[0];
            argTypes[12] = new Class[0];
            pexec.checkAbstrMethods(jMethods, pFuncs, argTypes);
            // Optional overrides: indices 7 and 8 are the two canTake
            // overloads (Task vs Queue.BuildableItem), distinguished by
            // argument count/position rather than name.
            String[] functions = new String[19];
            functions[0] = "get_display_name";
            functions[1] = "get_search_url";
            functions[2] = "is_hold_off_launch_until_save";
            functions[3] = "get_label_cloud";
            functions[4] = "get_assigned_labels";
            functions[5] = "set_label_string";
            functions[6] = "get_self_label";
            functions[7] = "can_take";
            functions[8] = "can_take";
            functions[9] = "create_path";
            functions[10] = "get_file_system_provisioner";
            functions[11] = "get_node_property_descriptors";
            functions[12] = "get_acl";
            functions[13] = "reconfigure";
            functions[14] = "get_description";
            functions[15] = "get_name";
            functions[16] = "make_search_index";
            functions[17] = "get_search";
            functions[18] = "get_search_name";
            // Number of arguments for each optional override (same indices).
            // NOTE(review): indices 14 ("get_description") and 15 ("get_name")
            // are registered but never dispatched in this class — presumably
            // consumed elsewhere by the PWM-generated hierarchy; verify
            // against the generator output.
            int[] argsCount = new int[19];
            argsCount[0] = 0;
            argsCount[1] = 0;
            argsCount[2] = 0;
            argsCount[3] = 0;
            argsCount[4] = 0;
            argsCount[5] = 1;
            argsCount[6] = 0;
            argsCount[7] = 1;
            argsCount[8] = 1;
            argsCount[9] = 1;
            argsCount[10] = 0;
            argsCount[11] = 0;
            argsCount[12] = 0;
            argsCount[13] = 2;
            argsCount[14] = 0;
            argsCount[15] = 0;
            argsCount[16] = 0;
            argsCount[17] = 0;
            argsCount[18] = 0;
            pexec.registerFunctions(functions, argsCount);
        }
    }
    // ---- Abstract Node methods: always delegated to the Python side. ----
    @Override
    @Exported(visibility = 999)
    public String getNodeName() {
        initPython();
        return (String) pexec.execPython("get_node_name");
    }
    @Override
    public void setNodeName(String name) {
        initPython();
        pexec.execPythonVoid("set_node_name", name);
    }
    @Override
    @Exported
    public String getNodeDescription() {
        initPython();
        return (String) pexec.execPython("get_node_description");
    }
    @Override
    public Launcher createLauncher(TaskListener listener) {
        initPython();
        return (Launcher) pexec.execPython("create_launcher", listener);
    }
    @Override
    @Exported
    public int getNumExecutors() {
        initPython();
        return pexec.execPythonInt("get_num_executors");
    }
    @Override
    @Exported
    public Mode getMode() {
        initPython();
        return (Mode) pexec.execPython("get_mode");
    }
    @Override
    public Computer createComputer() {
        initPython();
        return (Computer) pexec.execPython("create_computer");
    }
    @Override
    public String getLabelString() {
        initPython();
        return (String) pexec.execPython("get_label_string");
    }
    @Override
    public FilePath getWorkspaceFor(TopLevelItem item) {
        initPython();
        return (FilePath) pexec.execPython("get_workspace_for", item);
    }
    @Override
    public FilePath getRootPath() {
        initPython();
        return (FilePath) pexec.execPython("get_root_path");
    }
    @Override
    public DescribableList<NodeProperty<?>, NodePropertyDescriptor> getNodeProperties() {
        initPython();
        return (DescribableList) pexec.execPython("get_node_properties");
    }
    @Override
    public NodeDescriptor getDescriptor() {
        initPython();
        return (NodeDescriptor) pexec.execPython("get_descriptor");
    }
    @Override
    public ClockDifference getClockDifference() throws IOException, InterruptedException {
        initPython();
        return (ClockDifference) pexec.execPython("get_clock_difference");
    }
    // ---- Optional overrides: delegated to Python only when the Python side
    // implements the function (looked up by registration index); otherwise
    // the Java superclass implementation runs. ----
    @Override
    public String getDisplayName() {
        initPython();
        if (pexec.isImplemented(0)) {
            return (String) pexec.execPython("get_display_name");
        } else {
            return super.getDisplayName();
        }
    }
    @Override
    public String getSearchUrl() {
        initPython();
        if (pexec.isImplemented(1)) {
            return (String) pexec.execPython("get_search_url");
        } else {
            return super.getSearchUrl();
        }
    }
    @Override
    public boolean isHoldOffLaunchUntilSave() {
        initPython();
        if (pexec.isImplemented(2)) {
            return pexec.execPythonBool("is_hold_off_launch_until_save");
        } else {
            return super.isHoldOffLaunchUntilSave();
        }
    }
    @Override
    public TagCloud<LabelAtom> getLabelCloud() {
        initPython();
        if (pexec.isImplemented(3)) {
            return (TagCloud) pexec.execPython("get_label_cloud");
        } else {
            return super.getLabelCloud();
        }
    }
    @Override
    @Exported
    public Set<LabelAtom> getAssignedLabels() {
        initPython();
        if (pexec.isImplemented(4)) {
            return (Set) pexec.execPython("get_assigned_labels");
        } else {
            return super.getAssignedLabels();
        }
    }
    @Override
    public void setLabelString(String labelString) throws IOException {
        initPython();
        if (pexec.isImplemented(5)) {
            pexec.execPythonVoid("set_label_string", labelString);
        } else {
            super.setLabelString(labelString);
        }
    }
    @Override
    @WithBridgeMethods(Label.class)
    public LabelAtom getSelfLabel() {
        initPython();
        if (pexec.isImplemented(6)) {
            return (LabelAtom) pexec.execPython("get_self_label");
        } else {
            return super.getSelfLabel();
        }
    }
    @Override
    public CauseOfBlockage canTake(Task task) {
        initPython();
        if (pexec.isImplemented(7)) {
            return (CauseOfBlockage) pexec.execPython("can_take", task);
        } else {
            return super.canTake(task);
        }
    }
    @Override
    public CauseOfBlockage canTake(Queue.BuildableItem item) {
        initPython();
        if (pexec.isImplemented(8)) {
            return (CauseOfBlockage) pexec.execPython("can_take", item);
        } else {
            return super.canTake(item);
        }
    }
    @Override
    public @CheckForNull
    FilePath createPath(String absolutePath) {
        initPython();
        if (pexec.isImplemented(9)) {
            return (FilePath) pexec.execPython("create_path", absolutePath);
        } else {
            return super.createPath(absolutePath);
        }
    }
    @Override
    public FileSystemProvisioner getFileSystemProvisioner() {
        initPython();
        if (pexec.isImplemented(10)) {
            return (FileSystemProvisioner) pexec.execPython("get_file_system_provisioner");
        } else {
            return super.getFileSystemProvisioner();
        }
    }
    @Override
    public List<NodePropertyDescriptor> getNodePropertyDescriptors() {
        initPython();
        if (pexec.isImplemented(11)) {
            return (List) pexec.execPython("get_node_property_descriptors");
        } else {
            return super.getNodePropertyDescriptors();
        }
    }
    @Override
    public ACL getACL() {
        initPython();
        if (pexec.isImplemented(12)) {
            return (ACL) pexec.execPython("get_acl");
        } else {
            return super.getACL();
        }
    }
    @Override
    public Node reconfigure(final StaplerRequest req, JSONObject form) throws FormException {
        initPython();
        if (pexec.isImplemented(13)) {
            return (Node) pexec.execPython("reconfigure", req, form);
        } else {
            return super.reconfigure(req, form);
        }
    }
    @Override
    public SearchIndexBuilder makeSearchIndex() {
        initPython();
        if (pexec.isImplemented(16)) {
            return (SearchIndexBuilder) pexec.execPython("make_search_index");
        } else {
            return super.makeSearchIndex();
        }
    }
    @Override
    public Search getSearch() {
        initPython();
        if (pexec.isImplemented(17)) {
            return (Search) pexec.execPython("get_search");
        } else {
            return super.getSearch();
        }
    }
    @Override
    public String getSearchName() {
        initPython();
        if (pexec.isImplemented(18)) {
            return (String) pexec.execPython("get_search_name");
        } else {
            return super.getSearchName();
        }
    }
    // ---- super* helpers: expose the Java superclass implementations so the
    // Python side can call them explicitly. ----
    public String superGetDisplayName() {
        return super.getDisplayName();
    }
    public String superGetSearchUrl() {
        return super.getSearchUrl();
    }
    public boolean superIsHoldOffLaunchUntilSave() {
        return super.isHoldOffLaunchUntilSave();
    }
    public TagCloud<LabelAtom> superGetLabelCloud() {
        return super.getLabelCloud();
    }
    public Set<LabelAtom> superGetAssignedLabels() {
        return super.getAssignedLabels();
    }
    public void superSetLabelString(String labelString) throws IOException {
        super.setLabelString(labelString);
    }
    public LabelAtom superGetSelfLabel() {
        return super.getSelfLabel();
    }
    public CauseOfBlockage superCanTake(Task task) {
        return super.canTake(task);
    }
    public CauseOfBlockage superCanTake(Queue.BuildableItem item) {
        return super.canTake(item);
    }
    public FilePath superCreatePath(String absolutePath) {
        return super.createPath(absolutePath);
    }
    public FileSystemProvisioner superGetFileSystemProvisioner() {
        return super.getFileSystemProvisioner();
    }
    public List<NodePropertyDescriptor> superGetNodePropertyDescriptors() {
        return super.getNodePropertyDescriptors();
    }
    public ACL superGetACL() {
        return super.getACL();
    }
    public Node superReconfigure(final StaplerRequest req, JSONObject form) throws FormException {
        return super.reconfigure(req, form);
    }
    public SearchIndexBuilder superMakeSearchIndex() {
        return super.makeSearchIndex();
    }
    public Search superGetSearch() {
        return super.getSearch();
    }
    public String superGetSearchName() {
        return super.getSearchName();
    }
    // ---- Generic dispatch helpers: call an arbitrary Python function with
    // the given arguments, one variant per primitive return type. ----
    public Object execPython(String function, Object... params) {
        initPython();
        return pexec.execPython(function, params);
    }
    public byte execPythonByte(String function, Object... params) {
        initPython();
        return pexec.execPythonByte(function, params);
    }
    public short execPythonShort(String function, Object... params) {
        initPython();
        return pexec.execPythonShort(function, params);
    }
    public char execPythonChar(String function, Object... params) {
        initPython();
        return pexec.execPythonChar(function, params);
    }
    public int execPythonInt(String function, Object... params) {
        initPython();
        return pexec.execPythonInt(function, params);
    }
    public long execPythonLong(String function, Object... params) {
        initPython();
        return pexec.execPythonLong(function, params);
    }
    public float execPythonFloat(String function, Object... params) {
        initPython();
        return pexec.execPythonFloat(function, params);
    }
    public double execPythonDouble(String function, Object... params) {
        initPython();
        return pexec.execPythonDouble(function, params);
    }
    public boolean execPythonBool(String function, Object... params) {
        initPython();
        return pexec.execPythonBool(function, params);
    }
    public void execPythonVoid(String function, Object... params) {
        initPython();
        pexec.execPythonVoid(function, params);
    }
}
|
|
/**
* Opensec OVAL - https://nakamura5akihito.github.io/
* Copyright (C) 2015 Akihito Nakamura
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opensec.oval.model.windows;
import io.opensec.oval.model.ComponentType;
import io.opensec.oval.model.ElementRef;
import io.opensec.oval.model.Family;
import io.opensec.oval.model.definitions.EntityObjectStringType;
import io.opensec.oval.model.definitions.Filter;
import io.opensec.oval.model.definitions.Set;
import io.opensec.oval.model.definitions.SystemObjectType;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
/**
 * The user object represents a user on a Windows system.
 *
 * @author Akihito Nakamura, AIST
 * @see <a href="http://oval.mitre.org/language/">OVAL Language</a>
 */
public class UserObject
    extends SystemObjectType
{

    // XSD content model:
    //   choice( set | sequence( user, filter{0..*} ) )
    private Set set;                                                  // {1..1}
    private EntityObjectStringType user;                              // {1..1}
    private final Collection<Filter> filter = new ArrayList<Filter>();// {0..*}

    /**
     * Constructor.
     */
    public UserObject()
    {
        this( null, 0 );
    }

    public UserObject(
                    final String id,
                    final int version
                    )
    {
        super( id, version );
        _oval_family = Family.WINDOWS;
        _oval_component = ComponentType.USER;
    }

    /**
     * Sets the set element.
     */
    public void setSet( final Set set )
    {
        this.set = set;
    }

    public Set getSet()
    {
        return set;
    }

    /**
     * Replaces the filters with the given collection (null means empty).
     */
    public void setFilter( final Collection<? extends Filter> filters )
    {
        if (filters == filter) {
            // Self-assignment: clearing first would lose the elements.
            return;
        }
        filter.clear();
        if (filters != null) {
            filter.addAll( filters );
        }
    }

    public boolean addFilter( final Filter filter )
    {
        // Null filters are silently rejected.
        return (filter != null)  &&  this.filter.add( filter );
    }

    public Collection<Filter> getFilter()
    {
        return filter;
    }

    public Iterator<Filter> iterateFilter()
    {
        return filter.iterator();
    }

    //*********************************************************************
    //  DefinitionsElement
    //*********************************************************************

    @Override
    public Collection<ElementRef> ovalGetElementRef()
    {
        final Collection<ElementRef> refs = new ArrayList<ElementRef>();
        refs.add( getUser() );
        refs.addAll( getFilter() );
        return refs;
    }

    /**
     * Sets the user element.
     */
    public void setUser( final EntityObjectStringType user )
    {
        this.user = user;
    }

    public EntityObjectStringType getUser()
    {
        return user;
    }

    //**************************************************************
    //  java.lang.Object
    //**************************************************************

    @Override
    public int hashCode()
    {
        return super.hashCode();
    }

    @Override
    public boolean equals( final Object obj )
    {
        return (obj instanceof UserObject)  &&  super.equals( obj );
    }

    @Override
    public String toString()
    {
        return "user_object[" + super.toString()
                        + ", set=" + getSet()
                        + ", user=" + getUser()
                        + ", filter=" + getFilter()
                        + "]";
    }
}
//UserObject
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.github.jobson.utils;
import net.sf.jsqlparser.expression.*;
import net.sf.jsqlparser.expression.operators.arithmetic.*;
import net.sf.jsqlparser.expression.operators.conditional.AndExpression;
import net.sf.jsqlparser.expression.operators.conditional.OrExpression;
import net.sf.jsqlparser.expression.operators.relational.*;
import net.sf.jsqlparser.schema.Column;
import net.sf.jsqlparser.statement.*;
import net.sf.jsqlparser.statement.alter.Alter;
import net.sf.jsqlparser.statement.alter.sequence.AlterSequence;
import net.sf.jsqlparser.statement.comment.Comment;
import net.sf.jsqlparser.statement.create.index.CreateIndex;
import net.sf.jsqlparser.statement.create.schema.CreateSchema;
import net.sf.jsqlparser.statement.create.sequence.CreateSequence;
import net.sf.jsqlparser.statement.create.table.CreateTable;
import net.sf.jsqlparser.statement.create.view.AlterView;
import net.sf.jsqlparser.statement.create.view.CreateView;
import net.sf.jsqlparser.statement.delete.Delete;
import net.sf.jsqlparser.statement.drop.Drop;
import net.sf.jsqlparser.statement.execute.Execute;
import net.sf.jsqlparser.statement.grant.Grant;
import net.sf.jsqlparser.statement.insert.Insert;
import net.sf.jsqlparser.statement.merge.Merge;
import net.sf.jsqlparser.statement.replace.Replace;
import net.sf.jsqlparser.statement.select.*;
import net.sf.jsqlparser.statement.truncate.Truncate;
import net.sf.jsqlparser.statement.update.Update;
import net.sf.jsqlparser.statement.upsert.Upsert;
import net.sf.jsqlparser.statement.values.ValuesStatement;
import java.util.HashSet;
import java.util.Set;
/**
 * Walks a parsed SQL statement and collects the names of every column it
 * references. Only a small subset of SQL is supported: a plain
 * {@code SELECT} whose items and {@code WHERE} clause use simple boolean
 * and comparison operators over columns and literals. Any other construct
 * throws {@link UnsupportedSQLFeatureException}.
 *
 * <p>Fix: {@code >=}, {@code <=} and {@code !=} previously threw even
 * though {@code >}, {@code <} and {@code =} were supported; all six
 * comparison operators are now traversed consistently.</p>
 *
 * <p>Not thread-safe: {@code columns} is reset per call.</p>
 */
public final class AllColumnRefsFinder implements StatementVisitor, SelectVisitor, SelectItemVisitor, ExpressionVisitor {

    // Accumulator for the current traversal; (re)created by
    // getAllColumnNamesThatAppearIn and filled by visit(Column).
    private Set<String> columns;

    /**
     * Returns the set of column names appearing anywhere in the statement.
     *
     * @throws UnsupportedSQLFeatureException if the statement uses an
     *         unsupported SQL feature.
     */
    public Set<String> getAllColumnNamesThatAppearIn(Statement statement) {
        this.columns = new HashSet<>();
        statement.accept(this);
        return this.columns;
    }

    public void visit(Select select) {
        select.getSelectBody().accept(this);
    }

    public void visit(PlainSelect plainSelect) {
        for (SelectItem selectItem : plainSelect.getSelectItems()) {
            selectItem.accept(this);
        }
        if (plainSelect.getWhere() != null)
            plainSelect.getWhere().accept(this);
    }

    public void visit(SelectExpressionItem selectExpressionItem) {
        selectExpressionItem.getExpression().accept(this);
    }

    public void visit(AndExpression andExpression) {
        andExpression.getLeftExpression().accept(this);
        andExpression.getRightExpression().accept(this);
    }

    public void visit(OrExpression orExpression) {
        orExpression.getLeftExpression().accept(this);
        orExpression.getRightExpression().accept(this);
    }

    public void visit(EqualsTo equalsTo) {
        equalsTo.getLeftExpression().accept(this);
        equalsTo.getRightExpression().accept(this);
    }

    public void visit(GreaterThan greaterThan) {
        greaterThan.getLeftExpression().accept(this);
        greaterThan.getRightExpression().accept(this);
    }

    // Fixed: traverse like GreaterThan instead of throwing, so "a >= 1" works.
    public void visit(GreaterThanEquals greaterThanEquals) {
        greaterThanEquals.getLeftExpression().accept(this);
        greaterThanEquals.getRightExpression().accept(this);
    }

    public void visit(MinorThan minorThan) {
        minorThan.getLeftExpression().accept(this);
        minorThan.getRightExpression().accept(this);
    }

    // Fixed: traverse like MinorThan instead of throwing, so "a <= 1" works.
    public void visit(MinorThanEquals minorThanEquals) {
        minorThanEquals.getLeftExpression().accept(this);
        minorThanEquals.getRightExpression().accept(this);
    }

    // Fixed: traverse like EqualsTo instead of throwing, so "a != 1" works.
    public void visit(NotEqualsTo notEqualsTo) {
        notEqualsTo.getLeftExpression().accept(this);
        notEqualsTo.getRightExpression().accept(this);
    }

    // A column reference: record its name.
    public void visit(Column column) {
        this.columns.add(column.getColumnName());
    }

    // These are blank because they are supported (and therefore shouldn't throw)
    // but do not contribute toward finding the column names.
    public void visit(AllColumns allColumns) {
    }

    public void visit(AllTableColumns allTableColumns) {
    }

    public void visit(Function function) {
    }

    public void visit(LongValue longValue) {
    }

    public void visit(SetOperationList setOperationList) {
    }

    @Override
    public void visit(BitwiseRightShift bitwiseRightShift) {
    }

    @Override
    public void visit(BitwiseLeftShift bitwiseLeftShift) {
    }

    public void visit(NullValue nullValue) {
    }

    public void visit(DoubleValue doubleValue) {
    }

    public void visit(StringValue stringValue) {
    }

    public void visit(InExpression inExpression) {
    }

    @Override
    public void visit(FullTextSearch fullTextSearch) {
    }

    // Unimplemented SQL features
    public void visit(SignedExpression signedExpression) {
        throw new UnsupportedSQLFeatureException("Feature SignedExpression not supported");
    }

    public void visit(JdbcParameter jdbcParameter) {
        throw new UnsupportedSQLFeatureException("Feature JdbcParameter not supported");
    }

    public void visit(JdbcNamedParameter jdbcNamedParameter) {
        throw new UnsupportedSQLFeatureException("Feature JdbcNamedParameter not supported");
    }

    public void visit(HexValue hexValue) {
        throw new UnsupportedSQLFeatureException("Feature HexValue not supported");
    }

    public void visit(DateValue dateValue) {
        throw new UnsupportedSQLFeatureException("Feature DateValue not supported");
    }

    public void visit(TimeValue timeValue) {
        throw new UnsupportedSQLFeatureException("Feature TimeValue not supported");
    }

    public void visit(TimestampValue timestampValue) {
        throw new UnsupportedSQLFeatureException("Feature TimestampValue not supported");
    }

    public void visit(Parenthesis parenthesis) {
        throw new UnsupportedSQLFeatureException("Feature Parenthesis not supported");
    }

    public void visit(Addition addition) {
        throw new UnsupportedSQLFeatureException("Feature Addition not supported");
    }

    public void visit(Division division) {
        throw new UnsupportedSQLFeatureException("Feature Division not supported");
    }

    @Override
    public void visit(IntegerDivision integerDivision) {
        throw new UnsupportedSQLFeatureException("Feature IntegerDivision not supported");
    }

    public void visit(Multiplication multiplication) {
        throw new UnsupportedSQLFeatureException("Feature Multiplication not supported");
    }

    public void visit(Subtraction subtraction) {
        throw new UnsupportedSQLFeatureException("Feature Subtraction not supported");
    }

    public void visit(Between between) {
        throw new UnsupportedSQLFeatureException("Feature Between not supported");
    }

    public void visit(IsNullExpression isNullExpression) {
        throw new UnsupportedSQLFeatureException("Feature IsNullExpression not supported");
    }

    @Override
    public void visit(IsBooleanExpression isBooleanExpression) {
        throw new UnsupportedSQLFeatureException("Feature IsBooleanExpression not supported");
    }

    public void visit(LikeExpression likeExpression) {
        throw new UnsupportedSQLFeatureException("Feature LikeExpression not supported");
    }

    public void visit(WithItem withItem) {
        throw new UnsupportedSQLFeatureException("Feature WithItem not supported");
    }

    @Override
    public void visit(Comment comment) {
    }

    @Override
    public void visit(Commit commit) {
        throw new UnsupportedSQLFeatureException("Feature Commit not supported");
    }

    public void visit(Delete delete) {
        throw new UnsupportedSQLFeatureException("Feature Delete not supported");
    }

    public void visit(Update update) {
        throw new UnsupportedSQLFeatureException("Feature Update not supported");
    }

    public void visit(Insert insert) {
        throw new UnsupportedSQLFeatureException("Feature Insert not supported");
    }

    public void visit(Replace replace) {
        throw new UnsupportedSQLFeatureException("Feature Replace not supported");
    }

    public void visit(Drop drop) {
        throw new UnsupportedSQLFeatureException("Feature Drop not supported");
    }

    public void visit(Truncate truncate) {
        throw new UnsupportedSQLFeatureException("Feature Truncate not supported");
    }

    public void visit(CreateIndex createIndex) {
        throw new UnsupportedSQLFeatureException("Feature CreateIndex not supported");
    }

    @Override
    public void visit(CreateSchema createSchema) {
        throw new UnsupportedSQLFeatureException("Feature CreateSchema not supported");
    }

    public void visit(CreateTable createTable) {
        throw new UnsupportedSQLFeatureException("Feature CreateTable not supported");
    }

    public void visit(CreateView createView) {
        throw new UnsupportedSQLFeatureException("Feature CreateView not supported");
    }

    public void visit(AlterView alterView) {
        throw new UnsupportedSQLFeatureException("Feature AlterView not supported");
    }

    public void visit(Alter alter) {
        throw new UnsupportedSQLFeatureException("Feature Alter not supported");
    }

    public void visit(Statements statements) {
        throw new UnsupportedSQLFeatureException("Feature Statements not supported");
    }

    public void visit(Execute execute) {
        throw new UnsupportedSQLFeatureException("Feature Execute not supported");
    }

    public void visit(SetStatement setStatement) {
        throw new UnsupportedSQLFeatureException("Feature SetStatement not supported");
    }

    @Override
    public void visit(ShowColumnsStatement showColumnsStatement) {
        throw new UnsupportedSQLFeatureException("Feature ShowColumnsStatement not supported");
    }

    public void visit(Merge merge) {
        throw new UnsupportedSQLFeatureException("Feature Merge not supported");
    }

    public void visit(SubSelect subSelect) {
        throw new UnsupportedSQLFeatureException("Feature SubSelect not supported");
    }

    public void visit(CaseExpression caseExpression) {
        throw new UnsupportedSQLFeatureException("Feature CaseExpression not supported");
    }

    public void visit(WhenClause whenClause) {
        throw new UnsupportedSQLFeatureException("Feature WhenClause not supported");
    }

    public void visit(ExistsExpression existsExpression) {
        throw new UnsupportedSQLFeatureException("Feature ExistsExpression not supported");
    }

    public void visit(AllComparisonExpression allComparisonExpression) {
        throw new UnsupportedSQLFeatureException("Feature AllComparisonExpression not supported");
    }

    public void visit(AnyComparisonExpression anyComparisonExpression) {
        throw new UnsupportedSQLFeatureException("Feature AnyComparisonExpression not supported");
    }

    public void visit(Concat concat) {
        throw new UnsupportedSQLFeatureException("Feature Concat not supported");
    }

    public void visit(Matches matches) {
        throw new UnsupportedSQLFeatureException("Feature Matches not supported");
    }

    public void visit(BitwiseAnd bitwiseAnd) {
        throw new UnsupportedSQLFeatureException("Feature BitwiseAnd not supported");
    }

    public void visit(BitwiseOr bitwiseOr) {
        throw new UnsupportedSQLFeatureException("Feature BitwiseOr not supported");
    }

    public void visit(BitwiseXor bitwiseXor) {
        throw new UnsupportedSQLFeatureException("Feature BitwiseXor not supported");
    }

    public void visit(CastExpression castExpression) {
        throw new UnsupportedSQLFeatureException("Feature CastExpression not supported");
    }

    public void visit(Modulo modulo) {
        throw new UnsupportedSQLFeatureException("Feature Modulo not supported");
    }

    public void visit(AnalyticExpression analyticExpression) {
        throw new UnsupportedSQLFeatureException("Feature AnalyticExpression not supported");
    }

    public void visit(ExtractExpression extractExpression) {
        throw new UnsupportedSQLFeatureException("Feature ExtractExpression not supported");
    }

    public void visit(IntervalExpression intervalExpression) {
        throw new UnsupportedSQLFeatureException("Feature IntervalExpression not supported");
    }

    public void visit(OracleHierarchicalExpression oracleHierarchicalExpression) {
        throw new UnsupportedSQLFeatureException("Feature OracleHierarchicalExpression not supported");
    }

    public void visit(RegExpMatchOperator regExpMatchOperator) {
        throw new UnsupportedSQLFeatureException("Feature RegExpMatchOperator not supported");
    }

    public void visit(JsonExpression jsonExpression) {
        throw new UnsupportedSQLFeatureException("Feature JsonExpression not supported");
    }

    @Override
    public void visit(JsonOperator jsonOperator) {
        throw new UnsupportedSQLFeatureException("Feature JsonOperator not supported");
    }

    public void visit(RegExpMySQLOperator regExpMySQLOperator) {
        throw new UnsupportedSQLFeatureException("Feature RegExpMySQLOperator not supported");
    }

    public void visit(UserVariable userVariable) {
        throw new UnsupportedSQLFeatureException("Feature UserVariable not supported");
    }

    public void visit(NumericBind numericBind) {
        throw new UnsupportedSQLFeatureException("Feature NumericBind not supported");
    }

    public void visit(KeepExpression keepExpression) {
        throw new UnsupportedSQLFeatureException("Feature KeepExpression not supported");
    }

    public void visit(MySQLGroupConcat mySQLGroupConcat) {
        throw new UnsupportedSQLFeatureException("Feature MySQLGroupConcat not supported");
    }

    @Override
    public void visit(ValueListExpression valueListExpression) {
    }

    public void visit(RowConstructor rowConstructor) {
        throw new UnsupportedSQLFeatureException("Feature RowConstructor not supported");
    }

    public void visit(OracleHint oracleHint) {
        throw new UnsupportedSQLFeatureException("Feature OracleHint not supported");
    }

    public void visit(TimeKeyExpression timeKeyExpression) {
        throw new UnsupportedSQLFeatureException("Feature TimeKeyExpression not supported");
    }

    public void visit(DateTimeLiteralExpression dateTimeLiteralExpression) {
        throw new UnsupportedSQLFeatureException("Feature DateTimeLiteralExpression not supported");
    }

    @Override
    public void visit(NotExpression notExpression) {
        throw new UnsupportedSQLFeatureException("Feature NotExpression not supported");
    }

    @Override
    public void visit(NextValExpression nextValExpression) {
        throw new UnsupportedSQLFeatureException("Feature NextValExpression not supported");
    }

    @Override
    public void visit(CollateExpression collateExpression) {
        throw new UnsupportedSQLFeatureException("Feature CollateExpression not supported");
    }

    @Override
    public void visit(SimilarToExpression similarToExpression) {
        throw new UnsupportedSQLFeatureException("Feature SimilarToExpression not supported");
    }

    @Override
    public void visit(ArrayExpression arrayExpression) {
        throw new UnsupportedSQLFeatureException("Feature ArrayExpression not supported");
    }

    @Override
    public void visit(Upsert upsert) {
        throw new UnsupportedSQLFeatureException("Feature Upsert not supported");
    }

    @Override
    public void visit(UseStatement useStatement) {
        throw new UnsupportedSQLFeatureException("Feature UseStatement not supported");
    }

    @Override
    public void visit(Block block) {
        throw new UnsupportedSQLFeatureException("Feature Block not supported");
    }

    @Override
    public void visit(ValuesStatement valuesStatement) {
        throw new UnsupportedSQLFeatureException("Feature ValuesStatement not supported");
    }

    @Override
    public void visit(DescribeStatement describeStatement) {
        throw new UnsupportedSQLFeatureException("Feature DescribeStatement not supported");
    }

    @Override
    public void visit(ExplainStatement explainStatement) {
        throw new UnsupportedSQLFeatureException("Feature ExplainStatement not supported");
    }

    @Override
    public void visit(ShowStatement showStatement) {
        throw new UnsupportedSQLFeatureException("Feature ShowStatement not supported");
    }

    @Override
    public void visit(DeclareStatement declareStatement) {
        throw new UnsupportedSQLFeatureException("Feature DeclareStatement not supported");
    }

    @Override
    public void visit(Grant grant) {
        throw new UnsupportedSQLFeatureException("Feature Grant not supported");
    }

    @Override
    public void visit(CreateSequence createSequence) {
        throw new UnsupportedSQLFeatureException("Feature CreateSequence not supported");
    }

    @Override
    public void visit(AlterSequence alterSequence) {
        throw new UnsupportedSQLFeatureException("Feature AlterSequence not supported");
    }

    @Override
    public void visit(CreateFunctionalStatement createFunctionalStatement) {
        throw new UnsupportedSQLFeatureException("Feature CreateFunctionalStatement not supported");
    }
}
|
|
package jadx.core.dex.visitors.regions;
import jadx.core.dex.attributes.AFlag;
import jadx.core.dex.attributes.AType;
import jadx.core.dex.attributes.nodes.DeclareVariablesAttr;
import jadx.core.dex.instructions.InsnType;
import jadx.core.dex.instructions.args.ArgType;
import jadx.core.dex.instructions.args.RegisterArg;
import jadx.core.dex.instructions.args.VarName;
import jadx.core.dex.nodes.IBlock;
import jadx.core.dex.nodes.IBranchRegion;
import jadx.core.dex.nodes.IContainer;
import jadx.core.dex.nodes.IRegion;
import jadx.core.dex.nodes.InsnNode;
import jadx.core.dex.nodes.MethodNode;
import jadx.core.dex.regions.loops.ForLoop;
import jadx.core.dex.regions.loops.LoopRegion;
import jadx.core.dex.regions.loops.LoopType;
import jadx.core.dex.visitors.AbstractVisitor;
import jadx.core.utils.RegionUtils;
import jadx.core.utils.exceptions.JadxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Decompiler pass that decides where each register-backed local variable
 * should be declared in the generated Java code.
 *
 * It collects, per variable, the regions where the variable is assigned and
 * used, then either marks the assigning instruction with
 * {@link AFlag#DECLARE_VAR} (declare-at-assignment) or attaches the variable
 * to the smallest enclosing region via {@link DeclareVariablesAttr}.
 */
public class ProcessVariables extends AbstractVisitor {
    private static final Logger LOG = LoggerFactory.getLogger(ProcessVariables.class);

    /**
     * Identity of a variable: register number plus inferred type.
     * Two uses of the same register with different types are distinct variables.
     */
    private static class Variable {
        private final int regNum;
        private final ArgType type;

        public Variable(RegisterArg arg) {
            this.regNum = arg.getRegNum();
            this.type = arg.getType();
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Variable variable = (Variable) o;
            return regNum == variable.regNum && type.equals(variable.type);
        }

        @Override
        public int hashCode() {
            return 31 * regNum + type.hashCode();
        }

        @Override
        public String toString() {
            return regNum + " " + type;
        }
    }

    /**
     * Per-variable usage record: the first assigning arg and its region,
     * plus the sets of regions where the variable is assigned and used.
     */
    private static class Usage {
        // first register arg seen as an assignment result for this variable
        private RegisterArg arg;
        // shared name holder so all SSA vars for this variable render the same name
        private VarName varName;
        // region containing the first assignment
        private IRegion argRegion;
        private final Set<IRegion> usage = new HashSet<IRegion>(2);
        private final Set<IRegion> assigns = new HashSet<IRegion>(2);

        public void setArg(RegisterArg arg) {
            this.arg = arg;
        }

        public RegisterArg getArg() {
            return arg;
        }

        public VarName getVarName() {
            return varName;
        }

        public void setVarName(VarName varName) {
            this.varName = varName;
        }

        public void setArgRegion(IRegion argRegion) {
            this.argRegion = argRegion;
        }

        public IRegion getArgRegion() {
            return argRegion;
        }

        public Set<IRegion> getAssigns() {
            return assigns;
        }

        public Set<IRegion> getUseRegions() {
            return usage;
        }

        @Override
        public String toString() {
            return arg + ", a:" + assigns + ", u:" + usage;
        }
    }

    /**
     * Region visitor that fills the usage map: records, per variable, the
     * regions where it is assigned (instruction results) and used (args).
     * Also handles the synthetic init/increment instructions of for-loops,
     * which live on the loop region rather than in a block.
     */
    private static class CollectUsageRegionVisitor extends TracedRegionVisitor {
        // scratch list reused across instructions to avoid reallocation
        private final List<RegisterArg> args;
        private final Map<Variable, Usage> usageMap;

        public CollectUsageRegionVisitor(Map<Variable, Usage> usageMap) {
            this.usageMap = usageMap;
            args = new ArrayList<RegisterArg>();
        }

        @Override
        public void processBlockTraced(MethodNode mth, IBlock container, IRegion curRegion) {
            regionProcess(mth, curRegion);
            int len = container.getInstructions().size();
            for (int i = 0; i < len; i++) {
                InsnNode insn = container.getInstructions().get(i);
                // instructions flagged SKIP are not emitted as code
                if (insn.contains(AFlag.SKIP)) {
                    continue;
                }
                args.clear();
                processInsn(insn, curRegion);
            }
        }

        // record the for-loop init/increment instructions against the loop region
        private void regionProcess(MethodNode mth, IRegion region) {
            if (region instanceof LoopRegion) {
                LoopRegion loopRegion = (LoopRegion) region;
                LoopType loopType = loopRegion.getType();
                if (loopType instanceof ForLoop) {
                    ForLoop forLoop = (ForLoop) loopType;
                    processInsn(forLoop.getInitInsn(), region);
                    processInsn(forLoop.getIncrInsn(), region);
                }
            }
        }

        // register the instruction's result as an assign and its args as uses
        void processInsn(InsnNode insn, IRegion curRegion) {
            if (insn == null) {
                return;
            }
            // result
            RegisterArg result = insn.getResult();
            if (result != null && result.isRegister()) {
                Usage u = addToUsageMap(result, usageMap);
                if (u.getArg() == null) {
                    u.setArg(result);
                    u.setArgRegion(curRegion);
                }
                u.getAssigns().add(curRegion);
            }
            // args
            args.clear();
            insn.getRegisterArgs(args);
            for (RegisterArg arg : args) {
                Usage u = addToUsageMap(arg, usageMap);
                u.getUseRegions().add(curRegion);
            }
        }
    }

    @Override
    public void visit(MethodNode mth) throws JadxException {
        if (mth.isNoCode()) {
            return;
        }
        final Map<Variable, Usage> usageMap = new LinkedHashMap<Variable, Usage>();
        // seed with method arguments so their VarNames get shared too
        for (RegisterArg arg : mth.getArguments(true)) {
            addToUsageMap(arg, usageMap);
        }
        // collect all variables usage
        IRegionVisitor collect = new CollectUsageRegionVisitor(usageMap);
        DepthRegionTraversal.traverse(mth, collect);
        // reduce assigns map
        // method arguments are declared by the signature, never in the body
        List<RegisterArg> mthArgs = mth.getArguments(true);
        for (RegisterArg arg : mthArgs) {
            usageMap.remove(new Variable(arg));
        }
        Iterator<Entry<Variable, Usage>> umIt = usageMap.entrySet().iterator();
        while (umIt.hasNext()) {
            Entry<Variable, Usage> entry = umIt.next();
            Usage u = entry.getValue();
            // if no assigns => remove
            if (u.getAssigns().isEmpty()) {
                umIt.remove();
                continue;
            }
            // variable declared at 'catch' clause
            InsnNode parentInsn = u.getArg().getParentInsn();
            if (parentInsn == null || parentInsn.getType() == InsnType.MOVE_EXCEPTION) {
                umIt.remove();
            }
        }
        if (usageMap.isEmpty()) {
            return;
        }
        // order regions so "declared before all uses" can be checked by position
        Map<IContainer, Integer> regionsOrder = new HashMap<IContainer, Integer>();
        calculateOrder(mth.getRegion(), regionsOrder, 0, true);
        for (Iterator<Entry<Variable, Usage>> it = usageMap.entrySet().iterator(); it.hasNext(); ) {
            Entry<Variable, Usage> entry = it.next();
            Usage u = entry.getValue();
            // check if variable can be declared at current assigns
            for (IRegion assignRegion : u.getAssigns()) {
                if (u.getArgRegion() == assignRegion
                        && canDeclareInRegion(u, assignRegion, regionsOrder)) {
                    if (declareAtAssign(u)) {
                        it.remove();
                        break;
                    }
                }
            }
        }
        if (usageMap.isEmpty()) {
            return;
        }
        // apply
        for (Entry<Variable, Usage> entry : usageMap.entrySet()) {
            Usage u = entry.getValue();
            // find region which contain all usage regions
            Set<IRegion> set = u.getUseRegions();
            // drop regions whose parent is also in the set: keep only outermost
            for (Iterator<IRegion> it = set.iterator(); it.hasNext(); ) {
                IRegion r = it.next();
                IRegion parent = r.getParent();
                if (parent != null && set.contains(parent)) {
                    it.remove();
                }
            }
            IRegion region = null;
            if (!set.isEmpty()) {
                region = set.iterator().next();
            } else if (!u.getAssigns().isEmpty()) {
                region = u.getAssigns().iterator().next();
            }
            if (region == null) {
                continue;
            }
            // walk up the region tree until a region is found that dominates
            // all assigns and uses; fall back to the method's root region
            IRegion parent = region;
            boolean declare = false;
            while (parent != null) {
                if (canDeclareInRegion(u, region, regionsOrder)) {
                    declareVar(region, u.getArg());
                    declare = true;
                    break;
                }
                region = parent;
                parent = region.getParent();
            }
            if (!declare) {
                declareVar(mth.getRegion(), u.getArg());
            }
        }
    }

    /**
     * Get-or-create the {@link Usage} for this register arg, and make sure
     * the arg shares the variable's common {@link VarName} holder.
     */
    private static Usage addToUsageMap(RegisterArg arg, Map<Variable, Usage> usageMap) {
        Variable varId = new Variable(arg);
        Usage usage = usageMap.get(varId);
        if (usage == null) {
            usage = new Usage();
            usageMap.put(varId, usage);
        }
        // merge variables names
        if (usage.getVarName() == null) {
            VarName argVN = arg.getSVar().getVarName();
            if (argVN == null) {
                argVN = new VarName();
                arg.getSVar().setVarName(argVN);
            }
            usage.setVarName(argVN);
        } else {
            arg.getSVar().setVarName(usage.getVarName());
        }
        return usage;
    }

    // mark the assigning instruction to emit "Type name = ..."; only possible
    // when the arg really is the instruction's result
    private static boolean declareAtAssign(Usage u) {
        RegisterArg arg = u.getArg();
        InsnNode parentInsn = arg.getParentInsn();
        if (!arg.equals(parentInsn.getResult())) {
            return false;
        }
        parentInsn.add(AFlag.DECLARE_VAR);
        return true;
    }

    // attach the variable to the region's declare-variables attribute
    private static void declareVar(IContainer region, RegisterArg arg) {
        DeclareVariablesAttr dv = region.get(AType.DECLARE_VARIABLES);
        if (dv == null) {
            dv = new DeclareVariablesAttr();
            region.addAttr(dv);
        }
        dv.addVar(arg);
    }

    /**
     * Depth-first numbering of regions. Branch regions give all their inner
     * regions the same order id (branches are alternatives, not a sequence);
     * exception handlers for a container are numbered before the container.
     *
     * @param inc whether ids should still be incremented on this path
     * @return the last id handed out
     */
    private static int calculateOrder(IContainer container, Map<IContainer, Integer> regionsOrder,
            int id, boolean inc) {
        if (!(container instanceof IRegion)) {
            return id;
        }
        IRegion region = (IRegion) container;
        Integer previous = regionsOrder.put(region, id);
        if (previous != null) {
            // already numbered: keep first id, don't recurse again
            return id;
        }
        for (IContainer c : region.getSubBlocks()) {
            if (c instanceof IBranchRegion) {
                // on branch set for all inner regions same order id
                id = calculateOrder(c, regionsOrder, inc ? id + 1 : id, false);
            } else {
                List<IContainer> handlers = RegionUtils.getExcHandlersForRegion(c);
                if (!handlers.isEmpty()) {
                    for (IContainer handler : handlers) {
                        id = calculateOrder(handler, regionsOrder, inc ? id + 1 : id, inc);
                    }
                }
                id = calculateOrder(c, regionsOrder, inc ? id + 1 : id, inc);
            }
        }
        return id;
    }

    /**
     * True if declaring the variable in {@code region} would precede (in
     * region order) every assign and use region.
     */
    private static boolean canDeclareInRegion(Usage u, IRegion region, Map<IContainer, Integer> regionsOrder) {
        Integer pos = regionsOrder.get(region);
        if (pos == null) {
            LOG.debug("TODO: Not found order for region {} for {}", region, u);
            return false;
        }
        // workaround for declare variables used in several loops
        if (region instanceof LoopRegion) {
            for (IRegion r : u.getAssigns()) {
                if (!RegionUtils.isRegionContainsRegion(region, r)) {
                    return false;
                }
            }
        }
        return isAllRegionsAfter(region, pos, u.getAssigns(), regionsOrder)
                && isAllRegionsAfter(region, pos, u.getUseRegions(), regionsOrder);
    }

    // all regions must come at or after 'pos'; equal order ids (branches)
    // require structural containment instead
    private static boolean isAllRegionsAfter(IRegion region, int pos,
            Set<IRegion> regions, Map<IContainer, Integer> regionsOrder) {
        for (IRegion r : regions) {
            if (r == region) {
                continue;
            }
            Integer rPos = regionsOrder.get(r);
            if (rPos == null) {
                LOG.debug("TODO: Not found order for region {} in {}", r, regionsOrder);
                return false;
            }
            if (pos > rPos) {
                return false;
            }
            if (pos == rPos) {
                return isAllRegionsAfterRecursive(region, regions);
            }
        }
        return true;
    }

    // structural check: 'region' must contain every region in 'others'
    private static boolean isAllRegionsAfterRecursive(IRegion region, Set<IRegion> others) {
        for (IRegion r : others) {
            if (!RegionUtils.isRegionContainsRegion(region, r)) {
                return false;
            }
        }
        return true;
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.kafka.common.utils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.kafka.common.KafkaException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Utils {
// This matches URIs of formats: host:port and protocol:\\host:port
// IPv6 is supported with [ip] pattern
private static final Pattern HOST_PORT_PATTERN = Pattern.compile(".*?\\[?([0-9a-z\\-.:]*)\\]?:([0-9]+)");
public static final String NL = System.getProperty("line.separator");
private static final Logger log = LoggerFactory.getLogger(Utils.class);
/**
* Get a sorted list representation of a collection.
* @param collection The collection to sort
* @param <T> The class of objects in the collection
* @return An unmodifiable sorted list with the contents of the collection
*/
public static <T extends Comparable<? super T>> List<T> sorted(Collection<T> collection) {
List<T> res = new ArrayList<>(collection);
Collections.sort(res);
return Collections.unmodifiableList(res);
}
/**
* Turn the given UTF8 byte array into a string
*
* @param bytes The byte array
* @return The string
*/
public static String utf8(byte[] bytes) {
try {
return new String(bytes, "UTF8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("This shouldn't happen.", e);
}
}
/**
* Turn a string into a utf8 byte[]
*
* @param string The string
* @return The byte[]
*/
public static byte[] utf8(String string) {
try {
return string.getBytes("UTF8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("This shouldn't happen.", e);
}
}
/**
* Read an unsigned integer from the current position in the buffer, incrementing the position by 4 bytes
*
* @param buffer The buffer to read from
* @return The integer read, as a long to avoid signedness
*/
public static long readUnsignedInt(ByteBuffer buffer) {
return buffer.getInt() & 0xffffffffL;
}
/**
* Read an unsigned integer from the given position without modifying the buffers position
*
* @param buffer the buffer to read from
* @param index the index from which to read the integer
* @return The integer read, as a long to avoid signedness
*/
public static long readUnsignedInt(ByteBuffer buffer, int index) {
return buffer.getInt(index) & 0xffffffffL;
}
/**
* Read an unsigned integer stored in little-endian format from the {@link InputStream}.
*
* @param in The stream to read from
* @return The integer read (MUST BE TREATED WITH SPECIAL CARE TO AVOID SIGNEDNESS)
*/
public static int readUnsignedIntLE(InputStream in) throws IOException {
return (in.read() << 8 * 0)
| (in.read() << 8 * 1)
| (in.read() << 8 * 2)
| (in.read() << 8 * 3);
}
/**
* Get the little-endian value of an integer as a byte array.
* @param val The value to convert to a litte-endian array
* @return The little-endian encoded array of bytes for the value
*/
public static byte[] toArrayLE(int val) {
return new byte[] {
(byte) (val >> 8 * 0),
(byte) (val >> 8 * 1),
(byte) (val >> 8 * 2),
(byte) (val >> 8 * 3)
};
}
/**
 * Read an unsigned integer stored in little-endian format from a byte array
 * at a given offset.
 *
 * @param buffer The byte array to read from
 * @param offset The position in buffer to read from
 * @return The integer read (MUST BE TREATED WITH SPECIAL CARE TO AVOID SIGNEDNESS)
 */
public static int readUnsignedIntLE(byte[] buffer, int offset) {
    // Bug fix: each byte must be masked with 0xff before shifting. Java bytes
    // are signed, so e.g. (byte) 0xff would sign-extend to 0xffffffff and
    // clobber the higher-order bytes when OR-ed together.
    return (buffer[offset] & 0xff)
        | ((buffer[offset + 1] & 0xff) << 8)
        | ((buffer[offset + 2] & 0xff) << 16)
        | ((buffer[offset + 3] & 0xff) << 24);
}
/**
 * Write the given long value as a 4 byte unsigned integer at the buffer's
 * current position. Overflow (any bits above the low 32) is ignored.
 *
 * NOTE(review): the method name contains a typo ("writet...") but is kept
 * as-is because renaming would break existing callers.
 *
 * @param buffer The buffer to write to
 * @param value The value to write
 */
public static void writetUnsignedInt(ByteBuffer buffer, long value) {
    // Narrowing the long to an int keeps exactly the low 32 bits — the same
    // result as masking with 0xffffffffL first.
    buffer.putInt((int) value);
}
/**
 * Write the given long value as a 4 byte unsigned integer at the given
 * absolute index. Overflow (any bits above the low 32) is ignored and the
 * buffer's position is not modified.
 *
 * @param buffer The buffer to write to
 * @param index The position in the buffer at which to begin writing
 * @param value The value to write
 */
public static void writeUnsignedInt(ByteBuffer buffer, int index, long value) {
    // Narrowing the long to an int keeps exactly the low 32 bits — the same
    // result as masking with 0xffffffffL first.
    buffer.putInt(index, (int) value);
}
/**
 * Write an unsigned integer in little-endian format to the {@link OutputStream}.
 *
 * @param out The stream to write to
 * @param value The value to write
 */
public static void writeUnsignedIntLE(OutputStream out, int value) throws IOException {
    // Emit the lowest-order byte first; OutputStream.write only uses the
    // low 8 bits of its argument.
    for (int shift = 0; shift <= 24; shift += 8)
        out.write(value >>> shift);
}
/**
 * Write an unsigned integer in little-endian format to a byte array
 * at a given offset.
 *
 * @param buffer The byte array to write to
 * @param offset The position in buffer to write to
 * @param value The value to write
 */
public static void writeUnsignedIntLE(byte[] buffer, int offset, int value) {
    // Lowest-order byte first: buffer[offset + i] holds bits 8*i .. 8*i+7.
    for (int i = 0; i < 4; i++)
        buffer[offset + i] = (byte) (value >>> (8 * i));
}
/**
 * Get the absolute value of the given number. If the number is Int.MinValue return 0. This is different from
 * java.lang.Math.abs or scala.math.abs in that they return Int.MinValue (!).
 */
public static int abs(int n) {
    // Math.abs(Integer.MIN_VALUE) overflows back to Integer.MIN_VALUE, so
    // that single value is special-cased to 0 here.
    if (n == Integer.MIN_VALUE)
        return 0;
    return Math.abs(n);
}
/**
 * Get the minimum of some long values.
 *
 * @param first Used to ensure at least one value
 * @param rest The rest of longs to compare
 * @return The minimum of all passed argument.
 */
public static long min(long first, long... rest) {
    long result = first;
    for (long candidate : rest)
        result = Math.min(result, candidate);
    return result;
}
/**
 * Get the length for UTF8-encoding a string without encoding it first.
 *
 * ASCII chars take 1 byte, chars up to U+07FF take 2, a surrogate pair
 * (supplementary code point) takes 4, and everything else in the BMP takes 3.
 *
 * @param s The string to calculate the length for
 * @return The length when serialized
 */
public static int utf8Length(CharSequence s) {
    int bytes = 0;
    final int len = s.length();
    int i = 0;
    while (i < len) {
        final char c = s.charAt(i);
        if (c <= 0x7F) {
            bytes += 1;
        } else if (c <= 0x7FF) {
            bytes += 2;
        } else if (Character.isHighSurrogate(c)) {
            // A surrogate pair encodes one 4-byte code point; skip the low half.
            bytes += 4;
            i++;
        } else {
            bytes += 3;
        }
        i++;
    }
    return bytes;
}
/**
 * Copy the given byte buffer's contents, from index 0 up to
 * {@code buffer.limit()}, into a newly allocated byte array.
 *
 * Delegates to the three-argument overload; the buffer's position is not
 * advanced by this call.
 */
public static byte[] toArray(ByteBuffer buffer) {
    return toArray(buffer, 0, buffer.limit());
}
/**
 * Copy {@code size} bytes starting at absolute index {@code offset} from the
 * given buffer into a newly allocated byte array. The buffer's position is
 * unchanged on return.
 *
 * @param buffer The buffer to copy from
 * @param offset The absolute index in the buffer at which to start copying
 * @param size The number of bytes to copy
 * @return A new array holding the requested bytes
 */
public static byte[] toArray(ByteBuffer buffer, int offset, int size) {
    byte[] dest = new byte[size];
    if (buffer.hasArray()) {
        System.arraycopy(buffer.array(), buffer.arrayOffset() + offset, dest, 0, size);
    } else {
        // Bug fix: the previous implementation ignored 'offset' for buffers
        // without a backing array and simply read from the current position.
        // Position at the requested offset, read, then restore the position —
        // the same semantics as the array branch and as readBytes().
        int pos = buffer.position();
        buffer.position(offset);
        buffer.get(dest, 0, size);
        buffer.position(pos);
    }
    return dest;
}
/**
 * Check that the parameter t is not null.
 *
 * @param t The object to check
 * @return t if it isn't null
 * @throws NullPointerException if t is null.
 */
public static <T> T notNull(T t) {
    if (t != null)
        return t;
    throw new NullPointerException();
}
/**
 * Sleep for a bit; returns early (without error) if the thread is interrupted.
 *
 * @param ms The duration of the sleep in milliseconds
 */
public static void sleep(long ms) {
    try {
        Thread.sleep(ms);
    } catch (InterruptedException ignored) {
        // Deliberately swallowed: waking up early is acceptable for callers
        // of this best-effort helper.
    }
}
/**
 * Instantiate the class via its public no-argument constructor.
 *
 * @param c The class to instantiate
 * @return A new instance of c
 * @throws KafkaException if the class is null, is not accessible, or cannot
 *         be instantiated (e.g. no public no-argument constructor)
 */
public static <T> T newInstance(Class<T> c) {
    try {
        return c.newInstance();
    } catch (InstantiationException e) {
        throw new KafkaException("Could not instantiate class " + c.getName() + " Does it have a public no-argument constructor?", e);
    } catch (IllegalAccessException e) {
        throw new KafkaException("Could not instantiate class " + c.getName(), e);
    } catch (NullPointerException e) {
        throw new KafkaException("Requested class was null", e);
    }
}
/**
 * Look up the class by name and instantiate it via its public no-argument
 * constructor, using the context (or Kafka) class loader.
 *
 * @param klass class name
 * @param base super class of the class to be instantiated
 * @param <T> the expected supertype of the new instance
 * @return the new instance
 * @throws ClassNotFoundException if no class with the given name is found
 */
public static <T> T newInstance(String klass, Class<T> base) throws ClassNotFoundException {
    ClassLoader loader = Utils.getContextOrKafkaClassLoader();
    Class<? extends T> cls = Class.forName(klass, true, loader).asSubclass(base);
    return Utils.newInstance(cls);
}
/**
 * Generates 32 bit murmur2 hash from byte array.
 *
 * The algorithm processes the input four bytes at a time (assembled
 * little-endian), then mixes in the 0-3 byte tail, then applies a final
 * avalanche of shifts and multiplies.
 *
 * @param data byte array to hash
 * @return 32 bit hash of the given array
 */
public static int murmur2(final byte[] data) {
    int length = data.length;
    int seed = 0x9747b28c;
    // 'm' and 'r' are mixing constants generated offline.
    // They're not really 'magic', they just happen to work well.
    final int m = 0x5bd1e995;
    final int r = 24;
    // Initialize the hash to a random value
    int h = seed ^ length;
    int length4 = length / 4;
    // Mix 4 bytes at a time into the hash, assembled little-endian.
    for (int i = 0; i < length4; i++) {
        final int i4 = i * 4;
        int k = (data[i4 + 0] & 0xff) + ((data[i4 + 1] & 0xff) << 8) + ((data[i4 + 2] & 0xff) << 16) + ((data[i4 + 3] & 0xff) << 24);
        k *= m;
        k ^= k >>> r;
        k *= m;
        h *= m;
        h ^= k;
    }
    // Handle the last few bytes of the input array.
    // NOTE: the switch deliberately falls through — a 3-byte tail mixes in
    // bytes 2, 1 and 0 before the final multiply.
    switch (length % 4) {
        case 3:
            h ^= (data[(length & ~3) + 2] & 0xff) << 16;
        case 2:
            h ^= (data[(length & ~3) + 1] & 0xff) << 8;
        case 1:
            h ^= data[length & ~3] & 0xff;
            h *= m;
    }
    // Final avalanche: mix the last few bits of the hash.
    h ^= h >>> 13;
    h *= m;
    h ^= h >>> 15;
    return h;
}
/**
 * Extracts the hostname from a "host:port" address string.
 *
 * @param address address string to parse
 * @return hostname or null if the given address is incorrect
 */
public static String getHost(String address) {
    Matcher matcher = HOST_PORT_PATTERN.matcher(address);
    if (!matcher.matches())
        return null;
    return matcher.group(1);
}
/**
 * Extracts the port number from a "host:port" address string.
 *
 * @param address address string to parse
 * @return port number or null if the given address is incorrect
 */
public static Integer getPort(String address) {
    Matcher matcher = HOST_PORT_PATTERN.matcher(address);
    if (!matcher.matches())
        return null;
    return Integer.parseInt(matcher.group(2));
}
/**
 * Formats hostname and port number as a "host:port" address string,
 * surrounding IPv6 addresses with braces '[', ']'.
 *
 * @param host hostname
 * @param port port number
 * @return address string
 */
public static String formatAddress(String host, Integer port) {
    // A host containing ':' is taken to be an IPv6 literal, which must be
    // bracketed so the port separator stays unambiguous.
    if (host.contains(":"))
        return "[" + host + "]:" + port;
    return host + ":" + port;
}
/**
 * Create a string representation of an array joined by the given separator.
 * Delegates to the Collection overload.
 *
 * @param strs The array of items
 * @param seperator The separator
 * @return The string representation.
 */
public static <T> String join(T[] strs, String seperator) {
    return join(Arrays.asList(strs), seperator);
}
/**
 * Create a string representation of a list joined by the given separator.
 * Each element is rendered via StringBuilder.append (nulls become "null").
 *
 * @param list The list of items
 * @param seperator The separator
 * @return The string representation.
 */
public static <T> String join(Collection<T> list, String seperator) {
    StringBuilder sb = new StringBuilder();
    boolean first = true;
    for (T item : list) {
        if (!first)
            sb.append(seperator);
        sb.append(item);
        first = false;
    }
    return sb.toString();
}
/**
 * Read a properties file from the given path.
 *
 * @param filename The path of the file to read
 * @return The loaded properties
 * @throws FileNotFoundException if the file does not exist
 * @throws IOException if the file cannot be read
 */
public static Properties loadProps(String filename) throws IOException, FileNotFoundException {
    Properties props = new Properties();
    // try-with-resources guarantees the stream is closed even if load()
    // throws, replacing the manual null-checked finally block.
    try (InputStream propStream = new FileInputStream(filename)) {
        props.load(propStream);
    }
    return props;
}
/**
 * Converts a Properties object to a Map&lt;String, String&gt;, calling
 * {@link #toString} on every key and value to ensure all entries are Strings.
 */
public static Map<String, String> propsToStringMap(Properties props) {
    Map<String, String> result = new HashMap<>();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        String key = entry.getKey().toString();
        String value = entry.getValue().toString();
        result.put(key, value);
    }
    return result;
}
/**
 * Get the stack trace from an exception as a string.
 */
public static String stackTrace(Throwable e) {
    StringWriter buffer = new StringWriter();
    // PrintWriter over a StringWriter needs no real closing, but
    // try-with-resources makes the flush-on-close explicit.
    try (PrintWriter writer = new PrintWriter(buffer)) {
        e.printStackTrace(writer);
    }
    return buffer.toString();
}
/**
 * Create a new (unstarted) thread wrapping the given runnable. Any uncaught
 * exception thrown by the thread is logged via the class logger.
 *
 * @param name The name of the thread
 * @param runnable The work for the thread to do
 * @param daemon Should the thread block JVM shutdown? (true = daemon, i.e.
 *        it does NOT block shutdown)
 * @return The unstarted thread
 */
public static Thread newThread(String name, Runnable runnable, Boolean daemon) {
    Thread thread = new Thread(runnable, name);
    // NOTE(review): 'daemon' is a boxed Boolean; a null argument would throw
    // NullPointerException when auto-unboxed here — confirm callers never pass null.
    thread.setDaemon(daemon);
    thread.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        public void uncaughtException(Thread t, Throwable e) {
            log.error("Uncaught exception in thread '" + t.getName() + "':", e);
        }
    });
    return thread;
}
/**
 * Create an (unstarted) daemon thread; a daemon thread does not prevent
 * JVM shutdown.
 *
 * @param name The name of the thread
 * @param runnable The runnable to execute in the background
 * @return The unstarted thread
 */
public static Thread daemonThread(String name, Runnable runnable) {
    return newThread(name, runnable, true);
}
/**
 * Print an error message to stderr and shut down the JVM with exit code 1.
 * This method never returns.
 *
 * @param message The error message
 */
public static void croak(String message) {
    System.err.println(message);
    System.exit(1);
}
/**
 * Read a buffer into a byte array for the given absolute offset and length.
 * The buffer's position is unchanged on return.
 */
public static byte[] readBytes(ByteBuffer buffer, int offset, int length) {
    byte[] dest = new byte[length];
    if (buffer.hasArray()) {
        // Heap buffer: copy straight out of the backing array.
        System.arraycopy(buffer.array(), buffer.arrayOffset() + offset, dest, 0, length);
        return dest;
    }
    // Direct (or read-only) buffer: move to the offset, read, then restore
    // the original position via mark/reset.
    buffer.mark();
    buffer.position(offset);
    buffer.get(dest, 0, length);
    buffer.reset();
    return dest;
}
/**
 * Read the given byte buffer (index 0 to limit) into a new byte array.
 * Delegates to the three-argument overload; the buffer's position is not
 * advanced by this call.
 */
public static byte[] readBytes(ByteBuffer buffer) {
    return Utils.readBytes(buffer, 0, buffer.limit());
}
/**
 * Attempt to read a file as a string, decoding it with the given charset.
 *
 * The file is memory-mapped read-only and decoded in one pass.
 *
 * @param path Path of the file to read
 * @param charset Charset to decode with; null means the platform default
 * @return The decoded file contents
 * @throws IOException if the file cannot be opened or mapped
 */
public static String readFileAsString(String path, Charset charset) throws IOException {
    if (charset == null) charset = Charset.defaultCharset();
    // try-with-resources closes the stream (and its channel) even if map()
    // or decode() throws; the pointless 'new String()' initializer is gone.
    try (FileInputStream stream = new FileInputStream(path)) {
        FileChannel fc = stream.getChannel();
        MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
        return charset.decode(bb).toString();
    }
}
/**
 * Read the entire file at the given path as a string, decoded with the
 * platform default charset.
 *
 * @param path Path of the file to read
 * @return The decoded file contents
 * @throws IOException if the file cannot be read
 */
public static String readFileAsString(String path) throws IOException {
    return Utils.readFileAsString(path, Charset.defaultCharset());
}
/**
 * Return a ByteBuffer with at least {@code newLength} capacity. If the
 * existing buffer is already large enough it is returned unchanged;
 * otherwise a new heap buffer is allocated and the existing buffer's
 * written contents (via flip) are copied into it.
 *
 * @param existingBuffer buffer whose capacity to check
 * @param newLength minimum required capacity
 * @return either the original buffer or a larger copy of it
 */
public static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength) {
    if (newLength <= existingBuffer.capacity())
        return existingBuffer;
    ByteBuffer grown = ByteBuffer.allocate(newLength);
    existingBuffer.flip();
    grown.put(existingBuffer);
    return grown;
}
/**
 * Creates a mutable set containing the given elements.
 *
 * @param elems the elements
 * @param <T> the type of element
 * @return a new HashSet containing the elements (duplicates collapsed)
 */
@SafeVarargs // the varargs array is only read, never stored or exposed
public static <T> Set<T> mkSet(T... elems) {
    return new HashSet<>(Arrays.asList(elems));
}
/**
 * Creates a list containing the given elements.
 *
 * NOTE: the result is backed by {@link Arrays#asList} and is therefore
 * fixed-size: {@code set()} works, but {@code add()}/{@code remove()} throw
 * UnsupportedOperationException.
 *
 * @param elems the elements
 * @param <T> the type of element
 * @return a fixed-size list view of the elements
 */
@SafeVarargs // the varargs array is only wrapped, never written through unsafely
public static <T> List<T> mkList(T... elems) {
    return Arrays.asList(elems);
}
/*
 * Create a string from a collection, joining the elements' toString() values
 * with the given separator.
 * @param coll the collection
 * @param separator the separator
 */
public static <T> CharSequence mkString(Collection<T> coll, String separator) {
    StringBuilder out = new StringBuilder();
    boolean first = true;
    for (T element : coll) {
        if (!first)
            out.append(separator);
        out.append(element.toString());
        first = false;
    }
    return out;
}
/**
 * Recursively delete the given file/directory and any subfiles (if any exist).
 * A null argument is a no-op; individual delete failures are ignored.
 *
 * @param file The root file at which to begin deleting
 */
public static void delete(File file) {
    if (file == null)
        return;
    if (file.isDirectory()) {
        // Children first, then the directory itself. listFiles() returns
        // null on I/O error, in which case we still try to delete the dir.
        File[] children = file.listFiles();
        if (children != null) {
            for (File child : children)
                delete(child);
        }
    }
    file.delete();
}
/**
 * Returns the given list, or an immutable empty list if it is null.
 *
 * @param other the possibly-null list
 * @return other itself, or an empty list when other is null
 */
public static <T> List<T> safe(List<T> other) {
    if (other == null)
        return Collections.<T>emptyList();
    return other;
}
/**
 * Get the ClassLoader which loaded the Kafka classes — i.e. the loader of
 * this Utils class.
 */
public static ClassLoader getKafkaClassLoader() {
    return Utils.class.getClassLoader();
}
/**
 * Get the Context ClassLoader on this thread or, if not present, the
 * ClassLoader that loaded Kafka.
 *
 * This should be used whenever passing a ClassLoader to Class.forName.
 */
public static ClassLoader getContextOrKafkaClassLoader() {
    ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
    return contextLoader != null ? contextLoader : getKafkaClassLoader();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.cache.query.index.sorted.defragmentation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.failure.FailureContext;
import org.apache.ignite.internal.cache.query.index.IndexProcessor;
import org.apache.ignite.internal.cache.query.index.sorted.IndexRow;
import org.apache.ignite.internal.cache.query.index.sorted.IndexRowImpl;
import org.apache.ignite.internal.cache.query.index.sorted.InlineIndexRowHandler;
import org.apache.ignite.internal.cache.query.index.sorted.SortedIndexDefinition;
import org.apache.ignite.internal.cache.query.index.sorted.inline.InlineIndex;
import org.apache.ignite.internal.cache.query.index.sorted.inline.InlineIndexImpl;
import org.apache.ignite.internal.cache.query.index.sorted.inline.io.MvccIO;
import org.apache.ignite.internal.pagemem.PageMemory;
import org.apache.ignite.internal.processors.cache.CacheGroupContext;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.persistence.CacheDataRow;
import org.apache.ignite.internal.processors.cache.persistence.CacheDataRowAdapter;
import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointTimeoutLock;
import org.apache.ignite.internal.processors.cache.persistence.defragmentation.LinkMap;
import org.apache.ignite.internal.processors.cache.persistence.defragmentation.TreeIterator;
import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMemoryEx;
import org.apache.ignite.internal.processors.cache.persistence.tree.io.BPlusIO;
import org.apache.ignite.internal.processors.cache.tree.mvcc.data.MvccDataRow;
import org.apache.ignite.internal.util.IgniteUtils;
import org.apache.ignite.internal.util.collection.IntMap;
import org.apache.ignite.thread.IgniteThreadPoolExecutor;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.failure.FailureType.CRITICAL_ERROR;
/**
 * Defragments the sorted (B+ tree backed) indexes of a cache group: index
 * rows are copied from the old index trees into freshly created ones, with
 * each data-row link remapped through the per-partition {@link LinkMap}.
 */
public class IndexingDefragmentation {
    /** Indexing. */
    private final IndexProcessor indexing;

    /** Constructor. */
    public IndexingDefragmentation(IndexProcessor indexing) {
        this.indexing = indexing;
    }

    /**
     * Defragment index partition.
     *
     * @param grpCtx Old group context.
     * @param newCtx New group context.
     * @param partPageMem Partition page memory.
     * @param mappingByPartition Mapping page memory.
     * @param cpLock Defragmentation checkpoint read lock.
     * @param cancellationChecker Cancellation checker.
     * @param defragmentationThreadPool Thread pool for defragmentation.
     * @param log Log.
     *
     * @throws IgniteCheckedException If failed.
     */
    public void defragment(
        CacheGroupContext grpCtx,
        CacheGroupContext newCtx,
        PageMemoryEx partPageMem,
        IntMap<LinkMap> mappingByPartition,
        CheckpointTimeoutLock cpLock,
        Runnable cancellationChecker,
        IgniteThreadPoolExecutor defragmentationThreadPool,
        IgniteLogger log
    ) throws IgniteCheckedException {
        int pageSize = grpCtx.cacheObjectContext().kernalContext().grid().configuration().getDataStorageConfiguration().getPageSize();
        PageMemoryEx oldCachePageMem = (PageMemoryEx)grpCtx.dataRegion().pageMemory();
        PageMemory newCachePageMemory = partPageMem;
        Collection<TableIndexes> tables = tables(grpCtx);
        // Max time (ms) to hold the checkpoint read lock before it is
        // released and re-taken inside defragmentTable.
        long cpLockThreshold = 150L;
        AtomicLong lastCpLockTs = new AtomicLong(System.currentTimeMillis());
        // One task per table; tables are defragmented in parallel on the pool.
        IgniteUtils.doInParallel(
            defragmentationThreadPool,
            tables,
            table -> defragmentTable(
                newCtx,
                mappingByPartition,
                cpLock,
                cancellationChecker,
                pageSize,
                oldCachePageMem,
                newCachePageMemory,
                cpLockThreshold,
                lastCpLockTs,
                table
            )
        );
        if (log.isInfoEnabled())
            log.info("Defragmentation indexes completed for group '" + grpCtx.groupId() + "'");
    }

    /**
     * Defragment one given table.
     *
     * Runs entirely under the checkpoint read lock, periodically releasing
     * and re-taking it (see cpLockThreshold) so checkpoints are not starved.
     *
     * @return Always {@code true}.
     * @throws IgniteCheckedException If failed.
     */
    private boolean defragmentTable(
        CacheGroupContext newCtx,
        IntMap<LinkMap> mappingByPartition,
        CheckpointTimeoutLock cpLock,
        Runnable cancellationChecker,
        int pageSize,
        PageMemoryEx oldCachePageMem,
        PageMemory newCachePageMemory,
        long cpLockThreshold,
        AtomicLong lastCpLockTs,
        TableIndexes indexes
    ) throws IgniteCheckedException {
        cpLock.checkpointReadLock();
        try {
            TreeIterator treeIterator = new TreeIterator(pageSize);
            GridCacheContext<?, ?> cctx = indexes.cctx;
            cancellationChecker.run();
            for (InlineIndex oldIdx : indexes.idxs) {
                InlineIndexRowHandler oldRowHnd = oldIdx.segment(0).rowHandler();
                SortedIndexDefinition idxDef = (SortedIndexDefinition) indexing.indexDefinition(oldIdx.id());
                // The new index is built over the new page memory from the old
                // index's definition.
                InlineIndexImpl newIdx = new DefragIndexFactory(newCtx.offheap(), newCachePageMemory, oldIdx)
                    .createIndex(cctx, idxDef)
                    .unwrap(InlineIndexImpl.class);
                int segments = oldIdx.segmentsCount();
                for (int i = 0; i < segments; ++i) {
                    treeIterator.iterate(oldIdx.segment(i), oldCachePageMem, (theTree, io, pageAddr, idx) -> {
                        cancellationChecker.run();
                        // Periodically release and re-take the checkpoint read
                        // lock so a pending checkpoint can proceed.
                        if (System.currentTimeMillis() - lastCpLockTs.get() >= cpLockThreshold) {
                            cpLock.checkpointReadUnlock();
                            cpLock.checkpointReadLock();
                            lastCpLockTs.set(System.currentTimeMillis());
                        }
                        assert 1 == io.getVersion()
                            : "IO version " + io.getVersion() + " is not supported by current defragmentation algorithm." +
                            " Please implement copying of tree in a new format.";
                        BPlusIO<IndexRow> h2IO = DefragIndexFactory.wrap(io, oldRowHnd);
                        IndexRow row = theTree.getRow(h2IO, pageAddr, idx);
                        // Only real data rows are copied; search rows are skipped.
                        if (!row.indexSearchRow()) {
                            IndexRowImpl r = (IndexRowImpl) row;
                            CacheDataRow cacheDataRow = r.cacheDataRow();
                            int partition = cacheDataRow.partition();
                            long link = r.link();
                            LinkMap map = mappingByPartition.get(partition);
                            // Remap the old data-row link to its new location.
                            long newLink = map.get(link);
                            CacheDataRow newDataRow;
                            if (((MvccIO) io).storeMvccInfo()) {
                                newDataRow = new MvccDataRow(newLink);
                                newDataRow.mvccVersion(row);
                            } else
                                newDataRow = new CacheDataRowAdapter(newLink);
                            // Use old row handler, as MetaInfo is copied from old tree.
                            IndexRowImpl newRow = new IndexRowImpl(oldRowHnd, newDataRow, r.keys());
                            newIdx.putIndexRow(newRow);
                        }
                        return true;
                    });
                }
            }
            return true;
        }
        catch (Throwable t) {
            // Any failure during index defragmentation is escalated as a
            // critical failure before being rethrown.
            newCtx.cacheObjectContext().kernalContext()
                .failure().process(new FailureContext(CRITICAL_ERROR, t));
            throw t;
        }
        finally {
            cpLock.checkpointReadUnlock();
        }
    }

    /** Returns collection of table indexes, grouped by table name per cache. */
    private Collection<TableIndexes> tables(CacheGroupContext gctx) {
        Collection<TableIndexes> tables = new ArrayList<>();
        for (GridCacheContext<?, ?> cctx: gctx.caches()) {
            Map<String, TableIndexes> idxs = new HashMap<>();
            List<InlineIndex> indexes = indexing.treeIndexes(cctx, false);
            // Group this cache's tree indexes by their table name.
            for (InlineIndex idx: indexes) {
                String table = indexing.indexDefinition(idx.id()).idxName().tableName();
                idxs.putIfAbsent(table, new TableIndexes(cctx, table));
                idxs.get(table).addIndex(idx);
            }
            tables.addAll(idxs.values());
        }
        return tables;
    }

    /** Holder for indexes per cache table. */
    private static class TableIndexes {
        /** Table name. */
        final @Nullable String tableName;

        /** Cache context. */
        final GridCacheContext<?, ?> cctx;

        /** Indexes. */
        final List<InlineIndex> idxs = new ArrayList<>();

        /** */
        TableIndexes(GridCacheContext<?, ?> cctx, String tableName) {
            this.cctx = cctx;
            this.tableName = tableName;
        }

        /** */
        void addIndex(InlineIndex idx) {
            idxs.add(idx);
        }
    }
}
|
|
package org.openestate.io.openimmo.xml;
import java.io.Serializable;
import javax.annotation.Generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
import org.jvnet.jaxb2_commons.lang.CopyStrategy2;
import org.jvnet.jaxb2_commons.lang.CopyTo2;
import org.jvnet.jaxb2_commons.lang.Equals2;
import org.jvnet.jaxb2_commons.lang.EqualsStrategy2;
import org.jvnet.jaxb2_commons.lang.JAXBCopyStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy;
import org.jvnet.jaxb2_commons.lang.ToString2;
import org.jvnet.jaxb2_commons.lang.ToStringStrategy2;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;
/**
 * Java class for &lt;max_mietdauer&gt; element: a text content value plus an
 * optional {@code max_dauer} unit attribute (see the nested {@link MaxDauer}
 * enum).
 *
 * <p>NOTE: this is JAXB-generated code (see the {@code @Generated}
 * annotations); prefer regenerating from the schema over hand-editing.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "content"
})
@XmlRootElement(name = "max_mietdauer")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
public class MaxMietdauer implements Serializable, Cloneable, CopyTo2, Equals2, ToString2
{
    // The element's text content.
    @XmlValue
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    protected String content;
    // The optional "max_dauer" attribute (duration unit).
    @XmlAttribute(name = "max_dauer")
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    protected MaxMietdauer.MaxDauer maxDauer;

    /**
     * Gets the value of the content property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public String getContent() {
        return content;
    }

    /**
     * Sets the value of the content property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public void setContent(String value) {
        this.content = value;
    }

    /**
     * Gets the value of the maxDauer property.
     *
     * @return
     *     possible object is
     *     {@link MaxMietdauer.MaxDauer }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public MaxMietdauer.MaxDauer getMaxDauer() {
        return maxDauer;
    }

    /**
     * Sets the value of the maxDauer property.
     *
     * @param value
     *     allowed object is
     *     {@link MaxMietdauer.MaxDauer }
     *
     */
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public void setMaxDauer(MaxMietdauer.MaxDauer value) {
        this.maxDauer = value;
    }

    // toString/append/appendFields delegate to the jaxb2-commons strategy objects.
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public String toString() {
        final ToStringStrategy2 strategy = JAXBToStringStrategy.INSTANCE2;
        final StringBuilder buffer = new StringBuilder();
        append(null, buffer, strategy);
        return buffer.toString();
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) {
        strategy.appendStart(locator, this, buffer);
        appendFields(locator, buffer, strategy);
        strategy.appendEnd(locator, this, buffer);
        return buffer;
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) {
        {
            String theContent;
            theContent = this.getContent();
            strategy.appendField(locator, this, "content", buffer, theContent, (this.content!= null));
        }
        {
            MaxMietdauer.MaxDauer theMaxDauer;
            theMaxDauer = this.getMaxDauer();
            strategy.appendField(locator, this, "maxDauer", buffer, theMaxDauer, (this.maxDauer!= null));
        }
        return buffer;
    }

    // clone() is implemented as a deep copy via the copy strategy.
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public Object clone() {
        return copyTo(createNewInstance());
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public Object copyTo(Object target) {
        final CopyStrategy2 strategy = JAXBCopyStrategy.INSTANCE2;
        return copyTo(null, target, strategy);
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public Object copyTo(ObjectLocator locator, Object target, CopyStrategy2 strategy) {
        final Object draftCopy = ((target == null)?createNewInstance():target);
        if (draftCopy instanceof MaxMietdauer) {
            final MaxMietdauer copy = ((MaxMietdauer) draftCopy);
            {
                Boolean contentShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.content!= null));
                if (contentShouldBeCopiedAndSet == Boolean.TRUE) {
                    String sourceContent;
                    sourceContent = this.getContent();
                    String copyContent = ((String) strategy.copy(LocatorUtils.property(locator, "content", sourceContent), sourceContent, (this.content!= null)));
                    copy.setContent(copyContent);
                } else {
                    if (contentShouldBeCopiedAndSet == Boolean.FALSE) {
                        copy.content = null;
                    }
                }
            }
            {
                Boolean maxDauerShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.maxDauer!= null));
                if (maxDauerShouldBeCopiedAndSet == Boolean.TRUE) {
                    MaxMietdauer.MaxDauer sourceMaxDauer;
                    sourceMaxDauer = this.getMaxDauer();
                    MaxMietdauer.MaxDauer copyMaxDauer = ((MaxMietdauer.MaxDauer) strategy.copy(LocatorUtils.property(locator, "maxDauer", sourceMaxDauer), sourceMaxDauer, (this.maxDauer!= null)));
                    copy.setMaxDauer(copyMaxDauer);
                } else {
                    if (maxDauerShouldBeCopiedAndSet == Boolean.FALSE) {
                        copy.maxDauer = null;
                    }
                }
            }
        }
        return draftCopy;
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public Object createNewInstance() {
        return new MaxMietdauer();
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy2 strategy) {
        if ((object == null)||(this.getClass()!= object.getClass())) {
            return false;
        }
        if (this == object) {
            return true;
        }
        final MaxMietdauer that = ((MaxMietdauer) object);
        {
            String lhsContent;
            lhsContent = this.getContent();
            String rhsContent;
            rhsContent = that.getContent();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "content", lhsContent), LocatorUtils.property(thatLocator, "content", rhsContent), lhsContent, rhsContent, (this.content!= null), (that.content!= null))) {
                return false;
            }
        }
        {
            MaxMietdauer.MaxDauer lhsMaxDauer;
            lhsMaxDauer = this.getMaxDauer();
            MaxMietdauer.MaxDauer rhsMaxDauer;
            rhsMaxDauer = that.getMaxDauer();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "maxDauer", lhsMaxDauer), LocatorUtils.property(thatLocator, "maxDauer", rhsMaxDauer), lhsMaxDauer, rhsMaxDauer, (this.maxDauer!= null), (that.maxDauer!= null))) {
                return false;
            }
        }
        return true;
    }

    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public boolean equals(Object object) {
        final EqualsStrategy2 strategy = JAXBEqualsStrategy.INSTANCE2;
        return equals(null, null, object, strategy);
    }

    /**
     * Java enum for <em>max_dauer</em> attribute in &lt;max_mietdauer&gt; elements.
     *
     * <p>Java class for null.
     *
     * <p>The following schema fragment specifies the expected content contained within this class.
     * <p>
     * <pre>
     * &lt;simpleType&gt;
     *   &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"&gt;
     *     &lt;enumeration value="TAG"/&gt;
     *     &lt;enumeration value="WOCHE"/&gt;
     *     &lt;enumeration value="MONAT"/&gt;
     *     &lt;enumeration value="JAHR"/&gt;
     *   &lt;/restriction&gt;
     * &lt;/simpleType&gt;
     * </pre>
     */
    @XmlType(name = "")
    @XmlEnum
    @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0")
    public enum MaxDauer {
        TAG,
        WOCHE,
        MONAT,
        JAHR;

        // The XML value is identical to the enum constant name.
        public String value() {
            return name();
        }

        public static MaxMietdauer.MaxDauer fromValue(String v) {
            return valueOf(v);
        }
    }
}
|
|
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.vitess.client;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.primitives.UnsignedLong;
import com.google.protobuf.ByteString;
import io.vitess.client.cursor.Cursor;
import io.vitess.client.cursor.CursorWithError;
import io.vitess.client.cursor.SimpleCursor;
import io.vitess.proto.Query;
import io.vitess.proto.Query.BindVariable;
import io.vitess.proto.Query.BoundQuery;
import io.vitess.proto.Query.QueryResult;
import io.vitess.proto.Vtgate.BoundKeyspaceIdQuery;
import io.vitess.proto.Vtgate.BoundShardQuery;
import io.vitess.proto.Vtgate.ExecuteEntityIdsRequest.EntityId;
import io.vitess.proto.Vtrpc.RPCError;
import java.math.BigDecimal;
import java.sql.SQLException;
import java.sql.SQLIntegrityConstraintViolationException;
import java.sql.SQLInvalidAuthorizationSpecException;
import java.sql.SQLNonTransientException;
import java.sql.SQLRecoverableException;
import java.sql.SQLSyntaxErrorException;
import java.sql.SQLTimeoutException;
import java.sql.SQLTransientException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Proto contains methods for working with Vitess protobuf messages.
*/
public class Proto {
private static final int MAX_DECIMAL_UNIT = 30;
/**
 * Throws the proper SQLException for an error returned by VTGate.
 *
 * <p>
 * Errors returned by Vitess are documented in the
 * <a href="https://github.com/youtube/vitess/blob/master/proto/vtrpc.proto">vtrpc proto</a>.
 *
 * <p>A null error means success and is a no-op. The new-style code is checked
 * first; since every non-OK code throws (the default arm throws too), the
 * legacy-code switch below is only reached when the code is OK.
 */
public static void checkError(RPCError error) throws SQLException {
    if (error != null) {
        int errno = getErrno(error.getMessage());
        String sqlState = getSQLState(error.getMessage());
        // Map the canonical code to the matching JDBC exception type.
        switch (error.getCode()) {
            case OK:
                break;
            case INVALID_ARGUMENT:
                throw new SQLSyntaxErrorException(error.toString(), sqlState, errno);
            case DEADLINE_EXCEEDED:
                throw new SQLTimeoutException(error.toString(), sqlState, errno);
            case ALREADY_EXISTS:
                throw new SQLIntegrityConstraintViolationException(error.toString(), sqlState, errno);
            case UNAVAILABLE:
                throw new SQLTransientException(error.toString(), sqlState, errno);
            case UNAUTHENTICATED:
                throw new SQLInvalidAuthorizationSpecException(error.toString(), sqlState, errno);
            case ABORTED:
                throw new SQLRecoverableException(error.toString(), sqlState, errno);
            default:
                throw new SQLNonTransientException("Vitess RPC error: " + error.toString(), sqlState,
                    errno);
        }
        // Fall back to the legacy code with the same mapping (only reached
        // when the canonical code above was OK).
        switch (error.getLegacyCode()) {
            case SUCCESS_LEGACY:
                break;
            case BAD_INPUT_LEGACY:
                throw new SQLSyntaxErrorException(error.toString(), sqlState, errno);
            case DEADLINE_EXCEEDED_LEGACY:
                throw new SQLTimeoutException(error.toString(), sqlState, errno);
            case INTEGRITY_ERROR_LEGACY:
                throw new SQLIntegrityConstraintViolationException(error.toString(), sqlState, errno);
            case TRANSIENT_ERROR_LEGACY:
                throw new SQLTransientException(error.toString(), sqlState, errno);
            case UNAUTHENTICATED_LEGACY:
                throw new SQLInvalidAuthorizationSpecException(error.toString(), sqlState, errno);
            case NOT_IN_TX_LEGACY:
                throw new SQLRecoverableException(error.toString(), sqlState, errno);
            default:
                throw new SQLNonTransientException("Vitess RPC error: " + error.toString(), sqlState,
                    errno);
        }
    }
}
/**
* Extracts the MySQL errno from a Vitess error message, if any.
*
* <p>
* If no errno information is found, it returns {@code 0}.
*/
public static int getErrno(@Nullable String errorMessage) {
if (errorMessage == null) {
return 0;
}
int tagPos = errorMessage.indexOf("(errno ");
if (tagPos == -1) {
return 0;
}
int start = tagPos + "(errno ".length();
if (start >= errorMessage.length()) {
return 0;
}
int end = errorMessage.indexOf(')', start);
if (end == -1) {
return 0;
}
try {
return Integer.parseInt(errorMessage.substring(start, end));
} catch (NumberFormatException e) {
return 0;
}
}
/**
* Extracts the SQLSTATE from a Vitess error message, if any.
*
* <p>
* If no SQLSTATE information is found, it returns {@code ""}.
*/
public static String getSQLState(@Nullable String errorMessage) {
if (errorMessage == null) {
return "";
}
int tagPos = errorMessage.indexOf("(sqlstate ");
if (tagPos == -1) {
return "";
}
int start = tagPos + "(sqlstate ".length();
if (start >= errorMessage.length()) {
return "";
}
int end = errorMessage.indexOf(')', start);
if (end == -1) {
return "";
}
return errorMessage.substring(start, end);
}
public static BindVariable buildBindVariable(Object value) {
if (value instanceof BindVariable) {
return (BindVariable) value;
}
BindVariable.Builder builder = BindVariable.newBuilder();
if (value instanceof Iterable<?>) {
// List Bind Vars
Iterator<?> itr = ((Iterable<?>) value).iterator();
if (!itr.hasNext()) {
throw new IllegalArgumentException("Can't pass empty list as list bind variable.");
}
builder.setType(Query.Type.TUPLE);
while (itr.hasNext()) {
TypedValue tval = new TypedValue(itr.next());
builder.addValues(Query.Value.newBuilder().setType(tval.type).setValue(tval.value).build());
}
} else {
TypedValue tval = new TypedValue(value);
builder.setType(tval.type);
builder.setValue(tval.value);
}
return builder.build();
}
public static EntityId buildEntityId(byte[] keyspaceId, Object value) {
TypedValue tval = new TypedValue(value);
return EntityId.newBuilder().setKeyspaceId(ByteString.copyFrom(keyspaceId)).setType(tval.type)
.setValue(tval.value).build();
}
/**
* bindQuery creates a BoundQuery from query and vars.
*/
public static BoundQuery bindQuery(String query, Map<String, ?> vars) {
BoundQuery.Builder boundQueryBuilder = BoundQuery.newBuilder().setSql(query);
if (vars != null) {
for (Map.Entry<String, ?> entry : vars.entrySet()) {
boundQueryBuilder.putBindVariables(entry.getKey(), buildBindVariable(entry.getValue()));
}
}
return boundQueryBuilder.build();
}
/**
* bindShardQuery creates a BoundShardQuery.
*/
public static BoundShardQuery bindShardQuery(String keyspace, Iterable<String> shards,
BoundQuery query) {
return BoundShardQuery.newBuilder().setKeyspace(keyspace).addAllShards(shards).setQuery(query)
.build();
}
/**
* bindShardQuery creates a BoundShardQuery.
*/
public static BoundShardQuery bindShardQuery(String keyspace, Iterable<String> shards,
String query, Map<String, ?> vars) {
return bindShardQuery(keyspace, shards, bindQuery(query, vars));
}
/**
* bindKeyspaceIdQuery creates a BoundKeyspaceIdQuery.
*/
public static BoundKeyspaceIdQuery bindKeyspaceIdQuery(String keyspace,
Iterable<byte[]> keyspaceIds, BoundQuery query) {
return BoundKeyspaceIdQuery.newBuilder().setKeyspace(keyspace)
.addAllKeyspaceIds(Iterables.transform(keyspaceIds, BYTE_ARRAY_TO_BYTE_STRING))
.setQuery(query).build();
}
/**
* bindKeyspaceIdQuery creates a BoundKeyspaceIdQuery.
*/
public static BoundKeyspaceIdQuery bindKeyspaceIdQuery(String keyspace,
Iterable<byte[]> keyspaceIds, String query, Map<String, ?> vars) {
return bindKeyspaceIdQuery(keyspace, keyspaceIds, bindQuery(query, vars));
}
public static List<Cursor> toCursorList(List<QueryResult> queryResults) {
ImmutableList.Builder<Cursor> builder = new ImmutableList.Builder<Cursor>();
for (QueryResult queryResult : queryResults) {
builder.add(new SimpleCursor(queryResult));
}
return builder.build();
}
public static List<CursorWithError> fromQueryResponsesToCursorList(List<Query.ResultWithError> resultWithErrorList) {
ImmutableList.Builder<CursorWithError> builder = new ImmutableList.Builder<CursorWithError>();
for (Query.ResultWithError resultWithError : resultWithErrorList) {
builder.add(new CursorWithError(resultWithError));
}
return builder.build();
}
public static final Function<byte[], ByteString> BYTE_ARRAY_TO_BYTE_STRING =
new Function<byte[], ByteString>() {
@Override
public ByteString apply(byte[] from) {
return ByteString.copyFrom(from);
}
};
public static final Function<Map.Entry<byte[], ?>, EntityId> MAP_ENTRY_TO_ENTITY_KEYSPACE_ID =
new Function<Map.Entry<byte[], ?>, EntityId>() {
@Override
public EntityId apply(Map.Entry<byte[], ?> entry) {
return buildEntityId(entry.getKey(), entry.getValue());
}
};
/**
* Represents a type and value in the type system used in query.proto.
*/
protected static class TypedValue {
Query.Type type;
ByteString value;
TypedValue(Object value) {
if (value == null) {
this.type = Query.Type.NULL_TYPE;
this.value = ByteString.EMPTY;
} else if (value instanceof String) {
// String
this.type = Query.Type.VARCHAR;
this.value = ByteString.copyFromUtf8((String) value);
} else if (value instanceof byte[]) {
// Bytes
this.type = Query.Type.VARBINARY;
this.value = ByteString.copyFrom((byte[]) value);
} else if (value instanceof Integer || value instanceof Long || value instanceof Short
|| value instanceof Byte) {
// Int32, Int64, Short, Byte
this.type = Query.Type.INT64;
this.value = ByteString.copyFromUtf8(value.toString());
} else if (value instanceof UnsignedLong) {
// Uint64
this.type = Query.Type.UINT64;
this.value = ByteString.copyFromUtf8(value.toString());
} else if (value instanceof Float || value instanceof Double) {
// Float, Double
this.type = Query.Type.FLOAT64;
this.value = ByteString.copyFromUtf8(value.toString());
} else if (value instanceof Boolean) {
// Boolean
this.type = Query.Type.INT64;
this.value = ByteString.copyFromUtf8(((boolean) value) ? "1" : "0");
} else if (value instanceof BigDecimal) {
// BigDecimal
BigDecimal bigDecimal = (BigDecimal) value;
if (bigDecimal.scale() > MAX_DECIMAL_UNIT) {
// MySQL only supports scale up to 30.
bigDecimal = bigDecimal.setScale(MAX_DECIMAL_UNIT, BigDecimal.ROUND_HALF_UP);
}
this.type = Query.Type.DECIMAL;
this.value = ByteString.copyFromUtf8(bigDecimal.toPlainString());
} else {
throw new IllegalArgumentException(
"unsupported type for Query.Value proto: " + value.getClass());
}
}
}
}
|
|
/*
* Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.rmi.server;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectStreamClass;
import java.io.StreamCorruptedException;
import java.net.URL;
import java.util.*;
import java.security.AccessControlException;
import java.security.Permission;
import java.rmi.server.RMIClassLoader;
/**
* MarshalInputStream is an extension of ObjectInputStream. When resolving
* a class, it reads an object from the stream written by a corresponding
* MarshalOutputStream. If the class to be resolved is not available
* locally, from the first class loader on the execution stack, or from the
* context class loader of the current thread, it will attempt to load the
* class from the location annotated by the sending MarshalOutputStream.
* This location object must be a string representing a path of URLs.
*
* A new MarshalInputStream should be created to deserialize remote objects or
* graphs containing remote objects. Objects are created from the stream
* using the ObjectInputStream.readObject method.
*
* @author Peter Jones
*/
public class MarshalInputStream extends ObjectInputStream {
    /**
     * Value of "java.rmi.server.useCodebaseOnly" property,
     * as cached at class initialization time.
     *
     * The default value is true. That is, the value is true
     * if the property is absent or is not equal to "false".
     * The value is only false when the property is present
     * and is equal to "false".
     */
    private static final boolean useCodebaseOnlyProperty =
        ! java.security.AccessController.doPrivileged(
            new sun.security.action.GetPropertyAction(
                "java.rmi.server.useCodebaseOnly", "true"))
            .equalsIgnoreCase("false");

    /** table to hold sun classes to which access is explicitly permitted */
    // NOTE(review): protected *mutable* static map — subclasses (and same-package code)
    // can widen the permitted set; confirm no callers rely on mutating it.
    protected static Map<String, Class<?>> permittedSunClasses
        = new HashMap<>(3);

    /** if true, don't try superclass first in resolveClass() */
    private boolean skipDefaultResolveClass = false;

    /** callbacks to make when done() called: maps Object to Runnable */
    private final Map<Object, Runnable> doneCallbacks
        = new HashMap<>(3);

    /**
     * if true, load classes (if not available locally) only from the
     * URL specified by the "java.rmi.server.codebase" property.
     */
    private boolean useCodebaseOnly = useCodebaseOnlyProperty;

    /*
     * Fix for 4179055: The remote object services inside the
     * activation daemon use stubs that are in the package
     * sun.rmi.server. Classes for these stubs should be loaded from
     * the classpath by RMI system code and not by the normal
     * unmarshalling process as applications should not need to have
     * permission to access the sun implementation classes.
     *
     * Note: this fix should be redone when API changes may be
     * integrated
     *
     * During parameter unmarshalling RMI needs to explicitly permit
     * access to three sun.* stub classes
     */
    static {
        try {
            // Pre-resolve the two stub classes locally; if either is missing the
            // runtime is broken, so fail class initialization loudly.
            String system =
                "sun.rmi.server.Activation$ActivationSystemImpl_Stub";
            String registry = "sun.rmi.registry.RegistryImpl_Stub";
            permittedSunClasses.put(system, Class.forName(system));
            permittedSunClasses.put(registry, Class.forName(registry));
        } catch (ClassNotFoundException e) {
            throw new NoClassDefFoundError("Missing system class: " +
                                           e.getMessage());
        }
    }

    /**
     * Create a new MarshalInputStream object.
     *
     * @param in the underlying stream written by a MarshalOutputStream
     * @throws IOException if the superclass constructor fails reading the header
     * @throws StreamCorruptedException if the stream header is invalid
     */
    public MarshalInputStream(InputStream in)
        throws IOException, StreamCorruptedException
    {
        super(in);
    }

    /**
     * Returns a callback previously registered via the setDoneCallback
     * method with given key, or null if no callback has yet been registered
     * with that key.
     */
    public Runnable getDoneCallback(Object key) {
        return doneCallbacks.get(key);       // not thread-safe
    }

    /**
     * Registers a callback to make when this stream's done() method is
     * invoked, along with a key for retrieving the same callback instance
     * subsequently from the getDoneCallback method.
     */
    public void setDoneCallback(Object key, Runnable callback) {
        //assert(!doneCallbacks.contains(key));
        doneCallbacks.put(key, callback);    // not thread-safe
    }

    /**
     * Indicates that the user of this MarshalInputStream is done reading
     * objects from it, so all callbacks registered with the setDoneCallback
     * method should now be (synchronously) executed.  When this method
     * returns, there are no more callbacks registered.
     *
     * This method is implicitly invoked by close() before it delegates to
     * the superclass's close method.
     */
    public void done() {
        Iterator<Runnable> iter = doneCallbacks.values().iterator();
        while (iter.hasNext()) {             // not thread-safe
            Runnable callback = iter.next();
            callback.run();
        }
        doneCallbacks.clear();
    }

    /**
     * Closes this stream, implicitly invoking done() first.
     */
    public void close() throws IOException {
        done();
        super.close();
    }

    /**
     * resolveClass is extended to acquire (if present) the location
     * from which to load the specified class.
     * It will find, load, and return the class.
     *
     * @param classDesc descriptor of the class to resolve
     * @return the resolved class
     * @throws ClassNotFoundException if the class cannot be loaded from
     *         either the default loader or the annotated codebase
     */
    protected Class<?> resolveClass(ObjectStreamClass classDesc)
        throws IOException, ClassNotFoundException
    {
        /*
         * Always read annotation written by MarshalOutputStream
         * describing where to load class from.
         */
        Object annotation = readLocation();

        String className = classDesc.getName();

        /*
         * Unless we were told to skip this consideration, choose the
         * "default loader" to simulate the default ObjectInputStream
         * resolveClass mechanism (that is, choose the first non-null
         * loader on the execution stack) to maximize the likelihood of
         * type compatibility with calling code.  (This consideration
         * is skipped during server parameter unmarshalling using the 1.2
         * stub protocol, because there would never be a non-null class
         * loader on the stack in that situation anyway.)
         */
        ClassLoader defaultLoader =
            skipDefaultResolveClass ? null : latestUserDefinedLoader();

        /*
         * If the "java.rmi.server.useCodebaseOnly" property was true or
         * useCodebaseOnly() was called or the annotation is not a String,
         * load from the local loader using the "java.rmi.server.codebase"
         * URL.  Otherwise, load from a loader using the codebase URL in
         * the annotation.
         */
        String codebase = null;
        if (!useCodebaseOnly && annotation instanceof String) {
            codebase = (String) annotation;
        }

        try {
            return RMIClassLoader.loadClass(codebase, className,
                                            defaultLoader);
        } catch (AccessControlException e) {
            // May still be resolvable if it is one of the permitted sun stubs.
            return checkSunClass(className, e);
        } catch (ClassNotFoundException e) {
            /*
             * Fix for 4442373: delegate to ObjectInputStream.resolveClass()
             * to resolve primitive classes.
             */
            try {
                // Heuristic for primitive type names ("int", "boolean", ...):
                // lowercase first char and no package separator.
                if (Character.isLowerCase(className.charAt(0)) &&
                    className.indexOf('.') == -1)
                {
                    return super.resolveClass(classDesc);
                }
            } catch (ClassNotFoundException e2) {
                // Fall through and rethrow the original failure instead.
            }
            throw e;
        }
    }

    /**
     * resolveProxyClass is extended to acquire (if present) the location
     * to determine the class loader to define the proxy class in.
     *
     * @param interfaces names of the interfaces the proxy class implements
     * @return the resolved proxy class
     */
    protected Class<?> resolveProxyClass(String[] interfaces)
        throws IOException, ClassNotFoundException
    {
        /*
         * Always read annotation written by MarshalOutputStream.
         */
        Object annotation = readLocation();

        ClassLoader defaultLoader =
            skipDefaultResolveClass ? null : latestUserDefinedLoader();

        // Same codebase-selection policy as resolveClass() above.
        String codebase = null;
        if (!useCodebaseOnly && annotation instanceof String) {
            codebase = (String) annotation;
        }

        return RMIClassLoader.loadProxyClass(codebase, interfaces,
                                             defaultLoader);
    }

    /*
     * Returns the first non-null class loader up the execution stack, or null
     * if only code from the null class loader is on the stack.
     */
    private static ClassLoader latestUserDefinedLoader() {
        return sun.misc.VM.latestUserDefinedLoader();
    }

    /**
     * Fix for 4179055: Need to assist resolving sun stubs; resolve
     * class locally if it is a "permitted" sun class
     *
     * @param className name of the class that failed to load
     * @param e the access-control failure to rethrow when not permitted
     * @return the locally pre-resolved stub class
     * @throws AccessControlException when the class or the denied permission
     *         is not one of the explicitly permitted combinations
     */
    private Class<?> checkSunClass(String className, AccessControlException e)
        throws AccessControlException
    {
        // ensure that we are giving out a stub for the correct reason
        Permission perm = e.getPermission();
        String name = null;
        if (perm != null) {
            name = perm.getName();
        }

        Class<?> resolvedClass = permittedSunClasses.get(className);

        // if class not permitted, throw the SecurityException
        if ((name == null) ||
            (resolvedClass == null) ||
            ((!name.equals("accessClassInPackage.sun.rmi.server")) &&
             (!name.equals("accessClassInPackage.sun.rmi.registry"))))
        {
            throw e;
        }

        return resolvedClass;
    }

    /**
     * Return the location for the class in the stream.  This method can
     * be overridden by subclasses that store this annotation somewhere
     * else than as the next object in the stream, as is done by this class.
     *
     * @return the codebase annotation object (a String path of URLs, or null)
     */
    protected Object readLocation()
        throws IOException, ClassNotFoundException
    {
        return readObject();
    }

    /**
     * Set a flag to indicate that the superclass's default resolveClass()
     * implementation should not be invoked by our resolveClass().
     */
    void skipDefaultResolveClass() {
        skipDefaultResolveClass = true;
    }

    /**
     * Disable code downloading except from the URL specified by the
     * "java.rmi.server.codebase" property.
     */
    void useCodebaseOnly() {
        useCodebaseOnly = true;
    }
}
|
|
package com.github.steveice10.mc.auth.util;
import com.github.steveice10.mc.auth.exception.request.InvalidCredentialsException;
import com.github.steveice10.mc.auth.exception.request.RequestException;
import com.github.steveice10.mc.auth.exception.request.ServiceUnavailableException;
import com.github.steveice10.mc.auth.exception.request.UserMigratedException;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.Proxy;
import java.net.URL;
import java.util.UUID;
/**
* Utilities for making HTTP requests.
*/
/**
 * Utilities for making HTTP requests.
 */
public class HTTP {
    // Gson instances are thread-safe, so a single shared instance is fine.
    private static final Gson GSON;
    static {
        GSON = new GsonBuilder().registerTypeAdapter(UUID.class, new UUIDSerializer()).create();
    }

    // Utility class; not instantiable.
    private HTTP() {
    }

    /**
     * Makes an HTTP request.
     *
     * @param proxy Proxy to use when making the request.
     * @param url URL to make the request to.
     * @param input Input to provide in the request.
     * @throws RequestException If an error occurs while making the request.
     */
    public static void makeRequest(Proxy proxy, String url, Object input) throws RequestException {
        makeRequest(proxy, url, input, null);
    }

    /**
     * Makes an HTTP request.
     *
     * <p>A null input performs a GET; otherwise the input is serialized to JSON
     * and POSTed. Error payloads returned by the service are translated into
     * the matching RequestException subtype.
     *
     * @param proxy Proxy to use when making the request.
     * @param url URL to make the request to.
     * @param input Input to provide in the request.
     * @param clazz Class to provide the response as.
     * @param <T> Type to provide the response as.
     * @return The response of the request, or null when clazz is null or there is no body.
     * @throws RequestException If an error occurs while making the request.
     */
    public static <T> T makeRequest(Proxy proxy, String url, Object input, Class<T> clazz) throws RequestException {
        JsonElement response = null;
        try {
            String jsonString = input == null
                    ? performGetRequest(proxy, url)
                    : performPostRequest(proxy, url, GSON.toJson(input), "application/json");
            response = GSON.fromJson(jsonString, JsonElement.class);
        } catch(Exception e) {
            // Any transport or parse failure is surfaced uniformly as "service unavailable".
            throw new ServiceUnavailableException("Could not make request to '" + url + "'.", e);
        }

        if(response != null) {
            if(response.isJsonObject()) {
                JsonObject object = response.getAsJsonObject();
                if(object.has("error")) {
                    String error = object.get("error").getAsString();
                    String cause = object.has("cause") ? object.get("cause").getAsString() : "";
                    String errorMessage = object.has("errorMessage") ? object.get("errorMessage").getAsString() : "";
                    if(!error.equals("")) {
                        if(error.equals("ForbiddenOperationException")) {
                            if(cause != null && cause.equals("UserMigratedException")) {
                                throw new UserMigratedException(errorMessage);
                            } else {
                                throw new InvalidCredentialsException(errorMessage);
                            }
                        } else {
                            throw new RequestException(errorMessage);
                        }
                    }
                }
            }

            if(clazz != null) {
                return GSON.fromJson(response, clazz);
            }
        }

        return null;
    }

    /**
     * Opens an HttpURLConnection to the given URL through the given proxy,
     * with 15s connect/read timeouts and caching disabled.
     */
    private static HttpURLConnection createUrlConnection(Proxy proxy, String url) throws IOException {
        if(proxy == null) {
            throw new IllegalArgumentException("Proxy cannot be null.");
        }
        if(url == null) {
            throw new IllegalArgumentException("URL cannot be null.");
        }

        HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection(proxy);
        connection.setConnectTimeout(15000);
        connection.setReadTimeout(15000);
        connection.setUseCaches(false);
        return connection;
    }

    /**
     * Performs a GET request and returns the response body (normal or error
     * stream) as a string, or "" when there is no body.
     */
    private static String performGetRequest(Proxy proxy, String url) throws IOException {
        if(proxy == null) {
            throw new IllegalArgumentException("Proxy cannot be null.");
        }
        if(url == null) {
            throw new IllegalArgumentException("URL cannot be null.");
        }

        HttpURLConnection connection = createUrlConnection(proxy, url);
        connection.setDoInput(true);
        return readResponse(connection);
    }

    /**
     * Performs a POST request with the given body and content type and returns
     * the response body (normal or error stream) as a string, or "" when there
     * is no body.
     */
    private static String performPostRequest(Proxy proxy, String url, String post, String type) throws IOException {
        if(proxy == null) {
            throw new IllegalArgumentException("Proxy cannot be null.");
        }
        if(url == null) {
            throw new IllegalArgumentException("URL cannot be null.");
        }
        if(post == null) {
            throw new IllegalArgumentException("Post cannot be null.");
        }
        if(type == null) {
            throw new IllegalArgumentException("Type cannot be null.");
        }

        byte[] bytes = post.getBytes("UTF-8");

        HttpURLConnection connection = createUrlConnection(proxy, url);
        connection.setRequestProperty("Content-Type", type + "; charset=utf-8");
        connection.setRequestProperty("Content-Length", String.valueOf(bytes.length));
        connection.setDoInput(true);
        connection.setDoOutput(true);

        // try-with-resources replaces the old close-and-swallow pattern, so a
        // failed close is no longer silently ignored.
        try(OutputStream out = connection.getOutputStream()) {
            out.write(bytes);
        }

        return readResponse(connection);
    }

    /**
     * Reads the full response body from the connection. Uses the error stream
     * for non-200 responses; returns "" when no stream is available.
     *
     * <p>Fix: the body is decoded as UTF-8 explicitly. Previously the platform
     * default charset was used, which corrupted non-ASCII responses on systems
     * with a non-UTF-8 default even though requests declare charset=utf-8.
     */
    private static String readResponse(HttpURLConnection connection) throws IOException {
        int responseCode = connection.getResponseCode();
        InputStream in = responseCode == 200 ? connection.getInputStream() : connection.getErrorStream();
        if(in == null) {
            return "";
        }

        StringBuilder result = new StringBuilder();
        try(BufferedReader reader = new BufferedReader(new InputStreamReader(in, "UTF-8"))) {
            String line;
            while((line = reader.readLine()) != null) {
                result.append(line).append("\n");
            }
        }
        return result.toString();
    }
}
|
|
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.databasemigrationservice.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p/>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dms-2016-01-01/DescribeEndpoints" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeEndpointsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* Filters applied to the describe action.
* </p>
* <p>
* Valid filter names: endpoint-arn | endpoint-type | endpoint-id | engine-name
* </p>
*/
private java.util.List<Filter> filters;
/**
* <p>
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the
* remaining results can be retrieved.
* </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
* </p>
*/
private Integer maxRecords;
/**
* <p>
* An optional pagination token provided by a previous request. If this parameter is specified, the response
* includes only records beyond the marker, up to the value specified by <code>MaxRecords</code>.
* </p>
*/
private String marker;
/**
* <p>
* Filters applied to the describe action.
* </p>
* <p>
* Valid filter names: endpoint-arn | endpoint-type | endpoint-id | engine-name
* </p>
*
* @return Filters applied to the describe action.</p>
* <p>
* Valid filter names: endpoint-arn | endpoint-type | endpoint-id | engine-name
*/
public java.util.List<Filter> getFilters() {
return filters;
}
/**
* <p>
* Filters applied to the describe action.
* </p>
* <p>
* Valid filter names: endpoint-arn | endpoint-type | endpoint-id | engine-name
* </p>
*
* @param filters
* Filters applied to the describe action.</p>
* <p>
* Valid filter names: endpoint-arn | endpoint-type | endpoint-id | engine-name
*/
public void setFilters(java.util.Collection<Filter> filters) {
if (filters == null) {
this.filters = null;
return;
}
this.filters = new java.util.ArrayList<Filter>(filters);
}
/**
* <p>
* Filters applied to the describe action.
* </p>
* <p>
* Valid filter names: endpoint-arn | endpoint-type | endpoint-id | engine-name
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setFilters(java.util.Collection)} or {@link #withFilters(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param filters
* Filters applied to the describe action.</p>
* <p>
* Valid filter names: endpoint-arn | endpoint-type | endpoint-id | engine-name
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeEndpointsRequest withFilters(Filter... filters) {
if (this.filters == null) {
setFilters(new java.util.ArrayList<Filter>(filters.length));
}
for (Filter ele : filters) {
this.filters.add(ele);
}
return this;
}
/**
* <p>
* Filters applied to the describe action.
* </p>
* <p>
* Valid filter names: endpoint-arn | endpoint-type | endpoint-id | engine-name
* </p>
*
* @param filters
* Filters applied to the describe action.</p>
* <p>
* Valid filter names: endpoint-arn | endpoint-type | endpoint-id | engine-name
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeEndpointsRequest withFilters(java.util.Collection<Filter> filters) {
setFilters(filters);
return this;
}
/**
* <p>
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the
* remaining results can be retrieved.
* </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
* </p>
*
* @param maxRecords
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the
* remaining results can be retrieved. </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
*/
public void setMaxRecords(Integer maxRecords) {
this.maxRecords = maxRecords;
}
/**
* <p>
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the
* remaining results can be retrieved.
* </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
* </p>
*
* @return The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the
* remaining results can be retrieved. </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
*/
public Integer getMaxRecords() {
return this.maxRecords;
}
/**
* <p>
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the
* remaining results can be retrieved.
* </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
* </p>
*
* @param maxRecords
* The maximum number of records to include in the response. If more records exist than the specified
* <code>MaxRecords</code> value, a pagination token called a marker is included in the response so that the
* remaining results can be retrieved. </p>
* <p>
* Default: 100
* </p>
* <p>
* Constraints: Minimum 20, maximum 100.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeEndpointsRequest withMaxRecords(Integer maxRecords) {
setMaxRecords(maxRecords);
return this;
}
/**
* <p>
* An optional pagination token provided by a previous request. If this parameter is specified, the response
* includes only records beyond the marker, up to the value specified by <code>MaxRecords</code>.
* </p>
*
* @param marker
* An optional pagination token provided by a previous request. If this parameter is specified, the response
* includes only records beyond the marker, up to the value specified by <code>MaxRecords</code>.
*/
public void setMarker(String marker) {
this.marker = marker;
}
/**
* <p>
* An optional pagination token provided by a previous request. If this parameter is specified, the response
* includes only records beyond the marker, up to the value specified by <code>MaxRecords</code>.
* </p>
*
* @return An optional pagination token provided by a previous request. If this parameter is specified, the response
* includes only records beyond the marker, up to the value specified by <code>MaxRecords</code>.
*/
public String getMarker() {
return this.marker;
}
/**
* <p>
* An optional pagination token provided by a previous request. If this parameter is specified, the response
* includes only records beyond the marker, up to the value specified by <code>MaxRecords</code>.
* </p>
*
* @param marker
* An optional pagination token provided by a previous request. If this parameter is specified, the response
* includes only records beyond the marker, up to the value specified by <code>MaxRecords</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeEndpointsRequest withMarker(String marker) {
setMarker(marker);
return this;
}
/**
 * Renders this request as {@code {Field: value, ...}} for logging and debugging.
 * Only non-null fields are included; a trailing comma follows every field except the last one
 * rendered, matching the generated-SDK format.
 *
 * @return a string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    final StringBuilder buf = new StringBuilder("{");
    if (getFilters() != null) {
        buf.append("Filters: ").append(getFilters()).append(",");
    }
    if (getMaxRecords() != null) {
        buf.append("MaxRecords: ").append(getMaxRecords()).append(",");
    }
    if (getMarker() != null) {
        buf.append("Marker: ").append(getMarker());
    }
    return buf.append("}").toString();
}
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so this also rejects null arguments.
    if (!(obj instanceof DescribeEndpointsRequest)) {
        return false;
    }
    DescribeEndpointsRequest that = (DescribeEndpointsRequest) obj;
    // For each field: exactly one side null -> unequal; both non-null -> compare by equals().
    if ((that.getFilters() == null) != (this.getFilters() == null)) {
        return false;
    }
    if (that.getFilters() != null && !that.getFilters().equals(this.getFilters())) {
        return false;
    }
    if ((that.getMaxRecords() == null) != (this.getMaxRecords() == null)) {
        return false;
    }
    if (that.getMaxRecords() != null && !that.getMaxRecords().equals(this.getMaxRecords())) {
        return false;
    }
    if ((that.getMarker() == null) != (this.getMarker() == null)) {
        return false;
    }
    if (that.getMarker() != null && !that.getMarker().equals(this.getMarker())) {
        return false;
    }
    return true;
}
@Override
public int hashCode() {
    // Standard 31-based accumulation over the same fields that equals() compares,
    // treating a null field as 0 — consistent with equals().
    final int prime = 31;
    int result = 1;
    result = prime * result + (getFilters() == null ? 0 : getFilters().hashCode());
    result = prime * result + (getMaxRecords() == null ? 0 : getMaxRecords().hashCode());
    result = prime * result + (getMarker() == null ? 0 : getMarker().hashCode());
    return result;
}
@Override
public DescribeEndpointsRequest clone() {
    // Delegates to super.clone(); the cast is safe because clone() preserves the runtime class.
    // NOTE(review): presumably a shallow copy — the superclass clone contract is not visible here.
    return (DescribeEndpointsRequest) super.clone();
}
}
|
|
package com.mapr.music.service;
import com.mapr.music.dao.*;
import com.mapr.music.dto.RateDto;
import com.mapr.music.exception.ResourceNotFoundException;
import com.mapr.music.model.*;
import org.apache.commons.beanutils.PropertyUtilsBean;
import javax.inject.Inject;
import javax.inject.Named;
import java.lang.reflect.InvocationTargetException;
import java.security.Principal;
import java.util.List;
import java.util.UUID;
public class RateService {
private final AlbumRateDao albumRateDao;
private final ArtistRateDao artistRateDao;
private final AlbumDao albumDao;
private final ArtistDao artistDao;
private final MaprDbDao<User> userDao;
@Inject
public RateService(AlbumRateDao albumRateDao,
ArtistRateDao artistRateDao,
@Named("albumDao") AlbumDao albumDao,
@Named("artistDao") ArtistDao artistDao,
@Named("userDao") MaprDbDao<User> userDao) {
this.albumRateDao = albumRateDao;
this.artistRateDao = artistRateDao;
this.albumDao = albumDao;
this.artistDao = artistDao;
this.userDao = userDao;
}
/**
* Returns User's rate for the specified Album.
*
* @param user user's principal.
* @param albumId album's identifier.
* @return User's rate for the specified Album. Empty rate will be returned in case when there is no such rate.
*/
public RateDto getAlbumRate(Principal user, String albumId) {
if (user == null || user.getName() == null || user.getName().isEmpty()) {
return new RateDto();
}
String userId = user.getName();
if (!userDao.exists(userId)) {
throw new ResourceNotFoundException("User with id '" + userId + "' not found");
}
if (!albumDao.exists(albumId)) {
throw new ResourceNotFoundException("Album with id '" + albumId + "' not found");
}
AlbumRate rate = albumRateDao.getRate(userId, albumId);
return (rate != null) ? albumRateToDto(rate) : new RateDto();
}
/**
* Allows to rate specified Album on behalf of specified User.
*
* @param userId user's identifier.
* @param albumId album's identifier.
* @param albumRate rate Data Transfer Object.
* @return created or updated Album's rate.
*/
public RateDto rateAlbum(String userId, String albumId, RateDto albumRate) {
if (albumRate == null) {
throw new IllegalArgumentException("Album rate can not be null");
}
return rateAlbum(userId, albumId, albumRate.getRating());
}
/**
* Allows to rate specified Album on behalf of specified User.
*
* @param userId user's identifier.
* @param albumId album's identifier.
* @param rate rate.
* @return created or updated Album's rate.
*/
public RateDto rateAlbum(String userId, String albumId, double rate) {
if (userId == null || userId.isEmpty()) {
throw new IllegalArgumentException("User id can not be empty");
}
if (albumId == null || albumId.isEmpty()) {
throw new IllegalArgumentException("Album id can not be empty");
}
if (!userDao.exists(userId)) {
throw new ResourceNotFoundException("User with id '" + userId + "' not found");
}
Album existingAlbum = albumDao.getById(albumId);
if (existingAlbum == null) {
throw new ResourceNotFoundException("Album with id '" + albumId + "' not found");
}
AlbumRate possibleExistingRate = albumRateDao.getRate(userId, albumId);
if (possibleExistingRate != null) {
possibleExistingRate.setRating(rate);
AlbumRate newAlbumRate = albumRateDao.update(possibleExistingRate.getId(), possibleExistingRate);
return recomputeAlbumRate(newAlbumRate, existingAlbum);
}
AlbumRate albumRate = new AlbumRate();
albumRate.setId(UUID.randomUUID().toString());
albumRate.setUserId(userId);
albumRate.setDocumentId(albumId);
albumRate.setRating(rate);
AlbumRate newAlbumRate = albumRateDao.create(albumRate);
return recomputeAlbumRate(newAlbumRate, existingAlbum);
}
private RateDto recomputeAlbumRate(AlbumRate newAlbumRate, Album existingAlbum) {
List<AlbumRate> albumRates = albumRateDao.getByAlbumId(newAlbumRate.getDocumentId());
double aggregatedRating = albumRates.stream().mapToDouble(AlbumRate::getRating).sum() / albumRates.size();
existingAlbum.setRating(aggregatedRating);
albumDao.update(existingAlbum.getId(), existingAlbum);
return new RateDto(aggregatedRating);
}
private RateDto recomputeArtistRate(ArtistRate newArtistRate, Artist existingArtist) {
List<ArtistRate> artistRates = artistRateDao.getByArtistId(newArtistRate.getDocumentId());
double aggregatedRating = artistRates.stream().mapToDouble(ArtistRate::getRating).sum() / artistRates.size();
existingArtist.setRating(aggregatedRating);
artistDao.update(existingArtist.getId(), existingArtist);
return new RateDto(aggregatedRating);
}
/**
* Allows to rate specified Album on behalf of specified User.
*
* @param user user's principal.
* @param albumId album's identifier.
* @param albumRate rate Data Transfer Object.
* @return created or updated Album's rate.
*/
public RateDto rateAlbum(Principal user, String albumId, RateDto albumRate) {
if (user == null) {
throw new IllegalArgumentException("User principal can not be null");
}
if (albumRate == null) {
throw new IllegalArgumentException("Album rate can not be null");
}
return rateAlbum(user.getName(), albumId, albumRate.getRating());
}
/**
* Returns User's rate for the specified Artist.
*
* @param user user's principal.
* @param artistId artists's identifier.
* @return User's rate for the specified Artist. Empty rate will be returned in case when there is no such rate.
*/
public RateDto getArtistRate(Principal user, String artistId) {
if (user == null || user.getName() == null || user.getName().isEmpty()) {
return new RateDto();
}
String userId = user.getName();
if (!userDao.exists(userId)) {
throw new ResourceNotFoundException("User with id '" + userId + "' not found");
}
if (!artistDao.exists(artistId)) {
throw new ResourceNotFoundException("Artist with id '" + artistId + "' not found");
}
ArtistRate rate = artistRateDao.getRate(userId, artistId);
return (rate != null) ? artistRateToDto(rate) : new RateDto();
}
/**
* Allows to rate specified Artist on behalf of specified User.
*
* @param userId user's identifier.
* @param artistId artist's identifier.
* @param artistRate rate Data Transfer Object.
* @return created or updated Artist's rate.
*/
public RateDto rateArtist(String userId, String artistId, RateDto artistRate) {
if (artistRate == null) {
throw new IllegalArgumentException("Artist rate can not be null");
}
return rateArtist(userId, artistId, artistRate.getRating());
}
/**
* Allows to rate specified Artist on behalf of specified User.
*
* @param userId user's identifier.
* @param artistId artist's identifier.
* @param rate rate.
* @return created or updated Artist's rate.
*/
public RateDto rateArtist(String userId, String artistId, double rate) {
if (userId == null || userId.isEmpty()) {
throw new IllegalArgumentException("User id can not be empty");
}
if (artistId == null || artistId.isEmpty()) {
throw new IllegalArgumentException("Artist id can not be empty");
}
if (!userDao.exists(userId)) {
throw new ResourceNotFoundException("User with id '" + userId + "' not found");
}
Artist existingArtist = artistDao.getById(artistId);
if (existingArtist == null) {
throw new ResourceNotFoundException("Artist with id '" + artistId + "' not found");
}
ArtistRate possibleExistingRate = artistRateDao.getRate(userId, artistId);
if (possibleExistingRate != null) {
possibleExistingRate.setRating(rate);
ArtistRate newArtistRate = artistRateDao.update(possibleExistingRate.getId(), possibleExistingRate);
return recomputeArtistRate(newArtistRate, existingArtist);
}
ArtistRate artistRate = new ArtistRate();
artistRate.setId(UUID.randomUUID().toString());
artistRate.setUserId(userId);
artistRate.setDocumentId(artistId);
artistRate.setRating(rate);
ArtistRate newArtistRate = artistRateDao.create(artistRate);
return recomputeArtistRate(newArtistRate, existingArtist);
}
/**
* Allows to rate specified Artist on behalf of specified User.
*
* @param user user's principal.
* @param artistId artist's identifier.
* @param artistRate rate Data Transfer Object.
* @return created or updated Artist's rate.
*/
public RateDto rateArtist(Principal user, String artistId, RateDto artistRate) {
if (user == null) {
throw new IllegalArgumentException("User principal can not be null");
}
if (artistRate == null) {
throw new IllegalArgumentException("Artist rate can not be null");
}
return rateArtist(user.getName(), artistId, artistRate.getRating());
}
private RateDto artistRateToDto(ArtistRate artistRate) {
RateDto rateDto = new RateDto();
PropertyUtilsBean propertyUtilsBean = new PropertyUtilsBean();
try {
propertyUtilsBean.copyProperties(rateDto, artistRate);
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
throw new RuntimeException("Can not create artist rate Data Transfer Object", e);
}
return rateDto;
}
private RateDto albumRateToDto(AlbumRate albumRate) {
RateDto rateDto = new RateDto();
PropertyUtilsBean propertyUtilsBean = new PropertyUtilsBean();
try {
propertyUtilsBean.copyProperties(rateDto, albumRate);
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
throw new RuntimeException("Can not create album rate Data Transfer Object", e);
}
return rateDto;
}
}
|
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/tasks/v2beta2/cloudtasks.proto
package com.google.cloud.tasks.v2beta2;
/**
*
*
* <pre>
* Request message for canceling a lease using
* [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2beta2.CancelLeaseRequest}
*/
public final class CancelLeaseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.tasks.v2beta2.CancelLeaseRequest)
CancelLeaseRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CancelLeaseRequest.newBuilder() to construct.
// Builder-based constructor, invoked by Builder.buildPartial().
private CancelLeaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor: initializes fields to their proto3 defaults (empty name, enum value 0).
private CancelLeaseRequest() {
  name_ = "";
  responseView_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Factory hook used by the protobuf runtime to create fresh empty instances.
  return new CancelLeaseRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  // Fields read off the wire that this generated class does not recognize.
  return this.unknownFields;
}
// Wire-format parsing constructor. Reads tags until end of input; each tag value is
// (fieldNumber << 3) | wireType, so the case labels below map directly to the proto fields.
private CancelLeaseRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of stream
          done = true;
          break;
        case 10: // field 1 (name), length-delimited string
          {
            java.lang.String s = input.readStringRequireUtf8();
            name_ = s;
            break;
          }
        case 18: // field 2 (schedule_time), embedded Timestamp message
          {
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (scheduleTime_ != null) {
              // Field already seen: merge the new payload into the existing value.
              subBuilder = scheduleTime_.toBuilder();
            }
            scheduleTime_ =
                input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(scheduleTime_);
              scheduleTime_ = subBuilder.buildPartial();
            }
            break;
          }
        case 24: // field 3 (response_view), varint enum — raw value kept as-is
          {
            int rawValue = input.readEnum();
            responseView_ = rawValue;
            break;
          }
        default:
          {
            // Unknown field: preserve it; a false return means end of group/stream.
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always seal what was parsed so far, even on failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor for this message type, generated from cloudtasks.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_CancelLeaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Maps descriptor fields to the generated message/builder classes for reflective access.
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_CancelLeaseRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.tasks.v2beta2.CancelLeaseRequest.class,
          com.google.cloud.tasks.v2beta2.CancelLeaseRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; converted lazily by getName()/getNameBytes().
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the ByteString and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String as UTF-8 bytes and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int SCHEDULE_TIME_FIELD_NUMBER = 2;
// Message field: null means "not set" (see hasScheduleTime()).
private com.google.protobuf.Timestamp scheduleTime_;
/**
*
*
* <pre>
* Required. The task's current schedule time, available in the
* [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] returned by
* [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] response or
* [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] response. This restriction is
* to ensure that your worker currently holds the lease.
* </pre>
*
* <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the scheduleTime field is set.
*/
@java.lang.Override
public boolean hasScheduleTime() {
  // Presence for a message field is simply non-null storage.
  return scheduleTime_ != null;
}
/**
*
*
* <pre>
* Required. The task's current schedule time, available in the
* [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] returned by
* [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] response or
* [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] response. This restriction is
* to ensure that your worker currently holds the lease.
* </pre>
*
* <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The scheduleTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getScheduleTime() {
  // Never returns null: an unset field yields the Timestamp default instance.
  return scheduleTime_ == null
      ? com.google.protobuf.Timestamp.getDefaultInstance()
      : scheduleTime_;
}
/**
*
*
* <pre>
* Required. The task's current schedule time, available in the
* [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] returned by
* [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] response or
* [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] response. This restriction is
* to ensure that your worker currently holds the lease.
* </pre>
*
* <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getScheduleTimeOrBuilder() {
  // On an immutable message this is always the message itself, never a builder.
  return getScheduleTime();
}
public static final int RESPONSE_VIEW_FIELD_NUMBER = 3;
// Stored as the raw wire enum number so unrecognized values survive round-trips.
private int responseView_;
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta2.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta2.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta2.Task.View response_view = 3;</code>
*
* @return The enum numeric value on the wire for responseView.
*/
@java.lang.Override
public int getResponseViewValue() {
  // Raw numeric form; may not correspond to any declared Task.View constant.
  return responseView_;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta2.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta2.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta2.Task.View response_view = 3;</code>
*
* @return The responseView.
*/
@java.lang.Override
public com.google.cloud.tasks.v2beta2.Task.View getResponseView() {
  @SuppressWarnings("deprecation")
  com.google.cloud.tasks.v2beta2.Task.View result =
      com.google.cloud.tasks.v2beta2.Task.View.valueOf(responseView_);
  // valueOf returns null for numbers not in the enum; surface those as UNRECOGNIZED.
  return result == null ? com.google.cloud.tasks.v2beta2.Task.View.UNRECOGNIZED : result;
}
// Tri-state cache: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // Proto3 message with no required fields: always initialized once checked.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 serialization: fields at their default value are omitted from the wire.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  if (scheduleTime_ != null) {
    output.writeMessage(2, getScheduleTime());
  }
  if (responseView_ != com.google.cloud.tasks.v2beta2.Task.View.VIEW_UNSPECIFIED.getNumber()) {
    output.writeEnum(3, responseView_);
  }
  // Preserve fields this version of the class did not recognize.
  unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Memoized; -1 means "not computed yet". Must mirror writeTo() field-for-field.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  if (scheduleTime_ != null) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getScheduleTime());
  }
  if (responseView_ != com.google.cloud.tasks.v2beta2.Task.View.VIEW_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, responseView_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.tasks.v2beta2.CancelLeaseRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.tasks.v2beta2.CancelLeaseRequest other =
      (com.google.cloud.tasks.v2beta2.CancelLeaseRequest) obj;
  // Field-by-field comparison: name, optional schedule_time (presence then value),
  // raw response_view number, and unknown fields.
  if (!getName().equals(other.getName())) return false;
  if (hasScheduleTime() != other.hasScheduleTime()) return false;
  if (hasScheduleTime()) {
    if (!getScheduleTime().equals(other.getScheduleTime())) return false;
  }
  if (responseView_ != other.responseView_) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized; 0 is used as the "not computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  // schedule_time participates only when present, matching equals().
  if (hasScheduleTime()) {
    hash = (37 * hash) + SCHEDULE_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getScheduleTime().hashCode();
  }
  hash = (37 * hash) + RESPONSE_VIEW_FIELD_NUMBER;
  hash = (53 * hash) + responseView_;
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points. The ByteBuffer/ByteString/byte[] overloads parse a complete
// serialized message; the stream overloads read one message from an InputStream or
// CodedInputStream; parseDelimitedFrom expects a varint length prefix before the message.
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  // Instance-level builder factory required by the Message interface.
  return newBuilder();
}
// Builder factories: an empty builder, and one pre-populated from a prototype message.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.tasks.v2beta2.CancelLeaseRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder; anything else seeds the builder with this.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  // Nested-builder variant used when this message is built as a field of a parent builder.
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request message for canceling a lease using
* [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2beta2.CancelLeaseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2beta2.CancelLeaseRequest)
com.google.cloud.tasks.v2beta2.CancelLeaseRequestOrBuilder {
// Same descriptor as the message class; builders and messages share one type descriptor.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_CancelLeaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Reflective field accessors, shared with the message class.
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_CancelLeaseRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.tasks.v2beta2.CancelLeaseRequest.class,
          com.google.cloud.tasks.v2beta2.CancelLeaseRequest.Builder.class);
}
// Construct using com.google.cloud.tasks.v2beta2.CancelLeaseRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
// Parent-attached constructor used when this message is a field of an enclosing builder.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
  // Intentionally empty: this message has no fields that need eager sub-builders.
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
  super.clear();
  name_ = "";
  // Reset schedule_time through whichever representation (plain field or sub-builder) is active.
  if (scheduleTimeBuilder_ == null) {
    scheduleTime_ = null;
  } else {
    scheduleTime_ = null;
    scheduleTimeBuilder_ = null;
  }
  responseView_ = 0;
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  // Instance-level access to the shared type descriptor.
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_CancelLeaseRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.tasks.v2beta2.CancelLeaseRequest getDefaultInstanceForType() {
  // Singleton all-defaults instance of the message type this builder produces.
  return com.google.cloud.tasks.v2beta2.CancelLeaseRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.tasks.v2beta2.CancelLeaseRequest build() {
  com.google.cloud.tasks.v2beta2.CancelLeaseRequest result = buildPartial();
  // Unlike buildPartial(), build() rejects messages that fail isInitialized().
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.cloud.tasks.v2beta2.CancelLeaseRequest buildPartial() {
  com.google.cloud.tasks.v2beta2.CancelLeaseRequest result =
      new com.google.cloud.tasks.v2beta2.CancelLeaseRequest(this);
  result.name_ = name_;
  // Take schedule_time from the sub-builder when one is active, else the plain field.
  if (scheduleTimeBuilder_ == null) {
    result.scheduleTime_ = scheduleTime_;
  } else {
    result.scheduleTime_ = scheduleTimeBuilder_.build();
  }
  result.responseView_ = responseView_;
  onBuilt();
  return result;
}
@java.lang.Override
public Builder clone() {
  // Covariant override; the superclass performs the builder copy.
  return super.clone();
}
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  // Reflective setter; overridden only to narrow the return type to Builder.
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  // Reflective clear; overridden only to narrow the return type to Builder.
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  // Reflective oneof clear; overridden only to narrow the return type to Builder.
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  // Reflective repeated-field setter; overridden only to narrow the return type.
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  // Reflective repeated-field append; overridden only to narrow the return type.
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for the concrete type; otherwise fall back to reflective merge.
  if (other instanceof com.google.cloud.tasks.v2beta2.CancelLeaseRequest) {
    return mergeFrom((com.google.cloud.tasks.v2beta2.CancelLeaseRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Typed merge: only non-default fields of 'other' overwrite/merge into this builder.
public Builder mergeFrom(com.google.cloud.tasks.v2beta2.CancelLeaseRequest other) {
  if (other == com.google.cloud.tasks.v2beta2.CancelLeaseRequest.getDefaultInstance())
    return this;
  if (!other.getName().isEmpty()) {
    name_ = other.name_;
    onChanged();
  }
  if (other.hasScheduleTime()) {
    mergeScheduleTime(other.getScheduleTime());
  }
  if (other.responseView_ != 0) {
    setResponseViewValue(other.getResponseViewValue());
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  // Proto3 message with no required fields: always initialized.
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.cloud.tasks.v2beta2.CancelLeaseRequest parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was parsed before the failure so partial data is still merged below.
    parsedMessage =
        (com.google.cloud.tasks.v2beta2.CancelLeaseRequest) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (!(ref instanceof java.lang.String)) {
    // Decode the ByteString and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * Returns the required task name as a UTF-8 {@code ByteString}, caching the
 * encoded form in {@code name_} so repeated calls avoid re-encoding.
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for name.
 */
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object cached = name_;
  if (!(cached instanceof String)) {
    // Already stored as bytes; hand it back unchanged.
    return (com.google.protobuf.ByteString) cached;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) cached);
  name_ = encoded;
  return encoded;
}
/**
 * Sets the required task name, e.g.
 * {@code projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID}.
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The name to set.
 * @return This builder for chaining.
 */
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
 * Resets the task name to its default (empty) value.
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
 * Sets the task name from pre-encoded UTF-8 bytes.
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject byte sequences that are not valid UTF-8 (proto3 string contract).
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private com.google.protobuf.Timestamp scheduleTime_;
// Lazily-created builder support for the schedule_time message field; once
// non-null it owns the field value instead of scheduleTime_.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
scheduleTimeBuilder_;
/**
 * Required. The task's current schedule time, available in the
 * [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] returned by
 * the LeaseTasks or RenewLease response. This restriction is to ensure that
 * your worker currently holds the lease.
 *
 * <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the scheduleTime field is set.
 */
public boolean hasScheduleTime() {
return scheduleTimeBuilder_ != null || scheduleTime_ != null;
}
/**
 * Required. The task's current schedule time (see {@link #hasScheduleTime()}).
 *
 * <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The scheduleTime, or the default Timestamp instance when unset.
 */
public com.google.protobuf.Timestamp getScheduleTime() {
if (scheduleTimeBuilder_ == null) {
// No nested builder yet: read the plain field, substituting the default
// instance for null so callers never see null.
return scheduleTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: scheduleTime_;
} else {
return scheduleTimeBuilder_.getMessage();
}
}
/**
 * Sets the required current schedule time of the task (proof the worker
 * still holds the lease).
 *
 * <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setScheduleTime(com.google.protobuf.Timestamp value) {
if (scheduleTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
scheduleTime_ = value;
onChanged();
} else {
// A nested builder exists and owns the field; delegate to it.
scheduleTimeBuilder_.setMessage(value);
}
return this;
}
/**
 * Sets the required current schedule time from a Timestamp builder.
 *
 * <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setScheduleTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (scheduleTimeBuilder_ == null) {
scheduleTime_ = builderForValue.build();
onChanged();
} else {
scheduleTimeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * Merges {@code value} into the current schedule time, following protobuf
 * message-merge semantics (field-by-field, not wholesale replacement).
 *
 * <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder mergeScheduleTime(com.google.protobuf.Timestamp value) {
if (scheduleTimeBuilder_ == null) {
if (scheduleTime_ != null) {
// Field already set: merge the incoming value into the existing one.
scheduleTime_ =
com.google.protobuf.Timestamp.newBuilder(scheduleTime_)
.mergeFrom(value)
.buildPartial();
} else {
scheduleTime_ = value;
}
onChanged();
} else {
scheduleTimeBuilder_.mergeFrom(value);
}
return this;
}
/**
 * Clears the schedule_time field, discarding any nested builder.
 *
 * <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder clearScheduleTime() {
if (scheduleTimeBuilder_ == null) {
scheduleTime_ = null;
onChanged();
} else {
scheduleTime_ = null;
scheduleTimeBuilder_ = null;
}
return this;
}
/**
 * Returns a mutable builder for the schedule_time field, creating the
 * nested single-field builder on first use.
 *
 * <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.protobuf.Timestamp.Builder getScheduleTimeBuilder() {
onChanged();
return getScheduleTimeFieldBuilder().getBuilder();
}
/**
 * Returns a read-only view of the schedule_time field without forcing
 * creation of a nested builder.
 *
 * <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.protobuf.TimestampOrBuilder getScheduleTimeOrBuilder() {
if (scheduleTimeBuilder_ != null) {
return scheduleTimeBuilder_.getMessageOrBuilder();
} else {
return scheduleTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: scheduleTime_;
}
}
/**
 * Lazily creates (and thereafter returns) the single-field builder for
 * schedule_time. After creation the builder owns the field value and the
 * plain {@code scheduleTime_} reference is released.
 *
 * <code>.google.protobuf.Timestamp schedule_time = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getScheduleTimeFieldBuilder() {
if (scheduleTimeBuilder_ == null) {
scheduleTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getScheduleTime(), getParentForChildren(), isClean());
// Ownership of the value has transferred to the builder.
scheduleTime_ = null;
}
return scheduleTimeBuilder_;
}
// Enum field stored as its raw wire integer so unrecognized values survive
// a parse/serialize round trip.
private int responseView_ = 0;
/**
 * The response_view specifies which subset of the
 * [Task][google.cloud.tasks.v2beta2.Task] will be returned. Defaults to
 * BASIC; FULL additionally requires the `cloudtasks.tasks.fullView`
 * IAM permission on the Task resource.
 *
 * <code>.google.cloud.tasks.v2beta2.Task.View response_view = 3;</code>
 *
 * @return The enum numeric value on the wire for responseView.
 */
@java.lang.Override
public int getResponseViewValue() {
return responseView_;
}
/**
 * Sets response_view from its raw wire integer (no validation is performed;
 * unknown values are preserved as-is).
 *
 * <code>.google.cloud.tasks.v2beta2.Task.View response_view = 3;</code>
 *
 * @param value The enum numeric value on the wire for responseView to set.
 * @return This builder for chaining.
 */
public Builder setResponseViewValue(int value) {
responseView_ = value;
onChanged();
return this;
}
/**
 * The response_view specifies which subset of the
 * [Task][google.cloud.tasks.v2beta2.Task] will be returned. Defaults to
 * BASIC; FULL additionally requires the `cloudtasks.tasks.fullView`
 * IAM permission on the Task resource.
 *
 * <code>.google.cloud.tasks.v2beta2.Task.View response_view = 3;</code>
 *
 * @return The responseView, or {@code UNRECOGNIZED} if the wire value is
 *     unknown to this generated code version.
 */
@java.lang.Override
public com.google.cloud.tasks.v2beta2.Task.View getResponseView() {
  // forNumber is the non-deprecated replacement for valueOf(int): it maps a
  // wire value to the enum constant, returning null for unknown values.
  com.google.cloud.tasks.v2beta2.Task.View result =
      com.google.cloud.tasks.v2beta2.Task.View.forNumber(responseView_);
  return result == null ? com.google.cloud.tasks.v2beta2.Task.View.UNRECOGNIZED : result;
}
/**
 * Sets response_view from a typed enum constant.
 *
 * <code>.google.cloud.tasks.v2beta2.Task.View response_view = 3;</code>
 *
 * @param value The responseView to set.
 * @return This builder for chaining.
 */
public Builder setResponseView(com.google.cloud.tasks.v2beta2.Task.View value) {
if (value == null) {
throw new NullPointerException();
}
// Store the wire integer so the field stays round-trip safe.
responseView_ = value.getNumber();
onChanged();
return this;
}
/**
 * Resets response_view to its default (BASIC, wire value 0).
 *
 * <code>.google.cloud.tasks.v2beta2.Task.View response_view = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearResponseView() {
responseView_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2beta2.CancelLeaseRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CancelLeaseRequest)
// Shared immutable default instance; also serves as the identity check in
// mergeFrom and as the prototype for getDefaultInstanceForType().
private static final com.google.cloud.tasks.v2beta2.CancelLeaseRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.tasks.v2beta2.CancelLeaseRequest();
}
public static com.google.cloud.tasks.v2beta2.CancelLeaseRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser singleton used by the generated parsing entry points.
private static final com.google.protobuf.Parser<CancelLeaseRequest> PARSER =
new com.google.protobuf.AbstractParser<CancelLeaseRequest>() {
@java.lang.Override
public CancelLeaseRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CancelLeaseRequest(input, extensionRegistry);
}
};
// Static accessor for the shared parser singleton.
public static com.google.protobuf.Parser<CancelLeaseRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CancelLeaseRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.tasks.v2beta2.CancelLeaseRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
|
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.manager.test.junit;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.apiman.manager.api.core.util.PolicyTemplateUtil;
import io.apiman.manager.test.junit.ManagerRestTester.TestInfo;
import io.apiman.manager.test.server.ManagerApiTestServer;
import io.apiman.manager.test.server.MockGatewayServlet;
import io.apiman.test.common.plan.TestGroupType;
import io.apiman.test.common.plan.TestPlan;
import io.apiman.test.common.plan.TestType;
import io.apiman.test.common.resttest.RestTest;
import io.apiman.test.common.util.TestPlanRunner;
import io.apiman.test.common.util.TestUtil;
import java.io.File;
import java.lang.reflect.Method;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.lang.StringUtils;
import org.junit.Assert;
import org.junit.runner.Description;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.ParentRunner;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A junit test runner that fires up apiman and makes it ready for
 * use in the tests. This runner also loads up the test plan from
 * the required {@link ManagerRestTestPlan} annotation.
 *
 * @author [email protected]
 */
@SuppressWarnings("nls")
public class ManagerRestTester extends ParentRunner<TestInfo> {

    // FIX: log under this runner class; previously obtained for
    // TestPlanRunner.class (copy/paste error), which mislabeled log output.
    private static Logger logger = LoggerFactory.getLogger(ManagerRestTester.class);

    private static ManagerApiTestServer testServer = new ManagerApiTestServer();

    // Flip to true to route test requests through a local debugging proxy.
    private static final boolean USE_PROXY = false;
    private static final int PROXY_PORT = 7071;

    private List<TestPlanInfo> testPlans = new ArrayList<>();
    // Names of system properties we set ourselves and must clear after the run.
    private Set<String> resetSysProps = new HashSet<>();

    /**
     * Constructor.
     *
     * @param testClass the annotated test class being run
     * @throws InitializationError if no test plan annotations are found
     */
    public ManagerRestTester(Class<?> testClass) throws InitializationError {
        super(testClass);
        configureSystemProperties();
        loadTestPlans(testClass);
    }

    /**
     * Loads the test plans from class- or method-level
     * {@link ManagerRestTestPlan} annotations (method-level plans are ordered
     * by their {@code order()} attribute).
     *
     * @param testClass the test class to scan
     * @throws InitializationError if loading fails or no plan is declared
     */
    private void loadTestPlans(Class<?> testClass) throws InitializationError {
        try {
            ManagerRestTestPlan annotation = testClass.getAnnotation(ManagerRestTestPlan.class);
            if (annotation == null) {
                Method[] methods = testClass.getMethods();
                // Sort method-level plans by their declared order.
                TreeSet<ManagerRestTestPlan> annotations = new TreeSet<>(new Comparator<ManagerRestTestPlan>() {
                    @Override
                    public int compare(ManagerRestTestPlan o1, ManagerRestTestPlan o2) {
                        Integer i1 = o1.order();
                        Integer i2 = o2.order();
                        return i1.compareTo(i2);
                    }
                });
                for (Method method : methods) {
                    annotation = method.getAnnotation(ManagerRestTestPlan.class);
                    if (annotation != null) {
                        annotations.add(annotation);
                    }
                }
                for (ManagerRestTestPlan anno : annotations) {
                    TestPlanInfo planInfo = new TestPlanInfo();
                    planInfo.planPath = anno.value();
                    planInfo.name = new File(planInfo.planPath).getName();
                    planInfo.endpoint = TestUtil.doPropertyReplacement(anno.endpoint());
                    planInfo.plan = TestUtil.loadTestPlan(planInfo.planPath, testClass.getClassLoader());
                    testPlans.add(planInfo);
                }
            } else {
                TestPlanInfo planInfo = new TestPlanInfo();
                planInfo.planPath = annotation.value();
                planInfo.name = new File(planInfo.planPath).getName();
                planInfo.plan = TestUtil.loadTestPlan(planInfo.planPath, testClass.getClassLoader());
                planInfo.endpoint = TestUtil.doPropertyReplacement(annotation.endpoint());
                testPlans.add(planInfo);
            }
        } catch (Throwable e) {
            throw new InitializationError(e);
        }
        if (testPlans.isEmpty()) {
            throw new InitializationError("No @ManagerRestTestPlan annotations found on test class: " + testClass);
        }
    }

    /**
     * Called to setup the test. Starts the embedded server unless
     * {@code apiman.junit.no-server=true} (tests then run against a live server).
     */
    public static void setup() {
        if (!"true".equals(System.getProperty("apiman.junit.no-server", "false"))) {
            startServer();
        } else {
            System.out.println("**** APIMan Server suppressed - assuming running tests against a live server. ****");
        }
    }

    /**
     * Called at the end of the test; stops the embedded server if it was started.
     */
    public static void shutdown() {
        if (!"true".equals(System.getProperty("apiman.junit.no-server", "false"))) {
            stopServer();
        }
    }

    /**
     * Starts the embedded test server, wrapping any failure as unchecked.
     */
    protected static void startServer() {
        try {
            testServer.start();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Stops the embedded test server, wrapping any failure as unchecked.
     */
    protected static void stopServer() {
        try {
            testServer.stop();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Expands each test plan into one child per test, plus optional gateway-log
     * and publish-payload assertion children driven by class-level annotations.
     *
     * @see org.junit.runners.ParentRunner#getChildren()
     */
    @Override
    protected List<TestInfo> getChildren() {
        List<TestInfo> children = new ArrayList<>();
        TestPlanInfo lastPlan = null;
        for (TestPlanInfo planInfo : testPlans) {
            lastPlan = planInfo;
            planInfo.runner = new TestPlanRunner();
            List<TestGroupType> groups = planInfo.plan.getTestGroup();
            for (TestGroupType group : groups) {
                for (TestType test : group.getTest()) {
                    TestInfo testInfo = new TestInfo();
                    // Prefix the plan name only when it is needed to disambiguate.
                    if (testPlans.size() > 1) {
                        testInfo.name = planInfo.name + " / " + test.getName();
                    } else {
                        testInfo.name = test.getName();
                    }
                    testInfo.plan = planInfo;
                    testInfo.group = group;
                    testInfo.test = test;
                    children.add(testInfo);
                }
            }
        }
        ManagerRestTestGatewayLog annotation = getTestClass().getJavaClass().getAnnotation(ManagerRestTestGatewayLog.class);
        if (annotation != null) {
            GatewayAssertionTestInfo gatewayTest = new GatewayAssertionTestInfo();
            gatewayTest.name = "Assert Gateway Log";
            gatewayTest.plan = lastPlan;
            gatewayTest.expectedLog = annotation.value();
            children.add(gatewayTest);
        }
        ManagerRestTestPublishPayload annotation2 = getTestClass().getJavaClass().getAnnotation(ManagerRestTestPublishPayload.class);
        if (annotation2 != null) {
            PublishPayloadTestInfo pubTest = new PublishPayloadTestInfo();
            pubTest.name = "Assert Publishing Payloads";
            pubTest.plan = lastPlan;
            pubTest.expectedPayloads = annotation2.value();
            children.add(pubTest);
        }
        return children;
    }

    /**
     * @see org.junit.runners.ParentRunner#run(org.junit.runner.notification.RunNotifier)
     */
    @Override
    public void run(RunNotifier notifier) {
        setup();
        // Reset shared static state so plans do not bleed into each other.
        PolicyTemplateUtil.clearCache();
        MockGatewayServlet.reset();
        log("");
        log("-------------------------------------------------------------------------------");
        log("Executing REST Test");
        log("-------------------------------------------------------------------------------");
        log("");
        try {
            super.run(notifier);
        } finally {
            // Best-effort shutdown; never let teardown mask a test failure.
            try { shutdown(); } catch (Throwable e) { e.printStackTrace(); }
            resetSystemProperties();
        }
        log("");
        log("-------------------------------------------------------------------------------");
        log("REST Test complete");
        log("-------------------------------------------------------------------------------");
        log("");
    }

    /**
     * @see org.junit.runners.ParentRunner#runChild(java.lang.Object, org.junit.runner.notification.RunNotifier)
     */
    @Override
    protected void runChild(final TestInfo testInfo, RunNotifier notifier) {
        log("-----------------------------------------------------------");
        log("Starting Test [{0} / {1}]", testInfo.plan.name, testInfo.name);
        log("-----------------------------------------------------------");
        Description description = describeChild(testInfo);
        if (testInfo instanceof GatewayAssertionTestInfo) {
            runLeaf(new Statement() {
                @Override
                public void evaluate() throws Throwable {
                    String actualGatewayLog = MockGatewayServlet.getRequestLog();
                    Assert.assertEquals(((GatewayAssertionTestInfo) testInfo).expectedLog, actualGatewayLog);
                }
            }, description, notifier);
        } else if (testInfo instanceof PublishPayloadTestInfo) {
            runLeaf(new Statement() {
                @Override
                public void evaluate() throws Throwable {
                    String[] expectedPayloads = ((PublishPayloadTestInfo) testInfo).expectedPayloads;
                    int index = 0;
                    for (String expectedPayload : expectedPayloads) {
                        if (MockGatewayServlet.getPayloads().isEmpty()) {
                            Assert.fail("Expected a payload but did not find one.");
                        }
                        String actualPayload = MockGatewayServlet.getPayloads().get(index);
                        if (expectedPayload == null || "".equals(expectedPayload)) {
                            Assert.assertNull(actualPayload);
                        } else {
                            // Compare payloads structurally as JSON, not as raw text.
                            ObjectMapper mapper = new ObjectMapper();
                            JsonNode expected = mapper.readTree(expectedPayload);
                            JsonNode actual = mapper.readTree(actualPayload.trim());
                            RestTest mockRT = new RestTest();
                            testInfo.plan.runner.assertJson(mockRT, expected, actual);
                        }
                        index++;
                    }
                }
            }, description, notifier);
        } else {
            runLeaf(new Statement() {
                @Override
                public void evaluate() throws Throwable {
                    String rtPath = testInfo.test.getValue();
                    Integer delay = testInfo.test.getDelay();
                    if (delay != null) {
                        try {
                            Thread.sleep(delay);
                        } catch (InterruptedException e) {
                            // FIX: restore the interrupt flag instead of swallowing it.
                            Thread.currentThread().interrupt();
                        }
                    }
                    if (rtPath != null && !rtPath.trim().isEmpty()) {
                        RestTest restTest = TestUtil.loadRestTest(rtPath, getTestClass().getJavaClass().getClassLoader());
                        // Endpoint resolution order: plan annotation, test, group,
                        // plan document, then the local embedded server.
                        String endpoint = testInfo.plan.endpoint;
                        if (StringUtils.isEmpty(endpoint)) {
                            endpoint = TestUtil.doPropertyReplacement(testInfo.test.getEndpoint());
                        }
                        if (StringUtils.isEmpty(endpoint)) {
                            endpoint = TestUtil.doPropertyReplacement(testInfo.group.getEndpoint());
                        }
                        if (StringUtils.isEmpty(endpoint)) {
                            endpoint = TestUtil.doPropertyReplacement(testInfo.plan.plan.getEndpoint());
                        }
                        if (StringUtils.isEmpty(endpoint)) {
                            endpoint = "http://localhost:" + getTestServerPort() + getBaseApiContext();
                        }
                        testInfo.plan.runner.runTest(restTest, endpoint);
                    }
                }
            }, description, notifier);
        }
    }

    /**
     * @see org.junit.runners.ParentRunner#describeChild(java.lang.Object)
     */
    @Override
    protected Description describeChild(TestInfo child) {
        return Description.createTestDescription(getTestClass().getJavaClass(), child.name);
    }

    /**
     * @return the base context of the DT API
     */
    protected String getBaseApiContext() {
        return System.getProperty("apiman.junit.server-api-context", "/apiman");
    }

    /**
     * @return the port to use when sending requests
     */
    protected int getTestServerPort() {
        String spPort = System.getProperty("apiman.junit.server-port");
        if (spPort != null) {
            return Integer.parseInt(spPort);
        }
        if (USE_PROXY) {
            return PROXY_PORT;
        } else {
            return testServer.serverPort();
        }
    }

    /**
     * Configure some properties.
     * NOTE(review): runs from the constructor, before the server is started —
     * assumes testServer.serverPort() is valid pre-start; confirm.
     */
    private void configureSystemProperties() {
        TestUtil.setProperty("apiman.test.gateway.endpoint", "http://localhost:" + getTestServerPort() + "/mock-gateway");
        TestUtil.setProperty("apiman.test.gateway.username", "admin");
        TestUtil.setProperty("apiman.test.gateway.password", "admin");
        TestUtil.setProperty("apiman.manager.require-auto-granted-org", "false");
        RestTestSystemProperties annotation = getTestClass().getJavaClass().getAnnotation(RestTestSystemProperties.class);
        if (annotation != null) {
            String[] strings = annotation.value();
            // Values come as flat name/value pairs.
            for (int idx = 0; idx < strings.length; idx += 2) {
                String pname = strings[idx];
                String pval = strings[idx + 1];
                log("Setting system property \"{0}\" to \"{1}\".", pname, pval);
                // Only remember for cleanup the properties we actually introduced.
                if (System.getProperty(pname) == null) {
                    resetSysProps.add(pname);
                }
                TestUtil.setProperty(pname, pval);
            }
        }
    }

    /**
     * Resets the system properties that were set at the start of the test.
     */
    private void resetSystemProperties() {
        for (String propName : resetSysProps) {
            System.clearProperty(propName);
        }
        resetSysProps.clear();
    }

    /**
     * Logs a message.
     *
     * @param message the MessageFormat pattern
     * @param params the pattern arguments
     */
    public void log(String message, Object... params) {
        String outmsg = MessageFormat.format(message, params);
        logger.info(" >> " + outmsg);
    }

    /** Per-plan bookkeeping: the parsed plan plus its runner and endpoint. */
    public static class TestPlanInfo {
        TestPlan plan;
        String name;
        String planPath;
        String endpoint;
        TestPlanRunner runner;
    }

    /** One runnable child: a single test within a group within a plan. */
    public static class TestInfo {
        TestGroupType group;
        TestType test;
        String name;
        TestPlanInfo plan;
    }

    /** Child that asserts the mock gateway's request log after the plan runs. */
    public static class GatewayAssertionTestInfo extends TestInfo {
        String expectedLog;
    }

    /** Child that asserts the payloads published to the mock gateway. */
    public static class PublishPayloadTestInfo extends TestInfo {
        String[] expectedPayloads;
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.recovery;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.AkkaOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.HeartbeatManagerOptions;
import org.apache.flink.configuration.HighAvailabilityOptions;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.core.plugin.PluginManager;
import org.apache.flink.core.plugin.PluginUtils;
import org.apache.flink.runtime.entrypoint.StandaloneSessionClusterEntrypoint;
import org.apache.flink.runtime.taskexecutor.TaskManagerRunner;
import org.apache.flink.runtime.util.BlobServerResource;
import org.apache.flink.runtime.zookeeper.ZooKeeperResource;
import org.apache.flink.test.util.TestProcessBuilder;
import org.apache.flink.test.util.TestProcessBuilder.TestProcess;
import org.apache.flink.util.TestLogger;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import static org.apache.flink.runtime.testutils.CommonTestUtils.getJavaCommandPath;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
/**
* Abstract base for tests verifying the behavior of the recovery in the case when a TaskManager
* fails (process is killed) in the middle of a job execution.
*
* <p>The test works with multiple task managers processes by spawning JVMs. Initially, it starts a
* JobManager in process and two TaskManagers JVMs with 2 task slots each. It submits a program with
* parallelism 4 and waits until all tasks are brought up. Coordination between the test and the
* tasks happens via checking for the existence of temporary files. It then starts another
* TaskManager, which is guaranteed to remain empty (all tasks are already deployed) and kills one
* of the original task managers. The recovery should restart the tasks on the new TaskManager.
*/
public abstract class AbstractTaskManagerProcessFailureRecoveryTest extends TestLogger {
    /** Prefix of the marker files each parallel task creates once it is up and running. */
    protected static final String READY_MARKER_FILE_PREFIX = "ready_";
    /** Marker file whose creation signals the running tasks that they may complete. */
    protected static final String PROCEED_MARKER_FILE = "proceed";
    /** Parallelism of the test job; one ready-marker file per parallel subtask is expected. */
    protected static final int PARALLELISM = 4;
    @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();
    @Rule public final BlobServerResource blobServerResource = new BlobServerResource();
    @Rule public final ZooKeeperResource zooKeeperResource = new ZooKeeperResource();
    /**
     * Spawns TaskManagers as separate JVM processes, starts the test job, kills one
     * TaskManager process once all tasks are running, and verifies that the job still
     * finishes by recovering onto a freshly started TaskManager.
     */
    @Test
    public void testTaskManagerProcessFailure() throws Exception {
        TestProcess taskManagerProcess1 = null;
        TestProcess taskManagerProcess2 = null;
        TestProcess taskManagerProcess3 = null;
        File coordinateTempDir = null;
        // Cluster configuration: ZooKeeper HA so that the killed TaskManager's slots are
        // recovered, short heartbeats so the failure is detected quickly.
        Configuration config = new Configuration();
        config.setString(AkkaOptions.ASK_TIMEOUT, "100 s");
        config.setString(JobManagerOptions.ADDRESS, "localhost");
        config.setString(RestOptions.BIND_PORT, "0");
        config.setLong(HeartbeatManagerOptions.HEARTBEAT_INTERVAL, 500L);
        config.setLong(HeartbeatManagerOptions.HEARTBEAT_TIMEOUT, 10000L);
        config.setString(HighAvailabilityOptions.HA_MODE, "zookeeper");
        config.setString(
                HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM, zooKeeperResource.getConnectString());
        config.setString(
                HighAvailabilityOptions.HA_STORAGE_PATH,
                temporaryFolder.newFolder().getAbsolutePath());
        config.setInteger(TaskManagerOptions.NUM_TASK_SLOTS, 2);
        config.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MemorySize.parse("4m"));
        config.set(TaskManagerOptions.NETWORK_MEMORY_MIN, MemorySize.parse("3200k"));
        config.set(TaskManagerOptions.NETWORK_MEMORY_MAX, MemorySize.parse("3200k"));
        config.set(TaskManagerOptions.TASK_HEAP_MEMORY, MemorySize.parse("128m"));
        config.set(TaskManagerOptions.CPU_CORES, 1.0);
        config.setString(JobManagerOptions.EXECUTION_FAILOVER_STRATEGY, "full");
        try (final StandaloneSessionClusterEntrypoint clusterEntrypoint =
                new StandaloneSessionClusterEntrypoint(config)) {
            // check that we run this test only if the java command
            // is available on this machine
            String javaCommand = getJavaCommandPath();
            if (javaCommand == null) {
                System.out.println(
                        "---- Skipping Process Failure test : Could not find java executable ----");
                return;
            }
            clusterEntrypoint.startCluster();
            // coordination between the processes goes through a directory
            coordinateTempDir = temporaryFolder.newFolder();
            TestProcessBuilder taskManagerProcessBuilder =
                    new TestProcessBuilder(TaskExecutorProcessEntryPoint.class.getName());
            taskManagerProcessBuilder.addConfigAsMainClassArgs(config);
            // start the first two TaskManager processes
            taskManagerProcess1 = taskManagerProcessBuilder.start();
            taskManagerProcess2 = taskManagerProcessBuilder.start();
            // the program will set a marker file in each of its parallel tasks once they are ready,
            // so that
            // this coordinating code is aware of this.
            // the program will very slowly consume elements until the marker file (later created by
            // the
            // test driver code) is present
            final File coordinateDirClosure = coordinateTempDir;
            final AtomicReference<Throwable> errorRef = new AtomicReference<>();
            // we trigger program execution in a separate thread
            Thread programTrigger =
                    new Thread("Program Trigger") {
                        @Override
                        public void run() {
                            try {
                                // implemented by the concrete subclass under test
                                testTaskManagerFailure(config, coordinateDirClosure);
                            } catch (Throwable t) {
                                t.printStackTrace();
                                errorRef.set(t);
                            }
                        }
                    };
            // start the test program
            programTrigger.start();
            // wait until all marker files are in place, indicating that all tasks have started
            // max 120 seconds
            if (!waitForMarkerFiles(
                    coordinateTempDir, READY_MARKER_FILE_PREFIX, PARALLELISM, 120000)) {
                // check if the program failed for some reason
                if (errorRef.get() != null) {
                    Throwable error = errorRef.get();
                    error.printStackTrace();
                    fail(
                            "The program encountered a "
                                    + error.getClass().getSimpleName()
                                    + " : "
                                    + error.getMessage());
                } else {
                    // no error occurred, simply a timeout
                    fail("The tasks were not started within time (" + 120000 + "msecs)");
                }
            }
            // start the third TaskManager
            taskManagerProcess3 = taskManagerProcessBuilder.start();
            // kill one of the previous TaskManagers, triggering a failure and recovery
            taskManagerProcess1.destroy();
            waitForShutdown("TaskManager 1", taskManagerProcess1);
            // we create the marker file which signals the program functions tasks that they can
            // complete
            touchFile(new File(coordinateTempDir, PROCEED_MARKER_FILE));
            // wait for at most 5 minutes for the program to complete
            programTrigger.join(300000);
            // check that the program really finished
            assertFalse("The program did not finish in time", programTrigger.isAlive());
            // check whether the program encountered an error
            if (errorRef.get() != null) {
                Throwable error = errorRef.get();
                error.printStackTrace();
                fail(
                        "The program encountered a "
                                + error.getClass().getSimpleName()
                                + " : "
                                + error.getMessage());
            }
            // all seems well :-)
        } catch (Exception e) {
            // dump the spawned processes' logs before failing, to aid debugging
            e.printStackTrace();
            printProcessLog("TaskManager 1", taskManagerProcess1);
            printProcessLog("TaskManager 2", taskManagerProcess2);
            printProcessLog("TaskManager 3", taskManagerProcess3);
            fail(e.getMessage());
        } catch (Error e) {
            e.printStackTrace();
            printProcessLog("TaskManager 1", taskManagerProcess1);
            printProcessLog("TaskManager 2", taskManagerProcess2);
            printProcessLog("TaskManager 3", taskManagerProcess3);
            throw e;
        } finally {
            // always tear the spawned JVMs down, even on success
            if (taskManagerProcess1 != null) {
                taskManagerProcess1.destroy();
            }
            if (taskManagerProcess2 != null) {
                taskManagerProcess2.destroy();
            }
            if (taskManagerProcess3 != null) {
                taskManagerProcess3.destroy();
            }
            waitForShutdown("TaskManager 1", taskManagerProcess1);
            waitForShutdown("TaskManager 2", taskManagerProcess2);
            waitForShutdown("TaskManager 3", taskManagerProcess3);
        }
    }
    /**
     * Waits up to 30 seconds for the given process to terminate; force-kills it (and logs
     * its output) if it does not. A {@code null} process is ignored.
     *
     * @param processName human-readable name used in log output
     * @param process the process to wait for, may be {@code null} if it was never started
     */
    private void waitForShutdown(final String processName, @Nullable final TestProcess process)
            throws InterruptedException {
        if (process == null) {
            return;
        }
        if (!process.getProcess().waitFor(30, TimeUnit.SECONDS)) {
            log.error("{} did not shutdown in time.", processName);
            printProcessLog(processName, process);
            process.getProcess().destroyForcibly();
        }
    }
    /**
     * The test program should be implemented here in a form of a separate thread. This provides a
     * solution for checking that it has been terminated.
     *
     * @param configuration the config to use
     * @param coordinateDir TaskManager failure will be triggered only after processes have
     *     successfully created file under this directory
     */
    public abstract void testTaskManagerFailure(Configuration configuration, File coordinateDir)
            throws Exception;
    /**
     * Prints the captured error output of a spawned process to stdout, or a placeholder if
     * the process was never started.
     *
     * @param processName human-readable name used in the banner
     * @param process the process whose log to print, may be {@code null}
     */
    protected static void printProcessLog(String processName, TestProcess process) {
        if (process == null) {
            System.out.println("-----------------------------------------");
            System.out.println(" PROCESS " + processName + " WAS NOT STARTED.");
            System.out.println("-----------------------------------------");
        } else {
            System.out.println("-----------------------------------------");
            System.out.println(" BEGIN SPAWNED PROCESS LOG FOR " + processName);
            System.out.println("-----------------------------------------");
            System.out.println(process.getErrorOutput().toString());
            System.out.println("-----------------------------------------");
            System.out.println(" END SPAWNED PROCESS LOG");
            System.out.println("-----------------------------------------");
        }
    }
    /**
     * Creates the file if it does not exist and updates its last-modified timestamp
     * (Unix {@code touch} semantics).
     *
     * @param file the file to touch
     * @throws IOException if the timestamp could not be updated
     */
    protected static void touchFile(File file) throws IOException {
        if (!file.exists()) {
            new FileOutputStream(file).close();
        }
        if (!file.setLastModified(System.currentTimeMillis())) {
            throw new IOException("Could not touch the file.");
        }
    }
    /**
     * Polls (every 10 ms) until the files {@code prefix + 0 .. prefix + (num-1)} all exist
     * under {@code basedir}, or the timeout elapses.
     *
     * @param basedir directory in which the marker files are expected
     * @param prefix file name prefix; the subtask index is appended
     * @param num number of marker files to wait for
     * @param timeout maximum time to wait, in milliseconds
     * @return {@code true} if all marker files appeared within the timeout, {@code false} otherwise
     */
    protected static boolean waitForMarkerFiles(
            File basedir, String prefix, int num, long timeout) {
        long now = System.currentTimeMillis();
        final long deadline = now + timeout;
        while (now < deadline) {
            boolean allFound = true;
            for (int i = 0; i < num; i++) {
                File nextToCheck = new File(basedir, prefix + i);
                if (!nextToCheck.exists()) {
                    allFound = false;
                    break;
                }
            }
            if (allFound) {
                return true;
            } else {
                // not all found, wait for a bit
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
                now = System.currentTimeMillis();
            }
        }
        return false;
    }
    // --------------------------------------------------------------------------------------------
    /** The entry point for the TaskExecutor JVM. Simply configures and runs a TaskExecutor. */
    public static class TaskExecutorProcessEntryPoint {
        private static final Logger LOG =
                LoggerFactory.getLogger(TaskExecutorProcessEntryPoint.class);
        /**
         * Parses the configuration from the command-line arguments and runs a TaskManager;
         * exits with code 1 on any failure.
         */
        public static void main(String[] args) {
            try {
                final ParameterTool parameterTool = ParameterTool.fromArgs(args);
                Configuration cfg = parameterTool.getConfiguration();
                final PluginManager pluginManager =
                        PluginUtils.createPluginManagerFromRootFolder(cfg);
                TaskManagerRunner.runTaskManager(cfg, pluginManager);
            } catch (Throwable t) {
                LOG.error("Failed to run the TaskManager process", t);
                System.exit(1);
            }
        }
    }
}
|
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.underfs.gcs;
import alluxio.AlluxioURI;
import alluxio.Constants;
import alluxio.PropertyKey;
import alluxio.underfs.ObjectUnderFileSystem;
import alluxio.underfs.UnderFileSystem;
import alluxio.underfs.UnderFileSystemConfiguration;
import alluxio.underfs.options.OpenOptions;
import alluxio.util.CommonUtils;
import alluxio.util.UnderFileSystemUtils;
import alluxio.util.io.PathUtils;
import com.google.common.base.Preconditions;
import org.jets3t.service.ServiceException;
import org.jets3t.service.StorageObjectsChunk;
import org.jets3t.service.acl.gs.GSAccessControlList;
import org.jets3t.service.impl.rest.httpclient.GoogleStorageService;
import org.jets3t.service.model.GSObject;
import org.jets3t.service.model.StorageObject;
import org.jets3t.service.security.GSCredentials;
import org.jets3t.service.utils.Mimetypes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import javax.annotation.concurrent.ThreadSafe;
/**
* GCS FS {@link UnderFileSystem} implementation based on the jets3t library.
*/
@ThreadSafe
public class GCSUnderFileSystem extends ObjectUnderFileSystem {
  private static final Logger LOG = LoggerFactory.getLogger(GCSUnderFileSystem.class);

  /** Suffix for an empty file to flag it as a directory. */
  private static final String FOLDER_SUFFIX = "_$folder$";

  /** MD5 hash of an empty payload, attached to directory marker objects. */
  private static final byte[] DIR_HASH;

  /** Jets3t GCS client. */
  private final GoogleStorageService mClient;

  /** Bucket name of user's configured Alluxio bucket. */
  private final String mBucketName;

  /** The name of the account owner. */
  private final String mAccountOwner;

  /** The permission mode that the account owner has to the bucket. */
  private final short mBucketMode;

  static {
    try {
      // MD5 is guaranteed to be available in every JRE, so this cannot realistically fail.
      DIR_HASH = MessageDigest.getInstance("MD5").digest(new byte[0]);
    } catch (NoSuchAlgorithmException e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Constructs a new instance of {@link GCSUnderFileSystem}.
   *
   * @param uri the {@link AlluxioURI} for this UFS
   * @param conf the configuration for this UFS
   * @return the created {@link GCSUnderFileSystem} instance
   * @throws ServiceException when a connection to GCS could not be created
   */
  public static GCSUnderFileSystem createInstance(
      AlluxioURI uri, UnderFileSystemConfiguration conf) throws ServiceException {
    String bucketName = UnderFileSystemUtils.getBucketName(uri);
    Preconditions.checkArgument(conf.containsKey(PropertyKey.GCS_ACCESS_KEY),
        "Property " + PropertyKey.GCS_ACCESS_KEY + " is required to connect to GCS");
    Preconditions.checkArgument(conf.containsKey(PropertyKey.GCS_SECRET_KEY),
        "Property " + PropertyKey.GCS_SECRET_KEY + " is required to connect to GCS");
    GSCredentials googleCredentials = new GSCredentials(
        conf.getValue(PropertyKey.GCS_ACCESS_KEY),
        conf.getValue(PropertyKey.GCS_SECRET_KEY));
    // TODO(chaomin): maybe add proxy support for GCS.
    GoogleStorageService googleStorageService = new GoogleStorageService(googleCredentials);
    String accountOwnerId = googleStorageService.getAccountOwner().getId();
    // Gets the owner from user-defined static mapping from GCS account id to Alluxio user name.
    String owner = CommonUtils.getValueFromStaticMapping(
        conf.getValue(PropertyKey.UNDERFS_GCS_OWNER_ID_TO_USERNAME_MAPPING), accountOwnerId);
    // If there is no user-defined mapping, use the display name.
    if (owner == null) {
      owner = googleStorageService.getAccountOwner().getDisplayName();
    }
    String accountOwner = owner == null ? accountOwnerId : owner;
    GSAccessControlList acl = googleStorageService.getBucketAcl(bucketName);
    short bucketMode = GCSUtils.translateBucketAcl(acl, accountOwnerId);
    return new GCSUnderFileSystem(uri, googleStorageService, bucketName, bucketMode, accountOwner,
        conf);
  }

  /**
   * Constructor for {@link GCSUnderFileSystem}.
   *
   * @param uri the {@link AlluxioURI} for this UFS
   * @param googleStorageService the Jets3t GCS client
   * @param bucketName bucket name of user's configured Alluxio bucket
   * @param bucketMode the permission mode that the account owner has to the bucket
   * @param accountOwner the name of the account owner
   * @param conf configuration for this UFS
   */
  protected GCSUnderFileSystem(AlluxioURI uri, GoogleStorageService googleStorageService,
      String bucketName, short bucketMode, String accountOwner, UnderFileSystemConfiguration conf) {
    super(uri, conf);
    mClient = googleStorageService;
    mBucketName = bucketName;
    mBucketMode = bucketMode;
    mAccountOwner = accountOwner;
  }

  @Override
  public String getUnderFSType() {
    return "gcs";
  }

  // Setting GCS owner via Alluxio is not supported yet. This is a no-op.
  @Override
  public void setOwner(String path, String user, String group) {}

  // Setting GCS mode via Alluxio is not supported yet. This is a no-op.
  @Override
  public void setMode(String path, short mode) throws IOException {}

  /**
   * Copies an object within the bucket, retrying on transient service errors.
   *
   * @param src the source object key
   * @param dst the destination object key
   * @return true if the copy succeeded within the retry budget, false otherwise
   */
  @Override
  protected boolean copyObject(String src, String dst) {
    LOG.debug("Copying {} to {}", src, dst);
    GSObject obj = new GSObject(dst);
    // Retry copy for a few times, in case some Jets3t or GCS internal errors happened during copy.
    int retries = 3;
    for (int i = 0; i < retries; i++) {
      try {
        mClient.copyObject(mBucketName, src, mBucketName, obj, false);
        return true;
      } catch (ServiceException e) {
        LOG.error("Failed to copy file {} to {}", src, dst, e);
        if (i != retries - 1) {
          LOG.error("Retrying copying file {} to {}", src, dst);
        }
      }
    }
    LOG.error("Failed to copy file {} to {}, after {} retries", src, dst, retries);
    return false;
  }

  /**
   * Creates a zero-byte object, used as a directory marker.
   *
   * @param key the key of the object to create
   * @return true if the object was created, false on service error
   */
  @Override
  protected boolean createEmptyObject(String key) {
    try {
      GSObject obj = new GSObject(key);
      obj.setDataInputStream(new ByteArrayInputStream(new byte[0]));
      obj.setContentLength(0);
      obj.setMd5Hash(DIR_HASH);
      obj.setContentType(Mimetypes.MIMETYPE_BINARY_OCTET_STREAM);
      mClient.putObject(mBucketName, obj);
      return true;
    } catch (ServiceException e) {
      LOG.error("Failed to create directory: {}", key, e);
      return false;
    }
  }

  @Override
  protected OutputStream createObject(String key) throws IOException {
    return new GCSOutputStream(mBucketName, key, mClient);
  }

  /**
   * Deletes the object with the given key.
   *
   * @param key the key of the object to delete
   * @return true if the delete succeeded, false on service error
   */
  @Override
  protected boolean deleteObject(String key) throws IOException {
    try {
      mClient.deleteObject(mBucketName, key);
    } catch (ServiceException e) {
      LOG.error("Failed to delete {}", key, e);
      return false;
    }
    return true;
  }

  @Override
  protected String getFolderSuffix() {
    return FOLDER_SUFFIX;
  }

  @Override
  protected ObjectListingChunk getObjectListingChunk(String key, boolean recursive)
      throws IOException {
    key = PathUtils.normalizePath(key, PATH_SEPARATOR);
    // In case key is root (empty string) do not normalize prefix
    key = key.equals(PATH_SEPARATOR) ? "" : key;
    // An empty delimiter lists all keys under the prefix, i.e. a recursive listing.
    String delimiter = recursive ? "" : PATH_SEPARATOR;
    StorageObjectsChunk chunk = getObjectListingChunk(key, delimiter, null);
    if (chunk != null) {
      return new GCSObjectListingChunk(chunk);
    }
    return null;
  }

  // Get next chunk of listing result
  private StorageObjectsChunk getObjectListingChunk(String key, String delimiter,
      String priorLastKey) {
    StorageObjectsChunk res;
    try {
      res = mClient.listObjectsChunked(mBucketName, key, delimiter,
          getListingChunkLength(), priorLastKey);
    } catch (ServiceException e) {
      LOG.error("Failed to list path {}", key, e);
      res = null;
    }
    return res;
  }

  /**
   * Wrapper over GCS {@link StorageObjectsChunk}.
   */
  private final class GCSObjectListingChunk implements ObjectListingChunk {
    final StorageObjectsChunk mChunk;

    GCSObjectListingChunk(StorageObjectsChunk chunk)
        throws IOException {
      mChunk = chunk;
      if (mChunk == null) {
        throw new IOException("GCS listing result is null");
      }
    }

    @Override
    public ObjectStatus[] getObjectStatuses() {
      StorageObject[] objects = mChunk.getObjects();
      ObjectStatus[] ret = new ObjectStatus[objects.length];
      for (int i = 0; i < ret.length; ++i) {
        ret[i] = new ObjectStatus(objects[i].getKey(), objects[i].getContentLength(),
            objects[i].getLastModifiedDate().getTime());
      }
      return ret;
    }

    @Override
    public String[] getCommonPrefixes() {
      return mChunk.getCommonPrefixes();
    }

    @Override
    public ObjectListingChunk getNextChunk() throws IOException {
      if (!mChunk.isListingComplete()) {
        StorageObjectsChunk nextChunk = getObjectListingChunk(mChunk.getPrefix(),
            mChunk.getDelimiter(), mChunk.getPriorLastKey());
        if (nextChunk != null) {
          return new GCSObjectListingChunk(nextChunk);
        }
      }
      return null;
    }
  }

  /**
   * Fetches the metadata of an object.
   *
   * @param key the key of the object
   * @return the object status, or null if the object does not exist or the lookup failed
   */
  @Override
  protected ObjectStatus getObjectStatus(String key) {
    try {
      GSObject meta = mClient.getObjectDetails(mBucketName, key);
      if (meta == null) {
        return null;
      }
      return new ObjectStatus(key, meta.getContentLength(), meta.getLastModifiedDate().getTime());
    } catch (ServiceException e) {
      return null;
    }
  }

  // No group in GCS ACL, returns the account owner for group.
  @Override
  protected ObjectPermissions getPermissions() {
    return new ObjectPermissions(mAccountOwner, mAccountOwner, mBucketMode);
  }

  @Override
  protected String getRootKey() {
    return Constants.HEADER_GCS + mBucketName;
  }

  @Override
  protected InputStream openObject(String key, OpenOptions options) throws IOException {
    try {
      return new GCSInputStream(mBucketName, key, mClient, options.getOffset());
    } catch (ServiceException e) {
      // Preserve the ServiceException as the cause so its stack trace is not lost.
      throw new IOException(e.getMessage(), e);
    }
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.config;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.jmeter.testelement.property.CollectionProperty;
import org.apache.jmeter.testelement.property.JMeterProperty;
import org.apache.jmeter.testelement.property.PropertyIterator;
import org.apache.jmeter.testelement.property.TestElementProperty;
/**
* A set of Argument objects.
*
*/
public class Arguments extends ConfigTestElement implements Serializable, Iterable<JMeterProperty> {
    private static final long serialVersionUID = 240L;

    /** The name of the property used to store the arguments. */
    public static final String ARGUMENTS = "Arguments.arguments"; //$NON-NLS-1$

    /**
     * Create a new Arguments object with no arguments.
     */
    public Arguments() {
        setProperty(new CollectionProperty(ARGUMENTS, new ArrayList<Argument>()));
    }

    /**
     * Get the arguments.
     *
     * @return the arguments
     */
    public CollectionProperty getArguments() {
        return (CollectionProperty) getProperty(ARGUMENTS);
    }

    /**
     * Clear the arguments.
     */
    @Override
    public void clear() {
        super.clear();
        setProperty(new CollectionProperty(ARGUMENTS, new ArrayList<Argument>()));
    }

    /**
     * Set the list of arguments. Any existing arguments will be lost.
     *
     * @param arguments
     *            the new arguments
     */
    public void setArguments(List<Argument> arguments) {
        setProperty(new CollectionProperty(ARGUMENTS, arguments));
    }

    /**
     * Get the arguments as a Map. Each argument name is used as the key, and
     * its value as the value. Insertion order is preserved.
     *
     * @return a new Map with String keys and values containing the arguments
     */
    public Map<String, String> getArgumentsAsMap() {
        PropertyIterator iter = getArguments().iterator();
        Map<String, String> argMap = new LinkedHashMap<>();
        while (iter.hasNext()) {
            Argument arg = (Argument) iter.next().getObjectValue();
            // Because CollectionProperty.mergeIn will not prevent adding two
            // properties of the same name, we need to select the first value so
            // that this element's values prevail over defaults provided by
            // configuration
            // elements:
            if (!argMap.containsKey(arg.getName())) {
                argMap.put(arg.getName(), arg.getValue());
            }
        }
        return argMap;
    }

    /**
     * Add a new argument with the given name and value.
     *
     * @param name
     *            the name of the argument
     * @param value
     *            the value of the argument
     */
    public void addArgument(String name, String value) {
        addArgument(new Argument(name, value, null));
    }

    /**
     * Add a new argument.
     *
     * @param arg
     *            the new argument
     */
    public void addArgument(Argument arg) {
        TestElementProperty newArg = new TestElementProperty(arg.getName(), arg);
        if (isRunningVersion()) {
            // Mark the property temporary so it is discarded when the run ends.
            this.setTemporary(newArg);
        }
        getArguments().addItem(newArg);
    }

    /**
     * Add a new argument with the given name, value, and metadata.
     *
     * @param name
     *            the name of the argument
     * @param value
     *            the value of the argument
     * @param metadata
     *            the metadata for the argument
     */
    public void addArgument(String name, String value, String metadata) {
        addArgument(new Argument(name, value, metadata));
    }

    /**
     * Get a PropertyIterator of the arguments.
     *
     * @return an iteration of the arguments
     */
    @Override
    public PropertyIterator iterator() {
        return getArguments().iterator();
    }

    /**
     * Create a string representation of the arguments, in the form
     * {@code name=value&name=value} (using each argument's metadata as the
     * separator when present).
     *
     * @return the string representation of the arguments
     */
    @Override
    public String toString() {
        StringBuilder str = new StringBuilder();
        PropertyIterator iter = getArguments().iterator();
        while (iter.hasNext()) {
            Argument arg = (Argument) iter.next().getObjectValue();
            final String metaData = arg.getMetaData();
            str.append(arg.getName());
            if (metaData == null) {
                str.append("="); //$NON-NLS-1$
            } else {
                str.append(metaData);
            }
            str.append(arg.getValue());
            if (iter.hasNext()) {
                str.append("&"); //$NON-NLS-1$
            }
        }
        return str.toString();
    }

    /**
     * Remove the specified argument from the list. Out-of-range indices
     * (negative or too large) are silently ignored.
     *
     * @param row
     *            the index of the argument to remove
     */
    public void removeArgument(int row) {
        // Guard against negative indices as well, which would otherwise throw.
        if (row >= 0 && row < getArguments().size()) {
            getArguments().remove(row);
        }
    }

    /**
     * Remove the specified argument from the list.
     *
     * @param arg
     *            the argument to remove
     */
    public void removeArgument(Argument arg) {
        PropertyIterator iter = getArguments().iterator();
        while (iter.hasNext()) {
            Argument item = (Argument) iter.next().getObjectValue();
            if (arg.equals(item)) {
                iter.remove();
            }
        }
    }

    /**
     * Remove the argument with the specified name.
     *
     * @param argName
     *            the name of the argument to remove
     */
    public void removeArgument(String argName) {
        PropertyIterator iter = getArguments().iterator();
        while (iter.hasNext()) {
            Argument arg = (Argument) iter.next().getObjectValue();
            if (arg.getName().equals(argName)) {
                iter.remove();
            }
        }
    }

    /**
     * Remove the argument with the specified name and value.
     *
     * @param argName
     *            the name of the argument to remove
     * @param argValue the value to compare - must not be null
     */
    public void removeArgument(String argName, String argValue) {
        PropertyIterator iter = getArguments().iterator();
        while (iter.hasNext()) {
            Argument arg = (Argument) iter.next().getObjectValue();
            if (arg.getName().equals(argName) && argValue.equals(arg.getValue())) {
                iter.remove();
            }
        }
    }

    /**
     * Remove all arguments from the list.
     */
    public void removeAllArguments() {
        getArguments().clear();
    }

    /**
     * Add a new empty argument to the list. The new argument will have the
     * empty string as its name and value, and null metadata.
     */
    public void addEmptyArgument() {
        addArgument(new Argument("", "", null));
    }

    /**
     * Get the number of arguments in the list.
     *
     * @return the number of arguments
     */
    public int getArgumentCount() {
        return getArguments().size();
    }

    /**
     * Get a single argument.
     *
     * @param row
     *            the index of the argument to return.
     * @return the argument at the specified index, or null if no argument
     *         exists at that index.
     */
    public Argument getArgument(int row) {
        Argument argument = null;
        // Guard against negative indices as well, which would otherwise throw.
        if (row >= 0 && row < getArguments().size()) {
            argument = (Argument) getArguments().get(row).getObjectValue();
        }
        return argument;
    }
}
|
|
package edu.isi.bmkeg.uimaBioC.rubicon.dev;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.tcas.DocumentAnnotation;
import org.apache.uima.resource.ResourceInitializationException;
import org.bigmech.fries.FRIES_Argument;
import org.bigmech.fries.FRIES_EntityMention;
import org.bigmech.fries.FRIES_EventMention;
import org.bigmech.fries.FRIES_Frame;
import org.bigmech.fries.FRIES_FrameCollection;
import org.bigmech.fries.FRIES_Passage;
import org.bigmech.fries.FRIES_Sentence;
import org.bigmech.fries.FRIES_XRef;
import org.cleartk.ml.CleartkAnnotator;
import org.cleartk.ml.Feature;
import org.cleartk.ml.Instance;
import org.cleartk.ml.feature.extractor.CleartkExtractor;
import org.cleartk.ml.feature.extractor.CoveredTextExtractor;
import org.cleartk.ml.feature.transform.extractor.TfidfExtractor;
import org.cleartk.token.type.Sentence;
import org.cleartk.token.type.Token;
import org.simmetrics.StringMetric;
import org.simmetrics.StringMetricBuilder;
import org.simmetrics.metrics.CosineSimilarity;
import org.simmetrics.metrics.Levenshtein;
import org.simmetrics.simplifiers.CaseSimplifier;
import org.simmetrics.simplifiers.NonDiacriticSimplifier;
import org.simmetrics.tokenizers.QGramTokenizer;
import org.simmetrics.tokenizers.WhitespaceTokenizer;
import org.uimafit.component.JCasAnnotator_ImplBase;
import org.uimafit.descriptor.ConfigurationParameter;
import org.uimafit.factory.ConfigurationParameterFactory;
import org.uimafit.util.JCasUtil;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.typeadapters.RuntimeTypeAdapterFactory;
import bioc.type.UimaBioCAnnotation;
import bioc.type.UimaBioCDocument;
import bioc.type.UimaBioCPassage;
import edu.isi.bmkeg.lapdf.xml.model.LapdftextXMLChunk;
import edu.isi.bmkeg.uimaBioC.UimaBioCUtils;
import edu.isi.bmkeg.uimaBioC.utils.NeedlemanWunch;
public class AddReachAnnotations extends CleartkAnnotator<String> {
public final static String PARAM_INPUT_DIRECTORY = ConfigurationParameterFactory
.createConfigurationParameterName(AddReachAnnotations.class, "inDirPath");
@ConfigurationParameter(mandatory = true, description = "Directory for the FRIES Frames.")
String inDirPath;
File inDir;
CleartkExtractor<Sentence, Token> countsExtractor;
TfidfExtractor<String, Sentence> tfidfExtractor;
private StringMetric cosineSimilarityMetric;
private StringMetric levenshteinSimilarityMetric;
private StringMetric needlemanWunchMetric;
private NeedlemanWunch nm;
Pattern alignmentPattern = Pattern.compile("^(.{0,3}_+)");
private CleartkExtractor<DocumentAnnotation, Token> extractor;
private static Logger logger = Logger.getLogger(AddReachAnnotations.class);
private Map<LapdftextXMLChunk, Integer> pgLookup = new HashMap<LapdftextXMLChunk, Integer>();
private Pattern wbDetect = Pattern.compile("(\\w\\W|\\W\\w)");
private Pattern wsDetect = Pattern.compile("\\s");
    public void initialize(UimaContext context) throws ResourceInitializationException {
        super.initialize(context);
        // Per-token count extractor over each sentence; feeds the TF-IDF transform below.
        this.countsExtractor = new CleartkExtractor<Sentence, Token>(
                Token.class,
                new CoveredTextExtractor<Token>(),
                new CleartkExtractor.Count(new CleartkExtractor.Covered()));
        this.tfidfExtractor = new TfidfExtractor<String, Sentence>(
                "TFIDF",
                countsExtractor);
        // Directory holding the FRIES frame JSON files (set via PARAM_INPUT_DIRECTORY).
        this.inDir = new File(this.inDirPath);
        // Cosine similarity over lower-cased, diacritic-stripped, whitespace + 2-gram tokens.
        cosineSimilarityMetric = new StringMetricBuilder().with(new CosineSimilarity<String>())
                .simplify(new CaseSimplifier.Lower()).simplify(new NonDiacriticSimplifier())
                .tokenize(new WhitespaceTokenizer()).tokenize(new QGramTokenizer(2)).build();
        // Levenshtein similarity on diacritic-stripped text; used downstream to align sentences.
        levenshteinSimilarityMetric = new StringMetricBuilder().with(new Levenshtein())
                .simplify(new NonDiacriticSimplifier()).build();
        // Needleman-Wunsch alignment metric; the raw instance (nm) is kept for direct access.
        this.nm = new NeedlemanWunch();
        needlemanWunchMetric = new StringMetricBuilder().with(nm).build();
    }
public void process(JCas jCas) throws AnalysisEngineProcessException {
try {
UimaBioCDocument uiD = JCasUtil.selectSingle(jCas, UimaBioCDocument.class);
if (uiD.getId().equals("skip"))
return;
UimaBioCPassage docP = UimaBioCUtils.readDocument(jCas);
List<Sentence> docSentences = JCasUtil.selectCovered(Sentence.class, docP);
UimaBioCAnnotation title = UimaBioCUtils.readArticleTitle(jCas);
List<Sentence> titleSentences = JCasUtil.selectCovered(Sentence.class, title);
UimaBioCAnnotation abst = UimaBioCUtils.readAbstract(jCas);
List<Sentence> abstSentences = JCasUtil.selectCovered(Sentence.class, abst);
List<UimaBioCAnnotation> floats = UimaBioCUtils.readFloats(jCas);
List<Sentence> floatSentences = new ArrayList<Sentence>();
for( UimaBioCAnnotation f :floats ) {
floatSentences.addAll(JCasUtil.selectCovered(Sentence.class, f));
}
List<Sentence> sentences = new ArrayList<Sentence>();
sentences.addAll(titleSentences);
sentences.addAll(abstSentences);
sentences.addAll(docSentences);
for(Sentence s : sentences) {
List<Feature> features = this.tfidfExtractor.extract(jCas, s);
String outcome = s.getBegin() + "_" + s.getEnd();
Instance<String> instance = new Instance<String>(
outcome, features
);
// this.tfidfExtractor.save(documentFreqDataURI);
}
logger.info("Adding annotations from " + uiD.getId());
Map<String, String> infons = UimaBioCUtils.convertInfons(uiD.getInfons());
String pmcDocId = "PMC" + infons.get("pmc");
String[] fileTypes = { "json" };
Collection<File> files = (Collection<File>) FileUtils.listFiles(this.inDir, fileTypes, true);
File sentenceFrames = null;
File eventFrames = null;
File entityFrames = null;
for (File f : files) {
if (f.getName().startsWith(pmcDocId + ".uaz.sentences") ||
f.getName().startsWith(uiD.getId() + ".uaz.sentences") ) {
sentenceFrames = f;
break;
}
}
for (File f : files) {
if (f.getName().startsWith(pmcDocId + ".uaz.events") ||
f.getName().startsWith(uiD.getId() + ".uaz.events") ) {
eventFrames = f;
break;
}
}
for (File f : files) {
if (f.getName().startsWith(pmcDocId + ".uaz.entities") ||
f.getName().startsWith(uiD.getId() + ".uaz.entities") ) {
entityFrames = f;
break;
}
}
if (eventFrames == null || sentenceFrames == null)
return;
final RuntimeTypeAdapterFactory<FRIES_Frame> typeFactory = RuntimeTypeAdapterFactory
.of(FRIES_Frame.class, "frame-type").registerSubtype(FRIES_EntityMention.class, "entity-mention")
.registerSubtype(FRIES_Sentence.class, "sentence").registerSubtype(FRIES_Passage.class, "passage")
.registerSubtype(FRIES_EventMention.class, "event-mention");
Gson gson = new GsonBuilder().setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_DASHES)
.registerTypeAdapterFactory(typeFactory).create();
FRIES_FrameCollection fc1 = gson.fromJson(new FileReader(eventFrames), FRIES_FrameCollection.class);
Map<String, Set<FRIES_EventMention>> eventMap = new HashMap<String, Set<FRIES_EventMention>>();
for (FRIES_Frame frame : fc1.getFrames()) {
if (frame instanceof FRIES_EventMention) {
FRIES_EventMention em = (FRIES_EventMention) frame;
String sn = em.getSentence();
if (!eventMap.containsKey(sn))
eventMap.put(sn, new HashSet<FRIES_EventMention>());
eventMap.get(sn).add(em);
}
}
FRIES_FrameCollection fc2 = gson.fromJson(new FileReader(entityFrames), FRIES_FrameCollection.class);
Map<String, Set<FRIES_EntityMention>> entityMap = new HashMap<String, Set<FRIES_EntityMention>>();
for (FRIES_Frame frame : fc2.getFrames()) {
if (frame instanceof FRIES_EntityMention) {
FRIES_EntityMention em = (FRIES_EntityMention) frame;
String sn = em.getSentence();
if (!entityMap.containsKey(sn))
entityMap.put(sn, new HashSet<FRIES_EntityMention>());
entityMap.get(sn).add(em);
}
}
//
// Work with the structure of the frame ids to order sentences.
//
FRIES_FrameCollection fc3 = gson.fromJson(
new FileReader(sentenceFrames),
FRIES_FrameCollection.class);
Map<FRIES_Key, FRIES_Sentence> sentenceMap = new HashMap<FRIES_Key, FRIES_Sentence>();
Map<FRIES_Key, FRIES_Passage> passageMap = new HashMap<FRIES_Key, FRIES_Passage>();
for (FRIES_Frame frame : fc3.getFrames()) {
if (frame instanceof FRIES_Sentence) {
FRIES_Sentence fs = (FRIES_Sentence) frame;
FRIES_Key key = new FRIES_Key(fs.getFrameId());
sentenceMap.put(key, fs);
} else if(frame instanceof FRIES_Passage) {
FRIES_Passage fs = (FRIES_Passage) frame;
FRIES_Key key = new FRIES_Key(fs.getFrameId());
passageMap.put(key, fs);
}
}
List<FRIES_Key> pKeys = new ArrayList<FRIES_Key>(passageMap.keySet());
Collections.sort(pKeys);
List<FRIES_Key> sMisses = new ArrayList<FRIES_Key>();
List<FRIES_Key> sKeys = new ArrayList<FRIES_Key>(sentenceMap.keySet());
Collections.sort(sKeys);
int pos = -1;
for(FRIES_Key sKey: sKeys) {
FRIES_Sentence friesSentence = sentenceMap.get(sKey);
String friesText = friesSentence.getText().replaceAll("\\s+", "");;
int ii = -1, best_i=-1;
float best = 0.0f;
for(int i=pos+1; i<sentences.size(); i++) {
Sentence ourSentence = sentences.get(i);
String ourText = UimaBioCUtils.friesifySentence(jCas, ourSentence).replaceAll("\\s+", "");
float sim = levenshteinSimilarityMetric.compare(friesText, ourText);
if( sim > 0.75 ) {
ii = i;
pos = i;
break;
} else {
if( sim > best ) {
best = sim;
best_i = i;
}
}
}
if( ii == -1 ) {
sMisses.add(sKey);
continue;
}
UimaBioCAnnotation a = new UimaBioCAnnotation(jCas);
Map<String, String> inf = new HashMap<String, String>();
inf.put("type", "FRIES_Sentence");
inf.put("value", friesSentence.getFrameId());
a.setBegin(sentences.get(ii).getBegin());
a.setEnd(sentences.get(ii).getEnd());
a.setInfons(UimaBioCUtils.convertInfons(inf, jCas));
}
// If we can't find them in the main text,
// look for them in floating boxes
pos = -1;
for(FRIES_Key sKey: sMisses) {
FRIES_Sentence friesSentence = sentenceMap.get(sKey);
String friesText = friesSentence.getText();
int ii = -1;
for(int i=0; i<floatSentences.size(); i++) {
Sentence ourSentence = sentences.get(i);
String ourText = ourSentence.getCoveredText();
float sim = levenshteinSimilarityMetric.compare(friesText, ourText);
if( sim > 0.75 ) {
ii = i;
pos = i;
break;
}
}
if( ii == -1 ) {
sMisses.add(sKey);
continue;
}
UimaBioCAnnotation a = new UimaBioCAnnotation(jCas);
Map<String, String> inf = new HashMap<String, String>();
inf.put("type", "FRIES_Sentence");
inf.put("value", friesSentence.getFrameId());
a.setBegin(sentences.get(ii).getBegin());
a.setEnd(sentences.get(ii).getEnd());
a.setInfons(UimaBioCUtils.convertInfons(inf, jCas));
}
int pauseHere = 0;
pauseHere++;
/*if (fs.getgetText().length() < 10)
continue;
float bestSim = 0.0f;
Sentence match = null;
for (Sentence s : sentences) {
float sim = cosineSimilarityMetric.compare(s.getCoveredText(), fs.getText());
if (sim > bestSim) {
match = s;
bestSim = sim;
}
if (sim > 0.9) {
break;
}
}
if (match != null) {
//
// Check for the best match based on Levenshtein edit
// distance
// to make sure that it's correct.
// We also truncate the match to the length of the FRIES
// corpus'
// sentence to ignore errors from the FRIES sentence
// splitter.
//
String ss1 = fs.getText().replaceAll("\\s+", "");
String ss2 = match.getCoveredText().replaceAll("\\s+", "");
if (ss2.length() > ss1.length() + 5) {
ss2 = ss2.substring(0, ss1.length() + 5);
}
float sim = levenshteinSimilarityMetric.compare(ss1, ss2);
if (sim < 0.85)
continue FRIES_FRAMES;
if (eventMap.containsKey(fs.getFrameId())) {
for (FRIES_EventMention em : eventMap.get(fs.getFrameId())) {
matchFriesEventToClearTkSentence(jCas, bestSim, match, em);
}
}
/*if (entityMap.containsKey(fs.getFrameId())) {
for (FRIES_EntityMention em : entityMap.get(fs.getFrameId())) {
matchFriesEventToClearTkSentence(jCas, bestSim, match, em);
}
}*/
//}
//}
//}
} catch (Exception e) {
throw new AnalysisEngineProcessException(e);
}
}
/**
 * Creates a UimaBioCAnnotation in the CAS for a single FRIES event or entity
 * mention, aligned to a span inside {@code match} (the sentence previously
 * matched against the FRIES sentence). Alignment is attempted in three stages:
 * (1) exact {@code lastIndexOf} match of the FRIES text,
 * (2) whitespace-insensitive match mapped back to real offsets,
 * (3) Needleman-Wunsch alignment as a last resort (whole sentence if that fails too).
 *
 * @param jCas    CAS to add the annotation to
 * @param bestSim similarity score of the sentence match, recorded as the "score" infon
 * @param match   our sentence that was matched against the FRIES sentence
 * @param f       the FRIES frame (event or entity mention) to anchor
 */
private void matchFriesEventToClearTkSentence(JCas jCas, float bestSim, Sentence match, FRIES_Frame f) {
    UimaBioCAnnotation a = new UimaBioCAnnotation(jCas);
    Map<String, String> inf = new HashMap<String, String>();
    inf.put("eventId", f.getFrameId());
    inf.put("sentId", f.getFrameId());
    inf.put("score", bestSim + "");
    if (f instanceof FRIES_EventMention) {
        FRIES_EventMention em = (FRIES_EventMention) f;
        inf.put("type", "FRIES_EventMention");
        inf.put("fType", em.getType());
        inf.put("fSubType", em.getSubtype());
        inf.put("friesEventText", em.getText());
        // Encode the event's arguments as "[arg1][arg2]...".
        String code = "";
        for (FRIES_Argument args : em.getArguments()) {
            code += "[" + args.getText() + "]";
        }
        inf.put("value", code);
        a.setInfons(UimaBioCUtils.convertInfons(inf, jCas));
    } else if (f instanceof FRIES_EntityMention) {
        FRIES_EntityMention em = (FRIES_EntityMention) f;
        // FIX: was "FRIES_EventMention" (copy/paste error); entity mentions are now
        // labeled with their own type so downstream filters can distinguish them.
        inf.put("type", "FRIES_EntityMention");
        inf.put("fType", em.getType());
        inf.put("fSubType", em.getSubtype());
        inf.put("friesEventText", em.getText());
        // Encode the entity's external references as "[ns:id][ns:id]...".
        String code = "";
        for (FRIES_XRef args : em.getXrefs()) {
            code += "[" + args.getNamespace() + ":" + args.getId() + "]";
        }
        inf.put("value", code);
        a.setInfons(UimaBioCUtils.convertInfons(inf, jCas));
    }
    // Need to align the text from FRIES.
    // Ideally find an exact match.
    int pos = match.getCoveredText().lastIndexOf(f.getText());
    if (pos != -1) {
        a.setBegin(match.getBegin() + pos);
        a.setEnd(match.getBegin() + pos + f.getText().length());
        a.addToIndexes(jCas);
    } else {
        //
        // If that doesn't work then match based on
        // from incorrectly matched whitespace.
        //
        String no_ws1 = match.getCoveredText().replaceAll("\\s+", "");
        String no_ws2 = f.getText().replaceAll("\\s+", "");
        // FIX: the original computed lastIndexOf(...) + 1 and then compared the
        // result against -1, which is always true (a miss yields 0), so the
        // Needleman-Wunsch fallback below was unreachable. Test the raw index.
        int no_ws_idx = no_ws1.lastIndexOf(no_ws2);
        if (no_ws_idx != -1) {
            int no_ws_pos = no_ws_idx + 1; // 1-based position used by the scan below
            String s1 = match.getCoveredText();
            int startPos = 0;
            int endPos = 0;
            int p = 0;
            // Walk the original (whitespace-containing) text, counting only
            // non-whitespace chars, to map ws-free offsets back to real offsets.
            for (int i = 0; i < s1.length(); i++) {
                if (!s1.substring(i, i + 1).matches("\\s"))
                    p++;
                if (p == no_ws_pos)
                    startPos = i;
                if (p == no_ws_pos + no_ws2.length())
                    endPos = i;
            }
            if (endPos == 0)
                endPos = match.getEnd();
            a.setBegin(match.getBegin() + startPos);
            a.setEnd(match.getBegin() + endPos);
            try {
                logger.info("new: " + a.getCoveredText());
                logger.info("old: " + f.getText());
                a.addToIndexes(jCas);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        //
        // If that doesn't work,
        // use Needleman Wunch algorithm.
        //
        else {
            // NOTE(review): the similarity is computed with needlemanWunchMetric but
            // the alignments are read from this.nm — presumably compare() populates
            // nm's alignments as a side effect; confirm both refer to the same
            // metric instance.
            float sim2 = this.needlemanWunchMetric.compare(match.getCoveredText(),
                    f.getText());
            String s1Align = this.nm.getString1Alignment();
            String s2Align = this.nm.getString2Alignment();
            //
            // Using this algorithm, the string alignments seem to make a
            // substitution error at the start, which looks like "f__________"
            // - We detect this and correct for it.
            //
            Matcher m = this.alignmentPattern.matcher(s2Align);
            if (m.find()) {
                int offset = m.group(1).length() - 1;
                String s1Remain = s1Align.substring(offset, s1Align.length());
                int s1ModCount = this.countChars(s1Remain, "_");
                String s2Remain = s2Align.substring(offset, s2Align.length());
                int s2ModCount = this.countChars(s2Remain, "_");
                // Span length in original-text chars: aligned span minus gaps in
                // s2, plus gaps in s1 (gaps in s1 consume original characters).
                int len = s2Remain.length() - s2ModCount + s1ModCount;
                a.setBegin(match.getBegin() + offset);
                a.setEnd(match.getBegin() + offset + len);
                try {
                    logger.info("new: " + a.getCoveredText());
                    logger.info("old: " + f.getText());
                    a.addToIndexes(jCas);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                // Could not align at all: fall back to annotating the whole sentence.
                a.setBegin(match.getBegin());
                a.setEnd(match.getEnd());
                try {
                    logger.warn("Entity: " + f.getText());
                    logger.warn("Error Aligning Texts");
                    a.addToIndexes(jCas);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
/**
 * Counts how many characters of {@code s} are covered by occurrences of the
 * literal string {@code c} (for a single-character {@code c} this is simply
 * the number of occurrences).
 *
 * @param s string to scan
 * @param c literal text to count (NOT interpreted as a regular expression)
 * @return number of characters removed when every occurrence of {@code c} is deleted
 */
private int countChars(String s, String c) {
    // FIX: the original used replaceAll(), which interprets c as a regex. The
    // only current caller passes "_" so behavior is unchanged, but a literal
    // replace() is safe for any future argument (e.g. ".", "*").
    String mod = s.replace(c, "");
    return (s.length() - mod.length());
}
/**
 * Wrapper around a FRIES frame id (e.g. "pass-15550174-UAZ-r1-0" for passages,
 * or a 6-field id ending in "-0-0" for sentences) that orders frames by the
 * passage and sentence indexes encoded in the id.
 */
private class FRIES_Key implements Comparable<FRIES_Key> {
    private String key;

    public FRIES_Key(String key) {
        this.key = key;
    }

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    // "pass-15550174-UAZ-r1-0": the 5th '-'-separated field is the passage index.
    private int read_p_index() {
        String[] tup = this.key.split("\\-");
        // FIX: Integer.parseInt instead of the deprecated new Integer(String).
        return Integer.parseInt(tup[4]);
    }

    // "pass-15550174-UAZ-r1-0-0": the 6th '-'-separated field is the sentence index.
    private int read_s_index() {
        String[] tup = this.key.split("\\-");
        return Integer.parseInt(tup[5]);
    }

    @Override
    public int compareTo(FRIES_Key compareKey) {
        // Passage keys carry no sentence index; compare by passage index only.
        // FIX: Integer.compare instead of subtraction (subtraction can overflow).
        if (this.key.startsWith("pass")) {
            return Integer.compare(this.read_p_index(), compareKey.read_p_index());
        }
        if (this.read_p_index() != compareKey.read_p_index()) {
            return Integer.compare(this.read_p_index(), compareKey.read_p_index());
        }
        return Integer.compare(this.read_s_index(), compareKey.read_s_index());
    }

    // FIX: instances are used as HashMap keys (sentenceMap/passageMap); without
    // equals/hashCode, lookups only worked for the identical object instance.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof FRIES_Key)) {
            return false;
        }
        return this.key.equals(((FRIES_Key) o).key);
    }

    @Override
    public int hashCode() {
        return key.hashCode();
    }

    @Override
    public String toString() {
        return this.key;
    }
}
}
|
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package vgrechka.phizdetsidea.phizdets.sdk.skeletons;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.execution.ExecutionException;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Consumer;
import com.intellij.util.Function;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.ZipUtil;
import vgrechka.phizdetsidea.phizdets.PyBundle;
import vgrechka.phizdetsidea.phizdets.PyNames;
import vgrechka.phizdetsidea.phizdets.buildout.BuildoutFacet;
import vgrechka.phizdetsidea.phizdets.codeInsight.userSkeletons.PyUserSkeletonsUtil;
import vgrechka.phizdetsidea.phizdets.psi.resolve.PhizdetsSdkPathCache;
import vgrechka.phizdetsidea.phizdets.remote.PhizdetsRemoteInterpreterManager;
import vgrechka.phizdetsidea.phizdets.sdk.InvalidSdkException;
import vgrechka.phizdetsidea.phizdets.sdk.PySdkUtil;
import vgrechka.phizdetsidea.phizdets.sdk.PhizdetsSdkType;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.io.*;
import java.util.*;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static vgrechka.phizdetsidea.phizdets.sdk.skeletons.SkeletonVersionChecker.fromVersionString;
/**
* Handles a refresh of SDK's skeletons.
* Does all the heavy lifting calling skeleton generator, managing blacklists, etc.
* One-time, non-reusable instances.
* <br/>
* User: dcheryasov
* Date: 4/15/11 5:38 PM
*/
public class PySkeletonRefresher {
private static final Logger LOG = Logger.getInstance("#" + PySkeletonRefresher.class.getName());
@Nullable private Project myProject;
private @Nullable final ProgressIndicator myIndicator;
@NotNull private final Sdk mySdk;
private String mySkeletonsPath;
@NonNls public static final String BLACKLIST_FILE_NAME = ".blacklist";
private final static Pattern BLACKLIST_LINE = Pattern.compile("^([^=]+) = (\\d+\\.\\d+) (\\d+)\\s*$");
// we use the equals sign after filename so that we can freely include space in the filename
// Path (the first component) may contain spaces, this header spec is deprecated
private static final Pattern VERSION_LINE_V1 = Pattern.compile("# from (\\S+) by generator (\\S+)\\s*");
// Skeleton header spec v2
private static final Pattern FROM_LINE_V2 = Pattern.compile("# from (.*)$");
private static final Pattern BY_LINE_V2 = Pattern.compile("# by generator (.*)$");
private static int ourGeneratingCount = 0;
private String myExtraSyspath;
private VirtualFile myPregeneratedSkeletons;
private int myGeneratorVersion;
private Map<String, Pair<Integer, Long>> myBlacklist;
private SkeletonVersionChecker myVersionChecker;
private PySkeletonGenerator mySkeletonsGenerator;
// True while at least one skeleton-generation run is in flight anywhere in the
// process (see the ourGeneratingCount counter).
public static synchronized boolean isGeneratingSkeletons() {
return ourGeneratingCount > 0;
}
// Adjusts the process-wide "generation in progress" counter; callers must
// balance +1/-1 calls (see the try/finally in refreshSkeletonsOfSdk).
private static synchronized void changeGeneratingSkeletons(int increment) {
ourGeneratingCount += increment;
}
/**
 * Refreshes the skeletons of a single SDK, collecting per-module failures and
 * logging a summary message when anything failed.
 *
 * @param project        project used for progress reporting; may be null
 * @param ownerComponent UI component owning any dialogs shown by the generator
 * @param skeletonsPath  target directory for the skeletons; null skips the refresh
 * @param sdk            the SDK whose skeletons are refreshed
 * @throws InvalidSdkException if the SDK paths are invalid
 */
public static void refreshSkeletonsOfSdk(@Nullable Project project,
Component ownerComponent,
String skeletonsPath,
@NotNull Sdk sdk)
throws InvalidSdkException {
final Map<String, List<String>> errors = new TreeMap<>();
// NOTE(review): failedSdks is never populated in this method, so the
// "$2.completely" message branch below appears unreachable — confirm intended.
final List<String> failedSdks = new SmartList<>();
final ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
final String homePath = sdk.getHomePath();
if (skeletonsPath == null) {
LOG.info("Could not find skeletons path for SDK path " + homePath);
}
else {
LOG.info("Refreshing skeletons for " + homePath);
SkeletonVersionChecker checker = new SkeletonVersionChecker(0); // this default version won't be used
final PySkeletonRefresher refresher = new PySkeletonRefresher(project, ownerComponent, sdk, skeletonsPath, indicator, null);
// Keep the global "generation in progress" counter balanced even on failure.
changeGeneratingSkeletons(1);
try {
List<String> sdkErrors = refresher.regenerateSkeletons(checker);
if (sdkErrors.size() > 0) {
String sdkName = sdk.getName();
List<String> knownErrors = errors.get(sdkName);
if (knownErrors == null) {
errors.put(sdkName, sdkErrors);
}
else {
knownErrors.addAll(sdkErrors);
}
}
}
finally {
changeGeneratingSkeletons(-1);
}
}
if (failedSdks.size() > 0 || errors.size() > 0) {
// Count individual failed modules across all SDKs for the summary message.
int module_errors = 0;
for (String sdk_name : errors.keySet()) module_errors += errors.get(sdk_name).size();
String message;
if (failedSdks.size() > 0) {
message = PyBundle.message("sdk.errorlog.$0.mods.fail.in.$1.sdks.$2.completely", module_errors, errors.size(), failedSdks.size());
}
else {
message = PyBundle.message("sdk.errorlog.$0.mods.fail.in.$1.sdks", module_errors, errors.size());
}
logErrors(errors, failedSdks, message);
}
}
/**
 * Writes the collected skeleton-generation failures to the log: the summary
 * message, the completely-failed SDKs (if any), and every failed module name.
 */
private static void logErrors(@NotNull final Map<String, List<String>> errors, @NotNull final List<String> failedSdks,
                              @NotNull final String message) {
    LOG.warn(PyBundle.message("sdk.some.skeletons.failed"));
    LOG.warn(message);
    if (!failedSdks.isEmpty()) {
        LOG.warn(PyBundle.message("sdk.error.dialog.failed.sdks"));
        LOG.warn(StringUtil.join(failedSdks, ", "));
    }
    if (!errors.isEmpty()) {
        LOG.warn(PyBundle.message("sdk.error.dialog.failed.modules"));
        for (Map.Entry<String, List<String>> entry : errors.entrySet()) {
            for (String moduleName : entry.getValue()) {
                LOG.warn(moduleName);
            }
        }
    }
}
/**
* Creates a new object that refreshes skeletons of given SDK.
*
* @param sdk a Phizdets SDK
* @param skeletonsPath if known; null means 'determine and create as needed'.
* @param indicator to report progress of long operations
*/
public PySkeletonRefresher(@Nullable Project project,
@Nullable Component ownerComponent,
@NotNull Sdk sdk,
@Nullable String skeletonsPath,
@Nullable ProgressIndicator indicator,
@Nullable String folder)
throws InvalidSdkException {
myProject = project;
myIndicator = indicator;
mySdk = sdk;
mySkeletonsPath = skeletonsPath;
// Remote SDKs delegate skeleton generation to the remote-interpreter support;
// local SDKs run the generator directly.
final PhizdetsRemoteInterpreterManager remoteInterpreterManager = PhizdetsRemoteInterpreterManager.getInstance();
if (PySdkUtil.isRemote(sdk) && remoteInterpreterManager != null) {
try {
mySkeletonsGenerator = remoteInterpreterManager.createRemoteSkeletonGenerator(myProject, ownerComponent, sdk, getSkeletonsPath());
}
catch (ExecutionException e) {
// NOTE(review): only the message and cause are preserved here — the
// ExecutionException itself is dropped from the chain; confirm intended.
throw new InvalidSdkException(e.getMessage(), e.getCause());
}
}
else {
mySkeletonsGenerator = new PySkeletonGenerator(getSkeletonsPath(), mySdk, folder);
}
}
/** Shows {@code msg} as the main progress text (clearing the minor text) and honors cancellation. */
private void indicate(String msg) {
    if (myIndicator == null) {
        return;
    }
    myIndicator.checkCanceled();
    myIndicator.setText(msg);
    myIndicator.setText2("");
}
/** Shows {@code msg} as the secondary (minor) progress text. */
private void indicateMinor(String msg) {
    if (myIndicator == null) {
        return;
    }
    myIndicator.setText2(msg);
}
/** Propagates a user cancellation request, if a progress indicator is attached. */
private void checkCanceled() {
    if (myIndicator == null) {
        return;
    }
    myIndicator.checkCanceled();
}
/**
 * Builds the extra sys.path string handed to the skeleton generator: the SDK's
 * library roots plus buildout extra paths, minus the skeleton directories and
 * the remote-sources folder themselves. Entries are joined with the platform
 * path separator.
 */
private static String calculateExtraSysPath(@NotNull final Sdk sdk, @Nullable final String skeletonsPath) {
    final File skeletons = skeletonsPath == null ? null : new File(skeletonsPath);
    final VirtualFile userSkeletonsDir = PyUserSkeletonsUtil.getUserSkeletonsDirectory();
    final File userSkeletons = userSkeletonsDir == null ? null : new File(userSkeletonsDir.getPath());
    final VirtualFile remoteSourcesDir = PySdkUtil.findAnyRemoteLibrary(sdk);
    final File remoteSources = remoteSourcesDir == null ? null : new File(remoteSourcesDir.getPath());
    final List<VirtualFile> roots = new ArrayList<>();
    Collections.addAll(roots, sdk.getRootProvider().getFiles(OrderRootType.CLASSES));
    roots.addAll(BuildoutFacet.getExtraPathForAllOpenModules());
    return Joiner.on(File.pathSeparator).join(ContainerUtil.mapNotNull(roots, (Function<VirtualFile, Object>)file -> {
        if (!file.isInLocalFileSystem()) {
            return null;
        }
        // We compare canonical files, not strings because "c:/some/folder" equals "c:\\some\\bin\\..\\folder\\"
        final File canonicalFile = new File(file.getPath());
        if (canonicalFile.exists() &&
            !FileUtil.filesEqual(canonicalFile, skeletons) &&
            !FileUtil.filesEqual(canonicalFile, userSkeletons) &&
            !FileUtil.filesEqual(canonicalFile, remoteSources)) {
            return file.getPath();
        }
        return null;
    }));
}
/**
* Creates if needed all path(s) used to store skeletons of its SDK.
*
* @return path name of skeleton dir for the SDK, guaranteed to be already created.
*/
@NotNull
public String getSkeletonsPath() throws InvalidSdkException {
    // Lazily resolve and create the per-SDK skeletons directory on first use.
    if (mySkeletonsPath != null) {
        return mySkeletonsPath;
    }
    mySkeletonsPath = PhizdetsSdkType.getSkeletonsPath(PathManager.getSystemPath(), mySdk.getHomePath());
    final File skeletonsDir = new File(mySkeletonsPath);
    if (!skeletonsDir.exists() && !skeletonsDir.mkdirs()) {
        throw new InvalidSdkException("Can't create skeleton dir " + String.valueOf(mySkeletonsPath));
    }
    return mySkeletonsPath;
}
/**
 * Regenerates all skeletons for this refresher's SDK: queries the generator for
 * the binary module list, unpacks pregenerated or base-SDK skeletons when the
 * local copy is missing or ancient, updates builtins and per-module skeletons,
 * maintains the blacklist of failed modules, and finally prunes stale files.
 *
 * @param cachedChecker version checker to reuse; null builds one from the generator version
 * @return names of modules whose skeletons freshly failed to generate
 * @throws InvalidSdkException if SDK paths are invalid
 */
public List<String> regenerateSkeletons(@Nullable SkeletonVersionChecker cachedChecker) throws InvalidSdkException {
final List<String> errorList = new SmartList<>();
final String homePath = mySdk.getHomePath();
final String skeletonsPath = getSkeletonsPath();
final File skeletonsDir = new File(skeletonsPath);
if (!skeletonsDir.exists()) {
//noinspection ResultOfMethodCallIgnored
skeletonsDir.mkdirs();
}
final String readablePath = FileUtil.getLocationRelativeToUserHome(homePath);
mySkeletonsGenerator.prepare();
myBlacklist = loadBlacklist();
indicate(PyBundle.message("sdk.gen.querying.$0", readablePath));
// get generator version and binary libs list in one go
final String extraSysPath = calculateExtraSysPath(mySdk, getSkeletonsPath());
final PySkeletonGenerator.ListBinariesResult binaries = mySkeletonsGenerator.listBinaries(mySdk, extraSysPath);
myGeneratorVersion = binaries.generatorVersion;
myPregeneratedSkeletons = findPregeneratedSkeletons();
indicate(PyBundle.message("sdk.gen.reading.versions.file"));
if (cachedChecker != null) {
myVersionChecker = cachedChecker.withDefaultVersionIfUnknown(myGeneratorVersion);
}
else {
myVersionChecker = new SkeletonVersionChecker(myGeneratorVersion);
}
// check builtins
final String builtinsFileName = PhizdetsSdkType.getBuiltinsFileName(mySdk);
final File builtinsFile = new File(skeletonsPath, builtinsFileName);
final SkeletonHeader oldHeader = readSkeletonHeader(builtinsFile);
// A missing or version-0 builtins header indicates a fresh or very old install.
final boolean oldOrNonExisting = oldHeader == null || oldHeader.getVersion() == 0;
if (myPregeneratedSkeletons != null && oldOrNonExisting) {
unpackPreGeneratedSkeletons();
}
if (oldOrNonExisting) {
// Virtualenvs can reuse the base interpreter's skeletons for identical binaries.
copyBaseSdkSkeletonsToVirtualEnv(skeletonsPath, binaries);
}
final boolean builtinsUpdated = updateSkeletonsForBuiltins(readablePath, builtinsFile);
if (!binaries.modules.isEmpty()) {
indicate(PyBundle.message("sdk.gen.updating.$0", readablePath));
final List<UpdateResult> updateErrors = updateOrCreateSkeletons(binaries.modules);
if (updateErrors.size() > 0) {
indicateMinor(BLACKLIST_FILE_NAME);
// Record failures so we don't retry until the module or generator changes.
for (UpdateResult error : updateErrors) {
if (error.isFresh()) errorList.add(error.getName());
myBlacklist.put(error.getPath(), new Pair<>(myGeneratorVersion, error.getTimestamp()));
}
storeBlacklist(skeletonsDir, myBlacklist);
}
else {
removeBlacklist(skeletonsDir);
}
}
indicate(PyBundle.message("sdk.gen.reloading"));
mySkeletonsGenerator.refreshGeneratedSkeletons();
if (!oldOrNonExisting) {
indicate(PyBundle.message("sdk.gen.cleaning.$0", readablePath));
cleanUpSkeletons(skeletonsDir);
}
// Builtins changed (or remote SDK refreshed): re-run code analysis in the project.
if ((builtinsUpdated || PySdkUtil.isRemote(mySdk)) && myProject != null) {
ApplicationManager.getApplication().invokeLater(() -> DaemonCodeAnalyzer.getInstance(myProject).restart(), myProject.getDisposed());
}
return errorList;
}
/**
 * Regenerates the builtin skeletons when they are missing or older than the
 * version required by the checker; skipped entirely when pregenerated skeletons
 * were installed (those already include builtins).
 *
 * @return true if the builtins were regenerated
 */
private boolean updateSkeletonsForBuiltins(String readablePath, File builtinsFile) throws InvalidSdkException {
    final SkeletonHeader newHeader = readSkeletonHeader(builtinsFile);
    final boolean headerMissingOrOld =
        newHeader == null || newHeader.getVersion() < myVersionChecker.getBuiltinVersion();
    final boolean mustUpdateBuiltins = myPregeneratedSkeletons == null && headerMissingOrOld;
    if (mustUpdateBuiltins) {
        indicate(PyBundle.message("sdk.gen.updating.builtins.$0", readablePath));
        mySkeletonsGenerator.generateBuiltinSkeletons(mySdk);
        if (myProject != null) {
            // Drop the cached builtins so the freshly generated ones are picked up.
            PhizdetsSdkPathCache.getInstance(myProject, mySdk).clearBuiltins();
        }
    }
    return mustUpdateBuiltins;
}
/**
 * For a virtualenv SDK, copies skeletons from the base interpreter for every
 * binary module that appears identical (same length) in both, avoiding
 * regeneration. No-op when the SDK has no virtualenv base.
 */
private void copyBaseSdkSkeletonsToVirtualEnv(String skeletonsPath, PySkeletonGenerator.ListBinariesResult binaries)
throws InvalidSdkException {
final Sdk base = PhizdetsSdkType.getInstance().getVirtualEnvBaseSdk(mySdk);
if (base != null) {
indicate("Copying base SDK skeletons for virtualenv...");
final String baseSkeletonsPath = PhizdetsSdkType.getSkeletonsPath(PathManager.getSystemPath(), base.getHomePath());
final PySkeletonGenerator.ListBinariesResult baseBinaries =
mySkeletonsGenerator.listBinaries(base, calculateExtraSysPath(base, baseSkeletonsPath));
for (Map.Entry<String, PyBinaryItem> entry : binaries.modules.entrySet()) {
final String module = entry.getKey();
final PyBinaryItem binary = entry.getValue();
// baseBinary may be null here; it is only dereferenced after the
// containsKey check below short-circuits the && chain.
final PyBinaryItem baseBinary = baseBinaries.modules.get(module);
final File fromFile = getSkeleton(module, baseSkeletonsPath);
if (baseBinaries.modules.containsKey(module) &&
fromFile.exists() &&
binary.length() == baseBinary.length()) { // Weak binary modules equality check
final File toFile = fromFile.isDirectory() ?
getPackageSkeleton(module, skeletonsPath) :
getModuleSkeleton(module, skeletonsPath);
try {
FileUtil.copy(fromFile, toFile);
}
catch (IOException e) {
// Non-fatal: the module will simply be regenerated like any other.
LOG.info("Error copying base virtualenv SDK skeleton for " + module, e);
}
}
}
}
}
/**
 * Extracts the pregenerated-skeletons archive (if resolvable as a jar) into the
 * skeletons directory; extraction failures are logged and otherwise ignored.
 */
private void unpackPreGeneratedSkeletons() throws InvalidSdkException {
    indicate("Unpacking pregenerated skeletons...");
    try {
        final VirtualFile jar = JarFileSystem.getInstance().getVirtualFileForJar(myPregeneratedSkeletons);
        if (jar != null) {
            ZipUtil.extract(new File(jar.getPath()), new File(getSkeletonsPath()), null);
        }
    }
    catch (IOException e) {
        LOG.info("Error unpacking pregenerated skeletons", e);
    }
}
/**
 * Parses the header of a skeleton file and returns the source binary plus the
 * generator version, supporting both the v1 ("# from X by generator Y") and v2
 * ("# from ..." / "# by generator ...") header formats.
 *
 * @param file skeleton file to read
 * @return parsed header, or null when the file is unreadable or has no recognizable header
 */
@Nullable
public static SkeletonHeader readSkeletonHeader(@NotNull File file) {
    // NOTE(review): FileReader uses the platform default charset; header lines are
    // ASCII in practice, so this is preserved as-is.
    // FIX: manual try/finally close replaced with try-with-resources.
    try (LineNumberReader reader = new LineNumberReader(new FileReader(file))) {
        String line = null;
        // Read 3 lines, skip first 2: encoding, module name
        for (int i = 0; i < 3; i++) {
            line = reader.readLine();
            if (line == null) {
                return null;
            }
        }
        // Try the old whitespace-unsafe header format v1 first
        final Matcher v1Matcher = VERSION_LINE_V1.matcher(line);
        if (v1Matcher.matches()) {
            return new SkeletonHeader(v1Matcher.group(1), fromVersionString(v1Matcher.group(2)));
        }
        final Matcher fromMatcher = FROM_LINE_V2.matcher(line);
        if (fromMatcher.matches()) {
            final String binaryFile = fromMatcher.group(1);
            line = reader.readLine();
            if (line != null) {
                final Matcher byMatcher = BY_LINE_V2.matcher(line);
                if (byMatcher.matches()) {
                    final int version = fromVersionString(byMatcher.group(1));
                    return new SkeletonHeader(binaryFile, version);
                }
            }
        }
    }
    catch (IOException ignored) {
        // Unreadable/truncated file: treated the same as "no header" below.
    }
    return null;
}
/**
 * Parsed skeleton-file header: the binary module the skeleton was generated
 * from, and the generator version that produced it.
 */
public static class SkeletonHeader {
    @NotNull private final String myFile;
    private final int myVersion;

    public SkeletonHeader(@NotNull String binaryFile, int version) {
        myFile = binaryFile;
        myVersion = version;
    }

    /** @return path of the binary module this skeleton was generated from */
    @NotNull
    public String getBinaryFile() {
        return myFile;
    }

    /** @return generator version recorded in the header */
    public int getVersion() {
        return myVersion;
    }
}
/**
 * Reads the blacklist file from the skeletons directory. Each non-comment line
 * has the form "filename = generator.version timestamp"; unparsable lines are
 * logged and skipped.
 *
 * @return map of binary path to (generator version, binary mtime) of the failed attempt
 */
private Map<String, Pair<Integer, Long>> loadBlacklist() {
    final Map<String, Pair<Integer, Long>> ret = new HashMap<>();
    final File blacklistFile = new File(mySkeletonsPath, BLACKLIST_FILE_NAME);
    if (!blacklistFile.exists() || !blacklistFile.canRead()) {
        return ret;
    }
    // FIX: nested try/finally replaced with try-with-resources; a failure to open
    // the file is now logged (it was silently swallowed before).
    try (LineNumberReader lines = new LineNumberReader(new FileReader(blacklistFile))) {
        String line;
        while ((line = lines.readLine()) != null) {
            if (line.isEmpty() || line.charAt(0) == '#') { // '#' begins a comment
                continue;
            }
            final Matcher matcher = BLACKLIST_LINE.matcher(line);
            boolean notParsed = true;
            if (matcher.matches()) {
                final int version = fromVersionString(matcher.group(2));
                if (version > 0) {
                    try {
                        final long timestamp = Long.parseLong(matcher.group(3));
                        final String filename = matcher.group(1);
                        ret.put(filename, new Pair<>(version, timestamp));
                        notParsed = false;
                    }
                    catch (NumberFormatException ignore) {
                        // falls through to the "strange line" warning below
                    }
                }
            }
            if (notParsed) LOG.warn("In blacklist at " + mySkeletonsPath + " strange line '" + line + "'");
        }
    }
    catch (IOException ex) {
        LOG.warn("Failed to read blacklist in " + mySkeletonsPath, ex);
    }
    return ret;
}
/**
 * Writes the blacklist of failed modules to {@code skeletonDir}, one
 * "filename = version timestamp" entry per line after a comment header.
 */
private static void storeBlacklist(File skeletonDir, Map<String, Pair<Integer, Long>> blacklist) {
    final File blacklistFile = new File(skeletonDir, BLACKLIST_FILE_NAME);
    // FIX: manual try/finally close replaced with try-with-resources; iterate
    // entrySet instead of keySet + get.
    try (PrintWriter output = new PrintWriter(blacklistFile)) {
        output.println("# PyCharm failed to generate skeletons for these modules.");
        output.println("# These skeletons will be re-generated automatically");
        output.println("# when a newer module version or an updated generator becomes available.");
        // each line: filename = version.string timestamp
        for (Map.Entry<String, Pair<Integer, Long>> entry : blacklist.entrySet()) {
            final Pair<Integer, Long> data = entry.getValue();
            output.print(entry.getKey());
            output.print(" = ");
            output.print(SkeletonVersionChecker.toVersionString(data.getFirst()));
            output.print(" ");
            output.print(data.getSecond());
            output.println();
        }
    }
    catch (IOException ex) {
        LOG.warn("Failed to store blacklist in " + skeletonDir.getPath(), ex);
    }
}
/** Deletes the blacklist file in the given skeleton directory, if present. */
private static void removeBlacklist(File skeletonDir) {
    final File blacklistFile = new File(skeletonDir, BLACKLIST_FILE_NAME);
    if (!blacklistFile.exists()) {
        return;
    }
    if (!blacklistFile.delete()) {
        LOG.warn("Could not delete blacklist file in " + skeletonDir.getPath());
    }
}
/**
* For every existing skeleton file, take its module file name,
* and remove the skeleton if the module file does not exist.
* Works recursively starting from dir. Removes dirs that become empty.
*/
private void cleanUpSkeletons(final File dir) {
indicateMinor(dir.getPath());
final File[] files = dir.listFiles();
if (files == null) {
// Not a directory (or an I/O error): nothing to clean here.
return;
}
for (File item : files) {
if (item.isDirectory()) {
cleanUpSkeletons(item);
// was the dir emptied?
File[] remaining = item.listFiles();
if (remaining != null && remaining.length == 0) {
mySkeletonsGenerator.deleteOrLog(item);
}
else if (remaining != null && remaining.length == 1) { //clean also if contains only __init__.py
File lastFile = remaining[0];
if (PyNames.INIT_DOT_PY.equals(lastFile.getName()) && lastFile.length() == 0) {
boolean deleted = mySkeletonsGenerator.deleteOrLog(lastFile);
if (deleted) mySkeletonsGenerator.deleteOrLog(item);
}
}
}
else if (item.isFile()) {
// clean up an individual file
final String itemName = item.getName();
if (PyNames.INIT_DOT_PY.equals(itemName) && item.length() == 0) continue; // these are versionless
if (BLACKLIST_FILE_NAME.equals(itemName)) continue; // don't touch the blacklist
if (PhizdetsSdkType.getBuiltinsFileName(mySdk).equals(itemName)) {
// Builtins have no corresponding binary module; never remove them.
continue;
}
// A skeleton survives only if it has a valid header AND its source binary
// still exists (builtins are always considered existing).
final SkeletonHeader header = readSkeletonHeader(item);
boolean canLive = header != null;
if (canLive) {
final String binaryFile = header.getBinaryFile();
canLive = SkeletonVersionChecker.BUILTIN_NAME.equals(binaryFile) || mySkeletonsGenerator.exists(binaryFile);
}
if (!canLive) {
mySkeletonsGenerator.deleteOrLog(item);
}
}
}
}
/** Outcome of one module's skeleton (re)generation attempt, used for blacklisting. */
private static class UpdateResult {
    private final String myPath;
    private final String myName;
    private final long myTimestamp;
    private final boolean myIsFresh;

    private UpdateResult(String name, String path, long timestamp, boolean fresh) {
        myName = name;
        myPath = path;
        myTimestamp = timestamp;
        myIsFresh = fresh;
    }

    /** True when this failure is new, i.e. not already recorded in the blacklist. */
    public boolean isFresh() {
        return myIsFresh;
    }

    public String getName() {
        return myName;
    }

    public String getPath() {
        return myPath;
    }

    public Long getTimestamp() {
        return myTimestamp;
    }
}
/**
* (Re-)generates skeletons for all binary phizdets modules. Up-to-date skeletons are not regenerated.
* Does one module at a time: slower, but avoids certain conflicts.
*
* @param modules output of generator3 -L
* @return blacklist data; whatever was not generated successfully is put here.
*/
private List<UpdateResult> updateOrCreateSkeletons(Map<String, PyBinaryItem> modules) throws InvalidSdkException {
    final long startTime = System.currentTimeMillis();
    // Process modules in a stable (sorted) order, one at a time.
    final List<String> names = Lists.newArrayList(modules.keySet());
    Collections.sort(names);
    final List<UpdateResult> results = new ArrayList<>();
    final int total = names.size();
    for (int idx = 0; idx < total; idx++) {
        checkCanceled();
        if (myIndicator != null) {
            myIndicator.setFraction((double)idx / total);
        }
        final PyBinaryItem module = modules.get(names.get(idx));
        if (module == null) {
            continue;
        }
        updateOrCreateSkeleton(module, results);
    }
    finishSkeletonsGeneration();
    final long doneInMs = System.currentTimeMillis() - startTime;
    LOG.info("Rebuilding skeletons for binaries took " + doneInMs + " ms");
    return results;
}
// Delegates batch-completion handling to the underlying generator after a full run.
private void finishSkeletonsGeneration() {
mySkeletonsGenerator.finishSkeletonsGeneration();
}
/**
 * Resolves a module's skeleton file: the plain-module form ("pkg/mod.py") if it
 * exists, otherwise the package form ("pkg/mod/__init__.py").
 */
private static File getSkeleton(String moduleName, String skeletonsPath) {
    final File moduleFile = getModuleSkeleton(moduleName, skeletonsPath);
    if (moduleFile.exists()) {
        return moduleFile;
    }
    return getPackageSkeleton(moduleName, skeletonsPath);
}
/** Maps a dotted module name to its plain skeleton file, e.g. "a.b" -> "a/b.py". */
private static File getModuleSkeleton(String module, String skeletonsPath) {
    return new File(skeletonsPath, module.replace('.', '/') + ".py");
}
/** Maps a dotted package name to its skeleton file, e.g. "a.b" -> "a/b/__init__.py". */
private static File getPackageSkeleton(String pkg, String skeletonsPath) {
    return new File(new File(skeletonsPath, pkg.replace('.', '/')), PyNames.INIT_DOT_PY);
}
/**
 * Decides whether one binary module's skeleton needs (re)generation — based on
 * header version, binary mtime, and the blacklist of past failures — and
 * triggers generation when needed.
 *
 * @return true only when a pregenerated skeleton was copied in; false otherwise
 *         (including when generation was scheduled)
 */
private boolean updateOrCreateSkeleton(final PyBinaryItem binaryItem,
final List<UpdateResult> errorList) throws InvalidSdkException {
final String moduleName = binaryItem.getModule();
final File skeleton = getSkeleton(moduleName, getSkeletonsPath());
final SkeletonHeader header = readSkeletonHeader(skeleton);
boolean mustRebuild = true; // guilty unless proven fresh enough
if (header != null) {
int requiredVersion = myVersionChecker.getRequiredVersion(moduleName);
mustRebuild = header.getVersion() < requiredVersion;
}
if (!mustRebuild) { // ...but what if the lib was updated?
mustRebuild = (skeleton.exists() && binaryItem.lastModified() > skeleton.lastModified());
// really we can omit both exists() calls but I keep these to make the logic clear
}
if (myBlacklist != null) {
// A blacklisted module is only retried when the generator got newer or the
// binary changed since the recorded failure.
Pair<Integer, Long> versionInfo = myBlacklist.get(binaryItem.getPath());
if (versionInfo != null) {
int failedGeneratorVersion = versionInfo.getFirst();
long failedTimestamp = versionInfo.getSecond();
mustRebuild &= failedGeneratorVersion < myGeneratorVersion || failedTimestamp < binaryItem.lastModified();
if (!mustRebuild) { // we're still failing to rebuild, it, keep it in blacklist
errorList.add(new UpdateResult(moduleName, binaryItem.getPath(), binaryItem.lastModified(), false));
}
}
}
if (mustRebuild) {
indicateMinor(moduleName);
if (myPregeneratedSkeletons != null && copyPregeneratedSkeleton(moduleName)) {
return true;
}
LOG.info("Skeleton for " + moduleName);
// Generation failures are recorded as "fresh" blacklist entries.
generateSkeleton(moduleName, binaryItem.getPath(), null, generated -> {
if (!generated) {
errorList.add(new UpdateResult(moduleName, binaryItem.getPath(), binaryItem.lastModified(), true));
}
});
}
return false;
}
/**
 * Descriptor of one binary module reported by the skeleton generator: its
 * dotted module name, filesystem path, size in bytes, and last-modified time.
 */
public static class PyBinaryItem {
    // FIX: fields are assigned exactly once in the constructor — made final to
    // make the value-object immutability explicit.
    private final String myPath;
    private final String myModule;
    private final long myLength;
    private final long myLastModified;

    PyBinaryItem(String module, String path, long length, long lastModified) {
        myPath = path;
        myModule = module;
        myLength = length;
        // The generator reports seconds since the epoch; convert to milliseconds
        // so values are comparable with File.lastModified().
        myLastModified = lastModified * 1000;
    }

    public String getPath() {
        return myPath;
    }

    public String getModule() {
        return myModule;
    }

    /** @return binary size in bytes */
    public long length() {
        return myLength;
    }

    /** @return last-modified time in milliseconds since the epoch */
    public long lastModified() {
        return myLastModified;
    }
}
/**
 * Copies a pregenerated skeleton for the given module from the bundled
 * skeletons archive into the skeletons directory, if one exists.
 * <p>
 * The module is first looked up as a plain module ({@code foo/bar.py});
 * failing that, as a package ({@code foo/bar/__init__.py}).
 *
 * @param moduleName dotted module name, e.g. {@code foo.bar}
 * @return {@code true} if a skeleton file was found and copied successfully
 * @throws InvalidSdkException if the skeletons path cannot be resolved
 */
private boolean copyPregeneratedSkeleton(String moduleName) throws InvalidSdkException {
  File targetDir;
  final String modulePath = moduleName.replace('.', '/');
  File skeletonsDir = new File(getSkeletonsPath());
  VirtualFile pregenerated = myPregeneratedSkeletons.findFileByRelativePath(modulePath + ".py");
  if (pregenerated == null) {
    // not a plain module; try a package (__init__.py inside the module dir)
    pregenerated = myPregeneratedSkeletons.findFileByRelativePath(modulePath + "/" + PyNames.INIT_DOT_PY);
    targetDir = new File(skeletonsDir, modulePath);
  }
  else {
    // plain module: the target directory is the parent package, if any
    int pos = modulePath.lastIndexOf('/');
    if (pos < 0) {
      targetDir = skeletonsDir;
    }
    else {
      final String moduleParentPath = modulePath.substring(0, pos);
      targetDir = new File(skeletonsDir, moduleParentPath);
    }
  }
  if (pregenerated != null && (targetDir.exists() || targetDir.mkdirs())) {
    LOG.info("Pregenerated skeleton for " + moduleName);
    File target = new File(targetDir, pregenerated.getName());
    try (FileOutputStream fos = new FileOutputStream(target)) {
      // try-with-resources replaces the original manual try/finally close
      FileUtil.copy(pregenerated.getInputStream(), fos);
    }
    catch (IOException e) {
      LOG.info("Error copying pregenerated skeleton", e);
      return false;
    }
    return true;
  }
  return false;
}
/**
 * Locates the bundled archive of pregenerated skeletons matching this SDK
 * and platform, and returns the root of the archive as a virtual file.
 * <p>
 * The expected archive name encodes OS, generator version and SDK version,
 * e.g. {@code skeletons-win-<generator>-<version>.zip} or
 * {@code skeletons-mac-<generator>-<os>-<version>.zip}.
 *
 * @return the jar root of the archive, or {@code null} when there is no
 *         skeletons root, the SDK has no version string, the SDK is remote,
 *         or no matching archive exists
 */
@Nullable
private VirtualFile findPregeneratedSkeletons() {
  final File root = findPregeneratedSkeletonsRoot();
  if (root == null) {
    return null;
  }
  LOG.info("Pregenerated skeletons root is " + root);
  @NonNls final String versionString = mySdk.getVersionString();
  if (versionString == null) {
    return null;
  }
  if (PySdkUtil.isRemote(mySdk)) {
    return null;
  }
  // e.g. "Python 2.7.3" -> "python-2.7.3"
  // NOTE(review): toLowerCase() uses the default locale; Locale.ROOT would be
  // safer for a file-name token — confirm before changing.
  String version = versionString.toLowerCase().replace(" ", "-");
  File f;
  if (SystemInfo.isMac) {
    // trim the OS version to "major.minor" (drop everything after the second dot)
    String osVersion = SystemInfo.OS_VERSION;
    int dot = osVersion.indexOf('.');
    if (dot >= 0) {
      int secondDot = osVersion.indexOf('.', dot + 1);
      if (secondDot >= 0) {
        osVersion = osVersion.substring(0, secondDot);
      }
    }
    f = new File(root, "skeletons-mac-" + myGeneratorVersion + "-" + osVersion + "-" + version + ".zip");
  }
  else {
    String os = SystemInfo.isWindows ? "win" : "nix";
    f = new File(root, "skeletons-" + os + "-" + myGeneratorVersion + "-" + version + ".zip");
  }
  if (f.exists()) {
    LOG.info("Found pregenerated skeletons at " + f.getPath());
    final VirtualFile virtualFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(f);
    if (virtualFile == null) {
      LOG.info("Could not find pregenerated skeletons in VFS");
      return null;
    }
    return JarFileSystem.getInstance().getJarRootForLocalFile(virtualFile);
  }
  else {
    LOG.info("Not found pregenerated skeletons at " + f.getPath());
    return null;
  }
}
/**
 * Locates the directory holding pregenerated skeleton archives under the IDE
 * home: {@code phizdets/skeletons} when running from sources, plain
 * {@code skeletons} in a compiled distribution.
 *
 * @return the first existing candidate directory, or {@code null} if neither exists
 */
@Nullable
private static File findPregeneratedSkeletonsRoot() {
  final String home = PathManager.getHomePath();
  LOG.info("Home path is " + home);
  for (String candidate : new String[]{"phizdets/skeletons", "skeletons"}) {
    final File dir = new File(home, candidate);
    if (dir.exists()) {
      return dir;
    }
  }
  return null;
}
/**
 * Generates a skeleton for a particular binary module by delegating to the
 * configured skeletons generator; the outcome is reported through
 * {@code resultConsumer}.
 *
 * @param modname name of the binary module as known to Phizdets (e.g. 'foo.bar')
 * @param modfilename name of file which defines the module, null for built-in modules
 * @param assemblyRefs refs that generator wants to know in .net environment, if applicable
 * @param resultConsumer accepts true if generation completed successfully
 * @throws InvalidSdkException propagated from SDK/path resolution
 */
public void generateSkeleton(@NotNull String modname, @Nullable String modfilename,
@Nullable List<String> assemblyRefs, Consumer<Boolean> resultConsumer) throws InvalidSdkException {
  mySkeletonsGenerator.generateSkeleton(modname, modfilename, assemblyRefs, getExtraSyspath(), mySdk.getHomePath(), resultConsumer);
}
/**
 * Returns the extra {@code sys.path} entries for the generator, computing and
 * caching them on first use.
 *
 * @return cached extra syspath string
 */
private String getExtraSyspath() {
  if (myExtraSyspath != null) {
    return myExtraSyspath;
  }
  myExtraSyspath = calculateExtraSysPath(mySdk, mySkeletonsPath);
  return myExtraSyspath;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.property;
import static com.google.common.base.Predicates.in;
import static com.google.common.collect.Iterables.any;
import static com.google.common.collect.Iterables.isEmpty;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.collect.Sets.newLinkedHashSet;
import static java.util.Collections.emptySet;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.DECLARING_NODE_TYPES;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.PROPERTY_NAMES;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.UNIQUE_PROPERTY_NAME;
import static org.apache.jackrabbit.oak.plugins.index.property.PropertyIndex.encode;
import java.util.Set;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.index.property.strategy.ContentMirrorStoreStrategy;
import org.apache.jackrabbit.oak.plugins.index.property.strategy.IndexStoreStrategy;
import org.apache.jackrabbit.oak.plugins.index.property.strategy.UniqueEntryStoreStrategy;
import org.apache.jackrabbit.oak.query.QueryEngineSettings;
import org.apache.jackrabbit.oak.query.ast.ComparisonImpl;
import org.apache.jackrabbit.oak.query.ast.ConstraintImpl;
import org.apache.jackrabbit.oak.query.ast.DynamicOperandImpl;
import org.apache.jackrabbit.oak.query.ast.InImpl;
import org.apache.jackrabbit.oak.query.ast.Operator;
import org.apache.jackrabbit.oak.query.ast.OrImpl;
import org.apache.jackrabbit.oak.query.ast.PropertyValueImpl;
import org.apache.jackrabbit.oak.query.ast.StaticOperandImpl;
import org.apache.jackrabbit.oak.spi.query.Cursor;
import org.apache.jackrabbit.oak.spi.query.Cursors;
import org.apache.jackrabbit.oak.spi.query.Filter;
import org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction;
import org.apache.jackrabbit.oak.spi.state.NodeState;
/**
 * Plan for querying a given property index using a given filter.
 * <p>
 * The constructor evaluates every candidate restriction (direct property
 * restrictions, relative-path restrictions, and OR/IN constraints covered by
 * the indexed properties) and keeps the cheapest one; {@link #execute()} then
 * runs the chosen lookup.
 */
public class PropertyIndexPlan {

    /**
     * The cost overhead to use the index in number of read operations.
     */
    private static final double COST_OVERHEAD = 2;

    /**
     * The maximum cost when the index can be used.
     */
    static final int MAX_COST = 100;

    /** Index storage strategy */
    private static final IndexStoreStrategy MIRROR =
            new ContentMirrorStoreStrategy();

    /** Index storage strategy */
    private static final IndexStoreStrategy UNIQUE =
            new UniqueEntryStoreStrategy();

    private final NodeState root;
    private final NodeState definition;
    private final String name;
    private final Set<String> properties;
    private final IndexStoreStrategy strategy;
    private final Filter filter;
    // Both flags are assigned exactly once in the constructor; made final for
    // consistency with the other fields.
    private final boolean matchesAllTypes;
    private final boolean matchesNodeTypes;
    private final double cost;
    private final Set<String> values;
    private final int depth;

    PropertyIndexPlan(String name, NodeState root, NodeState definition, Filter filter) {
        this.name = name;
        this.root = root;
        this.definition = definition;
        this.properties = newHashSet(definition.getNames(PROPERTY_NAMES));
        if (definition.getBoolean(UNIQUE_PROPERTY_NAME)) {
            this.strategy = UNIQUE;
        } else {
            this.strategy = MIRROR;
        }
        this.filter = filter;

        Iterable<String> types = definition.getNames(DECLARING_NODE_TYPES);
        // if there is no such property, then all nodetypes are matched
        this.matchesAllTypes = !definition.hasProperty(DECLARING_NODE_TYPES);
        this.matchesNodeTypes =
                matchesAllTypes || any(types, in(filter.getSupertypes()));

        // find the cheapest applicable restriction
        double bestCost = Double.POSITIVE_INFINITY;
        Set<String> bestValues = emptySet();
        int bestDepth = 1;

        if (matchesNodeTypes) {
            for (String property : properties) {
                PropertyRestriction restriction =
                        filter.getPropertyRestriction(property);
                int depth = 1;
                if (restriction == null) {
                    // no direct restriction, try one with a relative path
                    // TODO: avoid repeated scans through the restrictions
                    String suffix = "/" + property;
                    for (PropertyRestriction relative
                            : filter.getPropertyRestrictions()) {
                        if (relative.propertyName.endsWith(suffix)) {
                            restriction = relative;
                            depth = PathUtils.getDepth(relative.propertyName);
                        }
                    }
                }
                if (restriction != null) {
                    if (restriction.isNullRestriction()) {
                        // covering indexes are not currently supported
                        continue;
                    }
                    Set<String> values = getValues(restriction);
                    double cost = strategy.count(filter, root, definition, values, MAX_COST);
                    if (cost < bestCost) {
                        bestDepth = depth;
                        bestValues = values;
                        bestCost = cost;
                    }
                }
            }

            // OAK-1965: let's see if we can find a (x='...' OR y='...')
            // constraint where both x and y are covered by this index
            // TODO: avoid repeated scans through the constraints
            for (ConstraintImpl constraint
                    : filter.getSelector().getSelectorConstraints()) {
                if (constraint instanceof OrImpl) {
                    Set<String> values = findMultiProperty((OrImpl) constraint);
                    if (values != null) {
                        double cost = strategy.count(filter, root, definition, values, MAX_COST);
                        if (cost < bestCost) {
                            bestDepth = 1;
                            bestValues = values;
                            bestCost = cost;
                        }
                    }
                }
            }
        }

        this.depth = bestDepth;
        this.values = bestValues;
        this.cost = COST_OVERHEAD + bestCost;
    }

    /**
     * Collects the encoded values of an OR constraint whose disjuncts are all
     * equality or IN comparisons on properties covered by this index.
     *
     * @param or the OR constraint
     * @return the union of encoded values, or {@code null} if any disjunct is
     *         not covered by this index
     */
    private Set<String> findMultiProperty(OrImpl or) {
        Set<String> values = newLinkedHashSet();
        for (ConstraintImpl constraint : or.getConstraints()) {
            if (constraint instanceof ComparisonImpl) {
                ComparisonImpl comparison = (ComparisonImpl) constraint;
                if (isIndexed(comparison.getOperand1())
                        && comparison.getOperator() == Operator.EQUAL) {
                    values.addAll(encode(comparison.getOperand2().currentValue()));
                } else {
                    return null;
                }
            } else if (constraint instanceof InImpl) {
                InImpl in = (InImpl) constraint;
                if (isIndexed(in.getOperand1())) {
                    for (StaticOperandImpl operand : in.getOperand2()) {
                        values.addAll(encode(operand.currentValue()));
                    }
                } else {
                    return null;
                }
            } else {
                return null;
            }
        }
        return values;
    }

    /**
     * Checks whether the given dynamic operand is a property
     * covered by this index.
     */
    private boolean isIndexed(DynamicOperandImpl operand) {
        if (operand instanceof PropertyValueImpl) {
            PropertyValueImpl property = (PropertyValueImpl) operand;
            return properties.contains(property.getPropertyName());
        } else {
            return false;
        }
    }

    /**
     * Extracts the encoded lookup values from a property restriction.
     *
     * @param restriction the restriction
     * @return encoded values, or {@code null} for a null/not-null restriction
     */
    private static Set<String> getValues(PropertyRestriction restriction) {
        if (restriction.firstIncluding
                && restriction.lastIncluding
                && restriction.first != null
                && restriction.first.equals(restriction.last)) {
            // "[property] = $value"
            return encode(restriction.first);
        } else if (restriction.list != null) {
            // "[property] IN (...)"
            Set<String> values = newLinkedHashSet(); // keep order for testing
            for (PropertyValue value : restriction.list) {
                values.addAll(encode(value));
            }
            return values;
        } else {
            // "[property] is not null" or "[property] is null"
            return null;
        }
    }

    String getName() {
        return name;
    }

    double getCost() {
        return cost;
    }

    /**
     * Runs the planned index lookup; for relative-path restrictions the cursor
     * is wrapped to return the matching ancestor nodes.
     */
    Cursor execute() {
        QueryEngineSettings settings = filter.getQueryEngineSettings();
        Cursor cursor = Cursors.newPathCursor(
                strategy.query(filter, name, definition, values),
                settings);
        if (depth > 1) {
            cursor = Cursors.newAncestorCursor(cursor, depth - 1, settings);
        }
        return cursor;
    }

    //------------------------------------------------------------< Object >--

    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder("property ");
        buffer.append(name);
        if (values == null) {
            buffer.append(" IS NOT NULL");
        } else if (values.isEmpty()) {
            buffer.append(" NOT APPLICABLE");
        } else if (values.size() == 1) {
            buffer.append(" = ");
            buffer.append(values.iterator().next());
        } else {
            buffer.append(" IN (");
            boolean comma = false;
            for (String value : values) {
                if (comma) {
                    buffer.append(", ");
                }
                buffer.append(value);
                comma = true;
            }
            buffer.append(")");
        }
        return buffer.toString();
    }
}
|
|
/**
* Copyright (C) 2012 KRM Associates, Inc. [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.krminc.phr.domain.carenotebook;
import com.krminc.phr.web.HealthSummary;
import java.io.Serializable;
import java.math.BigInteger;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.persistence.*;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * JPA entity mapped to the {@code phr.carenotebook_rest} table: one dated
 * free-text rest observation attached to a health record.
 *
 * @author cmccall
 */
@Entity
@Table(name = "carenotebook_rest", catalog = "phr", schema = "")
@XmlRootElement
@NamedQueries({
@NamedQuery(name = "Rest.findAll", query = "SELECT r FROM Rest r"),
@NamedQuery(name = "Rest.findByRestId", query = "SELECT r FROM Rest r WHERE r.restId = :restId"),
@NamedQuery(name = "Rest.findByObservedDate", query = "SELECT r FROM Rest r WHERE r.observedDate = :observedDate"),
@NamedQuery(name = "Rest.findByRestText", query = "SELECT r FROM Rest r WHERE r.restText = :restText"),
@NamedQuery(name = "Rest.findByHealthRecordId", query = "SELECT r FROM Rest r WHERE r.healthRecordId = :healthRecordId"),
@NamedQuery(name = "Rest.findByDataSourceId", query = "SELECT r FROM Rest r WHERE r.dataSourceId = :dataSourceId"),
@NamedQuery(name = "Rest.findByCareDocumentId", query = "SELECT r FROM Rest r WHERE r.careDocumentId = :careDocumentId"),
@NamedQuery(name = "Rest.findBySourceId", query = "SELECT r FROM Rest r WHERE r.sourceId = :sourceId"),
@NamedQuery(name = "Rest.findByDateAdded", query = "SELECT r FROM Rest r WHERE r.dateAdded = :dateAdded"),
@NamedQuery(name = "Rest.findByComments", query = "SELECT r FROM Rest r WHERE r.comments = :comments"),
@NamedQuery(name = "Rest.findByPrimaryKeyForRecord", query = "SELECT d FROM Rest d WHERE d.restId = :restId AND d.healthRecordId = :healthRecordId"),
@NamedQuery(name = "Rest.findByMask", query = "SELECT r FROM Rest r WHERE r.mask = :mask")})
public class Rest extends HealthSummary implements Serializable {
    private static final long serialVersionUID = 1L;
    // database-generated primary key
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(name = "rest_id", nullable = false)
    private Long restId;
    @Column(name = "observed_date")
    @Temporal(TemporalType.DATE)
    private Date observedDate;
    @Column(name = "rest_text", length = 3000)
    private String restText;
    // foreign key to the owning health record
    @Basic(optional = false)
    @Column(name = "rec_id", nullable = false)
    private long healthRecordId;
    @Basic(optional = false)
    @Column(name = "data_source_id", nullable = false)
    private long dataSourceId;
    @Column(name = "care_document_id")
    private BigInteger careDocumentId;
    @Column(name = "source_id")
    private BigInteger sourceId;
    @Basic(optional = false)
    @Column(name = "date_added", nullable = false)
    @Temporal(TemporalType.TIMESTAMP)
    private Date dateAdded;
    @Column(name = "comments", length = 512)
    private String comments;
    @Column(name = "mask", length = 50)
    private String mask;

    /** No-arg constructor required by JPA. */
    public Rest() {
    }

    // NOTE(review): unboxes healthRecordId; passing null will throw an NPE.
    public Rest(Long healthRecordId) {
        super(healthRecordId);
        this.healthRecordId = healthRecordId;
    }

    public Long getRestId() {
        return restId;
    }

    /** needed to map existing entities by carenotebook form processor **/
    public void setRestId(String restId){
        // throws NumberFormatException on non-numeric input
        this.restId = Long.parseLong(restId);
    }
    // public void setRestId(Long restId) {
    //     this.restId = restId;
    // }

    public Date getObservedDate() {
        return observedDate;
    }

    /**
     * Parses and sets the observed date from form input.
     *
     * @param observedDate date string in MM/dd/yyyy format
     * @throws ParseException if the string does not match the expected format
     */
    public void setObservedDate(String observedDate) throws ParseException {
        DateFormat df = new SimpleDateFormat("MM/dd/yyyy");
        this.observedDate = df.parse(observedDate);
    }

    public void setObservedDate(Date observedDate) {
        this.observedDate = observedDate;
    }

    public String getRestText() {
        return restText;
    }

    public void setRestText(String restText) {
        this.restText = restText;
    }

    @Override
    public Long getHealthRecordId() {
        // autoboxes the primitive field to match the superclass signature
        return healthRecordId;
    }

    public long getDataSourceId() {
        return dataSourceId;
    }

    public void setDataSourceId(long dataSourceId) {
        this.dataSourceId = dataSourceId;
    }

    public BigInteger getCareDocumentId() {
        return careDocumentId;
    }

    public void setCareDocumentId(BigInteger careDocumentId) {
        this.careDocumentId = careDocumentId;
    }

    public BigInteger getSourceId() {
        return sourceId;
    }

    public void setSourceId(BigInteger sourceId) {
        this.sourceId = sourceId;
    }

    public Date getDateAdded() {
        return dateAdded;
    }

    public void setDateAdded(Date dateAdded) {
        this.dateAdded = dateAdded;
    }

    public String getComments() {
        return comments;
    }

    public void setComments(String comments) {
        this.comments = comments;
    }

    public String getMask() {
        return mask;
    }

    public void setMask(String mask) {
        this.mask = mask;
    }

    @Override
    public int hashCode() {
        int hash = 0;
        hash += ( restId != null ? restId.hashCode() : 0 );
        return hash;
    }

    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!( object instanceof Rest )) {
            return false;
        }
        Rest other = (Rest) object;
        if (( this.restId == null && other.restId != null ) || ( this.restId != null && !this.restId.equals(other.restId) )) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "com.krminc.phr.domain.carenotebook.Rest[ restId=" + restId + " ]";
    }
}
|
|
package org.basex.query.util.format;
import static org.basex.query.QueryError.*;
import static org.basex.util.Token.*;
import java.math.*;
import java.util.*;
import org.basex.query.*;
import org.basex.query.value.item.*;
import org.basex.query.value.type.*;
import org.basex.util.*;
import org.basex.util.hash.*;
import org.basex.util.list.*;
/**
* Abstract class for formatting data in different languages.
*
* @author BaseX Team 2005-20, BSD License
* @author Christian Gruen
*/
public abstract class Formatter extends FormatUtil {
/** Military timezones. */
private static final byte[] MIL = token("YXWVUTSRQPONZABCDEFGHIKLM");
/** Token: n. */
private static final byte[] N = { 'n' };
/** Allowed calendars. */
private static final byte[][] CALENDARS = tokens(
"ISO", "AD", "AH", "AME", "AM", "AP", "AS", "BE", "CB", "CE", "CL", "CS", "EE", "FE",
"JE", "KE", "KY", "ME", "MS", "NS", "OS", "RS", "SE", "SH", "SS", "TE", "VE", "VS");
/** Default language: English. */
public static final byte[] EN = token("en");
/** Formatter instances. */
private static final TokenObjMap<Formatter> MAP = new TokenObjMap<>();
// initialize hash map with English formatter as default
static {
MAP.put(EN, new FormatterEN());
MAP.put(token("de"), new FormatterDE());
}
/**
 * Returns a formatter for the specified language, falling back to the
 * English formatter when the language is unknown.
 * @param language language code
 * @return formatter instance (never {@code null})
 */
public static Formatter get(final byte[] language) {
  Formatter formatter = MAP.get(language);
  if(formatter == null) formatter = MAP.get(EN);
  return formatter;
}
/**
 * Returns a word representation for the specified number
 * (e.g. 12 &#x2192; "twelve").
 * @param n number to be formatted
 * @param ord ordinal suffix
 * @return token
 */
protected abstract byte[] word(long n, byte[] ord);

/**
 * Returns an ordinal representation for the specified number
 * (e.g. the suffix in "12th").
 * @param n number to be formatted
 * @param ord ordinal suffix
 * @return ordinal
 */
protected abstract byte[] ordinal(long n, byte[] ord);

/**
 * Returns the specified month (0-11).
 * @param n number to be formatted
 * @param min minimum length
 * @param max maximum length
 * @return month
 */
protected abstract byte[] month(int n, int min, int max);

/**
 * Returns the specified day of the week (0-6, Sunday-Saturday).
 * @param n number to be formatted
 * @param min minimum length
 * @param max maximum length
 * @return day of week
 */
protected abstract byte[] day(int n, int min, int max);

/**
 * Returns the am/pm marker.
 * @param am am flag ({@code true} for am, {@code false} for pm)
 * @return am/pm marker
 */
protected abstract byte[] ampm(boolean am);

/**
 * Returns the calendar name.
 * @return calendar
 */
protected abstract byte[] calendar();

/**
 * Returns the era for the given year.
 * @param year year
 * @return era
 */
protected abstract byte[] era(long year);
/**
 * Formats the specified date.
 * <p>
 * Unsupported languages fall back to English (flagged with "[Language: en]"
 * in the output); unsupported calendars and places are flagged similarly, as
 * required by the format-date family of functions.
 *
 * @param date date to be formatted
 * @param language language
 * @param picture picture
 * @param calendar calendar (can be {@code null})
 * @param place place
 * @param sc static context
 * @param ii input info
 * @return formatted string
 * @throws QueryException query exception
 */
public final byte[] formatDate(final ADate date, final byte[] language, final byte[] picture,
    final byte[] calendar, final byte[] place, final StaticContext sc, final InputInfo ii)
    throws QueryException {

  final TokenBuilder tb = new TokenBuilder();
  // unsupported language: flag the fallback in the output
  if(language.length != 0 && MAP.get(language) == null) tb.add("[Language: en]");
  if(calendar != null) {
    // validate the calendar name; anything other than ISO/AD is flagged
    final QNm qnm;
    try {
      qnm = QNm.resolve(trim(calendar), sc);
    } catch(final QueryException ex) {
      throw CALWHICH_X.get(ii, calendar);
    }
    if(qnm.uri().length == 0) {
      int c = -1;
      final byte[] ln = qnm.local();
      final int cl = CALENDARS.length;
      while(++c < cl && !eq(CALENDARS[c], ln));
      if(c == cl) throw CALWHICH_X.get(ii, calendar);
      if(c > 1) tb.add("[Calendar: AD]");
    }
  }
  if(place.length != 0) tb.add("[Place: ]");

  // walk through the picture string: literals are copied, variable markers
  // ("[...]") are expanded according to their component specifier
  final DateParser dp = new DateParser(ii, picture);
  while(dp.more()) {
    final int ch = dp.literal();
    if(ch == -1) {
      // retrieve variable marker
      final byte[] marker = dp.marker();
      if(marker.length == 0) throw PICDATE_X.get(ii, picture);
      // parse component specifier
      final int compSpec = ch(marker, 0);
      byte[] pres = ONE;
      boolean max = false;
      BigDecimal frac = null;
      long num = 0;
      // a date has no time components and vice versa; 'err' records when the
      // requested component does not exist for the given item type
      final boolean dat = date.type == AtomType.DAT;
      final boolean tim = date.type == AtomType.TIM;
      boolean err = false;
      switch(compSpec) {
        case 'Y':
          num = Math.abs(date.yea());
          max = true;
          err = tim;
          break;
        case 'M':
          num = date.mon();
          err = tim;
          break;
        case 'D':
          num = date.day();
          err = tim;
          break;
        case 'd':
          // day of year: sum up the days of all preceding months
          final long y = date.yea();
          for(int m = (int) date.mon() - 1; --m >= 0;) num += ADate.dpm(y, m);
          num += date.day();
          err = tim;
          break;
        case 'F':
          // day of week, mapped to 1-7 with Sunday as 7
          num = date.toJava().toGregorianCalendar().get(Calendar.DAY_OF_WEEK) - 1;
          if(num == 0) num = 7;
          pres = N;
          err = tim;
          break;
        case 'W':
          num = date.toJava().toGregorianCalendar().get(Calendar.WEEK_OF_YEAR);
          err = tim;
          break;
        case 'w':
          num = date.toJava().toGregorianCalendar().get(Calendar.WEEK_OF_MONTH);
          // first week of month: fix value, according to ISO 8601
          if(num == 0) num = new Dtm(new Dtm(date), new DTDur(date.day() * 24, 0), false, ii).
            toJava().toGregorianCalendar().get(Calendar.WEEK_OF_MONTH);
          err = tim;
          break;
        case 'H':
          num = date.hour();
          err = dat;
          break;
        case 'h':
          // 12-hour clock: 0 is rendered as 12
          num = date.hour() % 12;
          if(num == 0) num = 12;
          err = dat;
          break;
        case 'P':
          num = date.hour() / 12;
          pres = N;
          err = dat;
          break;
        case 'm':
          num = date.minute();
          pres = token("01");
          err = dat;
          break;
        case 's':
          num = date.sec().intValue();
          pres = token("01");
          err = dat;
          break;
        case 'f':
          frac = date.sec().remainder(BigDecimal.ONE);
          err = dat;
          break;
        case 'Z':
        case 'z':
          num = date.tz();
          pres = token("01:01");
          break;
        case 'C':
          pres = N;
          break;
        case 'E':
          num = date.yea();
          pres = N;
          err = tim;
          break;
        default:
          throw INVCOMPSPEC_X.get(ii, marker);
      }
      if(err) throw PICINVCOMP_X_X.get(ii, marker, date.type);
      if(pres == null) continue;

      // parse presentation modifier(s) and width modifier
      final DateFormat fp = new DateFormat(substring(marker, 1), pres, ii);
      if(max && fp.max == Integer.MAX_VALUE) {
        // limit maximum length of numeric output
        int mx = 0;
        final int fl = fp.primary.length;
        for(int s = 0; s < fl; s += cl(fp.primary, s)) mx++;
        if(mx > 1) fp.max = mx;
      }

      if(compSpec == 'z' || compSpec == 'Z') {
        // output timezone
        tb.add(formatZone((int) num, fp, marker));
      } else if(fp.first == 'n') {
        // output name representation
        byte[] in = null;
        switch(compSpec) {
          case 'M': in = month((int) num - 1, fp.min, fp.max); break;
          case 'F': in = day((int) num - 1, fp.min, fp.max); break;
          case 'P': in = ampm(num == 0); break;
          case 'C': in = calendar(); break;
          case 'E': in = era((int) num); break;
        }
        if(in != null) {
          if(fp.cs == Case.LOWER) in = lc(in);
          if(fp.cs == Case.UPPER) in = uc(in);
          tb.add(in);
        } else {
          // fallback representation
          fp.first = '0';
          fp.primary = ONE;
          tb.add(formatInt(num, fp));
        }
      } else if(frac != null) {
        // fractional seconds: strip "0." prefix and trailing zeroes, then
        // adjust the digit count to the requested width
        String s = frac.toString().replace("0.", "").replaceAll("0+$", "");
        if(frac.compareTo(BigDecimal.ZERO) != 0) {
          final int sl = s.length();
          if(fp.min > sl) {
            s = frac(frac, fp.min);
          } else if(fp.max < sl) {
            s = frac(frac, fp.max);
          } else {
            final int fl = length(fp.primary);
            if(fl != 1 && fl != sl) s = frac(frac, fl);
          }
        }
        tb.add(number(token(s), fp, fp.first));
      } else {
        tb.add(formatInt(num, fp));
      }
    } else {
      // print literal
      tb.add(ch);
    }
  }
  return tb.finish();
}
/**
 * Returns the fractional part of a decimal number, truncated (not rounded)
 * to the requested number of digits.
 * @param num number
 * @param len length of fractional part
 * @return string representation without the leading "0."
 */
private static String frac(final BigDecimal num, final int len) {
  final String scaled = num.setScale(len, RoundingMode.DOWN).toString();
  final int dot = scaled.indexOf('.');
  if(dot == -1) return scaled;
  return scaled.substring(dot + 1);
}
/**
 * Returns a formatted integer, dispatching on the first character of the
 * presentation format: words, Kanji, Roman numerals, circled/parenthesized/
 * full-stop digit sequences, alphabetic sequences, or plain digits.
 * @param num integer to be formatted
 * @param fp format parser
 * @return string representation
 */
public final byte[] formatInt(final long num, final FormatParser fp) {
  // prepend minus sign to negative values
  long n = num;
  final boolean sign = n < 0;
  if(sign) n = -n;

  final TokenBuilder tb = new TokenBuilder();
  final int ch = fp.first;
  if(ch == 'w') {
    tb.add(word(n, fp.ordinal));
  } else if(ch == KANJI[1]) {
    japanese(tb, n);
  } else if(ch == 'i') {
    roman(tb, n, fp.min);
  } else if(ch == '\u2460' || ch == '\u2474' || ch == '\u2488') {
    // Unicode circled (2460), parenthesized (2474) and full-stop (2488)
    // digit blocks only define the values 1-20; fall back to plain digits
    if(num < 1 || num > 20) tb.addLong(num);
    else tb.add((int) (ch + num - 1));
  } else {
    final String seq = sequence(ch);
    if(seq != null) alpha(tb, num, seq);
    else tb.add(number(n, fp, ch));
  }

  // finalize formatted string
  byte[] in = tb.finish();
  if(fp.cs == Case.LOWER) in = lc(in);
  if(fp.cs == Case.UPPER) in = uc(in);
  return sign ? concat(new byte[] { '-' }, in) : in;
}
/**
 * Returns a formatted timezone.
 * <p>
 * The branches below match the picture's digit pattern (e.g. "0", "00",
 * "0:00", "00:00") against the hour and minute components of the offset;
 * {@code zeroes} identifies digit characters, {@code -1} means "no character".
 *
 * @param num integer to be formatted (offset in minutes;
 *            {@code Short.MAX_VALUE} marks a missing timezone)
 * @param fp format parser
 * @param marker marker
 * @return string representation
 * @throws QueryException query exception
 */
private byte[] formatZone(final int num, final FormatParser fp, final byte[] marker)
    throws QueryException {
  final boolean uc = ch(marker, 0) == 'Z';
  final boolean mil = uc && ch(marker, 1) == 'Z';
  // ignore values without timezone. exception: military timezone
  if(num == Short.MAX_VALUE) return mil ? new byte[] { 'J' } : EMPTY;

  final TokenBuilder tb = new TokenBuilder();
  if(!mil || !addMilZone(num, tb)) {
    if(!uc) tb.add("GMT");
    final boolean minus = num < 0;
    if(fp.trad && num == 0) {
      // traditional presentation: zero offset is printed as 'Z'
      tb.add('Z');
    } else {
      tb.add(minus ? '-' : '+');
      final TokenParser tp = new TokenParser(fp.primary);
      final int c1 = tp.next(), c2 = tp.next(), c3 = tp.next(), c4 = tp.next();
      final int z1 = zeroes(c1), z2 = zeroes(c2), z3 = zeroes(c3), z4 = zeroes(c4);
      if(z1 == -1) {
        // no digit pattern: default to "00:00"
        tb.add(addZone(num, 0, new TokenBuilder().add("00"))).add(':');
        tb.add(addZone(num, 1, new TokenBuilder().add("00")));
      } else if(z2 == -1) {
        // single-digit hour pattern
        tb.add(addZone(num, 0, new TokenBuilder().add(c1)));
        if(c2 == -1) {
          if(num % 60 != 0) tb.add(':').add(addZone(num, 1, new TokenBuilder().add("00")));
        } else {
          final TokenBuilder t = new TokenBuilder().add(z3 == -1 ? '0' : z3);
          if(z3 != -1 && z4 != -1) t.add(z4);
          tb.add(c2).add(addZone(num, 1, t));
        }
      } else if(z3 == -1) {
        // two-digit hour pattern
        tb.add(addZone(num, 0, new TokenBuilder().add(c1).add(c2)));
        if(c3 == -1) {
          if(num % 60 != 0) tb.add(':').add(addZone(num, 1, new TokenBuilder().add("00")));
        } else {
          final int c5 = tp.next(), z5 = zeroes(c5);
          final TokenBuilder t = new TokenBuilder().add(z4 == -1 ? '0' : z4);
          if(z4 != -1 && z5 != -1) t.add(z5);
          tb.add(c3).add(addZone(num % 60, 1, t));
        }
      } else if(z4 == -1) {
        // "0 00": one hour digit, two minute digits, no separator
        tb.add(addZone(num, 0, new TokenBuilder().add(c1)));
        tb.add(addZone(num, 1, new TokenBuilder().add(c2).add(c3)));
      } else {
        // "00 00": two hour digits, two minute digits, no separator
        tb.add(addZone(num, 0, new TokenBuilder().add(c1).add(c2)));
        tb.add(addZone(num, 1, new TokenBuilder().add(c3).add(c4)));
      }
    }
  }
  return tb.finish();
}
/**
 * Returns a single timezone component (hours or minutes) as a non-negative
 * formatted number.
 * @param num timezone offset in minutes
 * @param c counter: 0 selects the hour component, otherwise minutes
 * @param format presentation format
 * @return timezone component
 * @throws QueryException query exception
 */
private byte[] addZone(final int num, final int c, final TokenBuilder format)
    throws QueryException {
  final int component = c == 0 ? num / 60 : num % 60;
  // the sign is emitted by the caller, so format the magnitude only
  final int n = num < 0 ? -component : component;
  return number(n, new IntFormat(format.toArray(), null), format.cp(0));
}
/**
 * Adds a military (single-letter) timezone to the token builder, provided the
 * offset is a whole number of hours between -12 and +12.
 * @param num offset in minutes
 * @param tb token builder
 * @return {@code true} if timezone was added
 */
private static boolean addMilZone(final int num, final TokenBuilder tb) {
  final int hours = num / 60;
  final boolean wholeHour = num % 60 == 0;
  if(!wholeHour || hours < -12 || hours > 12) return false;
  tb.add(MIL[hours + 12]);
  return true;
}
/**
 * Returns a character sequence based on the specified alphabet
 * (bijective numeration: 1 &#x2192; first character, al+1 &#x2192; first
 * character twice, etc.).
 * @param tb token builder
 * @param n number to be formatted
 * @param a alphabet
 */
private static void alpha(final TokenBuilder tb, final long n, final String a) {
  final int al = a.length();
  // emit higher-order "digits" first via recursion
  if(n > al) alpha(tb, (n - 1) / al, a);
  if(n > 0) tb.add(a.charAt((int) ((n - 1) % al)));
  else tb.add(ZERO);
}
/**
 * Adds a Roman character sequence, right-padded with spaces to the minimum
 * width; values outside 1-3999 are rendered as plain digits.
 * @param tb token builder
 * @param n number to be formatted
 * @param min minimum width
 */
private static void roman(final TokenBuilder tb, final long n, final int min) {
  final int start = tb.size();
  if(n <= 0 || n >= 4000) {
    // out of range for standard Roman numerals: fall back to decimal output
    tb.addLong(n);
  } else {
    final int value = (int) n;
    tb.add(ROMANM[value / 1000]);
    tb.add(ROMANC[value / 100 % 10]);
    tb.add(ROMANX[value / 10 % 10]);
    tb.add(ROMANI[value % 10]);
  }
  for(int width = tb.size() - start; width < min; width++) tb.add(' ');
}
/**
 * Adds a Japanese character sequence for the given number; zero is a special
 * case with its own Kanji character.
 * @param tb token builder
 * @param n number to be formatted
 */
private static void japanese(final TokenBuilder tb, final long n) {
  if(n != 0) {
    jp(tb, n, false);
  } else {
    tb.add(KANJI[0]);
  }
}
/**
 * Recursively adds a Japanese character sequence, splitting the number at the
 * conventional Kanji unit boundaries (10, 100, 1000, 10^4, 10^8, 10^12).
 * @param tb token builder
 * @param n number to be formatted
 * @param i initial call (a leading "one" before a unit character is omitted)
 */
private static void jp(final TokenBuilder tb, final long n, final boolean i) {
  if(n == 0) {
    // intentionally empty: zero digits are not written inside larger numbers
  } else if(n <= 9) {
    if(n != 1 || !i) tb.add(KANJI[(int) n]);
  } else if(n == 10) {
    tb.add(KANJI[10]);
  } else if(n <= 99) {
    jp(tb, n, 10, 10);
  } else if(n <= 999) {
    jp(tb, n, 100, 11);
  } else if(n <= 9999) {
    jp(tb, n, 1000, 12);
  } else if(n <= 99999999) {
    jp(tb, n, 10000, 13);
  } else if(n <= 999999999999L) {
    jp(tb, n, 100000000, 14);
  } else if(n <= 9999999999999999L) {
    jp(tb, n, 1000000000000L, 15);
  } else {
    // beyond the supported range: plain decimal digits
    tb.addLong(n);
  }
}
/**
 * Recursively adds a Japanese character sequence for one unit group:
 * quotient, unit character, then remainder.
 * @param tb token builder
 * @param n number to be formatted
 * @param f factor (value of the unit character)
 * @param o kanji offset (index of the unit character)
 */
private static void jp(final TokenBuilder tb, final long n, final long f,
    final int o) {
  jp(tb, n / f, true);
  tb.add(KANJI[o]);
  jp(tb, n % f, false);
}
/**
 * Creates a number character sequence and appends the language-specific
 * ordinal suffix.
 * @param num number to be formatted
 * @param fp format parser
 * @param first first digit
 * @return number character sequence
 */
private byte[] number(final long num, final FormatParser fp, final int first) {
  final byte[] n = number(token(num), fp, first);
  return concat(n, ordinal(num, fp.ordinal));
}
/**
 * Creates a number character sequence, applying the presentation modifier's
 * digit family, grouping separators, and minimum/maximum width.
 * The output is assembled right-to-left and reversed at the end.
 * @param num number to be formatted (decimal digit string)
 * @param fp format parser
 * @param first first digit
 * @return number character sequence
 */
private static byte[] number(final byte[] num, final FormatParser fp, final int first) {
  // code point of the zero digit of the requested digit family
  final int zero = zeroes(first);

  // cache characters of presentation modifier
  final int[] mod = new TokenParser(fp.primary).toArray();
  final int modSize = mod.length;
  int modStart = 0;
  while(modStart < modSize && mod[modStart] == '#') modStart++;

  // try to find regular separator pattern (e.g. "#,###,###" -> every 4th slot)
  int sepPos = -1, sepChar = -1, digitPos = 0;
  boolean regSep = false;
  for(int mp = modSize - 1; mp >= modStart; --mp) {
    final int ch = mod[mp];
    if(ch >= zero && ch <= zero + 9) {
      digitPos = mp;
      continue;
    }
    if(ch == '#') continue;
    if(sepPos == -1) {
      sepPos = modSize - mp;
      sepChar = ch;
      regSep = true;
    } else if(regSep) {
      regSep = (modSize - mp) % sepPos == 0 && ch == sepChar;
    }
  }
  if(!regSep) sepPos = Integer.MAX_VALUE;

  // cache characters in reverse order
  final IntList reverse = new IntList();
  int inPos = num.length - 1, modPos = modSize - 1;
  // add numbers and separators
  int min = fp.min, max = fp.max;
  while((--min >= 0 || inPos >= 0 || modPos >= modStart) && --max >= 0) {
    final boolean sep = reverse.size() % sepPos == sepPos - 1;
    int ch;
    if(modPos >= modStart) {
      // consume the next modifier slot, substituting input digits
      ch = mod[modPos--];
      if(inPos >= 0) {
        if(ch == '#' && sep) reverse.add(sepChar);
        if(ch == '#' || ch >= zero && ch <= zero + 9) ch = num[inPos--] - '0' + zero;
      } else {
        // add remaining modifiers
        if(ch == '#') break;
        if(ch >= zero && ch <= zero + 9) ch = zero;
        if(regSep && modPos + 1 < digitPos) break;
      }
    } else if(inPos >= 0) {
      // add remaining numbers
      if(sep) reverse.add(sepChar);
      ch = num[inPos--] - '0' + zero;
    } else {
      // add minimum number of digits
      ch = zero;
    }
    reverse.add(ch);
  }
  // pad up to the minimum width with zero digits
  while(min-- >= 0) reverse.add(zero);

  // reverse result and add ordinal suffix
  final TokenBuilder result = new TokenBuilder();
  for(int rs = reverse.size() - 1; rs >= 0; --rs) result.add(reverse.get(rs));
  return result.finish();
}
}
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.core.test;
import com.azure.core.http.HttpClient;
import com.azure.core.http.policy.HttpPipelinePolicy;
import com.azure.core.test.http.PlaybackClient;
import com.azure.core.test.models.NetworkCallRecord;
import com.azure.core.test.models.RecordedData;
import com.azure.core.test.models.RecordingRedactor;
import com.azure.core.test.policy.RecordNetworkCallPolicy;
import com.azure.core.util.CoreUtils;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URL;
import java.nio.file.Files;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
/**
 * A class that keeps track of network calls by either reading the data from an existing test session record or
 * recording the network calls in memory. Test session records are saved or read from: "<i>session-records/{@code
 * testName}.json</i>"
 *
 * <ul>
 *     <li>If the {@code testMode} is {@link TestMode#PLAYBACK}, the manager tries to find an existing test session
 *     record to read network calls from.</li>
 *     <li>If the {@code testMode} is {@link TestMode#RECORD}, the manager creates a new test session record and saves
 *     all the network calls to it.</li>
 *     <li>If the {@code testMode} is {@link TestMode#LIVE}, the manager won't attempt to read or create a test session
 *     record.</li>
 * </ul>
 *
 * When the {@link InterceptorManager} is disposed, if the {@code testMode} is {@link TestMode#RECORD}, the network
 * calls that were recorded are persisted to: "<i>session-records/{@code testName}.json</i>"
 */
public class InterceptorManager implements AutoCloseable {
    private static final String RECORD_FOLDER = "session-records/";
    private static final ObjectMapper RECORD_MAPPER = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);

    private final ClientLogger logger = new ClientLogger(InterceptorManager.class);
    private final Map<String, String> textReplacementRules;
    private final String testName;
    private final String playbackRecordName;
    private final TestMode testMode;
    private final boolean allowedToReadRecordedValues;
    private final boolean allowedToRecordValues;

    // Network call records of the current session. Non-null only when the manager is allowed to read
    // (PLAYBACK) or record (RECORD) values; null in LIVE mode or when the test opted out of recording.
    private final RecordedData recordedData;

    /**
     * Creates a new InterceptorManager that either replays test-session records or saves them.
     *
     * <ul>
     *     <li>If {@code testMode} is {@link TestMode#PLAYBACK}, the manager tries to find an existing test session
     *     record to read network calls from.</li>
     *     <li>If {@code testMode} is {@link TestMode#RECORD}, the manager creates a new test session record and saves
     *     all the network calls to it.</li>
     * </ul>
     *
     * The test session records are persisted in the path: "<i>session-records/{@code testName}.json</i>"
     *
     * @param testName Name of the test session record.
     * @param testMode The {@link TestMode} for this interceptor.
     * @throws UncheckedIOException If {@code testMode} is {@link TestMode#PLAYBACK} and an existing test session record
     * could not be located or the data could not be deserialized into an instance of {@link RecordedData}.
     * @throws NullPointerException If {@code testName} is {@code null}.
     * @deprecated Use {@link #InterceptorManager(TestContextManager)} instead.
     */
    @Deprecated
    public InterceptorManager(String testName, TestMode testMode) {
        this(testName, testName, testMode, false);
    }

    /**
     * Creates a new InterceptorManager that either replays test-session records or saves them.
     *
     * <ul>
     *     <li>If {@code testMode} is {@link TestMode#PLAYBACK}, the manager tries to find an existing test session
     *     record to read network calls from.</li>
     *     <li>If {@code testMode} is {@link TestMode#RECORD}, the manager creates a new test session record and saves
     *     all the network calls to it.</li>
     *     <li>If {@code testMode} is {@link TestMode#LIVE}, the manager won't attempt to read or create a test session
     *     record.</li>
     * </ul>
     *
     * The test session records are persisted in the path: "<i>session-records/{@code testName}.json</i>"
     *
     * @param testContextManager Contextual information about the test being ran, such as test name, {@link TestMode},
     * and others.
     * @throws UncheckedIOException If {@code testMode} is {@link TestMode#PLAYBACK} and an existing test session record
     * could not be located or the data could not be deserialized into an instance of {@link RecordedData}.
     * @throws NullPointerException If {@code testName} is {@code null}.
     */
    public InterceptorManager(TestContextManager testContextManager) {
        this(testContextManager.getTestName(), testContextManager.getTestPlaybackRecordingName(),
            testContextManager.getTestMode(), testContextManager.doNotRecordTest());
    }

    private InterceptorManager(String testName, String playbackRecordName, TestMode testMode, boolean doNotRecord) {
        Objects.requireNonNull(testName, "'testName' cannot be null.");

        this.testName = testName;
        // Fall back to the plain test name when no iteration-specific record name was supplied.
        this.playbackRecordName = CoreUtils.isNullOrEmpty(playbackRecordName) ? testName : playbackRecordName;
        this.testMode = testMode;
        this.textReplacementRules = new HashMap<>();

        this.allowedToReadRecordedValues = (testMode == TestMode.PLAYBACK && !doNotRecord);
        this.allowedToRecordValues = (testMode == TestMode.RECORD && !doNotRecord);

        if (allowedToReadRecordedValues) {
            this.recordedData = readDataFromFile();
        } else if (allowedToRecordValues) {
            this.recordedData = new RecordedData();
        } else {
            this.recordedData = null;
        }
    }

    /**
     * Creates a new InterceptorManager that replays test session records. It takes a set of {@code
     * textReplacementRules}, that can be used by {@link PlaybackClient} to replace values in a {@link
     * NetworkCallRecord#getResponse()}.
     *
     * The test session records are read from: "<i>session-records/{@code testName}.json</i>"
     *
     * @param testName Name of the test session record.
     * @param textReplacementRules A set of rules to replace text in {@link NetworkCallRecord#getResponse()} when
     * playing back network calls.
     * @throws UncheckedIOException An existing test session record could not be located or the data could not be
     * deserialized into an instance of {@link RecordedData}.
     * @throws NullPointerException If {@code testName} or {@code textReplacementRules} is {@code null}.
     * @deprecated Use {@link #InterceptorManager(String, Map, boolean)} instead.
     */
    @Deprecated
    public InterceptorManager(String testName, Map<String, String> textReplacementRules) {
        this(testName, textReplacementRules, false, testName);
    }

    /**
     * Creates a new InterceptorManager that replays test session records. It takes a set of {@code
     * textReplacementRules}, that can be used by {@link PlaybackClient} to replace values in a {@link
     * NetworkCallRecord#getResponse()}.
     *
     * The test session records are read from: "<i>session-records/{@code testName}.json</i>"
     *
     * @param testName Name of the test session record.
     * @param textReplacementRules A set of rules to replace text in {@link NetworkCallRecord#getResponse()} when
     * playing back network calls.
     * @param doNotRecord Flag indicating whether network calls should be recorded or played back.
     * @throws UncheckedIOException An existing test session record could not be located or the data could not be
     * deserialized into an instance of {@link RecordedData}.
     * @throws NullPointerException If {@code testName} or {@code textReplacementRules} is {@code null}.
     * @deprecated Use {@link #InterceptorManager(String, Map, boolean, String)} instead.
     */
    @Deprecated
    public InterceptorManager(String testName, Map<String, String> textReplacementRules, boolean doNotRecord) {
        this(testName, textReplacementRules, doNotRecord, testName);
    }

    /**
     * Creates a new InterceptorManager that replays test session records. It takes a set of {@code
     * textReplacementRules}, that can be used by {@link PlaybackClient} to replace values in a {@link
     * NetworkCallRecord#getResponse()}.
     *
     * The test session records are read from: "<i>session-records/{@code testName}.json</i>"
     *
     * @param testName Name of the test.
     * @param textReplacementRules A set of rules to replace text in {@link NetworkCallRecord#getResponse()} when
     * playing back network calls.
     * @param doNotRecord Flag indicating whether network calls should be recorded or played back.
     * @param playbackRecordName Full name of the test including its iteration, used as the playback record name.
     * @throws UncheckedIOException An existing test session record could not be located or the data could not be
     * deserialized into an instance of {@link RecordedData}.
     * @throws NullPointerException If {@code testName} or {@code textReplacementRules} is {@code null}.
     */
    public InterceptorManager(String testName, Map<String, String> textReplacementRules, boolean doNotRecord,
        String playbackRecordName) {
        Objects.requireNonNull(testName, "'testName' cannot be null.");
        Objects.requireNonNull(textReplacementRules, "'textReplacementRules' cannot be null.");

        this.testName = testName;
        this.playbackRecordName = CoreUtils.isNullOrEmpty(playbackRecordName) ? testName : playbackRecordName;
        this.testMode = TestMode.PLAYBACK;

        this.allowedToReadRecordedValues = !doNotRecord;
        this.allowedToRecordValues = false;

        this.recordedData = allowedToReadRecordedValues ? readDataFromFile() : null;
        this.textReplacementRules = textReplacementRules;
    }

    /**
     * Gets whether this InterceptorManager is in playback mode.
     *
     * @return true if the InterceptorManager is in playback mode and false otherwise.
     */
    public boolean isPlaybackMode() {
        return testMode == TestMode.PLAYBACK;
    }

    /**
     * Gets whether this InterceptorManager is in live mode.
     *
     * @return true if the InterceptorManager is in live mode and false otherwise.
     */
    public boolean isLiveMode() {
        return testMode == TestMode.LIVE;
    }

    /**
     * Gets the recorded data InterceptorManager is keeping track of.
     *
     * @return The recorded data managed by InterceptorManager.
     */
    public RecordedData getRecordedData() {
        return recordedData;
    }

    /**
     * Gets a new HTTP pipeline policy that records network calls and its data is managed by {@link
     * InterceptorManager}.
     *
     * @return HttpPipelinePolicy to record network calls.
     */
    public HttpPipelinePolicy getRecordPolicy() {
        return getRecordPolicy(Collections.emptyList());
    }

    /**
     * Gets a new HTTP pipeline policy that records network calls. The recorded content is redacted by the given list
     * of redactor functions to hide sensitive information.
     *
     * @param recordingRedactors The custom redactor functions that are applied in addition to the default redactor
     * functions defined in {@link RecordingRedactor}.
     * @return {@link HttpPipelinePolicy} to record network calls.
     */
    public HttpPipelinePolicy getRecordPolicy(List<Function<String, String>> recordingRedactors) {
        return new RecordNetworkCallPolicy(recordedData, recordingRedactors);
    }

    /**
     * Gets a new HTTP client that plays back test session records managed by {@link InterceptorManager}.
     *
     * @return An HTTP client that plays back network calls from its recorded data.
     */
    public HttpClient getPlaybackClient() {
        return new PlaybackClient(recordedData, textReplacementRules);
    }

    /**
     * Disposes of resources used by this InterceptorManager.
     *
     * If {@code testMode} is {@link TestMode#RECORD}, all the network calls are persisted to:
     * "<i>session-records/{@code testName}.json</i>"
     */
    @Override
    public void close() {
        if (allowedToRecordValues) {
            try (BufferedWriter writer = Files.newBufferedWriter(createRecordFile(playbackRecordName).toPath())) {
                RECORD_MAPPER.writeValue(writer, recordedData);
            } catch (IOException ex) {
                throw logger.logExceptionAsError(
                    new UncheckedIOException("Unable to write data to playback file.", ex));
            }
        }
    }

    /*
     * Reads and deserializes the existing test session record for playback.
     */
    private RecordedData readDataFromFile() {
        File recordFile = getRecordFile();

        try (BufferedReader reader = Files.newBufferedReader(recordFile.toPath())) {
            return RECORD_MAPPER.readValue(reader, RecordedData.class);
        } catch (IOException ex) {
            throw logger.logExceptionAsWarning(new UncheckedIOException(ex));
        }
    }

    /*
     * Creates a File which is the session-records folder.
     */
    private File getRecordFolder() {
        URL folderUrl = InterceptorManager.class.getClassLoader().getResource(".");

        // getResource may return null (e.g. when the classpath root cannot be resolved); fail with a
        // clear message instead of an unexplained NullPointerException.
        if (folderUrl == null) {
            throw logger.logExceptionAsError(new IllegalStateException(
                "Unable to locate the classpath root while resolving the '" + RECORD_FOLDER + "' folder."));
        }

        return new File(folderUrl.getPath(), RECORD_FOLDER);
    }

    /*
     * Attempts to retrieve the playback file, if it is not found an exception is thrown as playback can't continue.
     */
    private File getRecordFile() {
        File recordFolder = getRecordFolder();
        File playbackFile = new File(recordFolder, playbackRecordName + ".json");
        File oldPlaybackFile = new File(recordFolder, testName + ".json");

        if (!playbackFile.exists() && !oldPlaybackFile.exists()) {
            throw logger.logExceptionAsError(new RuntimeException(String.format(
                "Missing both new and old playback files. Files are %s and %s.", playbackFile.getPath(),
                oldPlaybackFile.getPath())));
        }

        // Prefer the iteration-specific record; fall back to the legacy record named after the test.
        if (playbackFile.exists()) {
            logger.info("==> Playback file path: {}", playbackFile.getPath());
            return playbackFile;
        } else {
            logger.info("==> Playback file path: {}", oldPlaybackFile.getPath());
            return oldPlaybackFile;
        }
    }

    /*
     * Retrieves or creates the file that will be used to store the recorded test values.
     */
    private File createRecordFile(String recordName) throws IOException {
        File recordFolder = getRecordFolder();
        if (!recordFolder.exists()) {
            if (recordFolder.mkdir()) {
                logger.verbose("Created directory: {}", recordFolder.getPath());
            }
        }

        File recordFile = new File(recordFolder, recordName + ".json");
        if (recordFile.createNewFile()) {
            logger.verbose("Created record file: {}", recordFile.getPath());
        }

        logger.info("==> Playback file path: {}", recordFile);
        return recordFile;
    }

    /**
     * Add text replacement rule (regex as key, the replacement text as value) into {@link
     * InterceptorManager#textReplacementRules}
     *
     * @param regex the pattern to locate the position of replacement
     * @param replacement the replacement text
     */
    public void addTextReplacementRule(String regex, String replacement) {
        textReplacementRules.put(regex, replacement);
    }
}
|
|
/**
* $Id: DeveloperHelperServiceImpl.java 105077 2012-02-24 22:54:29Z [email protected] $
* $URL: https://source.sakaiproject.org/svn/entitybroker/tags/sakai-10.1/impl/src/java/org/sakaiproject/entitybroker/impl/devhelper/DeveloperHelperServiceImpl.java $
* DeveloperHelperServiceImpl.java - entity-broker - Apr 13, 2008 6:30:08 PM - azeckoski
**************************************************************************
* Copyright (c) 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.entitybroker.impl.devhelper;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import org.sakaiproject.authz.api.AuthzGroupService;
import org.sakaiproject.authz.api.FunctionManager;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.entitybroker.EntityBroker;
import org.sakaiproject.entitybroker.EntityBrokerManager;
import org.sakaiproject.entitybroker.EntityReference;
import org.sakaiproject.entitybroker.util.SakaiToolData;
import org.sakaiproject.entitybroker.util.devhelper.AbstractDeveloperHelperService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.thread_local.api.ThreadLocalManager;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.api.ToolManager;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.util.ResourceLoader;
/**
* implementation of the helper service methods
*
* @author Aaron Zeckoski ([email protected])
*/
public class DeveloperHelperServiceImpl extends AbstractDeveloperHelperService {
protected DeveloperHelperServiceImpl() {}
/**
* Full constructor
* @param entityBroker
* @param entityBrokerManager
* @param authzGroupService
* @param functionManager
* @param securityService
* @param serverConfigurationService
* @param sessionManager
* @param siteService
* @param toolManager
* @param userDirectoryService
*/
public DeveloperHelperServiceImpl(EntityBroker entityBroker,
EntityBrokerManager entityBrokerManager,
AuthzGroupService authzGroupService,
FunctionManager functionManager, SecurityService securityService,
ServerConfigurationService serverConfigurationService, SessionManager sessionManager,
SiteService siteService, ToolManager toolManager,
UserDirectoryService userDirectoryService) {
super(entityBroker, entityBrokerManager);
this.authzGroupService = authzGroupService;
this.functionManager = functionManager;
this.securityService = securityService;
this.serverConfigurationService = serverConfigurationService;
this.sessionManager = sessionManager;
this.siteService = siteService;
this.toolManager = toolManager;
this.userDirectoryService = userDirectoryService;
}
/**
* Location id for the Sakai Gateway site
*/
public static String GATEWAY_ID = "!gateway";
/**
* The portal base URL
*/
public static String PORTAL_BASE = "/portal";
protected final String CURRENT_USER_MARKER = "originalCurrentUser";
// SAKAI
private AuthzGroupService authzGroupService;
private FunctionManager functionManager;
private SecurityService securityService;
private ServerConfigurationService serverConfigurationService;
private SessionManager sessionManager;
private SiteService siteService;
private ToolManager toolManager;
private UserDirectoryService userDirectoryService;
private ThreadLocalManager threadLocalManager;
// ENTITY
@Override
public Object fetchEntity(String reference) {
Object entity = super.fetchEntity(reference);
if (entity == null
&& reference.startsWith("/user")) {
// this sucks but legacy user cannot be resolved for some reason
// so look up directly since it is one of the top entities being fetched
String userId = getUserIdFromRef(reference);
if (userId != null) {
try {
entity = userDirectoryService.getUser(userId);
} catch (UserNotDefinedException e) {
entity = null;
}
}
}
return entity;
}
public String setCurrentUser(String userReference) {
if (userReference == null) {
throw new IllegalArgumentException("userReference cannot be null");
}
String userId = getUserIdFromRef(userReference);
try {
// make sure the user id is valid
userDirectoryService.getUser(userId);
} catch (UserNotDefinedException e) {
throw new IllegalArgumentException("Invalid user reference ("+userReference+"), could not find user");
}
Session currentSession = sessionManager.getCurrentSession();
if (currentSession == null) {
// start a session if none is around
currentSession = sessionManager.startSession(userId);
}
String currentUserId = currentSession.getUserId();
if (currentSession.getAttribute(CURRENT_USER_MARKER) == null) {
// only set this if it is not already set
if (currentUserId == null) {
currentUserId = "";
}
currentSession.setAttribute(CURRENT_USER_MARKER, currentUserId);
}
currentSession.setUserId(userId);
currentSession.setActive();
sessionManager.setCurrentSession(currentSession);
authzGroupService.refreshUser(userId);
return getUserRefFromUserId(currentUserId);
}
public String restoreCurrentUser() {
// switch user session back if it was taken over
Session currentSession = sessionManager.getCurrentSession();
String currentUserId = null;
if (currentSession != null) {
currentUserId = (String) currentSession.getAttribute(CURRENT_USER_MARKER);
if (currentUserId != null) {
currentSession.removeAttribute(CURRENT_USER_MARKER);
currentSession.setUserId(currentUserId);
authzGroupService.refreshUser(currentUserId);
sessionManager.setCurrentSession(currentSession);
}
if ("".equals(currentUserId)) {
currentUserId = null;
}
}
return getUserRefFromUserId(currentUserId);
}
// CONFIG
/* (non-Javadoc)
* @see org.sakaiproject.entitybroker.DeveloperHelperService#getConfigurationSetting(java.lang.String, java.lang.Object)
*/
@SuppressWarnings("unchecked")
public <T> T getConfigurationSetting(String settingName, T defaultValue) {
T returnValue = defaultValue;
if (SETTING_SERVER_NAME.equals(settingName)) {
returnValue = (T) serverConfigurationService.getServerName();
} else if (SETTING_SERVER_URL.equals(settingName)) {
returnValue = (T) serverConfigurationService.getServerUrl();
} else if (SETTING_PORTAL_URL.equals(settingName)) {
returnValue = (T) serverConfigurationService.getPortalUrl();
} else if (SETTING_SERVER_ID.equals(settingName)) {
returnValue = (T) serverConfigurationService.getServerIdInstance();
} else {
if (defaultValue == null) {
returnValue = (T) serverConfigurationService.getString(settingName);
if ("".equals(returnValue)) { returnValue = null; }
} else {
if (defaultValue instanceof Number) {
int num = ((Number) defaultValue).intValue();
int value = serverConfigurationService.getInt(settingName, num);
returnValue = (T) Integer.valueOf(value);
} else if (defaultValue instanceof Boolean) {
boolean bool = ((Boolean) defaultValue).booleanValue();
boolean value = serverConfigurationService.getBoolean(settingName, bool);
returnValue = (T) Boolean.valueOf(value);
} else if (defaultValue instanceof String) {
returnValue = (T) serverConfigurationService.getString(settingName, (String) defaultValue);
}
}
}
return returnValue;
}
// USER
/* (non-Javadoc)
* @see org.sakaiproject.entitybroker.DeveloperHelperService#getCurrentLocale()
*/
@Override
public Locale getCurrentLocale() {
return new ResourceLoader().getLocale();
}
/* (non-Javadoc)
* @see org.sakaiproject.entitybroker.DeveloperHelperService#getCurrentUserReference()
*/
public String getCurrentUserReference() {
String userId = sessionManager.getCurrentSessionUserId();
return getUserRefFromUserId(userId);
}
public String getCurrentUserId() {
String userId = sessionManager.getCurrentSessionUserId();
return userId;
}
public String getUserRefFromUserEid(String userEid) {
String userRef = null;
try {
User u = userDirectoryService.getUserByEid(userEid);
userRef = u.getReference();
} catch (UserNotDefinedException e) {
userRef = null;
}
return userRef;
}
// LOCATION
/* (non-Javadoc)
* @see org.sakaiproject.entitybroker.DeveloperHelperService#getCurrentLocationReference()
*/
public String getCurrentLocationReference() {
String location = null;
try {
String context = toolManager.getCurrentPlacement().getContext();
Site s = siteService.getSite( context );
location = s.getReference(); // get the entity reference to the site
} catch (Exception e) {
// sakai failed to get us a location so we can assume we are not inside the portal
location = null;
}
return location;
}
public String getCurrentLocationId() {
String locationId = null;
try {
String context = toolManager.getCurrentPlacement().getContext();
locationId = context;
} catch (Exception e) {
// sakai failed to get us a location so we can assume we are not inside the portal
locationId = null;
}
return locationId;
}
public String getStartingLocationReference() {
return GROUP_BASE + GATEWAY_ID;
}
// TOOLS
public String getCurrentToolReference() {
String toolRef = null;
String toolId = toolManager.getCurrentTool().getId();
// assume the form /tool/toolId
if (toolId != null) {
toolRef = new EntityReference("tool", toolId).toString();
}
return toolRef;
}
/* (non-Javadoc)
* @see org.sakaiproject.entitybroker.DeveloperHelperService#getToolData(java.lang.String, java.lang.String)
*/
public SakaiToolData getToolData(String toolRegistrationId, String locationReference) {
SakaiToolData toolData = new SakaiToolData();
if (locationReference == null) {
locationReference = getCurrentLocationReference();
}
toolData.setLocationReference(locationReference);
String locationId = getLocationIdFromRef(locationReference);
Site site = null;
try {
site = siteService.getSite( locationId );
} catch (IdUnusedException e) {
throw new IllegalArgumentException("Could not find a site by locationId=" + locationId, e);
} catch (Exception e) {
throw new IllegalArgumentException("Could not locate tool"
+ " in location=" + locationReference
+ " with toolRegistrationId=" + toolRegistrationId, e);
}
toolData.setRegistrationId(toolRegistrationId);
// get the pages for this site
List<SitePage> pages = site.getOrderedPages();
for (SitePage page : pages) {
// get the tool configs for each
for (ToolConfiguration tc : (List<ToolConfiguration>) page.getTools(0)) {
// get the tool from column 0 for this tool config (if there is one)
Tool tool = tc.getTool();
if (tool != null
&& tool.getId().equals(toolRegistrationId)) {
// hardcoding to make this backwards compatible with 2.3 - ServerConfigurationService.CURRENT_PORTAL_PATH, PORTAL_BASE);
String portalBase = (String) threadLocalManager.get("sakai:request.portal.path");
if (portalBase == null || "".equals(portalBase)) {
// this has to be here because the tc will expect it when the portal urls are generated and fail if it is missing -AZ
threadLocalManager.set("sakai:request.portal.path", PORTAL_BASE);
}
// back to normal stuff again
toolData.setToolURL(page.getUrl());
toolData.setPlacementId(tc.getId());
toolData.setTitle(tool.getTitle());
toolData.setDescription(tool.getDescription());
}
}
}
if (toolData.getPlacementId() == null) {
throw new IllegalArgumentException("Could not locate tool"
+ " in location=" + locationReference
+ " with toolRegistrationId=" + toolRegistrationId);
}
return toolData;
}
// URLS
public String getPortalURL() {
return serverConfigurationService.getPortalUrl();
}
public String getServerURL() {
return serverConfigurationService.getServerUrl();
}
public String getToolViewURL(String toolRegistrationId, String localView,
Map<String, String> parameters, String locationReference) {
if (toolRegistrationId == null || "".equals(toolRegistrationId)) {
throw new IllegalArgumentException("toolRegistrationId must be set and cannot be null or blank");
}
SakaiToolData info = getToolData(toolRegistrationId, locationReference);
StringBuilder viewURL = new StringBuilder();
if (localView == null || "".equals(localView)) {
// do nothing
} else {
viewURL.append(localView);
}
// build the params map into a string
boolean firstParamUsed = false;
if (parameters != null && parameters.size() > 0) {
for (Entry<String, String> es : parameters.entrySet()) {
if (es.getValue() != null) {
if (firstParamUsed) {
viewURL.append("&");
} else {
viewURL.append("?");
firstParamUsed = true;
}
viewURL.append(es.getKey());
viewURL.append("=");
viewURL.append(es.getValue());
}
}
}
// urlencode the view part to append
String encodedViewURL = null;
try {
encodedViewURL = URLEncoder.encode(viewURL.toString(), URL_ENCODING);
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException("Invalid character encoding specified: " + URL_ENCODING);
}
// use the base URL or add in the extra bits if desired
String toolURL = info.getToolURL();
if (encodedViewURL != null && encodedViewURL.length() > 0) {
toolURL = info.getToolURL() + "?toolstate-" + info.getPlacementId() + "=" + encodedViewURL;
}
// Sample URL: http://server:port/portal/site/siteId/page/pageId?toolstate-toolpid=/newpage?thing=value
return toolURL;
}
// PERMISSIONS
public void registerPermission(String permission) {
functionManager.registerFunction(permission);
}
/* (non-Javadoc)
* @see org.sakaiproject.entitybroker.DeveloperHelperService#isUserAdmin(java.lang.String)
*/
public boolean isUserAdmin(String userReference) {
boolean admin = false;
String userId = getUserIdFromRef(userReference);
if (userId != null) {
admin = securityService.isSuperUser(userId);
}
return admin;
}
/* (non-Javadoc)
* @see org.sakaiproject.entitybroker.DeveloperHelperService#isUserAllowedInReference(java.lang.String, java.lang.String, java.lang.String)
*/
public boolean isUserAllowedInEntityReference(String userReference, String permission, String reference) {
if (permission == null) {
throw new IllegalArgumentException("permission must both be set");
}
boolean allowed = false;
if (userReference != null) {
String userId = getUserIdFromRef(userReference);
if (userId != null) {
if (reference == null) {
// special check for the admin user
if ( securityService.isSuperUser(userId) ) {
allowed = true;
}
} else {
if ( securityService.unlock(userId, permission, reference) ) {
allowed = true;
}
}
}
} else {
// special anonymous user case - http://jira.sakaiproject.org/jira/browse/SAK-14840
allowed = securityService.unlock(permission, reference);
}
return allowed;
}
public Set<String> getEntityReferencesForUserAndPermission(String userReference, String permission) {
if (permission == null) {
throw new IllegalArgumentException("permission must both be set");
}
Set<String> s = new HashSet<String>();
// get the groups from Sakai
String userId = null;
if (userReference != null) {
userId = getUserIdFromRef(userReference);
}
// anonymous user case - http://jira.sakaiproject.org/jira/browse/SAK-14840
Set<String> authzGroupIds =
authzGroupService.getAuthzGroupsIsAllowed(userId, permission, null);
if (authzGroupIds != null) {
s.addAll(authzGroupIds);
}
return s;
}
public Set<String> getUserReferencesForEntityReference(String reference, String permission) {
if (reference == null || permission == null) {
throw new IllegalArgumentException("reference and permission must both be set");
}
List<String> azGroups = new ArrayList<String>();
azGroups.add(reference);
Set<String> userIds = authzGroupService.getUsersIsAllowed(permission, azGroups);
// need to remove the admin user or else they show up in unwanted places (I think, maybe this is not needed)
if (userIds.contains(ADMIN_USER_ID)) {
userIds.remove(ADMIN_USER_ID);
}
// now convert to userRefs
Set<String> userRefs = new HashSet<String>();
for (String userId : userIds) {
userRefs.add( getUserRefFromUserId(userId) );
}
return userRefs;
}
    /** Dependency injection setter. */
    public void setAuthzGroupService(AuthzGroupService authzGroupService) {
        this.authzGroupService = authzGroupService;
    }
    /** Dependency injection setter. */
    public void setFunctionManager(FunctionManager functionManager) {
        this.functionManager = functionManager;
    }
    /** Dependency injection setter. */
    public void setSecurityService(SecurityService securityService) {
        this.securityService = securityService;
    }
    /** Dependency injection setter. */
    public void setServerConfigurationService(ServerConfigurationService serverConfigurationService) {
        this.serverConfigurationService = serverConfigurationService;
    }
    /** Dependency injection setter. */
    public void setSessionManager(SessionManager sessionManager) {
        this.sessionManager = sessionManager;
    }
    /** Dependency injection setter. */
    public void setSiteService(SiteService siteService) {
        this.siteService = siteService;
    }
    /** Dependency injection setter. */
    public void setToolManager(ToolManager toolManager) {
        this.toolManager = toolManager;
    }
    /** Dependency injection setter. */
    public void setUserDirectoryService(UserDirectoryService userDirectoryService) {
        this.userDirectoryService = userDirectoryService;
    }
    /** Dependency injection setter. */
    public void setThreadLocalManager(ThreadLocalManager threadLocalManager) {
        this.threadLocalManager = threadLocalManager;
    }
}
|
|
/*
* Copyright 2013-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.contract.verifier.util;
import java.util.LinkedList;
import java.util.regex.Pattern;
import com.toomuchcoding.jsonassert.JsonVerifiable;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.text.StringEscapeUtils;
/**
 * Implementation of the {@link MethodBufferingJsonVerifiable} that contains a list of
 * String method commands that need to be executed to assert JSONs. Every call is both
 * delegated to a concrete {@link JsonVerifiable} and appended, in textual form, to
 * {@code methodsBuffer}, so that the whole assertion chain can later be rendered as
 * generated code via {@link #method()}.
 *
 * @author Marcin Grzejszczak
 * @since 1.0.0
 */
class DelegatingJsonVerifiable implements MethodBufferingJsonVerifiable {
	private static final Log log = LogFactory.getLog(DelegatingJsonVerifiable.class);
	// Matches a buffered ".field(...)" call (argument optionally quoted).
	private static final Pattern FIELD_PATTERN = Pattern.compile("\\.field\\((\")?(.)+(\")?\\)");
	// Matches a buffered ".array(...)" call (argument optionally quoted).
	private static final Pattern ARRAY_PATTERN = Pattern.compile("\\.array\\((\")?(.)+(\")?\\)");
	// The real verifiable that performs the actual JSON traversal and assertions.
	final JsonVerifiable delegate;
	// Textual method calls buffered in invocation order; rendered by method().
	final LinkedList<String> methodsBuffer;
	// Raw value passed to the most recent field/array call; may be null.
	final Object valueToCheck;
	DelegatingJsonVerifiable(JsonVerifiable delegate, LinkedList<String> methodsBuffer, Object valueToCheck) {
		this.delegate = delegate;
		// copy so that branching assertion chains do not share one mutable buffer
		this.methodsBuffer = new LinkedList<>(methodsBuffer);
		this.valueToCheck = valueToCheck;
	}
	DelegatingJsonVerifiable(JsonVerifiable delegate, LinkedList<String> methodsBuffer) {
		this.delegate = delegate;
		this.methodsBuffer = new LinkedList<>(methodsBuffer);
		this.valueToCheck = null;
	}
	DelegatingJsonVerifiable(JsonVerifiable delegate) {
		this.delegate = delegate;
		this.methodsBuffer = new LinkedList<>();
		this.valueToCheck = null;
	}
	// Escapes double quotes so the value can be embedded in generated source code.
	private static String stringWithEscapedQuotes(Object object) {
		String stringValue = object.toString();
		return stringValue.replaceAll("\"", "\\\\\"");
	}
	// Strings are wrapped in quotes (with inner quotes escaped); other types use toString().
	private static String wrapValueWithQuotes(Object value) {
		return value instanceof String ? "\"" + stringWithEscapedQuotes(value) + "\"" : value.toString();
	}
	// Buffers a call of the form ".methodName(value)" verbatim.
	private void appendMethodWithValue(String methodName, Object value) {
		this.methodsBuffer.offer("." + methodName + "(" + value + ")");
	}
	// Buffers a call of the form ".methodName("value")", quoting String values.
	private void appendMethodWithQuotedValue(String methodName, Object value) {
		appendMethodWithValue(methodName, wrapValueWithQuotes(value));
	}
	@Override
	public MethodBufferingJsonVerifiable contains(Object value) {
		DelegatingJsonVerifiable verifiable = new FinishedDelegatingJsonVerifiable(this.delegate.jsonPath(),
				this.delegate.contains(value), this.methodsBuffer);
		verifiable.appendMethodWithQuotedValue("contains", value);
		// a trailing .value() closes the check when asserting a value inside an array
		if (isAssertingAValueInArray()) {
			verifiable.methodsBuffer.offer(".value()");
		}
		return verifiable;
	}
	@Override
	public MethodBufferingJsonVerifiable field(Object value) {
		// ShouldTraverse is a marker wrapper: traverse into the field instead of asserting on it
		Object valueToPut = value instanceof ShouldTraverse ? ((ShouldTraverse) value).value : value;
		Object wrappedValue = wrapInBrackets(valueToPut);
		DelegatingJsonVerifiable verifiable = new DelegatingJsonVerifiable(this.delegate.field(wrappedValue),
				this.methodsBuffer, value);
		if (this.delegate.isIteratingOverArray() && !(value instanceof ShouldTraverse)) {
			verifiable.appendMethodWithQuotedValue("contains", wrappedValue);
		}
		else {
			verifiable.appendMethodWithQuotedValue("field", wrappedValue);
		}
		return verifiable;
	}
	@Override
	public MethodBufferingJsonVerifiable field(String... strings) {
		// chains field() calls for a nested path, e.g. field("a", "b") -> field("a").field("b")
		MethodBufferingJsonVerifiable verifiable = null;
		for (String string : strings) {
			verifiable = verifiable == null ? field(string) : verifiable.field(string);
		}
		return verifiable;
	}
	@Override
	public MethodBufferingJsonVerifiable array(Object value) {
		Object valueToPut = wrapInBrackets(value);
		DelegatingJsonVerifiable verifiable = new DelegatingJsonVerifiable(this.delegate.array(valueToPut),
				this.methodsBuffer, value);
		verifiable.appendMethodWithQuotedValue("array", valueToPut);
		return verifiable;
	}
	@Override
	public MethodBufferingJsonVerifiable arrayField(Object value) {
		Object valueToPut = wrapInBrackets(value);
		DelegatingJsonVerifiable verifiable = new DelegatingJsonVerifiable(this.delegate.field(valueToPut).arrayField(),
				this.methodsBuffer, value);
		// note: buffers ".array(...)" (not ".arrayField(...)") for a named array field
		verifiable.appendMethodWithQuotedValue("array", valueToPut);
		return verifiable;
	}
	@Override
	public MethodBufferingJsonVerifiable arrayField() {
		DelegatingJsonVerifiable verifiable = new DelegatingJsonVerifiable(this.delegate.arrayField(),
				this.methodsBuffer);
		verifiable.methodsBuffer.offer(".arrayField()");
		return verifiable;
	}
	@Override
	public MethodBufferingJsonVerifiable array() {
		DelegatingJsonVerifiable verifiable = new DelegatingJsonVerifiable(this.delegate.array(), this.methodsBuffer);
		verifiable.methodsBuffer.offer(".array()");
		return verifiable;
	}
	@Override
	public JsonVerifiable elementWithIndex(int i) {
		DelegatingJsonVerifiable verifiable = new DelegatingJsonVerifiable(this.delegate.elementWithIndex(i),
				this.methodsBuffer);
		// NOTE(review): unlike the other builders this offers to this.methodsBuffer AFTER the
		// returned verifiable copied it, so ".elementWithIndex(i)" is not propagated to the
		// returned instance's buffer - confirm this asymmetry is intentional
		this.methodsBuffer.offer(".elementWithIndex(" + i + ")");
		return verifiable;
	}
	@Override
	public MethodBufferingJsonVerifiable iterationPassingArray() {
		// same delegate and buffer snapshot; nothing new is buffered
		return new DelegatingJsonVerifiable(this.delegate, this.methodsBuffer);
	}
	@Override
	public MethodBufferingJsonVerifiable isEqualTo(String value) {
		DelegatingJsonVerifiable readyToCheck = new FinishedDelegatingJsonVerifiable(this.delegate.jsonPath(),
				this.delegate.isEqualTo(value), this.methodsBuffer, value);
		// NOTE(review): peekLast() may return null on an empty buffer; this assumes at least
		// one call was buffered before an isEqualTo - TODO confirm with callers
		if (this.delegate.isAssertingAValueInArray() && readyToCheck.methodsBuffer.peekLast().equals(".arrayField()")) {
			readyToCheck.appendMethodWithQuotedValue("isEqualTo", escapedHackedJavaText(value));
			readyToCheck.methodsBuffer.offer(".value()");
		}
		else if (this.delegate.isAssertingAValueInArray() && !readyToCheck.methodsBuffer.peekLast().contains("array")) {
			readyToCheck.methodsBuffer.offer(".value()");
		}
		else {
			readyToCheck.appendMethodWithQuotedValue("isEqualTo", escapedHackedJavaText(value));
		}
		return readyToCheck;
	}
	@Override
	public MethodBufferingJsonVerifiable isEqualTo(Object value) {
		// null is delegated to isNull(); everything else is compared via its String form
		if (value == null) {
			return isNull();
		}
		return isEqualTo(value.toString());
	}
	@Override
	public MethodBufferingJsonVerifiable isEqualTo(Number value) {
		DelegatingJsonVerifiable readyToCheck = new FinishedDelegatingJsonVerifiable(this.delegate.jsonPath(),
				this.delegate.isEqualTo(value), this.methodsBuffer, value);
		// related to #271 - the problem is with asserting arrays of maps vs arrays of
		// primitives
		String last = readyToCheck.methodsBuffer.peekLast();
		boolean containsAMatcher = containsAnyMatcher(last);
		if (this.delegate.isAssertingAValueInArray() && containsAMatcher) {
			readyToCheck.methodsBuffer.offer(".value()");
		}
		else {
			// Longs get an explicit "L" suffix so the generated source compiles with the right type
			readyToCheck.appendMethodWithValue("isEqualTo",
					value instanceof Long ? String.valueOf(value).concat("L") : String.valueOf(value));
		}
		return readyToCheck;
	}
	// True when the buffered call already performs a terminal assertion.
	private boolean containsAnyMatcher(String string) {
		return string.contains("isEqualTo") || string.contains("matches") || string.contains("isNull");
	}
	@Override
	public MethodBufferingJsonVerifiable isNull() {
		DelegatingJsonVerifiable readyToCheck = new FinishedDelegatingJsonVerifiable(this.delegate.jsonPath(),
				this.delegate.isNull(), this.methodsBuffer);
		readyToCheck.methodsBuffer.offer(".isNull()");
		return readyToCheck;
	}
	@Override
	public MethodBufferingJsonVerifiable isEmpty() {
		DelegatingJsonVerifiable readyToCheck = new FinishedDelegatingJsonVerifiable(this.delegate.jsonPath(),
				this.delegate.isEmpty(), this.methodsBuffer);
		readyToCheck.methodsBuffer.offer(".isEmpty()");
		return readyToCheck;
	}
	@Override
	public MethodBufferingJsonVerifiable isInstanceOf(Class clazz) throws IllegalStateException {
		DelegatingJsonVerifiable readyToCheck = new FinishedDelegatingJsonVerifiable(this.delegate.jsonPath(),
				this.delegate.isInstanceOf(clazz), this.methodsBuffer);
		readyToCheck.methodsBuffer.offer(".isInstanceOf(" + clazz.getName() + ")");
		return readyToCheck;
	}
	@Override
	public MethodBufferingJsonVerifiable matches(String value) {
		DelegatingJsonVerifiable readyToCheck = new FinishedDelegatingJsonVerifiable(this.delegate.jsonPath(),
				this.delegate.matches(value), this.methodsBuffer, compilePattern(value));
		if (this.delegate.isAssertingAValueInArray()) {
			readyToCheck.appendMethodWithQuotedValue("matches", escapedHackedJavaText(value));
			readyToCheck.methodsBuffer.offer(".value()");
		}
		else {
			readyToCheck.appendMethodWithQuotedValue("matches", escapedHackedJavaText(value));
		}
		return readyToCheck;
	}
	// Best effort: keep the compiled Pattern as the value to check; fall back to the raw
	// String when the regex does not compile.
	private Object compilePattern(String value) {
		try {
			return Pattern.compile(value);
		}
		catch (Exception e) {
			log.warn("Exception occurred while trying to compile the pattern [" + value
					+ "]. Will return the value explicitly. Hopefully, you know what you're doing...");
			return value;
		}
	}
	/**
	 * We need to escape the pattern in order for the produced text to be compilable. The
	 * problem is that sometimes we get quotes that are already escaped. If we escape them
	 * we get code that doesn't compile. That's why we're doing this hack to unescape a
	 * double-escaped text. Related to
	 * https://github.com/spring-cloud/spring-cloud-contract/issues/169
	 * @param value text to escape
	 * @return escaped text with lower number of escaped backslashes
	 */
	private String escapedHackedJavaText(String value) {
		return StringEscapeUtils.escapeJava(value).replace("\\\"", "\"");
	}
	@Override
	public MethodBufferingJsonVerifiable isEqualTo(Boolean value) {
		DelegatingJsonVerifiable readyToCheck = new FinishedDelegatingJsonVerifiable(this.delegate.jsonPath(),
				this.delegate.isEqualTo(value), this.methodsBuffer, value);
		if (this.delegate.isAssertingAValueInArray()) {
			readyToCheck.methodsBuffer.offer(".value()");
		}
		else {
			readyToCheck.appendMethodWithValue("isEqualTo", String.valueOf(value));
		}
		return readyToCheck;
	}
	@Override
	public MethodBufferingJsonVerifiable value() {
		return new FinishedDelegatingJsonVerifiable(this.delegate, this.methodsBuffer);
	}
	@Override
	public String keyBeforeChecking() {
		return this.delegate.jsonPath();
	}
	@Override
	public Object valueBeforeChecking() {
		return this.valueToCheck;
	}
	@Override
	public boolean assertsSize() {
		// true if any buffered call constrains the size of a collection
		for (String s : this.methodsBuffer) {
			if (s.contains(".hasSize(") || s.contains(".isEmpty()")) {
				return true;
			}
		}
		return false;
	}
	@Override
	public boolean assertsConcreteValue() {
		// true if any buffered call is a full field(...)/array(...) invocation
		for (String s : this.methodsBuffer) {
			if (FIELD_PATTERN.matcher(s).matches() || ARRAY_PATTERN.matcher(s).matches()) {
				return true;
			}
		}
		return false;
	}
	@Override
	public JsonVerifiable withoutThrowingException() {
		return this.delegate.withoutThrowingException();
	}
	@Override
	public String jsonPath() {
		return this.delegate.jsonPath();
	}
	@Override
	public void matchesJsonPath(String s) {
		this.delegate.matchesJsonPath(s);
	}
	@Override
	public JsonVerifiable hasSize(int size) {
		FinishedDelegatingJsonVerifiable verifiable = new FinishedDelegatingJsonVerifiable(this.delegate.jsonPath(),
				this.delegate.hasSize(size), this.methodsBuffer);
		verifiable.methodsBuffer.offer(".hasSize(" + size + ")");
		return verifiable;
	}
	@Override
	public boolean isIteratingOverNamelessArray() {
		return this.delegate.isIteratingOverNamelessArray();
	}
	@Override
	public boolean isIteratingOverArray() {
		return this.delegate.isIteratingOverArray();
	}
	@Override
	public boolean isAssertingAValueInArray() {
		return this.delegate.isAssertingAValueInArray();
	}
	// Related to https://github.com/spring-cloud/spring-cloud-contract/issues/269
	// Wraps non-numeric names in JSON-path bracket notation, e.g. a -> ['a'].
	private Object wrapInBrackets(Object name) {
		if (name instanceof Number) {
			return name;
		}
		return "['" + name.toString() + "']";
	}
	@Override
	public String method() {
		return createMethodString();
	}
	// Concatenates all buffered calls, in order, into a single code fragment.
	private String createMethodString() {
		LinkedList<String> queue = new LinkedList<>(this.methodsBuffer);
		StringBuilder stringBuffer = new StringBuilder();
		while (!queue.isEmpty()) {
			stringBuffer.append(queue.remove());
		}
		return stringBuffer.toString();
	}
	// Equality is based solely on the buffered method text, not on the delegate.
	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		DelegatingJsonVerifiable that = (DelegatingJsonVerifiable) o;
		return this.methodsBuffer.toString().equals(that.methodsBuffer.toString());
	}
	@Override
	public int hashCode() {
		int result = this.methodsBuffer.toString().hashCode();
		return 31 * result;
	}
	@Override
	public String toString() {
		return "DelegatingJsonVerifiable{" + "delegate=\n" + this.delegate + ", methodsBuffer=" + this.methodsBuffer
				+ '}';
	}
	@Override
	public <T> T read(Class<T> aClass) {
		return this.delegate.read(aClass);
	}
}
|
|
/*
* Copyright 2014-2016 EMBL - European Bioinformatics Institute
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.eva.pipeline.io.mappers;
import org.apache.commons.lang3.StringUtils;
import org.opencb.biodata.models.feature.Genotype;
import org.opencb.biodata.models.variant.VariantFactory;
import org.opencb.biodata.models.variant.exceptions.NonStandardCompliantSampleField;
import org.opencb.biodata.models.variant.exceptions.NotAVariantException;
import uk.ac.ebi.eva.commons.models.data.Variant;
import uk.ac.ebi.eva.commons.models.data.VariantSourceEntry;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import static java.lang.Math.max;
/**
 * Class that parses VCF lines to create Variants.
 */
public class VariantVcfFactory {
    /**
     * Creates a list of Variant objects using the fields in a record of a VCF
     * file. A new Variant object is created per allele, so several of them can
     * be created from a single line.
     * <p>
     * Start/end coordinates assignment tries to work as similarly as possible
     * as Ensembl does, except for insertions, where start is greater than end:
     * http://www.ensembl.org/info/docs/tools/vep/vep_formats.html#vcf
     *
     * @param fileId file the line comes from
     * @param studyId study the file belongs to
     * @param line Contents of the line in the file
     * @return The list of Variant objects that can be created using the fields from a VCF record
     * @throws IllegalArgumentException if the line has fewer than the 8 mandatory VCF columns
     * @throws NotAVariantException if the line describes a reference position, not a variant
     */
    public List<Variant> create(String fileId, String studyId,
                                String line) throws IllegalArgumentException, NotAVariantException {
        String[] fields = line.split("\t");
        if (fields.length < 8) {
            throw new IllegalArgumentException("Not enough fields provided (min 8)");
        }
        String chromosome = getChromosomeWithoutPrefix(fields);
        int position = getPosition(fields);
        Set<String> ids = getIds(fields);
        String reference = getReference(fields);
        String[] alternateAlleles = getAlternateAlleles(fields, chromosome, position, reference);
        float quality = getQuality(fields);
        String filter = getFilter(fields);
        String info = getInfo(fields);
        String format = getFormat(fields);
        // note: buildVariantKeyFields also rewrites alternateAlleles with normalized values
        List<VariantKeyFields> generatedKeyFields = buildVariantKeyFields(chromosome, position, reference,
                                                                          alternateAlleles);
        List<Variant> variants = new LinkedList<>();
        // Now create all the Variant objects read from the VCF record
        for (int altAlleleIdx = 0; altAlleleIdx < alternateAlleles.length; altAlleleIdx++) {
            VariantKeyFields keyFields = generatedKeyFields.get(altAlleleIdx);
            Variant variant = new Variant(chromosome, keyFields.start, keyFields.end, keyFields.reference,
                                          keyFields.alternate);
            String[] secondaryAlternates = getSecondaryAlternates(keyFields.getNumAllele(), alternateAlleles);
            VariantSourceEntry file = new VariantSourceEntry(fileId, studyId, secondaryAlternates, format);
            variant.addSourceEntry(file);
            try {
                parseSplitSampleData(variant, fileId, studyId, fields, alternateAlleles, secondaryAlternates, altAlleleIdx);
                // Fill the rest of fields (after samples because INFO depends on them)
                setOtherFields(variant, fileId, studyId, ids, quality, filter, info, format, keyFields.getNumAllele(),
                               alternateAlleles, line);
                variants.add(variant);
            } catch (NonStandardCompliantSampleField ex) {
                // the offending variant is skipped (not added), but the rest are still returned
                Logger.getLogger(VariantFactory.class.getName())
                      .log(Level.SEVERE,
                           String.format("Variant %s:%d:%s>%s will not be saved\n%s", chromosome, position, reference,
                                         alternateAlleles[altAlleleIdx], ex.getMessage()));
            }
        }
        return variants;
    }
    /**
     * Replace "chr" references only at the beginning of the chromosome name.
     * For instance, tomato has SL2.40ch00 and that should be kept that way
     */
    private String getChromosomeWithoutPrefix(String[] fields) {
        String chromosome = fields[0];
        boolean ignoreCase = true;
        int startOffset = 0;
        String prefixToRemove = "chr";
        if (chromosome.regionMatches(ignoreCase, startOffset, prefixToRemove, startOffset, prefixToRemove.length())) {
            return chromosome.substring(prefixToRemove.length());
        }
        return chromosome;
    }
    /** Parses the POS column (1-based position). */
    private int getPosition(String[] fields) {
        return Integer.parseInt(fields[1]);
    }
    /** Parses the ID column; ";"-separated ids. */
    private Set<String> getIds(String[] fields) {
        Set<String> ids = new HashSet<>();
        if (!fields[2].equals(".")) { // note!: we store a "." as an empty set, not a set with an empty string
            ids.addAll(Arrays.asList(fields[2].split(";")));
        }
        return ids;
    }
    /** Parses the REF column; "." maps to an empty string. */
    private String getReference(String[] fields) {
        return fields[3].equals(".") ? "" : fields[3];
    }
    /** Parses the ALT column as a ","-separated list; "." means no variant at all. */
    private String[] getAlternateAlleles(String[] fields, String chromosome, int position, String reference) {
        if (fields[4].equals(".")) {
            throw new NotAVariantException(
                    "Alternative allele is a '.'. This is not an actual variant but a reference position. " +
                            "Variant found as: " + chromosome + ":" + position + ":" + reference + ">" + fields[4]);
        }
        return fields[4].split(",");
    }
    /** Parses the QUAL column; missing quality (".") is encoded as the sentinel -1. */
    private float getQuality(String[] fields) {
        return fields[5].equals(".") ? -1 : Float.parseFloat(fields[5]);
    }
    /** Parses the FILTER column; "." maps to an empty string. */
    private String getFilter(String[] fields) {
        return fields[6].equals(".") ? "" : fields[6];
    }
    /** Parses the INFO column; "." maps to an empty string. */
    private String getInfo(String[] fields) {
        return fields[7].equals(".") ? "" : fields[7];
    }
    /** Parses the optional FORMAT column; missing or "." maps to an empty string. */
    private String getFormat(String[] fields) {
        return (fields.length <= 8 || fields[8].equals(".")) ? "" : fields[8];
    }
    /**
     * Normalizes every alternate allele and assigns its allele index.
     * Side effect: entries of {@code alternateAlleles} are replaced with their
     * normalized forms, since callers need the updated alleles afterwards.
     */
    private List<VariantKeyFields> buildVariantKeyFields(String chromosome, int position, String reference,
                                                         String[] alternateAlleles) {
        List<VariantKeyFields> generatedKeyFields = new ArrayList<>();
        for (int i = 0; i < alternateAlleles.length; i++) { // This index is necessary for getting the samples where the mutated allele is present
            VariantKeyFields keyFields = normalizeLeftAlign(chromosome, position, reference, alternateAlleles[i]);
            keyFields.setNumAllele(i);
            // Since the reference and alternate alleles won't necessarily match
            // the ones read from the VCF file but they are still needed for
            // instantiating the variants, they must be updated
            alternateAlleles[i] = keyFields.alternate;
            generatedKeyFields.add(keyFields);
        }
        return generatedKeyFields;
    }
    /**
     * Calculates the normalized start, end, reference and alternate of a variant where the
     * reference and the alternate are not identical.
     * <p>
     * This task comprises 2 steps: removing the trailing bases that are
     * identical in both alleles, then the leading identical bases.
     * <p>
     * It is left aligned because the trailing bases are removed before the leading ones, implying a normalization where
     * the position is moved the least possible from its original location.
     * @param chromosome needed for error reporting and logging
     * @param position Input starting position
     * @param reference Input reference allele
     * @param alternate Input alternate allele
     * @return The new start, end, reference and alternate alleles wrapped in a VariantKeyFields
     * @throws NotAVariantException if reference and alternate are identical
     */
    protected VariantKeyFields normalizeLeftAlign(String chromosome, int position, String reference, String alternate)
            throws NotAVariantException {
        if (reference.equals(alternate)) {
            throw new NotAVariantException("One alternate allele is identical to the reference. Variant found as: "
                                                   + chromosome + ":" + position + ":" + reference + ">" + alternate);
        }
        // Remove the trailing bases
        String refReversed = StringUtils.reverse(reference);
        String altReversed = StringUtils.reverse(alternate);
        int indexOfDifference = StringUtils.indexOfDifference(refReversed, altReversed);
        reference = StringUtils.reverse(refReversed.substring(indexOfDifference));
        alternate = StringUtils.reverse(altReversed.substring(indexOfDifference));
        // Remove the leading bases
        indexOfDifference = StringUtils.indexOfDifference(reference, alternate);
        int start = position + indexOfDifference;
        int length = max(reference.length(), alternate.length());
        int end = position + length - 1; // -1 because end is inclusive
        if (indexOfDifference > 0) {
            reference = reference.substring(indexOfDifference);
            alternate = alternate.substring(indexOfDifference);
        }
        return new VariantKeyFields(start, end, reference, alternate);
    }
    /**
     * Returns every alternate allele except the one at {@code numAllele},
     * preserving the original order.
     */
    protected String[] getSecondaryAlternates(int numAllele, String[] alternateAlleles) {
        String[] secondaryAlternates = new String[alternateAlleles.length - 1];
        for (int i = 0, j = 0; i < alternateAlleles.length; i++) {
            if (i != numAllele) {
                secondaryAlternates[j++] = alternateAlleles[i];
            }
        }
        return secondaryAlternates;
    }
    /**
     * Parses the per-sample columns (from column 10 onwards) into maps keyed by the
     * FORMAT field names, and attaches them to the variant's source entry.
     */
    protected void parseSplitSampleData(Variant variant, String fileId, String studyId, String[] fields,
                                        String[] alternateAlleles, String[] secondaryAlternates,
                                        int alternateAlleleIdx) throws NonStandardCompliantSampleField {
        String[] formatFields = variant.getSourceEntry(fileId, studyId).getFormat().split(":");
        for (int i = 9; i < fields.length; i++) {
            Map<String, String> map = new TreeMap<>();
            // Fill map of a sample
            String[] sampleFields = fields[i].split(":");
            // Samples may remove the trailing fields (only GT is mandatory),
            // so the loop iterates to sampleFields.length, not formatFields.length
            for (int j = 0; j < sampleFields.length; j++) {
                String formatField = formatFields[j];
                String sampleField = processSampleField(alternateAlleleIdx, formatField, sampleFields[j]);
                map.put(formatField, sampleField);
            }
            // Add sample to the variant entry in the source file
            variant.getSourceEntry(fileId, studyId).addSampleData(map);
        }
    }
    /**
     * If this is a field other than the genotype (GT), return unmodified. Otherwise,
     * see {@link VariantVcfFactory#processGenotypeField(int, java.lang.String)}
     *
     * @param alternateAlleleIdx current alternate being processed. 0 for first alternate, 1 or more for a secondary alternate.
     * @param formatField as shown in the FORMAT column. most probably the GT field.
     * @param sampleField parsed value in a column of a sample, such as a genotype, e.g. "0/0".
     * @return processed sample field, ready to be stored.
     */
    private String processSampleField(int alternateAlleleIdx, String formatField, String sampleField) {
        if (formatField.equalsIgnoreCase("GT")) {
            return processGenotypeField(alternateAlleleIdx, sampleField);
        } else {
            return sampleField;
        }
    }
    /**
     * Intern the genotype String into the String pool to avoid storing lots of "0/0". In case that the variant is
     * multiallelic and we are currently processing one of the secondary alternates (T is the only secondary alternate
     * in a variant like A -> C,T), change the allele codes to represent the current alternate as allele 1. For details
     * on changing these indexes, see {@link VariantVcfFactory#mapToMultiallelicIndex(int, int)}
     *
     * @param alternateAlleleIdx current alternate being processed. 0 for first alternate, 1 or more for a secondary alternate.
     * @param genotype first field in the samples column, e.g. "0/0"
     * @return the processed genotype string, as described above (interned and changed if multiallelic).
     */
    private String processGenotypeField(int alternateAlleleIdx, String genotype) {
        boolean isNotTheFirstAlternate = alternateAlleleIdx >= 1;
        if (isNotTheFirstAlternate) {
            Genotype parsedGenotype = new Genotype(genotype);
            StringBuilder genotypeStr = new StringBuilder();
            for (int allele : parsedGenotype.getAllelesIdx()) {
                if (allele < 0) { // Missing
                    genotypeStr.append(".");
                } else {
                    // Replace numerical indexes when they refer to another alternate allele
                    genotypeStr.append(String.valueOf(mapToMultiallelicIndex(allele, alternateAlleleIdx)));
                }
                genotypeStr.append(parsedGenotype.isPhased() ? "|" : "/");
            }
            // drop the trailing phase separator appended by the loop
            genotype = genotypeStr.substring(0, genotypeStr.length() - 1);
        }
        return genotype.intern();
    }
    /**
     * Stores the remaining VCF columns (IDs, QUAL, FILTER, INFO and the raw line)
     * as attributes on the variant's source entry.
     */
    protected void setOtherFields(Variant variant, String fileId, String studyId, Set<String> ids, float quality, String filter,
                                  String info, String format, int numAllele, String[] alternateAlleles, String line) {
        // Fields not affected by the structure of REF and ALT fields
        variant.setIds(ids);
        // quality == -1 is the "missing QUAL" sentinel from getQuality(), so it is not stored
        if (quality > -1) {
            variant.getSourceEntry(fileId, studyId)
                   .addAttribute("QUAL", String.valueOf(quality));
        }
        if (!filter.isEmpty()) {
            variant.getSourceEntry(fileId, studyId).addAttribute("FILTER", filter);
        }
        if (!info.isEmpty()) {
            parseInfo(variant, fileId, studyId, info, numAllele);
        }
        variant.getSourceEntry(fileId, studyId).addAttribute("src", line);
    }
    /**
     * Parses the ";"-separated INFO column into source entry attributes. Per-allele
     * fields (ACC, AC, AF) keep only the value for the current allele; some fields
     * (NS, DP, MQ, MQ0) are recomputed from the sample data instead.
     */
    protected void parseInfo(Variant variant, String fileId, String studyId, String info, int numAllele) {
        VariantSourceEntry file = variant.getSourceEntry(fileId, studyId);
        for (String var : info.split(";")) {
            String[] splits = var.split("=");
            if (splits.length == 2) {
                switch (splits[0]) {
                    case "ACC":
                        // Managing accession ID for the allele
                        String[] ids = splits[1].split(",");
                        file.addAttribute(splits[0], ids[numAllele]);
                        break;
                    case "AC":
                        // TODO For now, only one alternate is supported
                        String[] counts = splits[1].split(",");
                        file.addAttribute(splits[0], counts[numAllele]);
                        break;
                    case "AF":
                        // TODO For now, only one alternate is supported
                        String[] frequencies = splits[1].split(",");
                        file.addAttribute(splits[0], frequencies[numAllele]);
                        break;
//                    case "AN":
//                        // TODO For now, only two alleles (reference and one alternate) are supported, but this should be changed
//                        file.addAttribute(splits[0], "2");
//                        break;
                    case "NS":
                        // Count the number of samples that are associated with the allele
                        file.addAttribute(splits[0], String.valueOf(file.getSamplesData().size()));
                        break;
                    case "DP":
                        int dp = 0;
                        for (Map<String, String> sampleData : file.getSamplesData()) {
                            String sampleDp = sampleData.get("DP");
                            if (StringUtils.isNumeric(sampleDp)) {
                                dp += Integer.parseInt(sampleDp);
                            }
                        }
                        file.addAttribute(splits[0], String.valueOf(dp));
                        break;
                    case "MQ":
                    case "MQ0":
                        // NOTE(review): MQ is recomputed as the sum of squared per-sample GQ
                        // values, and MQ0 counts samples with GQ == 0 - confirm this use of GQ
                        // (rather than a per-sample mapping quality) is intentional
                        int mq = 0;
                        int mq0 = 0;
                        for (Map<String, String> sampleData : file.getSamplesData()) {
                            String sampleGq = sampleData.get("GQ");
                            if (StringUtils.isNumeric(sampleGq)) {
                                int gq = Integer.parseInt(sampleGq);
                                mq += gq * gq;
                                if (gq == 0) {
                                    mq0++;
                                }
                            }
                        }
                        file.addAttribute("MQ", String.valueOf(mq));
                        file.addAttribute("MQ0", String.valueOf(mq0));
                        break;
                    default:
                        file.addAttribute(splits[0], splits[1]);
                        break;
                }
            } else {
                // flag-style INFO entries (no "=") are stored with an empty value
                variant.getSourceEntry(fileId, studyId).addAttribute(splits[0], "");
            }
        }
    }
    /** Normalized coordinates and alleles of a single alternate allele. */
    protected class VariantKeyFields {
        int start, end, numAllele;
        String reference, alternate;
        public VariantKeyFields(int start, int end, String reference, String alternate) {
            this.start = start;
            this.end = end;
            this.reference = reference;
            this.alternate = alternate;
        }
        public void setNumAllele(int numAllele) {
            this.numAllele = numAllele;
        }
        public int getNumAllele() {
            return numAllele;
        }
    }
    /**
     * In multiallelic variants, we have a list of alternates, where numAllele is the one whose variant we are parsing
     * now. If we are parsing the first variant (numAllele == 0) A1 refers to first alternative, (i.e.
     * alternateAlleles[0]), A2 to second alternative (alternateAlleles[1]), and so on. However, if numAllele == 1, A1
     * refers to second alternate (alternateAlleles[1]), A2 to first (alternateAlleles[0]) and higher alleles remain
     * unchanged. Moreover, if NumAllele == 2, A1 is third alternate, A2 is first alternate and A3 is second alternate.
     * It's also assumed that A0 would be the reference, so it remains unchanged too.
     * <p>
     * This pattern of the first allele moving along (and swapping) is what describes this function. Also, look
     * VariantVcfFactory.getSecondaryAlternates().
     *
     * @param parsedAllele the value of parsed alleles. e.g. 1 if genotype was "A1" (first allele).
     * @param numAllele current variant of the alternates.
     * @return the correct allele index depending on numAllele.
     */
    protected static int mapToMultiallelicIndex(int parsedAllele, int numAllele) {
        int correctedAllele = parsedAllele;
        if (parsedAllele > 0) {
            if (parsedAllele == numAllele + 1) {
                correctedAllele = 1;
            } else if (parsedAllele < numAllele + 1) {
                correctedAllele = parsedAllele + 1;
            }
        }
        return correctedAllele;
    }
}
|
|
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.ui.text;
import java.util.List;
import org.eclipse.osgi.util.TextProcessor;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseMoveListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Item;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.jface.dialogs.PopupDialog;
import org.eclipse.jface.viewers.ILabelProvider;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.jface.text.IInformationControl;
import org.eclipse.jface.text.IInformationControlExtension;
import org.eclipse.jface.text.IInformationControlExtension2;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IWorkbenchCommandConstants;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.commands.ActionHandler;
import org.eclipse.ui.commands.HandlerSubmission;
import org.eclipse.ui.commands.ICommand;
import org.eclipse.ui.commands.ICommandManager;
import org.eclipse.ui.commands.IKeySequenceBinding;
import org.eclipse.ui.commands.Priority;
import org.eclipse.ui.keys.KeySequence;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IParent;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.ui.actions.CustomFiltersActionGroup;
import org.eclipse.jdt.internal.ui.JavaPlugin;
import org.eclipse.jdt.internal.ui.javaeditor.EditorUtility;
import org.eclipse.jdt.internal.ui.util.StringMatcher;
/**
* Abstract class for Show hierarchy in light-weight controls.
*
* @since 2.1
*/
public abstract class AbstractInformationControl extends PopupDialog implements IInformationControl, IInformationControlExtension, IInformationControlExtension2, DisposeListener {

    /**
     * The NamePatternFilter selects the elements which
     * match the given string patterns.
     *
     * @since 2.0
     */
    protected class NamePatternFilter extends ViewerFilter {

        public NamePatternFilter() {
        }

        /*
         * @see org.eclipse.jface.viewers.ViewerFilter#select(org.eclipse.jface.viewers.Viewer, java.lang.Object, java.lang.Object)
         */
        @Override
        public boolean select(Viewer viewer, Object parentElement, Object element) {
            StringMatcher matcher= getMatcher();
            // No matcher or not a tree viewer: everything passes.
            if (matcher == null || !(viewer instanceof TreeViewer))
                return true;
            TreeViewer treeViewer= (TreeViewer) viewer;

            // Match against the label text; undo bidi text processing first.
            String matchName= ((ILabelProvider) treeViewer.getLabelProvider()).getText(element);
            matchName= TextProcessor.deprocess(matchName);
            if (matchName != null && matcher.match(matchName))
                return true;

            // Keep parents whose subtree contains at least one matching element.
            return hasUnfilteredChild(treeViewer, element);
        }

        // Returns true if any (transitive) child of 'element' passes the filter.
        private boolean hasUnfilteredChild(TreeViewer viewer, Object element) {
            if (element instanceof IParent) {
                Object[] children= ((ITreeContentProvider) viewer.getContentProvider()).getChildren(element);
                for (int i= 0; i < children.length; i++)
                    if (select(viewer, element, children[i]))
                        return true;
            }
            return false;
        }
    }

    /** The control's text widget */
    private Text fFilterText;
    /** The control's tree widget */
    private TreeViewer fTreeViewer;
    /** The current string matcher */
    protected StringMatcher fStringMatcher;
    /** The command that invoked this control, or <code>null</code>. */
    private ICommand fInvokingCommand;
    /** Lazily computed key sequences of the invoking command (see getInvokingCommandKeySequences()). */
    private KeySequence[] fInvokingCommandKeySequences;

    /**
     * Fields that support the dialog menu
     * @since 3.0
     * @since 3.2 - now appended to framework menu
     */
    private Composite fViewMenuButtonComposite;

    private CustomFiltersActionGroup fCustomFiltersActionGroup;

    private IAction fShowViewMenuAction;
    private HandlerSubmission fShowViewMenuHandlerSubmission;

    /**
     * Field for tree style since it must be remembered by the instance.
     *
     * @since 3.2
     */
    private int fTreeStyle;

    /**
     * The initially selected type.
     * @since 3.5
     */
    protected IType fInitiallySelectedType;

    /**
     * Creates a tree information control with the given shell as parent. The given
     * styles are applied to the shell and the tree widget.
     *
     * @param parent the parent shell
     * @param shellStyle the additional styles for the shell
     * @param treeStyle the additional styles for the tree widget
     * @param invokingCommandId the id of the command that invoked this control or <code>null</code>
     * @param showStatusField <code>true</code> iff the control has a status field at the bottom
     */
    public AbstractInformationControl(Shell parent, int shellStyle, int treeStyle, String invokingCommandId, boolean showStatusField) {
        super(parent, shellStyle, true, true, false, true, true, null, null);
        if (invokingCommandId != null) {
            ICommandManager commandManager= PlatformUI.getWorkbench().getCommandSupport().getCommandManager();
            fInvokingCommand= commandManager.getCommand(invokingCommandId);
            if (fInvokingCommand != null && !fInvokingCommand.isDefined())
                fInvokingCommand= null;
            else
                // Pre-fetch key sequence - do not change because scope will change later.
                getInvokingCommandKeySequences();
        }
        fTreeStyle= treeStyle;
        // Title and status text must be set to get the title label created, so force empty values here.
        if (hasHeader())
            setTitleText(""); //$NON-NLS-1$
        setInfoText(""); // //$NON-NLS-1$

        // Create all controls early to preserve the life cycle of the original implementation.
        create();

        // Status field text can only be computed after widgets are created.
        setInfoText(getStatusFieldText());
    }

    /**
     * Create the main content for this information control.
     *
     * @param parent The parent composite
     * @return The control representing the main content.
     * @since 3.2
     */
    @Override
    protected Control createDialogArea(Composite parent) {
        fTreeViewer= createTreeViewer(parent, fTreeStyle);

        fCustomFiltersActionGroup= new CustomFiltersActionGroup(getId(), fTreeViewer);

        final Tree tree= fTreeViewer.getTree();
        // ESC in the tree closes the popup.
        tree.addKeyListener(new KeyListener() {
            public void keyPressed(KeyEvent e) {
                if (e.character == 0x1B) // ESC
                    dispose();
            }
            public void keyReleased(KeyEvent e) {
                // do nothing
            }
        });

        // Double-click / Enter opens the selected element in an editor.
        tree.addSelectionListener(new SelectionListener() {
            public void widgetSelected(SelectionEvent e) {
                // do nothing
            }
            public void widgetDefaultSelected(SelectionEvent e) {
                gotoSelectedElement();
            }
        });

        // Track the mouse so hovering selects items and dragging near the
        // top/bottom edge scrolls the tree.
        tree.addMouseMoveListener(new MouseMoveListener() {
            TreeItem fLastItem= null;
            public void mouseMove(MouseEvent e) {
                if (tree.equals(e.getSource())) {
                    Object o= tree.getItem(new Point(e.x, e.y));
                    // XOR: exactly one of the two is null, i.e. we just moved
                    // onto or off an item - toggle the hand cursor accordingly.
                    if (fLastItem == null ^ o == null) {
                        tree.setCursor(o == null ? null : tree.getDisplay().getSystemCursor(SWT.CURSOR_HAND));
                    }
                    if (o instanceof TreeItem) {
                        Rectangle clientArea = tree.getClientArea();
                        if (!o.equals(fLastItem)) {
                            fLastItem= (TreeItem)o;
                            tree.setSelection(new TreeItem[] { fLastItem });
                        } else if (e.y - clientArea.y < tree.getItemHeight() / 4) {
                            // Scroll up
                            Point p= tree.toDisplay(e.x, e.y);
                            Item item= fTreeViewer.scrollUp(p.x, p.y);
                            if (item instanceof TreeItem) {
                                fLastItem= (TreeItem)item;
                                tree.setSelection(new TreeItem[] { fLastItem });
                            }
                        } else if (clientArea.y + clientArea.height - e.y < tree.getItemHeight() / 4) {
                            // Scroll down
                            Point p= tree.toDisplay(e.x, e.y);
                            Item item= fTreeViewer.scrollDown(p.x, p.y);
                            if (item instanceof TreeItem) {
                                fLastItem= (TreeItem)item;
                                tree.setSelection(new TreeItem[] { fLastItem });
                            }
                        }
                    } else if (o == null) {
                        fLastItem= null;
                    }
                }
            }
        });

        // Left-click on the already selected item opens it.
        tree.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseUp(MouseEvent e) {
                if (tree.getSelectionCount() < 1)
                    return;

                if (e.button != 1)
                    return;

                if (tree.equals(e.getSource())) {
                    Object o= tree.getItem(new Point(e.x, e.y));
                    TreeItem selection= tree.getSelection()[0];
                    if (selection.equals(o))
                        gotoSelectedElement();
                }
            }
        });

        installFilter();

        addDisposeListener(this);
        return fTreeViewer.getControl();
    }

    /**
     * Creates a tree information control with the given shell as parent. The given
     * styles are applied to the shell and the tree widget.
     *
     * @param parent the parent shell
     * @param shellStyle the additional styles for the shell
     * @param treeStyle the additional styles for the tree widget
     */
    public AbstractInformationControl(Shell parent, int shellStyle, int treeStyle) {
        this(parent, shellStyle, treeStyle, null, false);
    }

    /**
     * Creates the tree viewer shown in this control.
     *
     * @param parent the parent composite
     * @param style the tree style
     * @return the tree viewer
     */
    protected abstract TreeViewer createTreeViewer(Composite parent, int style);

    /**
     * Returns the name of the dialog settings section.
     *
     * @return the name of the dialog settings section
     */
    protected abstract String getId();

    /**
     * Returns the tree viewer, or <code>null</code> after disposal.
     *
     * @return the tree viewer
     */
    protected TreeViewer getTreeViewer() {
        return fTreeViewer;
    }

    /**
     * Returns <code>true</code> if the control has a header, <code>false</code> otherwise.
     * <p>
     * The default is to return <code>false</code>.
     * </p>
     *
     * @return <code>true</code> if the control has a header
     */
    protected boolean hasHeader() {
        // default is to have no header
        return false;
    }

    /**
     * Returns the filter text widget, or <code>null</code> after disposal.
     *
     * @return the filter text widget
     */
    protected Text getFilterText() {
        return fFilterText;
    }

    /**
     * Creates the filter text widget and wires its key handling:
     * Return opens the selected element, arrow keys move focus to the tree,
     * ESC closes the popup.
     *
     * @param parent the parent composite
     * @return the filter text widget
     */
    protected Text createFilterText(Composite parent) {
        fFilterText= new Text(parent, SWT.NONE);
        Dialog.applyDialogFont(fFilterText);

        GridData data= new GridData(GridData.FILL_HORIZONTAL);
        data.horizontalAlignment= GridData.FILL;
        data.verticalAlignment= GridData.CENTER;
        fFilterText.setLayoutData(data);

        fFilterText.addKeyListener(new KeyListener() {
            public void keyPressed(KeyEvent e) {
                if (e.keyCode == 0x0D) // return
                    gotoSelectedElement();
                if (e.keyCode == SWT.ARROW_DOWN)
                    fTreeViewer.getTree().setFocus();
                if (e.keyCode == SWT.ARROW_UP)
                    fTreeViewer.getTree().setFocus();
                if (e.character == 0x1B) // ESC
                    dispose();
            }
            public void keyReleased(KeyEvent e) {
                // do nothing
            }
        });

        return fFilterText;
    }

    /**
     * Creates a dotted horizontal separator line filling the parent's width.
     *
     * @param parent the parent composite
     */
    protected void createHorizontalSeparator(Composite parent) {
        Label separator= new Label(parent, SWT.SEPARATOR | SWT.HORIZONTAL | SWT.LINE_DOT);
        separator.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
    }

    /**
     * Updates the status field with the current status field text.
     */
    protected void updateStatusFieldText() {
        setInfoText(getStatusFieldText());
    }

    /**
     * Returns the text shown in the status field. Subclasses may override;
     * the default is the empty string.
     *
     * @return the status field text
     */
    protected String getStatusFieldText() {
        return ""; //$NON-NLS-1$
    }

    /**
     * Installs the name pattern filter: each edit of the filter text updates
     * the string matcher. A trailing '*' is appended so typing matches prefixes.
     */
    private void installFilter() {
        fFilterText.setText(""); //$NON-NLS-1$

        fFilterText.addModifyListener(new ModifyListener() {
            public void modifyText(ModifyEvent e) {
                String text= ((Text) e.widget).getText();
                int length= text.length();
                if (length > 0 && text.charAt(length -1 ) != '*') {
                    text= text + '*';
                }
                setMatcherString(text, true);
            }
        });
    }

    /**
     * The string matcher has been modified. The default implementation
     * refreshes the view and selects the first matched element
     */
    protected void stringMatcherUpdated() {
        // refresh viewer to re-filter
        fTreeViewer.getControl().setRedraw(false);
        fTreeViewer.refresh();
        fTreeViewer.expandAll();
        selectFirstMatch();
        fTreeViewer.getControl().setRedraw(true);
    }

    /**
     * Sets the patterns to filter out for the receiver.
     * <p>
     * The following characters have special meaning:
     *   ? => any character
     *   * => any string
     * </p>
     *
     * @param pattern the pattern
     * @param update <code>true</code> if the viewer should be updated
     */
    protected void setMatcherString(String pattern, boolean update) {
        if (pattern.length() == 0) {
            fStringMatcher= null;
        } else {
            // All-lowercase pattern means case-insensitive matching.
            boolean ignoreCase= pattern.toLowerCase().equals(pattern);
            fStringMatcher= new StringMatcher(pattern, ignoreCase, false);
        }

        if (update)
            stringMatcherUpdated();
    }

    /**
     * Returns the current string matcher, or <code>null</code> if no filter is set.
     *
     * @return the current string matcher
     */
    protected StringMatcher getMatcher() {
        return fStringMatcher;
    }

    /**
     * Implementers can modify
     *
     * @return the selected element
     */
    protected Object getSelectedElement() {
        if (fTreeViewer == null)
            return null;

        return ((IStructuredSelection) fTreeViewer.getSelection()).getFirstElement();
    }

    /**
     * Closes this control and opens the currently selected element in an editor,
     * revealing it if it is a Java element.
     */
    private void gotoSelectedElement() {
        Object selectedElement= getSelectedElement();
        if (selectedElement != null) {
            try {
                // Dispose before opening the editor so focus moves cleanly.
                dispose();
                IEditorPart part= EditorUtility.openInEditor(selectedElement, true);
                if (part != null && selectedElement instanceof IJavaElement)
                    EditorUtility.revealInEditor(part, (IJavaElement) selectedElement);
            } catch (CoreException ex) {
                JavaPlugin.log(ex);
            }
        }
    }

    /**
     * Selects the first element in the tree which
     * matches the current filter pattern.
     */
    protected void selectFirstMatch() {
        Object selectedElement= fTreeViewer.testFindItem(fInitiallySelectedType);
        TreeItem element;
        final Tree tree= fTreeViewer.getTree();
        if (selectedElement instanceof TreeItem)
            element= findElement(new TreeItem[] { (TreeItem)selectedElement });
        else
            element= findElement(tree.getItems());

        if (element != null) {
            tree.setSelection(element);
            tree.showItem(element);
        } else
            fTreeViewer.setSelection(StructuredSelection.EMPTY);
    }

    // Convenience entry point for the recursive search below.
    private TreeItem findElement(TreeItem[] items) {
        return findElement(items, null, true);
    }

    /**
     * Recursively searches for the first tree item whose label matches the
     * current string matcher: first among the given items, then one level
     * down, then (if allowed) one level up, skipping items already visited.
     *
     * @param items the items to search
     * @param toBeSkipped items to exclude from the search, or <code>null</code>
     * @param allowToGoUp whether the search may continue at the parent level
     * @return the first matching item, or <code>null</code> if none matches
     */
    private TreeItem findElement(TreeItem[] items, TreeItem[] toBeSkipped, boolean allowToGoUp) {
        if (fStringMatcher == null)
            return items.length > 0 ? items[0] : null;

        ILabelProvider labelProvider= (ILabelProvider)fTreeViewer.getLabelProvider();

        // First search at same level
        for (int i= 0; i < items.length; i++) {
            final TreeItem item= items[i];
            IJavaElement element= (IJavaElement)item.getData();
            if (element != null) {
                String label= labelProvider.getText(element);
                if (fStringMatcher.match(label))
                    return item;
            }
        }

        // Go one level down for each item
        for (int i= 0; i < items.length; i++) {
            final TreeItem item= items[i];
            TreeItem foundItem= findElement(selectItems(item.getItems(), toBeSkipped), null, false);
            if (foundItem != null)
                return foundItem;
        }

        if (!allowToGoUp || items.length == 0)
            return null;

        // Go one level up (parent is the same for all items)
        TreeItem parentItem= items[0].getParentItem();
        if (parentItem != null)
            return findElement(new TreeItem[] { parentItem }, items, true);

        // Check root elements
        return findElement(selectItems(items[0].getParent().getItems(), items), null, false);
    }

    // Returns true iff 'item' is contained in 'toBeSkipped' (identity compare).
    private boolean canSkip(TreeItem item, TreeItem[] toBeSkipped) {
        if (toBeSkipped == null)
            return false;

        for (int i= 0; i < toBeSkipped.length; i++) {
            if (toBeSkipped[i] == item)
                return true;
        }
        return false;
    }

    /**
     * Filters 'toBeSkipped' out of 'items'. Note: compacts 'items' in place
     * and returns it unchanged when nothing was skipped.
     *
     * @param items the items to filter
     * @param toBeSkipped the items to remove, or <code>null</code>
     * @return the remaining items
     */
    private TreeItem[] selectItems(TreeItem[] items, TreeItem[] toBeSkipped) {
        if (toBeSkipped == null || toBeSkipped.length == 0)
            return items;

        int j= 0;
        for (int i= 0; i < items.length; i++) {
            TreeItem item= items[i];
            if (!canSkip(item, toBeSkipped))
                items[j++]= item;
        }
        if (j == items.length)
            return items;

        TreeItem[] result= new TreeItem[j];
        System.arraycopy(items, 0, result, 0, j);
        return result;
    }

    /**
     * {@inheritDoc}
     */
    public void setInformation(String information) {
        // this method is ignored, see IInformationControlExtension2
    }

    /**
     * {@inheritDoc}
     */
    public abstract void setInput(Object information);

    /**
     * Fills the view menu.
     * Clients can extend or override.
     *
     * @param viewMenu the menu manager that manages the menu
     * @since 3.0
     */
    protected void fillViewMenu(IMenuManager viewMenu) {
        fCustomFiltersActionGroup.fillViewMenu(viewMenu);
    }

    /*
     * Overridden to call the old framework method.
     *
     * @see org.eclipse.jface.dialogs.PopupDialog#fillDialogMenu(IMenuManager)
     * @since 3.2
     */
    @Override
    protected void fillDialogMenu(IMenuManager dialogMenu) {
        super.fillDialogMenu(dialogMenu);
        fillViewMenu(dialogMenu);
    }

    /**
     * Resets the filter, remembers the initially selected type (or its
     * enclosing type) and hands input and selection to the tree viewer.
     *
     * @param newInput the new viewer input
     * @param newSelection the element to select, or <code>null</code>
     */
    protected void inputChanged(Object newInput, Object newSelection) {
        fFilterText.setText(""); //$NON-NLS-1$
        fInitiallySelectedType= null;
        if (newSelection instanceof IJavaElement) {
            IJavaElement javaElement= ((IJavaElement)newSelection);
            if (javaElement.getElementType() == IJavaElement.TYPE)
                fInitiallySelectedType= (IType)javaElement;
            else
                fInitiallySelectedType= (IType)javaElement.getAncestor(IJavaElement.TYPE);
        }
        fTreeViewer.setInput(newInput);
        if (newSelection != null)
            fTreeViewer.setSelection(new StructuredSelection(newSelection));
    }

    /**
     * {@inheritDoc}
     */
    public void setVisible(boolean visible) {
        if (visible) {
            open();
        } else {
            // Hide rather than dispose; drop key binding support and remember bounds.
            removeHandlerAndKeyBindingSupport();
            saveDialogBounds(getShell());
            getShell().setVisible(false);
        }
    }

    /*
     * @see org.eclipse.jface.dialogs.PopupDialog#open()
     * @since 3.3
     */
    @Override
    public int open() {
        addHandlerAndKeyBindingSupport();
        return super.open();
    }

    /**
     * {@inheritDoc}
     */
    public final void dispose() {
        close();
    }

    /**
     * {@inheritDoc}
     * @param event can be null
     * <p>
     * Subclasses may extend.
     * </p>
     */
    public void widgetDisposed(DisposeEvent event) {
        removeHandlerAndKeyBindingSupport();
        // Widgets are disposed with the shell; clear the references.
        fTreeViewer= null;
        fFilterText= null;
    }

    /**
     * Adds handler and key binding support.
     *
     * @since 3.2
     */
    protected void addHandlerAndKeyBindingSupport() {
        // Register action with command support
        if (fShowViewMenuHandlerSubmission == null) {
            fShowViewMenuHandlerSubmission= new HandlerSubmission(null, getShell(), null, fShowViewMenuAction.getActionDefinitionId(), new ActionHandler(fShowViewMenuAction), Priority.MEDIUM);
            PlatformUI.getWorkbench().getCommandSupport().addHandlerSubmission(fShowViewMenuHandlerSubmission);
        }
    }

    /**
     * Removes handler and key binding support.
     *
     * @since 3.2
     */
    protected void removeHandlerAndKeyBindingSupport() {
        // Remove handler submission
        if (fShowViewMenuHandlerSubmission != null)
            PlatformUI.getWorkbench().getCommandSupport().removeHandlerSubmission(fShowViewMenuHandlerSubmission);
    }

    /**
     * {@inheritDoc}
     */
    public boolean hasContents() {
        return fTreeViewer != null && fTreeViewer.getInput() != null;
    }

    /**
     * {@inheritDoc}
     */
    public void setSizeConstraints(int maxWidth, int maxHeight) {
        // ignore
    }

    /**
     * {@inheritDoc}
     */
    public Point computeSizeHint() {
        // return the shell's size - note that it already has the persisted size if persisting
        // is enabled.
        return getShell().getSize();
    }

    /**
     * {@inheritDoc}
     */
    public void setLocation(Point location) {
        /*
         * If the location is persisted, it gets managed by PopupDialog - fine. Otherwise, the location is
         * computed in Window#getInitialLocation, which will center it in the parent shell / main
         * monitor, which is wrong for two reasons:
         * - we want to center over the editor / subject control, not the parent shell
         * - the center is computed via the initalSize, which may be also wrong since the size may
         *   have been updated since via min/max sizing of AbstractInformationControlManager.
         * In that case, override the location with the one computed by the manager. Note that
         * the call to constrainShellSize in PopupDialog.open will still ensure that the shell is
         * entirely visible.
         */
        if (!getPersistLocation() || getDialogSettings() == null)
            getShell().setLocation(location);
    }

    /**
     * {@inheritDoc}
     */
    public void setSize(int width, int height) {
        getShell().setSize(width, height);
    }

    /**
     * {@inheritDoc}
     */
    public void addDisposeListener(DisposeListener listener) {
        getShell().addDisposeListener(listener);
    }

    /**
     * {@inheritDoc}
     */
    public void removeDisposeListener(DisposeListener listener) {
        getShell().removeDisposeListener(listener);
    }

    /**
     * {@inheritDoc}
     */
    public void setForegroundColor(Color foreground) {
        applyForegroundColor(foreground, getContents());
    }

    /**
     * {@inheritDoc}
     */
    public void setBackgroundColor(Color background) {
        applyBackgroundColor(background, getContents());
    }

    /**
     * {@inheritDoc}
     */
    public boolean isFocusControl() {
        return getShell().getDisplay().getActiveShell() == getShell();
    }

    /**
     * {@inheritDoc}
     */
    public void setFocus() {
        getShell().forceFocus();
        fFilterText.setFocus();
    }

    /**
     * {@inheritDoc}
     */
    public void addFocusListener(FocusListener listener) {
        getShell().addFocusListener(listener);
    }

    /**
     * {@inheritDoc}
     */
    public void removeFocusListener(FocusListener listener) {
        getShell().removeFocusListener(listener);
    }

    /**
     * Returns the command that invoked this control, or <code>null</code>.
     *
     * @return the invoking command
     */
    final protected ICommand getInvokingCommand() {
        return fInvokingCommand;
    }

    /**
     * Returns the key sequences bound to the invoking command, computing and
     * caching them on first call. May return <code>null</code> if there is no
     * invoking command or it has no key sequence bindings.
     *
     * @return the cached key sequences, or <code>null</code>
     */
    final protected KeySequence[] getInvokingCommandKeySequences() {
        if (fInvokingCommandKeySequences == null) {
            if (getInvokingCommand() != null) {
                List<IKeySequenceBinding> list= getInvokingCommand().getKeySequenceBindings();
                if (!list.isEmpty()) {
                    fInvokingCommandKeySequences= new KeySequence[list.size()];
                    for (int i= 0; i < fInvokingCommandKeySequences.length; i++) {
                        fInvokingCommandKeySequences[i]= list.get(i).getKeySequence();
                    }
                    return fInvokingCommandKeySequences;
                }
            }
        }
        return fInvokingCommandKeySequences;
    }

    /*
     * @see org.eclipse.jface.dialogs.PopupDialog#getDialogSettings()
     */
    @Override
    protected IDialogSettings getDialogSettings() {
        String sectionName= getId();

        // Create the settings section on demand.
        IDialogSettings settings= JavaPlugin.getDefault().getDialogSettings().getSection(sectionName);
        if (settings == null)
            settings= JavaPlugin.getDefault().getDialogSettings().addNewSection(sectionName);

        return settings;
    }

    /*
     * Overridden to insert the filter text into the title and menu area.
     *
     * @since 3.2
     */
    @Override
    protected Control createTitleMenuArea(Composite parent) {
        fViewMenuButtonComposite= (Composite) super.createTitleMenuArea(parent);

        // If there is a header, then the filter text must be created
        // underneath the title and menu area.
        if (hasHeader()) {
            fFilterText= createFilterText(parent);
        }

        // Create show view menu action
        fShowViewMenuAction= new Action("showViewMenu") { //$NON-NLS-1$
            /*
             * @see org.eclipse.jface.action.Action#run()
             */
            @Override
            public void run() {
                showDialogMenu();
            }
        };
        fShowViewMenuAction.setEnabled(true);
        fShowViewMenuAction.setActionDefinitionId(IWorkbenchCommandConstants.WINDOW_SHOW_VIEW_MENU);

        return fViewMenuButtonComposite;
    }

    /*
     * Overridden to insert the filter text into the title control
     * if there is no header specified.
     * @since 3.2
     */
    @Override
    protected Control createTitleControl(Composite parent) {
        if (hasHeader()) {
            return super.createTitleControl(parent);
        }
        fFilterText= createFilterText(parent);
        return fFilterText;
    }

    /*
     * @see org.eclipse.jface.dialogs.PopupDialog#setTabOrder(org.eclipse.swt.widgets.Composite)
     */
    @Override
    protected void setTabOrder(Composite composite) {
        if (hasHeader()) {
            composite.setTabList(new Control[] { fFilterText, fTreeViewer.getTree() });
        } else {
            fViewMenuButtonComposite.setTabList(new Control[] { fFilterText });
            composite.setTabList(new Control[] { fViewMenuButtonComposite, fTreeViewer.getTree() });
        }
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.container;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger;
import org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger.AuditConstants;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationContainerFinishedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncherEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncherEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalResourceRequest;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationCleanupEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerContainerFinishedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStartMonitoringEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStopMonitoringEvent;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService.RecoveredContainerState;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService.RecoveredContainerStatus;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.state.InvalidStateTransitionException;
import org.apache.hadoop.yarn.state.MultipleArcTransition;
import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
import org.apache.hadoop.yarn.util.resource.Resources;
public class ContainerImpl implements Container {
  // Read/write halves of one ReentrantReadWriteLock (see constructor).
  private final Lock readLock;
  private final Lock writeLock;
  private final Dispatcher dispatcher;
  // Store used to persist container state for recovery after an NM restart.
  private final NMStateStoreService stateStore;
  private final Credentials credentials;
  private final NodeManagerMetrics metrics;
  private final ContainerLaunchContext launchContext;
  private final ContainerTokenIdentifier containerTokenIdentifier;
  private final ContainerId containerId;
  // volatile: replaced wholesale on recovery when the capability changed
  // before the NM went down; may be read without holding a lock.
  private volatile Resource resource;
  // Application submitter, taken from the container token.
  private final String user;
  private int version;
  private int exitCode = ContainerExitStatus.INVALID;
  private final StringBuilder diagnostics;
  private boolean wasLaunched;
  private long containerLocalizationStartTime;
  private long containerLaunchStartTime;
  private static Clock clock = new SystemClock();

  /** The NM-wide configuration - not specific to this container */
  private final Configuration daemonConf;

  private static final Log LOG = LogFactory.getLog(ContainerImpl.class);

  // Resources requested but not yet localized. NOTE(review): the value list
  // presumably holds the link names requested for the resource - confirm
  // against the localization transitions.
  private final Map<LocalResourceRequest,List<String>> pendingResources =
    new HashMap<LocalResourceRequest,List<String>>();
  // Resources already localized, keyed by their local path.
  private final Map<Path,List<String>> localizedResources =
    new HashMap<Path,List<String>>();
  // Resource requests, apparently partitioned by visibility (public /
  // private / application) judging by the field names.
  private final List<LocalResourceRequest> publicRsrcs =
    new ArrayList<LocalResourceRequest>();
  private final List<LocalResourceRequest> privateRsrcs =
    new ArrayList<LocalResourceRequest>();
  private final List<LocalResourceRequest> appRsrcs =
    new ArrayList<LocalResourceRequest>();
  // Shared-cache upload bookkeeping; concurrent maps, so presumably touched
  // from more than one thread.
  private final Map<LocalResourceRequest, Path> resourcesToBeUploaded =
    new ConcurrentHashMap<LocalResourceRequest, Path>();
  private final Map<LocalResourceRequest, Boolean> resourcesUploadPolicies =
    new ConcurrentHashMap<LocalResourceRequest, Boolean>();

  // whether container has been recovered after a restart
  private RecoveredContainerStatus recoveredStatus =
      RecoveredContainerStatus.REQUESTED;
  // whether container was marked as killed after recovery
  private boolean recoveredAsKilled = false;
  private Context context;
  /**
   * Creates a new (non-recovered) container. Copies identity, version,
   * resource and submitter out of the container token, wires the container to
   * the NM dispatcher and state store, and builds the per-instance state
   * machine.
   *
   * @param conf the NM-wide configuration (not specific to this container)
   * @param dispatcher dispatcher used to emit container-related events
   * @param launchContext launch context supplied with the start request
   * @param creds credentials for this container
   * @param metrics node manager metrics sink
   * @param containerTokenIdentifier token carrying the container's identity,
   *          version, resource and application submitter
   * @param context NM context; supplies the recovery state store
   */
  public ContainerImpl(Configuration conf, Dispatcher dispatcher,
      ContainerLaunchContext launchContext, Credentials creds,
      NodeManagerMetrics metrics,
      ContainerTokenIdentifier containerTokenIdentifier, Context context) {
    this.daemonConf = conf;
    this.dispatcher = dispatcher;
    this.stateStore = context.getNMStateStore();
    this.version = containerTokenIdentifier.getVersion();
    this.launchContext = launchContext;
    this.containerTokenIdentifier = containerTokenIdentifier;
    this.containerId = containerTokenIdentifier.getContainerID();
    this.resource = containerTokenIdentifier.getResource();
    this.diagnostics = new StringBuilder();
    this.credentials = creds;
    this.metrics = metrics;
    user = containerTokenIdentifier.getApplicationSubmitter();
    // Both lock fields come from the same ReentrantReadWriteLock instance.
    ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
    this.readLock = readWriteLock.readLock();
    this.writeLock = readWriteLock.writeLock();
    this.context = context;

    // Keep last: the factory captures "this", so all fields must be set first.
    stateMachine = stateMachineFactory.make(this);
  }
// constructor for a recovered container
public ContainerImpl(Configuration conf, Dispatcher dispatcher,
ContainerLaunchContext launchContext, Credentials creds,
NodeManagerMetrics metrics,
ContainerTokenIdentifier containerTokenIdentifier, Context context,
RecoveredContainerState rcs) {
this(conf, dispatcher, launchContext, creds, metrics,
containerTokenIdentifier, context);
this.recoveredStatus = rcs.getStatus();
this.exitCode = rcs.getExitCode();
this.recoveredAsKilled = rcs.getKilled();
this.diagnostics.append(diagnostics);
Resource recoveredCapability = rcs.getCapability();
if (recoveredCapability != null
&& !this.resource.equals(recoveredCapability)) {
// resource capability had been updated before NM was down
this.resource = Resource.newInstance(recoveredCapability.getMemorySize(),
recoveredCapability.getVirtualCores());
}
this.version = rcs.getVersion();
}
  // Single shared transition instance, reused by every state's
  // UPDATE_DIAGNOSTICS_MSG arc in the state machine definition below.
  private static final ContainerDiagnosticsUpdateTransition UPDATE_DIAGNOSTICS_TRANSITION =
      new ContainerDiagnosticsUpdateTransition();
// State Machine for each container.
private static StateMachineFactory
<ContainerImpl, ContainerState, ContainerEventType, ContainerEvent>
stateMachineFactory =
new StateMachineFactory<ContainerImpl, ContainerState, ContainerEventType, ContainerEvent>(ContainerState.NEW)
// From NEW State
.addTransition(ContainerState.NEW,
EnumSet.of(ContainerState.LOCALIZING,
ContainerState.LOCALIZED,
ContainerState.LOCALIZATION_FAILED,
ContainerState.DONE),
ContainerEventType.INIT_CONTAINER, new RequestResourcesTransition())
.addTransition(ContainerState.NEW, ContainerState.NEW,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
.addTransition(ContainerState.NEW, ContainerState.DONE,
ContainerEventType.KILL_CONTAINER, new KillOnNewTransition())
// From LOCALIZING State
.addTransition(ContainerState.LOCALIZING,
EnumSet.of(ContainerState.LOCALIZING, ContainerState.LOCALIZED),
ContainerEventType.RESOURCE_LOCALIZED, new LocalizedTransition())
.addTransition(ContainerState.LOCALIZING,
ContainerState.LOCALIZATION_FAILED,
ContainerEventType.RESOURCE_FAILED,
new ResourceFailedTransition())
.addTransition(ContainerState.LOCALIZING, ContainerState.LOCALIZING,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
.addTransition(ContainerState.LOCALIZING, ContainerState.KILLING,
ContainerEventType.KILL_CONTAINER,
new KillDuringLocalizationTransition())
// From LOCALIZATION_FAILED State
.addTransition(ContainerState.LOCALIZATION_FAILED,
ContainerState.DONE,
ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP,
new LocalizationFailedToDoneTransition())
.addTransition(ContainerState.LOCALIZATION_FAILED,
ContainerState.LOCALIZATION_FAILED,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
// container not launched so kill is a no-op
.addTransition(ContainerState.LOCALIZATION_FAILED,
ContainerState.LOCALIZATION_FAILED,
ContainerEventType.KILL_CONTAINER)
// container cleanup triggers a release of all resources
// regardless of whether they were localized or not
// LocalizedResource handles release event in all states
.addTransition(ContainerState.LOCALIZATION_FAILED,
ContainerState.LOCALIZATION_FAILED,
ContainerEventType.RESOURCE_LOCALIZED)
.addTransition(ContainerState.LOCALIZATION_FAILED,
ContainerState.LOCALIZATION_FAILED,
ContainerEventType.RESOURCE_FAILED)
// From LOCALIZED State
.addTransition(ContainerState.LOCALIZED, ContainerState.RUNNING,
ContainerEventType.CONTAINER_LAUNCHED, new LaunchTransition())
.addTransition(ContainerState.LOCALIZED, ContainerState.EXITED_WITH_FAILURE,
ContainerEventType.CONTAINER_EXITED_WITH_FAILURE,
new ExitedWithFailureTransition(true))
.addTransition(ContainerState.LOCALIZED, ContainerState.LOCALIZED,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
.addTransition(ContainerState.LOCALIZED, ContainerState.KILLING,
ContainerEventType.KILL_CONTAINER, new KillTransition())
// From RUNNING State
.addTransition(ContainerState.RUNNING,
ContainerState.EXITED_WITH_SUCCESS,
ContainerEventType.CONTAINER_EXITED_WITH_SUCCESS,
new ExitedWithSuccessTransition(true))
.addTransition(ContainerState.RUNNING,
ContainerState.EXITED_WITH_FAILURE,
ContainerEventType.CONTAINER_EXITED_WITH_FAILURE,
new ExitedWithFailureTransition(true))
.addTransition(ContainerState.RUNNING, ContainerState.RUNNING,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
.addTransition(ContainerState.RUNNING, ContainerState.KILLING,
ContainerEventType.KILL_CONTAINER, new KillTransition())
.addTransition(ContainerState.RUNNING, ContainerState.EXITED_WITH_FAILURE,
ContainerEventType.CONTAINER_KILLED_ON_REQUEST,
new KilledExternallyTransition())
// From CONTAINER_EXITED_WITH_SUCCESS State
.addTransition(ContainerState.EXITED_WITH_SUCCESS, ContainerState.DONE,
ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP,
new ExitedWithSuccessToDoneTransition())
.addTransition(ContainerState.EXITED_WITH_SUCCESS,
ContainerState.EXITED_WITH_SUCCESS,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
.addTransition(ContainerState.EXITED_WITH_SUCCESS,
ContainerState.EXITED_WITH_SUCCESS,
ContainerEventType.KILL_CONTAINER)
// From EXITED_WITH_FAILURE State
.addTransition(ContainerState.EXITED_WITH_FAILURE, ContainerState.DONE,
ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP,
new ExitedWithFailureToDoneTransition())
.addTransition(ContainerState.EXITED_WITH_FAILURE,
ContainerState.EXITED_WITH_FAILURE,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
.addTransition(ContainerState.EXITED_WITH_FAILURE,
ContainerState.EXITED_WITH_FAILURE,
ContainerEventType.KILL_CONTAINER)
// From KILLING State.
.addTransition(ContainerState.KILLING,
ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL,
ContainerEventType.CONTAINER_KILLED_ON_REQUEST,
new ContainerKilledTransition())
.addTransition(ContainerState.KILLING,
ContainerState.KILLING,
ContainerEventType.RESOURCE_LOCALIZED,
new LocalizedResourceDuringKillTransition())
.addTransition(ContainerState.KILLING,
ContainerState.KILLING,
ContainerEventType.RESOURCE_FAILED)
.addTransition(ContainerState.KILLING, ContainerState.KILLING,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
.addTransition(ContainerState.KILLING, ContainerState.KILLING,
ContainerEventType.KILL_CONTAINER)
.addTransition(ContainerState.KILLING, ContainerState.EXITED_WITH_SUCCESS,
ContainerEventType.CONTAINER_EXITED_WITH_SUCCESS,
new ExitedWithSuccessTransition(false))
.addTransition(ContainerState.KILLING, ContainerState.EXITED_WITH_FAILURE,
ContainerEventType.CONTAINER_EXITED_WITH_FAILURE,
new ExitedWithFailureTransition(false))
.addTransition(ContainerState.KILLING,
ContainerState.DONE,
ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP,
new KillingToDoneTransition())
// Handle a launched container during killing stage is a no-op
// as cleanup container is always handled after launch container event
// in the container launcher
.addTransition(ContainerState.KILLING,
ContainerState.KILLING,
ContainerEventType.CONTAINER_LAUNCHED)
// From CONTAINER_CLEANEDUP_AFTER_KILL State.
.addTransition(ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL,
ContainerState.DONE,
ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP,
new ContainerCleanedupAfterKillToDoneTransition())
.addTransition(ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL,
ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
.addTransition(ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL,
ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL,
EnumSet.of(ContainerEventType.KILL_CONTAINER,
ContainerEventType.RESOURCE_FAILED,
ContainerEventType.CONTAINER_EXITED_WITH_SUCCESS,
ContainerEventType.CONTAINER_EXITED_WITH_FAILURE))
// From DONE
.addTransition(ContainerState.DONE, ContainerState.DONE,
ContainerEventType.KILL_CONTAINER)
.addTransition(ContainerState.DONE, ContainerState.DONE,
ContainerEventType.INIT_CONTAINER)
.addTransition(ContainerState.DONE, ContainerState.DONE,
ContainerEventType.UPDATE_DIAGNOSTICS_MSG,
UPDATE_DIAGNOSTICS_TRANSITION)
// This transition may result when
// we notify container of failed localization if localizer thread (for
// that container) fails for some reason
.addTransition(ContainerState.DONE, ContainerState.DONE,
EnumSet.of(ContainerEventType.RESOURCE_FAILED,
ContainerEventType.CONTAINER_EXITED_WITH_SUCCESS,
ContainerEventType.CONTAINER_EXITED_WITH_FAILURE))
// create the topology tables
.installTopology();
private final StateMachine<ContainerState, ContainerEventType, ContainerEvent>
stateMachine;
/**
 * Maps the fine-grained internal {@link ContainerState} onto the
 * two-valued public API view: every pre-terminal state reports RUNNING;
 * DONE (and any unrecognized state, via the default arm) reports COMPLETE.
 */
// NOTE(review): reads the state machine without taking readLock; the
// callers visible in this file (e.g. cloneAndGetContainerStatus) hold the
// lock already — confirm external callers do the same.
public org.apache.hadoop.yarn.api.records.ContainerState getCurrentState() {
  switch (stateMachine.getCurrentState()) {
  case NEW:
  case LOCALIZING:
  case LOCALIZATION_FAILED:
  case LOCALIZED:
  case RUNNING:
  case EXITED_WITH_SUCCESS:
  case EXITED_WITH_FAILURE:
  case KILLING:
  case CONTAINER_CLEANEDUP_AFTER_KILL:
  case CONTAINER_RESOURCES_CLEANINGUP:
    return org.apache.hadoop.yarn.api.records.ContainerState.RUNNING;
  case DONE:
  default:
    return org.apache.hadoop.yarn.api.records.ContainerState.COMPLETE;
  }
}
/**
 * Returns the user that owns this container.
 *
 * <p>Taken under the read lock so the value is consistent with any
 * in-flight state transition.
 */
@Override
public String getUser() {
  readLock.lock();
  try {
    return user;
  } finally {
    readLock.unlock();
  }
}
/**
 * Returns the map of localized paths to their symlink names, or
 * {@code null} unless the container has fully reached LOCALIZED state.
 */
@Override
public Map<Path,List<String>> getLocalizedResources() {
  readLock.lock();
  try {
    // Resources are only exposed once localization has completed.
    return ContainerState.LOCALIZED == getContainerState()
        ? localizedResources
        : null;
  } finally {
    readLock.unlock();
  }
}
/** Returns the credentials associated with this container, read-locked. */
@Override
public Credentials getCredentials() {
  readLock.lock();
  try {
    return credentials;
  } finally {
    readLock.unlock();
  }
}
/** Returns the current internal state of the container's state machine. */
@Override
public ContainerState getContainerState() {
  readLock.lock();
  try {
    return stateMachine.getCurrentState();
  } finally {
    readLock.unlock();
  }
}
/** Returns the launch context this container was submitted with. */
@Override
public ContainerLaunchContext getLaunchContext() {
  readLock.lock();
  try {
    return launchContext;
  } finally {
    readLock.unlock();
  }
}
/**
 * Builds a fresh {@link ContainerStatus} snapshot (id, public state,
 * diagnostics, exit code, resource) under the read lock.
 */
@Override
public ContainerStatus cloneAndGetContainerStatus() {
  this.readLock.lock();
  try {
    return BuilderUtils.newContainerStatus(this.containerId,
        getCurrentState(), diagnostics.toString(), exitCode, getResource());
  } finally {
    this.readLock.unlock();
  }
}
/**
 * Builds the NM-internal status report used for RM resync/recovery,
 * including token-derived priority, creation time and node-label
 * expression, under the read lock.
 */
@Override
public NMContainerStatus getNMContainerStatus() {
  this.readLock.lock();
  try {
    return NMContainerStatus.newInstance(this.containerId, this.version,
        getCurrentState(), getResource(), diagnostics.toString(), exitCode,
        containerTokenIdentifier.getPriority(),
        containerTokenIdentifier.getCreationTime(),
        containerTokenIdentifier.getNodeLabelExpression());
  } finally {
    this.readLock.unlock();
  }
}
/** Returns this container's id; the reference is immutable, so no lock. */
@Override
public ContainerId getContainerId() {
  return this.containerId;
}
/**
 * Returns a defensive copy of the container's resource capability so
 * callers cannot mutate internal state.
 */
@Override
public Resource getResource() {
  return Resources.clone(this.resource);
}
/**
 * Replaces the container's resource capability (container resize) and
 * records the old-to-new delta in the NM metrics.
 */
// NOTE(review): unlike the accessors above, this swaps this.resource
// without holding writeLock — confirm callers serialize resize requests.
@Override
public void setResource(Resource targetResource) {
  Resource currentResource = getResource();
  this.resource = Resources.clone(targetResource);
  this.metrics.changeContainer(currentResource, targetResource);
}
/** Returns the token identifier this container was started with. */
@Override
public ContainerTokenIdentifier getContainerTokenIdentifier() {
  readLock.lock();
  try {
    return containerTokenIdentifier;
  } finally {
    readLock.unlock();
  }
}
/**
 * Fans out the "container finished" notifications: the owning
 * application, the resource monitor, and the log handler.
 */
@SuppressWarnings("unchecked")
private void sendFinishedEvents() {
  // Inform the application
  @SuppressWarnings("rawtypes")
  EventHandler eventHandler = dispatcher.getEventHandler();
  eventHandler.handle(new ApplicationContainerFinishedEvent(containerId));
  // Remove the container from the resource-monitor
  eventHandler.handle(new ContainerStopMonitoringEvent(containerId));
  // Tell the logService too
  eventHandler.handle(new LogHandlerContainerFinishedEvent(
      containerId, exitCode));
}
/**
 * Asks the ContainersLauncher to (re)start this container and stamps the
 * launch start time. A container recovered as already LAUNCHED is
 * re-acquired rather than launched from scratch.
 */
@SuppressWarnings("unchecked") // dispatcher not typed
private void sendLaunchEvent() {
  final ContainersLauncherEventType launcherEvent =
      recoveredStatus == RecoveredContainerStatus.LAUNCHED
          ? ContainersLauncherEventType.RECOVER_CONTAINER
          : ContainersLauncherEventType.LAUNCH_CONTAINER;
  containerLaunchStartTime = clock.getTime();
  dispatcher.getEventHandler().handle(
      new ContainersLauncherEvent(this, launcherEvent));
}
// Inform the ContainersMonitor to start monitoring the container's
// resource usage.
@SuppressWarnings("unchecked") // dispatcher not typed
private void sendContainerMonitorStartEvent() {
  // Launch duration (launch-event to now) is recorded in NM metrics.
  long launchDuration = clock.getTime() - containerLaunchStartTime;
  metrics.addContainerLaunchDuration(launchDuration);
  // Physical memory limit in bytes; the virtual limit is derived from it
  // via the configured vmem/pmem ratio.
  long pmemBytes = getResource().getMemorySize() * 1024 * 1024L;
  float pmemRatio = daemonConf.getFloat(
      YarnConfiguration.NM_VMEM_PMEM_RATIO,
      YarnConfiguration.DEFAULT_NM_VMEM_PMEM_RATIO);
  long vmemBytes = (long) (pmemRatio * pmemBytes);
  int cpuVcores = getResource().getVirtualCores();
  long localizationDuration = containerLaunchStartTime -
      containerLocalizationStartTime;
  dispatcher.getEventHandler().handle(
      new ContainerStartMonitoringEvent(containerId,
          vmemBytes, pmemBytes, cpuVcores, launchDuration,
          localizationDuration));
}
/**
 * Appends the given fragments to the container's diagnostics and
 * persists the updated text in the NM state store. Persistence is
 * best-effort: a store failure is logged, not propagated.
 */
private void addDiagnostics(String... diags) {
  for (String s : diags) {
    this.diagnostics.append(s);
  }
  try {
    stateStore.storeContainerDiagnostics(containerId, diagnostics);
  } catch (IOException e) {
    LOG.warn("Unable to update diagnostics in state store for "
        + containerId, e);
  }
}
/**
 * Asks the localization service to release every resource this container
 * requested, bucketed by visibility, so reference counts drop and files
 * can eventually be deleted.
 */
@SuppressWarnings("unchecked") // dispatcher not typed
public void cleanup() {
  Map<LocalResourceVisibility, Collection<LocalResourceRequest>> resources =
      new HashMap<LocalResourceVisibility,
                  Collection<LocalResourceRequest>>();
  if (!publicRsrcs.isEmpty()) {
    resources.put(LocalResourceVisibility.PUBLIC, publicRsrcs);
  }
  if (!privateRsrcs.isEmpty()) {
    resources.put(LocalResourceVisibility.PRIVATE, privateRsrcs);
  }
  if (!appRsrcs.isEmpty()) {
    resources.put(LocalResourceVisibility.APPLICATION, appRsrcs);
  }
  dispatcher.getEventHandler().handle(
      new ContainerLocalizationCleanupEvent(this, resources));
}
/** Default single-arc transition: consume the event, change state only. */
static class ContainerTransition implements
    SingleArcTransition<ContainerImpl, ContainerEvent> {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    // Just drain the event and change the state.
  }
}
/**
 * State transition when a NEW container receives the INIT_CONTAINER
 * message.
 *
 * If there are resources to localize, sends a
 * ContainerLocalizationRequest (INIT_CONTAINER_RESOURCES)
 * to the ResourceLocalizationManager and enters LOCALIZING state.
 *
 * If there are no resources to localize, sends LAUNCH_CONTAINER event
 * and enters LOCALIZED state directly.
 *
 * If there are any invalid resources specified, enters LOCALIZATION_FAILED
 * directly.
 */
@SuppressWarnings("unchecked") // dispatcher not typed
static class RequestResourcesTransition implements
    MultipleArcTransition<ContainerImpl,ContainerEvent,ContainerState> {
  @Override
  public ContainerState transition(ContainerImpl container,
      ContainerEvent event) {
    // Recovery shortcuts: a container that already completed, or was
    // killed before it ever launched, goes straight to DONE.
    if (container.recoveredStatus == RecoveredContainerStatus.COMPLETED) {
      container.sendFinishedEvents();
      return ContainerState.DONE;
    } else if (container.recoveredAsKilled &&
        container.recoveredStatus == RecoveredContainerStatus.REQUESTED) {
      // container was killed but never launched
      container.metrics.killedContainer();
      NMAuditLogger.logSuccess(container.user,
          AuditConstants.FINISH_KILLED_CONTAINER, "ContainerImpl",
          container.containerId.getApplicationAttemptId().getApplicationId(),
          container.containerId);
      container.metrics.releaseContainer(container.resource);
      container.sendFinishedEvents();
      return ContainerState.DONE;
    }
    final ContainerLaunchContext ctxt = container.launchContext;
    container.metrics.initingContainer();
    container.dispatcher.getEventHandler().handle(new AuxServicesEvent
        (AuxServicesEventType.CONTAINER_INIT, container));
    // Inform the AuxServices about the opaque serviceData
    Map<String,ByteBuffer> csd = ctxt.getServiceData();
    if (csd != null) {
      // This can happen more than once per Application as each container may
      // have distinct service data
      for (Map.Entry<String,ByteBuffer> service : csd.entrySet()) {
        container.dispatcher.getEventHandler().handle(
            new AuxServicesEvent(AuxServicesEventType.APPLICATION_INIT,
                container.user, container.containerId
                    .getApplicationAttemptId().getApplicationId(),
                service.getKey().toString(), service.getValue()));
      }
    }
    container.containerLocalizationStartTime = clock.getTime();
    // Send requests for public, private resources
    Map<String,LocalResource> cntrRsrc = ctxt.getLocalResources();
    if (!cntrRsrc.isEmpty()) {
      try {
        for (Map.Entry<String,LocalResource> rsrc : cntrRsrc.entrySet()) {
          try {
            LocalResourceRequest req =
                new LocalResourceRequest(rsrc.getValue());
            // Several symlink names may map onto one underlying resource,
            // so pendingResources tracks a list of links per request.
            List<String> links = container.pendingResources.get(req);
            if (links == null) {
              links = new ArrayList<String>();
              container.pendingResources.put(req, links);
            }
            links.add(rsrc.getKey());
            storeSharedCacheUploadPolicy(container, req, rsrc.getValue()
                .getShouldBeUploadedToSharedCache());
            switch (rsrc.getValue().getVisibility()) {
            case PUBLIC:
              container.publicRsrcs.add(req);
              break;
            case PRIVATE:
              container.privateRsrcs.add(req);
              break;
            case APPLICATION:
              container.appRsrcs.add(req);
              break;
            }
          } catch (URISyntaxException e) {
            LOG.info("Got exception parsing " + rsrc.getKey()
                + " and value " + rsrc.getValue());
            throw e;
          }
        }
      } catch (URISyntaxException e) {
        // malformed resource; abort container launch
        LOG.warn("Failed to parse resource-request", e);
        container.cleanup();
        container.metrics.endInitingContainer();
        return ContainerState.LOCALIZATION_FAILED;
      }
      // LinkedHashMap keeps the PUBLIC -> PRIVATE -> APPLICATION request
      // order stable for the localization service.
      Map<LocalResourceVisibility, Collection<LocalResourceRequest>> req =
          new LinkedHashMap<LocalResourceVisibility,
              Collection<LocalResourceRequest>>();
      if (!container.publicRsrcs.isEmpty()) {
        req.put(LocalResourceVisibility.PUBLIC, container.publicRsrcs);
      }
      if (!container.privateRsrcs.isEmpty()) {
        req.put(LocalResourceVisibility.PRIVATE, container.privateRsrcs);
      }
      if (!container.appRsrcs.isEmpty()) {
        req.put(LocalResourceVisibility.APPLICATION, container.appRsrcs);
      }
      container.dispatcher.getEventHandler().handle(
          new ContainerLocalizationRequestEvent(container, req));
      return ContainerState.LOCALIZING;
    } else {
      // Nothing to localize: go straight to LOCALIZED and launch.
      container.sendLaunchEvent();
      container.metrics.endInitingContainer();
      return ContainerState.LOCALIZED;
    }
  }
}
/**
 * Records the shared-cache upload policy for a resource request.
 *
 * <p>A LocalResourceRequest can be shared across containers (see
 * LocalResourcesTrackerImpl), and an application may create several
 * "identical" LocalResources with different symlinks and different
 * upload policies. The stored policy is therefore sticky-true: the first
 * sighting always wins, and afterwards only a false-to-true upgrade is
 * recorded, so the policy ends up true as long as at least one entry
 * asked for upload.
 */
private static void storeSharedCacheUploadPolicy(ContainerImpl container,
    LocalResourceRequest resourceRequest, Boolean uploadPolicy) {
  Boolean existingPolicy =
      container.resourcesUploadPolicies.get(resourceRequest);
  boolean mustStore =
      existingPolicy == null || (!existingPolicy && uploadPolicy);
  if (mustStore) {
    container.resourcesUploadPolicies.put(resourceRequest, uploadPolicy);
  }
}
/**
 * Transition when one of the requested resources for this container
 * has been successfully localized.
 */
static class LocalizedTransition implements
    MultipleArcTransition<ContainerImpl,ContainerEvent,ContainerState> {
  @SuppressWarnings("unchecked")
  @Override
  public ContainerState transition(ContainerImpl container,
      ContainerEvent event) {
    ContainerResourceLocalizedEvent rsrcEvent = (ContainerResourceLocalizedEvent) event;
    LocalResourceRequest resourceRequest = rsrcEvent.getResource();
    Path location = rsrcEvent.getLocation();
    List<String> syms = container.pendingResources.remove(resourceRequest);
    if (null == syms) {
      // A localized event for a resource we never asked for: log and stay
      // in LOCALIZING rather than failing the container.
      LOG.warn("Localized unknown resource " + resourceRequest +
          " for container " + container.containerId);
      assert false;
      // fail container?
      return ContainerState.LOCALIZING;
    }
    container.localizedResources.put(location, syms);
    // check to see if this resource should be uploaded to the shared cache
    // as well
    if (shouldBeUploadedToSharedCache(container, resourceRequest)) {
      container.resourcesToBeUploaded.put(resourceRequest, location);
    }
    // Stay in LOCALIZING until every pending resource has arrived.
    if (!container.pendingResources.isEmpty()) {
      return ContainerState.LOCALIZING;
    }
    container.dispatcher.getEventHandler().handle(
        new ContainerLocalizationEvent(LocalizationEventType.
            CONTAINER_RESOURCES_LOCALIZED, container));
    container.sendLaunchEvent();
    container.metrics.endInitingContainer();
    // If this is a recovered container that has already launched, skip
    // uploading resources to the shared cache. We do this to avoid uploading
    // the same resources multiple times. The tradeoff is that in the case of
    // a recovered container, there is a chance that resources don't get
    // uploaded into the shared cache. This is OK because resources are not
    // acknowledged by the SCM until they have been uploaded by the node
    // manager.
    if (container.recoveredStatus != RecoveredContainerStatus.LAUNCHED
        && container.recoveredStatus != RecoveredContainerStatus.COMPLETED) {
      // kick off uploads to the shared cache
      container.dispatcher.getEventHandler().handle(
          new SharedCacheUploadEvent(container.resourcesToBeUploaded, container
              .getLaunchContext(), container.getUser(),
              SharedCacheUploadEventType.UPLOAD));
    }
    return ContainerState.LOCALIZED;
  }
}
/**
 * Transition from LOCALIZED state to RUNNING state upon receiving
 * a CONTAINER_LAUNCHED event
 */
static class LaunchTransition extends ContainerTransition {
  @SuppressWarnings("unchecked")
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    container.sendContainerMonitorStartEvent();
    container.metrics.runningContainer();
    container.wasLaunched  = true;
    // A container recovered as killed must be cleaned up immediately,
    // since the kill arrived while the NM was down.
    if (container.recoveredAsKilled) {
      LOG.info("Killing " + container.containerId
          + " due to recovered as killed");
      container.addDiagnostics("Container recovered as killed.\n");
      container.dispatcher.getEventHandler().handle(
          new ContainersLauncherEvent(container,
              ContainersLauncherEventType.CLEANUP_CONTAINER));
    }
  }
}
/**
 * Transition from RUNNING or KILLING state to EXITED_WITH_SUCCESS state
 * upon EXITED_WITH_SUCCESS message.
 */
@SuppressWarnings("unchecked")  // dispatcher not typed
static class ExitedWithSuccessTransition extends ContainerTransition {
  // Whether the ContainersLauncher still needs a CLEANUP_CONTAINER event
  // (true when coming from RUNNING; false when KILLING already sent one).
  boolean clCleanupRequired;
  public ExitedWithSuccessTransition(boolean clCleanupRequired) {
    this.clCleanupRequired = clCleanupRequired;
  }
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    // Set exit code to 0 on success
    container.exitCode = 0;
    // TODO: Add containerWorkDir to the deletion service.
    if (clCleanupRequired) {
      container.dispatcher.getEventHandler().handle(
          new ContainersLauncherEvent(container,
              ContainersLauncherEventType.CLEANUP_CONTAINER));
    }
    container.cleanup();
  }
}
/**
 * Transition to EXITED_WITH_FAILURE state upon
 * CONTAINER_EXITED_WITH_FAILURE state.
 **/
@SuppressWarnings("unchecked")  // dispatcher not typed
static class ExitedWithFailureTransition extends ContainerTransition {
  // Whether the ContainersLauncher still needs a CLEANUP_CONTAINER event
  // (true when coming from RUNNING; false when KILLING already sent one).
  boolean clCleanupRequired;
  public ExitedWithFailureTransition(boolean clCleanupRequired) {
    this.clCleanupRequired = clCleanupRequired;
  }
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    ContainerExitEvent exitEvent = (ContainerExitEvent) event;
    container.exitCode = exitEvent.getExitCode();
    if (exitEvent.getDiagnosticInfo() != null) {
      container.addDiagnostics(exitEvent.getDiagnosticInfo(), "\n");
    }
    // TODO: Add containerWorkDir to the deletion service.
    // TODO: Add containerOuputDir to the deletion service.
    if (clCleanupRequired) {
      container.dispatcher.getEventHandler().handle(
          new ContainersLauncherEvent(container,
              ContainersLauncherEventType.CLEANUP_CONTAINER));
    }
    container.cleanup();
  }
}
/**
 * Transition to EXITED_WITH_FAILURE upon receiving KILLED_ON_REQUEST
 */
static class KilledExternallyTransition extends ExitedWithFailureTransition {
  KilledExternallyTransition() {
    // The launcher cleanup event is still required on this path.
    super(true);
  }
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    super.transition(container, event);
    container.addDiagnostics("Killed by external signal\n");
  }
}
/**
 * Transition from LOCALIZING to LOCALIZATION_FAILED upon receiving
 * RESOURCE_FAILED event.
 */
static class ResourceFailedTransition implements
    SingleArcTransition<ContainerImpl, ContainerEvent> {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    ContainerResourceFailedEvent rsrcFailedEvent =
        (ContainerResourceFailedEvent) event;
    container.addDiagnostics(rsrcFailedEvent.getDiagnosticMessage(), "\n");
    // Inform the localizer to decrement reference counts and cleanup
    // resources.
    container.cleanup();
    container.metrics.endInitingContainer();
  }
}
/**
 * Transition from LOCALIZING to KILLING upon receiving
 * KILL_CONTAINER event.
 */
static class KillDuringLocalizationTransition implements
    SingleArcTransition<ContainerImpl, ContainerEvent> {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    // Inform the localizer to decrement reference counts and cleanup
    // resources.
    container.cleanup();
    container.metrics.endInitingContainer();
    // Record the requested exit status and the kill reason before the
    // container ever launched.
    ContainerKillEvent killEvent = (ContainerKillEvent) event;
    container.exitCode = killEvent.getContainerExitStatus();
    container.addDiagnostics(killEvent.getDiagnostic(), "\n");
    container.addDiagnostics("Container is killed before being launched.\n");
  }
}
/**
 * Remain in KILLING state when receiving a RESOURCE_LOCALIZED request
 * while in the process of killing.
 */
static class LocalizedResourceDuringKillTransition implements
    SingleArcTransition<ContainerImpl, ContainerEvent> {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    ContainerResourceLocalizedEvent rsrcEvent = (ContainerResourceLocalizedEvent) event;
    List<String> syms =
        container.pendingResources.remove(rsrcEvent.getResource());
    if (null == syms) {
      // Localized event for a resource we never requested: log and ignore.
      LOG.warn("Localized unknown resource " + rsrcEvent.getResource() +
          " for container " + container.containerId);
      assert false;
      // fail container?
      return;
    }
    container.localizedResources.put(rsrcEvent.getLocation(), syms);
  }
}
/**
 * Transitions upon receiving KILL_CONTAINER:
 * - LOCALIZED -> KILLING
 * - RUNNING -> KILLING
 */
@SuppressWarnings("unchecked") // dispatcher not typed
static class KillTransition implements
    SingleArcTransition<ContainerImpl, ContainerEvent> {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    // Kill the process/process-grp
    container.dispatcher.getEventHandler().handle(
        new ContainersLauncherEvent(container,
            ContainersLauncherEventType.CLEANUP_CONTAINER));
    // Record the reason and the requested exit status for reporting.
    ContainerKillEvent killEvent = (ContainerKillEvent) event;
    container.addDiagnostics(killEvent.getDiagnostic(), "\n");
    container.exitCode = killEvent.getContainerExitStatus();
  }
}
/**
 * Transition from KILLING to CONTAINER_CLEANEDUP_AFTER_KILL
 * upon receiving CONTAINER_KILLED_ON_REQUEST.
 */
static class ContainerKilledTransition implements
    SingleArcTransition<ContainerImpl, ContainerEvent> {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    ContainerExitEvent exitEvent = (ContainerExitEvent) event;
    // Keep an exit code already set by the kill request; only adopt the
    // process exit code when none was recorded yet.
    if (container.hasDefaultExitCode()) {
      container.exitCode = exitEvent.getExitCode();
    }
    if (exitEvent.getDiagnosticInfo() != null) {
      container.addDiagnostics(exitEvent.getDiagnosticInfo(), "\n");
    }
    // The process/process-grp is killed. Decrement reference counts and
    // cleanup resources
    container.cleanup();
  }
}
/**
 * Handle the following transitions:
 * - {LOCALIZATION_FAILED, EXITED_WITH_SUCCESS, EXITED_WITH_FAILURE,
 *    KILLING, CONTAINER_CLEANEDUP_AFTER_KILL}
 *   -> DONE upon CONTAINER_RESOURCES_CLEANEDUP
 */
static class ContainerDoneTransition implements
    SingleArcTransition<ContainerImpl, ContainerEvent> {
  @Override
  @SuppressWarnings("unchecked")
  public void transition(ContainerImpl container, ContainerEvent event) {
    container.metrics.releaseContainer(container.resource);
    container.sendFinishedEvents();
    //if the current state is NEW it means the CONTAINER_INIT was never
    // sent for the event, thus no need to send the CONTAINER_STOP
    if (container.getCurrentState()
        != org.apache.hadoop.yarn.api.records.ContainerState.NEW) {
      container.dispatcher.getEventHandler().handle(new AuxServicesEvent
          (AuxServicesEventType.CONTAINER_STOP, container));
    }
    // Nudge the RM promptly rather than waiting for the next heartbeat.
    container.context.getNodeStatusUpdater().sendOutofBandHeartBeat();
  }
}
/**
 * Handle the following transition:
 * - NEW -> DONE upon KILL_CONTAINER
 */
static class KillOnNewTransition extends ContainerDoneTransition {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    if (container.recoveredStatus == RecoveredContainerStatus.COMPLETED) {
      // NOTE(review): this branch skips super.transition, so the metrics
      // release and CONTAINER_STOP logic do not run — confirm a recovered
      // COMPLETED container never acquired those in the first place.
      container.sendFinishedEvents();
    } else {
      ContainerKillEvent killEvent = (ContainerKillEvent) event;
      container.exitCode = killEvent.getContainerExitStatus();
      container.addDiagnostics(killEvent.getDiagnostic(), "\n");
      container.addDiagnostics("Container is killed before being launched.\n");
      container.metrics.killedContainer();
      NMAuditLogger.logSuccess(container.user,
          AuditConstants.FINISH_KILLED_CONTAINER, "ContainerImpl",
          container.containerId.getApplicationAttemptId().getApplicationId(),
          container.containerId);
      super.transition(container, event);
    }
  }
}
/**
 * Handle the following transition:
 * - LOCALIZATION_FAILED -> DONE upon CONTAINER_RESOURCES_CLEANEDUP
 */
static class LocalizationFailedToDoneTransition extends
    ContainerDoneTransition {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    container.metrics.failedContainer();
    NMAuditLogger.logFailure(container.user,
        AuditConstants.FINISH_FAILED_CONTAINER, "ContainerImpl",
        "Container failed with state: " + container.getContainerState(),
        container.containerId.getApplicationAttemptId().getApplicationId(),
        container.containerId);
    super.transition(container, event);
  }
}
/**
 * Handle the following transition:
 * - EXITED_WITH_SUCCESS -> DONE upon CONTAINER_RESOURCES_CLEANEDUP
 *
 * <p>Records the success in NM metrics and the audit log before the
 * common done handling.
 */
static class ExitedWithSuccessToDoneTransition extends
    ContainerDoneTransition {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    if (container.wasLaunched) {
      container.metrics.endRunningContainer();
    } else {
      // Fix: the two literals previously concatenated to
      // "...killed and notactually running" (missing space).
      LOG.warn("Container exited with success despite being killed and not " +
          "actually running");
    }
    container.metrics.completedContainer();
    NMAuditLogger.logSuccess(container.user,
        AuditConstants.FINISH_SUCCESS_CONTAINER, "ContainerImpl",
        container.containerId.getApplicationAttemptId().getApplicationId(),
        container.containerId);
    super.transition(container, event);
  }
}
/**
 * Handle the following transition:
 * - EXITED_WITH_FAILURE -> DONE upon CONTAINER_RESOURCES_CLEANEDUP
 */
static class ExitedWithFailureToDoneTransition extends
    ContainerDoneTransition {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    // Only close the running-container metric if the process ever started.
    if (container.wasLaunched) {
      container.metrics.endRunningContainer();
    }
    container.metrics.failedContainer();
    NMAuditLogger.logFailure(container.user,
        AuditConstants.FINISH_FAILED_CONTAINER, "ContainerImpl",
        "Container failed with state: " + container.getContainerState(),
        container.containerId.getApplicationAttemptId().getApplicationId(),
        container.containerId);
    super.transition(container, event);
  }
}
/**
 * Handle the following transition:
 * - KILLING -> DONE upon CONTAINER_RESOURCES_CLEANEDUP
 */
static class KillingToDoneTransition extends
    ContainerDoneTransition {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    container.metrics.killedContainer();
    NMAuditLogger.logSuccess(container.user,
        AuditConstants.FINISH_KILLED_CONTAINER, "ContainerImpl",
        container.containerId.getApplicationAttemptId().getApplicationId(),
        container.containerId);
    super.transition(container, event);
  }
}
/**
 * Handle the following transition:
 * CONTAINER_CLEANEDUP_AFTER_KILL -> DONE upon CONTAINER_RESOURCES_CLEANEDUP
 */
static class ContainerCleanedupAfterKillToDoneTransition extends
    ContainerDoneTransition {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    // Only close the running-container metric if the process ever started.
    if (container.wasLaunched) {
      container.metrics.endRunningContainer();
    }
    container.metrics.killedContainer();
    NMAuditLogger.logSuccess(container.user,
        AuditConstants.FINISH_KILLED_CONTAINER, "ContainerImpl",
        container.containerId.getApplicationAttemptId().getApplicationId(),
        container.containerId);
    super.transition(container, event);
  }
}
/**
 * Update diagnostics, staying in the same state.
 */
static class ContainerDiagnosticsUpdateTransition implements
    SingleArcTransition<ContainerImpl, ContainerEvent> {
  @Override
  public void transition(ContainerImpl container, ContainerEvent event) {
    ContainerDiagnosticsUpdateEvent updateEvent =
        (ContainerDiagnosticsUpdateEvent) event;
    container.addDiagnostics(updateEvent.getDiagnosticsUpdate(), "\n");
  }
}
/**
 * Dispatches a container event through the state machine under the write
 * lock. An illegal (state, event) pair is logged and otherwise ignored;
 * a successful transition to a new state is logged at INFO.
 */
@Override
public void handle(ContainerEvent event) {
  // Acquire the lock OUTSIDE the try block: if lock() were to fail inside
  // the try, the finally clause would attempt to unlock a lock this thread
  // never held, throwing IllegalMonitorStateException and masking the
  // original failure.
  this.writeLock.lock();
  try {
    ContainerId containerID = event.getContainerID();
    // Guard the concatenation so the debug string is only built when
    // debug logging is actually enabled.
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing " + containerID + " of type " + event.getType());
    }
    ContainerState oldState = stateMachine.getCurrentState();
    ContainerState newState = null;
    try {
      newState =
          stateMachine.doTransition(event.getType(), event);
    } catch (InvalidStateTransitionException e) {
      // Invalid events are tolerated: log and keep the current state.
      LOG.warn("Can't handle this event at current state: Current: ["
          + oldState + "], eventType: [" + event.getType() + "]", e);
    }
    if (oldState != newState) {
      LOG.info("Container " + containerID + " transitioned from "
          + oldState
          + " to " + newState);
    }
  } finally {
    this.writeLock.unlock();
  }
}
/** Renders the container as its id, read-locked for a consistent view. */
@Override
public String toString() {
  readLock.lock();
  try {
    return containerId.toString();
  } finally {
    readLock.unlock();
  }
}
/** @return whether no real exit code has been recorded for this container yet. */
private boolean hasDefaultExitCode() {
  return this.exitCode == ContainerExitStatus.INVALID;
}
/**
 * Returns whether the specific resource should be uploaded to the shared
 * cache.
 *
 * @param container container whose recorded upload policies are consulted
 * @param resource  the localized resource being considered for upload
 * @return {@code true} only when an upload policy of {@code true} was
 *         recorded for the resource; {@code false} when the policy is
 *         {@code false} or no policy was ever recorded
 */
private static boolean shouldBeUploadedToSharedCache(ContainerImpl container,
    LocalResourceRequest resource) {
  // Boolean.TRUE.equals is null-safe: the previous direct unboxing of
  // Map.get's result threw NullPointerException when no policy had been
  // stored for this resource.
  return Boolean.TRUE.equals(
      container.resourcesUploadPolicies.get(resource));
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.adapter.enumerable;
import org.apache.calcite.DataContext;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.linq4j.Enumerator;
import org.apache.calcite.linq4j.tree.BlockBuilder;
import org.apache.calcite.linq4j.tree.BlockStatement;
import org.apache.calcite.linq4j.tree.Blocks;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.apache.calcite.linq4j.tree.MemberDeclaration;
import org.apache.calcite.linq4j.tree.ParameterExpression;
import org.apache.calcite.linq4j.tree.Types;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Calc;
import org.apache.calcite.rex.RexProgram;
import org.apache.calcite.util.BuiltInMethod;
import org.apache.calcite.util.Pair;
import com.google.common.collect.ImmutableList;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.util.Collections;
import java.util.List;
import static org.apache.calcite.adapter.enumerable.EnumUtils.BRIDGE_METHODS;
import static org.apache.calcite.adapter.enumerable.EnumUtils.NO_EXPRS;
import static org.apache.calcite.adapter.enumerable.EnumUtils.NO_PARAMS;
/** Implementation of {@link org.apache.calcite.rel.core.Calc} in
* {@link org.apache.calcite.adapter.enumerable.EnumerableConvention enumerable calling convention}. */
public class EnumerableCalc extends Calc implements EnumerableRel {
  /**
   * Creates an EnumerableCalc.
   *
   * @param cluster       cluster this relational expression belongs to
   * @param traitSet      traits of this relational expression
   * @param child         input relational expression
   * @param program       program containing the projections and optional filter
   * @param collationList collations preserved by this calc
   */
  public EnumerableCalc(
      RelOptCluster cluster,
      RelTraitSet traitSet,
      RelNode child,
      RexProgram program,
      List<RelCollation> collationList) {
    super(cluster, traitSet, child, program, collationList);
    // Only valid in the enumerable convention, and a Calc must not contain
    // aggregate expressions.
    assert getConvention() instanceof EnumerableConvention;
    assert !program.containsAggs();
  }

  /** Creates a copy of this node with the given traits, input and program. */
  @Override public EnumerableCalc copy(RelTraitSet traitSet, RelNode child,
      RexProgram program, List<RelCollation> collationList) {
    // we do not need to copy program; it is immutable
    return new EnumerableCalc(getCluster(), traitSet, child,
        program, collationList);
  }

  /**
   * Generates a linq4j expression tree that evaluates this calc's
   * {@link RexProgram} over the child's rows: an anonymous
   * {@code Enumerable} whose {@code Enumerator} filters rows in
   * {@code moveNext()} and applies the projections in {@code current()}.
   */
  public Result implement(EnumerableRelImplementor implementor, Prefer pref) {
    final JavaTypeFactory typeFactory = implementor.getTypeFactory();
    final BlockBuilder builder = new BlockBuilder();
    final EnumerableRel child = (EnumerableRel) getInput();

    // Visit the child first; its physical row type drives the code below.
    final Result result =
        implementor.visitChild(this, 0, child, pref);

    final PhysType physType =
        PhysTypeImpl.of(
            typeFactory, getRowType(), pref.prefer(result.format));

    // Shape of the generated code:
    // final Enumerable<Employee> inputEnumerable = <<child adapter>>;
    // return new Enumerable<IntString>() {
    //     Enumerator<IntString> enumerator() {
    //         return new Enumerator<IntString>() {
    //             public void reset() {
    // ...
    Type outputJavaType = physType.getJavaRowType();
    final Type enumeratorType =
        Types.of(
            Enumerator.class, outputJavaType);
    Type inputJavaType = result.physType.getJavaRowType();
    ParameterExpression inputEnumerator =
        Expressions.parameter(
            Types.of(
                Enumerator.class, inputJavaType),
            "inputEnumerator");
    // Expression reading the input enumerator's current row, converted to
    // the input's java row type.
    Expression input =
        RexToLixTranslator.convert(
            Expressions.call(
                inputEnumerator,
                BuiltInMethod.ENUMERATOR_CURRENT.method),
            inputJavaType);

    // moveNext(): with no filter, delegate straight to the input; with a
    // filter, advance the input until a row satisfies the condition (return
    // true) or the input is exhausted (return false).
    BlockStatement moveNextBody;
    if (program.getCondition() == null) {
      moveNextBody =
          Blocks.toFunctionBlock(
              Expressions.call(
                  inputEnumerator,
                  BuiltInMethod.ENUMERATOR_MOVE_NEXT.method));
    } else {
      final BlockBuilder builder2 = new BlockBuilder();
      Expression condition =
          RexToLixTranslator.translateCondition(
              program,
              typeFactory,
              builder2,
              new RexToLixTranslator.InputGetterImpl(
                  Collections.singletonList(
                      Pair.of(input, result.physType))),
              implementor.allCorrelateVariables);
      builder2.add(
          Expressions.ifThen(
              condition,
              Expressions.return_(
                  null, Expressions.constant(true))));
      moveNextBody =
          Expressions.block(
              Expressions.while_(
                  Expressions.call(
                      inputEnumerator,
                      BuiltInMethod.ENUMERATOR_MOVE_NEXT.method),
                  builder2.toBlock()),
              Expressions.return_(
                  null,
                  Expressions.constant(false)));
    }

    // current(): evaluate the projections against the current input row and
    // assemble the output record.
    final BlockBuilder builder3 = new BlockBuilder();
    List<Expression> expressions =
        RexToLixTranslator.translateProjects(
            program,
            typeFactory,
            builder3,
            physType,
            DataContext.ROOT,
            new RexToLixTranslator.InputGetterImpl(
                Collections.singletonList(
                    Pair.of(input, result.physType))),
            implementor.allCorrelateVariables);
    builder3.add(
        Expressions.return_(
            null, physType.record(expressions)));
    BlockStatement currentBody =
        builder3.toBlock();

    final Expression inputEnumerable =
        builder.append(
            "inputEnumerable", result.block, false);
    // Anonymous Enumerator: reset/close delegate to the input enumerator;
    // moveNext and current use the bodies built above. When BRIDGE_METHODS
    // is set, current() is declared to return Object.
    final Expression body =
        Expressions.new_(
            enumeratorType,
            NO_EXPRS,
            Expressions.<MemberDeclaration>list(
                Expressions.fieldDecl(
                    Modifier.PUBLIC
                    | Modifier.FINAL,
                    inputEnumerator,
                    Expressions.call(
                        inputEnumerable,
                        BuiltInMethod.ENUMERABLE_ENUMERATOR.method)),
                EnumUtils.overridingMethodDecl(
                    BuiltInMethod.ENUMERATOR_RESET.method,
                    NO_PARAMS,
                    Blocks.toFunctionBlock(
                        Expressions.call(
                            inputEnumerator,
                            BuiltInMethod.ENUMERATOR_RESET.method))),
                EnumUtils.overridingMethodDecl(
                    BuiltInMethod.ENUMERATOR_MOVE_NEXT.method,
                    NO_PARAMS,
                    moveNextBody),
                EnumUtils.overridingMethodDecl(
                    BuiltInMethod.ENUMERATOR_CLOSE.method,
                    NO_PARAMS,
                    Blocks.toFunctionBlock(
                        Expressions.call(
                            inputEnumerator,
                            BuiltInMethod.ENUMERATOR_CLOSE.method))),
                Expressions.methodDecl(
                    Modifier.PUBLIC,
                    BRIDGE_METHODS
                        ? Object.class
                        : outputJavaType,
                    "current",
                    NO_PARAMS,
                    currentBody)));
    builder.add(
        Expressions.return_(
            null,
            Expressions.new_(
                BuiltInMethod.ABSTRACT_ENUMERABLE_CTOR.constructor,
                // TODO: generics
                //   Collections.singletonList(inputRowType),
                NO_EXPRS,
                ImmutableList.<MemberDeclaration>of(
                    Expressions.methodDecl(
                        Modifier.PUBLIC,
                        enumeratorType,
                        BuiltInMethod.ENUMERABLE_ENUMERATOR.method.getName(),
                        NO_PARAMS,
                        Blocks.toFunctionBlock(body))))));
    return implementor.result(physType, builder.toBlock());
  }

  /** Returns the program (projections and optional filter) of this calc. */
  public RexProgram getProgram() {
    return program;
  }
}
// End EnumerableCalc.java
|
|
package jp.gauzau.MikuMikuDroid;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import jp.gauzau.MikuMikuDroid.util.AccelerometerCalibratorR;
import jp.gauzau.MikuMikuDroid.util.FullScreenCompatWrapper;
import jp.gauzau.MikuMikuDroid.util.MotionDetector;
import jp.gauzau.MikuMikuDroid.util.OrientationEstimater;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.AlertDialog.Builder;
import android.content.DialogInterface;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.RelativeLayout;
import android.widget.RelativeLayout.LayoutParams;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.Toast;
/**
 * Main activity: hosts the GL surface and playback controls, loads
 * models/motions/music through {@link CoreLogic}, and feeds device sensor,
 * touch, key and gamepad input into the camera orientation estimator.
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public class MikuMikuDroid extends Activity implements SensorEventListener {
    // View
    private MMGLSurfaceView mMMGLSurfaceView;
    private RelativeLayout mRelativeLayout;
    private SeekBar mSeekBar;
    private Button mPlayPauseButton;
    private Button mRewindButton;
    private ScaleGestureDetector mScaleGestureDetector;
    private OrientationEstimater orientationEstimater = new OrientationEstimater();
    private MotionDetector motionDetector = new MotionDetector();
    private AccelerometerCalibratorR calibrator = new AccelerometerCalibratorR();
    // When true, estimated head position feeds camera offsets (see onSensorChanged).
    private boolean posTracking = false;

    // Model
    private CoreLogic mCoreLogic;

    // Sensor
    SensorManager mSM = null;
    Sensor mAx = null; // accelerometer
    Sensor mMg = null; // magnetic field

    /** Builds the UI, wires all listeners and restores the previous session. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        calibrator.load();
        mSM = (SensorManager) getSystemService(SENSOR_SERVICE);
        mAx = mSM.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        mMg = mSM.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
        mCoreLogic = new CoreLogic(this) {
            // Once the engine is initialized, restore the previous state on a
            // background task and size the seek bar to the content duration.
            @Override
            public void onInitialize() {
                MikuMikuDroid.this.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        AsyncExec<CoreLogic> ae = new AsyncExec<CoreLogic>(MikuMikuDroid.this) {
                            @Override
                            protected boolean exec(CoreLogic target) {
                                try {
                                    mCoreLogic.restoreState();
                                    // sic: "getDulation" is the CoreLogic API spelling
                                    final int max = target.getDulation();
                                    mSeekBar.post(new Runnable() {
                                        @Override
                                        public void run() {
                                            mSeekBar.setMax(max);
                                        }
                                    });
                                } catch (OutOfMemoryError e) {
                                    return false;
                                }
                                return true;
                            }

                            @Override
                            public void post() {
                                if (mFail.size() != 0) {
                                    Toast.makeText(MikuMikuDroid.this, "Out of Memory. Abort.", Toast.LENGTH_LONG).show();
                                }
                            }
                        };
                        ae.setMax(1);
                        ae.setMessage("Restoring Previous state...");
                        ae.execute(mCoreLogic);
                    }
                });
            }

            // Keep the seek bar in sync with the playback position.
            @Override
            public void onDraw(final int pos) {
                MikuMikuDroid.this.mSeekBar.post(new Runnable() {
                    @Override
                    public void run() {
                        MikuMikuDroid.this.mSeekBar.setProgress(pos);
                    }
                });
            }
        };
        mCoreLogic.setScreenAngle(0);
        if (mCoreLogic.getCameraMode() == CoreLogic.CAMERA_MODE_SENSOR2) {
            cameraPos[2] = 0;
        }
        mCoreLogic.setCameraPosition(cameraPos);

        setContentView(R.layout.main);
        mRelativeLayout = (RelativeLayout) findViewById(R.id.content);
        findViewById(R.id.reset).setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Reset camera orientation and position to defaults.
                orientationEstimater.reset();
                mCoreLogic.setCameraPosition(cameraPos);
            }
        });
        findViewById(R.id.openMenu).setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                openOptionsMenu();
            }
        });
        findViewById(R.id.viewMode).setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                mCoreLogic.toggleViewMode();
            }
        });
        ((CompoundButton) findViewById(R.id.oculusToggle)).setOnCheckedChangeListener(new OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                mCoreLogic.enableOculusMode = isChecked;
            }
        });
        ((CompoundButton) findViewById(R.id.postrackToggle)).setOnCheckedChangeListener(new OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                posTracking = isChecked;
            }
        });

        // Oculus enable
        mCoreLogic.enableOculusMode = ((CompoundButton) findViewById(R.id.oculusToggle)).isChecked();
        posTracking = ((CompoundButton) findViewById(R.id.postrackToggle)).isChecked();

        //mRelativeLayout = new RelativeLayout(this);
        //mRelativeLayout.setVerticalGravity(Gravity.BOTTOM);
        mMMGLSurfaceView = new MMGLSurfaceView(this, mCoreLogic);

        // Seek bar pinned to the bottom of the layout.
        LayoutParams p = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
        p.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
        mSeekBar = new SeekBar(this);
        mSeekBar.setLayoutParams(p);
        // Arbitrary non-zero view id so sibling views can anchor to it.
        mSeekBar.setId(1024);
        mSeekBar.setVisibility(SeekBar.INVISIBLE);
        mSeekBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
            // Remembers whether playback was running when scrubbing started.
            private boolean mIsPlaying = false;

            @Override
            public void onProgressChanged(SeekBar seekBar, final int progress, boolean fromUser) {
                if (fromUser) {
                    mCoreLogic.seekTo(progress);
                }
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
                // Pause while scrubbing; resume afterwards if it was playing.
                if (mCoreLogic.isPlaying()) {
                    mCoreLogic.pause();
                    mIsPlaying = true;
                }
            }

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
                if (mIsPlaying) {
                    mCoreLogic.toggleStartStop();
                    mIsPlaying = false;
                }
            }
        });

        // Play/pause button centered above the seek bar.
        p = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        p.addRule(RelativeLayout.CENTER_HORIZONTAL);
        p.addRule(RelativeLayout.ABOVE, mSeekBar.getId());
        p.setMargins(5, 5, 5, 60);
        mPlayPauseButton = new Button(this);
        mPlayPauseButton.setLayoutParams(p);
        mPlayPauseButton.setVisibility(Button.INVISIBLE);
        mPlayPauseButton.setBackgroundResource(R.drawable.ic_media_play);
        mPlayPauseButton.setId(mSeekBar.getId() + 1);
        mPlayPauseButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Icon reflects the new playback state returned by the toggle.
                if (mCoreLogic.toggleStartStop()) {
                    mPlayPauseButton.setBackgroundResource(R.drawable.ic_media_pause);
                } else {
                    mPlayPauseButton.setBackgroundResource(R.drawable.ic_media_play);
                }
            }
        });

        // Rewind button to the left of play/pause.
        p = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        p.addRule(RelativeLayout.ABOVE, mSeekBar.getId());
        p.addRule(RelativeLayout.LEFT_OF, mPlayPauseButton.getId());
        p.setMargins(5, 5, 60, 60);
        mRewindButton = new Button(this);
        mRewindButton.setLayoutParams(p);
        mRewindButton.setVisibility(Button.INVISIBLE);
        mRewindButton.setBackgroundResource(R.drawable.ic_media_previous);
        mRewindButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                mCoreLogic.rewind();
            }
        });

        mRelativeLayout.addView(mMMGLSurfaceView);
        mRelativeLayout.addView(mSeekBar);
        mRelativeLayout.addView(mPlayPauseButton);
        mRelativeLayout.addView(mRewindButton);
        // setContentView(mRelativeLayout);

        if (mCoreLogic.checkFileIsPrepared() == false) {
            /*
            AlertDialog.Builder ad;
            ad = new AlertDialog.Builder(this);
            ad.setTitle(R.string.setup_alert_title);
            ad.setMessage(R.string.setup_alert_text);
            ad.setPositiveButton(R.string.select_ok, null);
            ad.show();
            */
        }

        // Pinch gesture moves the camera along the view axis.
        mScaleGestureDetector = new ScaleGestureDetector(this,
                new ScaleGestureDetector.SimpleOnScaleGestureListener() {
                    @Override
                    public boolean onScaleBegin(ScaleGestureDetector detector) {
                        Log.d("", "onScaleBegin : " + detector.getScaleFactor());
                        return super.onScaleBegin(detector);
                    }

                    @Override
                    public void onScaleEnd(ScaleGestureDetector detector) {
                        Log.d("", "onScaleEnd : " + detector.getScaleFactor());
                        super.onScaleEnd(detector);
                    }

                    @Override
                    public boolean onScale(ScaleGestureDetector detector) {
                        float d = detector.getCurrentSpan() - detector.getPreviousSpan();
                        if (mCoreLogic.getCameraMode() == CoreLogic.CAMERA_MODE_SENSOR2) {
                            // Keep the distance from going negative while zooming in.
                            if (d < 0 || mCoreLogic.cameraDistance > 0) {
                                mCoreLogic.cameraDistance -= d * 0.1f;
                            }
                        } else {
                            float cameraPosition[] = mCoreLogic.getCameraPositionAsRef();
                            orientationEstimater.translateInDisplay(cameraPosition, 0, 0, -d);
                        }
                        return true;
                    };
                });
    }

    @Override
    protected void onStart() {
        super.onStart();
        FullScreenCompatWrapper.fullScreen(mRelativeLayout, true);
    }

    /** Forces landscape, resumes GL rendering and re-registers sensors. */
    @Override
    protected void onResume() {
        super.onResume();
        this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        mRelativeLayout.setKeepScreenOn(true);
        mMMGLSurfaceView.onResume();
        if (mAx != null && mMg != null) {
            mSM.registerListener(this, mAx, SensorManager.SENSOR_DELAY_FASTEST);
            mSM.registerListener(this, mMg, SensorManager.SENSOR_DELAY_GAME);
            // Gyroscope is optional; use it when present.
            Sensor gs = mSM.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
            if (gs != null) {
                mSM.registerListener(this, gs, SensorManager.SENSOR_DELAY_FASTEST);
            }
        }
    }

    /** Pauses playback and rendering and unregisters sensor listeners. */
    @Override
    protected void onPause() {
        super.onPause();
        mCoreLogic.pause();
        mMMGLSurfaceView.onPause();
        if (mAx != null || mMg != null) {
            mSM.unregisterListener(this);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        boolean ret = super.onCreateOptionsMenu(menu);
        menu.add(0, Menu.FIRST, Menu.NONE, R.string.menu_load_model);
        menu.add(0, Menu.FIRST + 1, Menu.NONE, R.string.menu_load_camera);
        menu.add(0, Menu.FIRST + 2, Menu.NONE, R.string.menu_load_music);
        // NOTE(review): id Menu.FIRST + 3 is skipped here — confirm intentional.
        menu.add(0, Menu.FIRST + 4, Menu.NONE, R.string.menu_toggle_physics);
        menu.add(0, Menu.FIRST + 5, Menu.NONE, R.string.menu_initialize);
        return ret;
    }

    /**
     * Handles the options menu: load model (+motion), load camera motion,
     * load music, toggle physics, or clear the scene.
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case (Menu.FIRST + 0):
            final File[] sc0 = mCoreLogic.getModelSelector();
            openSelectDialog(sc0, R.string.menu_load_model, R.string.setup_alert_pmd, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    final String model = sc0[which].getPath();
                    // read as background if not .pmd
                    if (!model.endsWith(".pmd") && !model.endsWith(".pmx")) {
                        if (model.endsWith(".x")) { // accessory
                            AsyncExec<CoreLogic> ae = new AsyncExec<CoreLogic>(MikuMikuDroid.this) {
                                @Override
                                protected boolean exec(CoreLogic target) {
                                    try {
                                        mCoreLogic.loadAccessory(model);
                                        mCoreLogic.storeState();
                                    } catch (OutOfMemoryError e) {
                                        return false;
                                    } catch (IOException e) {
                                        // TODO Auto-generated catch block
                                        e.printStackTrace();
                                    }
                                    return true;
                                }

                                @Override
                                public void post() {
                                    if (mFail.size() != 0) {
                                        Toast.makeText(MikuMikuDroid.this, "Out of Memory. Abort.", Toast.LENGTH_LONG).show();
                                    }
                                }
                            };
                            ae.setMax(1);
                            ae.setMessage("Loading Model/Motion...");
                            ae.execute(mCoreLogic);
                        } else {
                            mMMGLSurfaceView.deleteTexture(mCoreLogic.loadBG(model));
                        }
                        return;
                    }
                    // For .pmd/.pmx models, ask for a motion next.
                    final File[] sc = mCoreLogic.getMotionSelector();
                    openMotionSelectDialog(sc, R.string.menu_load_motion, R.string.setup_alert_vmd, new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, final int which) {
                            // which == 0 is the prepended "Load as Background" entry.
                            final String motion = which == 0 ? null : sc[which - 1].getPath();
                            AsyncExec<CoreLogic> ae = new AsyncExec<CoreLogic>(MikuMikuDroid.this) {
                                @Override
                                protected boolean exec(CoreLogic target) {
                                    try {
                                        if (which == 0) {
                                            // Load the model as a static stage.
                                            MikuModel m = target.loadStage(model);
                                            if (m != null) {
                                                ArrayList<MikuModel> mm = new ArrayList<MikuModel>(1);
                                                mm.add(m);
                                                mMMGLSurfaceView.deleteTextures(mm);
                                            }
                                        } else {
                                            target.loadModelMotion(model, motion);
                                            final int max = target.getDulation();
                                            mSeekBar.post(new Runnable() {
                                                @Override
                                                public void run() {
                                                    mSeekBar.setMax(max);
                                                }
                                            });
                                        }
                                        mCoreLogic.storeState();
                                    } catch (OutOfMemoryError e) {
                                        e.printStackTrace();
                                        return false;
                                    } catch (IOException e) {
                                        // TODO Auto-generated catch block
                                        e.printStackTrace();
                                    }
                                    return true;
                                }

                                @Override
                                public void post() {
                                    if (mFail.size() != 0) {
                                        Toast.makeText(MikuMikuDroid.this, "Out of Memory. Abort.", Toast.LENGTH_LONG).show();
                                    }
                                }
                            };
                            ae.setMax(1);
                            ae.setMessage("Loading Model/Motion...");
                            ae.execute(mCoreLogic);
                        }
                    });
                }
            });
            break;
        case (Menu.FIRST + 1):
            final File[] sc1 = mCoreLogic.getCameraSelector();
            openSelectDialog(sc1, R.string.menu_load_camera, R.string.setup_alert_vmd, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    final String camera = sc1[which].getPath();
                    new AsyncTask<Void, Void, Void>() {
                        @Override
                        protected Void doInBackground(Void... params) {
                            try {
                                mCoreLogic.loadCamera(camera);
                                mCoreLogic.storeState();
                                final int max = mCoreLogic.getDulation();
                                mSeekBar.post(new Runnable() {
                                    @Override
                                    public void run() {
                                        mSeekBar.setMax(max);
                                    }
                                });
                            } catch (IOException e) {
                                // TODO Auto-generated catch block
                                e.printStackTrace();
                            }
                            return null;
                        }
                    }.execute();
                }
            });
            break;
        case (Menu.FIRST + 2):
            final File[] sc2 = mCoreLogic.getMediaSelector();
            openSelectDialog(sc2, R.string.menu_load_music, R.string.setup_alert_music, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    final String media = "file://" + sc2[which].getPath();
                    new AsyncTask<Void, Void, Void>() {
                        @Override
                        protected Void doInBackground(Void... params) {
                            mCoreLogic.loadMedia(media);
                            mCoreLogic.storeState();
                            final int max = mCoreLogic.getDulation();
                            mSeekBar.post(new Runnable() {
                                @Override
                                public void run() {
                                    mSeekBar.setMax(max);
                                }
                            });
                            return null;
                        }
                    }.execute();
                }
            });
            break;
        case (Menu.FIRST + 4):
            mCoreLogic.togglePhysics();
            break;
        case (Menu.FIRST + 5):
            // Clear the scene and release its GL textures.
            mMMGLSurfaceView.deleteTextures(mCoreLogic.clear());
            break;
        default:
            ;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Shows a list dialog of the given files; when {@code item} is null,
     * shows an alert with the given {@code alert} message instead.
     */
    private void openSelectDialog(File[] item, int title, int alert, DialogInterface.OnClickListener task) {
        Builder ad = new AlertDialog.Builder(this);
        if (item == null) {
            ad.setTitle(R.string.setup_alert_title);
            ad.setMessage(alert);
            ad.setPositiveButton(R.string.select_ok, null);
        } else {
            ad.setTitle(title);
            String[] is = new String[item.length];
            for (int i = 0; i < item.length; i++) {
                is[i] = item[i].getName();
                //int idx = is[i].lastIndexOf(".");
                //is[i] = is[i].substring(0, idx);
            }
            ad.setItems(is, task);
        }
        ad.show();
    }

    /**
     * Like {@link #openSelectDialog} but prepends a fixed
     * "Load as Background" entry and strips file extensions from the
     * displayed names.
     */
    private void openMotionSelectDialog(File[] item, int title, int alert, DialogInterface.OnClickListener task) {
        Builder ad = new AlertDialog.Builder(this);
        if (item == null) {
            ad.setTitle(R.string.setup_alert_title);
            ad.setMessage(alert);
            ad.setPositiveButton(R.string.select_ok, null);
        } else {
            ad.setTitle(title);
            String[] is = new String[item.length + 1];
            is[0] = "Load as Background";
            for (int i = 1; i < is.length; i++) {
                is[i] = item[i - 1].getName();
                int idx = is[i].lastIndexOf(".");
                is[i] = is[i].substring(0, idx);
            }
            ad.setItems(is, task);
        }
        ad.show();
    }

    // Last touch point (midpoint for two-finger gestures); 0f means "unset".
    float touchX = 0f;
    float touchY = 0f;

    /**
     * Toggles the control overlay on tap and converts one-finger drags into
     * camera rotation and two-finger drags into translation; pinch zoom is
     * delegated to {@link #mScaleGestureDetector}.
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_UP) {
            if (mCoreLogic.isPlaying()) {
                mPlayPauseButton.setBackgroundResource(R.drawable.ic_media_pause);
            } else {
                mPlayPauseButton.setBackgroundResource(R.drawable.ic_media_play);
            }
            if (mPlayPauseButton.getVisibility() == View.VISIBLE) {
                FullScreenCompatWrapper.fullScreen(mRelativeLayout, true);
            }
            // Toggle visibility of all playback controls together.
            int visibility = mPlayPauseButton.getVisibility() == View.VISIBLE ? View.INVISIBLE : View.VISIBLE;
            findViewById(R.id.top_menu).setVisibility(visibility);
            mSeekBar.setVisibility(visibility);
            mPlayPauseButton.setVisibility(visibility);
            mRewindButton.setVisibility(visibility);
            mRelativeLayout.requestLayout();
        }
        if (event.getActionMasked() == MotionEvent.ACTION_POINTER_DOWN) {
            if (event.getPointerCount() == 2) {
                touchX = (event.getX(0) + event.getX(1)) / 2f;
                touchY = (event.getY(0) + event.getY(1)) / 2f;
            }
        }
        if (event.getActionMasked() == MotionEvent.ACTION_POINTER_UP) {
            touchX = 0f;
        }
        if (event.getAction() == MotionEvent.ACTION_DOWN) {
            touchX = event.getX();
            touchY = event.getY();
        }
        if (event.getAction() == MotionEvent.ACTION_MOVE && event.getPointerCount() == 1) {
            if (touchX == 0f) {
                touchX = event.getX();
                touchY = event.getY();
            } else {
                float dx = event.getX() - touchX;
                float dy = event.getY() - touchY;
                touchX = event.getX();
                touchY = event.getY();
                // Drag direction is inverted in SENSOR2 camera mode.
                if (mCoreLogic.getCameraMode() == CoreLogic.CAMERA_MODE_SENSOR2) {
                    dx = -dx;
                    dy = -dy;
                }
                orientationEstimater.rotateInDisplay(dx, dy);
                // mCoreLogic.setCameraOrientation(orientationEstimater.getCurrentOrientation());
                mCoreLogic.setSensorRotationMatrix(orientationEstimater.getRotationMatrix());
            }
        }
        if (event.getAction() == MotionEvent.ACTION_MOVE && event.getPointerCount() == 2) {
            float dy = (event.getY(0) + event.getY(1)) / 2f - touchY;
            float dx = (event.getX(0) + event.getX(1)) / 2f - touchX;
            touchX = (event.getX(0) + event.getX(1)) / 2f;
            touchY = (event.getY(0) + event.getY(1)) / 2f;
            float cameraPosition[] = mCoreLogic.getCameraPositionAsRef();
            orientationEstimater.translateInDisplay(cameraPosition, dx, dy, 0);
        }
        final boolean isInProgres = mScaleGestureDetector.isInProgress();
        mScaleGestureDetector.onTouchEvent(event);
        return isInProgres || mScaleGestureDetector.isInProgress();
    }

    // NOTE(review): does not call super.onSaveInstanceState(bundle) — confirm intentional.
    @Override
    public void onSaveInstanceState(Bundle bundle) {
        mCoreLogic.storeState();
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        mCoreLogic.storeState();
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
        // TODO Auto-generated method stub
    }

    /**
     * Feeds sensor samples into the orientation/motion estimators, applies
     * any held analog-stick/key camera rotation, and pushes the resulting
     * rotation matrix (and tracked position, when enabled) into CoreLogic.
     */
    @Override
    public void onSensorChanged(SensorEvent event) {
        orientationEstimater.onSensorEvent(event);
        motionDetector.onSensorEvent(event);
        if (motionDetector.jump) {
            Log.d("", "JUMP!");
            //mCoreLogic.cameraJump();
        }
        if (!orientationEstimater.isReady()) return;
        // Analog axes and held keys rotate the camera a little every sample.
        if (Math.abs(mCoreLogic.analogInput[0]) > 0.1) {
            orientationEstimater.rotate(0, -mCoreLogic.analogInput[0] * 0.005f);
        }
        if (Math.abs(mCoreLogic.analogInput[4]) > 0.1) {
            orientationEstimater.rotate(-mCoreLogic.analogInput[4] * 0.005f, 0);
        }
        if (Math.abs(mCoreLogic.analogInput[5]) > 0.1) {
            orientationEstimater.rotate(mCoreLogic.analogInput[5] * 0.005f, 0);
        }
        if (mCoreLogic.keyState[KeyEvent.KEYCODE_A] || mCoreLogic.keyState[KeyEvent.KEYCODE_PAGE_UP]) {
            orientationEstimater.rotate(0, 0.01f);
        }
        if (mCoreLogic.keyState[KeyEvent.KEYCODE_D] || mCoreLogic.keyState[KeyEvent.KEYCODE_PAGE_DOWN]) {
            orientationEstimater.rotate(0, -0.01f);
        }
        if (mCoreLogic.keyState[KeyEvent.KEYCODE_F]) {
            orientationEstimater.rotate(-0.01f, 0);
        }
        if (mCoreLogic.keyState[KeyEvent.KEYCODE_R]) {
            orientationEstimater.rotate(0.01f, 0);
        }
        //Log.d("Sensor","Orientation " + orientation[0] + "," + orientation[1] + "," + orientation[2]);
        // mCoreLogic.setCameraOrientation(orientationEstimater.getCurrentOrientation());
        mCoreLogic.setSensorRotationMatrix(orientationEstimater.getRotationMatrix());
        if (posTracking) {
            mCoreLogic.cameraOffsetHeight = orientationEstimater.getPosition()[1] * 0.01f;
            mCoreLogic.cameraOffsetDist = orientationEstimater.getPosition()[2] * 0.01f;
        }
    }

    // Default camera position.
    float cameraPos[] = new float[]{0, 17, -11};

    /** Maps keys and gamepad buttons to camera and playback actions. */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (keyCode >= 0 && keyCode < mCoreLogic.keyState.length) {
            mCoreLogic.keyState[keyCode] = true;
        }
        switch (keyCode) {
        case KeyEvent.KEYCODE_DPAD_UP:
        case KeyEvent.KEYCODE_DPAD_DOWN:
        case KeyEvent.KEYCODE_DPAD_LEFT:
        case KeyEvent.KEYCODE_DPAD_RIGHT:
        case KeyEvent.KEYCODE_W:
        case KeyEvent.KEYCODE_S:
            // Digital movement keys cancel analog stick input.
            mCoreLogic.analogInput[0] = 0;
            mCoreLogic.analogInput[1] = 0;
            break;
        case KeyEvent.KEYCODE_M:
            openOptionsMenu();
            break;
        case KeyEvent.KEYCODE_BUTTON_R1:
        //case KeyEvent.KEYCODE_D:
            orientationEstimater.rotate(0, -0.05f);
            break;
        case KeyEvent.KEYCODE_BUTTON_L1:
        //case KeyEvent.KEYCODE_A:
            orientationEstimater.rotate(0, 0.05f);
            break;
        case KeyEvent.KEYCODE_V:
        case KeyEvent.KEYCODE_BUTTON_Y: // ^
            mCoreLogic.toggleViewMode();
            break;
        case KeyEvent.KEYCODE_C:
        case KeyEvent.KEYCODE_BUTTON_X: // []
            mCoreLogic.toggleCameraView();
            break;
        case KeyEvent.KEYCODE_X:
        case KeyEvent.KEYCODE_BUTTON_B: // O
            // mCoreLogic.cameraJump();
            break;
        case KeyEvent.KEYCODE_BUTTON_A: // X
        case KeyEvent.KEYCODE_SPACE:
            // Hide the controls and toggle play/pause.
            FullScreenCompatWrapper.fullScreen(mRelativeLayout, true);
            mSeekBar.setVisibility(View.INVISIBLE);
            mPlayPauseButton.setVisibility(View.INVISIBLE);
            mRewindButton.setVisibility(View.INVISIBLE);
            mCoreLogic.toggleStartStop();
            break;
        default:
            return super.onKeyDown(keyCode, event);
        }
        //mCoreLogic.setCameraPosition(cameraPos);
        return true;
    }

    @Override
    public boolean onKeyUp(int keyCode, KeyEvent event) {
        if (keyCode >= 0 && keyCode < mCoreLogic.keyState.length) {
            mCoreLogic.keyState[keyCode] = false;
        }
        return super.onKeyUp(keyCode, event);
    }

    /** Reads gamepad analog axes into CoreLogic's analogInput array. */
    @Override
    public boolean onGenericMotionEvent(MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_MOVE) {
            Log.d("MotionEvent", "Action: " + event.getAction() + " s:" + event.getSource());
            float h1 = event.getAxisValue(MotionEvent.AXIS_X);
            float v1 = event.getAxisValue(MotionEvent.AXIS_Y);
            float h2 = event.getAxisValue(MotionEvent.AXIS_Z);
            float v2 = event.getAxisValue(MotionEvent.AXIS_RZ);
            mCoreLogic.analogInput[0] = h1;
            mCoreLogic.analogInput[1] = v1;
            mCoreLogic.analogInput[2] = h2;
            mCoreLogic.analogInput[3] = v2;
            mCoreLogic.analogInput[4] = event.getAxisValue(MotionEvent.AXIS_LTRIGGER);
            mCoreLogic.analogInput[5] = event.getAxisValue(MotionEvent.AXIS_RTRIGGER);
            return true;
        }
        return super.onGenericMotionEvent(event);
    }
}
|
|
package org.multibit.viewsystem.commandline.rxtx;
import gnu.io.*;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Enumeration;
import java.util.Vector;
/**
* Used to simplify communication over a Serial port. Using the RXTX-library
* (rxtx.qbang.org), one connection per instance of this class can be handled.
* In addition to handling a connection, information about the available Serial
* ports can be received using this class.
*
* A separate {@link Thread} is started to handle messages that are being
* received over the Serial interface.
*
* This class also makes packages out of a stream of bytes received, using a
* {@link #divider}, and sending these packages as an array of <b>int</b>s (each
* between 0 and 255) to a function implemented by a class implementing the
* {@link net.Network_iface}-interface.
*
* @author Raphael Blatter ([email protected])
* @author heavily using code examples from the RXTX-website (rxtx.qbang.org)
*/
public class Network {
private InputStream inputStream;
private OutputStream outputStream;
/**
* The status of the connection.
*/
private boolean connected = false;
/**
* The Thread used to receive the data from the Serial interface.
*/
private Thread reader;
private SerialPort serialPort;
/**
* Communicating between threads, showing the {@link #reader} when the
* connection has been closed, so it can {@link Thread#join()}.
*/
private boolean end = false;
/**
* Link to the instance of the class implementing {@link net.Network_iface}.
*/
private Network_iface contact;
/**
* A small <b>int</b> representing the number to be used to distinguish
* between two consecutive packages. It can only take a value between 0 and
* 255. Note that data is only sent to
* {@link net.Network_iface#parseInput(int, int, int[])} once the following
* 'divider' could be identified.
*
* As a default, <b>255</b> is used as a divider (unless specified otherwise
* in the constructor).
*
* @see net.Network#Network(int, Network_iface, int)
*/
private int divider;
/**
* <b>int</b> identifying the specific instance of the Network-class. While
* having only a single instance, 'id' is irrelevant. However, having more
* than one open connection (using more than one instance of {@link Network}
* ), 'id' helps identifying which Serial connection a message or a log
* entry came from.
*/
private int id;
private int[] tempBytes;
int numTempBytes = 0, numTotBytes = 0;
/**
 * Creates a connection handler.
 *
 * @param id
 *            <b>int</b> identifying this instance in log output and
 *            callbacks when several connections are open at once.
 *
 * @param contact
 *            receiver of parsed packages and log messages (an
 *            implementation of {@link net.Network_iface}).
 *
 * @param divider
 *            byte value separating two consecutive packages in the
 *            incoming stream; clamped into the range 0..255.
 */
public Network(int id, Network_iface contact, int divider) {
    this.contact = contact;
    // Clamp the divider into the range of a single unsigned byte.
    this.divider = Math.min(255, Math.max(0, divider));
    this.id = id;
    tempBytes = new int[1024];
}
/**
 * Just as {@link #Network(int, Network_iface, int)}, but with a default
 * {@link #divider} of <b>255</b>.
 *
 * @param id identifies this instance when several connections are open.
 * @param contact receiver of parsed packages and log messages.
 * @see #Network(int, Network_iface, int)
 */
public Network(int id, Network_iface contact) {
    this(id, contact, 255);
}
/**
 * Convenience constructor for the common single-connection case: uses a
 * default {@link #divider} of <b>255</b> and a default {@link #id} of 0.
 *
 * @param contact receiver of parsed packages and log messages.
 * @see #Network(int, Network_iface, int)
 */
public Network(Network_iface contact) {
    // Same defaults the two-arg constructor would have applied.
    this(0, contact, 255);
}
/**
 * Lists the names of all Serial ports currently visible to RXTX (only
 * Serial ports are considered). Any returned entry can be passed to
 * {@link #connect(String)} or {@link #connect(String, int)}. The list is
 * also written to the log through the registered contact.
 *
 * @return A {@link Vector} containing the names of all available Serial
 *         ports.
 */
@SuppressWarnings("unchecked")
public Vector<String> getPortList() {
    final Vector<String> ports = new Vector<String>();
    Enumeration<CommPortIdentifier> candidates =
            CommPortIdentifier.getPortIdentifiers();
    while (candidates.hasMoreElements()) {
        CommPortIdentifier candidate = (CommPortIdentifier) candidates.nextElement();
        // Skip parallel and other non-serial ports.
        if (candidate.getPortType() == CommPortIdentifier.PORT_SERIAL) {
            ports.add(candidate.getName());
        }
    }
    contact.writeLog(id, "found the following ports:");
    for (String portName : ports) {
        contact.writeLog(id, ("  " + portName));
    }
    return ports;
}
/**
* Just as {@link #connect(String, int)}, but using 115200 bps as a default
* speed of the connection.
*
* @param portName
* The name of the port the connection should be opened to (see
* {@link #getPortList()}).
* @return <b>true</b> if the connection has been opened successfully,
* <b>false</b> otherwise.
* @see #connect(String, int)
*/
public boolean connect(String portName) {
	// 115200 bps is the default line speed documented in the javadoc above.
	return connect(portName, 115200);
}
/**
* Opening a connection to the specified Serial port, using the specified
* speed. After opening the port, messages can be sent using
* {@link #writeSerial(String)} and received data will be packed into
* packets (see {@link #divider}) and forwarded using
* {@link net.Network_iface#parseInput(int, int, int[])}.
*
* @param portName
* The name of the port the connection should be opened to (see
* {@link #getPortList()}).
* @param speed
* The desired speed of the connection in bps.
* @return <b>true</b> if the connection has been opened successfully,
* <b>false</b> otherwise.
*/
public boolean connect(String portName, int speed) {
	CommPortIdentifier portIdentifier;
	boolean conn = false;
	try {
		portIdentifier = CommPortIdentifier.getPortIdentifier(portName);
		if (portIdentifier.isCurrentlyOwned()) {
			contact.writeLog(id, "Error: Port is currently in use");
		} else {
			// Open with a 2 s timeout, then configure 8N1 framing at the
			// requested speed before wiring up the streams.
			serialPort = (SerialPort) portIdentifier.open("RTBug_network",
					2000);
			serialPort.setSerialPortParams(speed, SerialPort.DATABITS_8,
					SerialPort.STOPBITS_1, SerialPort.PARITY_NONE);
			inputStream = serialPort.getInputStream();
			outputStream = serialPort.getOutputStream();
			// Start the background reader thread; 'end' is its stop flag.
			reader = (new Thread(new SerialReader(inputStream)));
			end = false;
			reader.start();
			connected = true;
			contact.writeLog(id, "connection on " + portName
					+ " established");
			conn = true;
		}
	// NOTE(review): the four handlers below are identical; they could be a
	// single multi-catch if the project targets Java 7+.
	} catch (NoSuchPortException e) {
		contact.writeLog(id, "the connection could not be made");
		e.printStackTrace();
	} catch (PortInUseException e) {
		contact.writeLog(id, "the connection could not be made");
		e.printStackTrace();
	} catch (UnsupportedCommOperationException e) {
		contact.writeLog(id, "the connection could not be made");
		e.printStackTrace();
	} catch (IOException e) {
		contact.writeLog(id, "the connection could not be made");
		e.printStackTrace();
	}
	return conn;
}
/**
 * A separate class to use as the {@link net.Network#reader}. It is run as a
 * separate {@link Thread} and manages the incoming data, packaging them
 * using {@link net.Network#divider} into arrays of <b>int</b>s and
 * forwarding them using
 * {@link net.Network_iface#parseInput(int, int, int[])}.
 */
private class SerialReader implements Runnable {
	InputStream in;

	public SerialReader(InputStream in) {
		this.in = in;
	}

	public void run() {
		byte[] buffer = new byte[1024];
		int len = -1, i, temp;
		try {
			// Busy-polls the stream until disconnect() sets 'end'.
			while (!end) {
				if ((in.available()) > 0) {
					if ((len = this.in.read(buffer)) > -1) {
						for (i = 0; i < len; i++) {
							temp = buffer[i];
							// adjust from C-Byte to Java-Byte
							if (temp < 0) {
								temp += 256;
							}
							// Fix: removed leftover debug print that dumped
							// every received byte to stdout.
							if (temp == divider || temp % 256 == 0) {
								// End of packet: deliver the buffered bytes
								// (if any) and reset the packet buffer.
								if (numTempBytes > 0) {
									contact.parseInput(id, numTempBytes,
											tempBytes);
								}
								numTempBytes = 0;
							} else {
								tempBytes[numTempBytes] = temp;
								++numTempBytes;
							}
						}
					}
				}
			}
		} catch (IOException e) {
			// I/O failure: tear down the connection and notify the listener.
			end = true;
			try {
				outputStream.close();
				inputStream.close();
			} catch (IOException e1) {
				e1.printStackTrace();
			}
			serialPort.close();
			connected = false;
			contact.networkDisconnected(id);
			contact.writeLog(id, "connection has been interrupted");
		}
	}
}
/**
* Simple function closing the connection held by this instance of
* {@link net.Network}. It also ends the Thread {@link net.Network#reader}.
*
* @return <b>true</b> if the connection could be closed, <b>false</b>
* otherwise.
*/
public boolean disconnect() {
	boolean disconn = true;
	// Signal the reader thread to stop, then wait for it to finish before
	// closing the streams it may still be using.
	end = true;
	try {
		reader.join();
	} catch (InterruptedException e1) {
		e1.printStackTrace();
		disconn = false;
	}
	try {
		outputStream.close();
		inputStream.close();
	} catch (IOException e) {
		e.printStackTrace();
		disconn = false;
	}
	serialPort.close();
	connected = false;
	contact.networkDisconnected(id);
	contact.writeLog(id, "connection disconnected");
	return disconn;
}

/**
 * @return Whether this instance of {@link net.Network} has currently an
 *         open connection of not.
 */
public boolean isConnected() {
	return connected;
}
/**
* This method is included as a legacy. Depending on the other side of the
* Serial port, it might be easier to send using a String. Note: this method
* does not add the {@link #divider} to the end.
*
* If a connection is open, a {@link String} can be sent over the Serial
* port using this function. If no connection is available, <b>false</b> is
* returned and a message is sent using
* {@link net.Network_iface#writeLog(int, String)}.
*
* @param message
* The {@link String} to be sent over the Serial connection.
* @return <b>true</b> if the message could be sent, <b>false</b> otherwise.
*/
/**
 * Sends the given text over the open Serial connection. The
 * {@link #divider} is NOT appended. When no port is connected, a log
 * message is written via the listener and <b>false</b> is returned.
 *
 * @param message the text to transmit
 * @return <b>true</b> if the bytes were written, <b>false</b> otherwise
 */
public boolean writeSerial(String message) {
	if (!isConnected()) {
		contact.writeLog(id, "No port is connected.");
		return false;
	}
	try {
		outputStream.write(message.getBytes());
		return true;
	} catch (IOException e) {
		// Write failure implies a dead link: tear the connection down.
		disconnect();
		return false;
	}
}
/**
 * If a connection is open, an <b>int</b> between 0 and 255 (except the
 * {@link net.Network#divider}) can be sent over the Serial port using this
 * function. Note: unlike earlier revisions, the
 * {@link net.Network#divider} is <b>not</b> appended automatically; the
 * caller is responsible for terminating the packet. If no connection is
 * available, <b>false</b> is returned and a message is sent using
 * {@link net.Network_iface#writeLog(int, String)}.
 *
 * @param numBytes
 *            The number of bytes to send over the Serial port.
 * @param message
 *            [] The array of <b>int</b>s to be sent over the Serial
 *            connection (between 0 and 256).
 * @return <b>true</b> if the message could be sent, <b>false</b> otherwise
 *         or if one of the numbers is equal to the #{@link Network#divider}
 *         .
 */
public boolean writeSerial(int numBytes, int message[]) {
	boolean success = true;
	int i;
	// Reject payloads containing the divider byte: it would be read as a
	// premature end-of-packet marker on the receiving side.
	for (i = 0; i < numBytes; ++i) {
		if (message[i] == divider) {
			success = false;
			break;
		}
	}
	if (success && isConnected()) {
		try {
			for (i = 0; i < numBytes; ++i) {
				outputStream.write(changeToByte(message[i]));
			}
		} catch (IOException e) {
			success = false;
			disconnect();
		}
	} else if (!success) {
		// message contains the divider
		contact.writeLog(id, "The message contains the divider.");
	} else {
		contact.writeLog(id, "No port is connected.");
	}
	return success;
}
/**
 * Clamps the given value to the unsigned-byte range [0, 255] and narrows
 * it to a {@code byte} for transmission.
 */
private byte changeToByte(int num) {
	int clamped = Math.min(255, Math.max(0, num));
	return (byte) clamped;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.socket;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import javax.servlet.http.HttpServletRequest;
import com.google.common.base.Strings;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.zeppelin.conf.ZeppelinConfiguration;
import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.display.AngularObjectRegistryListener;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.remote.RemoteAngularObjectRegistry;
import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.interpreter.InterpreterOutput;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterSetting;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener;
import org.apache.zeppelin.notebook.*;
import org.apache.zeppelin.scheduler.Job;
import org.apache.zeppelin.scheduler.Job.Status;
import org.apache.zeppelin.server.ZeppelinServer;
import org.apache.zeppelin.socket.Message.OP;
import org.apache.zeppelin.ticket.TicketContainer;
import org.apache.zeppelin.utils.SecurityUtils;
import org.eclipse.jetty.websocket.servlet.WebSocketServlet;
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Zeppelin websocket service.
*/
public class NotebookServer extends WebSocketServlet implements
NotebookSocketListener, JobListenerFactory, AngularObjectRegistryListener,
RemoteInterpreterProcessListener {
private static final Logger LOG = LoggerFactory.getLogger(NotebookServer.class);
// Shared Gson instance for (de)serializing websocket messages.
Gson gson = new Gson();
// noteId -> sockets watching that note; all access is synchronized on the map.
final Map<String, List<NotebookSocket>> noteSocketMap = new HashMap<>();
// Every open websocket connection, bound to a note or not.
final Queue<NotebookSocket> connectedSockets = new ConcurrentLinkedQueue<>();
// Accessor for the server-wide Notebook singleton.
private Notebook notebook() {
  return ZeppelinServer.notebook;
}

// Jetty hook: install the creator that produces NotebookSocket instances.
@Override
public void configure(WebSocketServletFactory factory) {
  factory.setCreator(new NotebookWebSocketCreator(this));
}
/**
 * Validates the Origin header of a websocket handshake against the
 * configured allowed origins. Returns {@code false} (reject) when the
 * origin cannot be resolved or parsed.
 */
public boolean checkOrigin(HttpServletRequest request, String origin) {
  try {
    return SecurityUtils.isValidOrigin(origin, ZeppelinConfiguration.create());
  } catch (UnknownHostException | URISyntaxException e) {
    // Identical handling for both failure modes; merged into a multi-catch.
    LOG.error(e.toString(), e);
  }
  return false;
}
// Factory used by the websocket creator for each new connection.
public NotebookSocket doWebSocketConnect(HttpServletRequest req, String protocol) {
  return new NotebookSocket(req, protocol, this);
}

// Track every newly opened socket so broadcasts can reach it.
@Override
public void onOpen(NotebookSocket conn) {
  LOG.info("New connection from {} : {}", conn.getRequest().getRemoteAddr(),
      conn.getRequest().getRemotePort());
  connectedSockets.add(conn);
}
/**
 * Entry point for every message received on a websocket: authenticates the
 * sender (ticket + anonymous policy), builds the user/role set, then
 * dispatches on the operation code. Any failure is logged and swallowed so
 * one bad message cannot kill the connection handler.
 */
@Override
public void onMessage(NotebookSocket conn, String msg) {
  Notebook notebook = notebook();
  try {
    Message messagereceived = deserializeMessage(msg);
    LOG.debug("RECEIVE << " + messagereceived.op);
    LOG.debug("RECEIVE PRINCIPAL << " + messagereceived.principal);
    LOG.debug("RECEIVE TICKET << " + messagereceived.ticket);
    LOG.debug("RECEIVE ROLES << " + messagereceived.roles);
    if (LOG.isTraceEnabled()) {
      LOG.trace("RECEIVE MSG = " + messagereceived);
    }
    // The ticket issued at login must match the one carried by the message.
    String ticket = TicketContainer.instance.getTicket(messagereceived.principal);
    if (ticket != null && !ticket.equals(messagereceived.ticket))
      throw new Exception("Invalid ticket " + messagereceived.ticket + " != " + ticket);
    ZeppelinConfiguration conf = ZeppelinConfiguration.create();
    boolean allowAnonymous = conf.
        getBoolean(ZeppelinConfiguration.ConfVars.ZEPPELIN_ANONYMOUS_ALLOWED);
    if (!allowAnonymous && messagereceived.principal.equals("anonymous")) {
      throw new Exception("Anonymous access not allowed ");
    }
    // Authorization set = principal plus any roles serialized in the message.
    HashSet<String> userAndRoles = new HashSet<String>();
    userAndRoles.add(messagereceived.principal);
    if (!messagereceived.roles.equals("")) {
      HashSet<String> roles = gson.fromJson(messagereceived.roles,
          new TypeToken<HashSet<String>>(){}.getType());
      if (roles != null) {
        userAndRoles.addAll(roles);
      }
    }
    // Dispatch on the operation requested by the client.
    switch (messagereceived.op) {
      case LIST_NOTES:
        unicastNoteList(conn);
        break;
      case RELOAD_NOTES_FROM_REPO:
        broadcastReloadedNoteList();
        break;
      case GET_HOME_NOTE:
        sendHomeNote(conn, userAndRoles, notebook);
        break;
      case GET_NOTE:
        sendNote(conn, userAndRoles, notebook, messagereceived);
        break;
      case NEW_NOTE:
        createNote(conn, userAndRoles, notebook, messagereceived);
        break;
      case DEL_NOTE:
        removeNote(conn, userAndRoles, notebook, messagereceived);
        break;
      case CLONE_NOTE:
        cloneNote(conn, userAndRoles, notebook, messagereceived);
        break;
      case IMPORT_NOTE:
        importNote(conn, userAndRoles, notebook, messagereceived);
        break;
      case COMMIT_PARAGRAPH:
        updateParagraph(conn, userAndRoles, notebook, messagereceived);
        break;
      case RUN_PARAGRAPH:
        runParagraph(conn, userAndRoles, notebook, messagereceived);
        break;
      case CANCEL_PARAGRAPH:
        cancelParagraph(conn, userAndRoles, notebook, messagereceived);
        break;
      case MOVE_PARAGRAPH:
        moveParagraph(conn, userAndRoles, notebook, messagereceived);
        break;
      case INSERT_PARAGRAPH:
        insertParagraph(conn, userAndRoles, notebook, messagereceived);
        break;
      case PARAGRAPH_REMOVE:
        removeParagraph(conn, userAndRoles, notebook, messagereceived);
        break;
      case PARAGRAPH_CLEAR_OUTPUT:
        clearParagraphOutput(conn, userAndRoles, notebook, messagereceived);
        break;
      case NOTE_UPDATE:
        updateNote(conn, userAndRoles, notebook, messagereceived);
        break;
      case COMPLETION:
        completion(conn, userAndRoles, notebook, messagereceived);
        break;
      case PING:
        break; //do nothing
      case ANGULAR_OBJECT_UPDATED:
        angularObjectUpdated(conn, userAndRoles, notebook, messagereceived);
        break;
      case ANGULAR_OBJECT_CLIENT_BIND:
        angularObjectClientBind(conn, userAndRoles, notebook, messagereceived);
        break;
      case ANGULAR_OBJECT_CLIENT_UNBIND:
        angularObjectClientUnbind(conn, userAndRoles, notebook, messagereceived);
        break;
      case LIST_CONFIGURATIONS:
        sendAllConfigurations(conn, userAndRoles, notebook);
        break;
      case CHECKPOINT_NOTEBOOK:
        checkpointNotebook(conn, notebook, messagereceived);
        break;
      default:
        break;
    }
  } catch (Exception e) {
    LOG.error("Can't handle message", e);
  }
}
// On disconnect: unbind the socket from any note and forget the connection.
@Override
public void onClose(NotebookSocket conn, int code, String reason) {
  LOG.info("Closed connection to {} : {}. ({}) {}", conn.getRequest()
      .getRemoteAddr(), conn.getRequest().getRemotePort(), code, reason);
  removeConnectionFromAllNote(conn);
  connectedSockets.remove(conn);
}
// JSON -> Message; protected so tests can override.
protected Message deserializeMessage(String msg) {
  return gson.fromJson(msg, Message.class);
}

// Message -> JSON wire format.
protected String serializeMessage(Message m) {
  return gson.toJson(m);
}
/**
 * Subscribes a socket to a note's broadcasts. A socket may watch at most
 * one note at a time, so any previous subscription is dropped first.
 */
private void addConnectionToNote(String noteId, NotebookSocket socket) {
  synchronized (noteSocketMap) {
    // Enforce the one-note-per-socket invariant.
    removeConnectionFromAllNote(socket);
    List<NotebookSocket> watchers = noteSocketMap.get(noteId);
    if (watchers == null) {
      watchers = new LinkedList<>();
      noteSocketMap.put(noteId, watchers);
    }
    if (!watchers.contains(socket)) {
      watchers.add(socket);
    }
  }
}
/** Unsubscribes a socket from one note; a no-op if the note has no watchers. */
private void removeConnectionFromNote(String noteId, NotebookSocket socket) {
  synchronized (noteSocketMap) {
    List<NotebookSocket> watchers = noteSocketMap.get(noteId);
    if (watchers != null) {
      watchers.remove(socket);
    }
  }
}
/** Drops every socket subscription for a deleted note. */
private void removeNote(String noteId) {
  synchronized (noteSocketMap) {
    // The removed list is intentionally discarded (was an unused local).
    noteSocketMap.remove(noteId);
  }
}
/** Unsubscribes a socket from every note it is currently watching. */
private void removeConnectionFromAllNote(NotebookSocket socket) {
  synchronized (noteSocketMap) {
    // Only the per-note lists are mutated, so iterating the key set is safe.
    for (String noteId : noteSocketMap.keySet()) {
      removeConnectionFromNote(noteId, socket);
    }
  }
}
/**
 * Returns the id of the note this socket is subscribed to, or {@code null}
 * if it watches none. (If the one-note-per-socket invariant were ever
 * violated, the last matching note wins, as in the original scan.)
 */
private String getOpenNoteId(NotebookSocket socket) {
  String openId = null;
  synchronized (noteSocketMap) {
    for (Map.Entry<String, List<NotebookSocket>> entry : noteSocketMap.entrySet()) {
      if (entry.getValue().contains(socket)) {
        openId = entry.getKey();
      }
    }
  }
  return openId;
}
/**
 * Broadcasts a message to every note that is bound to the given
 * interpreter group (scans all notes and their bound interpreter ids).
 */
private void broadcastToNoteBindedInterpreter(String interpreterGroupId,
    Message m) {
  Notebook notebook = notebook();
  List<Note> notes = notebook.getAllNotes();
  for (Note note : notes) {
    List<String> ids = note.getNoteReplLoader().getInterpreters();
    for (String id : ids) {
      if (id.equals(interpreterGroupId)) {
        broadcast(note.id(), m);
      }
    }
  }
}
/** Sends a message to every socket watching the given note. */
private void broadcast(String noteId, Message m) {
  synchronized (noteSocketMap) {
    List<NotebookSocket> watchers = noteSocketMap.get(noteId);
    if (watchers == null || watchers.isEmpty()) {
      return;
    }
    LOG.debug("SEND >> " + m.op);
    for (NotebookSocket watcher : watchers) {
      try {
        watcher.send(serializeMessage(m));
      } catch (IOException e) {
        // One broken socket must not stop delivery to the others.
        LOG.error("socket error", e);
      }
    }
  }
}
/**
 * Like {@link #broadcast(String, Message)} but skips one socket — typically
 * the originator of the change, which already has the new state.
 */
private void broadcastExcept(String noteId, Message m, NotebookSocket exclude) {
  synchronized (noteSocketMap) {
    List<NotebookSocket> watchers = noteSocketMap.get(noteId);
    if (watchers == null || watchers.isEmpty()) {
      return;
    }
    LOG.debug("SEND >> " + m.op);
    for (NotebookSocket watcher : watchers) {
      if (exclude.equals(watcher)) {
        continue;
      }
      try {
        watcher.send(serializeMessage(m));
      } catch (IOException e) {
        LOG.error("socket error", e);
      }
    }
  }
}
/** Sends a message to every connected socket, note-bound or not. */
private void broadcastAll(Message m) {
  // Serialize once; the payload is identical for every recipient.
  String payload = serializeMessage(m);
  for (NotebookSocket socket : connectedSockets) {
    try {
      socket.send(payload);
    } catch (IOException e) {
      LOG.error("socket error", e);
    }
  }
}
// Sends a message to one socket, logging (not propagating) IO failures.
private void unicast(Message m, NotebookSocket conn) {
  try {
    conn.send(serializeMessage(m));
  } catch (IOException e) {
    LOG.error("socket error", e);
  }
}
/**
 * Builds the id/name summary list of all notes, optionally reloading them
 * from the repository first. The configured home-screen note is omitted
 * when the corresponding "hide" option is set.
 *
 * @param needsReload whether to re-read all notes from the storage repo
 * @return one {id, name} map per visible note
 */
public List<Map<String, String>> generateNotebooksInfo(boolean needsReload) {
  Notebook notebook = notebook();
  ZeppelinConfiguration conf = notebook.getConf();
  String homescreenNotebookId = conf.getString(ConfVars.ZEPPELIN_NOTEBOOK_HOMESCREEN);
  boolean hideHomeScreenNotebookFromList = conf
      .getBoolean(ConfVars.ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE);
  if (needsReload) {
    try {
      notebook.reloadAllNotes();
    } catch (IOException e) {
      // Fix: include the exception so the failure cause is not lost.
      LOG.error("Fail to reload notes from repository", e);
    }
  }
  List<Note> notes = notebook.getAllNotes();
  List<Map<String, String>> notesInfo = new LinkedList<>();
  for (Note note : notes) {
    Map<String, String> info = new HashMap<>();
    if (hideHomeScreenNotebookFromList && note.id().equals(homescreenNotebookId)) {
      continue;
    }
    info.put("id", note.id());
    info.put("name", note.getName());
    notesInfo.add(info);
  }
  return notesInfo;
}
// Pushes the full note body to every socket watching it.
public void broadcastNote(Note note) {
  broadcast(note.id(), new Message(OP.NOTE).put("note", note));
}

// Pushes the (cached) note list to all connections.
public void broadcastNoteList() {
  List<Map<String, String>> notesInfo = generateNotebooksInfo(false);
  broadcastAll(new Message(OP.NOTES_INFO).put("notes", notesInfo));
}

// Sends the (cached) note list to a single connection.
public void unicastNoteList(NotebookSocket conn) {
  List<Map<String, String>> notesInfo = generateNotebooksInfo(false);
  unicast(new Message(OP.NOTES_INFO).put("notes", notesInfo), conn);
}

// Reloads notes from the repository, then pushes the fresh list to everyone.
public void broadcastReloadedNoteList() {
  List<Map<String, String>> notesInfo = generateNotebooksInfo(true);
  broadcastAll(new Message(OP.NOTES_INFO).put("notes", notesInfo));
}
// Informs a client that it lacks the permission for 'op', listing who is
// allowed versus what the connection's user/roles are.
void permissionError(NotebookSocket conn, String op, Set<String> current,
    Set<String> allowed) throws IOException {
  LOG.info("Cannot {}. Connection readers {}. Allowed readers {}",
      op, current, allowed);
  conn.send(serializeMessage(new Message(OP.AUTH_INFO).put("info",
      "Insufficient privileges to " + op + " note.\n\n" +
          "Allowed users or roles: " + allowed.toString() + "\n\n" +
          "User belongs to: " + current.toString())));
}
/**
 * Handles GET_NOTE: after a read-permission check, subscribes the socket to
 * the note, sends the note body and all its angular objects.
 */
private void sendNote(NotebookSocket conn, HashSet<String> userAndRoles, Notebook notebook,
    Message fromMessage) throws IOException {
  LOG.info("New operation from {} : {} : {} : {} : {}", conn.getRequest().getRemoteAddr(),
      conn.getRequest().getRemotePort(),
      fromMessage.principal, fromMessage.op, fromMessage.get("id")
  );
  String noteId = (String) fromMessage.get("id");
  if (noteId == null) {
    return;
  }
  Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (note != null) {
    if (!notebookAuthorization.isReader(noteId, userAndRoles)) {
      permissionError(conn, "read", userAndRoles, notebookAuthorization.getReaders(noteId));
      return;
    }
    addConnectionToNote(note.id(), conn);
    conn.send(serializeMessage(new Message(OP.NOTE).put("note", note)));
    sendAllAngularObjects(note, conn);
  }
}

/**
 * Handles GET_HOME_NOTE: like sendNote for the configured home-screen note;
 * when none is configured (or it does not exist) the client gets a null
 * note and the socket is unbound from all notes.
 */
private void sendHomeNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook) throws IOException {
  String noteId = notebook.getConf().getString(ConfVars.ZEPPELIN_NOTEBOOK_HOMESCREEN);
  Note note = null;
  if (noteId != null) {
    note = notebook.getNote(noteId);
  }
  if (note != null) {
    NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
    if (!notebookAuthorization.isReader(noteId, userAndRoles)) {
      permissionError(conn, "read", userAndRoles, notebookAuthorization.getReaders(noteId));
      return;
    }
    addConnectionToNote(note.id(), conn);
    conn.send(serializeMessage(new Message(OP.NOTE).put("note", note)));
    sendAllAngularObjects(note, conn);
  } else {
    removeConnectionFromAllNote(conn);
    conn.send(serializeMessage(new Message(OP.NOTE).put("note", null)));
  }
}
/**
 * Handles NOTE_UPDATE: after a write-permission check, renames the note,
 * replaces its config, refreshes the cron job when the schedule changed,
 * persists, and broadcasts the updated note and note list.
 */
private void updateNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage)
    throws SchedulerException, IOException {
  String noteId = (String) fromMessage.get("id");
  String name = (String) fromMessage.get("name");
  // Unchecked cast: the message payload is Gson-decoded JSON, so "config"
  // is expected to be a map — TODO confirm against the client protocol.
  Map<String, Object> config = (Map<String, Object>) fromMessage
      .get("config");
  if (noteId == null) {
    return;
  }
  if (config == null) {
    return;
  }
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "update", userAndRoles, notebookAuthorization.getWriters(noteId));
    return;
  }
  Note note = notebook.getNote(noteId);
  if (note != null) {
    // Compare crons before the config is overwritten below.
    boolean cronUpdated = isCronUpdated(config, note.getConfig());
    note.setName(name);
    note.setConfig(config);
    if (cronUpdated) {
      notebook.refreshCron(note.id());
    }
    note.persist();
    broadcastNote(note);
    broadcastNoteList();
  }
}
/**
 * Returns whether the "cron" entry differs between the two config maps,
 * i.e. whether the note's cron schedule must be refreshed.
 *
 * Fix: the previous logic returned {@code true} even when both maps held
 * the same non-null cron expression, so an unchanged schedule still
 * triggered an unnecessary {@code refreshCron}. A null-safe inequality
 * captures the intended contract: changed iff the values differ
 * (including null -> value and value -> null transitions).
 */
private boolean isCronUpdated(Map<String, Object> configA,
    Map<String, Object> configB) {
  return !Objects.equals(configA.get("cron"), configB.get("cron"));
}
/**
 * Handles NEW_NOTE: creates a note with one empty paragraph, names it from
 * the message (or "Note <id>" as a fallback), persists it, binds the
 * creating socket to it, and announces the new note list to everyone.
 */
private void createNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message message)
    throws IOException {
  Note note = notebook.createNote();
  note.addParagraph(); // it's an empty note. so add one paragraph
  if (message != null) {
    String noteName = (String) message.get("name");
    if (noteName == null || noteName.isEmpty()) {
      noteName = "Note " + note.getId();
    }
    note.setName(noteName);
  }
  note.persist();
  // conn is already a NotebookSocket; the redundant cast was removed.
  addConnectionToNote(note.id(), conn);
  conn.send(serializeMessage(new Message(OP.NEW_NOTE).put("note", note)));
  broadcastNoteList();
}
/**
 * Handles DEL_NOTE: owner-only; removes the note from the notebook and
 * from the socket map, then broadcasts the shrunken note list.
 */
private void removeNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage)
    throws IOException {
  String noteId = (String) fromMessage.get("id");
  if (noteId == null) {
    return;
  }
  Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isOwner(noteId, userAndRoles)) {
    permissionError(conn, "remove", userAndRoles, notebookAuthorization.getOwners(noteId));
    return;
  }
  notebook.removeNote(noteId);
  removeNote(noteId);
  broadcastNoteList();
}

/**
 * Handles COMMIT_PARAGRAPH: writer-only; updates a paragraph's params,
 * config, title and text, persists the note, and broadcasts the paragraph.
 */
private void updateParagraph(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  String paragraphId = (String) fromMessage.get("id");
  if (paragraphId == null) {
    return;
  }
  // Unchecked casts: Gson-decoded JSON maps — TODO confirm protocol shape.
  Map<String, Object> params = (Map<String, Object>) fromMessage
      .get("params");
  Map<String, Object> config = (Map<String, Object>) fromMessage
      .get("config");
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", userAndRoles, notebookAuthorization.getWriters(noteId));
    return;
  }
  Paragraph p = note.getParagraph(paragraphId);
  p.settings.setParams(params);
  p.setConfig(config);
  p.setTitle((String) fromMessage.get("title"));
  p.setText((String) fromMessage.get("paragraph"));
  note.persist();
  broadcast(note.id(), new Message(OP.PARAGRAPH).put("paragraph", p));
}
/**
 * Handles CLONE_NOTE: clones the currently open note under the requested
 * name, binds the socket to the clone, and broadcasts the new note list.
 */
private void cloneNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage)
    throws IOException, CloneNotSupportedException {
  String noteId = getOpenNoteId(conn);
  String name = (String) fromMessage.get("name");
  Note newNote = notebook.cloneNote(noteId, name);
  // conn is already a NotebookSocket; the redundant cast was removed.
  addConnectionToNote(newNote.id(), conn);
  conn.send(serializeMessage(new Message(OP.NEW_NOTE).put("note", newNote)));
  broadcastNoteList();
}
/**
 * Handles IMPORT_NOTE: re-serializes the "notebook" payload to JSON, lets
 * the Notebook import it under the given name, persists, and broadcasts.
 *
 * @return the imported note, or null when the message carried no payload
 */
protected Note importNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage)
    throws IOException {
  Note note = null;
  if (fromMessage != null) {
    String noteName = (String) ((Map) fromMessage.get("notebook")).get("name");
    String noteJson = gson.toJson(fromMessage.get("notebook"));
    note = notebook.importNote(noteJson, noteName);
    note.persist();
    broadcastNote(note);
    broadcastNoteList();
  }
  return note;
}
/**
 * Handles PARAGRAPH_REMOVE: writer-only; deletes the paragraph unless it
 * is the note's last one, then persists and broadcasts the note.
 */
private void removeParagraph(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  final String paragraphId = (String) fromMessage.get("id");
  if (paragraphId == null) {
    return;
  }
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", userAndRoles, notebookAuthorization.getWriters(noteId));
    return;
  }
  // A note must keep at least one paragraph.
  if (!note.isLastParagraph(paragraphId)) {
    note.removeParagraph(paragraphId);
    note.persist();
    broadcastNote(note);
  }
}

/**
 * Handles PARAGRAPH_CLEAR_OUTPUT: writer-only; clears one paragraph's
 * output and broadcasts the note.
 */
private void clearParagraphOutput(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  final String paragraphId = (String) fromMessage.get("id");
  if (paragraphId == null) {
    return;
  }
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", userAndRoles, notebookAuthorization.getWriters(noteId));
    return;
  }
  note.clearParagraphOutput(paragraphId);
  broadcastNote(note);
}

/**
 * Handles COMPLETION: asks the paragraph's interpreter for completion
 * candidates at the given cursor and returns them to the requester only.
 */
private void completion(NotebookSocket conn, HashSet<String> userAndRoles, Notebook notebook,
    Message fromMessage) throws IOException {
  String paragraphId = (String) fromMessage.get("id");
  String buffer = (String) fromMessage.get("buf");
  // The cursor arrives as a JSON number (double); truncate to int.
  int cursor = (int) Double.parseDouble(fromMessage.get("cursor").toString());
  Message resp = new Message(OP.COMPLETION_LIST).put("id", paragraphId);
  if (paragraphId == null) {
    conn.send(serializeMessage(resp));
    return;
  }
  final Note note = notebook.getNote(getOpenNoteId(conn));
  List<String> candidates = note.completion(paragraphId, buffer, cursor);
  resp.put("completions", candidates);
  conn.send(serializeMessage(resp));
}
/**
 * When an angular object is updated from a client: locates the object in
 * the paragraph / note / global registry scope (in that order), applies
 * the new value, and re-broadcasts the update to other clients — to every
 * note bound to the interpreter group for globally-scoped objects, or to
 * the single note otherwise.
 *
 * @param conn the web socket.
 * @param notebook the notebook.
 * @param fromMessage the message.
 */
private void angularObjectUpdated(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) {
  String noteId = (String) fromMessage.get("noteId");
  String paragraphId = (String) fromMessage.get("paragraphId");
  String interpreterGroupId = (String) fromMessage.get("interpreterGroupId");
  String varName = (String) fromMessage.get("name");
  Object varValue = fromMessage.get("value");
  AngularObject ao = null;
  boolean global = false;
  // propagate change to (Remote) AngularObjectRegistry
  Note note = notebook.getNote(noteId);
  if (note != null) {
    List<InterpreterSetting> settings = note.getNoteReplLoader()
        .getInterpreterSettings();
    for (InterpreterSetting setting : settings) {
      if (setting.getInterpreterGroup(note.id()) == null) {
        continue;
      }
      if (interpreterGroupId.equals(setting.getInterpreterGroup(note.id()).getId())) {
        AngularObjectRegistry angularObjectRegistry = setting
            .getInterpreterGroup(note.id()).getAngularObjectRegistry();
        // Scope resolution: paragraph -> note -> global.
        ao = angularObjectRegistry.get(varName, noteId, paragraphId);
        if (ao == null) {
          // then try notebook scope registry
          ao = angularObjectRegistry.get(varName, noteId, null);
          if (ao == null) {
            // then try global scope registry
            ao = angularObjectRegistry.get(varName, null, null);
            if (ao == null) {
              LOG.warn("Object {} is not binded", varName);
            } else {
              // path from client -> server
              ao.set(varValue, false);
              global = true;
            }
          } else {
            // path from client -> server
            ao.set(varValue, false);
            global = false;
          }
        } else {
          ao.set(varValue, false);
          global = false;
        }
        break;
      }
    }
  }
  if (global) { // broadcast change to all web session that uses related
    // interpreter.
    for (Note n : notebook.getAllNotes()) {
      // Fix: read the interpreter settings of the note being iterated (n),
      // not of the note from the incoming message (was 'note').
      List<InterpreterSetting> settings = n.getNoteReplLoader()
          .getInterpreterSettings();
      for (InterpreterSetting setting : settings) {
        if (setting.getInterpreterGroup(n.id()) == null) {
          continue;
        }
        if (interpreterGroupId.equals(setting.getInterpreterGroup(n.id()).getId())) {
          AngularObjectRegistry angularObjectRegistry = setting
              .getInterpreterGroup(n.id()).getAngularObjectRegistry();
          this.broadcastExcept(
              n.id(),
              new Message(OP.ANGULAR_OBJECT_UPDATE).put("angularObject", ao)
                  .put("interpreterGroupId", interpreterGroupId)
                  .put("noteId", n.id())
                  .put("paragraphId", ao.getParagraphId()),
              conn);
        }
      }
    }
  } else { // broadcast to all web session for the note
    // NOTE(review): if the note lookup failed above, 'note'/'ao' are null
    // here and this branch would NPE — pre-existing behavior, unchanged.
    this.broadcastExcept(
        note.id(),
        new Message(OP.ANGULAR_OBJECT_UPDATE).put("angularObject", ao)
            .put("interpreterGroupId", interpreterGroupId)
            .put("noteId", note.id())
            .put("paragraphId", ao.getParagraphId()),
        conn);
  }
}
/**
 * Push the given Angular variable to the target
 * interpreter angular registry given a noteId
 * and a paragraph id. Dispatches to the remote registry when the
 * interpreter runs out-of-process, to the local registry otherwise.
 * @param conn
 * @param notebook
 * @param fromMessage
 * @throws Exception
 */
protected void angularObjectClientBind(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage)
    throws Exception {
  String noteId = fromMessage.getType("noteId");
  String varName = fromMessage.getType("name");
  Object varValue = fromMessage.get("value");
  String paragraphId = fromMessage.getType("paragraphId");
  Note note = notebook.getNote(noteId);
  if (paragraphId == null) {
    throw new IllegalArgumentException("target paragraph not specified for " +
        "angular value bind");
  }
  if (note != null) {
    final InterpreterGroup interpreterGroup = findInterpreterGroupForParagraph(note,
        paragraphId);
    final AngularObjectRegistry registry = interpreterGroup.getAngularObjectRegistry();
    if (registry instanceof RemoteAngularObjectRegistry) {
      RemoteAngularObjectRegistry remoteRegistry = (RemoteAngularObjectRegistry) registry;
      pushAngularObjectToRemoteRegistry(noteId, paragraphId, varName, varValue, remoteRegistry,
          interpreterGroup.getId(), conn);
    } else {
      pushAngularObjectToLocalRepo(noteId, paragraphId, varName, varValue, registry,
          interpreterGroup.getId(), conn);
    }
  }
}

/**
 * Remove the given Angular variable to the target
 * interpreter(s) angular registry given a noteId
 * and an optional list of paragraph id(s). Mirrors
 * angularObjectClientBind's remote/local dispatch.
 * @param conn
 * @param notebook
 * @param fromMessage
 * @throws Exception
 */
protected void angularObjectClientUnbind(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage)
    throws Exception {
  String noteId = fromMessage.getType("noteId");
  String varName = fromMessage.getType("name");
  String paragraphId = fromMessage.getType("paragraphId");
  Note note = notebook.getNote(noteId);
  if (paragraphId == null) {
    throw new IllegalArgumentException("target paragraph not specified for " +
        "angular value unBind");
  }
  if (note != null) {
    final InterpreterGroup interpreterGroup = findInterpreterGroupForParagraph(note,
        paragraphId);
    final AngularObjectRegistry registry = interpreterGroup.getAngularObjectRegistry();
    if (registry instanceof RemoteAngularObjectRegistry) {
      RemoteAngularObjectRegistry remoteRegistry = (RemoteAngularObjectRegistry) registry;
      removeAngularFromRemoteRegistry(noteId, paragraphId, varName, remoteRegistry,
          interpreterGroup.getId(), conn);
    } else {
      removeAngularObjectFromLocalRepo(noteId, paragraphId, varName, registry,
          interpreterGroup.getId(), conn);
    }
  }
}
/**
 * Resolve the interpreter group currently bound to the given paragraph.
 *
 * @throws IllegalArgumentException when no paragraph with that id exists in the note
 */
private InterpreterGroup findInterpreterGroupForParagraph(Note note, String paragraphId)
    throws Exception {
  final Paragraph target = note.getParagraph(paragraphId);
  if (target == null) {
    throw new IllegalArgumentException("Unknown paragraph with id : " + paragraphId);
  }
  return target.getCurrentRepl().getInterpreterGroup();
}
/**
 * Push an Angular variable into a remote interpreter process's registry and
 * broadcast the resulting update to every other client watching the note.
 */
private void pushAngularObjectToRemoteRegistry(String noteId, String paragraphId,
    String varName, Object varValue, RemoteAngularObjectRegistry remoteRegistry,
    String interpreterGroupId, NotebookSocket conn) {
  // addAndNotifyRemoteProcess both registers the value and pings the remote side
  final AngularObject ao = remoteRegistry.addAndNotifyRemoteProcess(varName, varValue,
      noteId, paragraphId);
  final Message update = new Message(OP.ANGULAR_OBJECT_UPDATE)
      .put("angularObject", ao)
      .put("interpreterGroupId", interpreterGroupId)
      .put("noteId", noteId)
      .put("paragraphId", paragraphId);
  broadcastExcept(noteId, update, conn);
}
/**
 * Remove an Angular variable from a remote interpreter process's registry and
 * broadcast the removal to every other client watching the note.
 */
private void removeAngularFromRemoteRegistry(String noteId, String paragraphId,
    String varName, RemoteAngularObjectRegistry remoteRegistry,
    String interpreterGroupId, NotebookSocket conn) {
  final AngularObject removed = remoteRegistry.removeAndNotifyRemoteProcess(varName, noteId,
      paragraphId);
  final Message removal = new Message(OP.ANGULAR_OBJECT_REMOVE)
      .put("angularObject", removed)
      .put("interpreterGroupId", interpreterGroupId)
      .put("noteId", noteId)
      .put("paragraphId", paragraphId);
  broadcastExcept(noteId, removal, conn);
}
/**
 * Store an Angular variable in the local (in-process) registry, creating it
 * on first use, then broadcast the update to other clients on the note.
 */
private void pushAngularObjectToLocalRepo(String noteId, String paragraphId, String varName,
    Object varValue, AngularObjectRegistry registry,
    String interpreterGroupId, NotebookSocket conn) {
  AngularObject ao = registry.get(varName, noteId, paragraphId);
  if (ao == null) {
    // first bind of this variable: register it
    ao = registry.add(varName, varValue, noteId, paragraphId);
  } else {
    // existing variable: update in place (true -> notify watchers)
    ao.set(varValue, true);
  }
  final Message update = new Message(OP.ANGULAR_OBJECT_UPDATE)
      .put("angularObject", ao)
      .put("interpreterGroupId", interpreterGroupId)
      .put("noteId", noteId)
      .put("paragraphId", paragraphId);
  broadcastExcept(noteId, update, conn);
}
/**
 * Remove an Angular variable from the local registry; broadcast the removal
 * only when the variable actually existed.
 */
private void removeAngularObjectFromLocalRepo(String noteId, String paragraphId, String varName,
    AngularObjectRegistry registry, String interpreterGroupId, NotebookSocket conn) {
  final AngularObject removed = registry.remove(varName, noteId, paragraphId);
  if (removed == null) {
    return; // nothing to announce
  }
  final Message removal = new Message(OP.ANGULAR_OBJECT_REMOVE)
      .put("angularObject", removed)
      .put("interpreterGroupId", interpreterGroupId)
      .put("noteId", noteId)
      .put("paragraphId", paragraphId);
  broadcastExcept(noteId, removal, conn);
}
/**
 * Move a paragraph to a new index within the client's open note.
 * Requires write permission on the note; persists and broadcasts the result.
 *
 * @param fromMessage carries "id" (paragraph id) and "index" (target position)
 */
private void moveParagraph(NotebookSocket conn, HashSet<String> userAndRoles, Notebook notebook,
    Message fromMessage) throws IOException {
  final String paragraphId = (String) fromMessage.get("id");
  if (paragraphId == null) {
    return;
  }
  // the index arrives as a JSON number (a double such as 2.0)
  final int newIndex = (int) Double.parseDouble(fromMessage.get("index")
      .toString());
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  if (note == null) {
    // fix: previously NPE'd when the open note had been removed concurrently
    return;
  }
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", userAndRoles, notebookAuthorization.getWriters(noteId));
    return;
  }
  note.moveParagraph(paragraphId, newIndex);
  note.persist();
  broadcastNote(note);
}
/**
 * Insert a new empty paragraph at the given index in the client's open note.
 * Requires write permission; persists and broadcasts the updated note.
 *
 * @param fromMessage carries "index" (insertion position as a JSON number)
 */
private void insertParagraph(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  final int index = (int) Double.parseDouble(fromMessage.get("index")
      .toString());
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  if (note == null) {
    // fix: previously NPE'd when the open note had been removed concurrently
    return;
  }
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", userAndRoles, notebookAuthorization.getWriters(noteId));
    return;
  }
  note.insertParagraph(index);
  note.persist();
  broadcastNote(note);
}
/**
 * Abort a running paragraph in the client's open note.
 * Requires write permission on the note.
 *
 * @param fromMessage carries "id" (paragraph id)
 */
private void cancelParagraph(NotebookSocket conn, HashSet<String> userAndRoles, Notebook notebook,
    Message fromMessage) throws IOException {
  final String paragraphId = (String) fromMessage.get("id");
  if (paragraphId == null) {
    return;
  }
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  if (note == null) {
    // fix: previously NPE'd when the open note had been removed concurrently
    return;
  }
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", userAndRoles, notebookAuthorization.getWriters(noteId));
    return;
  }
  Paragraph p = note.getParagraph(paragraphId);
  if (p != null) {
    // fix: guard against an unknown paragraph id from the client (was an NPE)
    p.abort();
  }
}
/**
 * Update a paragraph's text/title/params/config from the client message and
 * run it. If the edited paragraph is the last one and non-empty, a fresh
 * trailing paragraph is appended. Requires write permission on the note.
 *
 * @param fromMessage carries "id", "paragraph" (text), "title", "params",
 *                    "config", plus the caller's principal/ticket
 */
private void runParagraph(NotebookSocket conn, HashSet<String> userAndRoles, Notebook notebook,
    Message fromMessage) throws IOException {
  final String paragraphId = (String) fromMessage.get("id");
  if (paragraphId == null) {
    return;
  }
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  if (note == null) {
    // fix: previously NPE'd when the open note had been removed concurrently
    return;
  }
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", userAndRoles, notebookAuthorization.getWriters(noteId));
    return;
  }
  Paragraph p = note.getParagraph(paragraphId);
  if (p == null) {
    // fix: the catch block below already assumed p could be null, but the
    // dereferences before it did not; bail out on an unknown paragraph id
    return;
  }
  String text = (String) fromMessage.get("paragraph");
  p.setText(text);
  p.setTitle((String) fromMessage.get("title"));
  if (!fromMessage.principal.equals("anonymous")) {
    AuthenticationInfo authenticationInfo = new AuthenticationInfo(fromMessage.principal,
        fromMessage.ticket);
    p.setAuthenticationInfo(authenticationInfo);
  } else {
    // anonymous runs get an empty authentication context
    p.setAuthenticationInfo(new AuthenticationInfo());
  }
  Map<String, Object> params = (Map<String, Object>) fromMessage
      .get("params");
  p.settings.setParams(params);
  Map<String, Object> config = (Map<String, Object>) fromMessage
      .get("config");
  p.setConfig(config);
  // if it's the last paragraph, let's add a new one
  boolean isTheLastParagraph = note.getLastParagraph().getId()
      .equals(p.getId());
  if (!Strings.isNullOrEmpty(text) && isTheLastParagraph) {
    note.addParagraph();
  }
  note.persist();
  try {
    note.run(paragraphId);
  } catch (Exception ex) {
    // surface interpreter failures on the paragraph instead of crashing
    LOG.error("Exception from run", ex);
    p.setReturn(
        new InterpreterResult(InterpreterResult.Code.ERROR, ex.getMessage()),
        ex);
    p.setStatus(Status.ERROR);
  }
}
/**
 * Send the server configuration to one client, filtering out secrets
 * (any key containing "password" and the Azure storage connection string).
 */
private void sendAllConfigurations(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook) throws IOException {
  ZeppelinConfiguration conf = notebook.getConf();
  // predicate that rejects secret-bearing configuration keys
  ZeppelinConfiguration.ConfigurationKeyPredicate hideSecrets =
      new ZeppelinConfiguration.ConfigurationKeyPredicate() {
        @Override
        public boolean apply(String key) {
          if (key.contains("password")) {
            return false;
          }
          return !key.equals(
              ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_AZURE_CONNECTION_STRING
                  .getVarName());
        }
      };
  Map<String, String> configurations = conf.dumpConfigurations(conf, hideSecrets);
  conn.send(serializeMessage(
      new Message(OP.CONFIGURATIONS_INFO).put("configurations", configurations)));
}
/**
 * Commit (checkpoint) the current state of a note to revision storage.
 *
 * @param fromMessage carries "noteId" and "commitMessage"
 */
private void checkpointNotebook(NotebookSocket conn, Notebook notebook,
    Message fromMessage) throws IOException {
  notebook.checkpointNote((String) fromMessage.get("noteId"),
      (String) fromMessage.get("commitMessage"));
}
/**
 * This callback is for the paragraph that runs on ZeppelinServer.
 * Broadcasts an append-output message to all clients watching the note.
 *
 * @param noteId note whose paragraph produced output
 * @param paragraphId paragraph producing output
 * @param output output to append
 */
@Override
public void onOutputAppend(String noteId, String paragraphId, String output) {
  Message msg = new Message(OP.PARAGRAPH_APPEND_OUTPUT)
      .put("noteId", noteId)
      .put("paragraphId", paragraphId)
      .put("data", output);
  // fix: removed an unused Paragraph lookup that could NPE when the note
  // had been deleted; the broadcast only needs the ids already in hand
  broadcast(noteId, msg);
}
/**
 * This callback is for the paragraph that runs on ZeppelinServer.
 * Broadcasts a replace-output message to all clients watching the note.
 *
 * @param noteId note whose paragraph produced output
 * @param paragraphId paragraph producing output
 * @param output output to update (replace)
 */
@Override
public void onOutputUpdated(String noteId, String paragraphId, String output) {
  Message msg = new Message(OP.PARAGRAPH_UPDATE_OUTPUT)
      .put("noteId", noteId)
      .put("paragraphId", paragraphId)
      .put("data", output);
  // fix: removed an unused Paragraph lookup that could NPE when the note
  // had been deleted; the broadcast only needs the ids already in hand
  broadcast(noteId, msg);
}
/**
* Need description here.
*
*/
public static class ParagraphListenerImpl implements ParagraphJobListener {
private NotebookServer notebookServer;
private Note note;
public ParagraphListenerImpl(NotebookServer notebookServer, Note note) {
this.notebookServer = notebookServer;
this.note = note;
}
@Override
public void onProgressUpdate(Job job, int progress) {
notebookServer.broadcast(
note.id(),
new Message(OP.PROGRESS).put("id", job.getId()).put("progress",
job.progress()));
}
@Override
public void beforeStatusChange(Job job, Status before, Status after) {
}
@Override
public void afterStatusChange(Job job, Status before, Status after) {
if (after == Status.ERROR) {
if (job.getException() != null) {
LOG.error("Error", job.getException());
}
}
if (job.isTerminated()) {
LOG.info("Job {} is finished", job.getId());
try {
note.persist();
} catch (IOException e) {
LOG.error(e.toString(), e);
}
}
notebookServer.broadcastNote(note);
}
/**
* This callback is for praragraph that runs on RemoteInterpreterProcess
* @param paragraph
* @param out
* @param output
*/
@Override
public void onOutputAppend(Paragraph paragraph, InterpreterOutput out, String output) {
Message msg = new Message(OP.PARAGRAPH_APPEND_OUTPUT)
.put("noteId", paragraph.getNote().getId())
.put("paragraphId", paragraph.getId())
.put("data", output);
notebookServer.broadcast(paragraph.getNote().getId(), msg);
}
/**
* This callback is for paragraph that runs on RemoteInterpreterProcess
* @param paragraph
* @param out
* @param output
*/
@Override
public void onOutputUpdate(Paragraph paragraph, InterpreterOutput out, String output) {
Message msg = new Message(OP.PARAGRAPH_UPDATE_OUTPUT)
.put("noteId", paragraph.getNote().getId())
.put("paragraphId", paragraph.getId())
.put("data", output);
notebookServer.broadcast(paragraph.getNote().getId(), msg);
}
}
/**
 * Create a paragraph job listener bound to this server and the given note.
 */
@Override
public ParagraphJobListener getParagraphJobListener(Note note) {
  final ParagraphListenerImpl listener = new ParagraphListenerImpl(this, note);
  return listener;
}
/**
 * Send every Angular object (note-scoped and global) registered under each
 * of the note's interpreter settings to one freshly connected client.
 */
private void sendAllAngularObjects(Note note, NotebookSocket conn) throws IOException {
  List<InterpreterSetting> settings = note.getNoteReplLoader().getInterpreterSettings();
  if (settings == null || settings.isEmpty()) {
    return; // no interpreters bound, nothing to send
  }
  for (InterpreterSetting setting : settings) {
    String groupId = setting.getInterpreterGroup(note.id()).getId();
    AngularObjectRegistry registry =
        setting.getInterpreterGroup(note.id()).getAngularObjectRegistry();
    for (AngularObject object : registry.getAllWithGlobal(note.id())) {
      conn.send(serializeMessage(new Message(OP.ANGULAR_OBJECT_UPDATE)
          .put("angularObject", object)
          .put("interpreterGroupId", groupId)
          .put("noteId", note.id())
          .put("paragraphId", object.getParagraphId())
      ));
    }
  }
}
/**
 * An Angular object was added in an interpreter group; treated exactly like
 * an update so connected clients receive the initial value.
 */
@Override
public void onAdd(String interpreterGroupId, AngularObject object) {
  onUpdate(interpreterGroupId, object);
}
/**
 * Broadcast an Angular object update to every note bound to the interpreter
 * group that owns the object; note-scoped objects go only to their own note.
 */
@Override
public void onUpdate(String interpreterGroupId, AngularObject object) {
  Notebook notebook = notebook();
  if (notebook == null) {
    return; // server not fully initialized yet
  }
  for (Note note : notebook.getAllNotes()) {
    // note-scoped object: only its own note is interested
    if (object.getNoteId() != null && !note.id().equals(object.getNoteId())) {
      continue;
    }
    List<InterpreterSetting> intpSettings = note.getNoteReplLoader()
        .getInterpreterSettings();
    if (intpSettings.isEmpty()) {
      continue;
    }
    for (InterpreterSetting setting : intpSettings) {
      if (!setting.getInterpreterGroup(note.id()).getId().equals(interpreterGroupId)) {
        continue;
      }
      broadcast(note.id(),
          new Message(OP.ANGULAR_OBJECT_UPDATE)
              .put("angularObject", object)
              .put("interpreterGroupId", interpreterGroupId)
              .put("noteId", note.id())
              .put("paragraphId", object.getParagraphId()));
    }
  }
}
/**
 * Broadcast removal of an Angular object to every note whose interpreter
 * list contains the owning group; when noteId is non-null only that note
 * receives the message.
 */
@Override
public void onRemove(String interpreterGroupId, String name, String noteId, String paragraphId) {
  Notebook notebook = notebook();
  for (Note note : notebook.getAllNotes()) {
    if (noteId != null && !note.id().equals(noteId)) {
      continue;
    }
    for (String replId : note.getNoteReplLoader().getInterpreters()) {
      if (!replId.equals(interpreterGroupId)) {
        continue;
      }
      broadcast(note.id(),
          new Message(OP.ANGULAR_OBJECT_REMOVE)
              .put("name", name)
              .put("noteId", noteId)
              .put("paragraphId", paragraphId));
    }
  }
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.wan;
import java.util.ArrayList;
import java.util.List;
import org.apache.geode.cache.asyncqueue.AsyncEventListener;
import org.apache.geode.cache.client.internal.LocatorDiscoveryCallback;
import org.apache.geode.cache.wan.GatewayEventFilter;
import org.apache.geode.cache.wan.GatewayEventSubstitutionFilter;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.cache.wan.GatewaySender.OrderPolicy;
import org.apache.geode.cache.wan.GatewayTransportFilter;
/**
 * Mutable bag of configuration attributes used while constructing a
 * {@link GatewaySender}. Fields default to the constants declared on
 * {@link GatewaySender}; builders populate them through the setters and the
 * sender implementation reads them back through the getters.
 */
public class GatewaySenderAttributes {

  public static final boolean DEFAULT_IS_BUCKETSORTED = true;
  public static final boolean DEFAULT_IS_META_QUEUE = false;

  // --- transport / connection tuning ---
  private int socketBufferSize = GatewaySender.DEFAULT_SOCKET_BUFFER_SIZE;
  private int socketReadTimeout = GatewaySender.DEFAULT_SOCKET_READ_TIMEOUT;

  // --- queue / batching behavior ---
  private int maximumQueueMemory = GatewaySender.DEFAULT_MAXIMUM_QUEUE_MEMORY;
  private int batchSize = GatewaySender.DEFAULT_BATCH_SIZE;
  private int batchTimeInterval = GatewaySender.DEFAULT_BATCH_TIME_INTERVAL;
  private boolean isBatchConflationEnabled = GatewaySender.DEFAULT_BATCH_CONFLATION;
  private boolean isPersistenceEnabled = GatewaySender.DEFAULT_PERSISTENCE_ENABLED;
  private int alertThreshold = GatewaySender.DEFAULT_ALERT_THRESHOLD;
  private boolean manualStart = GatewaySender.DEFAULT_MANUAL_START;
  private String diskStoreName;

  // --- filters and listeners ---
  private List<GatewayEventFilter> eventFilters = new ArrayList<GatewayEventFilter>();
  // fix: declared as List (interface) instead of the concrete ArrayList
  private List<GatewayTransportFilter> transFilters = new ArrayList<GatewayTransportFilter>();
  private List<AsyncEventListener> listeners = new ArrayList<AsyncEventListener>();
  private GatewayEventSubstitutionFilter eventSubstitutionFilter;

  // --- identity / topology ---
  private String id;
  private int remoteDs = GatewaySender.DEFAULT_DISTRIBUTED_SYSTEM_ID;
  private LocatorDiscoveryCallback locatorDiscoveryCallback;
  private boolean isDiskSynchronous = GatewaySender.DEFAULT_DISK_SYNCHRONOUS;
  private OrderPolicy policy;
  private int dispatcherThreads = GatewaySender.DEFAULT_DISPATCHER_THREADS;
  private int parallelism = GatewaySender.DEFAULT_PARALLELISM_REPLICATED_REGION;
  private boolean isParallel = GatewaySender.DEFAULT_IS_PARALLEL;

  // --- transaction grouping ---
  private boolean groupTransactionEvents = GatewaySender.DEFAULT_MUST_GROUP_TRANSACTION_EVENTS;
  private int retriesToGetTransactionEventsFromQueue =
      GatewaySender.GET_TRANSACTION_EVENTS_FROM_QUEUE_RETRIES;

  // --- internal flags ---
  private boolean isForInternalUse = GatewaySender.DEFAULT_IS_FOR_INTERNAL_USE;
  private boolean isBucketSorted = GatewaySenderAttributes.DEFAULT_IS_BUCKETSORTED;
  private boolean isMetaQueue = GatewaySenderAttributes.DEFAULT_IS_META_QUEUE;
  private boolean forwardExpirationDestroy = GatewaySender.DEFAULT_FORWARD_EXPIRATION_DESTROY;
  private boolean enforceThreadsConnectSameReceiver =
      GatewaySender.DEFAULT_ENFORCE_THREADS_CONNECT_SAME_RECEIVER;

  public void setSocketBufferSize(int bufferSize) {
    socketBufferSize = bufferSize;
  }

  public void setSocketReadTimeout(int readTimeout) {
    socketReadTimeout = readTimeout;
  }

  public void setMaximumQueueMemory(int maxQueueMemory) {
    maximumQueueMemory = maxQueueMemory;
  }

  public void setBatchSize(int batchsize) {
    batchSize = batchsize;
  }

  public void setBatchTimeInterval(int batchtimeinterval) {
    batchTimeInterval = batchtimeinterval;
  }

  public void setBatchConflationEnabled(boolean batchConfEnabled) {
    isBatchConflationEnabled = batchConfEnabled;
  }

  public void setPersistenceEnabled(boolean persistenceEnabled) {
    isPersistenceEnabled = persistenceEnabled;
  }

  public void setAlertThreshold(int alertThresh) {
    alertThreshold = alertThresh;
  }

  public void setManualStart(boolean manualstart) {
    manualStart = manualstart;
  }

  public void setDiskStoreName(String diskstorename) {
    diskStoreName = diskstorename;
  }

  public void setEventSubstitutionFilter(GatewayEventSubstitutionFilter eventsubstitutionfilter) {
    eventSubstitutionFilter = eventsubstitutionfilter;
  }

  public void setId(String idString) {
    id = idString;
  }

  public void setRemoteDs(int rDs) {
    remoteDs = rDs;
  }

  public void setLocatorDiscoveryCallback(LocatorDiscoveryCallback locatorDiscCall) {
    locatorDiscoveryCallback = locatorDiscCall;
  }

  public void setDiskSynchronous(boolean diskSynchronous) {
    isDiskSynchronous = diskSynchronous;
  }

  public void setOrderPolicy(OrderPolicy orderpolicy) {
    policy = orderpolicy;
  }

  public void setDispatcherThreads(int dispatchThreads) {
    dispatcherThreads = dispatchThreads;
  }

  public void setParallelism(int tempParallelism) {
    parallelism = tempParallelism;
  }

  public void setParallel(boolean parallel) {
    isParallel = parallel;
  }

  public void setGroupTransactionEvents(boolean groupTransEvents) {
    groupTransactionEvents = groupTransEvents;
  }

  public void setRetriesToGetTransactionEventsFromQueue(int retries) {
    retriesToGetTransactionEventsFromQueue = retries;
  }

  public void setForInternalUse(boolean forInternalUse) {
    isForInternalUse = forInternalUse;
  }

  public void setBucketSorted(boolean bucketSorted) {
    isBucketSorted = bucketSorted;
  }

  public void setMetaQueue(boolean metaQueue) {
    isMetaQueue = metaQueue;
  }

  public void setForwardExpirationDestroy(boolean forwardexpirationdestroy) {
    forwardExpirationDestroy = forwardexpirationdestroy;
  }

  public void setEnforceThreadsConnectSameReceiver(boolean enforcethreadsconnectsamereceiver) {
    enforceThreadsConnectSameReceiver = enforcethreadsconnectsamereceiver;
  }

  public int getSocketBufferSize() {
    return this.socketBufferSize;
  }

  public boolean isDiskSynchronous() {
    return this.isDiskSynchronous;
  }

  public int getSocketReadTimeout() {
    return this.socketReadTimeout;
  }

  public String getDiskStoreName() {
    return this.diskStoreName;
  }

  public int getMaximumQueueMemory() {
    return this.maximumQueueMemory;
  }

  public int getBatchSize() {
    return this.batchSize;
  }

  public int getBatchTimeInterval() {
    return this.batchTimeInterval;
  }

  public boolean isBatchConflationEnabled() {
    return this.isBatchConflationEnabled;
  }

  public boolean isPersistenceEnabled() {
    return this.isPersistenceEnabled;
  }

  public int getAlertThreshold() {
    return this.alertThreshold;
  }

  public List<GatewayEventFilter> getGatewayEventFilters() {
    return this.eventFilters;
  }

  public List<GatewayTransportFilter> getGatewayTransportFilters() {
    return this.transFilters;
  }

  public List<AsyncEventListener> getAsyncEventListeners() {
    return this.listeners;
  }

  // NOTE: method name typo ("Locato") is part of the public API and is kept
  // for backward compatibility with existing callers.
  public LocatorDiscoveryCallback getGatewayLocatoDiscoveryCallback() {
    return this.locatorDiscoveryCallback;
  }

  public boolean isManualStart() {
    return this.manualStart;
  }

  public boolean isParallel() {
    return this.isParallel;
  }

  public boolean mustGroupTransactionEvents() {
    return this.groupTransactionEvents;
  }

  public int getRetriesToGetTransactionEventsFromQueue() {
    return this.retriesToGetTransactionEventsFromQueue;
  }

  public boolean isForInternalUse() {
    return this.isForInternalUse;
  }

  public void addGatewayEventFilter(GatewayEventFilter filter) {
    this.eventFilters.add(filter);
  }

  public void addGatewayTransportFilter(GatewayTransportFilter filter) {
    this.transFilters.add(filter);
  }

  public void addAsyncEventListener(AsyncEventListener listener) {
    this.listeners.add(listener);
  }

  public String getId() {
    return this.id;
  }

  public int getRemoteDSId() {
    return this.remoteDs;
  }

  public int getDispatcherThreads() {
    return dispatcherThreads;
  }

  public int getParallelismForReplicatedRegion() {
    return parallelism;
  }

  public OrderPolicy getOrderPolicy() {
    return policy;
  }

  public boolean isBucketSorted() {
    return this.isBucketSorted;
  }

  public GatewayEventSubstitutionFilter getGatewayEventSubstitutionFilter() {
    return this.eventSubstitutionFilter;
  }

  public boolean isMetaQueue() {
    return this.isMetaQueue;
  }

  public boolean isForwardExpirationDestroy() {
    return this.forwardExpirationDestroy;
  }

  public boolean getEnforceThreadsConnectSameReceiver() {
    return this.enforceThreadsConnectSameReceiver;
  }
}
|
|
package natlab;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringReader;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import natlab.options.Options;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.InputSource;
import ast.CompilationUnits;
import ast.Program;
/**
 * Socket server that accepts MATLAB/Natlab source from a single client,
 * parses it, and returns the resulting AST (or an error list) as
 * null-terminated XML messages. Optionally runs a heartbeat watchdog that
 * aborts the process when the client stops pinging.
 */
public class NatlabServer {
  private static final int SERVER_PORT = 47146; //default server port
  private static final long HEART_RATE = 4000; //in milliseconds
  private static final long HEART_DELAY = 5000; //delay till first heart beat check is made

  private int port;
  private boolean quiet;
  private boolean natlab;
  private boolean heartbeat;
  private PrintWriter out;
  private Scanner in;
  // set true by every heartbeat command; cleared by the watchdog check
  private AtomicBoolean heartbeatFlag = new AtomicBoolean(true);
  private Timer heartbeatTimer = new Timer();

  /** Build a server from command-line options (port, quiet, natlab, heartbeat). */
  public static NatlabServer create(Options options) {
    int port = SERVER_PORT;
    if (options.sp().length() > 0) {
      port = Integer.parseInt(options.sp());
    }
    return new NatlabServer(port, options.quiet(), options.natlab(), !options.noheart());
  }

  private NatlabServer(int port, boolean quiet, boolean natlab, boolean heartbeat) {
    this.port = port;
    this.quiet = quiet;
    this.natlab = natlab;
    this.heartbeat = heartbeat;
  }

  /** Log to stderr unless quiet mode is enabled. */
  private void log(String message) {
    if (!quiet) {
      System.err.println(message);
    }
  }

  /**
   * Open the server socket, accept a single client, wire up the streams, and
   * start the optional heartbeat watchdog.
   * NOTE(review): the ServerSocket is never closed; only one client is ever
   * accepted and the process exits via System.exit on failure paths.
   */
  private void connect() {
    log("Server mode");
    log("Opening server on port " + port);
    ServerSocket serverSocket = null;
    try {
      serverSocket = new ServerSocket(port);
    } catch (IOException e) {
      System.err.println("Server could not be opened on port " + port);
      System.exit(1);
    }
    log("Server started");
    Socket clientSocket = null;
    try {
      clientSocket = serverSocket.accept();
    } catch (IOException e) {
      System.err.println("Accept client failed");
      System.exit(1);
    }
    log("Client connected");
    try {
      out = new PrintWriter(clientSocket.getOutputStream(), true);
      // messages are delimited by NUL characters
      in = new Scanner(clientSocket.getInputStream()).useDelimiter("\0");
    } catch (IOException e) {
      System.err.println("Server input stream creation failed");
      System.exit(1);
    }
    if (heartbeat) {
      heartbeatTimer.schedule(new TimerTask() {
        @Override public void run() {
          // if no heartbeat arrived since the last check, abort
          if (!heartbeatFlag.getAndSet(false)) {
            log("Server timed out, aborting");
            out.print("<errorlist><error>server timed out, aborting</error></errorlist>\0");
            out.flush();
            System.exit(1);
          }
        }
      }, HEART_DELAY, HEART_RATE);
    }
  }

  /** Tell the client we are shutting down and release server-side resources. */
  private void shutdown() {
    log("shutdown cmd");
    out.print("<shutdown />\0");
    out.flush();
    heartbeatTimer.cancel();
    out.close();
    in.close();
  }

  /** Parsed client command: element name plus optional text argument. */
  private static class Command {
    public String command;
    public String argument;
  }

  /**
   * Parse one XML command from the client.
   *
   * @return the parsed command, or null when the XML is malformed
   */
  private Command parseCommand(String cmd) {
    try {
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      // security fix: client input is untrusted XML — forbid DOCTYPE
      // declarations to prevent XXE / entity-expansion attacks
      dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
      dbf.setIgnoringElementContentWhitespace(true);
      dbf.setCoalescing(true);
      DocumentBuilder db = dbf.newDocumentBuilder();
      Reader xmlData = new StringReader(cmd);
      Document d = db.parse(new InputSource(xmlData));
      Element root = d.getDocumentElement();
      root.normalize();
      Command command = new Command();
      command.command = root.getNodeName().trim().toLowerCase();
      if (root.hasChildNodes()) {
        command.argument = root.getFirstChild().getNodeValue().trim();
      }
      return command;
    } catch (Exception e) {
      return null;
    }
  }

  /**
   * Main command loop: accept a client and serve heartbeat / shutdown /
   * parsefile / parsetext commands until shutdown or EOF.
   */
  public void start() {
    connect();
    while (in.hasNext()) {
      Command command = parseCommand(in.next());
      if (command == null) {
        // fix: malformed XML used to propagate as a NullPointerException
        // here; skip bad commands instead of crashing the server loop
        log("Ignoring malformed command");
        continue;
      }
      if (command.command.equals("heartbeat")) {
        heartbeatFlag.set(true);
        continue;
      } else if (command.command.equals("shutdown")) {
        shutdown();
        break;
      }
      String filename = null;
      Reader input = null;
      if (command.command.equals("parsefile")) {
        filename = command.argument;
      } else if (command.command.equals("parsetext")) {
        filename = "source/text";
        input = new StringReader(command.argument);
      } else {
        // fix: unknown commands previously fell through and attempted to
        // parse a null filename
        log("Ignoring unknown command: " + command.command);
        continue;
      }
      log("Parsing");
      List<CompilationProblem> errors = new ArrayList<>();
      Program program;
      if (!natlab) {
        if (input != null) {
          program = Parse.parseMatlabFile(filename, input, errors);
        } else {
          program = Parse.parseMatlabFile(filename, errors);
        }
      } else {
        if (input != null) {
          program = Parse.parseNatlabFile(filename, input, errors);
        } else {
          program = Parse.parseNatlabFile(filename, errors);
        }
      }
      if (!errors.isEmpty()) {
        out.print("<errorlist>");
        for (CompilationProblem problem : errors) {
          out.print("<error>" + problem + "</error>");
        }
        out.print("</errorlist>\0");
        out.flush();
        continue;
      }
      CompilationUnits cu = new CompilationUnits();
      cu.addProgram(program);
      String ast = cu.XMLtoString(cu.ASTtoXML(false));
      log("Sending response: \n" + ast);
      out.print(ast + "\0");
      out.flush();
    }
    log("Server shutdown");
  }
}
|
|
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kim.service.impl;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.kuali.rice.core.api.config.property.ConfigurationService;
import org.kuali.rice.kim.api.KimConstants;
import org.kuali.rice.kim.impl.identity.IdentityArchiveService;
import org.kuali.rice.kim.api.identity.entity.EntityDefault;
import org.kuali.rice.kim.api.identity.principal.Principal;
import org.kuali.rice.kim.impl.identity.EntityDefaultInfoCacheBo;
import org.kuali.rice.krad.service.BusinessObjectService;
import org.kuali.rice.krad.service.KRADServiceLocatorInternal;
import org.kuali.rice.ksb.service.KSBServiceLocator;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
/**
* This is the default implementation for the IdentityArchiveService.
* @see IdentityArchiveService
* @author Kuali Rice Team ([email protected])
*
*/
public class IdentityArchiveServiceImpl implements IdentityArchiveService, InitializingBean, DisposableBean {
private static final Logger LOG = Logger.getLogger( IdentityArchiveServiceImpl.class );

// collaborators injected via the setters below
private BusinessObjectService businessObjectService;
private ConfigurationService kualiConfigurationService;
private PlatformTransactionManager transactionManager;

// configuration property keys controlling flush cadence and queue pressure
private static final String EXEC_INTERVAL_SECS = "kim.identityArchiveServiceImpl.executionIntervalSeconds";
private static final String MAX_WRITE_QUEUE_SIZE = "kim.identityArchiveServiceImpl.maxWriteQueueSize";
private static final int EXECUTION_INTERVAL_SECONDS_DEFAULT = 600; // by default, flush the write queue this often
private static final int MAX_WRITE_QUEUE_SIZE_DEFAULT = 300; // cache this many KEDI's before forcing write

// pending entities awaiting archival, and the single writer that drains them
private final WriteQueue writeQueue = new WriteQueue();
private final EntityArchiveWriter writer = new EntityArchiveWriter();

// all this ceremony just decorates the writer so it logs a message first, and converts the Callable to Runnable
private final Runnable maxQueueSizeExceededWriter =
    new CallableAdapter(new PreLogCallableWrapper<Boolean>(writer, Level.DEBUG, "max size exceeded, flushing write queue"));

// ditto
private final Runnable scheduledWriter =
    new CallableAdapter(new PreLogCallableWrapper<Boolean>(writer, Level.DEBUG, "scheduled write out, flushing write queue"));

// ditto
private final Runnable shutdownWriter =
    new CallableAdapter(new PreLogCallableWrapper<Boolean>(writer, Level.DEBUG, "rice is shutting down, flushing write queue"));
/**
 * Interval (seconds) between scheduled write-queue flushes, read from
 * configuration; falls back to the default when the property is unset or
 * not a valid integer.
 */
private int getExecutionIntervalSeconds() {
  final String prop = kualiConfigurationService.getPropertyValueAsString(EXEC_INTERVAL_SECS);
  try {
    // parseInt avoids the boxing round-trip of Integer.valueOf(...).intValue()
    return Integer.parseInt(prop);
  } catch (NumberFormatException e) {
    // also covers prop == null — parseInt(null) throws NumberFormatException
    return EXECUTION_INTERVAL_SECONDS_DEFAULT;
  }
}
/**
 * Queue size at which a flush is forced, read from configuration; falls back
 * to the default when the property is unset or not a valid integer.
 */
private int getMaxWriteQueueSize() {
  final String prop = kualiConfigurationService.getPropertyValueAsString(MAX_WRITE_QUEUE_SIZE);
  try {
    // parseInt avoids the boxing round-trip of Integer.valueOf(...).intValue()
    return Integer.parseInt(prop);
  } catch (NumberFormatException e) {
    // also covers prop == null — parseInt(null) throws NumberFormatException
    return MAX_WRITE_QUEUE_SIZE_DEFAULT;
  }
}
/**
 * Look up an archived default entity by entity id.
 *
 * @param entityId entity id; must not be blank
 * @return the archived EntityDefault, or null when nothing is cached
 */
@Override
public EntityDefault getEntityDefaultFromArchive( String entityId ) {
  if (StringUtils.isBlank(entityId)) {
    throw new IllegalArgumentException("entityId is blank");
  }
  final Map<String, String> pk = new HashMap<String, String>(1);
  pk.put(KimConstants.PrimaryKeyConstants.SUB_ENTITY_ID, entityId);
  final EntityDefaultInfoCacheBo cached =
      businessObjectService.findByPrimaryKey(EntityDefaultInfoCacheBo.class, pk);
  if (cached == null) {
    return null;
  }
  return cached.convertCacheToEntityDefaultInfo();
}
/**
 * Look up an archived default entity by principal id.
 *
 * @param principalId principal id; must not be blank
 * @return the archived EntityDefault, or null when nothing is cached
 */
@Override
public EntityDefault getEntityDefaultFromArchiveByPrincipalId(String principalId) {
  if (StringUtils.isBlank(principalId)) {
    throw new IllegalArgumentException("principalId is blank");
  }
  final Map<String, String> pk = new HashMap<String, String>(1);
  pk.put("principalId", principalId);
  final EntityDefaultInfoCacheBo cached =
      businessObjectService.findByPrimaryKey(EntityDefaultInfoCacheBo.class, pk);
  if (cached == null) {
    return null;
  }
  return cached.convertCacheToEntityDefaultInfo();
}
/**
 * Look up an archived default entity by principal name; when several rows
 * match, the first one returned by the query wins.
 *
 * @param principalName principal name; must not be blank
 * @return the archived EntityDefault, or null when nothing is cached
 */
@Override
public EntityDefault getEntityDefaultFromArchiveByPrincipalName(String principalName) {
  if (StringUtils.isBlank(principalName)) {
    throw new IllegalArgumentException("principalName is blank");
  }
  final Map<String, String> criteria = new HashMap<String, String>(1);
  criteria.put("principalName", principalName);
  final Collection<EntityDefaultInfoCacheBo> entities =
      businessObjectService.findMatching(EntityDefaultInfoCacheBo.class, criteria);
  if (entities == null || entities.isEmpty()) {
    return null;
  }
  return entities.iterator().next().convertCacheToEntityDefaultInfo();
}
/**
 * Look up an archived default entity by employee id; when several rows
 * match, the first one returned by the query wins.
 *
 * @param employeeId employee id; must not be blank
 * @return the archived EntityDefault, or null when nothing is cached
 */
@Override
public EntityDefault getEntityDefaultFromArchiveByEmployeeId(String employeeId) {
  if (StringUtils.isBlank(employeeId)) {
    throw new IllegalArgumentException("employeeId is blank");
  }
  final Map<String, String> criteria = new HashMap<String, String>(1);
  criteria.put("employeeId", employeeId);
  final Collection<EntityDefaultInfoCacheBo> entities =
      businessObjectService.findMatching(EntityDefaultInfoCacheBo.class, criteria);
  if (entities == null || entities.isEmpty()) {
    return null;
  }
  return entities.iterator().next().convertCacheToEntityDefaultInfo();
}
/**
 * Queue an entity default for asynchronous archival. The enqueue happens as
 * a side effect of the size check below; when the queue crosses the
 * configured threshold and no flush is already in flight, a flush task is
 * submitted to the KSB thread pool.
 *
 * @param entity the entity default to archive; must not be null
 */
@Override
public void saveEntityDefaultToArchive(EntityDefault entity) {
  if (entity == null) {
    // NOTE(review): message says "blank" but the check is for null
    throw new IllegalArgumentException("entity is blank");
  }
  // if the max size has been reached, schedule now
  // CAUTION: offerAndGetSize both enqueues the entity AND returns the new
  // size, so the left operand of && must always be evaluated — the enqueue
  // happens even when the threshold has not been reached. requestSubmit()
  // ensures only one flush task is outstanding at a time.
  if (getMaxWriteQueueSize() <= writeQueue.offerAndGetSize(entity) /* <- this enqueues the KEDI */ &&
      writer.requestSubmit()) {
    KSBServiceLocator.getThreadPool().execute(maxQueueSizeExceededWriter);
  }
}
/** Synchronously flushes any queued identities to the database via the archive writer. */
@Override
public void flushToArchive() {
    writer.call();
}
/** @param businessObjectService the service used to read and write archived identity rows */
public void setBusinessObjectService(BusinessObjectService businessObjectService) {
    this.businessObjectService = businessObjectService;
}
/** @param kualiConfigurationService the configuration service backing this archive service */
public void setKualiConfigurationService(
        ConfigurationService kualiConfigurationService) {
    this.kualiConfigurationService = kualiConfigurationService;
}
/** @param txMgr the transaction manager used when flushing queued entities to the database */
public void setTransactionManager(PlatformTransactionManager txMgr) {
    this.transactionManager = txMgr;
}
/**
 * Schedules the periodic archive writer on the KSB scheduled pool at a fixed
 * rate of {@code getExecutionIntervalSeconds()} seconds (Spring
 * InitializingBean callback, invoked after property injection).
 */
@Override
public void afterPropertiesSet() throws Exception {
    LOG.info("scheduling writer...");
    KSBServiceLocator.getScheduledPool().scheduleAtFixedRate(scheduledWriter,
        getExecutionIntervalSeconds(), getExecutionIntervalSeconds(), TimeUnit.SECONDS);
}
/**
 * Flushes the write queue on shutdown by executing the shutdown writer on the
 * KSB thread pool (Spring DisposableBean callback).
 */
@Override
public void destroy() throws Exception {
    KSBServiceLocator.getThreadPool().execute(shutdownWriter);
}
/**
* store the person to the database, but do this an alternate thread to
* prevent transaction issues since this service is non-transactional
*
* @author Kuali Rice Team ([email protected])
*
*/
private class EntityArchiveWriter implements Callable {
    // flag used to prevent multiple processes from being submitted at once
    AtomicBoolean currentlySubmitted = new AtomicBoolean(false);
    // Orders two Comparables, treating null as smaller than any non-null value.
    private final Comparator<Comparable> nullSafeComparator = new Comparator<Comparable>() {
        @Override
        public int compare(Comparable i1, Comparable i2) {
            if (i1 != null && i2 != null) {
                return i1.compareTo(i2);
            } else if (i1 == null) {
                if (i2 == null) {
                    return 0;
                } else {
                    return -1;
                }
            } else { // if (entityId2 == null) {
                return 1;
            }
        };
    };
    /**
     * Comparator that attempts to impose a total ordering on EntityDefault instances.
     * A total order is needed so concurrent writers insert rows in the same order,
     * avoiding database deadlocks (see {@link #call()}).
     */
    private final Comparator<EntityDefault> kediComparator = new Comparator<EntityDefault>() {
        /**
         * compares by entityId value, tie-breaking on the sorted principal id list
         * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
         */
        @Override
        public int compare(EntityDefault o1, EntityDefault o2) {
            String entityId1 = (o1 == null) ? null : o1.getEntityId();
            String entityId2 = (o2 == null) ? null : o2.getEntityId();
            int result = nullSafeComparator.compare(entityId1, entityId2);
            if (result == 0) {
                result = getPrincipalIdsString(o1).compareTo(getPrincipalIdsString(o2));
            }
            return result;
        }
        /**
         * This method builds a newline delimited String containing the identity's principal IDs in sorted order
         *
         * @param entity the identity whose principal ids are joined; may be null
         * @return the sorted, newline-delimited principal ids, or "" when there are none
         */
        private String getPrincipalIdsString(EntityDefault entity) {
            String result = "";
            if (entity != null) {
                List<Principal> principals = entity.getPrincipals();
                if (principals != null) {
                    if (principals.size() == 1) { // one
                        result = principals.get(0).getPrincipalId();
                    } else { // multiple
                        String [] ids = new String [principals.size()];
                        int insertIndex = 0;
                        for (Principal principal : principals) {
                            ids[insertIndex++] = principal.getPrincipalId();
                        }
                        Arrays.sort(ids);
                        result = StringUtils.join(ids, "\n");
                    }
                }
            }
            return result;
        }
    };
    // Returns true only for the caller that flips the flag from false to true,
    // i.e. at most one submission of this writer is outstanding at a time.
    public boolean requestSubmit() {
        return currentlySubmitted.compareAndSet(false, true);
    }
    /**
     * Call that tries to flush the write queue.
     * @see Callable#call()
     */
    @Override
    public Object call() {
        try {
            // the strategy is to grab chunks of entities, dedupe & sort them, and insert them in a big
            // batch to reduce transaction overhead. Sorting is done so insertion order is guaranteed, which
            // prevents deadlocks between concurrent writers to the database.
            TransactionTemplate template = new TransactionTemplate(transactionManager);
            template.execute(new TransactionCallback() {
                @Override
                public Object doInTransaction(TransactionStatus status) {
                    EntityDefault entity = null;
                    ArrayList<EntityDefault> entitiesToInsert = new ArrayList<EntityDefault>(getMaxWriteQueueSize());
                    Set<String> deduper = new HashSet<String>(getMaxWriteQueueSize());
                    // order is important in this conditional so that elements aren't dequeued and then ignored
                    while (entitiesToInsert.size() < getMaxWriteQueueSize() && null != (entity = writeQueue.poll())) {
                        if (deduper.add(entity.getEntityId())) {
                            entitiesToInsert.add(entity);
                        }
                    }
                    Collections.sort(entitiesToInsert, kediComparator);
                    List<EntityDefaultInfoCacheBo> entityCache = new ArrayList<EntityDefaultInfoCacheBo>(entitiesToInsert.size());
                    for (EntityDefault entityToInsert : entitiesToInsert) {
                        entityCache.add(new EntityDefaultInfoCacheBo( entityToInsert ));
                    }
                    // single batched save instead of per-entity saves (see commented-out loop below)
                    businessObjectService.save(entityCache);
                    //for (EntityDefault entityToInsert : entitiesToInsert) {
                    //    businessObjectService.save( new EntityDefaultInfoCacheBo( entityToInsert ) );
                    //}
                    return null;
                }
            });
        } finally { // make sure our running flag is unset, otherwise we'll never run again
            currentlySubmitted.compareAndSet(true, false);
        }
        return Boolean.TRUE;
    }
}
/**
* A class encapsulating a {@link ConcurrentLinkedQueue} and an {@link AtomicInteger} to
* provide fast offer(enqueue)/poll(dequeue) and size checking. Size may be approximate due to concurrent
* activity, but for our purposes that is fine.
*
* @author Kuali Rice Team ([email protected])
*
*/
private static class WriteQueue {
    // approximate element count, maintained separately because
    // ConcurrentLinkedQueue.size() is O(n)
    AtomicInteger writeQueueSize = new AtomicInteger(0);
    ConcurrentLinkedQueue<EntityDefault> queue = new ConcurrentLinkedQueue<EntityDefault>();
    /** Enqueues the entity and returns the (approximate) size after insertion. */
    public int offerAndGetSize(EntityDefault entity) {
        queue.add(entity);
        return writeQueueSize.incrementAndGet();
    }
    /** Dequeues the head element, or returns null when the queue is empty. */
    private EntityDefault poll() {
        final EntityDefault head = queue.poll();
        if (head == null) {
            return null;
        }
        // only decrement when something was actually removed
        writeQueueSize.decrementAndGet();
        return head;
    }
}
/**
* decorator for a callable to log a message before it is executed
*
* @author Kuali Rice Team ([email protected])
*
*/
private static class PreLogCallableWrapper<A> implements Callable<A> {
    // the wrapped callable that performs the real work
    private final Callable inner;
    // log level to emit the message at
    private final Level level;
    // message logged immediately before delegating
    private final String message;
    public PreLogCallableWrapper(Callable inner, Level level, String message) {
        this.inner = inner;
        this.level = level;
        this.message = message;
    }
    /**
     * logs the message then calls the inner Callable
     *
     * @see java.util.concurrent.Callable#call()
     */
    @Override
    @SuppressWarnings("unchecked")
    public A call() throws Exception {
        LOG.log(level, message);
        return (A)inner.call();
    }
}
/**
* Adapts a Callable to be Runnable
*
* @author Kuali Rice Team ([email protected])
*
*/
private static class CallableAdapter implements Runnable {
    // the wrapped callable whose checked exceptions are converted to unchecked ones
    private final Callable callable;
    public CallableAdapter(Callable callable) {
        this.callable = callable;
    }
    /** Invokes the wrapped callable, rethrowing any checked exception as a RuntimeException. */
    @Override
    public void run() {
        try {
            callable.call();
        } catch (Exception exception) {
            // Runnable.run cannot throw checked exceptions; preserve the cause
            throw new RuntimeException(exception);
        }
    }
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.git.producer;
import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.camel.CamelExecutionException;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.git.GitConstants;
import org.apache.camel.component.git.GitTestSupport;
import org.eclipse.jgit.api.CreateBranchCommand.SetupUpstreamMode;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeResult;
import org.eclipse.jgit.api.PullResult;
import org.eclipse.jgit.api.RemoteAddCommand;
import org.eclipse.jgit.api.Status;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.transport.RemoteConfig;
import org.eclipse.jgit.transport.URIish;
import org.junit.Test;
public class GitProducerTest extends GitTestSupport {
/** Clones the remote test repository and verifies that a .git directory was created. */
@Test
public void cloneTest() throws Exception {
    template.sendBody("direct:clone", "");
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
}
/** Initializes a fresh repository and verifies that a .git directory was created. */
@Test
public void initTest() throws Exception {
    template.sendBody("direct:init", "");
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
}
/** Commits a file, runs the checkout route, and verifies the test branch was created. */
@Test
public void checkoutTest() throws Exception {
    // Init: stage and commit a file so the repository has a HEAD to branch from
    Git git = getGitTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    // Test camel-git checkout
    template.sendBody("direct:checkout", "");
    // Check: the branch list must now contain refs/heads/<branchTest>
    List<Ref> ref = git.branchList().call();
    boolean branchCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/heads/" + branchTest)) {
            branchCreated = true;
        }
    }
    assertTrue(branchCreated);
    git.close();
}
/** Creates a tag via the route, then checks out a new branch from that specific tag. */
@Test
public void checkoutSpecificTagTest() throws Exception {
    // Init: stage and commit a file
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    // Test camel-git create tag
    template.sendBody("direct:create-tag", "");
    // Check: the tag must exist
    List<Ref> ref = git.tagList().call();
    boolean tagCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/tags/" + tagTest)) {
            tagCreated = true;
        }
    }
    assertTrue(tagCreated);
    // Test camel-git checkout of the specific tag into a new branch
    template.sendBody("direct:checkout-specific-tag", "");
    // Check: the branch must exist
    ref = git.branchList().call();
    boolean branchCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/heads/" + branchTest)) {
            branchCreated = true;
        }
    }
    assertTrue(branchCreated);
    git.close();
}
/** A second clone into the same local directory must fail with a CamelExecutionException. */
@Test(expected = CamelExecutionException.class)
public void doubleCloneOperationTest() throws Exception {
    template.sendBody("direct:clone", "");
    // cloning again into an already-populated directory throws
    template.sendBody("direct:clone", "");
    // not reached: the expected exception is raised by the second clone above
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
}
/** Clones the repository, then pulls from the remote and expects a successful result. */
@Test
public void pullTest() throws Exception {
    template.sendBody("direct:clone", "");
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    PullResult pr = template.requestBody("direct:pull", "", PullResult.class);
    assertTrue(pr.isSuccessful());
}
/** Adds a file through the add route and verifies it shows up as staged. */
@Test
public void addTest() throws Exception {
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    // Test camel-git add
    template.sendBodyAndHeader("direct:add", "", GitConstants.GIT_FILE_NAME, filenameToAdd);
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.close();
}
/** Stages a file, removes it through the remove route, and verifies it is unstaged. */
@Test
public void removeTest() throws Exception {
    // Init: stage a file
    Git git = getGitTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    // Test camel-git remove
    template.sendBodyAndHeader("direct:remove", "", GitConstants.GIT_FILE_NAME, filenameToAdd);
    // Check: after committing, the file must no longer be staged
    gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    git.commit().setMessage(commitMessage).call();
    validateGitLogs(git, commitMessage);
    status = git.status().call();
    assertFalse(status.getAdded().contains(filenameToAdd));
    git.close();
}
/** Stages a file and commits it through the commit route, then validates the commit log. */
@Test
public void commitTest() throws Exception {
    // Init: create and stage a file
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    // Test camel-git commit
    template.sendBodyAndHeader("direct:commit", "", GitConstants.GIT_COMMIT_MESSAGE, commitMessage);
    // Check
    validateGitLogs(git, commitMessage);
    git.close();
}
/** Committing again with no staged changes still creates an (empty) second commit. */
@Test
public void commitTestEmpty() throws Exception {
    // Init: commit a file
    Git git = getGitTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    // Test camel-git commit (with no changes)
    template.requestBodyAndHeader("direct:commit", "", GitConstants.GIT_COMMIT_MESSAGE, commitMessage);
    // Check that it has been committed twice
    validateGitLogs(git, commitMessage, commitMessage);
    git.close();
}
/** With allowEmpty=false, committing when nothing is staged must fail. */
@Test(expected = CamelExecutionException.class)
public void commitTestAllowEmptyFalse() throws Exception {
    // Init: commit a file so the working tree is clean
    Git git = getGitTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    // Test camel-git commit (with allowEmpty set to false)
    Map<String, Object> headers = new HashMap<>();
    headers.put(GitConstants.GIT_COMMIT_MESSAGE, commitMessage);
    template.requestBodyAndHeaders("direct:commit-not-allow-empty", "", headers);
    // Check : An exception should have been raised
}
/** Runs add, status and commit through a single composed route and validates the log. */
@Test
public void addAndStatusAndCommitTest() throws Exception {
    // Initialize repository using JGit
    Repository repository = getTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Git git = new Git(repository);
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    // Checking camel route
    Map<String, Object> headers = new HashMap<>();
    headers.put(GitConstants.GIT_FILE_NAME, filenameToAdd);
    headers.put(GitConstants.GIT_COMMIT_MESSAGE, commitMessage);
    template.requestBodyAndHeaders("direct:add-status-commit", "", headers);
    validateGitLogs(git, commitMessage);
    git.close();
}
/** Commits on a newly checked-out branch and validates both commits are in the log. */
@Test
public void commitBranchTest() throws Exception {
    // Init: commit a file on master
    Git git = getGitTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    validateGitLogs(git, commitMessage);
    git.checkout().setCreateBranch(true).setName(branchTest).setUpstreamMode(SetupUpstreamMode.SET_UPSTREAM).call();
    // Test camel-git commit (with branch)
    template.send("direct:commit-branch", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(GitConstants.GIT_COMMIT_MESSAGE, commitMessageBranch);
        }
    });
    validateGitLogs(git, commitMessageBranch, commitMessage);
    git.close();
}
/** Commits all staged changes via the commit-all route and validates the log message. */
@Test
public void commitAllTest() throws Exception {
    // Init: stage a file
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    // Test camel-git commit-all; the commit message is supplied via the exchange header
    template.send("direct:commit-all", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(GitConstants.GIT_COMMIT_MESSAGE, commitMessageAll);
        }
    });
    // Check
    validateGitLogs(git, commitMessageAll);
    git.close();
}
/** Adds and commit-alls on a secondary branch and validates both commits in the log. */
@Test
public void commitAllDifferentBranchTest() throws Exception {
    // Init: commit a file on master, then check out the test branch
    Git git = getGitTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    validateGitLogs(git, commitMessage);
    git.checkout().setCreateBranch(true).setName(branchTest).setUpstreamMode(SetupUpstreamMode.SET_UPSTREAM).call();
    File fileToAdd1 = new File(gitLocalRepo, filenameBranchToAdd);
    fileToAdd1.createNewFile();
    // Test camel-git add and commit (different branches)
    template.send("direct:add-on-branch", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(GitConstants.GIT_FILE_NAME, filenameBranchToAdd);
        }
    });
    template.send("direct:commit-all-branch", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(GitConstants.GIT_COMMIT_MESSAGE, commitMessageAll);
        }
    });
    // Check
    validateGitLogs(git, commitMessageAll, commitMessage);
    git.close();
}
/** Removes a file on a secondary branch via the route and verifies it is unstaged there. */
@Test
public void removeFileBranchTest() throws Exception {
    // Init: commit a file on master, then commit another on the test branch
    Git git = getGitTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    validateGitLogs(git, commitMessage);
    git.checkout().setCreateBranch(true).setName(branchTest).setUpstreamMode(SetupUpstreamMode.SET_UPSTREAM).call();
    File fileToAdd1 = new File(gitLocalRepo, filenameBranchToAdd);
    fileToAdd1.createNewFile();
    git.add().addFilepattern(filenameBranchToAdd).call();
    git.commit().setMessage(commitMessageAll).call();
    validateGitLogs(git, commitMessageAll, commitMessage);
    // Test camel-git remove
    template.send("direct:remove-on-branch", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(GitConstants.GIT_FILE_NAME, filenameToAdd);
        }
    });
    // Check on the branch itself
    git.checkout().setCreateBranch(false).setName(branchTest).call();
    status = git.status().call();
    assertFalse(status.getAdded().contains(filenameToAdd));
    git.close();
}
/** Creates a branch through the route and verifies it appears in the branch list. */
@Test
public void createBranchTest() throws Exception {
    // Init: commit a file so a branch can be created from HEAD
    Git git = getGitTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    // Test camel-git create-branch
    template.sendBody("direct:create-branch", "");
    // Check
    List<Ref> ref = git.branchList().call();
    boolean branchCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/heads/" + branchTest)) {
            branchCreated = true;
        }
    }
    assertTrue(branchCreated);
    git.close();
}
/** Deletes a previously created branch through the route and verifies it is gone. */
@Test
public void deleteBranchTest() throws Exception {
    // Init: commit a file and create the branch with JGit
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    git.branchCreate().setName(branchTest).call();
    List<Ref> ref = git.branchList().call();
    boolean branchCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/heads/" + branchTest)) {
            branchCreated = true;
        }
    }
    assertTrue(branchCreated);
    // Test camel-git delete-branch
    template.sendBody("direct:delete-branch", "");
    // Check: the branch must no longer be listed
    ref = git.branchList().call();
    branchCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/heads/" + branchTest)) {
            branchCreated = true;
        }
    }
    assertFalse(branchCreated);
    git.close();
}
/** Queries repository status through the route and compares it with JGit's own status. */
@Test
public void statusTest() throws Exception {
    // Init: stage a file
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    // Test camel-git status
    Status status = template.requestBody("direct:status", "", Status.class);
    // Check against JGit's view of the repository
    assertTrue(status.getAdded().contains(filenameToAdd));
    Status gitStatus = git.status().call();
    assertEquals(gitStatus.getAdded(), status.getAdded());
    git.close();
}
/** Queries status on a secondary branch through the route and compares with JGit. */
@Test
public void statusBranchTest() throws Exception {
    // Init: commit a file and create the test branch
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    git.branchCreate().setName(branchTest).call();
    List<Ref> ref = git.branchList().call();
    boolean branchCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/heads/" + branchTest)) {
            branchCreated = true;
        }
    }
    assertTrue(branchCreated);
    // stage a second file that only the branch status should report
    File fileToAddDifferent = new File(gitLocalRepo, filenameBranchToAdd);
    fileToAddDifferent.createNewFile();
    git.add().addFilepattern(filenameBranchToAdd).call();
    // Test camel-git status branch
    status = template.requestBody("direct:status-branch", "", Status.class);
    // Check
    assertTrue(status.getAdded().contains(filenameBranchToAdd));
    Status gitStatus = git.status().call();
    assertEquals(gitStatus.getAdded(), status.getAdded());
    git.close();
}
/** Retrieves the commit log through the route and compares it with JGit's log. */
@Test
public void logTest() throws Exception {
    // Init: commit a file
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    // Test camel-git log
    Iterable<RevCommit> revCommits = template.requestBody("direct:log", "", Iterable.class);
    // Check: both logs must list the same commits in the same order
    Iterator<RevCommit> gitLogs = git.log().call().iterator();
    for (RevCommit rev : revCommits) {
        RevCommit gitRevCommit = gitLogs.next();
        assertEquals(gitRevCommit.getName(), rev.getName());
    }
    git.close();
}
/** Retrieves the log of a secondary branch through the route and compares with JGit. */
@Test
public void logBranchTest() throws Exception {
    // Init: commit on master, create the branch, commit a second file
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    git.branchCreate().setName(branchTest).call();
    List<Ref> ref = git.branchList().call();
    boolean branchCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/heads/" + branchTest)) {
            branchCreated = true;
        }
    }
    assertTrue(branchCreated);
    File fileToAddDifferent = new File(gitLocalRepo, filenameBranchToAdd);
    fileToAddDifferent.createNewFile();
    git.add().addFilepattern(filenameBranchToAdd).call();
    git.commit().setMessage(commitMessageAll).call();
    // Test camel-git log (with branches)
    Iterable<RevCommit> revCommits = template.requestBody("direct:log-branch", "", Iterable.class);
    // Check: both logs must agree on ids and messages, in order
    Iterator<RevCommit> gitLogs = git.log().call().iterator();
    for (RevCommit rev : revCommits) {
        RevCommit gitRevCommit = gitLogs.next();
        assertEquals(gitRevCommit.getName(), rev.getName());
        assertEquals(gitRevCommit.getShortMessage(), rev.getShortMessage());
    }
    git.close();
}
/** Creates a tag through the route and verifies it appears in the tag list. */
@Test
public void createTagTest() throws Exception {
    // Init: commit a file so there is something to tag
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    // Test camel-git create tag
    template.sendBody("direct:create-tag", "");
    // Check
    List<Ref> ref = git.tagList().call();
    boolean tagCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/tags/" + tagTest)) {
            tagCreated = true;
        }
    }
    assertTrue(tagCreated);
    git.close();
}
/** Deletes a previously created tag through the route and verifies it is gone. */
@Test
public void deleteTagTest() throws Exception {
    // Init: commit a file and tag it with JGit
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    git.tag().setName(tagTest).call();
    List<Ref> ref = git.tagList().call();
    boolean tagCreated = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/tags/" + tagTest)) {
            tagCreated = true;
        }
    }
    assertTrue(tagCreated);
    // Test camel-git delete-tag
    template.sendBody("direct:delete-tag", "");
    // Check: the tag must no longer be listed
    ref = git.tagList().call();
    boolean tagExists = false;
    for (Ref refInternal : ref) {
        if (refInternal.getName().equals("refs/tags/" + tagTest)) {
            tagExists = true;
        }
    }
    assertFalse(tagExists);
    git.close();
}
/** Lists branches through the route and verifies the created branch is present. */
@Test
public void showBranchesTest() throws Exception {
    // Init: commit a file and create the branch with JGit
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    git.branchCreate().setName(branchTest).call();
    // Test camel-git show-branches
    List<Ref> branches = template.requestBody("direct:show-branches", "", List.class);
    // Check (primitive boolean: no need to box the flag)
    boolean branchExists = false;
    for (Ref reference : branches) {
        if (("refs/heads/" + branchTest).equals(reference.getName())) {
            branchExists = true;
        }
    }
    assertTrue(branchExists);
    git.close();
}
/** Cherry-picks a commit onto the test branch via the route and validates the log. */
@Test
public void cherryPickTest() throws Exception {
    // Init: commit a file, create the branch, then make a second commit
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    git.branchCreate().setName(branchTest).call();
    List<Ref> branches = git.branchList().call();
    boolean branchExists = false;
    for (Ref reference : branches) {
        if (("refs/heads/" + branchTest).equals(reference.getName())) {
            branchExists = true;
        }
    }
    assertTrue(branchExists);
    String fileToAdd1Name = "filetest1test.txt";
    File fileToAdd1 = new File(gitLocalRepo, fileToAdd1Name);
    fileToAdd1.createNewFile();
    git.add().addFilepattern(fileToAdd1Name).call();
    status = git.status().call();
    assertTrue(status.getAdded().contains(fileToAdd1Name));
    git.commit().setMessage("Test second commit").call();
    Iterable<RevCommit> logs = git.log().call();
    validateGitLogs(git, "Test second commit", commitMessage);
    // the id of the most recent commit, used as the cherry-pick target
    String id = logs.iterator().next().getName();
    // Test camel-git cherry-pick
    template.sendBodyAndHeader("direct:cherrypick", "", GitConstants.GIT_COMMIT_ID, id);
    // Check
    validateGitLogs(git, "Test second commit", commitMessage);
    git.close();
}
/** Cherry-picks a commit onto master via the route and validates master's log. */
@Test
public void cherryPickBranchToMasterTest() throws Exception {
    // Init: commit a file, create the branch, then make a second commit
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    git.add().addFilepattern(filenameToAdd).call();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Status status = git.status().call();
    assertTrue(status.getAdded().contains(filenameToAdd));
    git.commit().setMessage(commitMessage).call();
    git.branchCreate().setName(branchTest).call();
    List<Ref> branches = git.branchList().call();
    boolean branchExists = false;
    for (Ref reference : branches) {
        if (("refs/heads/" + branchTest).equals(reference.getName())) {
            branchExists = true;
        }
    }
    assertTrue(branchExists);
    String fileToAdd1Name = "filetest1test.txt";
    File fileToAdd1 = new File(gitLocalRepo, fileToAdd1Name);
    fileToAdd1.createNewFile();
    git.add().addFilepattern(fileToAdd1Name).call();
    status = git.status().call();
    assertTrue(status.getAdded().contains(fileToAdd1Name));
    git.commit().setMessage("Test second commit").call();
    Iterable<RevCommit> logs = git.log().call();
    validateGitLogs(git, "Test second commit", commitMessage);
    // the id of the most recent commit, used as the cherry-pick target
    String id = logs.iterator().next().getName();
    // Test camel-git cherry-pick (on master)
    template.sendBodyAndHeader("direct:cherrypick-master", "", GitConstants.GIT_COMMIT_ID, id);
    // Check master's log after switching back
    git.checkout().setCreateBranch(false).setName("refs/heads/master").call();
    validateGitLogs(git, "Test second commit", commitMessage);
    git.close();
}
/** Adds a remote through the route and verifies JGit sees the same configuration. */
@Test
public void remoteAddTest() throws Exception {
    Repository repository = getTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Git git = new Git(repository);
    // no remotes configured yet
    List<RemoteConfig> remoteConfigList = git.remoteList().call();
    assertTrue(remoteConfigList.isEmpty());
    Object result = template.requestBody("direct:remoteAdd", "");
    assertTrue(result instanceof RemoteConfig);
    RemoteConfig remoteConfig = (RemoteConfig)result;
    // exactly one remote must now exist and match the route's result
    remoteConfigList = git.remoteList().call();
    assertEquals(1, remoteConfigList.size());
    assertEquals(remoteConfigList.get(0).getName(), remoteConfig.getName());
    assertEquals(remoteConfigList.get(0).getURIs(), remoteConfig.getURIs());
    git.close();
}
/** Lists remotes through the route and compares with JGit's remote configuration. */
@Test
@SuppressWarnings("unchecked")
public void remoteListTest() throws Exception {
    // Init: configure an "origin" remote with JGit
    Repository repository = getTestRepository();
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    Git git = new Git(repository);
    RemoteAddCommand remoteAddCommand = git.remoteAdd();
    remoteAddCommand.setName("origin");
    remoteAddCommand.setUri(new URIish(remoteUriTest));
    remoteAddCommand.call();
    List<RemoteConfig> gitRemoteConfigs = git.remoteList().call();
    // Test camel-git remoteList
    Object result = template.requestBody("direct:remoteList", "");
    assertTrue(result instanceof List);
    // unchecked cast is safe: the route returns git.remoteList().call()
    List<RemoteConfig> remoteConfigs = (List<RemoteConfig>)result;
    assertEquals(gitRemoteConfigs.size(), remoteConfigs.size());
    assertEquals(gitRemoteConfigs.get(0).getName(), remoteConfigs.get(0).getName());
    assertEquals(gitRemoteConfigs.get(0).getURIs(), remoteConfigs.get(0).getURIs());
    git.close();
}
/** Cleans the working tree through the route and verifies the untracked file was removed. */
@Test
public void cleanTest() throws Exception {
    Git git = getGitTestRepository();
    File fileToAdd = new File(gitLocalRepo, filenameToAdd);
    fileToAdd.createNewFile();
    // Test camel-git clean (comment fixed: this exercises clean, not add)
    Set<String> cleaned = template.requestBodyAndHeader("direct:clean", "", GitConstants.GIT_FILE_NAME, filenameToAdd, Set.class);
    File gitDir = new File(gitLocalRepo, ".git");
    assertTrue(gitDir.exists());
    assertTrue(cleaned.contains(filenameToAdd));
    git.close();
}
@Test
public void gcTest() throws Exception {
Git git = getGitTestRepository();
File gitDir = new File(gitLocalRepo, ".git");
assertEquals(gitDir.exists(), true);
File fileToAdd = new File(gitLocalRepo, filenameToAdd);
fileToAdd.createNewFile();
git.add().addFilepattern(filenameToAdd).call();
Status status = git.status().call();
assertTrue(status.getAdded().contains(filenameToAdd));
git.commit().setMessage(commitMessage).call();
// Test camel-git commit (with no changes)
template.requestBodyAndHeader("direct:commit", "", GitConstants.GIT_COMMIT_MESSAGE, commitMessage);
// Check that it has been commited twice
validateGitLogs(git, commitMessage, commitMessage);
// Test camel-git add
Properties gcResult = template.requestBodyAndHeader("direct:gc", "", GitConstants.GIT_FILE_NAME, filenameToAdd, Properties.class);
assertNotNull(gcResult);
git.close();
}
@Test
public void mergeTest() throws Exception {
// Init
Git git = getGitTestRepository();
File gitDir = new File(gitLocalRepo, ".git");
assertEquals(gitDir.exists(), true);
File fileToAdd = new File(gitLocalRepo, filenameToAdd);
fileToAdd.createNewFile();
git.add().addFilepattern(filenameToAdd).call();
Status status = git.status().call();
assertTrue(status.getAdded().contains(filenameToAdd));
git.commit().setMessage(commitMessage).call();
validateGitLogs(git, commitMessage);
git.checkout().setCreateBranch(true).setName(branchTest).setUpstreamMode(SetupUpstreamMode.SET_UPSTREAM).call();
// Test camel-git commit (with branch)
template.send("direct:commit-branch", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(GitConstants.GIT_COMMIT_MESSAGE, commitMessageBranch);
}
});
validateGitLogs(git, commitMessageBranch, commitMessage);
// Test camel-git commit (with branch)
MergeResult result = template.requestBody("direct:merge", "", MergeResult.class);
assertEquals(result.getMergeStatus().toString(), "Fast-forward");
git.close();
}
@Test
public void showTagsTest() throws Exception {
// Init
Git git = getGitTestRepository();
File fileToAdd = new File(gitLocalRepo, filenameToAdd);
fileToAdd.createNewFile();
git.add().addFilepattern(filenameToAdd).call();
File gitDir = new File(gitLocalRepo, ".git");
assertEquals(gitDir.exists(), true);
Status status = git.status().call();
assertTrue(status.getAdded().contains(filenameToAdd));
git.commit().setMessage(commitMessage).call();
// Test camel-git create tag
template.sendBody("direct:create-tag", "");
// Check
List<Ref> result = template.requestBody("direct:show-tags", "", List.class);
boolean tagCreated = false;
for (Ref refInternal : result) {
if (refInternal.getName().equals("refs/tags/" + tagTest)) {
tagCreated = true;
}
}
assertEquals(true, tagCreated);
git.close();
}
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        // Declares one direct: endpoint per git operation under test. All routes
        // target the same local repository (gitLocalRepo); branch and tag names
        // come from the test fixture fields.
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:clone").to("git://" + gitLocalRepo + "?remotePath=https://github.com/oscerd/json-webserver-example.git&operation=clone");
                from("direct:init").to("git://" + gitLocalRepo + "?operation=init");
                from("direct:add").to("git://" + gitLocalRepo + "?operation=add");
                from("direct:checkout").to("git://" + gitLocalRepo + "?operation=checkout&branchName=" + branchTest);
                from("direct:checkout-specific-tag").to("git://" + gitLocalRepo + "?operation=checkout&branchName=" + branchTest + "&tagName=" + tagTest);
                from("direct:remove").to("git://" + gitLocalRepo + "?operation=remove");
                from("direct:add-on-branch").to("git://" + gitLocalRepo + "?operation=add&branchName=" + branchTest);
                // NOTE(review): this route uses operation=add — it looks like a
                // copy/paste of the line above; confirm it should be operation=remove.
                from("direct:remove-on-branch").to("git://" + gitLocalRepo + "?operation=add&branchName=" + branchTest);
                from("direct:commit").to("git://" + gitLocalRepo + "?operation=commit");
                from("direct:commit-not-allow-empty").to("git://" + gitLocalRepo + "?operation=commit&allowEmpty=false");
                from("direct:commit-branch").to("git://" + gitLocalRepo + "?operation=commit&branchName=" + branchTest);
                from("direct:commit-all").to("git://" + gitLocalRepo + "?operation=commit");
                from("direct:commit-all-branch").to("git://" + gitLocalRepo + "?operation=commit&branchName=" + branchTest);
                // Composite route: add, then inspect status, and only commit when
                // there are uncommitted changes.
                from("direct:add-status-commit").to("git://" + gitLocalRepo + "?operation=add").to("git://" + gitLocalRepo + "?operation=status").choice()
                        .when(simple("${body.hasUncommittedChanges()}")).log("Commiting changes...").to("git://" + gitLocalRepo + "?operation=commit").otherwise()
                        .log("Nothing to commit").end();
                from("direct:create-branch").to("git://" + gitLocalRepo + "?operation=createBranch&branchName=" + branchTest);
                from("direct:delete-branch").to("git://" + gitLocalRepo + "?operation=deleteBranch&branchName=" + branchTest);
                from("direct:status").to("git://" + gitLocalRepo + "?operation=status");
                from("direct:status-branch").to("git://" + gitLocalRepo + "?operation=status&branchName=" + branchTest);
                from("direct:log").to("git://" + gitLocalRepo + "?operation=log");
                from("direct:log-branch").to("git://" + gitLocalRepo + "?operation=log&branchName=" + branchTest);
                from("direct:create-tag").to("git://" + gitLocalRepo + "?operation=createTag&tagName=" + tagTest);
                from("direct:delete-tag").to("git://" + gitLocalRepo + "?operation=deleteTag&tagName=" + tagTest);
                from("direct:show-branches").to("git://" + gitLocalRepo + "?operation=showBranches");
                from("direct:cherrypick").to("git://" + gitLocalRepo + "?operation=cherryPick&branchName=" + branchTest);
                from("direct:cherrypick-master").to("git://" + gitLocalRepo + "?operation=cherryPick&branchName=refs/heads/master");
                from("direct:pull").to("git://" + gitLocalRepo + "?remoteName=origin&operation=pull");
                from("direct:clean").to("git://" + gitLocalRepo + "?operation=clean");
                from("direct:gc").to("git://" + gitLocalRepo + "?operation=gc");
                from("direct:remoteAdd").to("git://" + gitLocalRepo + "?operation=remoteAdd&remotePath=https://github.com/oscerd/json-webserver-example.git&remoteName=origin");
                from("direct:remoteList").to("git://" + gitLocalRepo + "?operation=remoteList");
                from("direct:merge").to("git://" + gitLocalRepo + "?operation=merge&branchName=" + branchTest);
                from("direct:show-tags").to("git://" + gitLocalRepo + "?operation=showTags");
            }
        };
    }
}
|
|
/*
* Copyright 1999-2011 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.druid.test.util;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Calendar;
import oracle.jdbc.OracleParameterMetaData;
import oracle.jdbc.OracleResultSetCache;
import oracle.jdbc.dcn.DatabaseChangeRegistration;
import oracle.sql.ARRAY;
import oracle.sql.BFILE;
import oracle.sql.BINARY_DOUBLE;
import oracle.sql.BINARY_FLOAT;
import oracle.sql.BLOB;
import oracle.sql.CHAR;
import oracle.sql.CLOB;
import oracle.sql.CustomDatum;
import oracle.sql.DATE;
import oracle.sql.Datum;
import oracle.sql.INTERVALDS;
import oracle.sql.INTERVALYM;
import oracle.sql.NUMBER;
import oracle.sql.OPAQUE;
import oracle.sql.ORAData;
import oracle.sql.RAW;
import oracle.sql.REF;
import oracle.sql.ROWID;
import oracle.sql.STRUCT;
import oracle.sql.StructDescriptor;
import oracle.sql.TIMESTAMP;
import oracle.sql.TIMESTAMPLTZ;
import oracle.sql.TIMESTAMPTZ;
import com.alibaba.druid.mock.MockPreparedStatement;
public class OracleMockPreparedStatement extends MockPreparedStatement implements oracle.jdbc.internal.OraclePreparedStatement {
private int executeBatch = 50;
private int rowPrefetch;
    public OracleMockPreparedStatement(OracleMockConnection conn, String sql){
        super(conn, sql);
        // Seed the prefetch size from the connection's default so that
        // getRowPrefetch() mirrors the real Oracle driver's per-connection behavior.
        this.rowPrefetch = conn.getDefaultRowPrefetch();
    }
@Override
public OracleParameterMetaData OracleGetParameterMetaData() throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public void defineParameterType(int arg0, int arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void defineParameterTypeBytes(int arg0, int arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void defineParameterTypeChars(int arg0, int arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
    @Override
    public int getExecuteBatch() {
        // Returns the batch size last set via setExecuteBatch (defaults to 50).
        return executeBatch;
    }
public ResultSet getReturnResultSet() throws SQLException {
// TODO Auto-generated method stub
return null;
}
public void registerReturnParameter(int arg0, int arg1) throws SQLException {
// TODO Auto-generated method stub
}
public void registerReturnParameter(int arg0, int arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
public void registerReturnParameter(int arg0, int arg1, String arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public int sendBatch() throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public void setARRAY(int arg0, ARRAY arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setARRAYAtName(String arg0, ARRAY arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setArrayAtName(String arg0, Array arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setAsciiStreamAtName(String arg0, InputStream arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBFILE(int arg0, BFILE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBFILEAtName(String arg0, BFILE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBLOB(int arg0, BLOB arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBLOBAtName(String arg0, BLOB arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBfile(int arg0, BFILE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBfileAtName(String arg0, BFILE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBigDecimalAtName(String arg0, BigDecimal arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryDouble(int arg0, double arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryDouble(int arg0, BINARY_DOUBLE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryDoubleAtName(String arg0, double arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryDoubleAtName(String arg0, BINARY_DOUBLE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryFloat(int arg0, float arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryFloat(int arg0, BINARY_FLOAT arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryFloatAtName(String arg0, float arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryFloatAtName(String arg0, BINARY_FLOAT arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryStreamAtName(String arg0, InputStream arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBlobAtName(String arg0, Blob arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBooleanAtName(String arg0, boolean arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setByteAtName(String arg0, byte arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBytesAtName(String arg0, byte[] arg1) throws SQLException {
// TODO Auto-generated method stub
}
public void setBytesForBlob(int arg0, byte[] arg1) throws SQLException {
// TODO Auto-generated method stub
}
public void setBytesForBlobAtName(String arg0, byte[] arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCHAR(int arg0, CHAR arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCHARAtName(String arg0, CHAR arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCLOB(int arg0, CLOB arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCLOBAtName(String arg0, CLOB arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setClobAtName(String arg0, Clob arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCursor(int arg0, ResultSet arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCursorAtName(String arg0, ResultSet arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCustomDatum(int arg0, CustomDatum arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCustomDatumAtName(String arg0, CustomDatum arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setDATE(int arg0, DATE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setDATEAtName(String arg0, DATE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setDateAtName(String arg0, Date arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setDisableStmtCaching(boolean arg0) {
// TODO Auto-generated method stub
}
@Override
public void setDoubleAtName(String arg0, double arg1) throws SQLException {
// TODO Auto-generated method stub
}
    @Override
    public void setExecuteBatch(int executeBatch) throws SQLException {
        // Stored as-is; this mock performs no validation of the value.
        this.executeBatch = executeBatch;
    }
@Override
public void setFixedCHAR(int arg0, String arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setFixedCHARAtName(String arg0, String arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setFloatAtName(String arg0, float arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setFormOfUse(int arg0, short arg1) {
// TODO Auto-generated method stub
}
@Override
public void setINTERVALDS(int arg0, INTERVALDS arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setINTERVALDSAtName(String arg0, INTERVALDS arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setINTERVALYM(int arg0, INTERVALYM arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setINTERVALYMAtName(String arg0, INTERVALYM arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setIntAtName(String arg0, int arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setLongAtName(String arg0, long arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNUMBER(int arg0, NUMBER arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNUMBERAtName(String arg0, NUMBER arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNullAtName(String arg0, int arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNullAtName(String arg0, int arg1, String arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setOPAQUE(int arg0, OPAQUE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setOPAQUEAtName(String arg0, OPAQUE arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setORAData(int arg0, ORAData arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setORADataAtName(String arg0, ORAData arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setObjectAtName(String arg0, Object arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setObjectAtName(String arg0, Object arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setObjectAtName(String arg0, Object arg1, int arg2, int arg3) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setOracleObject(int arg0, Datum arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setOracleObjectAtName(String arg0, Datum arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setPlsqlIndexTable(int arg0, Object arg1, int arg2, int arg3, int arg4, int arg5) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setRAW(int arg0, RAW arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setRAWAtName(String arg0, RAW arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setREF(int arg0, REF arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setREFAtName(String arg0, REF arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setROWID(int arg0, ROWID arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setROWIDAtName(String arg0, ROWID arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setRefAtName(String arg0, Ref arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setRefType(int arg0, REF arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setRefTypeAtName(String arg0, REF arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setSTRUCT(int arg0, STRUCT arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setSTRUCTAtName(String arg0, STRUCT arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setShortAtName(String arg0, short arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setStringAtName(String arg0, String arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setStringForClob(int arg0, String arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setStringForClobAtName(String arg0, String arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setStructDescriptor(int arg0, StructDescriptor arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setStructDescriptorAtName(String arg0, StructDescriptor arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setTIMESTAMP(int arg0, TIMESTAMP arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setTIMESTAMPAtName(String arg0, TIMESTAMP arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setTIMESTAMPLTZ(int arg0, TIMESTAMPLTZ arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setTIMESTAMPLTZAtName(String arg0, TIMESTAMPLTZ arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setTIMESTAMPTZ(int arg0, TIMESTAMPTZ arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setTIMESTAMPTZAtName(String arg0, TIMESTAMPTZ arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setTimeAtName(String arg0, Time arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setTimestampAtName(String arg0, Timestamp arg1) throws SQLException {
// TODO Auto-generated method stub
}
public void setTimestampAtName(String arg0, Timestamp arg1, Calendar arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setURLAtName(String arg0, URL arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setUnicodeStreamAtName(String arg0, InputStream arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void clearDefines() throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void closeWithKey(String arg0) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public int creationState() {
// TODO Auto-generated method stub
return 0;
}
@Override
public void defineColumnType(int arg0, int arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void defineColumnType(int arg0, int arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void defineColumnType(int arg0, int arg1, String arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void defineColumnType(int arg0, int arg1, int arg2, short arg3) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void defineColumnTypeBytes(int arg0, int arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void defineColumnTypeChars(int arg0, int arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
public int getLobPrefetchSize() {
// TODO Auto-generated method stub
return 0;
}
public long getRegisteredQueryId() throws SQLException {
// TODO Auto-generated method stub
return 0;
}
public String[] getRegisteredTableNames() throws SQLException {
// TODO Auto-generated method stub
return null;
}
    @Override
    public int getRowPrefetch() {
        // Returns the value set in the constructor (connection default) or the
        // last value passed to setRowPrefetch.
        return rowPrefetch;
    }
@Override
public boolean isNCHAR(int arg0) throws SQLException {
// TODO Auto-generated method stub
return false;
}
public void setLobPrefetchSize(int arg0) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setResultSetCache(OracleResultSetCache arg0) throws SQLException {
// TODO Auto-generated method stub
}
    @Override
    public void setRowPrefetch(int rowPrefetch) throws SQLException {
        // Stored as-is; this mock performs no validation of the value.
        this.rowPrefetch = rowPrefetch;
    }
public long getChecksum() throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public boolean getFixedString() {
// TODO Auto-generated method stub
return false;
}
@Override
public int getcacheState() {
// TODO Auto-generated method stub
return 0;
}
@Override
public boolean getserverCursor() {
// TODO Auto-generated method stub
return false;
}
@Override
public int getstatementType() {
// TODO Auto-generated method stub
return 0;
}
@Override
public void setFixedString(boolean arg0) {
// TODO Auto-generated method stub
}
@Override
public void enterExplicitCache() throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void enterImplicitCache() throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void exitExplicitCacheToActive() throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void exitExplicitCacheToClose() throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void exitImplicitCacheToActive() throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void exitImplicitCacheToClose() throws SQLException {
// TODO Auto-generated method stub
}
public String getOriginalSql() throws SQLException {
// TODO Auto-generated method stub
return null;
}
public void setCharacterStreamAtName(String arg0, Reader arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCheckBindTypes(boolean arg0) {
// TODO Auto-generated method stub
}
@Override
public void setInternalBytes(int arg0, byte[] arg1, int arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setAsciiStreamAtName(String arg0, InputStream arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setAsciiStreamAtName(String arg0, InputStream arg1, long arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryStreamAtName(String arg0, InputStream arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBinaryStreamAtName(String arg0, InputStream arg1, long arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBlobAtName(String arg0, InputStream arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setBlobAtName(String arg0, InputStream arg1, long arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCharacterStreamAtName(String arg0, Reader arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setCharacterStreamAtName(String arg0, Reader arg1, long arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setClobAtName(String arg0, Reader arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setClobAtName(String arg0, Reader arg1, long arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setDateAtName(String arg0, Date arg1, Calendar arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNCharacterStreamAtName(String arg0, Reader arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNCharacterStreamAtName(String arg0, Reader arg1, long arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNClobAtName(String arg0, NClob arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNClobAtName(String arg0, Reader arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNClobAtName(String arg0, Reader arg1, long arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setNStringAtName(String arg0, String arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setRowIdAtName(String arg0, RowId arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setSQLXMLAtName(String arg0, SQLXML arg1) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setTimeAtName(String arg0, Time arg1, Calendar arg2) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void setDatabaseChangeRegistration(DatabaseChangeRegistration arg0) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public SqlKind getSqlKind() throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public void setSnapshotSCN(long arg0) throws SQLException {
// TODO Auto-generated method stub
}
}
|
|
package com.microsoft.recognizers.text.number.extractors;
import com.microsoft.recognizers.text.ExtractResult;
import com.microsoft.recognizers.text.IExtractor;
import com.microsoft.recognizers.text.ParseResult;
import com.microsoft.recognizers.text.number.NumberRangeConstants;
import com.microsoft.recognizers.text.number.parsers.BaseNumberParser;
import com.microsoft.recognizers.text.utilities.RegExpUtility;
import org.javatuples.Pair;
import org.javatuples.Triplet;
import java.util.*;
import java.util.regex.MatchResult;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public abstract class BaseNumberRangeExtractor implements IExtractor {
private final BaseNumberExtractor numberExtractor;
private final BaseNumberExtractor ordinalExtractor;
private final BaseNumberParser numberParser;
protected abstract Map<Pattern, String> getRegexes();
    protected String getExtractType() {
        // Subclasses override this to tag ExtractResults with their type name;
        // the base implementation uses an empty tag.
        return "";
    }
    // Collaborators used to extract and parse the individual numbers that make
    // up a range ("number1" / "number2" regex groups).
    protected BaseNumberRangeExtractor(BaseNumberExtractor numberExtractor, BaseNumberExtractor ordinalExtractor, BaseNumberParser numberParser) {
        this.numberExtractor = numberExtractor;
        this.ordinalExtractor = ordinalExtractor;
        this.numberParser = numberParser;
    }
    @Override
    public List<ExtractResult> extract(String source) {
        // Extracts number-range mentions (e.g. "between 2 and 5") from the text.
        // Returns an empty list for null/empty input.
        if (source == null || source.isEmpty()) {
            return Collections.emptyList();
        }
        List<ExtractResult> result = new ArrayList<>();
        // Maps the (start, length) of each accepted match to its regex type tag.
        Map<Pair<Integer, Integer>, String> matchSource = new HashMap<>();
        // Character-level bitmap of positions covered by at least one match.
        boolean[] matched = new boolean[source.length()];
        Arrays.fill(matched, false);
        // Keep only the patterns that match the source at least once.
        List<Pair<Matcher, String>> matches = new ArrayList<>();
        getRegexes().forEach((k, value) -> {
            Matcher matcher = k.matcher(source);
            if(matcher.find()) {
                // reset() so the exhaustive scan below starts from the beginning.
                matcher.reset();
                matches.add(Pair.with(matcher, value));
            }
        });
        for(Pair<Matcher, String> pair : matches) {
            Matcher matcher = pair.getValue0();
            String value = pair.getValue1();
            while (matcher.find()) {
                int start = NumberRangeConstants.INVALID_NUM;
                int length = NumberRangeConstants.INVALID_NUM;
                Pair<Integer, Integer> startAndLength = getMatchedStartAndLength(matcher, value, source, start, length);
                start = startAndLength.getValue0();
                length = startAndLength.getValue1();
                // Negative start / non-positive length marks a rejected match.
                if (start >= 0 && length > 0) {
                    for (int j = 0; j < length; j++) {
                        matched[start + j] = true;
                    }
                    // Keep Source Data for extra information
                    matchSource.put(Pair.with(start, length), value);
                }
            }
        }
        // Walk the bitmap and emit one ExtractResult per maximal matched run
        // whose exact (start, length) was recorded above.
        int last = -1;
        for (int i = 0; i < source.length(); i++) {
            if (matched[i]) {
                if (i + 1 == source.length() || !matched[i + 1]) {
                    int start = last + 1;
                    int length = i - last;
                    String substr = source.substring(start, start + length);
                    Optional<Pair<Integer, Integer>> srcMatches = matchSource.keySet().stream().filter(o -> o.getValue0() == start && o.getValue1() == length).findFirst();
                    if (srcMatches.isPresent()) {
                        Pair<Integer, Integer> srcMatch = srcMatches.get();
                        ExtractResult er = new ExtractResult(start, length, substr, getExtractType(), matchSource.containsKey(srcMatch) ? matchSource.get(srcMatch) : null);
                        result.add(er);
                    }
                }
            } else {
                last = i;
            }
        }
        return result;
    }
/**
 * Computes the start offset and length of the matched number-range span within
 * {@code source}, validating the number captured in the named groups
 * {@code number1}/{@code number2} of {@code match}.
 *
 * <p>For two-number range types, both sides are validated; for "till" ranges the
 * two numbers must additionally share a type and be ordered (num1 &lt;= num2).
 * On validation failure, both values are set to
 * {@link NumberRangeConstants#INVALID_NUM}.
 *
 * @param match  the regex match whose named groups hold the number strings
 * @param type   the range type constant being matched
 * @param source the full input text
 * @param start  initial start offset (returned unchanged when extraction fails)
 * @param length initial length (returned unchanged when extraction fails)
 * @return a pair of (start, length), possibly INVALID_NUM markers
 */
private Pair<Integer, Integer> getMatchedStartAndLength(Matcher match, String type, String source, int start, int length) {
    Map<String, String> groupValues = RegExpUtility.getNamedGroups(match);
    String numberStr1 = groupValues.containsKey("number1") ? groupValues.get("number1") : "";
    String numberStr2 = groupValues.containsKey("number2") ? groupValues.get("number2") : "";
    if (type.contains(NumberRangeConstants.TWONUM)) {
        List<ExtractResult> extractNumList1 = extractNumberAndOrdinalFromStr(numberStr1);
        List<ExtractResult> extractNumList2 = extractNumberAndOrdinalFromStr(numberStr2);
        if (extractNumList1 != null && extractNumList2 != null) {
            if (type.contains(NumberRangeConstants.TWONUMTILL)) {
                // num1 must have the same type as num2
                if (!extractNumList1.get(0).type.equals(extractNumList2.get(0).type)) {
                    return Pair.with(start, length);
                }
                // num1 must not be greater than num2
                ParseResult numExt1 = numberParser.parse(extractNumList1.get(0));
                ParseResult numExt2 = numberParser.parse(extractNumList2.get(0));
                double num1 = numExt1.value != null ? (double) numExt1.value : 0;
                // BUGFIX: the null guard must inspect numExt2.value (it previously checked
                // numExt1.value), which could NPE on the unboxing cast or silently drop num2.
                double num2 = numExt2.value != null ? (double) numExt2.value : 0;
                if (num1 > num2) {
                    return Pair.with(start, length);
                }
                // Keep only the first (best) extraction on each side.
                extractNumList1.subList(1, extractNumList1.size()).clear();
                extractNumList2.subList(1, extractNumList2.size()).clear();
            }
            start = match.start();
            length = match.end() - start;
            // Validate both numbers; each call may trim start/length to the number span.
            Triplet<Boolean, Integer, Integer> num1 = validateMatchAndGetStartAndLength(extractNumList1, numberStr1, match, source, start, length);
            start = num1.getValue1();
            length = num1.getValue2();
            Triplet<Boolean, Integer, Integer> num2 = validateMatchAndGetStartAndLength(extractNumList2, numberStr2, match, source, start, length);
            start = num2.getValue1();
            length = num2.getValue2();
            if (!num1.getValue0() || !num2.getValue0()) {
                start = NumberRangeConstants.INVALID_NUM;
                length = NumberRangeConstants.INVALID_NUM;
            }
        }
    } else {
        // Single-number range: use whichever named group captured text.
        String numberStr = numberStr1 == null || numberStr1.isEmpty() ? numberStr2 : numberStr1;
        List<ExtractResult> extractNumList = extractNumberAndOrdinalFromStr(numberStr);
        if (extractNumList != null) {
            start = match.start();
            length = match.end() - start;
            Triplet<Boolean, Integer, Integer> num = validateMatchAndGetStartAndLength(extractNumList, numberStr, match, source, start, length);
            start = num.getValue1();
            length = num.getValue2();
            if (!num.getValue0()) {
                start = NumberRangeConstants.INVALID_NUM;
                length = NumberRangeConstants.INVALID_NUM;
            }
        }
    }
    return Pair.with(start, length);
}
/**
 * Checks that one of the extracted numbers lines up with the regex match and,
 * when it does, adjusts {@code start}/{@code length} so the returned span covers
 * only the number-range text (excluding any non-number prefix/suffix).
 *
 * @param extractNumList candidate number/ordinal extractions from the group text
 * @param numberStr      the raw text of the captured number group
 * @param match          the enclosing regex match
 * @param source         the full input text
 * @param start          current span start, possibly adjusted on return
 * @param length         current span length, possibly adjusted on return
 * @return a triplet of (valid, start, length)
 */
private Triplet<Boolean, Integer, Integer> validateMatchAndGetStartAndLength(List<ExtractResult> extractNumList, String numberStr, MatchResult match, String source, int start, int length) {
    boolean validNum = false;
    for (ExtractResult extractNum : extractNumList) {
        if (numberStr.trim().endsWith(extractNum.text) && match.group().startsWith(numberStr)) {
            // Number group leads the match: shift start past any prefix before the number.
            start = source.indexOf(numberStr) + (extractNum.start != null ? extractNum.start : 0);
            length = length - (extractNum.start != null ? extractNum.start : 0);
            validNum = true;
        } else if (extractNum.start != null && extractNum.start == 0 && match.group().endsWith(numberStr)) {
            // BUGFIX: guard the Integer field before unboxing. The first branch shows
            // extractNum.start can be null; the original `extractNum.start == 0`
            // comparison would then throw a NullPointerException (this logic was
            // ported from C#, where a nullable `Start == 0` is simply false).
            length = length - numberStr.length() + (extractNum.length != null ? extractNum.length : 0);
            validNum = true;
        } else if (extractNum.start != null && extractNum.start == 0
                && extractNum.length != null && extractNum.length == numberStr.trim().length()) {
            // BUGFIX: same null-unboxing guard for extractNum.length.
            validNum = true;
        }
        if (validNum) {
            break;
        }
    }
    return Triplet.with(validNum, start, length);
}
/**
 * Extracts both cardinal numbers and ordinals from {@code numberStr} and merges
 * the results, sorted by length descending then start descending (mirroring the
 * original C# {@code OrderByDescending(Length).ThenByDescending(Start)}).
 *
 * @return the merged, sorted extractions, or {@code null} when neither
 *     extractor found anything
 */
private List<ExtractResult> extractNumberAndOrdinalFromStr(String numberStr) {
    List<ExtractResult> extractNumber = numberExtractor.extract(numberStr);
    List<ExtractResult> extractOrdinal = ordinalExtractor.extract(numberStr);
    if (extractNumber.isEmpty()) {
        return extractOrdinal.isEmpty() ? null : extractOrdinal;
    }
    if (extractOrdinal.isEmpty()) {
        return extractNumber;
    }
    extractNumber.addAll(extractOrdinal);
    // Typed comparator replaces the previous raw (Comparator) cast and unchecked
    // element casts. reversed() of (length asc, then start asc) yields
    // length desc, then start desc — identical ordering to the original.
    extractNumber.sort(
            Comparator.comparing((ExtractResult er) -> er.length)
                    .thenComparing(er -> er.start)
                    .reversed());
    return extractNumber;
}
}
|
|
/*
* Copyright 2005 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.collect.Lists;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
/**
 * Tests for {@link VarCheck}: detection of undefined and multiply-declared
 * variables, cross-module dependency checking, and synthesis of externs for
 * undeclared names.
 */
public class VarCheckTest extends CompilerTestCase {
  private static final String EXTERNS = "var window; function alert() {}";

  // Severity applied to DiagnosticGroups.STRICT_MODULE_DEP_CHECK in getOptions().
  private CheckLevel strictModuleDepErrorLevel;

  // When true, VarCheck runs in sanity-check mode.
  private boolean sanityCheck = false;

  public VarCheckTest() {
    super(EXTERNS);
  }

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Reset per-test state so earlier tests cannot leak configuration.
    strictModuleDepErrorLevel = CheckLevel.OFF;
    sanityCheck = false;
  }

  @Override
  protected CompilerOptions getOptions() {
    CompilerOptions options = super.getOptions();
    options.setWarningLevel(DiagnosticGroups.STRICT_MODULE_DEP_CHECK,
        strictModuleDepErrorLevel);
    return options;
  }

  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    return new VarCheck(compiler, sanityCheck);
  }

  @Override
  protected int getNumRepetitions() {
    // Because we synthesize externs, the second pass won't emit a warning.
    return 1;
  }

  public void testBreak() {
    testSame("a: while(1) break a;");
  }

  public void testContinue() {
    testSame("a: while(1) continue a;");
  }

  public void testReferencedVarNotDefined() {
    test("x = 0;", null, VarCheck.UNDEFINED_VAR_ERROR);
  }

  public void testReferencedVarDefined1() {
    testSame("var x, y; x=1;");
  }

  public void testReferencedVarDefined2() {
    testSame("var x; function y() {x=1;}");
  }

  public void testReferencedVarsExternallyDefined() {
    testSame("var x = window; alert(x);");
  }

  public void testMultiplyDeclaredVars1() {
    test("var x = 1; var x = 2;", null,
        SyntacticScopeCreator.VAR_MULTIPLY_DECLARED_ERROR);
  }

  public void testMultiplyDeclaredVars2() {
    // Catch variables live in their own scope, so reusing x is legal.
    test("var y; try { y=1 } catch (x) {}" +
        "try { y=1 } catch (x) {}",
        "var y;try{y=1}catch(x){}try{y=1}catch(x){}");
  }

  public void testMultiplyDeclaredVars3() {
    test("try { var x = 1; x *=2; } catch (x) {}", null,
        SyntacticScopeCreator.VAR_MULTIPLY_DECLARED_ERROR);
  }

  public void testVarReferenceInExterns() {
    testSame("asdf;", "var asdf;",
        VarCheck.NAME_REFERENCE_IN_EXTERNS_ERROR, true);
  }

  public void testCallInExterns() {
    testSame("yz();", "function yz() {}",
        VarCheck.NAME_REFERENCE_IN_EXTERNS_ERROR, true);
  }

  public void testPropReferenceInExterns1() {
    testSame("asdf.foo;", "var asdf;",
        VarCheck.UNDEFINED_EXTERN_VAR_ERROR, true);
  }

  public void testPropReferenceInExterns2() {
    testSame("asdf.foo;", "",
        VarCheck.UNDEFINED_EXTERN_VAR_ERROR, true);
  }

  public void testVarInWithBlock() {
    test("var a = {b:5}; with (a){b;}", null, VarCheck.UNDEFINED_VAR_ERROR);
  }

  public void testInvalidFunctionDecl1() {
    test("function() {};", null, VarCheck.INVALID_FUNCTION_DECL);
  }

  public void testInvalidFunctionDecl2() {
    test("if (true) { function() {}; }", null, VarCheck.INVALID_FUNCTION_DECL);
  }

  public void testValidFunctionExpr() {
    testSame("(function() {});");
  }

  public void testRecursiveFunction() {
    testSame("(function a() { return a(); })();");
  }

  public void testRecursiveFunction2() {
    testSame("var a = 3; (function a() { return a(); })();");
  }

  public void testLegalVarReferenceBetweenModules() {
    testDependentModules("var x = 10;", "var y = x++;", null);
  }

  public void testMissingModuleDependencyDefault() {
    testIndependentModules("var x = 10;", "var y = x++;",
        null, VarCheck.MISSING_MODULE_DEP_ERROR);
  }

  public void testViolatedModuleDependencyDefault() {
    testDependentModules("var y = x++;", "var x = 10;",
        VarCheck.VIOLATED_MODULE_DEP_ERROR);
  }

  public void testMissingModuleDependencySkipNonStrict() {
    sanityCheck = true;
    testIndependentModules("var x = 10;", "var y = x++;",
        null, null);
  }

  public void testViolatedModuleDependencySkipNonStrict() {
    sanityCheck = true;
    testDependentModules("var y = x++;", "var x = 10;",
        null);
  }

  public void testMissingModuleDependencySkipNonStrictPromoted() {
    sanityCheck = true;
    strictModuleDepErrorLevel = CheckLevel.ERROR;
    testIndependentModules("var x = 10;", "var y = x++;",
        VarCheck.STRICT_MODULE_DEP_ERROR, null);
  }

  public void testViolatedModuleDependencyNonStrictPromoted() {
    sanityCheck = true;
    strictModuleDepErrorLevel = CheckLevel.ERROR;
    testDependentModules("var y = x++;", "var x = 10;",
        VarCheck.STRICT_MODULE_DEP_ERROR);
  }

  public void testDependentStrictModuleDependencyCheck() {
    strictModuleDepErrorLevel = CheckLevel.ERROR;
    testDependentModules("var f = function() {return new B();};",
        "var B = function() {}",
        VarCheck.STRICT_MODULE_DEP_ERROR);
  }

  public void testIndependentStrictModuleDependencyCheck() {
    strictModuleDepErrorLevel = CheckLevel.ERROR;
    testIndependentModules("var f = function() {return new B();};",
        "var B = function() {}",
        VarCheck.STRICT_MODULE_DEP_ERROR, null);
  }

  public void testStarStrictModuleDependencyCheck() {
    strictModuleDepErrorLevel = CheckLevel.WARNING;
    testSame(createModuleStar("function a() {}", "function b() { a(); c(); }",
        "function c() { a(); }"),
        VarCheck.STRICT_MODULE_DEP_ERROR);
  }

  public void testForwardVarReferenceInLocalScope1() {
    testDependentModules("var x = 10; function a() {y++;}",
        "var y = 11; a();", null);
  }

  public void testForwardVarReferenceInLocalScope2() {
    // It would be nice if this pass could use a call graph to flag this case
    // as an error, but it currently doesn't.
    testDependentModules("var x = 10; function a() {y++;} a();",
        "var y = 11;", null);
  }

  /** Compiles two modules where m2 depends on m1 and expects {@code error}. */
  private void testDependentModules(String code1, String code2,
      DiagnosticType error) {
    testDependentModules(code1, code2, error, null);
  }

  private void testDependentModules(String code1, String code2,
      DiagnosticType error,
      DiagnosticType warning) {
    testTwoModules(code1, code2, true, error, warning);
  }

  /** Compiles two modules with no dependency edge between them. */
  private void testIndependentModules(String code1, String code2,
      DiagnosticType error,
      DiagnosticType warning) {
    testTwoModules(code1, code2, false, error, warning);
  }

  private void testTwoModules(String code1, String code2, boolean m2DependsOnm1,
      DiagnosticType error, DiagnosticType warning) {
    JSModule m1 = new JSModule("m1");
    m1.add(JSSourceFile.fromCode("input1", code1));
    JSModule m2 = new JSModule("m2");
    m2.add(JSSourceFile.fromCode("input2", code2));
    if (m2DependsOnm1) {
      m2.addDependency(m1);
    }
    test(new JSModule[] { m1, m2 },
        new String[] { code1, code2 }, error, warning);
  }

  //////////////////////////////////////////////////////////////////////////////
  // Test synthesis of externs

  public void testSimple() {
    checkSynthesizedExtern("x", "var x");
    checkSynthesizedExtern("var x", "");
  }

  public void testSimpleSanityCheck() {
    sanityCheck = true;
    try {
      checkSynthesizedExtern("x", "");
      // BUGFIX: without this fail(), the test silently passed when no
      // exception was thrown, defeating its purpose.
      fail("Expected a RuntimeException for the undeclared variable x");
    } catch (RuntimeException e) {
      assertTrue(e.getMessage().contains("Unexpected variable x"));
    }
  }

  public void testParameter() {
    checkSynthesizedExtern("function f(x){}", "");
  }

  public void testLocalVar() {
    checkSynthesizedExtern("function f(){x}", "var x");
  }

  public void testTwoLocalVars() {
    checkSynthesizedExtern("function f(){x}function g() {x}", "var x");
  }

  public void testInnerFunctionLocalVar() {
    checkSynthesizedExtern("function f(){function g() {x}}", "var x");
  }

  public void testNoCreateVarsForLabels() {
    checkSynthesizedExtern("x:var y", "");
  }

  /** Asserts that every name reference (other than labels) resolves to a declared variable. */
  private static final class VariableTestCheck implements CompilerPass {
    final AbstractCompiler compiler;

    VariableTestCheck(AbstractCompiler compiler) {
      this.compiler = compiler;
    }

    @Override
    public void process(Node externs, Node root) {
      NodeTraversal.traverseRoots(compiler, Lists.newArrayList(externs, root),
          new AbstractPostOrderCallback() {
            @Override
            public void visit(NodeTraversal t, Node n, Node parent) {
              if (NodeUtil.isName(n) && !NodeUtil.isFunction(parent)
                  && parent.getType() != Token.LABEL) {
                assertTrue("Variable " + n.getString() + " should have been declared",
                    t.getScope().isDeclared(n.getString(), true));
              }
            }
          });
    }
  }

  /**
   * Runs VarCheck over {@code input} with no source externs and asserts that the
   * synthesized externs, when printed, equal {@code expectedExtern}.
   */
  public void checkSynthesizedExtern(String input, String expectedExtern) {
    Compiler compiler = new Compiler();
    CompilerOptions options = new CompilerOptions();
    // Silence undefined-variable errors: synthesis is the behavior under test.
    options.setWarningLevel(
        DiagnosticGroup.forType(VarCheck.UNDEFINED_VAR_ERROR),
        CheckLevel.OFF);
    compiler.init(
        new JSSourceFile[] {},
        new JSSourceFile[] { JSSourceFile.fromCode("input", input) },
        options);
    compiler.parseInputs();
    assertFalse(compiler.hasErrors());
    Node externsAndJs = compiler.getRoot();
    Node root = externsAndJs.getLastChild();
    Node externs = externsAndJs.getFirstChild();
    Node expected = compiler.parseTestCode(expectedExtern);
    assertFalse(compiler.hasErrors());
    (new VarCheck(compiler, sanityCheck)).process(externs, root);
    if (!sanityCheck) {
      (new VariableTestCheck(compiler)).process(externs, root);
    }
    String externsCode = compiler.toSource(externs);
    String expectedCode = compiler.toSource(expected);
    assertEquals(expectedCode, externsCode);
  }
}
|
|
package me.newyith.fortress.core;
import me.newyith.fortress.bedrock.BedrockBatch;
import me.newyith.fortress.event.TickTimer;
import me.newyith.fortress.main.FortressesManager;
import me.newyith.fortress.util.Debug;
import me.newyith.fortress.util.Point;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.World;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.*;
/*
altered:
blocks changed to bedrock
protected:
blocks made unbreakable
generated:
blocks made unbreakable and blocks changed to bedrock
claimed:
points the generate thinks it owns
//*/
/**
 * Legacy wall-generation animator. Steps through {@code animationLayers} one
 * layer per frame, generating (inner layers first) or degenerating (outer
 * layers first) the fortress wall.
 *
 * <p>Terminology (see the file header): "altered" points were changed to
 * bedrock, "protected" points were made unbreakable, "generated" covers both.
 *
 * <p>NOTE(review): a large block of commented-out, non-compiling legacy code
 * (the batch-based updateLayer/generateLayer/degenerateLayer implementation,
 * previously kept behind a comment toggle) has been removed; recover it from
 * version control if this class is ever revived.
 */
public class CoreAnimatorOld {
    /**
     * Serializable animator state. {@code worldName} is persisted; the World
     * reference and the animation counters are transient and rebuilt on load.
     */
    private static class Model {
        private Point anchorPoint = null;
        //TODO: think about changing alteredBatchesByLayerIndex to alteredBatchByLayerIndex (single batch per layer)
        private Map<Integer, Set<BedrockBatch>> alteredBatchesByLayerIndex = null;
        private Set<Point> protectedPoints = null;
        private List<Set<Point>> generatedLayers = null;
        private List<Set<Point>> animationLayers = null;
        private CoreMaterials coreMats = null;
        private boolean skipAnimation = false;
        private boolean animationInProgress = false;
        private boolean isGeneratingWall = false;
        private String worldName = null;
        private transient World world = null;
        private final transient int ticksPerFrame;
        private transient int animationWaitTicks = 0;
        private transient int curIndex = 0;

        @JsonCreator
        public Model(@JsonProperty("anchorPoint") Point anchorPoint,
                     @JsonProperty("alteredBatchesByLayerIndex") Map<Integer, Set<BedrockBatch>> alteredBatchesByLayerIndex,
                     @JsonProperty("protectedPoints") Set<Point> protectedPoints,
                     @JsonProperty("generatedLayers") List<Set<Point>> generatedLayers,
                     @JsonProperty("animationLayers") List<Set<Point>> animationLayers,
                     @JsonProperty("coreMats") CoreMaterials coreMats,
                     @JsonProperty("skipAnimation") boolean skipAnimation,
                     @JsonProperty("animationInProgress") boolean animationInProgress,
                     @JsonProperty("isGeneratingWall") boolean isGeneratingWall,
                     @JsonProperty("worldName") String worldName) {
            this.anchorPoint = anchorPoint;
            this.alteredBatchesByLayerIndex = alteredBatchesByLayerIndex;
            this.protectedPoints = protectedPoints;
            this.generatedLayers = generatedLayers;
            this.animationLayers = animationLayers;
            this.coreMats = coreMats;
            this.skipAnimation = skipAnimation;
            this.animationInProgress = animationInProgress;
            this.isGeneratingWall = isGeneratingWall;
            this.worldName = worldName;

            //rebuild transient fields
            this.world = Bukkit.getWorld(worldName);
            this.ticksPerFrame = (150 / TickTimer.msPerTick); // msPerFrame / msPerTick
            this.animationWaitTicks = 0;
            this.curIndex = 0;
        }
    }

    private Model model = null;

    @JsonCreator
    public CoreAnimatorOld(@JsonProperty("model") Model model) {
        this.model = model;
    }

    /** Creates a fresh animator with empty state for the given anchor. */
    public CoreAnimatorOld(World world, Point anchorPoint, CoreMaterials coreMats) {
        Map<Integer, Set<BedrockBatch>> alteredBatchesByLayerIndex = new HashMap<>();
        Set<Point> protectedPoints = new HashSet<>();
        List<Set<Point>> generatedLayers = new ArrayList<>();
        List<Set<Point>> animationLayers = new ArrayList<>();
        boolean skipAnimation = false;
        boolean animationInProgress = false;
        boolean isGeneratingWall = false;
        String worldName = world.getName();
        model = new Model(anchorPoint, alteredBatchesByLayerIndex, protectedPoints, generatedLayers, animationLayers,
                coreMats, skipAnimation, animationInProgress, isGeneratingWall, worldName);
    }

    //------------------------------------------------------------------------------------------------------------------

    public List<Set<Point>> getGeneratedLayers() {
        return model.generatedLayers;
    }

    /** Starts generating the wall described by {@code layers}, inner layers first. */
    public void generate(List<Set<Point>> layers) {
        model.animationLayers = layers;
        model.curIndex = 0;
        model.isGeneratingWall = true;
        model.animationInProgress = true;
    }

    /**
     * Starts degenerating the wall, outer layers first. When
     * {@code skipAnimation} is set, all layers are reverted in a single tick.
     */
    public void degenerate(boolean skipAnimation) {
        model.curIndex = 0; //starting from end if degenerating is handled elsewhere
        model.isGeneratingWall = false;
        model.animationInProgress = true;
        if (skipAnimation) {
            model.skipAnimation = true;
            tick();
            model.skipAnimation = false;
        }
    }

    public CoreMaterials getCoreMats() {
        return model.coreMats;
    }

    /** Flattens all batches in alteredBatchesByLayerIndex into a single point set. */
    public Set<Point> getAlteredPoints() {
        //TODO: add model.alteredPoints and keep it updated (so we don't need to recalculate here all the time)
        //	maybe test first to make sure its worth improving performance here?
        Set<Point> alteredPoints = new HashSet<>();
        for (Set<BedrockBatch> alteredBatches : model.alteredBatchesByLayerIndex.values()) {
            for (BedrockBatch batch : alteredBatches) {
                alteredPoints.addAll(batch.getPoints());
            }
        }
        return alteredPoints;
    }

    public Set<Point> getProtectedPoints() {
        return model.protectedPoints;
    }

    /** Returns the union of altered and protected points. */
    public Set<Point> getGeneratedPoints() {
        Set<Point> generatedPoints = new HashSet<>();
        generatedPoints.addAll(getAlteredPoints());
        generatedPoints.addAll(model.protectedPoints);
        return generatedPoints;
    }

    public Set<Material> getInvalidWallMaterials() {
        return model.coreMats.getInvalidWallMaterials();
    }

    /**
     * Advances the animation: every {@code ticksPerFrame} ticks, updates one
     * layer (or, when skipAnimation is set, all remaining layers at once).
     */
    public void tick() {
        if (model.animationInProgress) {
            model.animationWaitTicks++;
            if (model.animationWaitTicks >= model.ticksPerFrame) {
                model.animationWaitTicks = 0;
                while (true) {
                    //try to update to next frame
                    boolean updatedFrame = updateToNextFrame();
                    if (!updatedFrame) {
                        //no more layers to update so stop animating
                        model.animationInProgress = false;
                        break;
                    }
                    // updatedFrame is necessarily true here, so only skipAnimation decides
                    // whether we keep collapsing further layers into this same frame.
                    if (!model.skipAnimation) {
                        //updated a layer so we're done with this frame
                        break;
                    }
                }
            }
        }
    }

    // --------- Internal Methods ---------

    /** Notifies the owning core so generation particles can refresh. */
    private void onGeneratedChanged() {
        BaseCore core = FortressesManager.forWorld(model.world).getCore(model.anchorPoint);
        if (core != null) {
            core.onGeneratedChanged();
        } else {
            Debug.error("CoreAnimator.onGeneratedChanged(): Core at " + model.anchorPoint + " is null.");
        }
    }

    /**
     * Advances curIndex until some layer actually changes (or layers run out).
     * Degeneration walks the layers in reverse order.
     */
    private boolean updateToNextFrame() {
        boolean updatedToNextFrame = false;
        while (!updatedToNextFrame && model.curIndex < model.animationLayers.size()) {
            int layerIndex = model.curIndex;
            //if (degenerating) start from the outer most layer
            if (!model.isGeneratingWall) {
                layerIndex = (model.animationLayers.size() - 1) - model.curIndex;
            }
            //try to update layer
            int updatedCount = updateLayer(layerIndex);
            if (updatedCount > 0) {
                updatedToNextFrame = true;
                onGeneratedChanged(); //particles update
            }
            model.curIndex++;
        }
        return updatedToNextFrame;
    }

    /**
     * Stub: the batch-based implementation was removed because it no longer
     * compiled. Always reports zero updated points, so animations terminate
     * immediately. See version control history for the removed code.
     */
    private int updateLayer(int layerIndex) {
        return 0;
    }
}
|
|
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.inferred.freebuilder.processor.property;
import static org.inferred.freebuilder.processor.property.ElementFactory.TYPES;
import com.google.common.collect.BiMap;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.inferred.freebuilder.FreeBuilder;
import org.inferred.freebuilder.processor.FeatureSets;
import org.inferred.freebuilder.processor.NamingConvention;
import org.inferred.freebuilder.processor.Processor;
import org.inferred.freebuilder.processor.source.SourceBuilder;
import org.inferred.freebuilder.processor.source.feature.FeatureSet;
import org.inferred.freebuilder.processor.source.testing.BehaviorTester;
import org.inferred.freebuilder.processor.source.testing.ParameterizedBehaviorTestFactory;
import org.inferred.freebuilder.processor.source.testing.ParameterizedBehaviorTestFactory.Shared;
import org.inferred.freebuilder.processor.source.testing.TestBuilder;
import org.inferred.freebuilder.processor.testtype.NonComparable;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Parameterized.UseParametersRunnerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
@RunWith(Parameterized.class)
@UseParametersRunnerFactory(ParameterizedBehaviorTestFactory.class)
public class BiMapMutateMethodTest {
/**
 * Test matrix: the cartesian product of key type, value type, checked flag,
 * naming convention, and feature set.
 */
@SuppressWarnings("unchecked")
@Parameters(name = "BiMap<{0}, {1}>, checked={2}, {3}, {4}")
public static Iterable<Object[]> parameters() {
  List<Boolean> checkedFlags = ImmutableList.of(false, true);
  List<NamingConvention> namingConventions = Arrays.asList(NamingConvention.values());
  List<FeatureSet> featureSets = FeatureSets.WITH_GUAVA;
  return () -> {
    List<? extends List<?>> combinations =
        Lists.cartesianProduct(TYPES, TYPES, checkedFlags, namingConventions, featureSets);
    return combinations.stream().map(List::toArray).iterator();
  };
}
@Rule public final ExpectedException thrown = ExpectedException.none();
@Shared public BehaviorTester behaviorTester;
private final ElementFactory keys;
private final ElementFactory values;
private final boolean checked;
private final NamingConvention convention;
private final FeatureSet features;
private final SourceBuilder bimapPropertyType;
/**
 * Builds the @FreeBuilder "DataType" source under test: an interface with a
 * single BiMap<keys, values> property. When {@code checked} is true, the
 * generated Builder's forcePutItems is overridden to validate both key and
 * value, so checked-mode tests can expect IllegalArgumentException.
 */
public BiMapMutateMethodTest(
    ElementFactory keys,
    ElementFactory values,
    boolean checked,
    NamingConvention convention,
    FeatureSet features) {
  this.keys = keys;
  this.values = values;
  this.checked = checked;
  this.convention = convention;
  this.features = features;
  bimapPropertyType = SourceBuilder.forTesting()
      .addLine("package com.example;")
      .addLine("@%s", FreeBuilder.class)
      .addLine("public interface DataType {")
      .addLine(" %s<%s, %s> %s;", BiMap.class, keys.type(), values.type(), convention.get())
      .addLine("")
      .addLine(" public static class Builder extends DataType_Builder {");
  if (checked) {
    // All BiMap mutations funnel through forcePutItems, so overriding it alone
    // is enough to validate put() and inverse().put() arguments.
    bimapPropertyType
        .addLine(" @Override public Builder forcePutItems(%s key, %s value) {",
            keys.unwrappedType(), values.unwrappedType())
        .addLine(" if (!(%s)) {", keys.validation("key"))
        .addLine(" throw new IllegalArgumentException(\"key %s\");", keys.errorMessage())
        .addLine(" }")
        .addLine(" if (!(%s)) {", values.validation("value"))
        .addLine(" throw new IllegalArgumentException(\"value %s\");",
            values.errorMessage())
        .addLine(" }")
        .addLine(" return super.forcePutItems(key, value);")
        .addLine(" }");
  }
  bimapPropertyType
      .addLine(" }")
      .addLine("}");
}
/** Registers the FreeBuilder processor and permits use of the shared test-type package. */
@Before
public void before() {
  behaviorTester
      .with(new Processor(features))
      .withPermittedPackage(NonComparable.class.getPackage());
}
/** mutateItems(put) with a new key should add an entry to the built BiMap. */
@Test
public void putModifiesUnderlyingProperty() {
  behaviorTester
      .with(bimapPropertyType)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .putItems(%s, %s)", keys.example(0), values.example(0))
          .addLine(" .mutateItems(items -> items.put(%s, %s))",
              keys.example(1), values.example(1))
          .addLine(" .build();")
          .addLine("assertThat(value.%s).isEqualTo(%s);",
              convention.get(), exampleBiMap(0, 0, 1, 1))
          .build())
      .runTest();
}
/** In checked mode, put through the mutator must reject an invalid key. */
@Test
public void putChecksArguments() {
  // Expectation set only when checked: unchecked builders accept any key.
  if (checked) {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("key " + keys.errorMessage());
  }
  behaviorTester
      .with(bimapPropertyType)
      .with(testBuilder()
          .addLine("new DataType.Builder()")
          .addLine(" .mutateItems(items -> items.put(%s, %s));",
              keys.invalidExample(), values.example(0))
          .build())
      .runTest();
}
/** Putting an existing key replaces that key's value (standard BiMap.put semantics). */
@Test
public void putReplacesDuplicateKey() {
  behaviorTester
      .with(bimapPropertyType)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .putItems(%s, %s)", keys.example(0), values.example(0))
          .addLine(" .mutateItems(items -> items.put(%s, %s))",
              keys.example(0), values.example(1))
          .addLine(" .build();")
          .addLine("assertThat(value.%s).isEqualTo(%s);",
              convention.get(), exampleBiMap(0, 1))
          .build())
      .runTest();
}
/** Re-putting an identical entry is a no-op and leaves the single entry in place. */
@Test
public void putReplacesDuplicateKeyAndValue() {
  behaviorTester
      .with(bimapPropertyType)
      .with(testBuilder()
          .addLine("DataType value = new DataType.Builder()")
          .addLine(" .putItems(%s, %s)", keys.example(0), values.example(0))
          .addLine(" .mutateItems(items -> items.put(%s, %s))",
              keys.example(0), values.example(0))
          .addLine(" .build();")
          .addLine("assertThat(value.%s).isEqualTo(%s);",
              convention.get(), exampleBiMap(0, 0))
          .build())
      .runTest();
}
@Test
public void putRejectsDuplicateValue() {
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("value already present: " + values.exampleToString(0));
behaviorTester
.with(bimapPropertyType)
.with(testBuilder()
.addLine("new DataType.Builder()")
.addLine(" .putItems(%s, %s)", keys.example(0), values.example(0))
.addLine(" .mutateItems(items -> items.put(%s, %s));",
keys.example(1), values.example(0))
.build())
.runTest();
}
  // put() on the inverse view writes through to the underlying property.
  @Test
  public void inversePutModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().put(%s, %s))",
                values.example(1), keys.example(1))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 0, 1, 1))
            .build())
        .runTest();
  }
  // An invalid key supplied as the inverse view's VALUE must still be validated.
  @Test
  public void inversePutChecksArguments() {
    if (checked) {
      thrown.expect(IllegalArgumentException.class);
      thrown.expectMessage("key " + keys.errorMessage());
    }
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .mutateItems(items -> items.inverse().put(%s, %s));",
                values.example(0), keys.invalidExample())
            .build())
        .runTest();
  }
  // Inverse put() with an existing value (inverse key) rebinds it to the new key.
  @Test
  public void inversePutReplacesDuplicateValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().put(%s, %s))",
                values.example(0), keys.example(1))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(1, 0))
            .build())
        .runTest();
  }
  // Inverse put() of the identical pair is a no-op, not an error.
  @Test
  public void inversePutReplacesDuplicateKeyAndValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().put(%s, %s))",
                values.example(0), keys.example(0))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 0))
            .build())
        .runTest();
  }
  // Inverse put() rejects a key (its "value") already bound elsewhere.
  @Test
  public void inversePutRejectsDuplicateKey() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("value already present: " + keys.exampleToString(0));
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().put(%s, %s));",
                values.example(1), keys.example(0))
            .build())
        .runTest();
  }
  // forcePut() through mutateItems adds the entry to the built property.
  @Test
  public void forcePutModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.forcePut(%s, %s))",
                keys.example(1), values.example(1))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 0, 1, 1))
            .build())
        .runTest();
  }
  // forcePut() must still run the key validator for checked properties
  // (covered by the generated forcePutItems override — see the type fixture).
  @Test
  public void forcePutChecksArguments() {
    if (checked) {
      thrown.expect(IllegalArgumentException.class);
      thrown.expectMessage("key " + keys.errorMessage());
    }
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .mutateItems(items -> items.forcePut(%s, %s));",
                keys.invalidExample(), values.example(0))
            .build())
        .runTest();
  }
  // forcePut() with an existing key replaces its value.
  @Test
  public void forcePutReplacesDuplicateKey() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.forcePut(%s, %s))",
                keys.example(0), values.example(1))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 1))
            .build())
        .runTest();
  }
  // Unlike put(), forcePut() silently rebinds an already-present value.
  @Test
  public void forcePutReplacesDuplicateValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.forcePut(%s, %s))",
                keys.example(1), values.example(0))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(1, 0))
            .build())
        .runTest();
  }
  // forcePut() on the inverse view writes through to the underlying property.
  @Test
  public void inverseForcePutModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().forcePut(%s, %s))",
                values.example(1), keys.example(1))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 0, 1, 1))
            .build())
        .runTest();
  }
  // An invalid key supplied as the inverse view's VALUE must still be validated.
  @Test
  public void inverseForcePutChecksArguments() {
    if (checked) {
      thrown.expect(IllegalArgumentException.class);
      thrown.expectMessage("key " + keys.errorMessage());
    }
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .mutateItems(items -> items.inverse().forcePut(%s, %s));",
                values.example(0), keys.invalidExample())
            .build())
        .runTest();
  }
  // Inverse forcePut() with an existing value rebinds it to the new key.
  @Test
  public void inverseForcePutReplacesDuplicateValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().forcePut(%s, %s))",
                values.example(0), keys.example(1))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(1, 0))
            .build())
        .runTest();
  }
  // Inverse forcePut() silently rebinds an already-present key (its "value").
  @Test
  public void inverseForcePutReplacesDuplicateKey() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().forcePut(%s, %s))",
                values.example(1), keys.example(0))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 1))
            .build())
        .runTest();
  }
  // putAll() through mutateItems merges all entries into the built property.
  @Test
  public void putAllModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.putAll(%s))", exampleBiMap(1, 1, 2, 2))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 0, 1, 1, 2, 2))
            .build())
        .runTest();
  }
  // putAll() must validate every key when the property is checked.
  @Test
  public void putAllChecksArguments() {
    if (checked) {
      thrown.expect(IllegalArgumentException.class);
      thrown.expectMessage("key " + keys.errorMessage());
    }
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .mutateItems(items -> items.putAll(%s.of(%s, %s)));",
                ImmutableMap.class, keys.invalidExample(), values.example(0))
            .build())
        .runTest();
  }
  // putAll() replaces the value of any key that was already present.
  @Test
  public void putAllReplacesDuplicateKey() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.putAll(%s))", exampleBiMap(0, 1, 2, 3))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 1, 2, 3))
            .build())
        .runTest();
  }
  // putAll() rejects any value already bound to a different key.
  @Test
  public void putAllRejectsDuplicateValue() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("value already present: " + values.exampleToString(0));
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.putAll(%s));", exampleBiMap(1, 0, 2, 3))
            .build())
        .runTest();
  }
  // putAll() on the inverse view merges the (value -> key) map into the property.
  @Test
  public void inversePutAllModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().putAll(%s))",
                exampleInverseBiMap(1, 1, 2, 2))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 0, 1, 1, 2, 2))
            .build())
        .runTest();
  }
  // Invalid keys supplied as inverse VALUES must still be validated.
  @Test
  public void inversePutAllChecksArguments() {
    if (checked) {
      thrown.expect(IllegalArgumentException.class);
      thrown.expectMessage("key " + keys.errorMessage());
    }
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .mutateItems(items -> items.inverse().putAll(%s.of(%s, %s)));",
                ImmutableMap.class, values.example(0), keys.invalidExample())
            .build())
        .runTest();
  }
  // Inverse putAll() rebinds any value (inverse key) that was already present.
  @Test
  public void inversePutAllReplacesDuplicateValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().putAll(%s))",
                exampleInverseBiMap(0, 1, 2, 3))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(1, 0, 3, 2))
            .build())
        .runTest();
  }
  // Inverse putAll() rejects a key already bound to a different value.
  @Test
  public void inversePutAllRejectsDuplicateKey() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("value already present: " + keys.exampleToString(0));
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.inverse().putAll(%s));",
                exampleInverseBiMap(1, 0, 2, 3))
            .build())
        .runTest();
  }
  // The entry set iterator exposes entries that were put into the map.
  @Test
  public void iterateEntrySetFindsContainedEntry() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> {")
            .addLine("      Map.Entry<%s, %s> entry = items.entrySet().iterator().next();",
                keys.type(), values.type())
            .addLine("      assertThat(entry.getKey()).isEqualTo(%s);", keys.example(0))
            .addLine("      assertThat(entry.getValue()).isEqualTo(%s);", values.example(0))
            .addLine("    });")
            .build())
        .runTest();
  }
  // The inverse view's entry set exposes entries with key/value swapped.
  @Test
  public void iterateInverseEntrySetFindsContainedEntry() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> {")
            .addLine("      Map.Entry<%s, %s> entry =", values.type(), keys.type())
            .addLine("          items.inverse().entrySet().iterator().next();")
            .addLine("      assertThat(entry.getKey()).isEqualTo(%s);", values.example(0))
            .addLine("      assertThat(entry.getValue()).isEqualTo(%s);", keys.example(0))
            .addLine("    });")
            .build())
        .runTest();
  }
  // Iterator.remove() on the entry set deletes the entry from the built property.
  @Test
  public void callRemoveOnEntrySetIteratorModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> {")
            .addLine("      Iterator<Map.Entry<%s, %s>> i = items.entrySet().iterator();",
                keys.type(), values.type())
            .addLine("      i.next();")
            .addLine("      i.remove();")
            .addLine("    })")
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);", convention.get(), exampleBiMap(1, 1))
            .build())
        .runTest();
  }
  // Iterator.remove() on the inverse entry set also writes through.
  @Test
  public void callRemoveOnInverseEntrySetIteratorModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> {")
            .addLine("      Iterator<Map.Entry<%s, %s>> i =", values.type(), keys.type())
            .addLine("          items.inverse().entrySet().iterator();")
            .addLine("      i.next();")
            .addLine("      i.remove();")
            .addLine("    })")
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);", convention.get(), exampleBiMap(1, 1))
            .build())
        .runTest();
  }
@Test
public void entrySetIteratorRemainsUsableAfterCallingRemove() {
behaviorTester
.with(bimapPropertyType)
.with(testBuilder()
.addLine("new DataType.Builder()")
.addLine(" .putItems(%s, %s)", keys.example(0), values.example(0))
.addLine(" .putItems(%s, %s)", keys.example(1), values.example(1))
.addLine(" .mutateItems(items -> {")
.addLine(" Iterator<Map.Entry<%s, %s>> i = items.entrySet().iterator();",
keys.type(), values.type())
.addLine(" Map.Entry<%s, %s> entry = i.next();", keys.type(), values.type())
.addLine(" assertThat(entry.getKey()).isEqualTo(%s);", keys.example(0))
.addLine(" assertThat(entry.getValue()).isEqualTo(%s);", values.example(0))
.addLine(" assertThat(i.hasNext()).isTrue();")
.addLine(" i.remove();")
.addLine(" assertThat(i.hasNext()).isTrue();")
.addLine(" entry = i.next();", Map.Entry.class)
.addLine(" assertThat(entry.getKey()).isEqualTo(%s);", keys.example(1))
.addLine(" assertThat(entry.getValue()).isEqualTo(%s);", values.example(1))
.addLine(" assertThat(i.hasNext()).isFalse();")
.addLine(" });")
.build())
.runTest();
}
@Test
public void inverseEntrySetIteratorRemainsUsableAfterCallingRemove() {
behaviorTester
.with(bimapPropertyType)
.with(testBuilder()
.addLine("new DataType.Builder()")
.addLine(" .putItems(%s, %s)", keys.example(0), values.example(0))
.addLine(" .putItems(%s, %s)", keys.example(1), values.example(1))
.addLine(" .mutateItems(items -> {")
.addLine(" Iterator<Map.Entry<%s, %s>> i =", values.type(), keys.type())
.addLine(" items.inverse().entrySet().iterator();")
.addLine(" Map.Entry<%s, %s> entry = i.next();", values.type(), keys.type())
.addLine(" assertThat(entry.getKey()).isEqualTo(%s);", values.example(0))
.addLine(" assertThat(entry.getValue()).isEqualTo(%s);", keys.example(0))
.addLine(" assertThat(i.hasNext()).isTrue();")
.addLine(" i.remove();")
.addLine(" assertThat(i.hasNext()).isTrue();")
.addLine(" entry = i.next();", Map.Entry.class)
.addLine(" assertThat(entry.getKey()).isEqualTo(%s);", values.example(1))
.addLine(" assertThat(entry.getValue()).isEqualTo(%s);", keys.example(1))
.addLine(" assertThat(i.hasNext()).isFalse();")
.addLine(" });")
.build())
.runTest();
}
  // Entry.setValue() must run the value validator when the property is checked.
  @Test
  public void callSetValueOnEntryChecksArguments() {
    if (checked) {
      thrown.expect(IllegalArgumentException.class);
      thrown.expectMessage("value " + values.errorMessage());
    }
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.entrySet().iterator().next().setValue(%s));",
                values.invalidExample())
            .build())
        .runTest();
  }
  // Entry.setValue() rejects a value already bound to a different key.
  @Test
  public void callSetValueOnEntryChecksDuplicateValue() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("value already present: " + values.exampleToString(1));
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> items.entrySet().iterator().next().setValue(%s));",
                values.example(1))
            .build())
        .runTest();
  }
  // Setting an entry's value to its current value is a no-op, not an error.
  @Test
  public void callSetValueOnEntryAllowsDuplicateKeyAndValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.entrySet().iterator().next().setValue(%s));",
                values.example(0))
            .build())
        .runTest();
  }
  // Entry.setValue() writes through to the built property.
  @Test
  public void callSetValueOnEntryModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.entrySet().iterator().next().setValue(%s))",
                values.example(1))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);", convention.get(), exampleBiMap(0, 1))
            .build())
        .runTest();
  }
  // Entry.setValue() returns the previous value, per the Map.Entry contract.
  @Test
  public void callSetValueReturnsOldValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> {")
            .addLine("      Iterator<Map.Entry<%s, %s>> i = items.entrySet().iterator();",
                keys.type(), values.type())
            .addLine("      Map.Entry<%s, %s> entry = i.next();", keys.type(), values.type())
            .addLine("      %s oldValue = entry.setValue(%s);", values.type(), values.example(2))
            .addLine("      assertThat(oldValue).isEqualTo(%s);", values.example(0))
            .addLine("    });")
            .build())
        .runTest();
  }
  // setValue() on an inverse entry takes a KEY, so the key validator must run.
  @Test
  public void callSetValueOnInverseEntryChecksArguments() {
    if (checked) {
      thrown.expect(IllegalArgumentException.class);
      thrown.expectMessage("key " + keys.errorMessage());
    }
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items")
            .addLine("        .inverse().entrySet().iterator().next().setValue(%s));",
                keys.invalidExample())
            .build())
        .runTest();
  }
  // Inverse entry setValue() rejects a key already bound to a different value.
  @Test
  public void callSetValueOnInverseEntryChecksDuplicateValue() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("value already present: " + keys.exampleToString(1));
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> items")
            .addLine("        .inverse().entrySet().iterator().next().setValue(%s));",
                keys.example(1))
            .build())
        .runTest();
  }
  // Setting an inverse entry to its current key is a no-op, not an error.
  @Test
  public void callSetValueOnInverseEntryAllowsDuplicateKeyAndValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items")
            .addLine("        .inverse().entrySet().iterator().next().setValue(%s));",
                keys.example(0))
            .build())
        .runTest();
  }
  // Inverse entry setValue() writes through to the built property.
  @Test
  public void callSetValueOnInverseEntryModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items")
            .addLine("        .inverse().entrySet().iterator().next().setValue(%s))",
                keys.example(1))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);", convention.get(), exampleBiMap(1, 0))
            .build())
        .runTest();
  }
  // An entry must still report the updated key/value after setValue().
  @Test
  public void entryRemainsUsableAfterCallingSetValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> {")
            .addLine("      Iterator<Map.Entry<%s, %s>> i = items.entrySet().iterator();",
                keys.type(), values.type())
            .addLine("      Map.Entry<%s, %s> entry = i.next();", keys.type(), values.type())
            .addLine("      entry.setValue(%s);", values.example(2))
            .addLine("      assertThat(entry.getKey()).isEqualTo(%s);", keys.example(0))
            .addLine("      assertThat(entry.getValue()).isEqualTo(%s);", values.example(2))
            .addLine("    });")
            .build())
        .runTest();
  }
  // Same as above, but for an entry from the inverse view (value-first).
  @Test
  public void inverseEntryRemainsUsableAfterCallingSetValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> {")
            .addLine("      Iterator<Map.Entry<%s, %s>> i =", values.type(), keys.type())
            .addLine("          items.inverse().entrySet().iterator();")
            .addLine("      Map.Entry<%s, %s> entry = i.next();", values.type(), keys.type())
            .addLine("      entry.setValue(%s);", keys.example(2))
            .addLine("      assertThat(entry.getKey()).isEqualTo(%s);", values.example(0))
            .addLine("      assertThat(entry.getValue()).isEqualTo(%s);", keys.example(2))
            .addLine("    });")
            .build())
        .runTest();
  }
  // get() returns the mapped value for a present key, null for an absent one.
  @Test
  public void getReturnsContainedValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("List<%s> values = new ArrayList<>();", values.type())
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> values.add(items.get(%s)))", keys.example(0))
            .addLine("    .mutateItems(items -> values.add(items.get(%s)));", keys.example(1))
            .addLine("assertThat(values).containsExactly(%s, null).inOrder();", values.example(0))
            .build())
        .runTest();
  }
  // inverse().get() looks up by value and returns the key (or null).
  @Test
  public void inverseGetReturnsContainedKey() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("List<%s> keys = new ArrayList<>();", keys.type())
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> keys.add(items.inverse().get(%s)))",
                values.example(0))
            .addLine("    .mutateItems(items -> keys.add(items.inverse().get(%s)));",
                values.example(1))
            .addLine("assertThat(keys).containsExactly(%s, null).inOrder();", keys.example(0))
            .build())
        .runTest();
  }
  // containsKey() distinguishes present from absent keys.
  @Test
  public void containsKeyFindsContainedKey() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("List<Boolean> results = new ArrayList<>();")
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> results.add(items.containsKey(%s)))",
                keys.example(0))
            .addLine("    .mutateItems(items -> results.add(items.containsKey(%s)));",
                keys.example(1))
            .addLine("assertThat(results).containsExactly(true, false).inOrder();")
            .build())
        .runTest();
  }
  // inverse().containsKey() distinguishes present from absent values.
  @Test
  public void inverseContainsKeyFindsContainedValue() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("List<Boolean> results = new ArrayList<>();")
            .addLine("new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> results.add(items.inverse().containsKey(%s)))",
                values.example(0))
            .addLine("    .mutateItems(items -> results.add(items.inverse().containsKey(%s)));",
                values.example(1))
            .addLine("assertThat(results).containsExactly(true, false).inOrder();")
            .build())
        .runTest();
  }
  // remove(key) through mutateItems deletes the entry from the built property.
  @Test
  public void removeModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> items.remove(%s))", keys.example(0))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);", convention.get(), exampleBiMap(1, 1))
            .build())
        .runTest();
  }
  // remove(value) on the inverse view deletes the corresponding entry.
  @Test
  public void inverseRemoveModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> items.inverse().remove(%s))", values.example(0))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);", convention.get(), exampleBiMap(1, 1))
            .build())
        .runTest();
  }
  // clear() (passed as a method reference) empties the built property.
  @Test
  public void clearModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(%s::clear)", BiMap.class)
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEmpty();", convention.get())
            .build())
        .runTest();
  }
  // clear() on the inverse view also empties the built property.
  @Test
  public void inverseClearModifiesUnderlyingProperty() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> items.inverse().clear())")
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEmpty();", convention.get())
            .build())
        .runTest();
  }
  // values() is a live view: later puts on the builder appear in the captured set.
  @Test
  public void valuesReturnsLiveViewOfValuesInMap() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("%s<%s<%s>> values = new %s<>();",
                List.class, Set.class, values.type(), ArrayList.class)
            .addLine("DataType.Builder builder = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> values.add(items.values()));")
            .addLine("assertThat(values.get(0)).containsExactly(%s);", values.examples(0, 1))
            .addLine("builder.putItems(%s, %s);", keys.example(2), values.example(2))
            .addLine("assertThat(values.get(0)).containsExactly(%s);", values.examples(0, 1, 2))
            .build())
        .runTest();
  }
  // inverse().values() is a live view of the map's keys.
  @Test
  public void inverseValuesReturnsLiveViewOfKeysInMap() {
    behaviorTester
        .with(bimapPropertyType)
        .with(testBuilder()
            .addLine("%s<%s<%s>> keys = new %s<>();",
                List.class, Set.class, keys.type(), ArrayList.class)
            .addLine("DataType.Builder builder = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .putItems(%s, %s)", keys.example(1), values.example(1))
            .addLine("    .mutateItems(items -> keys.add(items.inverse().values()));")
            .addLine("assertThat(keys.get(0)).containsExactly(%s);", keys.examples(0, 1))
            .addLine("builder.putItems(%s, %s);", keys.example(2), values.example(2))
            .addLine("assertThat(keys.get(0)).containsExactly(%s);", keys.examples(0, 1, 2))
            .build())
        .runTest();
  }
  // A user-declared single-method Mutator interface on the Builder should be
  // accepted by the generated mutateItems in place of the default functional type.
  @Test
  public void canUseCustomFunctionalInterface() {
    SourceBuilder customMutatorType = SourceBuilder.forTesting();
    // Re-emit the property-type source line by line, splicing a custom Mutator
    // interface (and a mutateItems override taking it) into the Builder
    // declaration immediately after its opening brace.
    for (String line : bimapPropertyType.toString().split("\n")) {
      if (line.contains("extends DataType_Builder")) {
        int insertIndex = line.indexOf('{') + 1;
        customMutatorType
            .addLine("%s", line.substring(0, insertIndex))
            .addLine("  public interface Mutator {")
            .addLine("    void mutate(%s<%s, %s> multiBiMap);",
                BiMap.class, keys.type(), values.type())
            .addLine("  }")
            .addLine("  @Override public Builder mutateItems(Mutator mutator) {")
            .addLine("    return super.mutateItems(mutator);")
            .addLine("  }")
            .addLine("%s", line.substring(insertIndex))
      } else {
        customMutatorType.addLine("%s", line);
      }
    }
    behaviorTester
        .with(customMutatorType)
        .with(testBuilder()
            .addLine("DataType value = new DataType.Builder()")
            .addLine("    .putItems(%s, %s)", keys.example(0), values.example(0))
            .addLine("    .mutateItems(items -> items.put(%s, %s))",
                keys.example(1), values.example(1))
            .addLine("    .build();")
            .addLine("assertThat(value.%s).isEqualTo(%s);",
                convention.get(), exampleBiMap(0, 0, 1, 1))
            .build())
        .runTest();
  }
private String exampleBiMap(int key, int value) {
return String.format("ImmutableBiMap.of(%s, %s)", keys.example(key), values.example(value));
}
private String exampleBiMap(int key1, int value1, int key2, int value2) {
return String.format("ImmutableBiMap.of(%s, %s, %s, %s)",
keys.example(key1), values.example(value1), keys.example(key2), values.example(value2));
}
private String exampleInverseBiMap(int value1, int key1, int value2, int key2) {
return String.format("ImmutableBiMap.of(%s, %s, %s, %s)",
values.example(value1), keys.example(key1), values.example(value2), keys.example(key2));
}
private String exampleBiMap(int key1, int value1, int key2, int value2, int key3, int value3) {
return String.format("ImmutableBiMap.of(%s, %s, %s, %s, %s, %s)",
keys.example(key1), values.example(value1),
keys.example(key2), values.example(value2),
keys.example(key3), values.example(value3));
}
  /**
   * Creates a TestBuilder preloaded with the imports every test snippet in this
   * class relies on: the generated DataType plus the collection and BiMap types.
   */
  private static TestBuilder testBuilder() {
    return new TestBuilder()
        .addImport("com.example.DataType")
        .addImport(ArrayList.class)
        .addImport(Iterator.class)
        .addImport(List.class)
        .addImport(Map.class)
        .addImport(BiMap.class)
        .addImport(ImmutableBiMap.class);
  }
}
|
|
/*
* Copyright 2013-2018 Jonathan Vasquez <[email protected]>
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.vasquez;
import com.vasquez.Utilities.RegistryUtility;
import com.vasquez.Utilities.VersionUtility;
import com.vasquez.Utilities.Logger;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.commons.io.FileUtils;
import static java.nio.file.StandardCopyOption.*;
import java.util.ArrayList;
import java.util.Arrays;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
// Switches the files to the correct version of Diablo II
public class FileSwitcher {
    // UI frame and table model this switcher reports back to.
    private JFrame _mainFrame;
    // Entry the user selected to launch next.
    private Entry _selectedEntry;
    // Entry that was launched most recently (null on first ever launch).
    private Entry _lastRanEntry;
    private EntryWithModel _tableManager;
    private ProcessManager processManager;
    // Selected entry's game executable, and the directory containing it.
    private File game;
    private File root;
    private enum GameType {
        Classic,
        Expansion
    }
    // Bundled third-party DLLs shipped alongside the game executable.
    private String[] thirdPartyLibraries = {
        "binkw32.dll",
        "ijl11.dll",
        "SmackW32.dll"
    };
    // Keeping separate from commonFiles since 1.07 doesn't have it.
    private final String patchMpqFile = "Patch_D2.mpq";
    // Executables present in every supported version.
    private String[] commonFiles = {
        "Diablo II.exe",
        "Game.exe"
    };
    private String[] updateFile = {
        "BNUpdate.exe"
    };
    // Extra files only the 1.07.41 beta ships with.
    private String[] requiredBeta10741Files = {
        "D2OpenGL.dll",
        "D2Server.dll",
        "Keyhook.dll"
    };
    // DLLs/executables required by versions before 1.14.
    private final String[] requiredPre114Files = {
        "Bnclient.dll",
        "D2Client.dll",
        "D2CMP.dll",
        "D2Common.dll",
        "D2DDraw.dll",
        "D2Direct3D.dll",
        "D2Game.dll",
        "D2Gdi.dll",
        "D2gfx.dll",
        "D2Glide.dll",
        "D2Lang.dll",
        "D2Launch.dll",
        "D2MCPClient.dll",
        "D2Multi.dll",
        "D2Net.dll",
        "D2sound.dll",
        "D2Win.dll",
        "Fog.dll",
        "Storm.dll",
        "D2VidTst.exe"
    };
    // Executables introduced after 1.13.
    private final String[] requiredPost113Files = {
        "BlizzardError.exe",
        "SystemSurvey.exe"
    };
    // MPQ archives that only the Expansion (LoD) installs.
    private String[] expansionMPQs = {
        "d2exp.mpq",
        "d2xmusic.mpq",
        "d2xvideo.mpq",
        "d2xtalk.mpq"
    };
public FileSwitcher(JFrame mainFrame, EntryWithModel tableManager) {
_mainFrame = mainFrame;
_tableManager = tableManager;
_lastRanEntry = tableManager.getLastRanEntry();
processManager = new ProcessManager();
}
// Sets the information about the entry you want to launch
public void setEntry(Entry entry) {
_selectedEntry = entry;
game = new File(_selectedEntry.Path);
root = new File(game.getParent());
}
public void resetLastRanEntry() {
_lastRanEntry = null;
}
public void setLastRanEntry(Entry entry) {
_lastRanEntry = entry;
}
// Launches the game for the appropriate scenario
// 1. First time running the application
// 2. Replaying the version you played before
// 3. Playing a different version than the last one you played
public void launch() {
Logger.LogInfo("Launching Diablo II: " + getGameType() + " - " + _selectedEntry.Label + " [" + _selectedEntry.Version + "]");
// This will only happen the first time the user launches Diablo II from within the switcher.
if(_lastRanEntry == null) {
// Set version to the current version the user has
markSelectedEntryAsLastRan();
// Backs up the files since this is the first time you are running this application
backupFiles(true);
// Updates the registry and makes sure that you have a save directory set up (Fresh environment)
prepareRegistry();
// Launch the game
runGame();
}
else if(_lastRanEntry.Label.equalsIgnoreCase(_selectedEntry.Label) && _lastRanEntry.IsExpansion == _selectedEntry.IsExpansion) {
// If the labels are the same, we will be using the same folder,
// however, if you were to have multiple entries of the same label,
// with different flags, switching between those entries would not
// update the boolean. Let's fix that here.
if(_lastRanEntry.WasLastRan != _selectedEntry.WasLastRan) {
markSelectedEntryAsLastRan();
}
// Delete the 'data' directory in the backup if the game doesn't have a 'data' directory in the root
deleteDataDir(1);
// Since this was the last version you were playing, just start the game.
runGame();
}
else {
// This will run if you want to run a different version of D2 compared to the last one
// Launch the game only if another D2 version isn't running
if(processManager.getProcessCount() == 0) {
// Don't do anything if the files we need don't exist.
if (!areAllRequiredFilesAvailable())
{
JOptionPane.showMessageDialog(_mainFrame,
"The version of Diablo II you want to switch to doesn't have all the required files.",
"Core files missing",
JOptionPane.ERROR_MESSAGE);
return;
}
// Delete the 'data' directory of the previous version if it exists in the Diablo II root
deleteDataDir(0);
// Backs up the files if you don't already have a backup for the last ran version
backupFiles(false);
// Copy the files for the target version now
restoreFiles();
// Updates the registry and makes sure that you have a save directory set up (Fresh environment)
prepareRegistry();
// write the version for this run now
markSelectedEntryAsLastRan();
// Start the game
runGame();
}
else {
String message = "You are already running a different entry of Diablo II. Close it before switching to another entry.";
JOptionPane.showMessageDialog(_mainFrame, message, "Already running another Diablo II entry",
JOptionPane.ERROR_MESSAGE);
Logger.LogInfo(message);
}
}
}
// Update the "Save Path" and "Resolution" registry variables
private void prepareRegistry()
{
RegistryUtility ru = new RegistryUtility(root.getAbsolutePath(), _selectedEntry.Label, _selectedEntry.IsExpansion);
ru.update();
}
public void prepareRegistry(String rootDir, String label, boolean isExpansion) {
RegistryUtility ru = new RegistryUtility(rootDir, label, isExpansion);
ru.update();
}
// Makes sure that the backup/save directories exist
private void prepareBackupDir() {
File saveDir = null;
// Sets the path depending if it's an expansion or classic entry
if(_selectedEntry.IsExpansion) {
saveDir = new File(root.getAbsolutePath() + "\\Expansion\\" + _selectedEntry.Label + "\\save\\");
}
else {
saveDir = new File(root.getAbsolutePath() + "\\Classic\\" + _selectedEntry.Label + "\\save\\");
}
// If we are going to be creating the backup directory, might as well
// use the 'save' directory as the top most folder since we are going
// to need to create this directory anyways.
if(!saveDir.exists()) {
saveDir.mkdirs();
}
}
// Runs the game in a separate thread
private void runGame() {
Thread gameLaunch = new Thread(new LauncherRunnable());
gameLaunch.start();
}
// Backup the files that are in this current directory
private void backupFiles(boolean firstRun) {
Logger.LogInfo("Backing up files for " + _lastRanEntry.Label + " [" + _lastRanEntry.Version + "] ...");
// Makes sure that the backup directories exist
prepareBackupDir();
for(String file: getRequiredFiles(_lastRanEntry)) {
File sourceFile = new File(root.getAbsolutePath() + "\\" + file);
File targetFile = null;
// Sets the path depending if it's an expansion or classic entry
if(_lastRanEntry.IsExpansion) {
targetFile = new File(root.getAbsolutePath() + "\\Expansion\\" + _lastRanEntry.Label + "\\" + file);
}
else {
targetFile = new File(root.getAbsolutePath() + "\\Classic\\" + _lastRanEntry.Label + "\\" + file);
}
// Backup the files if they aren't already backed up
if(sourceFile.exists() && !targetFile.exists()) {
backupFilesHandler(sourceFile, targetFile);
}
// Delete the files for these backed up ones so that the next
// version has a clean slate. Don't delete if it's the first run,
// since then we won't be able to run anything!
if (!firstRun) {
sourceFile.delete();
}
}
// Backs up the data folder if it exists
doDataDir(0);
}
private ArrayList<String> getRequiredFiles(Entry entry)
{
ArrayList<String> requiredFiles = new ArrayList<>();
requiredFiles.addAll(Arrays.asList(commonFiles));
// 1.00 doesn't have BNUpdate.exe
if (!VersionUtility.Is100(entry.Version))
{
requiredFiles.addAll(Arrays.asList(updateFile));
}
requiredFiles.addAll(Arrays.asList(thirdPartyLibraries));
if (VersionUtility.Is114OrNewer(entry.Version))
{
requiredFiles.addAll(Arrays.asList(requiredPost113Files));
requiredFiles.add(patchMpqFile);
}
else {
// Every other version is the same (1.00-1.13),
// just 1.00 and 1.07 don't have a Patch_D2.mpq.
requiredFiles.addAll(Arrays.asList(requiredPre114Files));
// 1.07.41 (1.07 Beta. Normal 1.07 (Retail) = 1.07.44) has a few extra files.
if (VersionUtility.Is107Beta(entry.Version))
{
requiredFiles.addAll(Arrays.asList(requiredBeta10741Files));
}
if (!VersionUtility.Is100Or107(entry.Version))
{
requiredFiles.add(patchMpqFile);
}
}
return requiredFiles;
}
public String getRootPath(String gameExePath)
{
return new File(gameExePath).getParent();
}
public String getLabelRootPath(String gameExePath, String label, boolean isExpansion)
{
String root = new File(gameExePath).getParent();
File labelRoot;
if(isExpansion) {
labelRoot = new File(root + "\\Expansion\\" + label);
}
else {
labelRoot = new File(root + "\\Classic\\" + label);
}
return labelRoot.getAbsolutePath();
}
// Restore the files for the version you want to play
private void restoreFiles() {
Logger.LogInfo("Restoring important files for " + _selectedEntry.Label + " [" + _selectedEntry.Version + "] ...");
for(String file: getRequiredFiles(_selectedEntry)) {
File sourceFile = null;
File targetFile = new File(root.getAbsolutePath() + "\\" + file);
// Sets the path depending if it's an expansion or classic entry
if(_selectedEntry.IsExpansion) {
sourceFile = new File(root.getAbsolutePath() + "\\Expansion\\" + _selectedEntry.Label + "\\" + file);
}
else {
sourceFile = new File(root.getAbsolutePath() + "\\Classic\\" + _selectedEntry.Label + "\\" + file);
}
Path sourceDll = Paths.get(sourceFile.getAbsolutePath());
Path destDll = Paths.get(targetFile.getAbsolutePath());
if(sourceFile.exists()) {
try {
Files.copy(sourceDll, destDll, REPLACE_EXISTING);
}
catch (IOException e) {
e.printStackTrace();
}
}
}
// Restore the 'data' directory if it exists
doDataDir(1);
// Switch the Expansion MPQs to different locations depending if expansion/classic
if(_selectedEntry.IsExpansion) {
switchTo(GameType.Expansion);
}
else {
switchTo(GameType.Classic);
}
}
// Checks to see that all the files that we are about to attempt to use/switch to
// are available. If any files that we are expecting are missing, then we shouldn't
// switch to that version at all.
// Example: Attempting to switch to a BVS entry that has no files.. This would effectively
// delete those specific dlls from your D2 directory and you won't be able to play.
// Remarks: The 'data' folder isn't considered part of this.
private boolean areAllRequiredFilesAvailable()
{
for(String file: getRequiredFiles(_selectedEntry)) {
File sourceFile = null;
// Sets the path depending if it's an expansion or classic entry
if(_selectedEntry.IsExpansion) {
sourceFile = new File(root.getAbsolutePath() + "\\Expansion\\" + _selectedEntry.Label + "\\" + file);
}
else {
sourceFile = new File(root.getAbsolutePath() + "\\Classic\\" + _selectedEntry.Label + "\\" + file);
}
if(!sourceFile.exists())
{
return false;
}
}
return true;
}
private void backupFilesHandler(File source, File dest) {
Path sourceDll = Paths.get(source.getAbsolutePath());
Path destDll = Paths.get(dest.getAbsolutePath());
// Check to see if the version is 1.00/1.07 and if it is then don't copy some files
try {
// Expansion
if(_selectedEntry.IsExpansion) {
if(VersionUtility.Is107(_selectedEntry.Version) && !source.getName().equalsIgnoreCase("Patch_D2.mpq")) {
Files.copy(sourceDll, destDll, REPLACE_EXISTING);
}
else if(VersionUtility.Is107(_selectedEntry.Version) && source.getName().equalsIgnoreCase("Patch_D2.mpq")){
source.delete();
}
else {
// You can copy the same files for all the other versions (Well... anything > 1.07).
Files.copy(sourceDll, destDll, REPLACE_EXISTING);
}
}
else {
// Classic
if(VersionUtility.Is100(_selectedEntry.Version) && (!source.getName().equalsIgnoreCase("Patch_D2.mpq")
&& !source.getName().equalsIgnoreCase("BNUpdate.exe"))) {
Files.copy(sourceDll, destDll, REPLACE_EXISTING);
}
else if(VersionUtility.Is100(_selectedEntry.Version) && (source.getName().equalsIgnoreCase("Patch_D2.mpq")
|| source.getName().equalsIgnoreCase("BNUpdate.exe"))){
source.delete();
}
else {
// You can copy the same files for all the other versions (Well... anything > 1.00).
Files.copy(sourceDll, destDll, REPLACE_EXISTING);
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
// Backs up or restores the 'data' directory depending on the option
// Options:
// 0 = Backup
// Other = Restore
private void doDataDir(int choice) {
File sourceFile = null;
File targetFile = null;
if(choice == 0) {
Logger.LogInfo("Backing up data directory if needed...");
sourceFile = new File(root.getAbsolutePath() + "\\data\\");
if(_selectedEntry.IsExpansion) {
targetFile = new File(root.getAbsolutePath() + "\\Expansion\\" + _selectedEntry.Label + "\\data\\");
}
else {
targetFile = new File(root.getAbsolutePath() + "\\Classic\\" + _selectedEntry.Label + "\\data\\");
}
}
else {
Logger.LogInfo("Restoring data directory if needed...");
targetFile = new File(root.getAbsolutePath() + "\\data\\");
if(_selectedEntry.IsExpansion) {
sourceFile = new File(root.getAbsolutePath() + "\\Expansion\\" + _selectedEntry.Label + "\\data\\");
}
else {
sourceFile = new File(root.getAbsolutePath() + "\\Classic\\" + _selectedEntry.Label + "\\data\\");
}
}
try {
if(sourceFile.exists() && sourceFile.isDirectory()) {
if(targetFile.exists() && targetFile.isDirectory()) {
FileUtils.deleteDirectory(targetFile);
FileUtils.copyDirectory(sourceFile, targetFile);
}
else if(targetFile.exists() && !targetFile.isDirectory()) {
targetFile.delete();
FileUtils.copyDirectory(sourceFile, targetFile);
}
else {
FileUtils.copyDirectory(sourceFile, targetFile);
}
}
else if(sourceFile.exists() && !sourceFile.isDirectory()) {
Logger.LogWarning("A 'data' of type file was detected. 'data' is supposed to be a folder. Deleting...");
sourceFile.delete();
}
}
catch(IOException e) {
e.printStackTrace();
}
}
// Deletes the 'data' directory depending the situation
// Options: 0 = Deletes data dir in root
// 1 = Deletes data dir in backup if data dir in root doesn't exist
private void deleteDataDir(int option) {
File sourceFile = new File(root.getAbsolutePath() + "\\data\\");
try {
if(option == 0) {
if(sourceFile.exists() && sourceFile.isDirectory()) {
FileUtils.deleteDirectory(sourceFile);
}
else if(sourceFile.exists() && !sourceFile.isDirectory()) {
sourceFile.delete();
}
}
else if(option == 1 && !sourceFile.exists()) {
File backupFile = null;
if(_selectedEntry.IsExpansion) {
backupFile = new File(root.getAbsolutePath() + "\\Expansion\\" + _selectedEntry.Label + "\\data\\");
}
else {
backupFile = new File(root.getAbsolutePath() + "\\Classic\\" + _selectedEntry.Label + "\\data\\");
}
if(backupFile.exists() && backupFile.isDirectory()) {
FileUtils.deleteDirectory(backupFile);
}
else if(backupFile.exists() && !backupFile.isDirectory()) {
backupFile.delete();
}
}
} catch (IOException e) {
Logger.LogError("A problem was encountered while deleting the data dir");
e.printStackTrace();
}
}
// Moves classic or expansion specific MPQs to or from the root directory
private void switchTo(GameType gameType) {
String sourcePath = null;
String targetPath = null;
File sourceMpq = null;
File targetMpq = null;
if (gameType == GameType.Expansion) {
Logger.LogInfo("The game will use the Expansion MPQs. Enabling Expansion ...");
sourcePath = root.getAbsolutePath() + "\\Expansion\\";
targetPath = root.getAbsolutePath() + "\\";
}
else {
Logger.LogInfo("The game will not use the Expansion MPQs. Enabling Classic ...");
sourcePath = root.getAbsolutePath() + "\\";
targetPath = root.getAbsolutePath() + "\\Expansion\\";
}
for(String mpq: expansionMPQs) {
if (gameType == GameType.Expansion) {
sourceMpq = new File(sourcePath + mpq);
targetMpq = new File(targetPath + mpq);
}
else {
sourceMpq = new File(sourcePath + mpq);
targetMpq = new File(targetPath + mpq);
}
Path sourceMpqPath = Paths.get(sourceMpq.getAbsolutePath());
Path targetMpqPath = Paths.get(targetMpq.getAbsolutePath());
if(sourceMpq.exists()) {
try {
Files.move(sourceMpqPath, targetMpqPath, REPLACE_EXISTING);
}
catch (IOException e) {
e.printStackTrace();
}
}
}
}
private void markSelectedEntryAsLastRan() {
// There seems to be a bug somewhere where under some circumstance
// we could have 2 last ran versions. Given the amount of data we are
// working with, I think it's perfectly fine to just "reset" the state completely.
// so that we don't have to worry about anything else.
_tableManager.ClearAllLastRan();
_selectedEntry.WasLastRan = true;
_lastRanEntry = _selectedEntry;
_tableManager.saveData();
}
private String getGameType() {
return _selectedEntry.IsExpansion ? "Expansion" : "Classic";
}
public class LauncherRunnable implements Runnable {
public void run() {
int result = processManager.startProcess(_selectedEntry.Path, _selectedEntry.getSplitFlags());
if (result == -1) {
Logger.LogError("There was an error managing the Diablo II process.");
}
// Only backup the 'data' directory if it exists and only after the last process finished.
// Meaning that if the user has 3 Diablo II processes opened, only after the user closes the
// last one will the application backup the data dir.
if(processManager.getProcessCount() == 0) {
doDataDir(0);
}
}
}
}
|
|
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.peejweej.androidsideloading.fragments;
import android.annotation.TargetApi;
import android.app.ProgressDialog;
import android.content.Intent;
import android.net.Uri;
import android.net.wifi.WpsInfo;
import android.net.wifi.p2p.WifiP2pConfig;
import android.net.wifi.p2p.WifiP2pDevice;
import android.net.wifi.p2p.WifiP2pInfo;
import android.net.wifi.p2p.WifiP2pManager.ConnectionInfoListener;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import com.github.peejweej.androidsideloading.R;
import com.github.peejweej.androidsideloading.fragments.wifi.FileServerAsyncTask;
import com.github.peejweej.androidsideloading.fragments.wifi.FileTransferService;
import com.github.peejweej.androidsideloading.activities.WiFiDirectActivity;
/**
 * A fragment that manages a particular Wi-Fi Direct peer and allows interaction
 * with the device, i.e. setting up the network connection and transferring data.
 *
 * <p>Button visibility is driven entirely by two pieces of state: {@link #device}
 * (a peer has been selected) and {@link #info} (a connection has been formed).
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
public class DeviceDetailFragment extends Fragment implements ConnectionInfoListener {
    protected static final int CHOOSE_FILE_RESULT_CODE = 20;
    private View mContentView = null;
    // Peer currently selected; null until showDetails() is called.
    private WifiP2pDevice device;
    // Connection info; null until onConnectionInfoAvailable() fires.
    private WifiP2pInfo info;
    private ProgressDialog progressDialog = null;
    private Button findDevicesButton;
    private Button activateWifiButton;
    private Button connectButton;
    private Button disconnectButton;
    private Button startTransferButton;
    private TextView statusTextView;
    private TextView deviceInfoTextView;
    public DeviceDetailFragment() {
    }
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate against the container (attachToRoot=false) so the layout's
        // LayoutParams are honored; inflating with a null root silently drops them.
        mContentView = inflater.inflate(R.layout.device_detail, container, false);
        setupViews();
        manageButtons();
        return mContentView;
    }
    @Override
    public void onResume() {
        super.onResume();
        // Wi-Fi state may have changed while paused; recompute button visibility.
        manageButtons();
    }
    // Looks up all child views once and wires the button click handlers.
    private void setupViews(){
        findDevicesButton = (Button) mContentView.findViewById(R.id.btn_find_devices);
        findDevicesButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                findDevices();
            }
        });
        activateWifiButton = (Button) mContentView.findViewById(R.id.btn_activate_wifi);
        activateWifiButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                openWifi();
            }
        });
        statusTextView = (TextView) mContentView.findViewById(R.id.status_text);
        deviceInfoTextView = (TextView) mContentView.findViewById(R.id.device_info);
        connectButton = (Button) mContentView.findViewById(R.id.btn_connect);
        connectButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                connect();
            }
        });
        disconnectButton = (Button) mContentView.findViewById(R.id.btn_disconnect);
        disconnectButton.setOnClickListener(
                new View.OnClickListener() {
                    public void onClick(View v) {
                        disconnect();
                    }
                });
        startTransferButton = (Button) mContentView.findViewById(R.id.btn_start_client);
        startTransferButton.setOnClickListener(
                new View.OnClickListener() {
                    public void onClick(View v) {
                        startTransfer();
                    }
                });
    }
    private void findDevices(){
        ((WiFiDirectActivity) getActivity()).startDiscovery();
    }
    private void openWifi(){
        ((WiFiDirectActivity) getActivity()).openWifiPreferences();
    }
    private void startTransfer(){
        ((WiFiDirectActivity) getActivity()).transferFile();
    }
    private void disconnect(){
        ((DevicesListFragment.DeviceActionListener) getActivity()).disconnect();
    }
    /**
     * Shows/hides the buttons based on the current state: whether a peer is
     * selected, whether a connection is formed, and whether Wi-Fi P2P is on.
     */
    public void manageButtons(){
        boolean hasDevice = device != null;
        boolean hasConnected = info != null;
        findDevicesButton.setVisibility((!hasDevice &&!hasConnected && wifiIsOn())? View.VISIBLE : View.GONE);
        activateWifiButton.setVisibility((!wifiIsOn())? View.VISIBLE : View.GONE);
        connectButton.setVisibility((hasDevice && !hasConnected)? View.VISIBLE : View.GONE);
        disconnectButton.setVisibility((hasDevice && hasConnected)? View.VISIBLE : View.GONE);
        startTransferButton.setVisibility((hasDevice && hasConnected) ? View.VISIBLE : View.GONE);
    }
    // Initiates a push-button (PBC) WPS connection to the selected peer and
    // shows a cancellable progress dialog while the negotiation runs.
    private void connect(){
        WifiP2pConfig config = new WifiP2pConfig();
        config.deviceAddress = device.deviceAddress;
        config.wps.setup = WpsInfo.PBC;
        if (progressDialog != null && progressDialog.isShowing()) {
            progressDialog.dismiss();
        }
        progressDialog = ProgressDialog.show(getActivity(), "Press back to cancel",
                "Connecting to :" + device.deviceAddress, true, true
        );
        ((DevicesListFragment.DeviceActionListener) getActivity()).connect(config);
    }
    /**
     * Starts {@link FileTransferService} to stream the given file to the group
     * owner. Must only be called after {@link #info} is available.
     *
     * @param uri content URI of the file to send
     */
    public void transferFile(Uri uri){
        // The status view was already resolved in setupViews(); no need to
        // look up R.id.status_text again.
        statusTextView.setText("Sending: " + uri);
        Log.d(WiFiDirectActivity.TAG, "Intent----------- " + uri);
        Intent serviceIntent = new Intent(getActivity(), FileTransferService.class);
        serviceIntent.setAction(FileTransferService.ACTION_SEND_FILE);
        serviceIntent.putExtra(FileTransferService.EXTRAS_FILE_PATH, uri.toString());
        serviceIntent.putExtra(FileTransferService.EXTRAS_GROUP_OWNER_ADDRESS,
                info.groupOwnerAddress.getHostAddress());
        serviceIntent.putExtra(FileTransferService.EXTRAS_GROUP_OWNER_PORT, 8988);
        getActivity().startService(serviceIntent);
    }
    public void onConnectionInfoAvailable(final WifiP2pInfo info) {
        if (progressDialog != null && progressDialog.isShowing()) {
            progressDialog.dismiss();
        }
        this.info = info;
        // The owner IP is now known (InetAddress from the WifiP2pInfo struct).
        deviceInfoTextView.setText("Group Owner IP - " + info.groupOwnerAddress.getHostAddress());
        // After the group negotiation, we assign the group owner as the file
        // server. The file server is single threaded, single connection server
        // socket.
        if (info.groupFormed && info.isGroupOwner) {
            new FileServerAsyncTask(getActivity(), statusTextView).execute();
        } else if (info.groupFormed) {
            // The other device acts as the client. In this case, we enable the
            // get file button.
            startTransferButton.setVisibility(View.VISIBLE);
            statusTextView.setText(getResources().getString(R.string.client_text));
        }
        // hide the connect button
        manageButtons();
    }
    /**
     * Updates the UI with device data.
     *
     * @param device the device to be displayed
     */
    public void showDetails(WifiP2pDevice device) {
        this.device = device;
        deviceInfoTextView.setText(device.toString());
        manageButtons();
    }
    /**
     * Clears the UI fields after a disconnect or direct mode disable operation.
     */
    public void resetViews() {
        deviceInfoTextView.setText("");
        statusTextView.setText("");
        manageButtons();
    }
    // Forgets the current peer and connection so the UI returns to discovery state.
    public void resetData(){
        device = null;
        info = null;
    }
    private boolean wifiIsOn() {
        return ((WiFiDirectActivity) getActivity()).isWifiP2pEnabled();
    }
}
|
|
/*
* Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.threeten.extra;
import static java.time.temporal.ChronoField.ALIGNED_DAY_OF_WEEK_IN_MONTH;
import static java.time.temporal.ChronoField.ALIGNED_DAY_OF_WEEK_IN_YEAR;
import static java.time.temporal.ChronoField.ALIGNED_WEEK_OF_MONTH;
import static java.time.temporal.ChronoField.ALIGNED_WEEK_OF_YEAR;
import static java.time.temporal.ChronoField.AMPM_OF_DAY;
import static java.time.temporal.ChronoField.CLOCK_HOUR_OF_AMPM;
import static java.time.temporal.ChronoField.CLOCK_HOUR_OF_DAY;
import static java.time.temporal.ChronoField.DAY_OF_MONTH;
import static java.time.temporal.ChronoField.DAY_OF_WEEK;
import static java.time.temporal.ChronoField.DAY_OF_YEAR;
import static java.time.temporal.ChronoField.EPOCH_DAY;
import static java.time.temporal.ChronoField.ERA;
import static java.time.temporal.ChronoField.HOUR_OF_AMPM;
import static java.time.temporal.ChronoField.HOUR_OF_DAY;
import static java.time.temporal.ChronoField.INSTANT_SECONDS;
import static java.time.temporal.ChronoField.MICRO_OF_DAY;
import static java.time.temporal.ChronoField.MICRO_OF_SECOND;
import static java.time.temporal.ChronoField.MILLI_OF_DAY;
import static java.time.temporal.ChronoField.MILLI_OF_SECOND;
import static java.time.temporal.ChronoField.MINUTE_OF_DAY;
import static java.time.temporal.ChronoField.MINUTE_OF_HOUR;
import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
import static java.time.temporal.ChronoField.NANO_OF_DAY;
import static java.time.temporal.ChronoField.NANO_OF_SECOND;
import static java.time.temporal.ChronoField.OFFSET_SECONDS;
import static java.time.temporal.ChronoField.PROLEPTIC_MONTH;
import static java.time.temporal.ChronoField.SECOND_OF_DAY;
import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;
import static java.time.temporal.ChronoField.YEAR;
import static java.time.temporal.ChronoField.YEAR_OF_ERA;
import static java.time.temporal.IsoFields.QUARTER_OF_YEAR;
import static java.time.temporal.IsoFields.QUARTER_YEARS;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import java.io.Serializable;
import java.time.DateTimeException;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Month;
import java.time.chrono.IsoChronology;
import java.time.format.TextStyle;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalQueries;
import java.time.temporal.UnsupportedTemporalTypeException;
import java.util.Locale;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
/**
* Test Quarter.
*/
@Test
public class TestQuarter {
    @BeforeMethod
    public void setUp() {
        // No shared fixture state: Quarter is an immutable enum, so there is
        // nothing to reset between tests.
    }
//-----------------------------------------------------------------------
@Test
public void test_interfaces() {
assertTrue(Enum.class.isAssignableFrom(Quarter.class));
assertTrue(Serializable.class.isAssignableFrom(Quarter.class));
assertTrue(Comparable.class.isAssignableFrom(Quarter.class));
assertTrue(TemporalAccessor.class.isAssignableFrom(Quarter.class));
}
//-----------------------------------------------------------------------
// of(int)
//-----------------------------------------------------------------------
@Test
public void test_of_int_singleton() {
for (int i = 1; i <= 4; i++) {
Quarter test = Quarter.of(i);
assertEquals(test.getValue(), i);
}
}
    @Test(expectedExceptions = DateTimeException.class)
    public void test_of_int_valueTooLow() {
        // 0 is below the valid quarter range 1-4, so of(int) must reject it.
        Quarter.of(0);
    }
    @Test(expectedExceptions = DateTimeException.class)
    public void test_of_int_valueTooHigh() {
        // 5 is above the valid quarter range 1-4, so of(int) must reject it.
        Quarter.of(5);
    }
//-----------------------------------------------------------------------
// ofMonth(int)
//-----------------------------------------------------------------------
@Test
public void test_ofMonth_int_singleton() {
assertEquals(Quarter.ofMonth(1), Quarter.Q1);
assertEquals(Quarter.ofMonth(2), Quarter.Q1);
assertEquals(Quarter.ofMonth(3), Quarter.Q1);
assertEquals(Quarter.ofMonth(4), Quarter.Q2);
assertEquals(Quarter.ofMonth(5), Quarter.Q2);
assertEquals(Quarter.ofMonth(6), Quarter.Q2);
assertEquals(Quarter.ofMonth(7), Quarter.Q3);
assertEquals(Quarter.ofMonth(8), Quarter.Q3);
assertEquals(Quarter.ofMonth(9), Quarter.Q3);
assertEquals(Quarter.ofMonth(10), Quarter.Q4);
assertEquals(Quarter.ofMonth(11), Quarter.Q4);
assertEquals(Quarter.ofMonth(12), Quarter.Q4);
}
    @Test(expectedExceptions = DateTimeException.class)
    public void test_ofMonth_int_valueTooLow() {
        // 0 is below the valid month range 1-12, so ofMonth(int) must reject it.
        Quarter.ofMonth(0);
    }
    @Test(expectedExceptions = DateTimeException.class)
    public void test_ofMonth_int_valueTooHigh() {
        // 13 is above the valid month range 1-12, so ofMonth(int) must reject it.
        Quarter.ofMonth(13);
    }
//-----------------------------------------------------------------------
// from(TemporalAccessor)
//-----------------------------------------------------------------------
@Test
public void test_from_TemporalAccessor() {
assertEquals(Quarter.from(LocalDate.of(2011, 6, 6)), Quarter.Q2);
assertEquals(Quarter.from(LocalDateTime.of(2012, 2, 3, 12, 30)), Quarter.Q1);
}
@Test
public void test_from_TemporalAccessor_Month() {
assertEquals(Quarter.from(Month.JANUARY), Quarter.Q1);
assertEquals(Quarter.from(Month.FEBRUARY), Quarter.Q1);
assertEquals(Quarter.from(Month.MARCH), Quarter.Q1);
assertEquals(Quarter.from(Month.APRIL), Quarter.Q2);
assertEquals(Quarter.from(Month.MAY), Quarter.Q2);
assertEquals(Quarter.from(Month.JUNE), Quarter.Q2);
assertEquals(Quarter.from(Month.JULY), Quarter.Q3);
assertEquals(Quarter.from(Month.AUGUST), Quarter.Q3);
assertEquals(Quarter.from(Month.SEPTEMBER), Quarter.Q3);
assertEquals(Quarter.from(Month.OCTOBER), Quarter.Q4);
assertEquals(Quarter.from(Month.NOVEMBER), Quarter.Q4);
assertEquals(Quarter.from(Month.DECEMBER), Quarter.Q4);
}
    @Test(expectedExceptions = DateTimeException.class)
    public void test_from_TemporalAccessorl_invalid_noDerive() {
        // NOTE(review): method name has a typo ("TemporalAccessorl"); left as-is
        // to avoid changing the public test name.
        // A time-only temporal carries no month/quarter information, so from()
        // must fail rather than guess.
        Quarter.from(LocalTime.of(12, 30));
    }
    @Test(expectedExceptions = NullPointerException.class)
    public void test_from_TemporalAccessor_null() {
        // from() is null-hostile: a null temporal must throw NPE, not return null.
        Quarter.from((TemporalAccessor) null);
    }
//-----------------------------------------------------------------------
// getDisplayName()
//-----------------------------------------------------------------------
    @Test
    public void test_getDisplayName() {
        // The short US text for the first quarter is "Q1".
        assertEquals(Quarter.Q1.getDisplayName(TextStyle.SHORT, Locale.US), "Q1");
    }
    @Test(expectedExceptions = NullPointerException.class)
    public void test_getDisplayName_nullStyle() {
        // A null TextStyle must be rejected with NPE.
        Quarter.Q1.getDisplayName(null, Locale.US);
    }
    @Test(expectedExceptions = NullPointerException.class)
    public void test_getDisplayName_nullLocale() {
        // A null Locale must be rejected with NPE.
        Quarter.Q1.getDisplayName(TextStyle.FULL, null);
    }
//-----------------------------------------------------------------------
// isSupported()
//-----------------------------------------------------------------------
public void test_isSupported() {
Quarter test = Quarter.Q1;
assertEquals(test.isSupported(null), false);
assertEquals(test.isSupported(NANO_OF_SECOND), false);
assertEquals(test.isSupported(NANO_OF_DAY), false);
assertEquals(test.isSupported(MICRO_OF_SECOND), false);
assertEquals(test.isSupported(MICRO_OF_DAY), false);
assertEquals(test.isSupported(MILLI_OF_SECOND), false);
assertEquals(test.isSupported(MILLI_OF_DAY), false);
assertEquals(test.isSupported(SECOND_OF_MINUTE), false);
assertEquals(test.isSupported(SECOND_OF_DAY), false);
assertEquals(test.isSupported(MINUTE_OF_HOUR), false);
assertEquals(test.isSupported(MINUTE_OF_DAY), false);
assertEquals(test.isSupported(HOUR_OF_AMPM), false);
assertEquals(test.isSupported(CLOCK_HOUR_OF_AMPM), false);
assertEquals(test.isSupported(HOUR_OF_DAY), false);
assertEquals(test.isSupported(CLOCK_HOUR_OF_DAY), false);
assertEquals(test.isSupported(AMPM_OF_DAY), false);
assertEquals(test.isSupported(DAY_OF_WEEK), false);
assertEquals(test.isSupported(ALIGNED_DAY_OF_WEEK_IN_MONTH), false);
assertEquals(test.isSupported(ALIGNED_DAY_OF_WEEK_IN_YEAR), false);
assertEquals(test.isSupported(DAY_OF_MONTH), false);
assertEquals(test.isSupported(DAY_OF_YEAR), false);
assertEquals(test.isSupported(EPOCH_DAY), false);
assertEquals(test.isSupported(ALIGNED_WEEK_OF_MONTH), false);
assertEquals(test.isSupported(ALIGNED_WEEK_OF_YEAR), false);
assertEquals(test.isSupported(MONTH_OF_YEAR), false);
assertEquals(test.isSupported(PROLEPTIC_MONTH), false);
assertEquals(test.isSupported(YEAR_OF_ERA), false);
assertEquals(test.isSupported(YEAR), false);
assertEquals(test.isSupported(ERA), false);
assertEquals(test.isSupported(INSTANT_SECONDS), false);
assertEquals(test.isSupported(OFFSET_SECONDS), false);
assertEquals(test.isSupported(QUARTER_OF_YEAR), true);
}
//-----------------------------------------------------------------------
// range()
//-----------------------------------------------------------------------
public void test_range() {
assertEquals(Quarter.Q1.range(QUARTER_OF_YEAR), QUARTER_OF_YEAR.range());
}
@Test(expectedExceptions = UnsupportedTemporalTypeException.class)
public void test_range_invalidField() {
Quarter.Q1.range(MONTH_OF_YEAR);
}
@Test(expectedExceptions = NullPointerException.class)
public void test_range_null() {
Quarter.Q1.range(null);
}
//-----------------------------------------------------------------------
// get()
//-----------------------------------------------------------------------
public void test_get() {
assertEquals(Quarter.Q1.get(QUARTER_OF_YEAR), 1);
assertEquals(Quarter.Q2.get(QUARTER_OF_YEAR), 2);
assertEquals(Quarter.Q3.get(QUARTER_OF_YEAR), 3);
assertEquals(Quarter.Q4.get(QUARTER_OF_YEAR), 4);
}
@Test(expectedExceptions = UnsupportedTemporalTypeException.class)
public void test_get_invalidField() {
Quarter.Q2.get(MONTH_OF_YEAR);
}
@Test(expectedExceptions = NullPointerException.class)
public void test_get_null() {
Quarter.Q2.get(null);
}
//-----------------------------------------------------------------------
// getLong()
//-----------------------------------------------------------------------
public void test_getLong() {
assertEquals(Quarter.Q1.getLong(QUARTER_OF_YEAR), 1);
assertEquals(Quarter.Q2.getLong(QUARTER_OF_YEAR), 2);
assertEquals(Quarter.Q3.getLong(QUARTER_OF_YEAR), 3);
assertEquals(Quarter.Q4.getLong(QUARTER_OF_YEAR), 4);
}
@Test(expectedExceptions = UnsupportedTemporalTypeException.class)
public void test_getLong_invalidField() {
Quarter.Q2.getLong(MONTH_OF_YEAR);
}
@Test(expectedExceptions = NullPointerException.class)
public void test_getLong_null() {
Quarter.Q2.getLong(null);
}
//-----------------------------------------------------------------------
// plus(long), plus(long,unit)
//-----------------------------------------------------------------------
@DataProvider(name = "plus")
Object[][] data_plus() {
return new Object[][] {
{1, -5, 4},
{1, -4, 1},
{1, -3, 2},
{1, -2, 3},
{1, -1, 4},
{1, 0, 1},
{1, 1, 2},
{1, 2, 3},
{1, 3, 4},
{1, 4, 1},
{1, 5, 2},
};
}
@Test(dataProvider = "plus")
public void test_plus_long(int base, long amount, int expected) {
assertEquals(Quarter.of(base).plus(amount), Quarter.of(expected));
}
//-----------------------------------------------------------------------
// minus(long), minus(long,unit)
//-----------------------------------------------------------------------
@DataProvider(name = "minus")
Object[][] data_minus() {
return new Object[][] {
{1, -5, 2},
{1, -4, 1},
{1, -3, 4},
{1, -2, 3},
{1, -1, 2},
{1, 0, 1},
{1, 1, 4},
{1, 2, 3},
{1, 3, 2},
{1, 4, 1},
{1, 5, 4},
};
}
@Test(dataProvider = "minus")
public void test_minus_long(int base, long amount, int expected) {
assertEquals(Quarter.of(base).minus(amount), Quarter.of(expected));
}
//-----------------------------------------------------------------------
// length(boolean)
//-----------------------------------------------------------------------
@Test
public void test_length_boolean() {
assertEquals(Quarter.Q1.length(true), 91);
assertEquals(Quarter.Q1.length(false), 90);
assertEquals(Quarter.Q2.length(true), 91);
assertEquals(Quarter.Q2.length(false), 91);
assertEquals(Quarter.Q3.length(true), 92);
assertEquals(Quarter.Q3.length(false), 92);
assertEquals(Quarter.Q4.length(true), 92);
assertEquals(Quarter.Q4.length(false), 92);
}
//-----------------------------------------------------------------------
// firstMonth()
//-----------------------------------------------------------------------
@Test
public void test_firstMonth() {
assertEquals(Quarter.Q1.firstMonth(), Month.JANUARY);
assertEquals(Quarter.Q2.firstMonth(), Month.APRIL);
assertEquals(Quarter.Q3.firstMonth(), Month.JULY);
assertEquals(Quarter.Q4.firstMonth(), Month.OCTOBER);
}
//-----------------------------------------------------------------------
// query()
//-----------------------------------------------------------------------
@Test
public void test_query() {
assertEquals(Quarter.Q1.query(TemporalQueries.chronology()), IsoChronology.INSTANCE);
assertEquals(Quarter.Q1.query(TemporalQueries.localDate()), null);
assertEquals(Quarter.Q1.query(TemporalQueries.localTime()), null);
assertEquals(Quarter.Q1.query(TemporalQueries.offset()), null);
assertEquals(Quarter.Q1.query(TemporalQueries.precision()), QUARTER_YEARS);
assertEquals(Quarter.Q1.query(TemporalQueries.zone()), null);
assertEquals(Quarter.Q1.query(TemporalQueries.zoneId()), null);
}
//-----------------------------------------------------------------------
// toString()
//-----------------------------------------------------------------------
@Test
public void test_toString() {
assertEquals(Quarter.Q1.toString(), "Q1");
assertEquals(Quarter.Q2.toString(), "Q2");
assertEquals(Quarter.Q3.toString(), "Q3");
assertEquals(Quarter.Q4.toString(), "Q4");
}
//-----------------------------------------------------------------------
// generated methods
//-----------------------------------------------------------------------
@Test
public void test_enum() {
assertEquals(Quarter.valueOf("Q4"), Quarter.Q4);
assertEquals(Quarter.values()[0], Quarter.Q1);
}
}
|
|
package com.company.Tree;
/**
* Created by Wushudeng on 2018/5/23.
*/
public class Treap<K extends Comparable<? super K>, V> {
    /** Upper bound (exclusive) for randomly assigned node priorities. */
    private static int MAX_PRIORITY = Integer.MAX_VALUE;

    /**
     * A treap node: BST-ordered by key, min-heap-ordered by priority
     * (smaller priority values sit closer to the root).
     */
    private static class Node<K extends Comparable<? super K>, V> {
        public K key;
        public V value;
        public int priority;
        public Node<K,V> left;
        public Node<K,V> right;

        public Node() {
            priority = (int) (MAX_PRIORITY * Math.random());
        }

        public Node(K newKey, V newValue) {
            key = newKey;
            value = newValue;
            priority = (int) (MAX_PRIORITY * Math.random());
        }

        public void setPriority(int newPriority) {
            priority = newPriority;
        }
    }

    private Node root;

    public Treap() {
        root = null;
    }

    private Treap(Node node) {
        root = node;
    }

    /**
     * Rotates the subtree rooted at {@code node} to the right; the left
     * child becomes the new subtree root.
     * @param node subtree root with a non-null left child
     * @return the new subtree root
     */
    private Node rightRotate(Node node) {
        Node pivot = node.left;
        node.left = pivot.right;
        pivot.right = node;
        return pivot;
    }

    /**
     * Rotates the subtree rooted at {@code node} to the left; the right
     * child becomes the new subtree root.
     * @param node subtree root with a non-null right child
     * @return the new subtree root
     */
    private Node leftRotate(Node node) {
        Node pivot = node.right;
        // BUG FIX: was "node.right = node.left", which dropped the pivot's
        // left subtree and corrupted the BST ordering.
        node.right = pivot.left;
        pivot.left = node;
        return pivot;
    }

    /**
     * Inserts a key/value pair; an existing key has its value overwritten.
     * @throws NullPointerException if key or value is null
     */
    public void insert(K key, V value) {
        if (key == null || value == null) {
            throw new NullPointerException("key or value is null");
        }
        root = BSTInsert(root, key, value);
    }

    /**
     * Recursive BST insert that restores the min-heap invariant by rotating
     * the freshly inserted child up while its priority is smaller than its
     * parent's. Inserting an existing key overwrites its value in place.
     */
    public Node BSTInsert(Node node, K key, V value) {
        if (node == null) {
            return new Node(key, value);
        }
        int compare = node.key.compareTo(key);
        if (compare > 0) {
            node.left = BSTInsert(node.left, key, value);
            if (node.priority > node.left.priority) {
                return rightRotate(node);
            }
        } else if (compare < 0) {
            node.right = BSTInsert(node.right, key, value);
            if (node.priority > node.right.priority) {
                return leftRotate(node);
            }
        } else {
            node.value = value;
        }
        return node;
    }

    /** Standard BST lookup; returns null when the key is absent. */
    public Node BSTFind(Node node, K key) {
        if (node == null) {
            return null;
        }
        if (node.key.compareTo(key) > 0) {
            return BSTFind(node.left, key);
        } else if (node.key.compareTo(key) < 0) {
            return BSTFind(node.right, key);
        } else {
            return node;
        }
    }

    /** Finds the node holding {@code key}, or null if absent or the treap is empty. */
    public Node find(K key) {
        if (root == null) {
            return null;
        }
        return BSTFind(root, key);
    }

    /**
     * Removes {@code key} from the subtree. A node with two children is
     * rotated downward (promoting the child with the smaller priority, to
     * keep the min-heap invariant) until it has at most one child.
     */
    private Node BSTRemove(Node node, K key) {
        if (node == null) {
            return null;
        }
        int compare = node.key.compareTo(key);
        if (compare > 0) {
            // BUG FIX: node.key > key means the target is in the LEFT subtree
            // (the original descended right, mirroring BSTFind incorrectly).
            node.left = BSTRemove(node.left, key);
        } else if (compare < 0) {
            node.right = BSTRemove(node.right, key);
        } else {
            if (node.left == null) {
                return node.right;
            } else if (node.right == null) {
                return node.left;
            } else if (node.left.priority < node.right.priority) {
                // Left child has the smaller priority: promote it; the target
                // becomes the new root's right child.
                node = rightRotate(node);
                // BUG FIX: the recursive result was previously discarded.
                node.right = BSTRemove(node.right, key);
            } else {
                node = leftRotate(node);
                node.left = BSTRemove(node.left, key);
            }
        }
        return node;
    }

    /**
     * Removes and returns the node holding {@code key}, or null if absent.
     * @throws NullPointerException if key is null
     */
    public Node remove(K key) {
        if (key == null) {
            throw new NullPointerException("key is null");
        }
        Node removed = find(key);
        // if removed node does not exist
        if (removed == null) {
            return null;
        }
        // BUG FIX: the original nulled the whole tree whenever the removed
        // node had at most one child; always delete through BSTRemove instead.
        root = BSTRemove(root, key);
        return removed;
    }

    /**
     * Inserts {@code key} with the minimum possible priority so the rotations
     * bubble it all the way up to the subtree root (used by split()).
     */
    private Node BSTMinimunInsert(Node node, K key, V value) {
        if (node == null) {
            // BUG FIX: priority must be forced to MIN_VALUE on the node that
            // actually carries the split key (the original set it on whatever
            // node the recursion happened to return).
            Node created = new Node(key, value);
            created.setPriority(Integer.MIN_VALUE);
            return created;
        }
        int compare = node.key.compareTo(key);
        if (compare > 0) {
            // BUG FIX: recurse into this method, not the ordinary BSTInsert.
            node.left = BSTMinimunInsert(node.left, key, value);
            if (node.priority > node.left.priority) {
                return rightRotate(node);
            }
        } else if (compare < 0) {
            node.right = BSTMinimunInsert(node.right, key, value);
            if (node.priority > node.right.priority) {
                return leftRotate(node);
            }
        } else {
            node.value = value;
            node.setPriority(Integer.MIN_VALUE);
        }
        return node;
    }

    /**
     * Splits this treap around {@code key}: index 0 receives the keys below
     * the split point, index 1 the keys above it. This treap's nodes are
     * consumed by the operation.
     */
    public Treap<K,V>[] split(K key) {
        Treap<K, V>[] treaps = new Treap[2];
        if (root == null) {
            treaps[0] = new Treap<>();
            treaps[1] = new Treap<>();
            return treaps;
        }
        Treap<K, V> new_treap = new Treap<>(root);
        Node<K,V> lost = new_treap.find(key);
        int flag = 0;
        // Force the split key to the root with minimum priority.
        // BUG FIX: the returned root was previously discarded.
        new_treap.root = new_treap.BSTMinimunInsert(new_treap.root, key, (V) root.value);
        if (lost != null) {
            if (lost.key.compareTo((K) root.key) > 0) {
                flag = 1;
            } else if (lost.key.compareTo((K) root.key) < 0) {
                flag = -1;
            }
        }
        treaps[0] = new Treap<>(new_treap.root.left);
        treaps[1] = new Treap<>(new_treap.root.right);
        // Reinsert the pair that was overwritten by the split key, if any.
        if (flag == -1) {
            treaps[0].insert(lost.key, lost.value);
        } else if (flag == 1) {
            treaps[1].insert(lost.key, lost.value);
        }
        return treaps;
    }

    /**
     * Joins {@code treap} into this treap via a throw-away dummy root that is
     * immediately deleted again. Assumes the usual join precondition that the
     * two key ranges do not interleave — TODO confirm with callers.
     */
    public void join(Treap<K,V> treap) {
        Treap<K,V> join = new Treap<>();
        if (treap.root == null) {
            return;
        }
        if (root == null) {
            root = treap.root;
            return;
        }
        join.root = new Node<K,V>((K) root.key, (V) root.value);
        join.root.setPriority(Integer.MIN_VALUE);
        join.root.left = root;
        join.root.right = treap.root;
        join.remove((K) root.key);
        root = join.root;
    }
}
|
|
package org.docksidestage.hangar.dbflute.bsentity;
import java.util.List;
import java.util.ArrayList;
import org.dbflute.Entity;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.AbstractEntity;
import org.dbflute.dbmeta.accessory.DomainEntity;
import org.dbflute.optional.OptionalEntity;
import org.docksidestage.hangar.dbflute.allcommon.DBMetaInstanceHandler;
import org.docksidestage.hangar.dbflute.exentity.*;
/**
* The entity of WHITE_COMPOUND_PK_REF_PHYSICAL as TABLE. <br>
* <pre>
* [primary-key]
* REF_FIRST_ID, REF_SECOND_ID, REF_THIRD_ID
*
* [column]
* REF_FIRST_ID, REF_SECOND_ID, REF_THIRD_ID, COMPOUND_REF_NAME
*
* [sequence]
*
*
* [identity]
*
*
* [version-no]
*
*
* [foreign table]
* WHITE_COMPOUND_PK
*
* [referrer table]
*
*
* [foreign property]
* whiteCompoundPk
*
* [referrer property]
*
*
* [get/set template]
* /= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
* Long refFirstId = entity.getRefFirstId();
* Long refSecondId = entity.getRefSecondId();
* Long refThirdId = entity.getRefThirdId();
* String compoundRefName = entity.getCompoundRefName();
* entity.setRefFirstId(refFirstId);
* entity.setRefSecondId(refSecondId);
* entity.setRefThirdId(refThirdId);
* entity.setCompoundRefName(compoundRefName);
* = = = = = = = = = =/
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class BsWhiteCompoundPkRefPhysical extends AbstractEntity implements DomainEntity {

    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    /** The serial version UID for object serialization. (Default) */
    private static final long serialVersionUID = 1L;

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    /** REF_FIRST_ID: {PK, IX+, NotNull, DECIMAL(16), FK to WHITE_COMPOUND_PK} */
    protected Long _refFirstId;

    /** REF_SECOND_ID: {PK, NotNull, DECIMAL(16), FK to WHITE_COMPOUND_PK} */
    protected Long _refSecondId;

    /** REF_THIRD_ID: {PK, NotNull, DECIMAL(16)} */
    protected Long _refThirdId;

    /** COMPOUND_REF_NAME: {NotNull, VARCHAR(200)} */
    protected String _compoundRefName;

    // ===================================================================================
    //                                                                             DB Meta
    //                                                                             =======
    /** {@inheritDoc} */
    public DBMeta asDBMeta() {
        // Resolve the meta instance lazily via the shared handler.
        return DBMetaInstanceHandler.findDBMeta(asTableDbName());
    }

    /** {@inheritDoc} */
    public String asTableDbName() {
        return "WHITE_COMPOUND_PK_REF_PHYSICAL";
    }

    // ===================================================================================
    //                                                                        Key Handling
    //                                                                        ============
    /** {@inheritDoc} */
    public boolean hasPrimaryKeyValue() {
        // The compound primary key is complete only when all three columns are set.
        return _refFirstId != null && _refSecondId != null && _refThirdId != null;
    }

    // ===================================================================================
    //                                                                    Foreign Property
    //                                                                    ================
    /** WHITE_COMPOUND_PK by my REF_FIRST_ID, REF_SECOND_ID, named 'whiteCompoundPk'. */
    protected OptionalEntity<WhiteCompoundPk> _whiteCompoundPk;

    /**
     * [get] WHITE_COMPOUND_PK by my REF_FIRST_ID, REF_SECOND_ID, named 'whiteCompoundPk'. <br>
     * Optional: alwaysPresent(), ifPresent().orElse(), get(), ...
     * @return The entity of foreign property 'whiteCompoundPk'. (NotNull, EmptyAllowed: when e.g. null FK column, no setupSelect)
     */
    public OptionalEntity<WhiteCompoundPk> getWhiteCompoundPk() {
        // Lazily substitute an empty relation so callers never see null.
        if (_whiteCompoundPk == null) {
            _whiteCompoundPk = OptionalEntity.relationEmpty(this, "whiteCompoundPk");
        }
        return _whiteCompoundPk;
    }

    /**
     * [set] WHITE_COMPOUND_PK by my REF_FIRST_ID, REF_SECOND_ID, named 'whiteCompoundPk'.
     * @param whiteCompoundPk The entity of foreign property 'whiteCompoundPk'. (NullAllowed)
     */
    public void setWhiteCompoundPk(OptionalEntity<WhiteCompoundPk> whiteCompoundPk) {
        _whiteCompoundPk = whiteCompoundPk;
    }

    // ===================================================================================
    //                                                                   Referrer Property
    //                                                                   =================
    protected <ELEMENT> List<ELEMENT> newReferrerList() { // overriding to import
        return new ArrayList<ELEMENT>();
    }

    // ===================================================================================
    //                                                                      Basic Override
    //                                                                      ==============
    @Override
    protected boolean doEquals(Object obj) {
        // Equality is driven by the three primary-key columns only.
        if (!(obj instanceof BsWhiteCompoundPkRefPhysical)) {
            return false;
        }
        BsWhiteCompoundPkRefPhysical other = (BsWhiteCompoundPkRefPhysical) obj;
        return xSV(_refFirstId, other._refFirstId)
                && xSV(_refSecondId, other._refSecondId)
                && xSV(_refThirdId, other._refThirdId);
    }

    @Override
    protected int doHashCode(int initial) {
        // Hash covers the table name plus the primary-key columns (matches doEquals).
        int hash = initial;
        hash = xCH(hash, asTableDbName());
        hash = xCH(hash, _refFirstId);
        hash = xCH(hash, _refSecondId);
        hash = xCH(hash, _refThirdId);
        return hash;
    }

    @Override
    protected String doBuildStringWithRelation(String li) {
        StringBuilder sb = new StringBuilder();
        if (_whiteCompoundPk != null && _whiteCompoundPk.isPresent()) {
            sb.append(li).append(xbRDS(_whiteCompoundPk, "whiteCompoundPk"));
        }
        return sb.toString();
    }

    protected <ET extends Entity> String xbRDS(org.dbflute.optional.OptionalEntity<ET> et, String name) { // buildRelationDisplayString()
        return et.get().buildDisplayString(name, true, true);
    }

    @Override
    protected String doBuildColumnString(String dm) {
        // Build "{col, col, ...}" with the leading delimiter stripped.
        StringBuilder sb = new StringBuilder();
        sb.append(dm).append(xfND(_refFirstId));
        sb.append(dm).append(xfND(_refSecondId));
        sb.append(dm).append(xfND(_refThirdId));
        sb.append(dm).append(xfND(_compoundRefName));
        if (sb.length() > dm.length()) {
            sb.delete(0, dm.length());
        }
        return sb.insert(0, "{").append("}").toString();
    }

    @Override
    protected String doBuildRelationString(String dm) {
        StringBuilder sb = new StringBuilder();
        if (_whiteCompoundPk != null && _whiteCompoundPk.isPresent()) {
            sb.append(dm).append("whiteCompoundPk");
        }
        if (sb.length() > dm.length()) {
            sb.delete(0, dm.length()).insert(0, "(").append(")");
        }
        return sb.toString();
    }

    @Override
    public WhiteCompoundPkRefPhysical clone() {
        return (WhiteCompoundPkRefPhysical) super.clone();
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    /**
     * [get] REF_FIRST_ID: {PK, IX+, NotNull, DECIMAL(16), FK to WHITE_COMPOUND_PK} <br>
     * @return The value of the column 'REF_FIRST_ID'. (basically NotNull if selected: for the constraint)
     */
    public Long getRefFirstId() {
        checkSpecifiedProperty("refFirstId");
        return _refFirstId;
    }

    /**
     * [set] REF_FIRST_ID: {PK, IX+, NotNull, DECIMAL(16), FK to WHITE_COMPOUND_PK} <br>
     * @param refFirstId The value of the column 'REF_FIRST_ID'. (basically NotNull if update: for the constraint)
     */
    public void setRefFirstId(Long refFirstId) {
        registerModifiedProperty("refFirstId");
        _refFirstId = refFirstId;
    }

    /**
     * [get] REF_SECOND_ID: {PK, NotNull, DECIMAL(16), FK to WHITE_COMPOUND_PK} <br>
     * @return The value of the column 'REF_SECOND_ID'. (basically NotNull if selected: for the constraint)
     */
    public Long getRefSecondId() {
        checkSpecifiedProperty("refSecondId");
        return _refSecondId;
    }

    /**
     * [set] REF_SECOND_ID: {PK, NotNull, DECIMAL(16), FK to WHITE_COMPOUND_PK} <br>
     * @param refSecondId The value of the column 'REF_SECOND_ID'. (basically NotNull if update: for the constraint)
     */
    public void setRefSecondId(Long refSecondId) {
        registerModifiedProperty("refSecondId");
        _refSecondId = refSecondId;
    }

    /**
     * [get] REF_THIRD_ID: {PK, NotNull, DECIMAL(16)} <br>
     * @return The value of the column 'REF_THIRD_ID'. (basically NotNull if selected: for the constraint)
     */
    public Long getRefThirdId() {
        checkSpecifiedProperty("refThirdId");
        return _refThirdId;
    }

    /**
     * [set] REF_THIRD_ID: {PK, NotNull, DECIMAL(16)} <br>
     * @param refThirdId The value of the column 'REF_THIRD_ID'. (basically NotNull if update: for the constraint)
     */
    public void setRefThirdId(Long refThirdId) {
        registerModifiedProperty("refThirdId");
        _refThirdId = refThirdId;
    }

    /**
     * [get] COMPOUND_REF_NAME: {NotNull, VARCHAR(200)} <br>
     * @return The value of the column 'COMPOUND_REF_NAME'. (basically NotNull if selected: for the constraint)
     */
    public String getCompoundRefName() {
        checkSpecifiedProperty("compoundRefName");
        // Empty strings are normalized to null on read (DBFlute convention).
        return convertEmptyToNull(_compoundRefName);
    }

    /**
     * [set] COMPOUND_REF_NAME: {NotNull, VARCHAR(200)} <br>
     * @param compoundRefName The value of the column 'COMPOUND_REF_NAME'. (basically NotNull if update: for the constraint)
     */
    public void setCompoundRefName(String compoundRefName) {
        registerModifiedProperty("compoundRefName");
        _compoundRefName = compoundRefName;
    }
}
|
|
/*
* Ninja Trials is an old school style Android Game developed for OUYA & using
* AndEngine. It features several minigames with simple gameplay.
* Copyright 2013 Mad Gear Games <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.madgear.ninjatrials;
import org.andengine.engine.handler.timer.ITimerCallback;
import org.andengine.engine.handler.timer.TimerHandler;
import org.andengine.entity.scene.Scene;
import org.andengine.entity.scene.background.SpriteBackground;
import org.andengine.entity.sprite.Sprite;
import org.andengine.entity.text.Text;
import org.andengine.entity.text.TextOptions;
import org.andengine.util.adt.align.HorizontalAlign;
import com.madgear.ninjatrials.hud.SelectionStripe;
import com.madgear.ninjatrials.managers.GameManager;
import com.madgear.ninjatrials.managers.ResourceManager;
import com.madgear.ninjatrials.managers.SFXManager;
import com.madgear.ninjatrials.managers.SceneManager;
import com.madgear.ninjatrials.sequences.Intro2Scene;
import com.madgear.ninjatrials.test.TestingScene;
/**
 * Character and difficulty selection scene. The player first picks Ryoko or
 * Sho with the D-pad (step 0), then a difficulty level from a vertical
 * stripe (step 1); after a short flash the game moves on to the intro
 * sequence, or to the testing scene in debug mode.
 */
public class PlayerSelectionScene extends GameScene {
    // Camera size; all layout below is relative to these dimensions.
    private final static float WIDTH = ResourceManager.getInstance().cameraWidth;
    private final static float HEIGHT = ResourceManager.getInstance().cameraHeight;
    // Fixed screen positions of the two selectable characters.
    private final static float CHAR_SHO_X = 1190;
    private final static float CHAR_SHO_Y = HEIGHT - 508;
    private final static float CHAR_RYOKO_X = 734;
    private final static float CHAR_RYOKO_Y = CHAR_SHO_Y -50;
    // Alpha of the "selected" character overlay and the difficulty panel.
    private static final float CHAR_SELEC_ALPHA = 0.5f;
    private static final float PANEL_ALPHA = 0.6f;
    private static final float PANEL_X = WIDTH/2;
    private static final float PANEL_Y = 450;
    // Seconds the chosen difficulty flashes before the next scene is shown.
    private static final float SELECT_DIFF_FLASH_TIME = 4;
    private Text selectPlayerText;
    private SpriteBackground bg;
    private Sprite moon;
    private Sprite clouds1;
    private Sprite clouds2;
    private Sprite roof;
    // Each character has three sprites: base, D-pad focus outline, and a
    // translucent "confirmed" overlay.
    private Sprite charSho;
    private Sprite charShoOutline;
    private Sprite charShoSelected;
    private Sprite charRyoko;
    private Sprite charRyokoOutline;
    private Sprite charRyokoSelected;
    // Static so the last choice survives re-entering the scene.
    private static int charSelected = GameManager.CHAR_RYOKO;
    private static int diffIndex = 1;
    // 0 = choosing character, 1 = choosing difficulty, 2 = committed.
    private int selectionStep = 0;
    private Text selectDiffText;
    private Sprite diffPanel;
    private SelectionStripe selectionStripe;
    private String[] menuOptions = {ResourceManager.getInstance().loadAndroidRes().getString(R.string.select_menu_level_easy),
            ResourceManager.getInstance().loadAndroidRes().getString(R.string.select_menu_level_normal),
            ResourceManager.getInstance().loadAndroidRes().getString(R.string.select_menu_level_hard)};
    private TimerHandler timerHandler;
    public PlayerSelectionScene() {
        // NOTE(review): 0 presumably disables the loading-screen delay — confirm GameScene contract.
        super(0);
    }
    @Override
    public Scene onLoadingScreenLoadAndShown() {
        // No custom loading screen for this scene.
        return null;
    }
    @Override
    public void onLoadingScreenUnloadAndHidden() {}
    @Override
    public void onLoadScene() {
        // Textures/fonts for this scene; released again in onUnloadScene().
        ResourceManager.getInstance().loadMenuSelectedResources();
    }
    @Override
    public void onShowScene() {
        // Builds the whole scene graph. NOTE(review): children attached later
        // appear on top, so the attach order below is significant.
        // Sky Background:
        bg = new SpriteBackground(new Sprite(WIDTH/2, HEIGHT/2,
                ResourceManager.getInstance().menuSelectSky,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager()));
        setBackground(bg);
        // Moon:
        moon = new Sprite(WIDTH/2, HEIGHT-200,
                ResourceManager.getInstance().menuSelectMoon,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        attachChild(moon);
        // Clouds:
        clouds1 = new Sprite(WIDTH/4, HEIGHT/2 + 200,
                ResourceManager.getInstance().menuSelectClouds1,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        attachChild(clouds1);
        clouds2 = new Sprite(WIDTH*3/4+200, HEIGHT/2-150,
                ResourceManager.getInstance().menuSelectClouds2,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        attachChild(clouds2);
        // Roof:
        roof = new Sprite(WIDTH/2, 192,
                ResourceManager.getInstance().menuSelectRoof,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        attachChild(roof);
        // Select Player Text (repositioned to the top-left corner after measuring):
        selectPlayerText = new Text(WIDTH/2, HEIGHT/2,
                ResourceManager.getInstance().fontBig, ResourceManager.getInstance().loadAndroidRes().getString(R.string.select_menu_select_player),
                new TextOptions(HorizontalAlign.LEFT),
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        selectPlayerText.setX(selectPlayerText.getWidth()/2 + 100);
        selectPlayerText.setY(HEIGHT - selectPlayerText.getHeight()/2 - 100);
        attachChild(selectPlayerText);
        // Sho Outline
        charShoOutline = new Sprite(CHAR_SHO_X, CHAR_SHO_Y,
                ResourceManager.getInstance().menuSelectChShoOutline,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        charShoOutline.setVisible(false);
        attachChild(charShoOutline);
        // Sho:
        charSho = new Sprite(CHAR_SHO_X, CHAR_SHO_Y,
                ResourceManager.getInstance().menuSelectChSho,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        attachChild(charSho);
        // Sho selected (translucent overlay shown once the choice is confirmed):
        charShoSelected = new Sprite(CHAR_SHO_X, CHAR_SHO_Y,
                ResourceManager.getInstance().menuSelectChShoOutline,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        charShoSelected.setVisible(false);
        charShoSelected.setAlpha(CHAR_SELEC_ALPHA);
        attachChild(charShoSelected);
        // Ryoko Outline:
        charRyokoOutline = new Sprite(CHAR_RYOKO_X, CHAR_RYOKO_Y,
                ResourceManager.getInstance().menuSelectChRyokoOutline,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        attachChild(charRyokoOutline);
        // Ryoko:
        charRyoko = new Sprite(CHAR_RYOKO_X, CHAR_RYOKO_Y,
                ResourceManager.getInstance().menuSelectChRyoko,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        attachChild(charRyoko);
        // Ryoko Selected:
        charRyokoSelected = new Sprite(CHAR_RYOKO_X, CHAR_RYOKO_Y,
                ResourceManager.getInstance().menuSelectChRyokoOutline,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        charRyokoSelected.setVisible(false);
        charRyokoSelected.setAlpha(CHAR_SELEC_ALPHA);
        attachChild(charRyokoSelected);
        // LAYER:
        // Difficulty widgets start hidden; they are revealed in step 1.
        // Diff panel:
        diffPanel = new Sprite(PANEL_X, PANEL_Y,
                ResourceManager.getInstance().menuSelectDifficulty,
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        diffPanel.setAlpha(PANEL_ALPHA);
        diffPanel.setVisible(false);
        attachChild(diffPanel);
        // Diff tittle:
        selectDiffText = new Text(diffPanel.getX() - diffPanel.getWidth()/4, diffPanel.getY(),
                ResourceManager.getInstance().fontBig, ResourceManager.getInstance().loadAndroidRes().getString(R.string.select_menu_select_difficulty),
                new TextOptions(HorizontalAlign.RIGHT),
                ResourceManager.getInstance().engine.getVertexBufferObjectManager());
        selectDiffText.setVisible(false);
        attachChild(selectDiffText);
        // Selection Stripe:
        selectionStripe = new SelectionStripe(
                diffPanel.getX() + diffPanel.getWidth()/4,
                diffPanel.getY(),
                SelectionStripe.DISP_VERTICAL, 110f,
                menuOptions , SelectionStripe.TEXT_ALIGN_CENTER, 1);
        selectionStripe.setVisible(false);
        attachChild(selectionStripe);
    }
    @Override
    public void onHideScene() {}
    @Override
    public void onUnloadScene() {
        ResourceManager.getInstance().unloadMenuSelectedResources();
    }
    // INTERFACE --------------------------------------------------------
    /**
     * Select Player or Diff.
     */
    @Override
    public void onPressButtonO() {
        if(selectionStep == 0) {
            // Select Player: confirm the character, then reveal the difficulty UI.
            selectionStep = 1;
            if(charSelected == GameManager.CHAR_RYOKO)
                charRyokoSelected.setVisible(true);
            else
                charShoSelected.setVisible(true);
            GameManager.setSelectedCharacter(charSelected);
            selectPlayerText.setVisible(false);
            diffPanel.setVisible(true);
            selectDiffText.setVisible(true);
            selectionStripe.setVisible(true);
            SFXManager.playSound(ResourceManager.getInstance().menuActivate);
            //SceneManager.getInstance().showLayer(new DiffSelectLayer(), false, false, false);
        }
        else if(selectionStep == 1) {
            // Select Diff: store the choice, flash it, then change scene after
            // SELECT_DIFF_FLASH_TIME seconds.
            selectionStep = 2;
            GameManager.setSelectedDiff(selectionStripe.getSelectedIndex());
            selectionStripe.textFlash();
            timerHandler = new TimerHandler(SELECT_DIFF_FLASH_TIME, true, new ITimerCallback() {
                @Override
                public void onTimePassed(final TimerHandler pTimerHandler) {
                    // One-shot: unregister before leaving the scene.
                    PlayerSelectionScene.this.unregisterUpdateHandler(timerHandler);
                    if(GameManager.DEBUG_MODE)
                        SceneManager.getInstance().showScene(new TestingScene());
                    else
                        SceneManager.getInstance().showScene(new Intro2Scene());
                }
            });
            registerUpdateHandler(timerHandler);
            SFXManager.playSound(ResourceManager.getInstance().menuActivate);
        }
    }
    /**
     * Cancel Select Player or Diff.
     */
    @Override
    public void onPressButtonA() {
        if(selectionStep == 0) {
            // Cancel Select Player: back out of the scene entirely.
            if(GameManager.DEBUG_MODE)
                SceneManager.getInstance().showScene(new TestingScene());
            else
                SceneManager.getInstance().showScene(new MainMenuScene());
        }
        else if (selectionStep == 1) {
            // Cancel Select Diff: hide the difficulty UI and return to step 0.
            diffPanel.setVisible(false);
            selectDiffText.setVisible(false);
            selectionStripe.setVisible(false);
            selectionStep = 0;
            charRyokoSelected.setVisible(false);
            charShoSelected.setVisible(false);
        }
    }
    @Override
    public void onPressButtonMenu() {
        // Menu behaves like cancel.
        onPressButtonA();
    }
    /**
     * Selected player = Ryoko.
     */
    @Override
    public void onPressDpadLeft() {
        if(selectionStep == 0) {
            charRyokoOutline.setVisible(true);
            charShoOutline.setVisible(false);
            charSelected = GameManager.CHAR_RYOKO;
            SFXManager.playSound(ResourceManager.getInstance().menuFocus);
        }
    }
    /**
     * Selected player = Sho.
     */
    @Override
    public void onPressDpadRight() {
        if(selectionStep == 0) {
            charRyokoOutline.setVisible(false);
            charShoOutline.setVisible(true);
            charSelected = GameManager.CHAR_SHO;
            SFXManager.playSound(ResourceManager.getInstance().menuFocus);
        }
    }
    /**
     * Diff = -1
     */
    @Override
    public void onPressDpadUp() {
        if(selectionStep == 1) {
            selectionStripe.movePrevious();
        }
    }
    /**
     * Diff = +1
     */
    @Override
    public void onPressDpadDown() {
        if(selectionStep == 1) {
            selectionStripe.moveNext();
        }
    }
}
|
|
package com.jalpha_vantage.impl;
import com.jalpha_vantage.domain.DailyStock;
import com.jalpha_vantage.domain.IntraStock;
import com.jalpha_vantage.domain.StockQuote;
import com.jalpha_vantage.exception.*;
import com.jalpha_vantage.service.IStockService;
import com.jalpha_vantage.test.AbstractServiceTest;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.List;
import static junit.framework.TestCase.*;
/**
 * Integration tests for {@link IStockService} against the AlphaVantage API.
 *
 * <p>NOTE(review): most assertions pin exact result-set sizes (100, 925, 931,
 * 212, 213) that depend on live market data at the time the tests were written;
 * they are inherently brittle and will drift as new trading periods accrue.
 */
public class StockTemplateTest extends AbstractServiceTest {

    @Autowired
    IStockService stockOperation;

    /** A stock-service invocation that may throw any of the API exceptions. */
    @FunctionalInterface
    private interface StockCall {
        void run() throws Exception;
    }

    /**
     * Executes {@code call} and converts each declared API exception into a
     * JUnit failure with a descriptive message. This replaces the identical
     * seven-way catch block that was previously duplicated in every test
     * method. Any other exception is rethrown (wrapped) so JUnit still
     * reports it as an error rather than silently passing.
     */
    private static void runExpectingSuccess(StockCall call) {
        try {
            call.run();
        } catch (UnsupportedEncodingException ex) {
            ex.printStackTrace();
            fail("Unsupported Encoding Exception");
        } catch (InvalidApiKeyException ex) {
            ex.printStackTrace();
            fail("Invalid Api Key Exception");
        } catch (InvalidFunctionOptionException ex) {
            ex.printStackTrace();
            fail("Required Function Properties Missing or is Invalid in Exception");
        } catch (MalFormattedFunctionException ex) {
            ex.printStackTrace();
            fail("Invalid Function Exception");
        } catch (MissingApiKeyException ex) {
            ex.printStackTrace();
            fail("Missing Api Key Exception");
        } catch (UltraHighFrequencyRequestException ex) {
            ex.printStackTrace();
            fail("Ultra High Frequency Request Exception");
        } catch (ApiLimitExceeded ex) {
            ex.printStackTrace();
            fail("Exceeded Api Limit");
        } catch (Exception ex) {
            // Not one of the declared API failures: surface it to the runner.
            throw new RuntimeException(ex);
        }
    }

    /** Fetches a single quote and only checks that something came back. */
    @Test
    public void testQuote() {
        try {
            StockQuote quote = stockOperation.quote("BARC.L");
            assertNotNull(quote);
        } catch (Exception ex) {
            fail(ex.getMessage());
        }
    }

    @Test
    public void testIntraDay() {
        runExpectingSuccess(() -> {
            HashMap<String, String> params = new HashMap<>();
            params.put("interval", "1min");
            List<IntraStock> stocks = stockOperation.intraDay("BP.L", params);
            // Market is closed on weekends, so this expects a weekday run
            // (the original note says the test was once run on a weekend).
            assertEquals(100, stocks.size());
        });
    }

    @Test
    public void testDaily() {
        runExpectingSuccess(() -> {
            List<DailyStock> stocks = stockOperation.daily("BARC.L", new HashMap<>());
            assertEquals(100, stocks.size());
        });
    }

    @Test
    public void testDailyAdjustedClose() {
        runExpectingSuccess(() -> {
            List<DailyStock> stocks = stockOperation.dailyAdjustedClose("vod.l", new HashMap<>());
            assertEquals(100, stocks.size());
        });
    }

    @Test
    public void testWeekly() {
        runExpectingSuccess(() -> {
            List<DailyStock> stocks = stockOperation.weekly("vod.l", new HashMap<>());
            assertEquals(925, stocks.size());
        });
    }

    @Test
    public void testWeeklyAdjusted() {
        runExpectingSuccess(() -> {
            List<DailyStock> stocks = stockOperation.weeklyAdjusted("vod.l", new HashMap<>());
            assertEquals(931, stocks.size());
        });
    }

    @Test
    public void testMonthly() {
        runExpectingSuccess(() -> {
            List<DailyStock> stocks = stockOperation.monthly("vod.l", new HashMap<>());
            assertEquals(212, stocks.size());
        });
    }

    @Test
    public void testMonthlyAdjusted() {
        runExpectingSuccess(() -> {
            List<DailyStock> stocks = stockOperation.monthlyAdjusted("vod.l", new HashMap<>());
            assertEquals(213, stocks.size());
        });
    }
}
|
|
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment.realtime.firehose;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import io.druid.collections.spatial.search.RadiusBound;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DelimitedParseSpec;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.NewSpatialDimensionSchema;
import io.druid.data.input.impl.StringDimensionSchema;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.hll.HyperLogLogCollector;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.query.filter.SpatialDimFilter;
import io.druid.segment.IndexIO;
import io.druid.segment.IndexMerger;
import io.druid.segment.IndexSpec;
import io.druid.segment.QueryableIndex;
import io.druid.segment.QueryableIndexStorageAdapter;
import io.druid.segment.StorageAdapter;
import io.druid.segment.TestHelper;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.incremental.IncrementalIndexStorageAdapter;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.util.List;
/**
*/
/**
 * Tests {@link IngestSegmentFirehose}: builds a small spatial segment on disk,
 * streams its rows back through the firehose into a second incremental index
 * (the common "reindex" use of ingestSegment), and finally reads the reindexed
 * data back with a spatial filter applied.
 */
public class IngestSegmentFirehoseTest
{
  // Schema of the source segment: a plain "host" string dimension plus a
  // "spatial" dimension assembled from the raw "x" and "y" input columns.
  private static final DimensionsSpec DIMENSIONS_SPEC = new DimensionsSpec(
      ImmutableList.of(
          new StringDimensionSchema("host"),
          new NewSpatialDimensionSchema("spatial", ImmutableList.of("x", "y"))
      ),
      null,
      null
  );

  // Schema for the second (reindex) pass: "spatial" already exists as a single
  // combined dimension in the source segment, so it maps onto itself.
  private static final DimensionsSpec DIMENSIONS_SPEC_REINDEX = new DimensionsSpec(
      ImmutableList.of(
          new StringDimensionSchema("host"),
          new NewSpatialDimensionSchema("spatial", ImmutableList.of("spatial"))
      ),
      null,
      null
  );

  // First-pass aggregations over the raw input columns.
  private static final List<AggregatorFactory> AGGREGATORS = ImmutableList.of(
      new LongSumAggregatorFactory("visited_sum", "visited"),
      new HyperUniquesAggregatorFactory("unique_hosts", "host")
  );

  // Second-pass aggregations: each metric folds the already-aggregated column
  // of the same name coming out of the source segment.
  private static final List<AggregatorFactory> AGGREGATORS_REINDEX = ImmutableList.of(
      new LongSumAggregatorFactory("visited_sum", "visited_sum"),
      new HyperUniquesAggregatorFactory("unique_hosts", "unique_hosts")
  );

  @Rule
  public final TemporaryFolder tempFolder = new TemporaryFolder();

  private IndexIO indexIO = TestHelper.getTestIndexIO();
  private IndexMerger indexMerger = TestHelper.getTestIndexMergerV9();

  @Test
  public void testReadFromIndexAndWriteAnotherIndex() throws Exception
  {
    // Tests a "reindexing" use case that is a common use of ingestSegment.
    File segmentDir = tempFolder.newFolder();
    createTestIndex(segmentDir);

    try (
        final QueryableIndex qi = indexIO.loadIndex(segmentDir);
        final IncrementalIndex index = new IncrementalIndex.Builder()
            .setIndexSchema(
                new IncrementalIndexSchema.Builder()
                    .withDimensionsSpec(DIMENSIONS_SPEC_REINDEX)
                    .withMetrics(AGGREGATORS_REINDEX.toArray(new AggregatorFactory[]{}))
                    .build()
            )
            .setMaxRowCount(5000)
            .buildOnheap();
    ) {
      final StorageAdapter sa = new QueryableIndexStorageAdapter(qi);
      final WindowedStorageAdapter wsa = new WindowedStorageAdapter(sa, sa.getInterval());

      // The same adapter is deliberately passed twice, so every row of the
      // source segment is emitted twice by the firehose.
      final IngestSegmentFirehose firehose = new IngestSegmentFirehose(
          ImmutableList.of(wsa, wsa),
          ImmutableList.of("host", "spatial"),
          ImmutableList.of("visited_sum", "unique_hosts"),
          null
      );

      int count = 0;
      while (firehose.hasMore()) {
        final InputRow row = firehose.nextRow();
        Assert.assertNotNull(row);
        if (count == 0) {
          // Spot-check the very first emitted row against the first fixture line.
          Assert.assertEquals(new DateTime("2014-10-22T00Z"), row.getTimestamp());
          Assert.assertEquals("host1", row.getRaw("host"));
          Assert.assertEquals("0,1", row.getRaw("spatial"));
          Assert.assertEquals(10L, row.getRaw("visited_sum"));
          Assert.assertEquals(1.0d, ((HyperLogLogCollector) row.getRaw("unique_hosts")).estimateCardinality(), 0.1);
        }
        count++;
        index.add(row);
      }
      // 9 source rows read through 2 adapters = 18 emitted rows ...
      Assert.assertEquals(18, count);

      // ... which roll back up to the 9 distinct (hour, host, spatial) rows.
      Assert.assertEquals(9, index.size());
      final IncrementalIndexStorageAdapter queryable = new IncrementalIndexStorageAdapter(index);
      Assert.assertEquals(2, queryable.getAvailableDimensions().size());
      Assert.assertEquals("host", queryable.getAvailableDimensions().get(0));
      Assert.assertEquals("spatial", queryable.getAvailableDimensions().get(1));
      Assert.assertEquals(ImmutableList.of("visited_sum", "unique_hosts"), queryable.getAvailableMetrics());

      // Spatial filter: of the fixture points (0,1), (1,0), (1,1), only (1,0)
      // lies within radius 0.1 of center (1,0) -- that is host2 at hour 00.
      final IngestSegmentFirehose firehose2 = new IngestSegmentFirehose(
          ImmutableList.of(new WindowedStorageAdapter(queryable, new Interval("2000/3000"))),
          ImmutableList.of("host", "spatial"),
          ImmutableList.of("visited_sum", "unique_hosts"),
          new SpatialDimFilter("spatial", new RadiusBound(new float[]{1, 0}, 0.1f))
      );
      final InputRow row = firehose2.nextRow();
      Assert.assertFalse(firehose2.hasMore());
      Assert.assertEquals(new DateTime("2014-10-22T00Z"), row.getTimestamp());
      Assert.assertEquals("host2", row.getRaw("host"));
      Assert.assertEquals("1,0", row.getRaw("spatial"));
      // 40 = 20 + 20: the row was ingested twice (two adapters), so the sum doubled.
      Assert.assertEquals(40L, row.getRaw("visited_sum"));
      Assert.assertEquals(1.0d, ((HyperLogLogCollector) row.getRaw("unique_hosts")).estimateCardinality(), 0.1);
    }
  }

  /**
   * Persists a small test segment to {@code segmentDir}: nine tab-separated
   * rows spanning three hours of 2014-10-22 with fields
   * (timestamp, host, visited, x, y).
   */
  private void createTestIndex(File segmentDir) throws Exception
  {
    final List<String> rows = Lists.newArrayList(
        "2014102200\thost1\t10\t0\t1",
        "2014102200\thost2\t20\t1\t0",
        "2014102200\thost3\t30\t1\t1",
        "2014102201\thost1\t10\t1\t1",
        "2014102201\thost2\t20\t1\t1",
        "2014102201\thost3\t30\t1\t1",
        "2014102202\thost1\t10\t1\t1",
        "2014102202\thost2\t20\t1\t1",
        "2014102202\thost3\t30\t1\t1"
    );

    final StringInputRowParser parser = new StringInputRowParser(
        new DelimitedParseSpec(
            new TimestampSpec("timestamp", "yyyyMMddHH", null),
            DIMENSIONS_SPEC,
            "\t",
            null,
            // NOTE(review): a "spatial" column is declared although the fixture
            // rows carry only five fields; presumably the spatial dimension is
            // synthesized from "x"/"y" by DIMENSIONS_SPEC -- confirm.
            ImmutableList.of("timestamp", "host", "visited", "x", "y", "spatial"),
            false,
            0
        ),
        Charsets.UTF_8.toString()
    );

    try (
        final IncrementalIndex index = new IncrementalIndex.Builder()
            .setIndexSchema(
                new IncrementalIndexSchema.Builder()
                    .withDimensionsSpec(parser.getParseSpec().getDimensionsSpec())
                    .withMetrics(AGGREGATORS.toArray(new AggregatorFactory[]{}))
                    .build()
            )
            .setMaxRowCount(5000)
            .buildOnheap();
    ) {
      for (String line : rows) {
        index.add(parser.parse(line));
      }
      indexMerger.persist(index, segmentDir, new IndexSpec());
    }
  }
}
|
|
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.roots;
import com.intellij.idea.ActionsBundle;
import com.intellij.notification.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.VcsConfiguration;
import com.intellij.openapi.vcs.VcsDirectoryMapping;
import com.intellij.openapi.vcs.VcsRootError;
import com.intellij.util.Function;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.event.HyperlinkEvent;
import java.util.ArrayList;
import java.util.Collection;
import static com.intellij.notification.NotificationType.ERROR;
import static com.intellij.notification.NotificationType.INFORMATION;
import static com.intellij.openapi.util.text.StringUtil.pluralize;
/**
* Searches for Vcs roots problems via {@link VcsRootErrorsFinder} and notifies about them.
*
* @author Nadya Zabrodina
*/
/**
 * Searches for Vcs roots problems via {@link VcsRootErrorsFinder} and notifies about them.
 *
 * @author Nadya Zabrodina
 */
public class VcsRootProblemNotifier {

  private final @NotNull Project myProject;
  private final @NotNull VcsConfiguration mySettings;

  // The currently displayed notification, if any. Guarded by NOTIFICATION_LOCK
  // so that expiring the old balloon and posting a new one happens atomically.
  private @Nullable Notification myNotification;
  private final @NotNull Object NOTIFICATION_LOCK = new Object();

  public static final NotificationGroup IMPORTANT_ERROR_NOTIFICATION = new NotificationGroup(
    "Vcs Important Messages", NotificationDisplayType.STICKY_BALLOON, true);
  public static final NotificationGroup MINOR_NOTIFICATION = new NotificationGroup(
    "Vcs Minor Notifications", NotificationDisplayType.BALLOON, true);

  public static VcsRootProblemNotifier getInstance(@NotNull Project project) {
    return new VcsRootProblemNotifier(project);
  }

  private VcsRootProblemNotifier(@NotNull Project project) {
    myProject = project;
    mySettings = VcsConfiguration.getInstance(myProject);
  }

  /**
   * Re-scans the project for root problems. If none are found, any previously
   * shown notification is expired; otherwise a notification describing the
   * unregistered and/or invalid roots replaces the previous one.
   */
  public void rescanAndNotifyIfNeeded() {
    if (!mySettings.SHOW_VCS_ERROR_NOTIFICATIONS) {
      return;
    }

    Collection<VcsRootError> errors = scan();
    if (errors.isEmpty()) {
      synchronized (NOTIFICATION_LOCK) {
        expireNotification();
      }
      return;
    }

    Collection<VcsRootError> unregisteredRoots = getUnregisteredRoots(errors);
    Collection<VcsRootError> invalidRoots = getInvalidRoots(errors);
    String title = makeTitle(unregisteredRoots, invalidRoots);
    String description = makeDescription(unregisteredRoots, invalidRoots);

    synchronized (NOTIFICATION_LOCK) {
      expireNotification();
      // Invalid mappings are errors worth a sticky balloon; merely
      // unregistered roots are only informational.
      NotificationGroup notificationGroup = invalidRoots.isEmpty() ? MINOR_NOTIFICATION : IMPORTANT_ERROR_NOTIFICATION;
      NotificationType notificationType = invalidRoots.isEmpty() ? INFORMATION : ERROR;
      myNotification = notificationGroup.createNotification(title, description, notificationType,
                                                            new MyNotificationListener(myProject, mySettings));
      myNotification.notify(myProject);
    }
  }

  /** Expires the current notification (on the EDT) and forgets it. */
  private void expireNotification() {
    if (myNotification != null) {
      final Notification notification = myNotification;
      ApplicationManager.getApplication().invokeLater(new Runnable() {
        @Override
        public void run() {
          notification.expire();
        }
      });
      myNotification = null;
    }
  }

  @NotNull
  private Collection<VcsRootError> scan() {
    return new VcsRootErrorsFinder(myProject).find();
  }

  /**
   * Builds the HTML body of the notification: a section about invalid roots,
   * a section about unregistered roots, and the "Configure"/"Ignore" links.
   */
  @NotNull
  private static String makeDescription(@NotNull Collection<VcsRootError> unregisteredRoots,
                                        @NotNull Collection<VcsRootError> invalidRoots) {
    Function<VcsRootError, String> rootToDisplayableString = new Function<VcsRootError, String>() {
      @Override
      public String fun(VcsRootError rootError) {
        // The "<Project>" constant must be XML-escaped before being embedded
        // in the HTML notification text.
        if (rootError.getMapping().equals(VcsDirectoryMapping.PROJECT_CONSTANT)) {
          return StringUtil.escapeXml(rootError.getMapping());
        }
        return FileUtil.toSystemDependentName(rootError.getMapping());
      }
    };

    StringBuilder description = new StringBuilder();
    if (!invalidRoots.isEmpty()) {
      if (invalidRoots.size() == 1) {
        VcsRootError rootError = invalidRoots.iterator().next();
        String vcsName = rootError.getVcsKey().getName();
        description.append("The directory ")
          .append(rootToDisplayableString.fun(rootError))
          .append(" is registered as a ")
          .append(vcsName)
          .append(" root, but no ")
          .append(vcsName)
          .append(" repositories were found there.");
      }
      else {
        description.append("The following directories are registered as Vcs roots, but they are not: <br/>")
          .append(StringUtil.join(invalidRoots, rootToDisplayableString, ", "));
      }
      description.append("<br/>");
    }
    if (!unregisteredRoots.isEmpty()) {
      if (unregisteredRoots.size() == 1) {
        VcsRootError unregisteredRoot = unregisteredRoots.iterator().next();
        description.append("The directory ")
          .append(rootToDisplayableString.fun(unregisteredRoot))
          .append(" is under ")
          .append(unregisteredRoot.getVcsKey().getName())
          .append(", but is not registered in the Settings.");
      }
      else {
        description.append("The following directories are roots of Vcs repositories, but they are not registered in the Settings: <br/>")
          .append(StringUtil.join(unregisteredRoots, rootToDisplayableString, ", "));
      }
      description.append("<br/>");
    }
    description.append("<a href='configure'>Configure</a> <a href='ignore'>Ignore VCS root errors</a>");
    return description.toString();
  }

  /** Chooses a title depending on which kinds of problems were found. */
  @NotNull
  private static String makeTitle(@NotNull Collection<VcsRootError> unregisteredRoots, @NotNull Collection<VcsRootError> invalidRoots) {
    String title;
    if (unregisteredRoots.isEmpty()) {
      title = "Invalid Vcs root " + pluralize("mapping", invalidRoots.size());
    }
    else if (invalidRoots.isEmpty()) {
      title = "Unregistered Vcs " + pluralize("root", unregisteredRoots.size()) + " detected";
    }
    else {
      title = "Vcs root configuration problems";
    }
    return title;
  }

  @NotNull
  private static Collection<VcsRootError> getUnregisteredRoots(@NotNull Collection<VcsRootError> errors) {
    return filterErrorsByType(errors, VcsRootError.Type.UNREGISTERED_ROOT);
  }

  @NotNull
  private static Collection<VcsRootError> getInvalidRoots(@NotNull Collection<VcsRootError> errors) {
    return filterErrorsByType(errors, VcsRootError.Type.EXTRA_MAPPING);
  }

  /** Returns the subset of {@code errors} that have the given {@code type}. */
  @NotNull
  private static Collection<VcsRootError> filterErrorsByType(@NotNull Collection<VcsRootError> errors, @NotNull VcsRootError.Type type) {
    Collection<VcsRootError> roots = new ArrayList<VcsRootError>();
    for (VcsRootError error : errors) {
      if (error.getType() == type) {
        roots.add(error);
      }
    }
    return roots;
  }

  /** Reacts to the "configure" and "ignore" links inside the notification. */
  private static class MyNotificationListener implements NotificationListener {

    @NotNull private final Project myProject;
    @NotNull private final VcsConfiguration mySettings;

    private MyNotificationListener(@NotNull Project project, @NotNull VcsConfiguration settings) {
      myProject = project;
      mySettings = settings;
    }

    @Override
    public void hyperlinkUpdate(@NotNull Notification notification, @NotNull HyperlinkEvent event) {
      if (event.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
        if (event.getDescription().equals("configure") && !myProject.isDisposed()) {
          ShowSettingsUtil.getInstance().showSettingsDialog(myProject, ActionsBundle.message("group.VcsGroup.text"));
          // The user may have fixed the mappings in the dialog: re-check and
          // drop the notification if nothing is wrong anymore.
          Collection<VcsRootError> errorsAfterPossibleFix = getInstance(myProject).scan();
          if (errorsAfterPossibleFix.isEmpty() && !notification.isExpired()) {
            notification.expire();
          }
        }
        else if (event.getDescription().equals("ignore")) {
          mySettings.SHOW_VCS_ERROR_NOTIFICATIONS = false;
          notification.expire();
        }
      }
    }
  }
}
|
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.push;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import git4idea.GitLocalBranch;
import git4idea.GitRemoteBranch;
import git4idea.update.GitUpdateResult;
/**
* Result of pushing one repository.
* <p>
* Includes information about the number of pushed commits (or -1 if undefined),
* and tells whether the repository was updated after the push was rejected.
*
* @see git4idea.push.GitPushNativeResult
*/
/**
 * Result of pushing one repository.
 * <p>
 * Includes information about the number of pushed commits (or -1 if undefined),
 * and tells whether the repository was updated after the push was rejected.
 *
 * @see git4idea.push.GitPushNativeResult
 */
class GitPushRepoResult
{
    /** Outcome category, declared in ascending order of severity. */
    enum Type
    {
        SUCCESS,
        NEW_BRANCH,
        UP_TO_DATE,
        FORCED,
        REJECTED_NO_FF,
        REJECTED_OTHER,
        ERROR,
        NOT_PUSHED;
    }

    // Orders Type values by declaration order (i.e. by severity).
    static Comparator<Type> TYPE_COMPARATOR = new Comparator<Type>()
    {
        @Override
        public int compare(Type o1, Type o2)
        {
            return o1.ordinal() - o2.ordinal();
        }
    };

    @Nonnull
    private final Type myType;
    // Number of pushed commits, or -1 when unknown.
    private final int myCommits;
    @Nonnull
    private final String mySourceBranch;
    @Nonnull
    private final String myTargetBranch;
    @Nonnull
    private final String myTargetRemote;
    @Nonnull
    private final List<String> myPushedTags;
    @Nullable
    private final String myError;
    @Nullable
    private final GitUpdateResult myUpdateResult;

    /**
     * Builds a result from the branch outcome reported by native git plus the
     * outcomes of any pushed tags (their source refs are recorded).
     */
    @Nonnull
    static GitPushRepoResult convertFromNative(@Nonnull GitPushNativeResult result,
                                               @Nonnull List<GitPushNativeResult> tagResults,
                                               int commits,
                                               @Nonnull GitLocalBranch source,
                                               @Nonnull GitRemoteBranch target)
    {
        List<String> tags = ContainerUtil.map(tagResults, new Function<GitPushNativeResult, String>()
        {
            @Override
            public String fun(GitPushNativeResult result)
            {
                return result.getSourceRef();
            }
        });
        return new GitPushRepoResult(convertType(result), commits, source.getFullName(), target.getFullName(), target.getRemote().getName(), tags, null, null);
    }

    /** Creates an {@link Type#ERROR} result carrying the given error text. */
    @Nonnull
    static GitPushRepoResult error(@Nonnull GitLocalBranch source, @Nonnull GitRemoteBranch target, @Nonnull String error)
    {
        return new GitPushRepoResult(Type.ERROR, -1, source.getFullName(), target.getFullName(), target.getRemote().getName(), Collections.<String>emptyList(), error, null);
    }

    /** Creates a result for a repository that was skipped entirely. */
    @Nonnull
    static GitPushRepoResult notPushed(GitLocalBranch source, GitRemoteBranch target)
    {
        return new GitPushRepoResult(Type.NOT_PUSHED, -1, source.getFullName(), target.getFullName(), target.getRemote().getName(), Collections.<String>emptyList(), null, null);
    }

    /** Returns a copy of {@code original} enriched with the given update result. */
    @Nonnull
    static GitPushRepoResult addUpdateResult(GitPushRepoResult original, GitUpdateResult updateResult)
    {
        return new GitPushRepoResult(original.getType(), original.getNumberOfPushedCommits(), original.getSourceBranch(), original.getTargetBranch(), original.getTargetRemote(),
                                     original.getPushedTags(), original.getError(), updateResult);
    }

    private GitPushRepoResult(@Nonnull Type type,
                              int pushedCommits,
                              @Nonnull String sourceBranch,
                              @Nonnull String targetBranch,
                              @Nonnull String targetRemote,
                              @Nonnull List<String> pushedTags,
                              @Nullable String error,
                              @Nullable GitUpdateResult result)
    {
        myType = type;
        myCommits = pushedCommits;
        mySourceBranch = sourceBranch;
        myTargetBranch = targetBranch;
        myTargetRemote = targetRemote;
        myPushedTags = pushedTags;
        myError = error;
        myUpdateResult = result;
    }

    @Nonnull
    Type getType()
    {
        return myType;
    }

    @Nullable
    GitUpdateResult getUpdateResult()
    {
        return myUpdateResult;
    }

    int getNumberOfPushedCommits()
    {
        return myCommits;
    }

    /**
     * Returns the branch we were pushing from, in the full-name format, e.g. {@code refs/heads/master}.
     */
    @Nonnull
    String getSourceBranch()
    {
        return mySourceBranch;
    }

    /**
     * Returns the branch we were pushing to, in the full-name format, e.g. {@code refs/remotes/origin/master}.
     */
    @Nonnull
    String getTargetBranch()
    {
        return myTargetBranch;
    }

    @Nullable
    String getError()
    {
        return myError;
    }

    @Nonnull
    List<String> getPushedTags()
    {
        return myPushedTags;
    }

    @Nonnull
    public String getTargetRemote()
    {
        return myTargetRemote;
    }

    /**
     * Maps a native git outcome onto a {@link Type}.
     *
     * @throws IllegalArgumentException for outcomes with no equivalent (e.g. DELETED)
     */
    @Nonnull
    private static Type convertType(@Nonnull GitPushNativeResult nativeResult)
    {
        switch(nativeResult.getType())
        {
            case SUCCESS:
                return Type.SUCCESS;
            case FORCED_UPDATE:
                return Type.FORCED;
            case NEW_REF:
                return Type.NEW_BRANCH;
            case REJECTED:
                return nativeResult.isNonFFUpdate() ? Type.REJECTED_NO_FF : Type.REJECTED_OTHER;
            case UP_TO_DATE:
                return Type.UP_TO_DATE;
            case ERROR:
                return Type.ERROR;
            case DELETED:
            default:
                throw new IllegalArgumentException("Conversion is not supported: " + nativeResult.getType());
        }
    }

    @Override
    public String toString()
    {
        // BUGFIX: the format string previously ended with a stray unmatched '}'
        // ("... update: %s}") left over from an earlier brace-style template.
        return String.format("%s (%d, '%s'), update: %s", myType, myCommits, mySourceBranch, myUpdateResult);
    }
}
|
|
package com.home.rxtwitch.profile.following.gamefollowed;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.GridLayoutManager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewStub;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.home.rxtwitch.R;
import com.home.rxtwitch.RxTwitchApplication;
import com.home.rxtwitch.data.twitch.pojo.TwitchGameFollowed;
import com.home.rxtwitch.ui.GridSpacingItemDecoration;
import com.home.rxtwitch.ui.adapter.GameFollowedAdapter;
import com.home.rxtwitch.ui.fragment.BaseLazyFragment;
import com.home.rxtwitch.ui.widget.wraprecyclerview.WrapRecyclerView;
import com.home.rxtwitch.util.CheckNotNullUtil;
import java.util.List;
import javax.inject.Inject;
import butterknife.Bind;
import butterknife.ButterKnife;
/**
* Created by xmax on 2/24/16.
*/
/**
 * Lazily-loaded fragment showing the games the user follows in a two-column
 * grid, with pull-to-refresh, endless scrolling, and empty/offline stubs.
 * <p>
 * Implements the MVP view contract; the presenter is injected via Dagger in
 * {@link #onAttach(Context)}.
 * <p>
 * Created by xmax on 2/24/16.
 */
public class GameFollowedFragment extends BaseLazyFragment implements GameFollowedContract.View {

    @Bind(R.id.game_followed_recycler_view) WrapRecyclerView mGameWRV;
    @Bind(R.id.loading_progress) ProgressBar mLoadingPB;

    private GameFollowedAdapter mGameFollowedAdapter;
    // Lazily inflated stub shown when the network is unavailable.
    private View mCloudOffL;
    // Lazily inflated stub shown when the user follows no games.
    private View mNoGameFollowedL;
    private SwipeRefreshLayout mSwipeRefreshLayout;

    // Injected by Dagger; the presenter module presumably calls
    // setPresenter(...) with it -- TODO confirm wiring.
    @Inject GameFollowedPresenter mGameFollowedPresenter;
    private GameFollowedContract.Presenter mPresenter;

    @Override
    public void setPresenter(@NonNull GameFollowedContract.Presenter presenter) {
        mPresenter = CheckNotNullUtil.checkNotNull(presenter);
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        DaggerGameFollowedComponent.builder()
                .twitchServiceImplComponent(((RxTwitchApplication) getActivity().getApplication())
                        .getTwitchServiceImplComponent())
                .twitchOauthUtilComponent(((RxTwitchApplication) getActivity().getApplication())
                        .getTwitchOauthUtilComponent())
                .gameFollowedPresenterModule(new GameFollowedPresenterModule(this)).build()
                .inject(this);
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_game_followed, container, false);
        ButterKnife.bind(this, view);
        return view;
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        // mGameWRV and mLoadingPB are already bound by ButterKnife in
        // onCreateView; the previous manual findViewById for mLoadingPB
        // was redundant and has been removed.
        mSwipeRefreshLayout = mGameWRV.getSwipeRefreshLayout();
        mGameFollowedAdapter = new GameFollowedAdapter(view.getContext());
        mGameWRV.disableFloatingActionButton();
        // Two-column grid with edge spacing.
        mGameWRV.addItemDecoration(new GridSpacingItemDecoration(view.getContext(), 2, true));
        mGameWRV.setLayoutManager(new GridLayoutManager(view.getContext(), 2));
        mGameWRV.setAdapter(mGameFollowedAdapter);
        mGameWRV.setOnRefreshListener(mOnRefreshListener);
        mGameFollowedAdapter.setGameItemListener(mGameItemListener);
        // Subscription is deferred to fetchData(), the lazy-loading hook of
        // BaseLazyFragment -- TODO confirm when it is invoked.
    }

    @Override
    public void fetchData() {
        mPresenter.subscribe();
    }

    private WrapRecyclerView.OnRefreshListener mOnRefreshListener = new WrapRecyclerView.OnRefreshListener() {
        @Override
        public void onLoadMore(int offset) {
            mPresenter.onLoadMore(offset);
        }

        @Override
        public void onRefresh() {
            mPresenter.onRefresh();
        }
    };

    private GameFollowedAdapter.GameItemListener mGameItemListener =
            gameName -> mPresenter.openGame(gameName);

    @Override
    public void onDestroy() {
        super.onDestroy();
        mPresenter.unsubscribe();
    }

    @Override
    public void addGames(List<TwitchGameFollowed> games) {
        mGameFollowedAdapter.add(games);
    }

    @Override
    public void clearGames() {
        mGameFollowedAdapter.clear();
    }

    @Override
    public void completeLoadingMore() {
        mGameWRV.loadingMoreComplete();
    }

    @Override
    public void showGamesRecyclerView() {
        mGameWRV.setVisibility(View.VISIBLE);
    }

    @Override
    public void hideGamesRecyclerView() {
        mGameWRV.setVisibility(View.GONE);
    }

    /** Inflates the offline stub on first use; tapping it triggers a refresh. */
    @Override
    public void showCloudOffStub() {
        if (mCloudOffL == null) {
            ViewStub viewStub = (ViewStub) getActivity().findViewById(R.id.game_followed_stub);
            mCloudOffL = viewStub.inflate();
            mCloudOffL.setVisibility(View.VISIBLE);
            mCloudOffL.setOnClickListener(v -> mPresenter.onRefresh());
        } else {
            mCloudOffL.setVisibility(View.VISIBLE);
        }
    }

    @Override
    public void hideCloudOffStub() {
        if (mCloudOffL != null)
            mCloudOffL.setVisibility(View.GONE);
    }

    @Override
    public void hideNoGameFollowed() {
        if (mNoGameFollowedL != null)
            mNoGameFollowedL.setVisibility(View.GONE);
    }

    /** Inflates the "no games followed" stub on first use and shows it. */
    @Override
    public void showNoGameFollowed() {
        if (mNoGameFollowedL == null) {
            ViewStub viewStub = (ViewStub) getActivity().findViewById(R.id.no_game_followed_stub);
            mNoGameFollowedL = viewStub.inflate();
            ImageView noFollowIV = (ImageView) mNoGameFollowedL.findViewById(R.id.no_content_img);
            noFollowIV.setImageResource(R.drawable.ic_no_follow);
            TextView noFollowTV = (TextView) mNoGameFollowedL.findViewById(R.id.no_content_text);
            noFollowTV.setText(R.string.no_games_followed);
            mNoGameFollowedL.setVisibility(View.VISIBLE);
        } else {
            mNoGameFollowedL.setVisibility(View.VISIBLE);
        }
    }

    @Override
    public void showLoadingProgress() {
        mLoadingPB.setVisibility(View.VISIBLE);
    }

    @Override
    public void hideLoadingProgress() {
        mLoadingPB.setVisibility(View.GONE);
    }

    @Override
    public boolean isRefreshing() {
        return mSwipeRefreshLayout.isRefreshing();
    }

    @Override
    public void setRefreshing(boolean isRefreshing) {
        mSwipeRefreshLayout.setRefreshing(isRefreshing);
    }

    @Override
    public void showGameUI(@NonNull String gameName) {
        // Intentionally empty: navigation to the game screen is not handled here.
    }

    @Override
    public boolean isEmpty() {
        return mGameFollowedAdapter.getItemCount() <= 0;
    }
}
|
|
package org.testng.internal;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import org.testng.IClass;
import org.testng.IMethodSelector;
import org.testng.IObjectFactory;
import org.testng.IObjectFactory2;
import org.testng.ITestObjectFactory;
import org.testng.TestNGException;
import org.testng.TestRunner;
import org.testng.annotations.IAnnotation;
import org.testng.annotations.IFactoryAnnotation;
import org.testng.annotations.IParametersAnnotation;
import org.testng.collections.Sets;
import org.testng.internal.annotations.IAnnotationFinder;
import org.testng.junit.IJUnitTestRunner;
import org.testng.xml.XmlTest;
/**
* Utility class for different class manipulations.
*/
public final class ClassHelper {
  private static final String JUNIT_TESTRUNNER = "org.testng.junit.JUnitTestRunner";
  private static final String JUNIT_4_TESTRUNNER = "org.testng.junit.JUnit4TestRunner";

  /** The additional class loaders to find classes in. */
  private static final List<ClassLoader> m_classLoaders = new Vector<>();

  /** Add a class loader to the searchable loaders. */
  public static void addClassLoader(final ClassLoader loader) {
    m_classLoaders.add(loader);
  }

  /** Hide constructor. */
  private ClassHelper() {
    // Hide Constructor
  }

  /**
   * Instantiates {@code clazz} via its no-args constructor.
   *
   * @param clazz the class to instantiate.
   * @return a new instance of {@code clazz}.
   * @throws TestNGException wrapping any reflective failure with a descriptive message.
   */
  public static <T> T newInstance(Class<T> clazz) {
    try {
      return clazz.newInstance();
    }
    catch(IllegalAccessException iae) {
      throw new TestNGException("Class " + clazz.getName()
          + " does not have a no-args constructor", iae);
    }
    catch(InstantiationException ie) {
      throw new TestNGException("Cannot instantiate class " + clazz.getName(), ie);
    }
    catch(ExceptionInInitializerError eiierr) {
      throw new TestNGException("An exception occurred in static initialization of class "
          + clazz.getName(), eiierr);
    }
    catch(SecurityException se) {
      throw new TestNGException(se);
    }
  }

  /**
   * Instantiates a class through the given constructor and arguments.
   *
   * @throws TestNGException wrapping any reflective failure, preserving the cause.
   */
  public static <T> T newInstance(Constructor<T> constructor, Object... parameters) {
    try {
      return constructor.newInstance(parameters);
    } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
      throw new TestNGException("Cannot instantiate class " + constructor.getDeclaringClass().getName(), e);
    }
  }

  /**
   * Tries to load the specified class using the context ClassLoader or if none,
   * than from the default ClassLoader. This method differs from the standard
   * class loading methods in that it does not throw an exception if the class
   * is not found but returns null instead.
   *
   * @param className the class name to be loaded.
   *
   * @return the class or null if the class is not found.
   */
  public static Class<?> forName(final String className) {
    Vector<ClassLoader> allClassLoaders = new Vector<>();
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    if (contextClassLoader != null) {
      allClassLoaders.add(contextClassLoader);
    }
    allClassLoaders.addAll(m_classLoaders);
    for (ClassLoader classLoader : allClassLoaders) {
      if (null == classLoader) {
        continue;
      }
      try {
        return classLoader.loadClass(className);
      }
      catch(ClassNotFoundException ex) {
        // With additional class loaders, it is legitimate to ignore ClassNotFoundException
        if (m_classLoaders.isEmpty()) {
          logClassNotFoundError(className, ex);
        }
      }
    }
    // Last resort: the class loader that loaded this class.
    try {
      return Class.forName(className);
    }
    catch(ClassNotFoundException cnfe) {
      logClassNotFoundError(className, cnfe);
      return null;
    }
  }

  private static void logClassNotFoundError(String className, Exception ex) {
    Utils.log("ClassHelper", 2, "Could not instantiate " + className
        + " : Class doesn't exist (" + ex.getMessage() + ")");
  }

  /**
   * For the given class, returns the method annotated with @Factory or null
   * if none is found. This method does not search up the superclass hierarchy.
   * If more than one method is @Factory annotated, a TestNGException is thrown.
   * @param cls The class to search for the @Factory annotation.
   * @param finder The finder (JDK 1.4 or JDK 5.0+) use to search for the annotation.
   *
   * @return the @Factory <CODE>method</CODE> or null
   *
   * FIXME: @Factory method must be public!
   */
  public static ConstructorOrMethod findDeclaredFactoryMethod(Class<?> cls,
      IAnnotationFinder finder) {
    ConstructorOrMethod result = null;
    // Prefer an @Factory-annotated method; take the first one found.
    for (Method method : cls.getMethods()) {
      IFactoryAnnotation f = finder.findAnnotation(method, IFactoryAnnotation.class);
      if (null != f) {
        result = new ConstructorOrMethod(method);
        result.setEnabled(f.getEnabled());
        break;
      }
    }
    if (result == null) {
      // Fall back to an @Factory-annotated constructor. Note: unlike the method
      // path, the loop does not break, so the last matching constructor wins,
      // and the enabled flag is not propagated (preserved legacy behavior).
      for (Constructor<?> constructor : cls.getDeclaredConstructors()) {
        IAnnotation f = finder.findAnnotation(constructor, IFactoryAnnotation.class);
        if (f != null) {
          result = new ConstructorOrMethod(constructor);
        }
      }
    }
    // Found the method, verify that it returns an array of objects
    // TBD
    return result;
  }

  /**
   * Extract all callable methods of a class and all its super (keeping in mind
   * the Java access rules).
   *
   * @param clazz the class whose callable methods are collected.
   * @return the set of methods callable from {@code clazz}, excluding those
   *         declared on {@link Object}.
   */
  public static Set<Method> getAvailableMethods(Class<?> clazz) {
    Set<Method> methods = Sets.newHashSet();
    methods.addAll(Arrays.asList(clazz.getDeclaredMethods()));
    Class<?> parent = clazz.getSuperclass();
    // getSuperclass() returns null for Object and for interfaces; guard
    // against that so we never pass null into extractMethods().
    while (null != parent && Object.class != parent) {
      methods.addAll(extractMethods(clazz, parent, methods));
      parent = parent.getSuperclass();
    }
    return methods;
  }

  /**
   * Creates a JUnit test runner, preferring JUnit 4 if it is on the classpath
   * and falling back to JUnit 3.
   *
   * @throws TestNGException if neither JUnit version is available.
   */
  public static IJUnitTestRunner createTestRunner(TestRunner runner) {
    try {
      //try to get runner for JUnit 4 first
      Class.forName("org.junit.Test");
      IJUnitTestRunner tr = (IJUnitTestRunner) ClassHelper.forName(JUNIT_4_TESTRUNNER).newInstance();
      tr.setTestResultNotifier(runner);
      return tr;
    } catch (Throwable t) {
      Utils.log("ClassHelper", 2, "JUnit 4 was not found on the classpath");
      try {
        //fallback to JUnit 3
        Class.forName("junit.framework.Test");
        IJUnitTestRunner tr = (IJUnitTestRunner) ClassHelper.forName(JUNIT_TESTRUNNER).newInstance();
        tr.setTestResultNotifier(runner);
        return tr;
      } catch (Exception ex) {
        Utils.log("ClassHelper", 2, "JUnit 3 was not found on the classpath");
        //there's no JUnit on the classpath
        throw new TestNGException("Cannot create JUnit runner", ex);
      }
    }
  }

  /**
   * Collects the methods of {@code clazz} that are callable from
   * {@code childClass} (public, protected, or package-private when both classes
   * share a package), skipping abstract methods and methods already overridden
   * by an entry in {@code collected}.
   */
  private static Set<Method> extractMethods(Class<?> childClass, Class<?> clazz,
      Set<Method> collected) {
    Set<Method> methods = Sets.newHashSet();
    Method[] declaredMethods = clazz.getDeclaredMethods();
    Package childPackage = childClass.getPackage();
    Package classPackage = clazz.getPackage();
    boolean isSamePackage = false;
    if ((null == childPackage) && (null == classPackage)) {
      // Both in the default package.
      isSamePackage = true;
    }
    if ((null != childPackage) && (null != classPackage)) {
      isSamePackage = childPackage.getName().equals(classPackage.getName());
    }
    for (Method method : declaredMethods) {
      int methodModifiers = method.getModifiers();
      if ((Modifier.isPublic(methodModifiers) || Modifier.isProtected(methodModifiers))
          || (isSamePackage && !Modifier.isPrivate(methodModifiers))) {
        if (!isOverridden(method, collected) && !Modifier.isAbstract(methodModifiers)) {
          methods.add(method);
        }
      }
    }
    return methods;
  }

  /**
   * Returns true if {@code collectedMethods} contains a method that overrides
   * {@code method} (same name, declaring class assignable, identical parameter
   * types).
   */
  private static boolean isOverridden(Method method, Set<Method> collectedMethods) {
    Class<?> methodClass = method.getDeclaringClass();
    Class<?>[] methodParams = method.getParameterTypes();
    for (Method m : collectedMethods) {
      Class<?>[] paramTypes = m.getParameterTypes();
      if (method.getName().equals(m.getName())
          && methodClass.isAssignableFrom(m.getDeclaringClass())
          && methodParams.length == paramTypes.length) {
        boolean sameParameters = true;
        for (int i = 0; i < methodParams.length; i++) {
          if (!methodParams[i].equals(paramTypes[i])) {
            sameParameters = false;
            break;
          }
        }
        if (sameParameters) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Instantiates the method selector class named in the XML configuration.
   *
   * @throws TestNGException if the class cannot be found or instantiated.
   */
  public static IMethodSelector createSelector(org.testng.xml.XmlMethodSelector selector) {
    try {
      Class<?> cls = Class.forName(selector.getClassName());
      return (IMethodSelector) cls.newInstance();
    }
    catch(Exception ex) {
      throw new TestNGException("Couldn't find method selector : " + selector.getClassName(), ex);
    }
  }

  /**
   * Create an instance for the given class.
   */
  public static Object createInstance(Class<?> declaringClass,
                                      Map<Class, IClass> classes,
                                      XmlTest xmlTest,
                                      IAnnotationFinder finder,
                                      ITestObjectFactory objectFactory)
  {
    // Dispatch on the concrete factory flavour.
    if (objectFactory instanceof IObjectFactory) {
      return createInstance1(declaringClass, classes, xmlTest, finder,
          (IObjectFactory) objectFactory);
    } else if (objectFactory instanceof IObjectFactory2) {
      return createInstance2(declaringClass, (IObjectFactory2) objectFactory);
    } else {
      throw new AssertionError("Unknown object factory type:" + objectFactory);
    }
  }

  /** Delegates instantiation entirely to an {@link IObjectFactory2}. */
  private static Object createInstance2(Class<?> declaringClass, IObjectFactory2 objectFactory) {
    return objectFactory.newInstance(declaringClass);
  }

  /**
   * Creates an instance of {@code declaringClass} via an {@link IObjectFactory}:
   * first through a {@code @Parameters}-annotated constructor if one exists,
   * otherwise through the no-args constructor (adding the hidden enclosing
   * instance for non-static nested classes), finally falling back to a
   * single-String constructor.
   *
   * @throws TestNGException if instantiation fails.
   */
  public static Object createInstance1(Class<?> declaringClass,
                                       Map<Class, IClass> classes,
                                       XmlTest xmlTest,
                                       IAnnotationFinder finder,
                                       IObjectFactory objectFactory) {
    Object result = null;
    try {
      //
      // Any annotated constructor?
      //
      Constructor<?> constructor = findAnnotatedConstructor(finder, declaringClass);
      if (null != constructor) {
        IParametersAnnotation annotation = finder.findAnnotation(constructor, IParametersAnnotation.class);
        String[] parameterNames = annotation.getValue();
        Object[] parameters = Parameters.createInstantiationParameters(constructor,
            "@Parameters",
            finder,
            parameterNames,
            xmlTest.getAllParameters(),
            xmlTest.getSuite());
        result = objectFactory.newInstance(constructor, parameters);
      }
      //
      // No, just try to instantiate the parameterless constructor (or the one
      // with a String)
      //
      else {
        // If this class is a (non-static) nested class, the constructor contains a hidden
        // parameter of the type of the enclosing class
        Class<?>[] parameterTypes = new Class[0];
        Object[] parameters = new Object[0];
        Class<?> ec = getEnclosingClass(declaringClass);
        boolean isStatic = 0 != (declaringClass.getModifiers() & Modifier.STATIC);
        // Only add the extra parameter if the nested class is not static
        if ((null != ec) && !isStatic) {
          parameterTypes = new Class[] { ec };
          // Create an instance of the enclosing class so we can instantiate
          // the nested class (actually, we reuse the existing instance).
          IClass enclosingIClass = classes.get(ec);
          Object[] enclosingInstances;
          if (null != enclosingIClass) {
            enclosingInstances = enclosingIClass.getInstances(false);
            if ((null == enclosingInstances) || (enclosingInstances.length == 0)) {
              Object o = objectFactory.newInstance(ec.getConstructor(parameterTypes));
              enclosingIClass.addInstance(o);
              enclosingInstances = new Object[] { o };
            }
          }
          else {
            enclosingInstances = new Object[] { ec.newInstance() };
          }
          Object enclosingClassInstance = enclosingInstances[0];
          parameters = new Object[] { enclosingClassInstance };
        } // isStatic
        Constructor<?> ct;
        try {
          ct = declaringClass.getDeclaredConstructor(parameterTypes);
        }
        catch (NoSuchMethodException ex) {
          // No matching constructor; try the single-String convention instead.
          ct = declaringClass.getDeclaredConstructor(String.class);
          parameters = new Object[] { "Default test name" };
          // If ct == null here, we'll pass a null
          // constructor to the factory and hope it can deal with it
        }
        result = objectFactory.newInstance(ct, parameters);
      }
    }
    catch (TestNGException ex) {
      throw ex;
    }
    catch (NoSuchMethodException ex) {
      // Intentionally ignored: result stays null and is diagnosed below.
    }
    catch (Throwable cause) {
      // Something else went wrong when running the constructor
      throw new TestNGException("An error occurred while instantiating class "
          + declaringClass.getName() + ": " + cause.getMessage(), cause);
    }
    if (result == null) {
      if (! Modifier.isPublic(declaringClass.getModifiers())) {
        //result should not be null
        throw new TestNGException("An error occurred while instantiating class "
            + declaringClass.getName() + ". Check to make sure it can be accessed/instantiated.");
      }
    }
    return result;
  }

  /**
   * Class.getEnclosingClass() only exists on JDK5, so reimplementing it
   * here.
   */
  private static Class<?> getEnclosingClass(Class<?> declaringClass) {
    Class<?> result = null;
    String className = declaringClass.getName();
    int index = className.indexOf("$");
    if (index != -1) {
      String ecn = className.substring(0, index);
      try {
        result = Class.forName(ecn);
      }
      catch (ClassNotFoundException e) {
        e.printStackTrace();
      }
    }
    return result;
  }

  /**
   * Find the best constructor given the parameters found on the annotation
   */
  private static Constructor<?> findAnnotatedConstructor(IAnnotationFinder finder,
      Class<?> declaringClass) {
    Constructor<?>[] constructors = declaringClass.getDeclaredConstructors();
    for (Constructor<?> result : constructors) {
      IParametersAnnotation annotation = finder.findAnnotation(result, IParametersAnnotation.class);
      if (null != annotation) {
        String[] parameters = annotation.getValue();
        Class<?>[] parameterTypes = result.getParameterTypes();
        if (parameters.length != parameterTypes.length) {
          throw new TestNGException("Parameter count mismatch: " + result + "\naccepts "
              + parameterTypes.length
              + " parameters but the @Test annotation declares "
              + parameters.length);
        }
        else {
          return result;
        }
      }
    }
    return null;
  }

  /**
   * Last-resort instantiation through a single-String constructor (used for
   * test classes following the JUnit 3 naming convention).
   *
   * @return a new instance, or null for inner classes with no modifiers.
   * @throws TestNGException if no suitable constructor exists.
   */
  public static <T> T tryOtherConstructor(Class<T> declaringClass) {
    T result;
    try {
      // Special case for inner classes
      if (declaringClass.getModifiers() == 0) {
        return null;
      }
      Constructor<T> ctor = declaringClass.getConstructor(String.class);
      result = ctor.newInstance("Default test name");
    }
    catch (Exception e) {
      String message = e.getMessage();
      if ((message == null) && (e.getCause() != null)) {
        message = e.getCause().getMessage();
      }
      String error = "Could not create an instance of class " + declaringClass
          + ((message != null) ? (": " + message) : "")
          + ".\nPlease make sure it has a constructor that accepts either a String or no parameter.";
      // Preserve the original exception as the cause instead of dropping it.
      throw new TestNGException(error, e);
    }
    return result;
  }

  /**
   * When given a file name to form a class name, the file name is parsed and divided
   * into segments. For example, "c:/java/classes/com/foo/A.class" would be divided
   * into 6 segments {"C:" "java", "classes", "com", "foo", "A"}. The first segment
   * actually making up the class name is [3]. This value is saved in m_lastGoodRootIndex
   * so that when we parse the next file name, we will try 3 right away. If 3 fails we
   * will take the long approach. This is just a optimization cache value.
   * NOTE(review): shared mutable static — not thread-safe; confirm callers are
   * single-threaded.
   */
  private static int m_lastGoodRootIndex = -1;

  /**
   * Returns the Class object corresponding to the given name. The name may be
   * of the following form:
   * <ul>
   * <li>A class name: "org.testng.TestNG"</li>
   * <li>A class file name: "/testng/src/org/testng/TestNG.class"</li>
   * <li>A class source name: "d:\testng\src\org\testng\TestNG.java"</li>
   * </ul>
   *
   * @param file
   *          the class name.
   * @return the class corresponding to the name specified.
   */
  public static Class<?> fileToClass(String file) {
    Class<?> result = null;
    if(!file.endsWith(".class") && !file.endsWith(".java")) {
      // Doesn't end in .java or .class, assume it's a class name
      if (file.startsWith("class ")) {
        file = file.substring("class ".length());
      }
      result = ClassHelper.forName(file);
      if (null == result) {
        throw new TestNGException("Cannot load class from file: " + file);
      }
      return result;
    }
    int classIndex = file.lastIndexOf(".class");
    if (-1 == classIndex) {
      classIndex = file.lastIndexOf(".java");
    }
    // Transforms the file name into a class name.
    // Remove the ".class" or ".java" extension.
    String shortFileName = file.substring(0, classIndex);
    // Split file name into segments. For example "c:/java/classes/com/foo/A"
    // becomes {"c:", "java", "classes", "com", "foo", "A"}
    String[] segments = shortFileName.split("[/\\\\]", -1);
    //
    // Check if the last good root index works for this one. For example, if the previous
    // name was "c:/java/classes/com/foo/A.class" then m_lastGoodRootIndex is 3 and we
    // try to make a class name ignoring the first m_lastGoodRootIndex segments (3). This
    // will succeed rapidly if the path is the same as the one from the previous name.
    //
    if (-1 != m_lastGoodRootIndex) {
      StringBuilder cachedName = new StringBuilder(segments[m_lastGoodRootIndex]);
      for (int i = m_lastGoodRootIndex + 1; i < segments.length; i++) {
        cachedName.append(".").append(segments[i]);
      }
      result = ClassHelper.forName(cachedName.toString());
      if (null != result) {
        return result;
      }
    }
    //
    // We haven't found a good root yet, start by resolving the class from the end segment
    // and work our way up. For example, if we start with "c:/java/classes/com/foo/A"
    // we'll start by resolving "A", then "foo.A", then "com.foo.A" until something
    // resolves. When it does, we remember the path we are at as "lastGoodRoodIndex".
    //
    String className = null;
    for (int i = segments.length - 1; i >= 0; i--) {
      if (null == className) {
        className = segments[i];
      }
      else {
        className = segments[i] + "." + className;
      }
      result = ClassHelper.forName(className);
      if (null != result) {
        m_lastGoodRootIndex = i;
        break;
      }
    }
    if (null == result) {
      throw new TestNGException("Cannot load class from file: " + file);
    }
    return result;
  }
}
|
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.ilm;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xpack.core.ilm.Step.StepKey;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
/**
 * Tests for {@link ShrinkAction}: parsing/serialization round-trips, argument
 * validation, the conditional-skip branching behaviour, and the exact ordering
 * of the steps the action expands into.
 */
public class ShrinkActionTests extends AbstractActionTestCase<ShrinkAction> {
    @Override
    protected ShrinkAction doParseInstance(XContentParser parser) throws IOException {
        return ShrinkAction.parse(parser);
    }

    @Override
    protected ShrinkAction createTestInstance() {
        return randomInstance();
    }

    /** Randomly builds either a shard-count based or a max-primary-shard-size based action. */
    static ShrinkAction randomInstance() {
        if (randomBoolean()) {
            return new ShrinkAction(randomIntBetween(1, 100), null);
        } else {
            return new ShrinkAction(null, new ByteSizeValue(randomIntBetween(1, 100)));
        }
    }

    @Override
    protected ShrinkAction mutateInstance(ShrinkAction action) {
        // Mutate whichever of the two mutually-exclusive settings is present.
        if (action.getNumberOfShards() != null) {
            return new ShrinkAction(action.getNumberOfShards() + randomIntBetween(1, 2), null);
        } else {
            return new ShrinkAction(null, new ByteSizeValue(action.getMaxPrimaryShardSize().getBytes() + 1));
        }
    }

    @Override
    protected Reader<ShrinkAction> instanceReader() {
        return ShrinkAction::new;
    }

    public void testNonPositiveShardNumber() {
        Exception e = expectThrows(Exception.class, () -> new ShrinkAction(randomIntBetween(-100, 0), null));
        assertThat(e.getMessage(), equalTo("[number_of_shards] must be greater than 0"));
    }

    public void testMaxPrimaryShardSize() {
        // number_of_shards and max_primary_shard_size are mutually exclusive.
        ByteSizeValue maxPrimaryShardSize1 = new ByteSizeValue(10);
        Exception e1 = expectThrows(Exception.class, () -> new ShrinkAction(randomIntBetween(1, 100), maxPrimaryShardSize1));
        assertThat(e1.getMessage(), equalTo("Cannot set both [number_of_shards] and [max_primary_shard_size]"));
        ByteSizeValue maxPrimaryShardSize2 = new ByteSizeValue(0);
        Exception e2 = expectThrows(Exception.class, () -> new ShrinkAction(null, maxPrimaryShardSize2));
        assertThat(e2.getMessage(), equalTo("[max_primary_shard_size] must be greater than 0"));
    }

    /**
     * Builds a cluster state containing one ILM-managed index with
     * {@code numberOfShards} primaries, positioned on the given branching step
     * of a "warm" phase policy holding {@code action}. Shared by the two
     * performAction tests below.
     */
    private ClusterState createClusterStateOnStep(
        String lifecycleName,
        ShrinkAction action,
        BranchingStep step,
        String indexName,
        int numberOfShards
    ) {
        LifecyclePolicy policy = new LifecyclePolicy(
            lifecycleName,
            Collections.singletonMap("warm", new Phase("warm", TimeValue.ZERO, Collections.singletonMap(action.getWriteableName(), action)))
        );
        LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(
            policy,
            Collections.emptyMap(),
            randomNonNegativeLong(),
            randomNonNegativeLong()
        );
        return ClusterState.builder(ClusterName.DEFAULT)
            .metadata(
                Metadata.builder()
                    .putCustom(
                        IndexLifecycleMetadata.TYPE,
                        new IndexLifecycleMetadata(
                            Collections.singletonMap(policyMetadata.getName(), policyMetadata),
                            OperationMode.RUNNING
                        )
                    )
                    .put(
                        IndexMetadata.builder(indexName)
                            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_NAME, lifecycleName))
                            .putCustom(
                                LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY,
                                LifecycleExecutionState.builder()
                                    .setPhase(step.getKey().getPhase())
                                    .setPhaseTime(0L)
                                    .setAction(step.getKey().getAction())
                                    .setActionTime(0L)
                                    .setStep(step.getKey().getName())
                                    .setStepTime(0L)
                                    .build()
                                    .asMap()
                            )
                            .numberOfShards(numberOfShards)
                            .numberOfReplicas(0)
                    )
            )
            .build();
    }

    public void testPerformActionWithSkip() {
        String lifecycleName = randomAlphaOfLengthBetween(4, 10);
        int numberOfShards = randomIntBetween(1, 10);
        ShrinkAction action = new ShrinkAction(numberOfShards, null);
        String phase = randomAlphaOfLengthBetween(1, 10);
        StepKey nextStepKey = new StepKey(
            randomAlphaOfLengthBetween(1, 10),
            randomAlphaOfLengthBetween(1, 10),
            randomAlphaOfLengthBetween(1, 10)
        );
        List<Step> steps = action.toSteps(null, phase, nextStepKey);
        BranchingStep step = ((BranchingStep) steps.get(0));
        String indexName = randomAlphaOfLength(5);
        // Index already has the target shard count, so shrink should be skipped
        // and the branching step should jump straight to nextStepKey.
        ClusterState state = createClusterStateOnStep(lifecycleName, action, step, indexName, numberOfShards);
        step.performAction(state.metadata().index(indexName).getIndex(), state);
        assertThat(step.getNextStepKey(), equalTo(nextStepKey));
    }

    public void testPerformActionWithoutSkip() {
        int numShards = 6;
        int divisor = randomFrom(2, 3, 6);
        int expectedFinalShards = numShards / divisor;
        String lifecycleName = randomAlphaOfLengthBetween(4, 10);
        ShrinkAction action = new ShrinkAction(expectedFinalShards, null);
        String phase = randomAlphaOfLengthBetween(1, 10);
        StepKey nextStepKey = new StepKey(
            randomAlphaOfLengthBetween(1, 10),
            randomAlphaOfLengthBetween(1, 10),
            randomAlphaOfLengthBetween(1, 10)
        );
        List<Step> steps = action.toSteps(null, phase, nextStepKey);
        BranchingStep step = ((BranchingStep) steps.get(0));
        String indexName = randomAlphaOfLength(5);
        // Index has more shards than the target, so shrink must NOT be skipped:
        // the branching step should continue to the next step in the sequence.
        ClusterState state = createClusterStateOnStep(lifecycleName, action, step, indexName, numShards);
        step.performAction(state.metadata().index(indexName).getIndex(), state);
        assertThat(step.getNextStepKey(), equalTo(steps.get(1).getKey()));
    }

    /**
     * Verifies the full 17-step expansion of the shrink action, including the
     * two branching steps (conditional skip and data-stream check) and the
     * retry-on-threshold wiring of the wait steps.
     */
    public void testToSteps() {
        ShrinkAction action = createTestInstance();
        String phase = randomAlphaOfLengthBetween(1, 10);
        StepKey nextStepKey = new StepKey(
            randomAlphaOfLengthBetween(1, 10),
            randomAlphaOfLengthBetween(1, 10),
            randomAlphaOfLengthBetween(1, 10)
        );
        List<Step> steps = action.toSteps(null, phase, nextStepKey);
        assertThat(steps.size(), equalTo(17));
        StepKey expectedFirstKey = new StepKey(phase, ShrinkAction.NAME, ShrinkAction.CONDITIONAL_SKIP_SHRINK_STEP);
        StepKey expectedSecondKey = new StepKey(phase, ShrinkAction.NAME, CheckNotDataStreamWriteIndexStep.NAME);
        StepKey expectedThirdKey = new StepKey(phase, ShrinkAction.NAME, WaitForNoFollowersStep.NAME);
        StepKey expectedFourthKey = new StepKey(phase, ShrinkAction.NAME, ReadOnlyAction.NAME);
        StepKey expectedFifthKey = new StepKey(phase, ShrinkAction.NAME, CheckTargetShardsCountStep.NAME);
        StepKey expectedSixthKey = new StepKey(phase, ShrinkAction.NAME, CleanupShrinkIndexStep.NAME);
        StepKey expectedSeventhKey = new StepKey(phase, ShrinkAction.NAME, GenerateUniqueIndexNameStep.NAME);
        StepKey expectedEighthKey = new StepKey(phase, ShrinkAction.NAME, SetSingleNodeAllocateStep.NAME);
        StepKey expectedNinthKey = new StepKey(phase, ShrinkAction.NAME, CheckShrinkReadyStep.NAME);
        StepKey expectedTenthKey = new StepKey(phase, ShrinkAction.NAME, ShrinkStep.NAME);
        StepKey expectedEleventhKey = new StepKey(phase, ShrinkAction.NAME, ShrunkShardsAllocatedStep.NAME);
        StepKey expectedTwelveKey = new StepKey(phase, ShrinkAction.NAME, CopyExecutionStateStep.NAME);
        StepKey expectedThirteenKey = new StepKey(phase, ShrinkAction.NAME, ShrinkAction.CONDITIONAL_DATASTREAM_CHECK_KEY);
        StepKey expectedFourteenKey = new StepKey(phase, ShrinkAction.NAME, ShrinkSetAliasStep.NAME);
        StepKey expectedFifteenKey = new StepKey(phase, ShrinkAction.NAME, ShrunkenIndexCheckStep.NAME);
        StepKey expectedSixteenKey = new StepKey(phase, ShrinkAction.NAME, ReplaceDataStreamBackingIndexStep.NAME);
        StepKey expectedSeventeenKey = new StepKey(phase, ShrinkAction.NAME, DeleteStep.NAME);
        assertTrue(steps.get(0) instanceof BranchingStep);
        assertThat(steps.get(0).getKey(), equalTo(expectedFirstKey));
        // A BranchingStep's next key is undefined until performAction runs.
        expectThrows(IllegalStateException.class, () -> steps.get(0).getNextStepKey());
        assertThat(((BranchingStep) steps.get(0)).getNextStepKeyOnFalse(), equalTo(expectedSecondKey));
        assertThat(((BranchingStep) steps.get(0)).getNextStepKeyOnTrue(), equalTo(nextStepKey));
        assertTrue(steps.get(1) instanceof CheckNotDataStreamWriteIndexStep);
        assertThat(steps.get(1).getKey(), equalTo(expectedSecondKey));
        assertThat(steps.get(1).getNextStepKey(), equalTo(expectedThirdKey));
        assertTrue(steps.get(2) instanceof WaitForNoFollowersStep);
        assertThat(steps.get(2).getKey(), equalTo(expectedThirdKey));
        assertThat(steps.get(2).getNextStepKey(), equalTo(expectedFourthKey));
        assertTrue(steps.get(3) instanceof ReadOnlyStep);
        assertThat(steps.get(3).getKey(), equalTo(expectedFourthKey));
        assertThat(steps.get(3).getNextStepKey(), equalTo(expectedFifthKey));
        assertTrue(steps.get(4) instanceof CheckTargetShardsCountStep);
        assertThat(steps.get(4).getKey(), equalTo(expectedFifthKey));
        assertThat(steps.get(4).getNextStepKey(), equalTo(expectedSixthKey));
        assertTrue(steps.get(5) instanceof CleanupShrinkIndexStep);
        assertThat(steps.get(5).getKey(), equalTo(expectedSixthKey));
        assertThat(steps.get(5).getNextStepKey(), equalTo(expectedSeventhKey));
        assertTrue(steps.get(6) instanceof GenerateUniqueIndexNameStep);
        assertThat(steps.get(6).getKey(), equalTo(expectedSeventhKey));
        assertThat(steps.get(6).getNextStepKey(), equalTo(expectedEighthKey));
        assertTrue(steps.get(7) instanceof SetSingleNodeAllocateStep);
        assertThat(steps.get(7).getKey(), equalTo(expectedEighthKey));
        assertThat(steps.get(7).getNextStepKey(), equalTo(expectedNinthKey));
        assertTrue(steps.get(8) instanceof ClusterStateWaitUntilThresholdStep);
        assertThat(((ClusterStateWaitUntilThresholdStep) steps.get(8)).getStepToExecute(), is(instanceOf(CheckShrinkReadyStep.class)));
        // assert in case the threshold is breached we go back to the "set single node allocation" step
        assertThat(((ClusterStateWaitUntilThresholdStep) steps.get(8)).getNextKeyOnThreshold(), is(expectedEighthKey));
        assertThat(steps.get(8).getKey(), equalTo(expectedNinthKey));
        assertThat(steps.get(8).getNextStepKey(), equalTo(expectedTenthKey));
        assertTrue(steps.get(9) instanceof ShrinkStep);
        assertThat(steps.get(9).getKey(), equalTo(expectedTenthKey));
        assertThat(steps.get(9).getNextStepKey(), equalTo(expectedEleventhKey));
        assertTrue(steps.get(10) instanceof ClusterStateWaitUntilThresholdStep);
        assertThat(steps.get(10).getKey(), equalTo(expectedEleventhKey));
        assertThat(steps.get(10).getNextStepKey(), equalTo(expectedTwelveKey));
        assertThat(
            ((ClusterStateWaitUntilThresholdStep) steps.get(10)).getStepToExecute(),
            is(instanceOf(ShrunkShardsAllocatedStep.class))
        );
        // assert in case the threshold is breached we go back to the "cleanup shrunk index" step
        assertThat(((ClusterStateWaitUntilThresholdStep) steps.get(10)).getNextKeyOnThreshold(), is(expectedSixthKey));
        assertTrue(steps.get(11) instanceof CopyExecutionStateStep);
        assertThat(steps.get(11).getKey(), equalTo(expectedTwelveKey));
        assertThat(steps.get(11).getNextStepKey(), equalTo(expectedThirteenKey));
        assertTrue(steps.get(12) instanceof BranchingStep);
        assertThat(steps.get(12).getKey(), equalTo(expectedThirteenKey));
        expectThrows(IllegalStateException.class, () -> steps.get(12).getNextStepKey());
        assertThat(((BranchingStep) steps.get(12)).getNextStepKeyOnFalse(), equalTo(expectedFourteenKey));
        assertThat(((BranchingStep) steps.get(12)).getNextStepKeyOnTrue(), equalTo(expectedSixteenKey));
        assertTrue(steps.get(13) instanceof ShrinkSetAliasStep);
        assertThat(steps.get(13).getKey(), equalTo(expectedFourteenKey));
        assertThat(steps.get(13).getNextStepKey(), equalTo(expectedFifteenKey));
        assertTrue(steps.get(14) instanceof ShrunkenIndexCheckStep);
        assertThat(steps.get(14).getKey(), equalTo(expectedFifteenKey));
        assertThat(steps.get(14).getNextStepKey(), equalTo(nextStepKey));
        assertTrue(steps.get(15) instanceof ReplaceDataStreamBackingIndexStep);
        assertThat(steps.get(15).getKey(), equalTo(expectedSixteenKey));
        assertThat(steps.get(15).getNextStepKey(), equalTo(expectedSeventeenKey));
        assertTrue(steps.get(16) instanceof DeleteStep);
        assertThat(steps.get(16).getKey(), equalTo(expectedSeventeenKey));
        assertThat(steps.get(16).getNextStepKey(), equalTo(expectedFifteenKey));
    }

    @Override
    protected boolean isSafeAction() {
        return false;
    }
}
|
|
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Objects;
import java.util.Observable;
import java.util.Observer;
import java.util.Set;
import java.util.logging.Level;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.CasePreferences;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentVisitor;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.File;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.LocalFile;
import org.sleuthkit.datamodel.SlackFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.VirtualDirectory;
/**
* Files by Size View node and related child nodes
*/
public class FileSize implements AutopsyVisitableItem {
    private SleuthkitCase skCase; // case database handle used for all file-size queries
    private final long datasourceObjId; // data source scope for queries; see filteringDataSourceObjId()
public enum FileSizeFilter implements AutopsyVisitableItem {
SIZE_50_200(0, "SIZE_50_200", "50 - 200MB"), //NON-NLS
SIZE_200_1000(1, "SIZE_200_1GB", "200MB - 1GB"), //NON-NLS
SIZE_1000_(2, "SIZE_1000+", "1GB+"); //NON-NLS
private int id;
private String name;
private String displayName;
private FileSizeFilter(int id, String name, String displayName) {
this.id = id;
this.name = name;
this.displayName = displayName;
}
public String getName() {
return this.name;
}
public int getId() {
return this.id;
}
public String getDisplayName() {
return this.displayName;
}
@Override
public <T> T accept(AutopsyItemVisitor<T> visitor) {
return visitor.visit(this);
}
}
    /**
     * Constructs the view without scoping it to a particular data source.
     *
     * @param skCase case database handle used to run the file-size queries
     */
    public FileSize(SleuthkitCase skCase) {
        this(skCase, 0);
    }

    /**
     * Constructs the view, optionally scoped to a single data source.
     *
     * @param skCase case database handle used to run the file-size queries
     * @param dsObjId object id of the data source to restrict results to;
     *                the no-arg constructor passes 0 — presumably meaning
     *                "all data sources", TODO confirm against callers
     */
    public FileSize(SleuthkitCase skCase, long dsObjId) {
        this.skCase = skCase;
        this.datasourceObjId = dsObjId;
    }

    @Override
    public <T> T accept(AutopsyItemVisitor<T> visitor) {
        // Standard visitor double-dispatch.
        return visitor.visit(this);
    }

    /**
     * @return the case database handle this view queries
     */
    public SleuthkitCase getSleuthkitCase() {
        return this.skCase;
    }

    /**
     * @return the data source object id used to scope queries (0 when
     *         constructed without one)
     */
    long filteringDataSourceObjId() {
        return this.datasourceObjId;
    }
/*
* Root node. Children are nodes for specific sizes.
*/
public static class FileSizeRootNode extends DisplayableItemNode {
private static final String NAME = NbBundle.getMessage(FileSize.class, "FileSize.fileSizeRootNode.name");
FileSizeRootNode(SleuthkitCase skCase, long datasourceObjId) {
super(Children.create(new FileSizeRootChildren(skCase, datasourceObjId), true), Lookups.singleton(NAME));
super.setName(NAME);
super.setDisplayName(NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-size-16.png"); //NON-NLS
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileSize.createSheet.name.name"),
NbBundle.getMessage(this.getClass(), "FileSize.createSheet.name.displayName"),
NbBundle.getMessage(this.getClass(), "FileSize.createSheet.name.desc"),
NAME));
return sheet;
}
@Override
public String getItemType() {
return getClass().getName();
}
}
/*
* Makes the children for specific sizes
*/
public static class FileSizeRootChildren extends ChildFactory<org.sleuthkit.autopsy.datamodel.FileSize.FileSizeFilter> {
        private SleuthkitCase skCase; // case database used to build child nodes
        private final long datasourceObjId; // data source scope passed down to children
        private Observable notifier; // pushes refresh events to the size nodes
        /**
         * @param skCase          case database handle
         * @param datasourceObjId data source scope passed through to the
         *                        child nodes and their queries
         */
        public FileSizeRootChildren(SleuthkitCase skCase, long datasourceObjId) {
            this.skCase = skCase;
            this.datasourceObjId = datasourceObjId;
            // Shared observable; every size node subscribes to it for refreshes.
            notifier = new FileSizeRootChildrenObservable();
        }
        /**
         * Listens for case and ingest events. Updates observers when events
         * are fired. Size-based nodes are listening to this for changes.
         */
        private static final class FileSizeRootChildrenObservable extends Observable {

            // Case events that should trigger a refresh of the size nodes.
            private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(Case.Events.DATA_SOURCE_ADDED, Case.Events.CURRENT_CASE);

            FileSizeRootChildrenObservable() {
                // Subscribe to job-level and module-level ingest events plus
                // the case events of interest; all share one listener.
                IngestManager.getInstance().addIngestJobEventListener(pcl);
                IngestManager.getInstance().addIngestModuleEventListener(pcl);
                Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
            }

            /**
             * Drops all observers and unsubscribes from every event source;
             * called when the current case is closed.
             */
            private void removeListeners() {
                deleteObservers();
                IngestManager.getInstance().removeIngestJobEventListener(pcl);
                IngestManager.getInstance().removeIngestModuleEventListener(pcl);
                Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
            }

            // Single listener shared by all subscriptions made in the constructor.
            private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
                String eventType = evt.getPropertyName();
                if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
                    /**
                     * Checking for a current case is a stop gap measure until a
                     * different way of handling the closing of cases is worked
                     * out. Currently, remote events may be received for a case
                     * that is already closed.
                     */
                    try {
                        // new file was added
                        // @@@ could check the size here and only fire off updates if we know the file meets the min size criteria
                        Case.getCurrentCaseThrows();
                        update();
                    } catch (NoCurrentCaseException notUsed) {
                        /**
                         * Case is closed, do nothing.
                         */
                    }
                } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
                        || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())
                        || eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) {
                    /**
                     * Checking for a current case is a stop gap measure until a
                     * different way of handling the closing of cases is worked
                     * out. Currently, remote events may be received for a case
                     * that is already closed.
                     */
                    try {
                        Case.getCurrentCaseThrows();
                        update();
                    } catch (NoCurrentCaseException notUsed) {
                        /**
                         * Case is closed, do nothing.
                         */
                    }
                } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
                    // case was closed. Remove listeners so that we don't get called with a stale case handle
                    if (evt.getNewValue() == null) {
                        removeListeners();
                    }
                }
            };

            // Push a change notification to all subscribed size nodes.
            private void update() {
                setChanged();
                notifyObservers();
            }
        }
@Override
protected boolean createKeys(List<FileSizeFilter> list) {
list.addAll(Arrays.asList(FileSizeFilter.values()));
return true;
}
        @Override
        protected Node createNodeForKey(FileSizeFilter key) {
            // Each size bucket gets its own node; the shared notifier keeps
            // the node's displayed count up to date as events arrive.
            return new FileSizeNode(skCase, key, notifier, datasourceObjId);
        }
/*
* Node for a specific size range. Children are files.
*/
public class FileSizeNode extends DisplayableItemNode {
private FileSizeFilter filter;
private final long datasourceObjId;
// use version with observer instead so that it updates
@Deprecated
FileSizeNode(SleuthkitCase skCase, FileSizeFilter filter, long datasourceObjId) {
super(Children.create(new FileSizeChildren(filter, skCase, null, datasourceObjId), true), Lookups.singleton(filter.getDisplayName()));
this.filter = filter;
this.datasourceObjId = datasourceObjId;
init();
}
/**
*
* @param skCase
* @param filter
* @param o Observable that provides updates when events are
* fired
* @param datasourceObjId filter by data source, if configured in user preferences
*/
FileSizeNode(SleuthkitCase skCase, FileSizeFilter filter, Observable o, long datasourceObjId) {
super(Children.create(new FileSizeChildren(filter, skCase, o, datasourceObjId), true), Lookups.singleton(filter.getDisplayName()));
this.filter = filter;
this.datasourceObjId = datasourceObjId;
init();
o.addObserver(new FileSizeNodeObserver());
}
private void init() {
super.setName(filter.getName());
String tooltip = filter.getDisplayName();
this.setShortDescription(tooltip);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-size-16.png"); //NON-NLS
updateDisplayName();
}
@Override
public String getItemType() {
/**
* Return getClass().getName() + filter.getName() if custom
* settings are desired for different filters.
*/
return DisplayableItemNode.FILE_PARENT_NODE_KEY;
}
// update the display name when new events are fired
private class FileSizeNodeObserver implements Observer {
@Override
public void update(Observable o, Object arg) {
updateDisplayName();
}
}
private void updateDisplayName() {
final long numVisibleChildren = FileSizeChildren.calculateItems(skCase, filter, datasourceObjId);
super.setDisplayName(filter.getDisplayName() + " (" + numVisibleChildren + ")");
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}
@Override
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileSize.createSheet.filterType.name"),
NbBundle.getMessage(this.getClass(), "FileSize.createSheet.filterType.displayName"),
NbBundle.getMessage(this.getClass(), "FileSize.createSheet.filterType.desc"),
filter.getDisplayName()));
return sheet;
}
@Override
public boolean isLeafTypeNode() {
return true;
}
}
/*
* Makes children, which are nodes for files of a given range
*/
static class FileSizeChildren extends ChildFactory.Detachable<AbstractFile> {
private final SleuthkitCase skCase;
private final FileSizeFilter filter;
private final Observable notifier;
private final long datasourceObjId;
private static final Logger logger = Logger.getLogger(FileSizeChildren.class.getName());
/**
*
* @param filter
* @param skCase
* @param o Observable that provides updates when new files are
* added to case
*/
FileSizeChildren(FileSizeFilter filter, SleuthkitCase skCase, Observable o, long dsObjId) {
this.skCase = skCase;
this.filter = filter;
this.notifier = o;
this.datasourceObjId = dsObjId;
}
@Override
protected void addNotify() {
if (notifier != null) {
notifier.addObserver(observer);
}
}
@Override
protected void removeNotify() {
if (notifier != null) {
notifier.deleteObserver(observer);
}
}
private final Observer observer = new FileSizeChildrenObserver();
// Cause refresh of children if there are changes
private class FileSizeChildrenObserver implements Observer {
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
@Override
protected boolean createKeys(List<AbstractFile> list) {
list.addAll(runFsQuery());
return true;
}
private static String makeQuery(FileSizeFilter filter, long filteringDSObjId) {
String query;
switch (filter) {
case SIZE_50_200:
query = "(size >= 50000000 AND size < 200000000)"; //NON-NLS
break;
case SIZE_200_1000:
query = "(size >= 200000000 AND size < 1000000000)"; //NON-NLS
break;
case SIZE_1000_:
query = "(size >= 1000000000)"; //NON-NLS
break;
default:
throw new IllegalArgumentException("Unsupported filter type to get files by size: " + filter); //NON-NLS
}
// Ignore unallocated block files.
query = query + " AND (type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType() + ")"; //NON-NLS
// Hide known files if indicated in the user preferences.
if(UserPreferences.hideKnownFilesInViewsTree()) {
query += " AND (known != " + TskData.FileKnown.KNOWN.getFileKnownValue() //NON-NLS
+ " OR known IS NULL)"; //NON-NLS
}
// Hide slack files if indicated in the user preferences.
if(UserPreferences.hideSlackFilesInViewsTree()) {
query += " AND (type != " + TskData.TSK_DB_FILES_TYPE_ENUM.SLACK.getFileType() + ")"; //NON-NLS
}
// filter by datasource if indicated in case preferences
if (Objects.equals(CasePreferences.getGroupItemsInTreeByDataSource(), true)) {
query += " AND data_source_obj_id = " + filteringDSObjId;
}
return query;
}
private List<AbstractFile> runFsQuery() {
List<AbstractFile> ret = new ArrayList<>();
try {
String query = makeQuery(filter, datasourceObjId);
ret = skCase.findAllFilesWhere(query);
} catch (Exception e) {
logger.log(Level.SEVERE, "Error getting files for the file size view: " + e.getMessage()); //NON-NLS
}
return ret;
}
/**
* Get children count without actually loading all nodes
*
* @return
*/
static long calculateItems(SleuthkitCase sleuthkitCase, FileSizeFilter filter, long datasourceObjId) {
try {
return sleuthkitCase.countFilesWhere(makeQuery(filter, datasourceObjId));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting files by size search view count", ex); //NON-NLS
return 0;
}
}
@Override
protected Node createNodeForKey(AbstractFile key) {
return key.accept(new ContentVisitor.Default<AbstractNode>() {
public FileNode visit(AbstractFile f) {
return new FileNode(f, false);
}
public FileNode visit(FsContent f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(LayoutFile f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(File f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(Directory f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(LocalFile f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(DerivedFile f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(VirtualDirectory f) {
return new FileNode(f, false);
}
@Override
public FileNode visit(SlackFile f) {
return new FileNode(f, false);
}
@Override
protected AbstractNode defaultVisit(Content di) {
throw new UnsupportedOperationException(
NbBundle.getMessage(this.getClass(),
"FileSize.exception.notSupported.msg",
di.toString()));
}
});
}
}
}
}
|
|
package org.drip.analytics.holset;
/*
* -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*/
/*
* GENERATED on Fri Jan 11 19:54:07 EST 2013 ---- DO NOT DELETE
*/
/*!
* Copyright (C) 2013 Lakshmi Krishnamurthy
* Copyright (C) 2012 Lakshmi Krishnamurthy
* Copyright (C) 2011 Lakshmi Krishnamurthy
*
* This file is part of CreditAnalytics, a free-software/open-source library for
* fixed income analysts and developers - http://www.credit-trader.org
*
* CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special focus
* towards the needs of the bonds and credit products community.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class UVRHoliday implements org.drip.analytics.holset.LocationHoliday {
	/**
	 * Empty UVRHoliday constructor (this class is generated code).
	 */
	public UVRHoliday()
	{
	}
public java.lang.String getHolidayLoc()
{
return "UVR";
}
public org.drip.analytics.holiday.Locale getHolidaySet()
{
org.drip.analytics.holiday.Locale lh = new
org.drip.analytics.holiday.Locale();
lh.addStaticHoliday ("01-JAN-1998", "New Years Day");
lh.addStaticHoliday ("12-JAN-1998", "Epiphany");
lh.addStaticHoliday ("23-MAR-1998", "St. Josephs Day");
lh.addStaticHoliday ("09-APR-1998", "Holy Thursday");
lh.addStaticHoliday ("10-APR-1998", "Good Friday");
lh.addStaticHoliday ("01-MAY-1998", "Labor Day");
lh.addStaticHoliday ("25-MAY-1998", "Ascension Day Observed");
lh.addStaticHoliday ("15-JUN-1998", "Corpus Christi Observed");
lh.addStaticHoliday ("22-JUN-1998", "Sacred Heart of Jesus");
lh.addStaticHoliday ("29-JUN-1998", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-1998", "Independence Day");
lh.addStaticHoliday ("07-AUG-1998", "National Holiday");
lh.addStaticHoliday ("17-AUG-1998", "Assumption Day");
lh.addStaticHoliday ("12-OCT-1998", "Columbus Day");
lh.addStaticHoliday ("02-NOV-1998", "All Saints Day");
lh.addStaticHoliday ("16-NOV-1998", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-1998", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-1998", "Christmas Day");
lh.addStaticHoliday ("01-JAN-1999", "New Years Day");
lh.addStaticHoliday ("11-JAN-1999", "Epiphany");
lh.addStaticHoliday ("22-MAR-1999", "St. Josephs Day");
lh.addStaticHoliday ("01-APR-1999", "Holy Thursday");
lh.addStaticHoliday ("02-APR-1999", "Good Friday");
lh.addStaticHoliday ("17-MAY-1999", "Ascension Day Observed");
lh.addStaticHoliday ("07-JUN-1999", "Corpus Christi Observed");
lh.addStaticHoliday ("14-JUN-1999", "Sacred Heart of Jesus");
lh.addStaticHoliday ("05-JUL-1999", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-1999", "Independence Day");
lh.addStaticHoliday ("16-AUG-1999", "Assumption Day");
lh.addStaticHoliday ("18-OCT-1999", "Columbus Day");
lh.addStaticHoliday ("01-NOV-1999", "All Saints Day");
lh.addStaticHoliday ("15-NOV-1999", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-1999", "Immaculate Conception");
lh.addStaticHoliday ("10-JAN-2000", "Epiphany");
lh.addStaticHoliday ("20-MAR-2000", "St. Josephs Day");
lh.addStaticHoliday ("20-APR-2000", "Holy Thursday");
lh.addStaticHoliday ("21-APR-2000", "Good Friday");
lh.addStaticHoliday ("01-MAY-2000", "Labor Day");
lh.addStaticHoliday ("05-JUN-2000", "Ascension Day Observed");
lh.addStaticHoliday ("26-JUN-2000", "Corpus Christi Observed");
lh.addStaticHoliday ("03-JUL-2000", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2000", "Independence Day");
lh.addStaticHoliday ("07-AUG-2000", "National Holiday");
lh.addStaticHoliday ("21-AUG-2000", "Assumption Day");
lh.addStaticHoliday ("16-OCT-2000", "Columbus Day");
lh.addStaticHoliday ("06-NOV-2000", "All Saints Day");
lh.addStaticHoliday ("13-NOV-2000", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2000", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2000", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2001", "New Years Day");
lh.addStaticHoliday ("08-JAN-2001", "Epiphany");
lh.addStaticHoliday ("19-MAR-2001", "St. Josephs Day");
lh.addStaticHoliday ("12-APR-2001", "Holy Thursday");
lh.addStaticHoliday ("13-APR-2001", "Good Friday");
lh.addStaticHoliday ("01-MAY-2001", "Labor Day");
lh.addStaticHoliday ("28-MAY-2001", "Ascension Day Observed");
lh.addStaticHoliday ("18-JUN-2001", "Corpus Christi Observed");
lh.addStaticHoliday ("25-JUN-2001", "Sacred Heart of Jesus");
lh.addStaticHoliday ("02-JUL-2001", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2001", "Independence Day");
lh.addStaticHoliday ("07-AUG-2001", "National Holiday");
lh.addStaticHoliday ("20-AUG-2001", "Assumption Day");
lh.addStaticHoliday ("15-OCT-2001", "Columbus Day");
lh.addStaticHoliday ("05-NOV-2001", "All Saints Day");
lh.addStaticHoliday ("12-NOV-2001", "Independence of Cartagena");
lh.addStaticHoliday ("25-DEC-2001", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2002", "New Years Day");
lh.addStaticHoliday ("07-JAN-2002", "Epiphany");
lh.addStaticHoliday ("25-MAR-2002", "St. Josephs Day");
lh.addStaticHoliday ("28-MAR-2002", "Holy Thursday");
lh.addStaticHoliday ("29-MAR-2002", "Good Friday");
lh.addStaticHoliday ("01-MAY-2002", "Labor Day");
lh.addStaticHoliday ("13-MAY-2002", "Ascension Day Observed");
lh.addStaticHoliday ("03-JUN-2002", "Corpus Christi Observed");
lh.addStaticHoliday ("10-JUN-2002", "Sacred Heart of Jesus");
lh.addStaticHoliday ("01-JUL-2002", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("07-AUG-2002", "National Holiday");
lh.addStaticHoliday ("19-AUG-2002", "Assumption Day");
lh.addStaticHoliday ("14-OCT-2002", "Columbus Day");
lh.addStaticHoliday ("04-NOV-2002", "All Saints Day");
lh.addStaticHoliday ("11-NOV-2002", "Independence of Cartagena");
lh.addStaticHoliday ("25-DEC-2002", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2003", "New Years Day");
lh.addStaticHoliday ("06-JAN-2003", "Epiphany");
lh.addStaticHoliday ("24-MAR-2003", "St. Josephs Day");
lh.addStaticHoliday ("17-APR-2003", "Holy Thursday");
lh.addStaticHoliday ("18-APR-2003", "Good Friday");
lh.addStaticHoliday ("01-MAY-2003", "Labor Day");
lh.addStaticHoliday ("02-JUN-2003", "Ascension Day Observed");
lh.addStaticHoliday ("23-JUN-2003", "Corpus Christi Observed");
lh.addStaticHoliday ("30-JUN-2003", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("07-AUG-2003", "National Holiday");
lh.addStaticHoliday ("18-AUG-2003", "Assumption Day");
lh.addStaticHoliday ("13-OCT-2003", "Columbus Day");
lh.addStaticHoliday ("03-NOV-2003", "All Saints Day");
lh.addStaticHoliday ("17-NOV-2003", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2003", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2003", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2004", "New Years Day");
lh.addStaticHoliday ("12-JAN-2004", "Epiphany");
lh.addStaticHoliday ("22-MAR-2004", "St. Josephs Day");
lh.addStaticHoliday ("08-APR-2004", "Holy Thursday");
lh.addStaticHoliday ("09-APR-2004", "Good Friday");
lh.addStaticHoliday ("24-MAY-2004", "Ascension Day Observed");
lh.addStaticHoliday ("14-JUN-2004", "Corpus Christi Observed");
lh.addStaticHoliday ("21-JUN-2004", "Sacred Heart of Jesus");
lh.addStaticHoliday ("05-JUL-2004", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2004", "Independence Day");
lh.addStaticHoliday ("16-AUG-2004", "Assumption Day");
lh.addStaticHoliday ("18-OCT-2004", "Columbus Day");
lh.addStaticHoliday ("01-NOV-2004", "All Saints Day");
lh.addStaticHoliday ("15-NOV-2004", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2004", "Immaculate Conception");
lh.addStaticHoliday ("10-JAN-2005", "Epiphany");
lh.addStaticHoliday ("21-MAR-2005", "St. Josephs Day");
lh.addStaticHoliday ("24-MAR-2005", "Holy Thursday");
lh.addStaticHoliday ("25-MAR-2005", "Good Friday");
lh.addStaticHoliday ("09-MAY-2005", "Ascension Day Observed");
lh.addStaticHoliday ("30-MAY-2005", "Corpus Christi Observed");
lh.addStaticHoliday ("06-JUN-2005", "Sacred Heart of Jesus");
lh.addStaticHoliday ("04-JUL-2005", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2005", "Independence Day");
lh.addStaticHoliday ("15-AUG-2005", "Assumption Day");
lh.addStaticHoliday ("17-OCT-2005", "Columbus Day");
lh.addStaticHoliday ("07-NOV-2005", "All Saints Day");
lh.addStaticHoliday ("14-NOV-2005", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2005", "Immaculate Conception");
lh.addStaticHoliday ("09-JAN-2006", "Epiphany");
lh.addStaticHoliday ("20-MAR-2006", "St. Josephs Day");
lh.addStaticHoliday ("13-APR-2006", "Holy Thursday");
lh.addStaticHoliday ("14-APR-2006", "Good Friday");
lh.addStaticHoliday ("01-MAY-2006", "Labor Day");
lh.addStaticHoliday ("29-MAY-2006", "Ascension Day Observed");
lh.addStaticHoliday ("19-JUN-2006", "Corpus Christi Observed");
lh.addStaticHoliday ("26-JUN-2006", "Sacred Heart of Jesus");
lh.addStaticHoliday ("03-JUL-2006", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2006", "Independence Day");
lh.addStaticHoliday ("07-AUG-2006", "National Holiday");
lh.addStaticHoliday ("21-AUG-2006", "Assumption Day");
lh.addStaticHoliday ("16-OCT-2006", "Columbus Day");
lh.addStaticHoliday ("06-NOV-2006", "All Saints Day");
lh.addStaticHoliday ("13-NOV-2006", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2006", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2006", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2007", "New Years Day");
lh.addStaticHoliday ("08-JAN-2007", "Epiphany");
lh.addStaticHoliday ("19-MAR-2007", "St. Josephs Day");
lh.addStaticHoliday ("05-APR-2007", "Holy Thursday");
lh.addStaticHoliday ("06-APR-2007", "Good Friday");
lh.addStaticHoliday ("01-MAY-2007", "Labor Day");
lh.addStaticHoliday ("21-MAY-2007", "Ascension Day Observed");
lh.addStaticHoliday ("11-JUN-2007", "Corpus Christi Observed");
lh.addStaticHoliday ("18-JUN-2007", "Sacred Heart of Jesus");
lh.addStaticHoliday ("02-JUL-2007", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2007", "Independence Day");
lh.addStaticHoliday ("07-AUG-2007", "National Holiday");
lh.addStaticHoliday ("20-AUG-2007", "Assumption Day");
lh.addStaticHoliday ("15-OCT-2007", "Columbus Day");
lh.addStaticHoliday ("05-NOV-2007", "All Saints Day");
lh.addStaticHoliday ("12-NOV-2007", "Independence of Cartagena");
lh.addStaticHoliday ("25-DEC-2007", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2008", "New Years Day");
lh.addStaticHoliday ("07-JAN-2008", "Epiphany");
lh.addStaticHoliday ("20-MAR-2008", "Holy Thursday");
lh.addStaticHoliday ("21-MAR-2008", "Good Friday");
lh.addStaticHoliday ("24-MAR-2008", "St. Josephs Day");
lh.addStaticHoliday ("01-MAY-2008", "Labor Day");
lh.addStaticHoliday ("05-MAY-2008", "Ascension Day Observed");
lh.addStaticHoliday ("26-MAY-2008", "Corpus Christi Observed");
lh.addStaticHoliday ("02-JUN-2008", "Sacred Heart of Jesus");
lh.addStaticHoliday ("30-JUN-2008", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("07-AUG-2008", "National Holiday");
lh.addStaticHoliday ("18-AUG-2008", "Assumption Day");
lh.addStaticHoliday ("13-OCT-2008", "Columbus Day");
lh.addStaticHoliday ("03-NOV-2008", "All Saints Day");
lh.addStaticHoliday ("17-NOV-2008", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2008", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2008", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2009", "New Years Day");
lh.addStaticHoliday ("12-JAN-2009", "Epiphany");
lh.addStaticHoliday ("23-MAR-2009", "St. Josephs Day");
lh.addStaticHoliday ("09-APR-2009", "Holy Thursday");
lh.addStaticHoliday ("10-APR-2009", "Good Friday");
lh.addStaticHoliday ("01-MAY-2009", "Labor Day");
lh.addStaticHoliday ("25-MAY-2009", "Ascension Day Observed");
lh.addStaticHoliday ("15-JUN-2009", "Corpus Christi Observed");
lh.addStaticHoliday ("22-JUN-2009", "Sacred Heart of Jesus");
lh.addStaticHoliday ("29-JUN-2009", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2009", "Independence Day");
lh.addStaticHoliday ("07-AUG-2009", "National Holiday");
lh.addStaticHoliday ("17-AUG-2009", "Assumption Day");
lh.addStaticHoliday ("12-OCT-2009", "Columbus Day");
lh.addStaticHoliday ("02-NOV-2009", "All Saints Day");
lh.addStaticHoliday ("16-NOV-2009", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2009", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2009", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2010", "New Years Day");
lh.addStaticHoliday ("11-JAN-2010", "Epiphany");
lh.addStaticHoliday ("22-MAR-2010", "St. Josephs Day");
lh.addStaticHoliday ("01-APR-2010", "Holy Thursday");
lh.addStaticHoliday ("02-APR-2010", "Good Friday");
lh.addStaticHoliday ("17-MAY-2010", "Ascension Day Observed");
lh.addStaticHoliday ("07-JUN-2010", "Corpus Christi Observed");
lh.addStaticHoliday ("14-JUN-2010", "Sacred Heart of Jesus");
lh.addStaticHoliday ("05-JUL-2010", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2010", "Independence Day");
lh.addStaticHoliday ("16-AUG-2010", "Assumption Day");
lh.addStaticHoliday ("18-OCT-2010", "Columbus Day");
lh.addStaticHoliday ("01-NOV-2010", "All Saints Day");
lh.addStaticHoliday ("15-NOV-2010", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2010", "Immaculate Conception");
lh.addStaticHoliday ("10-JAN-2011", "Epiphany");
lh.addStaticHoliday ("21-MAR-2011", "St. Josephs Day");
lh.addStaticHoliday ("21-APR-2011", "Holy Thursday");
lh.addStaticHoliday ("22-APR-2011", "Good Friday");
lh.addStaticHoliday ("06-JUN-2011", "Ascension Day Observed");
lh.addStaticHoliday ("27-JUN-2011", "Corpus Christi Observed");
lh.addStaticHoliday ("04-JUL-2011", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2011", "Independence Day");
lh.addStaticHoliday ("15-AUG-2011", "Assumption Day");
lh.addStaticHoliday ("17-OCT-2011", "Columbus Day");
lh.addStaticHoliday ("07-NOV-2011", "All Saints Day");
lh.addStaticHoliday ("14-NOV-2011", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2011", "Immaculate Conception");
lh.addStaticHoliday ("09-JAN-2012", "Epiphany");
lh.addStaticHoliday ("19-MAR-2012", "St. Josephs Day");
lh.addStaticHoliday ("05-APR-2012", "Holy Thursday");
lh.addStaticHoliday ("06-APR-2012", "Good Friday");
lh.addStaticHoliday ("01-MAY-2012", "Labor Day");
lh.addStaticHoliday ("21-MAY-2012", "Ascension Day Observed");
lh.addStaticHoliday ("11-JUN-2012", "Corpus Christi Observed");
lh.addStaticHoliday ("18-JUN-2012", "Sacred Heart of Jesus");
lh.addStaticHoliday ("02-JUL-2012", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2012", "Independence Day");
lh.addStaticHoliday ("07-AUG-2012", "National Holiday");
lh.addStaticHoliday ("20-AUG-2012", "Assumption Day");
lh.addStaticHoliday ("15-OCT-2012", "Columbus Day");
lh.addStaticHoliday ("05-NOV-2012", "All Saints Day");
lh.addStaticHoliday ("12-NOV-2012", "Independence of Cartagena");
lh.addStaticHoliday ("25-DEC-2012", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2013", "New Years Day");
lh.addStaticHoliday ("07-JAN-2013", "Epiphany");
lh.addStaticHoliday ("25-MAR-2013", "St. Josephs Day");
lh.addStaticHoliday ("28-MAR-2013", "Holy Thursday");
lh.addStaticHoliday ("29-MAR-2013", "Good Friday");
lh.addStaticHoliday ("01-MAY-2013", "Labor Day");
lh.addStaticHoliday ("13-MAY-2013", "Ascension Day Observed");
lh.addStaticHoliday ("03-JUN-2013", "Corpus Christi Observed");
lh.addStaticHoliday ("10-JUN-2013", "Sacred Heart of Jesus");
lh.addStaticHoliday ("01-JUL-2013", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("07-AUG-2013", "National Holiday");
lh.addStaticHoliday ("19-AUG-2013", "Assumption Day");
lh.addStaticHoliday ("14-OCT-2013", "Columbus Day");
lh.addStaticHoliday ("04-NOV-2013", "All Saints Day");
lh.addStaticHoliday ("11-NOV-2013", "Independence of Cartagena");
lh.addStaticHoliday ("25-DEC-2013", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2014", "New Years Day");
lh.addStaticHoliday ("06-JAN-2014", "Epiphany");
lh.addStaticHoliday ("24-MAR-2014", "St. Josephs Day");
lh.addStaticHoliday ("17-APR-2014", "Holy Thursday");
lh.addStaticHoliday ("18-APR-2014", "Good Friday");
lh.addStaticHoliday ("01-MAY-2014", "Labor Day");
lh.addStaticHoliday ("02-JUN-2014", "Ascension Day Observed");
lh.addStaticHoliday ("23-JUN-2014", "Corpus Christi Observed");
lh.addStaticHoliday ("30-JUN-2014", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("07-AUG-2014", "National Holiday");
lh.addStaticHoliday ("18-AUG-2014", "Assumption Day");
lh.addStaticHoliday ("13-OCT-2014", "Columbus Day");
lh.addStaticHoliday ("03-NOV-2014", "All Saints Day");
lh.addStaticHoliday ("17-NOV-2014", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2014", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2014", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2015", "New Years Day");
lh.addStaticHoliday ("12-JAN-2015", "Epiphany");
lh.addStaticHoliday ("23-MAR-2015", "St. Josephs Day");
lh.addStaticHoliday ("02-APR-2015", "Holy Thursday");
lh.addStaticHoliday ("03-APR-2015", "Good Friday");
lh.addStaticHoliday ("01-MAY-2015", "Labor Day");
lh.addStaticHoliday ("18-MAY-2015", "Ascension Day Observed");
lh.addStaticHoliday ("08-JUN-2015", "Corpus Christi Observed");
lh.addStaticHoliday ("15-JUN-2015", "Sacred Heart of Jesus");
lh.addStaticHoliday ("29-JUN-2015", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2015", "Independence Day");
lh.addStaticHoliday ("07-AUG-2015", "National Holiday");
lh.addStaticHoliday ("17-AUG-2015", "Assumption Day");
lh.addStaticHoliday ("12-OCT-2015", "Columbus Day");
lh.addStaticHoliday ("02-NOV-2015", "All Saints Day");
lh.addStaticHoliday ("16-NOV-2015", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2015", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2015", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2016", "New Years Day");
lh.addStaticHoliday ("11-JAN-2016", "Epiphany");
lh.addStaticHoliday ("21-MAR-2016", "St. Josephs Day");
lh.addStaticHoliday ("24-MAR-2016", "Holy Thursday");
lh.addStaticHoliday ("25-MAR-2016", "Good Friday");
lh.addStaticHoliday ("09-MAY-2016", "Ascension Day Observed");
lh.addStaticHoliday ("30-MAY-2016", "Corpus Christi Observed");
lh.addStaticHoliday ("06-JUN-2016", "Sacred Heart of Jesus");
lh.addStaticHoliday ("04-JUL-2016", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2016", "Independence Day");
lh.addStaticHoliday ("15-AUG-2016", "Assumption Day");
lh.addStaticHoliday ("17-OCT-2016", "Columbus Day");
lh.addStaticHoliday ("07-NOV-2016", "All Saints Day");
lh.addStaticHoliday ("14-NOV-2016", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2016", "Immaculate Conception");
lh.addStaticHoliday ("09-JAN-2017", "Epiphany");
lh.addStaticHoliday ("20-MAR-2017", "St. Josephs Day");
lh.addStaticHoliday ("13-APR-2017", "Holy Thursday");
lh.addStaticHoliday ("14-APR-2017", "Good Friday");
lh.addStaticHoliday ("01-MAY-2017", "Labor Day");
lh.addStaticHoliday ("29-MAY-2017", "Ascension Day Observed");
lh.addStaticHoliday ("19-JUN-2017", "Corpus Christi Observed");
lh.addStaticHoliday ("26-JUN-2017", "Sacred Heart of Jesus");
lh.addStaticHoliday ("03-JUL-2017", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2017", "Independence Day");
lh.addStaticHoliday ("07-AUG-2017", "National Holiday");
lh.addStaticHoliday ("21-AUG-2017", "Assumption Day");
lh.addStaticHoliday ("16-OCT-2017", "Columbus Day");
lh.addStaticHoliday ("06-NOV-2017", "All Saints Day");
lh.addStaticHoliday ("13-NOV-2017", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2017", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2017", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2018", "New Years Day");
lh.addStaticHoliday ("08-JAN-2018", "Epiphany");
lh.addStaticHoliday ("19-MAR-2018", "St. Josephs Day");
lh.addStaticHoliday ("29-MAR-2018", "Holy Thursday");
lh.addStaticHoliday ("30-MAR-2018", "Good Friday");
lh.addStaticHoliday ("01-MAY-2018", "Labor Day");
lh.addStaticHoliday ("14-MAY-2018", "Ascension Day Observed");
lh.addStaticHoliday ("04-JUN-2018", "Corpus Christi Observed");
lh.addStaticHoliday ("11-JUN-2018", "Sacred Heart of Jesus");
lh.addStaticHoliday ("02-JUL-2018", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2018", "Independence Day");
lh.addStaticHoliday ("07-AUG-2018", "National Holiday");
lh.addStaticHoliday ("20-AUG-2018", "Assumption Day");
lh.addStaticHoliday ("15-OCT-2018", "Columbus Day");
lh.addStaticHoliday ("05-NOV-2018", "All Saints Day");
lh.addStaticHoliday ("12-NOV-2018", "Independence of Cartagena");
lh.addStaticHoliday ("25-DEC-2018", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2019", "New Years Day");
lh.addStaticHoliday ("07-JAN-2019", "Epiphany");
lh.addStaticHoliday ("25-MAR-2019", "St. Josephs Day");
lh.addStaticHoliday ("18-APR-2019", "Holy Thursday");
lh.addStaticHoliday ("19-APR-2019", "Good Friday");
lh.addStaticHoliday ("01-MAY-2019", "Labor Day");
lh.addStaticHoliday ("03-JUN-2019", "Ascension Day Observed");
lh.addStaticHoliday ("24-JUN-2019", "Corpus Christi Observed");
lh.addStaticHoliday ("01-JUL-2019", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("07-AUG-2019", "National Holiday");
lh.addStaticHoliday ("19-AUG-2019", "Assumption Day");
lh.addStaticHoliday ("14-OCT-2019", "Columbus Day");
lh.addStaticHoliday ("04-NOV-2019", "All Saints Day");
lh.addStaticHoliday ("11-NOV-2019", "Independence of Cartagena");
lh.addStaticHoliday ("25-DEC-2019", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2020", "New Years Day");
lh.addStaticHoliday ("06-JAN-2020", "Epiphany");
lh.addStaticHoliday ("23-MAR-2020", "St. Josephs Day");
lh.addStaticHoliday ("09-APR-2020", "Holy Thursday");
lh.addStaticHoliday ("10-APR-2020", "Good Friday");
lh.addStaticHoliday ("01-MAY-2020", "Labor Day");
lh.addStaticHoliday ("25-MAY-2020", "Ascension Day Observed");
lh.addStaticHoliday ("15-JUN-2020", "Corpus Christi Observed");
lh.addStaticHoliday ("22-JUN-2020", "Sacred Heart of Jesus");
lh.addStaticHoliday ("29-JUN-2020", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2020", "Independence Day");
lh.addStaticHoliday ("07-AUG-2020", "National Holiday");
lh.addStaticHoliday ("17-AUG-2020", "Assumption Day");
lh.addStaticHoliday ("12-OCT-2020", "Columbus Day");
lh.addStaticHoliday ("02-NOV-2020", "All Saints Day");
lh.addStaticHoliday ("16-NOV-2020", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2020", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2020", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2021", "New Years Day");
lh.addStaticHoliday ("11-JAN-2021", "Epiphany");
lh.addStaticHoliday ("22-MAR-2021", "St. Josephs Day");
lh.addStaticHoliday ("01-APR-2021", "Holy Thursday");
lh.addStaticHoliday ("02-APR-2021", "Good Friday");
lh.addStaticHoliday ("17-MAY-2021", "Ascension Day Observed");
lh.addStaticHoliday ("07-JUN-2021", "Corpus Christi Observed");
lh.addStaticHoliday ("14-JUN-2021", "Sacred Heart of Jesus");
lh.addStaticHoliday ("05-JUL-2021", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2021", "Independence Day");
lh.addStaticHoliday ("16-AUG-2021", "Assumption Day");
lh.addStaticHoliday ("18-OCT-2021", "Columbus Day");
lh.addStaticHoliday ("01-NOV-2021", "All Saints Day");
lh.addStaticHoliday ("15-NOV-2021", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2021", "Immaculate Conception");
lh.addStaticHoliday ("10-JAN-2022", "Epiphany");
lh.addStaticHoliday ("21-MAR-2022", "St. Josephs Day");
lh.addStaticHoliday ("14-APR-2022", "Holy Thursday");
lh.addStaticHoliday ("15-APR-2022", "Good Friday");
lh.addStaticHoliday ("30-MAY-2022", "Ascension Day Observed");
lh.addStaticHoliday ("20-JUN-2022", "Corpus Christi Observed");
lh.addStaticHoliday ("27-JUN-2022", "Sacred Heart of Jesus");
lh.addStaticHoliday ("04-JUL-2022", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2022", "Independence Day");
lh.addStaticHoliday ("15-AUG-2022", "Assumption Day");
lh.addStaticHoliday ("17-OCT-2022", "Columbus Day");
lh.addStaticHoliday ("07-NOV-2022", "All Saints Day");
lh.addStaticHoliday ("14-NOV-2022", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2022", "Immaculate Conception");
lh.addStaticHoliday ("09-JAN-2023", "Epiphany");
lh.addStaticHoliday ("20-MAR-2023", "St. Josephs Day");
lh.addStaticHoliday ("06-APR-2023", "Holy Thursday");
lh.addStaticHoliday ("07-APR-2023", "Good Friday");
lh.addStaticHoliday ("01-MAY-2023", "Labor Day");
lh.addStaticHoliday ("22-MAY-2023", "Ascension Day Observed");
lh.addStaticHoliday ("12-JUN-2023", "Corpus Christi Observed");
lh.addStaticHoliday ("19-JUN-2023", "Sacred Heart of Jesus");
lh.addStaticHoliday ("03-JUL-2023", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2023", "Independence Day");
lh.addStaticHoliday ("07-AUG-2023", "National Holiday");
lh.addStaticHoliday ("21-AUG-2023", "Assumption Day");
lh.addStaticHoliday ("16-OCT-2023", "Columbus Day");
lh.addStaticHoliday ("06-NOV-2023", "All Saints Day");
lh.addStaticHoliday ("13-NOV-2023", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2023", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2023", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2024", "New Years Day");
lh.addStaticHoliday ("08-JAN-2024", "Epiphany");
lh.addStaticHoliday ("25-MAR-2024", "St. Josephs Day");
lh.addStaticHoliday ("28-MAR-2024", "Holy Thursday");
lh.addStaticHoliday ("29-MAR-2024", "Good Friday");
lh.addStaticHoliday ("01-MAY-2024", "Labor Day");
lh.addStaticHoliday ("13-MAY-2024", "Ascension Day Observed");
lh.addStaticHoliday ("03-JUN-2024", "Corpus Christi Observed");
lh.addStaticHoliday ("10-JUN-2024", "Sacred Heart of Jesus");
lh.addStaticHoliday ("01-JUL-2024", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("07-AUG-2024", "National Holiday");
lh.addStaticHoliday ("19-AUG-2024", "Assumption Day");
lh.addStaticHoliday ("14-OCT-2024", "Columbus Day");
lh.addStaticHoliday ("04-NOV-2024", "All Saints Day");
lh.addStaticHoliday ("11-NOV-2024", "Independence of Cartagena");
lh.addStaticHoliday ("25-DEC-2024", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2025", "New Years Day");
lh.addStaticHoliday ("06-JAN-2025", "Epiphany");
lh.addStaticHoliday ("24-MAR-2025", "St. Josephs Day");
lh.addStaticHoliday ("17-APR-2025", "Holy Thursday");
lh.addStaticHoliday ("18-APR-2025", "Good Friday");
lh.addStaticHoliday ("01-MAY-2025", "Labor Day");
lh.addStaticHoliday ("02-JUN-2025", "Ascension Day Observed");
lh.addStaticHoliday ("23-JUN-2025", "Corpus Christi Observed");
lh.addStaticHoliday ("30-JUN-2025", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("07-AUG-2025", "National Holiday");
lh.addStaticHoliday ("18-AUG-2025", "Assumption Day");
lh.addStaticHoliday ("13-OCT-2025", "Columbus Day");
lh.addStaticHoliday ("03-NOV-2025", "All Saints Day");
lh.addStaticHoliday ("17-NOV-2025", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2025", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2025", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2026", "New Years Day");
lh.addStaticHoliday ("12-JAN-2026", "Epiphany");
lh.addStaticHoliday ("23-MAR-2026", "St. Josephs Day");
lh.addStaticHoliday ("02-APR-2026", "Holy Thursday");
lh.addStaticHoliday ("03-APR-2026", "Good Friday");
lh.addStaticHoliday ("01-MAY-2026", "Labor Day");
lh.addStaticHoliday ("18-MAY-2026", "Ascension Day Observed");
lh.addStaticHoliday ("08-JUN-2026", "Corpus Christi Observed");
lh.addStaticHoliday ("15-JUN-2026", "Sacred Heart of Jesus");
lh.addStaticHoliday ("29-JUN-2026", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2026", "Independence Day");
lh.addStaticHoliday ("07-AUG-2026", "National Holiday");
lh.addStaticHoliday ("17-AUG-2026", "Assumption Day");
lh.addStaticHoliday ("12-OCT-2026", "Columbus Day");
lh.addStaticHoliday ("02-NOV-2026", "All Saints Day");
lh.addStaticHoliday ("16-NOV-2026", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2026", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2026", "Christmas Day");
lh.addStaticHoliday ("01-JAN-2027", "New Years Day");
lh.addStaticHoliday ("11-JAN-2027", "Epiphany");
lh.addStaticHoliday ("22-MAR-2027", "St. Josephs Day");
lh.addStaticHoliday ("25-MAR-2027", "Holy Thursday");
lh.addStaticHoliday ("26-MAR-2027", "Good Friday");
lh.addStaticHoliday ("10-MAY-2027", "Ascension Day Observed");
lh.addStaticHoliday ("31-MAY-2027", "Corpus Christi Observed");
lh.addStaticHoliday ("07-JUN-2027", "Sacred Heart of Jesus");
lh.addStaticHoliday ("05-JUL-2027", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2027", "Independence Day");
lh.addStaticHoliday ("16-AUG-2027", "Assumption Day");
lh.addStaticHoliday ("18-OCT-2027", "Columbus Day");
lh.addStaticHoliday ("01-NOV-2027", "All Saints Day");
lh.addStaticHoliday ("15-NOV-2027", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2027", "Immaculate Conception");
lh.addStaticHoliday ("10-JAN-2028", "Epiphany");
lh.addStaticHoliday ("20-MAR-2028", "St. Josephs Day");
lh.addStaticHoliday ("13-APR-2028", "Holy Thursday");
lh.addStaticHoliday ("14-APR-2028", "Good Friday");
lh.addStaticHoliday ("01-MAY-2028", "Labor Day");
lh.addStaticHoliday ("29-MAY-2028", "Ascension Day Observed");
lh.addStaticHoliday ("19-JUN-2028", "Corpus Christi Observed");
lh.addStaticHoliday ("26-JUN-2028", "Sacred Heart of Jesus");
lh.addStaticHoliday ("03-JUL-2028", "Sts. Peter and Paul Day");
lh.addStaticHoliday ("20-JUL-2028", "Independence Day");
lh.addStaticHoliday ("07-AUG-2028", "National Holiday");
lh.addStaticHoliday ("21-AUG-2028", "Assumption Day");
lh.addStaticHoliday ("16-OCT-2028", "Columbus Day");
lh.addStaticHoliday ("06-NOV-2028", "All Saints Day");
lh.addStaticHoliday ("13-NOV-2028", "Independence of Cartagena");
lh.addStaticHoliday ("08-DEC-2028", "Immaculate Conception");
lh.addStaticHoliday ("25-DEC-2028", "Christmas Day");
lh.addStandardWeekend();
return lh;
}
}
|
|
/*
* Copyright (C) 2016 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okhttp3.internal.cache2;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import okio.Buffer;
import okio.BufferedSink;
import okio.BufferedSource;
import okio.ByteString;
import okio.Okio;
import okio.Pipe;
import okio.Source;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Exercises {@code Relay}, which replays a single upstream source to multiple
 * concurrent readers while writing the stream and its metadata through to a
 * cache file.
 *
 * <p>NOTE(review): several tests are order-sensitive — source close order
 * drives the relay's reference counting, and {@link #racingReaders} relies on
 * real sleeps to interleave writes with reads — so statement order here is
 * load-bearing.
 */
@Tag("Slowish")
public final class RelayTest {
  // Per-test scratch directory supplied by JUnit; the cache file lives inside it.
  @TempDir File tempDir;
  private final ExecutorService executor = Executors.newCachedThreadPool();
  // Metadata persisted alongside the upstream bytes in the cache file.
  private final ByteString metadata = ByteString.encodeUtf8("great metadata!");
  private File file;

  @BeforeEach
  void setUp() {
    file = new File(tempDir, "test");
  }

  // NOTE(review): shutdown() is fire-and-forget; consider awaitTermination if
  // stray reader threads ever leak across tests.
  @AfterEach public void tearDown() throws Exception {
    executor.shutdown();
  }

  /** A single reader streams the whole upstream through the relay. */
  @Test public void singleSource() throws Exception {
    Buffer upstream = new Buffer();
    upstream.writeUtf8("abcdefghijklm");
    Relay relay = Relay.Companion.edit(file, upstream, metadata, 1024);
    Source source = relay.newSource();
    Buffer sourceBuffer = new Buffer();
    assertThat(source.read(sourceBuffer, 5)).isEqualTo(5);
    assertThat(sourceBuffer.readUtf8()).isEqualTo("abcde");
    assertThat(source.read(sourceBuffer, 1024)).isEqualTo(8);
    assertThat(sourceBuffer.readUtf8()).isEqualTo("fghijklm");
    // Exhausted: read reports end-of-stream and delivers no bytes.
    assertThat(source.read(sourceBuffer, 1024)).isEqualTo(-1);
    assertThat(sourceBuffer.size()).isEqualTo(0);
    source.close();
    assertThat(relay.isClosed()).isTrue();
    assertFile(Relay.PREFIX_CLEAN, 13L, metadata.size(), "abcdefghijklm", metadata);
  }

  /** Two readers each observe the complete stream independently. */
  @Test public void multipleSources() throws Exception {
    Buffer upstream = new Buffer();
    upstream.writeUtf8("abcdefghijklm");
    Relay relay = Relay.Companion.edit(file, upstream, metadata, 1024);
    BufferedSource source1 = Okio.buffer(relay.newSource());
    BufferedSource source2 = Okio.buffer(relay.newSource());
    assertThat(source1.readUtf8()).isEqualTo("abcdefghijklm");
    assertThat(source2.readUtf8()).isEqualTo("abcdefghijklm");
    source1.close();
    source2.close();
    assertThat(relay.isClosed()).isTrue();
    assertFile(Relay.PREFIX_CLEAN, 13L, metadata.size(), "abcdefghijklm", metadata);
  }

  /**
   * Interleaved reads where each read (5 bytes) fits within the relay's buffer
   * limit (5) — presumably served from the in-memory buffer rather than the
   * file; TODO confirm against Relay internals.
   */
  @Test public void readFromBuffer() throws Exception {
    Buffer upstream = new Buffer();
    upstream.writeUtf8("abcdefghij");
    Relay relay = Relay.Companion.edit(file, upstream, metadata, 5);
    BufferedSource source1 = Okio.buffer(relay.newSource());
    BufferedSource source2 = Okio.buffer(relay.newSource());
    assertThat(source1.readUtf8(5)).isEqualTo("abcde");
    assertThat(source2.readUtf8(5)).isEqualTo("abcde");
    assertThat(source2.readUtf8(5)).isEqualTo("fghij");
    assertThat(source1.readUtf8(5)).isEqualTo("fghij");
    assertThat(source1.exhausted()).isTrue();
    assertThat(source2.exhausted()).isTrue();
    source1.close();
    source2.close();
    assertThat(relay.isClosed()).isTrue();
    assertFile(Relay.PREFIX_CLEAN, 10L, metadata.size(), "abcdefghij", metadata);
  }

  /**
   * Interleaved reads (10 bytes) larger than the buffer limit (5) — the lagging
   * reader presumably falls back to the cache file; TODO confirm against Relay
   * internals.
   */
  @Test public void readFromFile() throws Exception {
    Buffer upstream = new Buffer();
    upstream.writeUtf8("abcdefghijklmnopqrst");
    Relay relay = Relay.Companion.edit(file, upstream, metadata, 5);
    BufferedSource source1 = Okio.buffer(relay.newSource());
    BufferedSource source2 = Okio.buffer(relay.newSource());
    assertThat(source1.readUtf8(10)).isEqualTo("abcdefghij");
    assertThat(source2.readUtf8(10)).isEqualTo("abcdefghij");
    assertThat(source2.readUtf8(10)).isEqualTo("klmnopqrst");
    assertThat(source1.readUtf8(10)).isEqualTo("klmnopqrst");
    assertThat(source1.exhausted()).isTrue();
    assertThat(source2.exhausted()).isTrue();
    source1.close();
    source2.close();
    assertThat(relay.isClosed()).isTrue();
    assertFile(Relay.PREFIX_CLEAN, 20L, metadata.size(), "abcdefghijklmnopqrst", metadata);
  }

  /** A fully-written cache file can be reopened read-only via Relay.read. */
  @Test public void readAfterEdit() throws Exception {
    Buffer upstream = new Buffer();
    upstream.writeUtf8("abcdefghij");
    Relay relay1 = Relay.Companion.edit(file, upstream, metadata, 5);
    BufferedSource source1 = Okio.buffer(relay1.newSource());
    assertThat(source1.readUtf8(10)).isEqualTo("abcdefghij");
    assertThat(source1.exhausted()).isTrue();
    source1.close();
    assertThat(relay1.isClosed()).isTrue();
    // Since relay1 is closed, new sources cannot be created.
    assertThat(relay1.newSource()).isNull();
    Relay relay2 = Relay.Companion.read(file);
    assertThat(relay2.metadata()).isEqualTo(metadata);
    BufferedSource source2 = Okio.buffer(relay2.newSource());
    assertThat(source2.readUtf8(10)).isEqualTo("abcdefghij");
    assertThat(source2.exhausted()).isTrue();
    source2.close();
    assertThat(relay2.isClosed()).isTrue();
    // Since relay2 is closed, new sources cannot be created.
    assertThat(relay2.newSource()).isNull();
    assertFile(Relay.PREFIX_CLEAN, 10L, metadata.size(), "abcdefghij", metadata);
  }

  /**
   * Closing the last source before the upstream is exhausted leaves the file
   * with the dirty prefix, and reopening it fails with "unreadable cache file".
   */
  @Test public void closeBeforeExhaustLeavesDirtyFile() throws Exception {
    Buffer upstream = new Buffer();
    upstream.writeUtf8("abcdefghij");
    Relay relay1 = Relay.Companion.edit(file, upstream, metadata, 5);
    BufferedSource source1 = Okio.buffer(relay1.newSource());
    assertThat(source1.readUtf8(10)).isEqualTo("abcdefghij");
    source1.close(); // Not exhausted!
    assertThat(relay1.isClosed()).isTrue();
    try {
      Relay.Companion.read(file);
      fail();
    } catch (IOException expected) {
      assertThat(expected.getMessage()).isEqualTo("unreadable cache file");
    }
    assertFile(Relay.PREFIX_DIRTY, -1L, -1, null, null);
  }

  /** Double-closing one source must not release the other source's reference. */
  @Test public void redundantCallsToCloseAreIgnored() throws Exception {
    Buffer upstream = new Buffer();
    upstream.writeUtf8("abcde");
    Relay relay = Relay.Companion.edit(file, upstream, metadata, 1024);
    Source source1 = relay.newSource();
    Source source2 = relay.newSource();
    source1.close();
    source1.close(); // Unnecessary. Shouldn't decrement the reference count.
    assertThat(relay.isClosed()).isFalse();
    source2.close();
    assertThat(relay.isClosed()).isTrue();
    assertFile(Relay.PREFIX_DIRTY, -1L, -1, null, null);
  }

  /**
   * Upstream arrives in two timed bursts while two background readers race;
   * both must still observe the full stream. Timing-sensitive: the sleeps give
   * the readers a chance to block on an empty pipe between bursts.
   */
  @Test public void racingReaders() throws Exception {
    Pipe pipe = new Pipe(1024);
    BufferedSink sink = Okio.buffer(pipe.sink());
    Relay relay = Relay.Companion.edit(file, pipe.source(), metadata, 5);
    Future<ByteString> future1 = executor.submit(sourceReader(relay.newSource()));
    Future<ByteString> future2 = executor.submit(sourceReader(relay.newSource()));
    Thread.sleep(500);
    sink.writeUtf8("abcdefghij");
    Thread.sleep(500);
    sink.writeUtf8("klmnopqrst");
    sink.close();
    assertThat(future1.get()).isEqualTo(ByteString.encodeUtf8("abcdefghijklmnopqrst"));
    assertThat(future2.get()).isEqualTo(ByteString.encodeUtf8("abcdefghijklmnopqrst"));
    assertThat(relay.isClosed()).isTrue();
    assertFile(Relay.PREFIX_CLEAN, 20L, metadata.size(), "abcdefghijklmnopqrst", metadata);
  }

  /** Returns a callable that reads all of source, closes it, and returns the bytes. */
  private Callable<ByteString> sourceReader(final Source source) {
    return () -> {
      Buffer buffer = new Buffer();
      while (source.read(buffer, 16384) != -1) {
      }
      source.close();
      return buffer.readByteString();
    };
  }

  /**
   * Asserts the on-disk layout of the cache file: prefix, upstream size (long),
   * metadata size (long), then — when non-null expectations are supplied — the
   * upstream bytes followed by the metadata bytes. Pass {@code null} for
   * {@code upstream}/{@code metadata} to skip those trailing checks.
   */
  private void assertFile(ByteString prefix, long upstreamSize, int metadataSize, String upstream,
      ByteString metadata) throws IOException {
    BufferedSource source = Okio.buffer(Okio.source(file));
    assertThat(source.readByteString(prefix.size())).isEqualTo(prefix);
    assertThat(source.readLong()).isEqualTo(upstreamSize);
    assertThat(source.readLong()).isEqualTo(metadataSize);
    if (upstream != null) {
      assertThat(source.readUtf8(upstreamSize)).isEqualTo(upstream);
    }
    if (metadata != null) {
      assertThat(source.readByteString(metadataSize)).isEqualTo(metadata);
    }
    source.close();
  }
}
|
|
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.interactions;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.junit.Assume.assumeFalse;
import static org.openqa.selenium.testing.drivers.Browser.CHROME;
import static org.openqa.selenium.testing.drivers.Browser.EDGE;
import static org.openqa.selenium.testing.drivers.Browser.HTMLUNIT;
import static org.openqa.selenium.testing.drivers.Browser.IE;
import static org.openqa.selenium.testing.drivers.Browser.MARIONETTE;
import static org.openqa.selenium.testing.drivers.Browser.SAFARI;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.Platform;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.Color;
import org.openqa.selenium.support.Colors;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.NotYetImplemented;
import org.openqa.selenium.testing.TestUtilities;
/**
 * Tests interaction through the advanced gestures API of keyboard handling:
 * the {@code Actions} builder's keyDown/keyUp/sendKeys chains against live
 * test pages.
 *
 * <p>NOTE(review): assertions compare exact event logs produced by the test
 * pages, so the order of chained actions is load-bearing — do not reorder.
 */
public class BasicKeyboardInterfaceTest extends JUnit4TestBase {

  /** Convenience factory for a fresh action builder bound to the given driver. */
  private Actions getBuilder(WebDriver driver) {
    return new Actions(driver);
  }

  /** Plain text typed via the builder ends up in the input's value. */
  @Test
  public void testBasicKeyboardInput() {
    driver.get(appServer.whereIs("single_text_input.html"));
    WebElement input = driver.findElement(By.id("textInput"));
    Action sendLowercase = getBuilder(driver).sendKeys(input, "abc def").build();
    sendLowercase.perform();
    shortWait.until(ExpectedConditions.attributeToBe(input, "value", "abc def"));
  }

  /** keyDown alone fires only a keydown event (no keyup until released). */
  @Test
  @NotYetImplemented(SAFARI)
  public void testSendingKeyDownOnly() {
    driver.get(appServer.whereIs("key_logger.html"));
    WebElement keysEventInput = driver.findElement(By.id("theworks"));
    Action pressShift = getBuilder(driver).keyDown(keysEventInput, Keys.SHIFT).build();
    pressShift.perform();
    WebElement keyLoggingElement = driver.findElement(By.id("result"));
    // Capture the log before releasing SHIFT so only the keydown is asserted.
    String logText = keyLoggingElement.getText();
    Action releaseShift = getBuilder(driver).keyUp(keysEventInput, Keys.SHIFT).build();
    releaseShift.perform();
    assertThat(logText).describedAs("Key down event should be isolated").endsWith("keydown");
  }

  /** keyUp after keyDown appends a keyup event to the log. */
  @Test
  @NotYetImplemented(SAFARI)
  public void testSendingKeyUp() {
    driver.get(appServer.whereIs("key_logger.html"));
    WebElement keysEventInput = driver.findElement(By.id("theworks"));
    Action pressShift = getBuilder(driver).keyDown(keysEventInput, Keys.SHIFT).build();
    pressShift.perform();
    WebElement keyLoggingElement = driver.findElement(By.id("result"));
    String eventsText = keyLoggingElement.getText();
    assertThat(eventsText).describedAs("Key down should be isolated for this test to be meaningful").endsWith("keydown");
    Action releaseShift = getBuilder(driver).keyUp(keysEventInput, Keys.SHIFT).build();
    releaseShift.perform();
    eventsText = keyLoggingElement.getText();
    assertThat(eventsText).describedAs("Key up should be isolated for this test to be meaningful").endsWith("keyup");
  }

  /** Holding SHIFT while typing "ab" produces uppercase "AB" and the full event sequence. */
  @Test
  @NotYetImplemented(SAFARI)
  @NotYetImplemented(EDGE)
  public void testSendingKeysWithShiftPressed() {
    driver.get(pages.javascriptPage);
    WebElement keysEventInput = driver.findElement(By.id("theworks"));
    keysEventInput.click();
    String existingResult = getFormEvents();
    Action pressShift = getBuilder(driver).keyDown(keysEventInput, Keys.SHIFT).build();
    pressShift.perform();
    Action sendLowercase = getBuilder(driver).sendKeys(keysEventInput, "ab").build();
    sendLowercase.perform();
    Action releaseShift = getBuilder(driver).keyUp(keysEventInput, Keys.SHIFT).build();
    releaseShift.perform();
    // Leading space: these events are appended to the pre-existing log text.
    String expectedEvents = " keydown keydown keypress keyup keydown keypress keyup keyup";
    assertThatFormEventsFiredAreExactly("Shift key not held", existingResult + expectedEvents);
    assertThat(keysEventInput.getAttribute("value")).isEqualTo("AB");
  }

  /** Keys sent with no target element go to the active element (the body here). */
  @Test
  @NotYetImplemented(value = SAFARI, reason = "getText does not normalize spaces")
  public void testSendingKeysToActiveElement() {
    driver.get(pages.bodyTypingPage);
    Action someKeys = getBuilder(driver).sendKeys("ab").build();
    someKeys.perform();
    assertThatBodyEventsFiredAreExactly("keypress keypress");
    // The form must see nothing: the body, not the form input, was active.
    assertThatFormEventsFiredAreExactly("");
  }

  /** After clicking an input, targetless sendKeys types into it. */
  @Test
  public void testBasicKeyboardInputOnActiveElement() {
    driver.get(pages.javascriptPage);
    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.click();
    Action sendLowercase = getBuilder(driver).sendKeys("abc def").build();
    sendLowercase.perform();
    shortWait.until(ExpectedConditions.attributeToBe(keyReporter, "value", "abc def"));
  }

  /** sendKeys() with no arguments is rejected. */
  @Test
  @NotYetImplemented(HTMLUNIT)
  public void testThrowsIllegalArgumentExceptionWithNoParameters() {
    driver.get(pages.javascriptPage);
    assertThatExceptionOfType(IllegalArgumentException.class)
        .isThrownBy(() -> driver.findElement(By.id("keyReporter")).sendKeys());
  }

  /** sendKeys(null) is rejected. */
  @Test
  @NotYetImplemented(HTMLUNIT)
  public void testThrowsIllegalArgumentExceptionWithNullParameter() {
    driver.get(pages.javascriptPage);
    assertThatExceptionOfType(IllegalArgumentException.class)
        .isThrownBy(() -> driver.findElement(By.id("keyReporter")).sendKeys((CharSequence) null));
  }

  /** A null mixed among valid CharSequence arguments is rejected. */
  @Test
  @NotYetImplemented(HTMLUNIT)
  public void testThrowsIllegalArgumentExceptionWithNullInParameters() {
    driver.get(pages.javascriptPage);
    assertThatExceptionOfType(IllegalArgumentException.class)
        .isThrownBy(() -> driver.findElement(By.id("keyReporter")).sendKeys("x", null, "y"));
  }

  // NOTE(review): this test body duplicates
  // testThrowsIllegalArgumentExceptionWithNullInParameters; its name suggests it
  // was meant to pass a CharSequence whose content contains null — consider
  // fixing the fixture or removing one of the two.
  @Test
  @NotYetImplemented(HTMLUNIT)
  public void testThrowsIllegalArgumentExceptionWithCharSequenceThatContainsNull() {
    driver.get(pages.javascriptPage);
    assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(
        () -> driver.findElement(By.id("keyReporter")).sendKeys("x", null, "y"));
  }

  /** A varargs array containing only null is rejected. */
  @Test
  @NotYetImplemented(HTMLUNIT)
  public void testThrowsIllegalArgumentExceptionWithCharSequenceThatContainsNullOnly() {
    driver.get(pages.javascriptPage);
    assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(
        () -> driver.findElement(By.id("keyReporter")).sendKeys(new CharSequence[]{null}));
  }

  /** Modifier+key shortcuts change the page background; each combo maps to a color. */
  @Test
  public void canGenerateKeyboardShortcuts() {
    driver.get(appServer.whereIs("keyboard_shortcut.html"));
    WebElement body = driver.findElement(By.xpath("//body"));
    assertBackgroundColor(body, Colors.WHITE);
    new Actions(driver).keyDown(Keys.SHIFT).sendKeys("1").keyUp(Keys.SHIFT).perform();
    assertBackgroundColor(body, Colors.GREEN);
    new Actions(driver).keyDown(Keys.ALT).sendKeys("1").keyUp(Keys.ALT).perform();
    assertBackgroundColor(body, Colors.LIGHTBLUE);
    new Actions(driver)
        .keyDown(Keys.SHIFT).keyDown(Keys.ALT)
        .sendKeys("1")
        .keyUp(Keys.SHIFT).keyUp(Keys.ALT)
        .perform();
    assertBackgroundColor(body, Colors.SILVER);
  }

  /** SHIFT+LEFT selects the last two chars; DELETE removes the selection. */
  @Test
  @NotYetImplemented(value = MARIONETTE, reason = "https://bugzilla.mozilla.org/show_bug.cgi?id=1422583")
  @NotYetImplemented(CHROME)
  public void testSelectionSelectBySymbol() {
    driver.get(appServer.whereIs("single_text_input.html"));
    WebElement input = driver.findElement(By.id("textInput"));
    getBuilder(driver).click(input).sendKeys("abc def").perform();
    shortWait.until(ExpectedConditions.attributeToBe(input, "value", "abc def"));
    getBuilder(driver).click(input)
        .keyDown(Keys.SHIFT)
        .sendKeys(Keys.LEFT)
        .sendKeys(Keys.LEFT)
        .keyUp(Keys.SHIFT)
        .sendKeys(Keys.DELETE)
        .perform();
    assertThat(input.getAttribute("value")).isEqualTo("abc d");
  }

  /** SHIFT+CTRL+LEFT selects the last word; DELETE removes it (non-Mac keymap). */
  @Test
  @Ignore(IE)
  @NotYetImplemented(value = MARIONETTE, reason = "https://bugzilla.mozilla.org/show_bug.cgi?id=1422583")
  @NotYetImplemented(CHROME)
  public void testSelectionSelectByWord() {
    assumeFalse(
        "MacOS has alternative keyboard",
        TestUtilities.getEffectivePlatform().is(Platform.MAC));
    driver.get(appServer.whereIs("single_text_input.html"));
    WebElement input = driver.findElement(By.id("textInput"));
    getBuilder(driver).click(input).sendKeys("abc def").perform();
    wait.until(ExpectedConditions.attributeToBe(input, "value", "abc def"));
    getBuilder(driver).click(input)
        .keyDown(Keys.SHIFT)
        .keyDown(Keys.CONTROL)
        .sendKeys(Keys.LEFT)
        .keyUp(Keys.CONTROL)
        .keyUp(Keys.SHIFT)
        .sendKeys(Keys.DELETE)
        .perform();
    wait.until(ExpectedConditions.attributeToBe(input, "value", "abc "));
  }

  /** CTRL+A then DELETE clears the whole field (non-Mac keymap). */
  @Test
  public void testSelectionSelectAll() {
    assumeFalse(
        "MacOS has alternative keyboard",
        TestUtilities.getEffectivePlatform().is(Platform.MAC));
    driver.get(appServer.whereIs("single_text_input.html"));
    WebElement input = driver.findElement(By.id("textInput"));
    getBuilder(driver).click(input).sendKeys("abc def").perform();
    shortWait.until(ExpectedConditions.attributeToBe(input, "value", "abc def"));
    getBuilder(driver).click(input)
        .keyDown(Keys.CONTROL)
        .sendKeys("a")
        .keyUp(Keys.CONTROL)
        .sendKeys(Keys.DELETE)
        .perform();
    assertThat(input.getAttribute("value")).isEqualTo("");
  }

  /** Asserts the element's computed background-color equals the expected color. */
  private void assertBackgroundColor(WebElement el, Colors expected) {
    Color actual = Color.fromString(el.getCssValue("background-color"));
    assertThat(actual).isEqualTo(expected.getColorValue());
  }

  /** Compares the trimmed form-event log against the trimmed expectation. */
  private void assertThatFormEventsFiredAreExactly(String message, String expected) {
    assertThat(getFormEvents()).describedAs(message).isEqualTo(expected.trim());
  }

  /** Reads the trimmed text of the form's event-log element. */
  private String getFormEvents() {
    return driver.findElement(By.id("result")).getText().trim();
  }

  private void assertThatFormEventsFiredAreExactly(String expected) {
    assertThatFormEventsFiredAreExactly("", expected);
  }

  /** Compares the trimmed body event log against the trimmed expectation. */
  private void assertThatBodyEventsFiredAreExactly(String expected) {
    assertThat(driver.findElement(By.id("body_result")).getText().trim()).isEqualTo(expected.trim());
  }
}
|
|
/*
* Copyright 1997-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avalon.framework;
import java.io.Serializable;
import java.util.StringTokenizer;
/**
* This class is used to hold version information pertaining to a Component or interface.
* <p />
*
* The version number of a <code>Component</code> is made up of three
* dot-separated fields:
* <p />
* "<b>major.minor.micro</b>"
* <p />
* The <b>major</b>, <b>minor</b> and <b>micro</b> fields are
* <i>integer</i> numbers represented in decimal notation and have the
* following meaning:
* <ul>
*
* <p /><li><b>major</b> - When the major version changes (in ex. from
* "1.5.12" to "2.0.0"), then backward compatibility
* with previous releases is not granted.</li><p />
*
* <p /><li><b>minor</b> - When the minor version changes (in ex. from
* "1.5.12" to "1.6.0"), then backward compatibility
* with previous releases is granted, but something changed in the
* implementation of the Component. (ie it methods could have been added)</li><p />
*
* <p /><li><b>micro</b> - When the micro version changes (in ex.
* from "1.5.12" to "1.5.13"), then the the changes are
* small forward compatible bug fixes or documentation modifications etc.
* </li>
* </ul>
*
* @author <a href="mailto:[email protected]">Avalon Development Team</a>
* @version CVS $Revision: 1.33 $ $Date: 2004/02/11 14:34:24 $
*/
public final class Version
implements Comparable, Serializable
{
private int m_major;
private int m_minor;
private int m_micro;
/**
 * Parse a version out of a string.
 * The version string format is &lt;major&gt;.&lt;minor&gt;.&lt;micro&gt; where
 * both minor and micro are optional and default to 0 when absent.
 *
 * @param version The input version string
 * @return the new Version object
 * @throws NumberFormatException if a field is not a decimal integer
 * @throws IllegalArgumentException if the string contains no version fields
 * @throws NullPointerException if the provided string is <code>null</code>
 * @since 4.1
 */
public static Version getVersion( final String version )
    throws NumberFormatException, IllegalArgumentException
{
    if( version == null )
        throw new NullPointerException( "version" );

    // Split on dots; StringTokenizer discards empty fields, so "1..2" is
    // treated the same as "1.2".
    final StringTokenizer tokenizer = new StringTokenizer( version, "." );
    final String[] levels = new String[ tokenizer.countTokens() ];
    for( int i = 0; i < levels.length; i++ )
    {
        levels[ i ] = tokenizer.nextToken();
    }

    // Fix: the contract documents IllegalArgumentException, but the previous
    // implementation silently returned the bogus sentinel Version(-1, 0, 0)
    // for an empty version string. Reject zero-field input explicitly.
    if( 0 == levels.length )
    {
        throw new IllegalArgumentException(
            "Version string contains no fields: '" + version + "'" );
    }

    final int major = Integer.parseInt( levels[ 0 ] );

    int minor = 0;
    if( 1 < levels.length )
    {
        minor = Integer.parseInt( levels[ 1 ] );
    }

    int micro = 0;
    if( 2 < levels.length )
    {
        micro = Integer.parseInt( levels[ 2 ] );
    }

    return new Version( major, minor, micro );
}
/**
* Create a new instance of a <code>Version</code> object with the
* specified version numbers.
*
* @param major This <code>Version</code> major number.
* @param minor This <code>Version</code> minor number.
* @param micro This <code>Version</code> micro number.
*/
public Version( final int major, final int minor, final int micro )
{
m_major = major;
m_minor = minor;
m_micro = micro;
}
/**
* Retrieve major component of version.
*
* @return the major component of version
* @since 4.1
*/
public int getMajor()
{
return m_major;
}
/**
* Retrieve minor component of version.
*
* @return the minor component of version
* @since 4.1
*/
public int getMinor()
{
return m_minor;
}
/**
* Retrieve micro component of version.
*
* @return the micro component of version.
* @since 4.1
*/
public int getMicro()
{
return m_micro;
}
/**
* Check this <code>Version</code> against another for equality.
* <p />
* If this <code>Version</code> is compatible with the specified one, then
* <b>true</b> is returned, otherwise <b>false</b>.
*
* @param other The other <code>Version</code> object to be compared with this
* for equality.
* @return <b>true</b> if this <code>Version</code> is compatible with the specified one
* @since 4.1
*/
public boolean equals( final Version other )
{
if( other == null )
return false;
boolean isEqual = ( getMajor() == other.getMajor() );
if ( isEqual )
{
isEqual = ( getMinor() == other.getMinor() );
}
if ( isEqual )
{
isEqual = ( getMicro() == other.getMicro() );
}
return isEqual;
}
/**
* Indicates whether some other object is "equal to" this <code>Version</code>.
* Returns <b>true</b> if the other object is an instance of <code>Version</code>
* and has the same major, minor, and micro components.
*
* @param other an <code>Object</code> value
* @return <b>true</b> if the other object is equal to this <code>Version</code>
*/
public boolean equals( final Object other )
{
boolean isEqual = false;
if( other instanceof Version )
{
isEqual = equals( (Version)other );
}
return isEqual;
}
/**
* Add a hashing function to ensure the Version object is
* treated as expected in hashmaps and sets. NOTE: any
* time the equals() is overridden, hashCode() should also
* be overridden.
*
* @return the hashCode
*/
public int hashCode()
{
int hash = getMajor();
hash >>>= 17;
hash += getMinor();
hash >>>= 17;
hash += getMicro();
return hash;
}
/**
* Check this <code>Version</code> against another for compliancy
* (compatibility).
* <p />
* If this <code>Version</code> is compatible with the specified one, then
* <b>true</b> is returned, otherwise <b>false</b>. Be careful when using
* this method since, in example, version 1.3.7 is compliant to version
* 1.3.6, while the opposite is not.
* <p />
* The following example displays the expected behaviour and results of version.
* <pre>
* final Version v1 = new Version( 1, 3 , 6 );
* final Version v2 = new Version( 1, 3 , 7 );
* final Version v3 = new Version( 1, 4 , 0 );
* final Version v4 = new Version( 2, 0 , 1 );
*
* assert( v1.complies( v1 ) );
* assert( ! v1.complies( v2 ) );
* assert( v2.complies( v1 ) );
* assert( ! v1.complies( v3 ) );
* assert( v3.complies( v1 ) );
* assert( ! v1.complies( v4 ) );
* assert( ! v4.complies( v1 ) );
* </pre>
*
* @param other The other <code>Version</code> object to be compared with this
* for compliancy (compatibility).
* @return <b>true</b> if this <code>Version</code> is compatible with the specified one
*/
public boolean complies( final Version other )
{
if( other == null )
return false;
if( other.m_major == -1 )
{
return true;
}
if( m_major != other.m_major )
{
return false;
}
else if( m_minor < other.m_minor )
{
//If of major version but lower minor version then incompatible
return false;
}
else if( m_minor == other.m_minor
&& m_micro < other.m_micro )
{
//If same major version, same minor version but lower micro level
//then incompatible
return false;
}
else
{
return true;
}
}
/**
* Overload toString to report version correctly.
*
* @return the dot seperated version string
*/
public String toString()
{
return m_major + "." + m_minor + "." + m_micro;
}
/**
* Compare two versions together according to the
* {@link Comparable} interface.
*
* @return number indicating relative value (-1, 0, 1)
*/
public int compareTo(Object o) {
if( o == null )
throw new NullPointerException( "o" );
Version other = (Version)o;
int val = 0;
if ( getMajor() < other.getMajor() ) val = -1;
if ( 0 == val && getMajor() > other.getMajor() ) val = 1;
if ( 0 == val && getMinor() < other.getMinor() ) val = -1;
if ( 0 == val && getMinor() > other.getMinor() ) val = 1;
if ( 0 == val && getMicro() < other.getMicro() ) val = -1;
if ( 0 == val && getMicro() > other.getMicro() ) val = 1;
return val;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.ipojo.manipulation;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.felix.ipojo.manipulation.ClassChecker.AnnotationDescriptor;
import org.objectweb.asm.ClassAdapter;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* iPOJO Class Adapter.
* This class adapt the visited class to link the class with the container.
* @author <a href="mailto:[email protected]">Felix Project Team</a>
*/
public class MethodCreator extends ClassAdapter implements Opcodes {
    /**
     * Name of the injected field holding the
     * <code>org.apache.felix.ipojo.InstanceManager</code>.
     */
    public static final String IM_FIELD = "__IM";
    /**
     * Prefix prepended to every renamed (original) POJO method body.
     */
    public static final String PREFIX = "__M_";
    /**
     * Internal name of the Pojo interface added to manipulated classes.
     */
    private static final String POJO = "org/apache/felix/ipojo/Pojo";
    /**
     * Field flag prefix: a boolean flag field <code>__F&lt;field&gt;</code>
     * is injected for every non-static field.
     */
    private static final String FIELD_FLAG_PREFIX = "__F";
    /**
     * Method flag prefix: a boolean flag field <code>__M&lt;id&gt;</code>
     * is injected for every intercepted method.
     */
    private static final String METHOD_FLAG_PREFIX = "__M";
    /**
     * onEntry callback method name (invoked on the instance manager).
     */
    private static final String ENTRY = "onEntry";
    /**
     * onExit callback method name.
     */
    private static final String EXIT = "onExit";
    /**
     * onError callback method name.
     */
    private static final String ERROR = "onError";
    /**
     * onGet callback method name (field read interception).
     */
    private static final String GET = "onGet";
    /**
     * onSet callback method name (field write interception).
     */
    private static final String SET = "onSet";
    /**
     * Name of the class currently being manipulated.
     */
    private String m_owner;
    /**
     * Names of the fields detected in the class
     * (this set is given by the previous analysis pass).
     */
    private Set<String> m_fields;
    /**
     * Ids of the methods contained in the class, as computed by
     * generateMethodId(String, String).
     */
    private List<String> m_methods = new ArrayList<String>();
    /**
     * Names of the method-flag fields already injected, so each flag
     * field is created at most once.
     */
    private List<String> m_methodFlags = new ArrayList<String>();
    /**
     * The methods visited during the previous analysis pass.
     * Used to move the collected annotations onto the generated methods.
     */
    private List<MethodDescriptor> m_visitedMethods = new ArrayList<MethodDescriptor>();
    /**
     * Set to <code>true</code> when a suitable constructor is found.
     * If still <code>false</code> at the end of the visit, the
     * manipulator injects a default constructor (see visitEnd()).
     */
    private boolean m_foundSuitableConstructor = false;
    /**
     * Name of the super class, used when generating the default constructor.
     */
    private String m_superclass;
/**
 * Creates the method creator.
 *
 * @param visitor the class visitor to which rewritten events are forwarded
 * @param fields the fields (name to type) detected during the previous class analysis
 * @param methods the methods detected during the previous class analysis
 */
public MethodCreator(ClassVisitor visitor, Map<String, String> fields, List<MethodDescriptor> methods) {
    super(visitor);
    m_visitedMethods = methods;
    m_fields = fields.keySet();
}
/**
 * Visits the class header.
 * Records the class name and super-class name, appends the Pojo interface
 * to the list of implemented interfaces, and injects the instance manager
 * field.
 *
 * @param version class file version
 * @param access access flags
 * @param name class name
 * @param signature generic signature
 * @param superName parent class name
 * @param interfaces implemented interfaces
 * @see org.objectweb.asm.ClassAdapter#visit(int, int, java.lang.String, java.lang.String, java.lang.String, java.lang.String[])
 */
public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
    m_superclass = superName;
    m_owner = name;
    // The class header must be forwarded (with the Pojo interface added)
    // before any member is emitted, so keep this call first.
    addPOJOInterface(version, access, name, signature, superName, interfaces);
    addIMField();
}
/**
 * A method is visited.
 * This method does not manipulate clinit and class$ methods.
 * In the case of a constructor, this method will generate a constructor with the instance manager
 * and will adapt the current constructor to call this constructor.
 * For standard method, this method will create method header, rename the current method and adapt it.
 * @param access : access flag.
 * @param name : name of the method
 * @param desc : method descriptor
 * @param signature : signature
 * @param exceptions : declared exceptions.
 * @return the MethodVisitor which will visit the method code.
 * @see org.objectweb.asm.ClassAdapter#visitMethod(int, java.lang.String, java.lang.String, java.lang.String, java.lang.String[])
 */
public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
    // Avoid manipulating special methods
    if (name.equals("<clinit>") || name.equals("class$")) { return super.visitMethod(access, name, desc, signature, exceptions); }
    // The constructor is manipulated separately
    if (name.equals("<init>")) {
        // Constructors are registered under the "$init" pseudo-name.
        // NOTE(review): md may be null if the analysis pass did not collect
        // this constructor; md.getAnnotations() below would then throw a
        // NullPointerException — confirm the analysis always registers it.
        MethodDescriptor md = getMethodDescriptor("$init", desc);
        // 1) change the constructor descriptor (add a component manager arg as first argument)
        String newDesc = desc.substring(1);
        newDesc = "(Lorg/apache/felix/ipojo/InstanceManager;" + newDesc;
        Type[] args = Type.getArgumentTypes(desc);
        // TODO HERE ! => All constructor matches, no distinction between the different constructors.
        generateConstructor(access, desc, signature, exceptions, md.getAnnotations(), md.getParameterAnnotations());
        // A no-arg constructor, or one taking only the BundleContext, is
        // "suitable": visitEnd() will then not inject a default constructor.
        if (args.length == 0) {
            m_foundSuitableConstructor = true;
        } else if (args.length == 1 && args[0].getClassName().equals("org.osgi.framework.BundleContext")) {
            m_foundSuitableConstructor = true;
        }
        // Insert the new constructor
        MethodVisitor mv = super.visitMethod(ACC_PRIVATE, "<init>", newDesc, signature, exceptions);
        return new ConstructorCodeAdapter(mv, m_owner, m_fields, ACC_PRIVATE, name, newDesc, m_superclass);
    }
    // Synthetic accessor bridges only need their field accesses rewritten,
    // no entry/exit interception header.
    if ((access & ACC_SYNTHETIC) == ACC_SYNTHETIC && name.startsWith("access$")) {
        MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
        return new MethodCodeAdapter(mv, m_owner, access, name, desc, m_fields);
    }
    // Do nothing on static methods
    if ((access & ACC_STATIC) == ACC_STATIC) { return super.visitMethod(access, name, desc, signature, exceptions); }
    // Do nothing on native methods
    if ((access & ACC_NATIVE) == ACC_NATIVE) { return super.visitMethod(access, name, desc, signature, exceptions); }
    MethodDescriptor md = getMethodDescriptor(name, desc);
    if (md == null) {
        generateMethodHeader(access, name, desc, signature, exceptions, null, null);
    } else {
        // Move the annotations collected during the analysis pass onto the
        // generated header method.
        generateMethodHeader(access, name, desc, signature, exceptions, md.getAnnotations(), md.getParameterAnnotations());
    }
    // Inject the per-method boolean flag field, once per method id.
    String id = generateMethodFlag(name, desc);
    if (! m_methodFlags.contains(id)) {
        FieldVisitor flagField = cv.visitField(Opcodes.ACC_PRIVATE, id, "Z", null, null);
        flagField.visitEnd();
        m_methodFlags.add(id);
    }
    // Rename the original body with the __M_ prefix and rewrite its field accesses.
    MethodVisitor mv = super.visitMethod(ACC_PRIVATE, PREFIX + name, desc, signature, exceptions);
    return new MethodCodeAdapter(mv, m_owner, ACC_PRIVATE, PREFIX + name, desc, m_fields);
}
/**
 * Looks up the descriptor collected for the given method during the
 * previous analysis pass ({@link MethodCreator#m_visitedMethods}).
 *
 * @param name the name of the method
 * @param desc the descriptor of the method
 * @return the matching method descriptor, or <code>null</code> when none matches
 */
private MethodDescriptor getMethodDescriptor(String name, String desc) {
    for (MethodDescriptor candidate : m_visitedMethods) {
        if (candidate.getName().equals(name) && candidate.getDescriptor().equals(desc)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Visits a field declaration.
 * For every non-static field, injects a boolean flag field
 * (<code>__F&lt;name&gt;</code>) plus a getter and a setter delegating to the
 * instance manager, then forwards the original field unchanged. Static
 * fields are forwarded untouched.
 * The getter/setter descriptors are identical for array and simple fields,
 * so they are computed once (the original duplicated this computation in
 * both branches).
 * @see org.objectweb.asm.ClassVisitor#visitField(int, java.lang.String, java.lang.String, java.lang.String, java.lang.Object)
 * @param access : access modifier
 * @param name : name of the field
 * @param desc : description of the field
 * @param signature : signature of the field
 * @param value : value of the field
 * @return the field visitor for the (unmodified) original field
 */
public FieldVisitor visitField(final int access, final String name, final String desc, final String signature, final Object value) {
    if ((access & ACC_STATIC) == 0) {
        // Inject the "is this field managed?" flag.
        FieldVisitor flag = cv.visitField(Opcodes.ACC_PRIVATE, FIELD_FLAG_PREFIX + name, "Z", null, null);
        flag.visitEnd();
        Type type = Type.getType(desc);
        // Getter returns the field type; setter takes it and returns void.
        String gDesc = "()" + desc;
        String sDesc = "(" + desc + ")V";
        if (type.getSort() == Type.ARRAY) {
            createArrayGetter(name, gDesc, type);
            createArraySetter(name, sDesc, type);
        } else {
            createSimpleGetter(name, gDesc, type);
            createSimpleSetter(name, sDesc, type);
        }
    }
    return cv.visitField(access, name, desc, signature, value);
}
/**
 * Modify the given constructor to be something like:
 * <code>
 * this(null, params...);
 * return;
 * </code>
 * The actual constructor is modified to support the instance manager argument.
 * @param access : access flag
 * @param descriptor : the original constructor descriptor
 * @param signature : method signature
 * @param exceptions : declared exception
 * @param annotations : the annotations to move to this constructor.
 * @param paramAnnotations : the parameter annotations to move to this constructor.
 */
private void generateConstructor(int access, String descriptor, String signature, String[] exceptions, List<AnnotationDescriptor> annotations, Map<Integer, List<AnnotationDescriptor>> paramAnnotations) {
    GeneratorAdapter mv = new GeneratorAdapter(
            cv.visitMethod(access, "<init>", descriptor, signature, exceptions),
            access, "<init>", descriptor);
    // Compute the new signature: same parameters with the InstanceManager
    // prepended as first argument.
    String newDesc = descriptor.substring(1); // Remove the first (
    newDesc = "(Lorg/apache/felix/ipojo/InstanceManager;" + newDesc;
    mv.visitCode();
    // Emit: this(null, <original args>...); return;
    mv.visitVarInsn(ALOAD, 0);
    mv.visitInsn(ACONST_NULL);
    mv.loadArgs();
    mv.visitMethodInsn(INVOKESPECIAL, m_owner, "<init>", newDesc);
    mv.visitInsn(RETURN);
    // Move annotations
    if (annotations != null) {
        for (int i = 0; i < annotations.size(); i++) {
            AnnotationDescriptor ad = annotations.get(i);
            ad.visitAnnotation(mv);
        }
    }
    // Move parameter annotations if any
    if (paramAnnotations != null && ! paramAnnotations.isEmpty()) {
        Iterator<Integer> ids = paramAnnotations.keySet().iterator();
        while(ids.hasNext()) {
            Integer id = ids.next();
            List<AnnotationDescriptor> ads = paramAnnotations.get(id);
            for (int i = 0; i < ads.size(); i++) {
                AnnotationDescriptor ad = ads.get(i);
                ad.visitParameterAnnotation(id.intValue(), mv);
            }
        }
    }
    mv.visitMaxs(0, 0);
    mv.visitEnd();
}
/**
 * Generates the method header of a POJO method.
 * The generated method wraps the renamed original body
 * (<code>__M_&lt;name&gt;</code>) and signals entry, exit and error to the
 * instance manager when the method flag is set; otherwise it calls the
 * original body directly.
 * @param access : access flag.
 * @param name : method name.
 * @param desc : method descriptor.
 * @param signature : method signature.
 * @param exceptions : declared exceptions.
 * @param annotations : the annotations to move to this method.
 * @param paramAnnotations : the parameter annotations to move to this method.
 */
private void generateMethodHeader(int access, String name, String desc, String signature, String[] exceptions, List<AnnotationDescriptor> annotations, Map<Integer, List<AnnotationDescriptor>> paramAnnotations) {
    GeneratorAdapter mv = new GeneratorAdapter(cv.visitMethod(access, name, desc, signature, exceptions), access, name, desc);
    mv.visitCode();
    Type returnType = Type.getReturnType(desc);
    // Compute result and exception stack location
    int result = -1;
    int exception = -1;
    //int arguments = mv.newLocal(Type.getType((new Object[0]).getClass()));
    if (returnType.getSort() != Type.VOID) {
        // The method returns something
        result = mv.newLocal(returnType);
        exception = mv.newLocal(Type.getType(Throwable.class));
    } else {
        exception = mv.newLocal(Type.getType(Throwable.class));
    }
    Label l0 = new Label();
    Label l1 = new Label();
    Label l2 = new Label();
    // try { l0..l1 } catch (Throwable) { l2 } around the intercepted call.
    mv.visitTryCatchBlock(l0, l1, l2, "java/lang/Throwable");
    // Fast path: when the method flag is false, invoke the renamed body
    // directly and return, without notifying the instance manager.
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, generateMethodFlag(name, desc), "Z");
    mv.visitJumpInsn(IFNE, l0);
    mv.visitVarInsn(ALOAD, 0);
    mv.loadArgs();
    mv.visitMethodInsn(INVOKESPECIAL, m_owner, PREFIX + name, desc);
    mv.visitInsn(returnType.getOpcode(IRETURN));
    // end of the non intercepted method invocation.
    mv.visitLabel(l0);
    // Intercepted path: __IM.onEntry(this, methodId, args[])
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
    mv.visitVarInsn(ALOAD, 0);
    mv.visitLdcInsn(generateMethodId(name, desc));
    mv.loadArgArray();
    mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", ENTRY, "(Ljava/lang/Object;Ljava/lang/String;[Ljava/lang/Object;)V");
    mv.visitVarInsn(ALOAD, 0);
    // Do not allow argument modification : just reload arguments.
    mv.loadArgs();
    mv.visitMethodInsn(INVOKESPECIAL, m_owner, PREFIX + name, desc);
    if (returnType.getSort() != Type.VOID) {
        mv.visitVarInsn(returnType.getOpcode(ISTORE), result);
    }
    // __IM.onExit(this, methodId, boxedResult-or-null)
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
    mv.visitVarInsn(ALOAD, 0);
    mv.visitLdcInsn(generateMethodId(name, desc));
    if (returnType.getSort() != Type.VOID) {
        mv.visitVarInsn(returnType.getOpcode(ILOAD), result);
        mv.box(returnType);
    } else {
        mv.visitInsn(ACONST_NULL);
    }
    mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", EXIT, "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)V");
    mv.visitLabel(l1);
    Label l7 = new Label();
    mv.visitJumpInsn(GOTO, l7);
    // Catch handler: __IM.onError(this, methodId, throwable) then rethrow.
    mv.visitLabel(l2);
    mv.visitVarInsn(ASTORE, exception);
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
    mv.visitVarInsn(ALOAD, 0);
    mv.visitLdcInsn(generateMethodId(name, desc));
    mv.visitVarInsn(ALOAD, exception);
    mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", ERROR, "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Throwable;)V");
    mv.visitVarInsn(ALOAD, exception);
    mv.visitInsn(ATHROW);
    // Normal completion: reload the stored result and return it.
    mv.visitLabel(l7);
    if (returnType.getSort() != Type.VOID) {
        mv.visitVarInsn(returnType.getOpcode(ILOAD), result);
    }
    mv.visitInsn(returnType.getOpcode(IRETURN));
    // Move annotations
    if (annotations != null) {
        for (int i = 0; i < annotations.size(); i++) {
            AnnotationDescriptor ad = annotations.get(i);
            ad.visitAnnotation(mv);
        }
    }
    // Move parameter annotations
    if (paramAnnotations != null && ! paramAnnotations.isEmpty()) {
        Iterator<Integer> ids = paramAnnotations.keySet().iterator();
        while(ids.hasNext()) {
            Integer id = ids.next();
            List<AnnotationDescriptor> ads = paramAnnotations.get(id);
            for (int i = 0; i < ads.size(); i++) {
                AnnotationDescriptor ad = ads.get(i);
                ad.visitParameterAnnotation(id.intValue(), mv);
            }
        }
    }
    mv.visitMaxs(0, 0);
    mv.visitEnd();
}
/**
 * Builds the name of the boolean flag field associated with a method:
 * the method-flag prefix followed by the method id.
 *
 * @param name the method name
 * @param desc the method descriptor
 * @return the method flag field name
 */
private String generateMethodFlag(String name, String desc) {
    final String methodId = generateMethodId(name, desc);
    return METHOD_FLAG_PREFIX + methodId;
}
/**
 * Generates the method id based on the given method name and method descriptor.
 * The method id is unique for this method and serves to create the flag field
 * (so it must follow Java field-name restrictions). The id is the method name
 * followed by <code>$&lt;arg&gt;</code> per parameter, dots replaced by
 * underscores and a <code>__</code> suffix for array parameters.
 * Uses a StringBuilder (no synchronization needed) instead of the original
 * StringBuffer, and computes the string once instead of three times.
 * @param name : method name
 * @param desc : method descriptor
 * @return method ID
 */
private String generateMethodId(String name, String desc) {
    StringBuilder id = new StringBuilder(name);
    Type[] args = Type.getArgumentTypes(desc);
    for (int i = 0; i < args.length; i++) {
        String arg = args[i].getClassName();
        id.append('$');
        if (arg.endsWith("[]")) {
            // NOTE(review): only one "[]" pair is stripped, so a
            // multi-dimensional array parameter would leave '[' / ']' in the
            // id, which is not a legal field name. Kept as-is because the id
            // must match what the rest of iPOJO computes — confirm whether
            // multi-dimensional parameters can occur here.
            arg = arg.substring(0, arg.length() - 2);
            id.append(arg.replace('.', '_')).append("__");
        } else {
            id.append(arg.replace('.', '_'));
        }
    }
    String methodId = id.toString();
    if (!m_methods.contains(methodId)) {
        m_methods.add(methodId);
    }
    return methodId;
}
/**
 * Injects the private instance manager field (__IM).
 * Calls <code>super.visitField</code> on purpose: this class overrides
 * {@link #visitField}, and the injected field must not go through that
 * override (which would generate getter/setter/flag members for it).
 */
private void addIMField() {
    FieldVisitor field = super.visitField(ACC_PRIVATE, IM_FIELD,
            "Lorg/apache/felix/ipojo/InstanceManager;", null, null);
    field.visitEnd();
}
/**
 * Add the POJO interface to the visited class.
 * The Pojo interface is appended to the implemented-interfaces list unless
 * the class already declares it, then the (possibly version-downgraded)
 * class header is forwarded to the underlying visitor.
 * @param version : class version
 * @param access : class access
 * @param name : class name
 * @param signature : class signature
 * @param superName : super class
 * @param interfaces : implemented interfaces.
 */
private void addPOJOInterface(int version, int access, String name, String signature, String superName, String[] interfaces) {
    // Check whether the POJO interface is already in the list; stop at the
    // first match (the original scanned the whole array).
    boolean found = false;
    for (int i = 0; i < interfaces.length && !found; i++) {
        if (interfaces[i].equals(POJO)) {
            found = true;
        }
    }
    String[] itfs;
    if (found) {
        itfs = interfaces;
    } else {
        // Append POJO; System.arraycopy replaces the original manual loop.
        itfs = new String[interfaces.length + 1];
        System.arraycopy(interfaces, 0, itfs, 0, interfaces.length);
        itfs[interfaces.length] = POJO;
    }
    // If version = 1.7, use 1.6 if the ipojo.downgrade.classes system
    // property is either not set or set to "true".
    int theVersion = version;
    String downgrade = System.getProperty("ipojo.downgrade.classes");
    if ((downgrade == null || "true".equals(downgrade)) && version == Opcodes.V1_7) {
        theVersion = Opcodes.V1_6;
    }
    cv.visit(theVersion, access, name, signature, superName, itfs);
}
/**
 * Ends the class visit.
 * Injects the helper methods (instance manager setter, component instance
 * getter and, when needed, a default constructor), resets the collected
 * method bookkeeping and closes the class.
 * @see org.objectweb.asm.ClassAdapter#visitEnd()
 */
public void visitEnd() {
    // The setter iterates m_methods, so it is generated before the lists
    // are cleared.
    createSetInstanceManagerMethod();
    createGetComponentInstanceMethod();
    if (!m_foundSuitableConstructor) {
        // No adequate constructor was visited: inject a default one.
        createSimpleConstructor();
    }
    m_methodFlags.clear();
    m_methods.clear();
    cv.visitEnd();
}
/**
 * Creates a simple constructor with an instance manager
 * in argument if no suitable constructor is found during
 * the visit. The generated code is equivalent to:
 * <code>super(); _setInstanceManager(manager);</code>
 * NOTE(review): assumes the super class exposes a no-argument
 * constructor — confirm this holds for all manipulated classes.
 */
private void createSimpleConstructor() {
    MethodVisitor mv = cv.visitMethod(ACC_PUBLIC, "<init>",
            "(Lorg/apache/felix/ipojo/InstanceManager;)V", null, null);
    mv.visitCode();
    // Super call
    mv.visitVarInsn(ALOAD, 0);
    mv.visitMethodInsn(INVOKESPECIAL, m_superclass, "<init>", "()V");
    // Call set instance manager
    mv.visitVarInsn(ALOAD, 0);
    mv.visitVarInsn(ALOAD, 1);
    mv.visitMethodInsn(INVOKEVIRTUAL, m_owner, "_setInstanceManager",
            "(Lorg/apache/felix/ipojo/InstanceManager;)V");
    mv.visitInsn(RETURN);
    mv.visitMaxs(0, 0);
    mv.visitEnd();
}
/**
 * Creates the <code>_setInstanceManager(InstanceManager)</code> method.
 * The generated method stores the manager in the __IM field, then raises
 * the per-field and per-method boolean flags for every field/method
 * registered in the manager. Returns immediately when the manager is null.
 * ("getRegistredFields"/"getRegistredMethods" are the runtime API names,
 * spelling included, and must not be changed here.)
 */
private void createSetInstanceManagerMethod() {
    MethodVisitor mv = cv.visitMethod(ACC_PRIVATE, "_setInstanceManager", "(Lorg/apache/felix/ipojo/InstanceManager;)V", null, null);
    mv.visitCode();
    // If the given instance manager is null, just returns.
    mv.visitVarInsn(ALOAD, 1);
    Label l1 = new Label();
    mv.visitJumpInsn(IFNONNULL, l1);
    mv.visitInsn(RETURN);
    mv.visitLabel(l1);
    // this.__IM = manager;
    mv.visitVarInsn(ALOAD, 0);
    mv.visitVarInsn(ALOAD, 1);
    mv.visitFieldInsn(PUTFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
    // Set each __F<field> flag when the field is registered in the manager.
    // Local slot 2 holds the returned Set (skipped entirely when null).
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
    mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", "getRegistredFields", "()Ljava/util/Set;");
    mv.visitVarInsn(ASTORE, 2);
    mv.visitVarInsn(ALOAD, 2);
    Label endif = new Label();
    mv.visitJumpInsn(IFNULL, endif);
    Iterator<String> it = m_fields.iterator();
    while (it.hasNext()) {
        String field = it.next();
        mv.visitVarInsn(ALOAD, 2);
        mv.visitLdcInsn(field);
        mv.visitMethodInsn(INVOKEINTERFACE, "java/util/Set", "contains", "(Ljava/lang/Object;)Z");
        Label l3 = new Label();
        mv.visitJumpInsn(IFEQ, l3);
        mv.visitVarInsn(ALOAD, 0);
        mv.visitInsn(ICONST_1);
        mv.visitFieldInsn(PUTFIELD, m_owner, FIELD_FLAG_PREFIX + field, "Z");
        mv.visitLabel(l3);
    }
    mv.visitLabel(endif);
    // Same scheme for the __M<id> method flags, reusing local slot 2.
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
    mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", "getRegistredMethods", "()Ljava/util/Set;");
    mv.visitVarInsn(ASTORE, 2);
    mv.visitVarInsn(ALOAD, 2);
    Label endif2 = new Label();
    mv.visitJumpInsn(IFNULL, endif2);
    for (int i = 0; i < m_methods.size(); i++) {
        String methodId = m_methods.get(i);
        if (!methodId.equals("<init>")) {
            mv.visitVarInsn(ALOAD, 2);
            mv.visitLdcInsn(methodId);
            mv.visitMethodInsn(INVOKEINTERFACE, "java/util/Set", "contains", "(Ljava/lang/Object;)Z");
            Label l3 = new Label();
            mv.visitJumpInsn(IFEQ, l3);
            mv.visitVarInsn(ALOAD, 0);
            mv.visitInsn(ICONST_1);
            mv.visitFieldInsn(PUTFIELD, m_owner, METHOD_FLAG_PREFIX + methodId, "Z");
            mv.visitLabel(l3);
        }
    }
    mv.visitLabel(endif2);
    mv.visitInsn(RETURN);
    mv.visitMaxs(0, 0);
    mv.visitEnd();
}
/**
 * Generates the <code>getComponentInstance()</code> method, which returns
 * the value of the injected __IM field (the declared return type is
 * <code>org.apache.felix.ipojo.ComponentInstance</code>).
 */
private void createGetComponentInstanceMethod() {
    MethodVisitor gen = cv.visitMethod(ACC_PUBLIC, "getComponentInstance", "()Lorg/apache/felix/ipojo/ComponentInstance;", null, null);
    gen.visitCode();
    // return this.__IM;
    gen.visitVarInsn(ALOAD, 0);
    gen.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
    gen.visitInsn(ARETURN);
    gen.visitMaxs(0, 0);
    gen.visitEnd();
}
/**
 * Creates a setter method (<code>__set&lt;name&gt;</code>) for an array field.
 * When the field flag is false the value is stored directly; otherwise the
 * write is delegated to the instance manager's onSet callback.
 * The <code>type</code> parameter is accepted for symmetry with the other
 * create* methods but is not used by this implementation.
 * @param name : field name
 * @param desc : method description
 * @param type : contained type (inside the array)
 */
private void createArraySetter(String name, String desc, Type type) {
    MethodVisitor mv = cv.visitMethod(0, "__set" + name, desc, null, null);
    mv.visitCode();
    // Recover the field descriptor from the setter descriptor "(<fd>)V".
    String internalType = desc.substring(1);
    internalType = internalType.substring(0, internalType.length() - 2);
    Label l1 = new Label();
    mv.visitLabel(l1);
    // Direct path: flag not set -> plain PUTFIELD.
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
    Label l2 = new Label();
    mv.visitJumpInsn(IFNE, l2);
    mv.visitVarInsn(ALOAD, 0);
    mv.visitVarInsn(ALOAD, 1);
    mv.visitFieldInsn(PUTFIELD, m_owner, name, internalType);
    mv.visitInsn(RETURN);
    mv.visitLabel(l2);
    // Managed path: __IM.onSet(this, fieldName, value)
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
    mv.visitVarInsn(ALOAD, 0);
    mv.visitLdcInsn(name);
    mv.visitVarInsn(ALOAD, 1);
    mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", SET, "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)V");
    mv.visitInsn(RETURN);
    // End
    mv.visitMaxs(0, 0);
    mv.visitEnd();
}
/**
 * Creates a getter method (<code>__get&lt;name&gt;</code>) for an array field.
 * When the field flag is false the field is read directly; otherwise the
 * read is delegated to the instance manager's onGet callback and the result
 * is cast back to the array type.
 * The <code>type</code> parameter is accepted for symmetry with the other
 * create* methods but is not used by this implementation.
 * @param name : field name
 * @param desc : method description
 * @param type : contained type (inside the array)
 */
private void createArrayGetter(String name, String desc, Type type) {
    String methodName = "__get" + name;
    MethodVisitor mv = cv.visitMethod(0, methodName, desc, null, null);
    mv.visitCode();
    // Recover the field descriptor from the getter descriptor "()<fd>".
    String internalType = desc.substring(2);
    // Direct path: flag not set -> plain GETFIELD.
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
    Label l1 = new Label();
    mv.visitJumpInsn(IFNE, l1);
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, name, internalType);
    mv.visitInsn(ARETURN);
    mv.visitLabel(l1);
    // Managed path: return (T[]) __IM.onGet(this, fieldName)
    mv.visitVarInsn(ALOAD, 0);
    mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
    mv.visitVarInsn(ALOAD, 0);
    mv.visitLdcInsn(name);
    mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", GET, "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;");
    mv.visitTypeInsn(CHECKCAST, internalType);
    mv.visitInsn(ARETURN);
    // End
    mv.visitMaxs(0, 0);
    mv.visitEnd();
}
/**
 * Create the getter for a field.
 * The generated method ("__get" + name) first checks the boolean flag field
 * (FIELD_FLAG_PREFIX + name). When the flag is unset, the backing field is
 * returned directly. When it is set, the value is fetched from the
 * InstanceManager as an Object and unboxed (for primitives) or cast (for
 * objects) to the declared type before being returned.
 * Note: the primitive cases share the local variables internalName /
 * boxingType / unboxingMethod / l0 / l1, declared in the first case and
 * reassigned in the following ones.
 * @param name : field of the dependency
 * @param desc : description of the getter method
 * @param type : type to return
 */
private void createSimpleGetter(String name, String desc, Type type) {
String methodName = "__get" + name;
MethodVisitor mv = cv.visitMethod(0, methodName, desc, null, null);
mv.visitCode();
switch (type.getSort()) {
// All int-category primitives share the IRETURN opcode family.
case Type.BOOLEAN:
case Type.CHAR:
case Type.BYTE:
case Type.SHORT:
case Type.INT:
// [0] = primitive descriptor, [1] = wrapper class internal name,
// [2] = unboxing method name (e.g. "intValue").
String internalName = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][0];
String boxingType = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][1];
String unboxingMethod = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][2];
Label l0 = new Label();
mv.visitLabel(l0);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
Label l1 = new Label();
// Flag set -> managed field: jump to the InstanceManager path.
mv.visitJumpInsn(IFNE, l1);
// Unmanaged path: return the raw field value.
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, name, internalName);
mv.visitInsn(IRETURN);
mv.visitLabel(l1);
// Managed path: InstanceManager.get(this, name), cast to the wrapper,
// then unbox to the primitive value.
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
mv.visitVarInsn(ALOAD, 0);
mv.visitLdcInsn(name);
mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", GET, "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;");
mv.visitVarInsn(ASTORE, 1);
mv.visitVarInsn(ALOAD, 1);
mv.visitTypeInsn(CHECKCAST, boxingType);
mv.visitVarInsn(ASTORE, 2);
mv.visitVarInsn(ALOAD, 2);
mv.visitMethodInsn(INVOKEVIRTUAL, boxingType, unboxingMethod, "()" + internalName);
// type.getOpcode(IRETURN) picks the correct xRETURN for the sort.
mv.visitInsn(type.getOpcode(IRETURN));
break;
case Type.LONG:
// Same structure as the int case, but with LRETURN.
internalName = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][0];
boxingType = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][1];
unboxingMethod = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][2];
l0 = new Label();
mv.visitLabel(l0);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
l1 = new Label();
mv.visitJumpInsn(IFNE, l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, name, internalName);
mv.visitInsn(LRETURN);
mv.visitLabel(l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
mv.visitVarInsn(ALOAD, 0);
mv.visitLdcInsn(name);
mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", GET, "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;");
mv.visitVarInsn(ASTORE, 1);
mv.visitVarInsn(ALOAD, 1);
mv.visitTypeInsn(CHECKCAST, boxingType);
mv.visitVarInsn(ASTORE, 2);
mv.visitVarInsn(ALOAD, 2);
mv.visitMethodInsn(INVOKEVIRTUAL, boxingType, unboxingMethod, "()" + internalName);
mv.visitInsn(LRETURN);
break;
case Type.DOUBLE:
// Same structure as the int case, but with DRETURN.
internalName = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][0];
boxingType = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][1];
unboxingMethod = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][2];
l0 = new Label();
mv.visitLabel(l0);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
l1 = new Label();
mv.visitJumpInsn(IFNE, l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, name, internalName);
mv.visitInsn(DRETURN);
mv.visitLabel(l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
mv.visitVarInsn(ALOAD, 0);
mv.visitLdcInsn(name);
mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", GET, "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;");
mv.visitVarInsn(ASTORE, 1);
mv.visitVarInsn(ALOAD, 1);
mv.visitTypeInsn(CHECKCAST, boxingType);
mv.visitVarInsn(ASTORE, 2);
mv.visitVarInsn(ALOAD, 2);
mv.visitMethodInsn(INVOKEVIRTUAL, boxingType, unboxingMethod, "()" + internalName);
mv.visitInsn(DRETURN);
break;
case Type.FLOAT:
// Same structure as the int case, but with FRETURN.
internalName = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][0];
boxingType = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][1];
unboxingMethod = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][2];
l0 = new Label();
mv.visitLabel(l0);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
l1 = new Label();
mv.visitJumpInsn(IFNE, l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, name, internalName);
mv.visitInsn(FRETURN);
mv.visitLabel(l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
mv.visitVarInsn(ALOAD, 0);
mv.visitLdcInsn(name);
mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", GET, "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;");
mv.visitVarInsn(ASTORE, 1);
mv.visitVarInsn(ALOAD, 1);
mv.visitTypeInsn(CHECKCAST, boxingType);
mv.visitVarInsn(ASTORE, 2);
mv.visitVarInsn(ALOAD, 2);
mv.visitMethodInsn(INVOKEVIRTUAL, boxingType, unboxingMethod, "()" + internalName);
mv.visitInsn(FRETURN);
break;
case Type.OBJECT:
// Reference types need no unboxing: a CHECKCAST is enough.
l0 = new Label();
mv.visitLabel(l0);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
l1 = new Label();
mv.visitJumpInsn(IFNE, l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, name, "L" + type.getInternalName() + ";");
mv.visitInsn(ARETURN);
mv.visitLabel(l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
mv.visitVarInsn(ALOAD, 0);
mv.visitLdcInsn(name);
mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", GET, "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;");
mv.visitTypeInsn(CHECKCAST, type.getInternalName());
mv.visitInsn(ARETURN);
break;
default:
// Array sorts are handled by createArrayGetter; anything else is a
// manipulation error and is only logged (no method body is emitted).
ManipulationProperty.getLogger().log(ManipulationProperty.SEVERE, "Manipulation problem in " + m_owner + " : a type is not implemented : " + type);
break;
}
mv.visitMaxs(0, 0);
mv.visitEnd();
}
/**
 * Create the setter method for one property. The name of the method is _set+name of the field.
 * The generated method first checks the boolean flag field
 * (FIELD_FLAG_PREFIX + name). When the flag is unset, the value is written
 * directly to the backing field. When it is set, primitives are boxed into
 * their wrapper class and the (boxed) value is handed to
 * InstanceManager.setterCallback via the SET method.
 * @param name : name of the field representing a property
 * @param desc : description of the setter method
 * @param type : type of the property
 */
private void createSimpleSetter(String name, String desc, Type type) {
MethodVisitor mv = cv.visitMethod(0, "__set" + name, desc, null, null);
mv.visitCode();
switch (type.getSort()) {
// Single-slot primitives (the argument occupies local slot 1 only).
case Type.BOOLEAN:
case Type.CHAR:
case Type.BYTE:
case Type.SHORT:
case Type.INT:
case Type.FLOAT:
// [0] = primitive descriptor, [1] = wrapper class internal name.
String internalName = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][0];
String boxingType = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][1];
Label l1 = new Label();
mv.visitLabel(l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
Label l22 = new Label();
// Flag set -> managed field: jump to the boxing + InstanceManager path.
mv.visitJumpInsn(IFNE, l22);
// Unmanaged path: store the argument straight into the field.
mv.visitVarInsn(ALOAD, 0);
// type.getOpcode(ILOAD) picks the correct xLOAD for the sort.
mv.visitVarInsn(type.getOpcode(ILOAD), 1);
mv.visitFieldInsn(PUTFIELD, m_owner, name, internalName);
mv.visitInsn(RETURN);
mv.visitLabel(l22);
// Managed path: box the primitive (new Wrapper(arg)) into local slot 2...
mv.visitTypeInsn(NEW, boxingType);
mv.visitInsn(DUP);
mv.visitVarInsn(type.getOpcode(ILOAD), 1);
mv.visitMethodInsn(INVOKESPECIAL, boxingType, "<init>", "(" + internalName + ")V");
mv.visitVarInsn(ASTORE, 2);
Label l2 = new Label();
mv.visitLabel(l2);
// ...then call InstanceManager.SET(this, name, boxedValue).
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
mv.visitVarInsn(ALOAD, 0);
mv.visitLdcInsn(name);
mv.visitVarInsn(ALOAD, 2);
mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", SET, "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)V");
Label l3 = new Label();
mv.visitLabel(l3);
mv.visitInsn(RETURN);
break;
// Two-slot primitives: the argument occupies local slots 1 and 2,
// so the boxed value must be stored in slot 3 instead of slot 2.
case Type.LONG:
case Type.DOUBLE:
internalName = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][0];
boxingType = ManipulationProperty.PRIMITIVE_BOXING_INFORMATION[type.getSort()][1];
l1 = new Label();
mv.visitLabel(l1);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
Label l23 = new Label();
mv.visitJumpInsn(IFNE, l23);
mv.visitVarInsn(ALOAD, 0);
mv.visitVarInsn(type.getOpcode(ILOAD), 1);
mv.visitFieldInsn(PUTFIELD, m_owner, name, internalName);
mv.visitInsn(RETURN);
mv.visitLabel(l23);
mv.visitTypeInsn(NEW, boxingType);
mv.visitInsn(DUP);
mv.visitVarInsn(type.getOpcode(ILOAD), 1);
mv.visitMethodInsn(INVOKESPECIAL, boxingType, "<init>", "(" + internalName + ")V");
mv.visitVarInsn(ASTORE, 3); // Double space
l2 = new Label();
mv.visitLabel(l2);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
mv.visitVarInsn(ALOAD, 0);
mv.visitLdcInsn(name);
mv.visitVarInsn(ALOAD, 3);
mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", SET, "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)V");
l3 = new Label();
mv.visitLabel(l3);
mv.visitInsn(RETURN);
break;
case Type.OBJECT:
// Reference types need no boxing: pass the argument directly.
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, FIELD_FLAG_PREFIX + name, "Z");
Label l24 = new Label();
mv.visitJumpInsn(IFNE, l24);
mv.visitVarInsn(ALOAD, 0);
mv.visitVarInsn(ALOAD, 1);
mv.visitFieldInsn(PUTFIELD, m_owner, name, "L" + type.getInternalName() + ";");
mv.visitInsn(RETURN);
mv.visitLabel(l24);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, m_owner, IM_FIELD, "Lorg/apache/felix/ipojo/InstanceManager;");
mv.visitVarInsn(ALOAD, 0);
mv.visitLdcInsn(name);
mv.visitVarInsn(ALOAD, 1);
mv.visitMethodInsn(INVOKEVIRTUAL, "org/apache/felix/ipojo/InstanceManager", SET, "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)V");
mv.visitInsn(RETURN);
break;
default:
// Unsupported sort: log only, no method body is emitted.
ManipulationProperty.getLogger().log(ManipulationProperty.SEVERE, "Manipulation Error : Cannot create the setter method for the field : " + name + " (" + type + ")");
break;
}
mv.visitMaxs(0, 0);
mv.visitEnd();
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexGraveyard;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.indices.IndexClosedException;
import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.InternalTestCluster.RestartCallback;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.notNullValue;
@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
public class GatewayIndexStateIT extends ESIntegTestCase {
private final Logger logger = LogManager.getLogger(GatewayIndexStateIT.class);
@Override
protected boolean addMockInternalEngine() {
// testRecoverBrokenIndexMetadata relies on the flush-on-shutdown behavior, which can be randomly
// disabled in MockInternalEngine, so force the real internal engine for every test in this class.
return false;
}
/**
 * Verifies that the {@code _routing required} flag in an index mapping is parsed
 * on creation and is still present in the cluster state after a full cluster restart.
 */
public void testMappingMetadataParsed() throws Exception {
    logger.info("--> starting 1 nodes");
    internalCluster().startNode();
    logger.info("--> creating test index, with meta routing");
    client().admin().indices().prepareCreate("test")
        .setMapping(XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("_routing")
            .field("required", true).endObject().endObject().endObject())
        .execute().actionGet();
    logger.info("--> verify meta _routing required exists");
    ClusterStateResponse beforeRestart = client().admin().cluster().prepareState().execute().actionGet();
    MappingMetadata mapping = beforeRestart.getState().metadata().index("test").mapping();
    assertThat(mapping.routingRequired(), equalTo(true));
    logger.info("--> restarting nodes...");
    internalCluster().fullRestart();
    logger.info("--> waiting for yellow status");
    ensureYellow();
    logger.info("--> verify meta _routing required exists");
    ClusterStateResponse afterRestart = client().admin().cluster().prepareState().execute().actionGet();
    mapping = afterRestart.getState().metadata().index("test").mapping();
    assertThat(mapping.routingRequired(), equalTo(true));
}
/**
 * Exercises the open/close index lifecycle: close an index, verify its state and
 * that writes are rejected, reopen it, verify the document survived, close it again,
 * fully restart the cluster, and check the closed state (and the document) persist.
 */
public void testSimpleOpenClose() throws Exception {
logger.info("--> starting 2 nodes");
internalCluster().startNodes(2);
logger.info("--> creating test index");
createIndex("test");
NumShards test = getNumShards("test");
logger.info("--> waiting for green status");
ensureGreen();
// Baseline: index is OPEN with all shards started.
ClusterStateResponse stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.OPEN));
assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries));
assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(),
equalTo(test.totalNumShards));
logger.info("--> indexing a simple document");
client().prepareIndex("test").setId("1").setSource("field1", "value1").get();
logger.info("--> closing test index...");
assertAcked(client().admin().indices().prepareClose("test"));
// A closed index keeps its routing table entry (non-null) but is in CLOSE state.
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE));
assertThat(stateResponse.getState().routingTable().index("test"), notNullValue());
logger.info("--> verifying that the state is green");
ensureGreen();
logger.info("--> trying to index into a closed index ...");
// Writes to a closed index must be rejected with IndexClosedException.
try {
client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet();
fail();
} catch (IndexClosedException e) {
// all is well
}
logger.info("--> creating another index (test2) by indexing into it");
client().prepareIndex("test2").setId("1").setSource("field1", "value1").execute().actionGet();
logger.info("--> verifying that the state is green");
ensureGreen();
logger.info("--> opening the first index again...");
assertAcked(client().admin().indices().prepareOpen("test"));
logger.info("--> verifying that the state is green");
ensureGreen();
// After reopening, the index must be OPEN again with all shards started.
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.OPEN));
assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries));
assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(),
equalTo(test.totalNumShards));
logger.info("--> trying to get the indexed document on the first index");
GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
logger.info("--> closing test index...");
assertAcked(client().admin().indices().prepareClose("test"));
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE));
assertThat(stateResponse.getState().routingTable().index("test"), notNullValue());
logger.info("--> restarting nodes...");
internalCluster().fullRestart();
logger.info("--> waiting for two nodes and green status");
ensureGreen();
// The CLOSE state must survive a full cluster restart.
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE));
assertThat(stateResponse.getState().routingTable().index("test"), notNullValue());
logger.info("--> trying to index into a closed index ...");
try {
client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet();
fail();
} catch (IndexClosedException e) {
// all is well
}
logger.info("--> opening index...");
client().admin().indices().prepareOpen("test").execute().actionGet();
logger.info("--> waiting for green status");
ensureGreen();
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.OPEN));
assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries));
assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(),
equalTo(test.totalNumShards));
logger.info("--> trying to get the indexed document on the first round (before close and shutdown)");
// The document indexed before the close/restart cycle must still be retrievable.
getResponse = client().prepareGet("test", "1").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
logger.info("--> indexing a simple document");
client().prepareIndex("test").setId("2").setSource("field1", "value1").execute().actionGet();
}
/**
 * Verifies that a master-only (non-data) node recovers index metadata from the
 * gateway after a full restart, even when the index has no allocated shards.
 */
public void testJustMasterNode() throws Exception {
logger.info("--> cleaning nodes");
logger.info("--> starting 1 master node non data");
internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false).build());
logger.info("--> create an index");
// ActiveShardCount.NONE: don't wait for shard allocation — there is no data node.
client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE).execute().actionGet();
logger.info("--> restarting master node");
internalCluster().fullRestart(new RestartCallback(){
@Override
public Settings onNodeStopped(String nodeName) {
// Keep the node master-only (non-data) across the restart.
return Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false).build();
}
});
logger.info("--> waiting for test index to be created");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setIndices("test")
.execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify we have an index");
// The index metadata must have been recovered from the gateway after restart.
ClusterStateResponse clusterStateResponse = client().admin().cluster().prepareState().setIndices("test").execute().actionGet();
assertThat(clusterStateResponse.getState().metadata().hasIndex("test"), equalTo(true));
}
/**
 * Smoke test for a cluster split into one dedicated master node and one
 * dedicated data node: index creation and a single write must succeed.
 */
public void testJustMasterNodeAndJustDataNode() {
    logger.info("--> cleaning nodes");
    logger.info("--> starting 1 master node non data");
    // A master-only node (data disabled) and a data-only node (master disabled).
    final Settings masterOnly = Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false).build();
    final Settings dataOnly = Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build();
    internalCluster().startNode(masterOnly);
    internalCluster().startNode(dataOnly);
    logger.info("--> create an index");
    client().admin().indices().prepareCreate("test").execute().actionGet();
    client().prepareIndex("test").setSource("field1", "value1").execute().actionGet();
}
/**
 * Verifies that a single document indexed into an auto-created index on a
 * two-node cluster remains searchable, including across a close/open cycle.
 */
public void testTwoNodesSingleDoc() throws Exception {
logger.info("--> cleaning nodes");
logger.info("--> starting 2 nodes");
internalCluster().startNodes(2);
logger.info("--> indexing a simple document");
// Index auto-creation: "test" does not exist yet; IMMEDIATE makes the doc searchable right away.
client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get();
logger.info("--> waiting for green status");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus()
.setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify 1 doc in the index");
// Repeat the search to hit different shard copies across the two nodes.
for (int i = 0; i < 10; i++) {
assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
}
logger.info("--> closing test index...");
assertAcked(client().admin().indices().prepareClose("test"));
ClusterStateResponse stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE));
assertThat(stateResponse.getState().routingTable().index("test"), notNullValue());
logger.info("--> opening the index...");
client().admin().indices().prepareOpen("test").execute().actionGet();
logger.info("--> waiting for green status");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2")
.execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify 1 doc in the index");
// The document must still be present after the close/open cycle.
assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
for (int i = 0; i < 10; i++) {
assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
}
}
/**
 * This test ensures that when an index deletion takes place while a node is offline, when that
 * node rejoins the cluster, it deletes the index locally instead of importing it as a dangling index.
 */
public void testIndexDeletionWhenNodeRejoins() throws Exception {
final String indexName = "test-index-del-on-node-rejoin-idx";
final int numNodes = 2;
final List<String> nodes;
logger.info("--> starting a cluster with " + numNodes + " nodes");
// Randomize the tombstone limit so the graveyard pruning logic gets varied coverage.
nodes = internalCluster().startNodes(numNodes,
Settings.builder().put(IndexGraveyard.SETTING_MAX_TOMBSTONES.getKey(), randomIntBetween(10, 100)).build());
logger.info("--> create an index");
createIndex(indexName);
logger.info("--> waiting for green status");
ensureGreen();
// Remember the UUID so we can later check the on-disk index folder is gone.
final String indexUUID = resolveIndex(indexName).getUUID();
logger.info("--> restart a random date node, deleting the index in between stopping and restarting");
internalCluster().restartRandomDataNode(new RestartCallback() {
@Override
public Settings onNodeStopped(final String nodeName) throws Exception {
// Delete the index through the surviving node while this node is down,
// so the restarted node comes back holding stale on-disk index data.
nodes.remove(nodeName);
logger.info("--> stopped node[{}], remaining nodes {}", nodeName, nodes);
assert nodes.size() > 0;
final String otherNode = nodes.get(0);
logger.info("--> delete index and verify it is deleted");
final Client client = client(otherNode);
client.admin().indices().prepareDelete(indexName).execute().actionGet();
assertFalse(indexExists(indexName, client));
logger.info("--> index deleted");
return super.onNodeStopped(nodeName);
}
});
logger.info("--> wait until all nodes are back online");
client().admin().cluster().health(Requests.clusterHealthRequest().waitForEvents(Priority.LANGUID)
.waitForNodes(Integer.toString(numNodes))).actionGet();
logger.info("--> waiting for green status");
ensureGreen();
logger.info("--> verify that the deleted index is removed from the cluster and not reimported as dangling by the restarted node");
assertFalse(indexExists(indexName));
// The restarted node's local index folder must eventually be cleaned up too.
assertBusy(() -> {
final NodeEnvironment nodeEnv = internalCluster().getInstance(NodeEnvironment.class);
try {
assertFalse("index folder " + indexUUID + " should be deleted", nodeEnv.availableIndexFolders().contains(indexUUID));
} catch (IOException e) {
logger.error("Unable to retrieve available index folders from the node", e);
fail("Unable to retrieve available index folders from the node");
}
});
}
/**
 * This test really tests worst case scenario where we have a broken setting or any setting that prevents an index from being
 * allocated in our metadata that we recover. In that case we now have the ability to check the index on local recovery from disk
 * if it is sane and if we can successfully create an IndexService. This also includes plugins etc.
 */
public void testRecoverBrokenIndexMetadata() throws Exception {
logger.info("--> starting one node");
internalCluster().startNode();
logger.info("--> indexing a simple document");
client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get();
logger.info("--> waiting for green status");
// Mostly run with a single node (yellow is the best possible); occasionally add a
// second node and wait for green to vary the recovery scenario.
if (usually()) {
ensureYellow();
} else {
internalCluster().startNode();
client().admin().cluster()
.health(Requests.clusterHealthRequest()
.waitForGreenStatus()
.waitForEvents(Priority.LANGUID)
.waitForNoRelocatingShards(true).waitForNodes("2")).actionGet();
}
ClusterState state = client().admin().cluster().prepareState().get().getState();
final IndexMetadata metadata = state.getMetadata().index("test");
// Build broken index metadata: one archivable-invalid setting and one setting that
// passes validation but breaks shard allocation (missing icu plugin).
final IndexMetadata.Builder brokenMeta = IndexMetadata.builder(metadata).settings(Settings.builder().put(metadata.getSettings())
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.minimumIndexCompatibilityVersion().id)
// this is invalid but should be archived
.put("index.similarity.BM25.type", "boolean")
// this one is not validated ahead of time and breaks allocation
.put("index.analysis.filter.myCollator.type", "icu_collation"));
restartNodesOnBrokenClusterState(ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(brokenMeta)));
// check that the cluster does not keep reallocating shards
assertBusy(() -> {
final RoutingTable routingTable = client().admin().cluster().prepareState().get().getState().routingTable();
final IndexRoutingTable indexRoutingTable = routingTable.index("test");
assertNotNull(indexRoutingTable);
for (IndexShardRoutingTable shardRoutingTable : indexRoutingTable) {
assertTrue(shardRoutingTable.primaryShard().unassigned());
assertEquals(UnassignedInfo.AllocationStatus.DECIDERS_NO,
shardRoutingTable.primaryShard().unassignedInfo().getLastAllocationStatus());
assertThat(shardRoutingTable.primaryShard().unassignedInfo().getNumFailedAllocations(), greaterThan(0));
}
}, 60, TimeUnit.SECONDS);
client().admin().indices().prepareClose("test").get();
// Closing must succeed, and the invalid similarity setting must have been archived.
state = client().admin().cluster().prepareState().get().getState();
assertEquals(IndexMetadata.State.CLOSE, state.getMetadata().index(metadata.getIndex()).getState());
assertEquals("boolean", state.getMetadata().index(metadata.getIndex()).getSettings().get("archived.index.similarity.BM25.type"));
// try to open it with the broken setting - fail again!
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> client().admin().indices().prepareOpen("test").get());
assertEquals(ex.getMessage(), "Failed to verify index " + metadata.getIndex());
assertNotNull(ex.getCause());
assertEquals(IllegalArgumentException.class, ex.getCause().getClass());
assertEquals(ex.getCause().getMessage(), "Unknown filter type [icu_collation] for [myCollator]");
}
/**
 * This test really tests worst case scenario where we have a missing analyzer setting.
 * In that case we now have the ability to check the index on local recovery from disk
 * if it is sane and if we can successfully create an IndexService.
 * This also includes plugins etc.
 */
public void testRecoverMissingAnalyzer() throws Exception {
logger.info("--> starting one node");
internalCluster().startNode();
// Create an index whose mapping references the custom analyzer "test".
prepareCreate("test").setSettings(Settings.builder()
.put("index.analysis.analyzer.test.tokenizer", "standard")
.put("index.number_of_shards", "1"))
.setMapping("{\n" +
"    \"properties\": {\n" +
"      \"field1\": {\n" +
"        \"type\": \"text\",\n" +
"        \"analyzer\": \"test\"\n" +
"      }\n" +
"    }\n" +
"  }}").get();
logger.info("--> indexing a simple document");
client().prepareIndex("test").setId("1").setSource("field1", "value one").setRefreshPolicy(IMMEDIATE).get();
logger.info("--> waiting for green status");
// Mostly run with a single node (yellow); occasionally add a second node and wait for green.
if (usually()) {
ensureYellow();
} else {
internalCluster().startNode();
client().admin().cluster()
.health(Requests.clusterHealthRequest()
.waitForGreenStatus()
.waitForEvents(Priority.LANGUID)
.waitForNoRelocatingShards(true).waitForNodes("2")).actionGet();
}
ClusterState state = client().admin().cluster().prepareState().get().getState();
final IndexMetadata metadata = state.getMetadata().index("test");
// Break the metadata by dropping the analyzer's tokenizer setting, so the mapping's
// analyzer reference can no longer be resolved on recovery.
final IndexMetadata.Builder brokenMeta = IndexMetadata.builder(metadata).settings(metadata.getSettings()
.filter((s) -> "index.analysis.analyzer.test.tokenizer".equals(s) == false));
restartNodesOnBrokenClusterState(ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(brokenMeta)));
// check that the cluster does not keep reallocating shards
assertBusy(() -> {
final RoutingTable routingTable = client().admin().cluster().prepareState().get().getState().routingTable();
final IndexRoutingTable indexRoutingTable = routingTable.index("test");
assertNotNull(indexRoutingTable);
for (IndexShardRoutingTable shardRoutingTable : indexRoutingTable) {
assertTrue(shardRoutingTable.primaryShard().unassigned());
assertEquals(UnassignedInfo.AllocationStatus.DECIDERS_NO,
shardRoutingTable.primaryShard().unassignedInfo().getLastAllocationStatus());
assertThat(shardRoutingTable.primaryShard().unassignedInfo().getNumFailedAllocations(), greaterThan(0));
}
}, 60, TimeUnit.SECONDS);
client().admin().indices().prepareClose("test").get();
// try to open it with the broken setting - fail again!
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> client().admin().indices().prepareOpen("test").get());
assertEquals(ex.getMessage(), "Failed to verify index " + metadata.getIndex());
assertNotNull(ex.getCause());
assertEquals(MapperParsingException.class, ex.getCause().getClass());
assertThat(ex.getCause().getMessage(), containsString("analyzer [test] not found for field [field1]"));
}
/**
 * Verifies that unknown and invalid persistent cluster settings are archived
 * (prefixed with "archived.") on state recovery instead of preventing startup,
 * and that the archived settings can then be removed via a settings update.
 */
public void testArchiveBrokenClusterSettings() throws Exception {
logger.info("--> starting one node");
internalCluster().startNode();
client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get();
logger.info("--> waiting for green status");
// Mostly run with a single node (yellow); occasionally add a second node and wait for green.
if (usually()) {
ensureYellow();
} else {
internalCluster().startNode();
client().admin().cluster()
.health(Requests.clusterHealthRequest()
.waitForGreenStatus()
.waitForEvents(Priority.LANGUID)
.waitForNoRelocatingShards(true).waitForNodes("2")).actionGet();
}
ClusterState state = client().admin().cluster().prepareState().get().getState();
final Metadata metadata = state.getMetadata();
// Inject one unknown setting and one setting with an unparsable value.
final Metadata brokenMeta = Metadata.builder(metadata).persistentSettings(Settings.builder()
.put(metadata.persistentSettings()).put("this.is.unknown", true)
.put(Metadata.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), "broken").build()).build();
restartNodesOnBrokenClusterState(ClusterState.builder(state).metadata(brokenMeta));
ensureYellow("test"); // wait for state recovery
// Both broken settings must have been archived under the "archived." prefix.
state = client().admin().cluster().prepareState().get().getState();
assertEquals("true", state.metadata().persistentSettings().get("archived.this.is.unknown"));
assertEquals("broken", state.metadata().persistentSettings().get("archived."
+ Metadata.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey()));
// delete these settings
client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder().putNull("archived.*")).get();
state = client().admin().cluster().prepareState().get().getState();
assertNull(state.metadata().persistentSettings().get("archived.this.is.unknown"));
assertNull(state.metadata().persistentSettings().get("archived."
+ Metadata.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey()));
assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/48701")
// This test relates to loading a broken state that was written by a 6.x node, but for now we do not load state from old nodes.
public void testHalfDeletedIndexImport() throws Exception {
    // It's possible for a 6.x node to add a tombstone for an index but not actually delete the index metadata from disk since that
    // deletion is slightly deferred and may race against the node being shut down; if you upgrade to 7.x when in this state then the
    // node won't start.
    internalCluster().startNode();
    createIndex("test", Settings.builder()
        .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
        .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
        .build());
    ensureGreen("test");
    final Metadata metadata = internalCluster().getInstance(ClusterService.class).state().metadata();
    final Path[] paths = internalCluster().getInstance(NodeEnvironment.class).nodeDataPaths();
    // The block below is intentionally disabled while the test awaits the fix referenced
    // above: it used to simulate the half-deleted 6.x on-disk state by writing a tombstone
    // without removing the index metadata, then deleting the manifest files.
    // writeBrokenMeta(metaStateService -> {
    //     metaStateService.writeGlobalState("test", Metadata.builder(metadata)
    //         // we remove the manifest file, resetting the term and making this look like an upgrade from 6.x, so must also reset the
    //         // term in the coordination metadata
    //         .coordinationMetadata(CoordinationMetadata.builder(metadata.coordinationMetadata()).term(0L).build())
    //         // add a tombstone but do not delete the index metadata from disk
    //         .putCustom(IndexGraveyard.TYPE, IndexGraveyard.builder().addTombstone(metadata.index("test").getIndex()).build()).build());
    //     for (final Path path : paths) {
    //         try (Stream<Path> stateFiles = Files.list(path.resolve(MetadataStateFormat.STATE_DIR_NAME))) {
    //             for (final Path manifestPath : stateFiles
    //                 .filter(p -> p.getFileName().toString().startsWith(Manifest.FORMAT.getPrefix())).collect(Collectors.toList())) {
    //                 IOUtils.rm(manifestPath);
    //             }
    //         }
    //     }
    // });
    ensureGreen();
    // After recovery the tombstoned index's folders must have been cleaned up on disk.
    assertBusy(() -> assertThat(internalCluster().getInstance(NodeEnvironment.class).availableIndexFolders(), empty()));
}
/**
 * Performs a full cluster restart, writing the given (deliberately broken) cluster
 * state to each node's on-disk Lucene persisted state while the node is stopped, so
 * that the nodes recover from that state on startup.
 *
 * @param clusterStateBuilder builder for the state to persist on every node
 */
private void restartNodesOnBrokenClusterState(ClusterState.Builder clusterStateBuilder) throws Exception {
    // Capture each node's PersistedClusterStateService BEFORE the restart, since the
    // instances are not retrievable once the node is stopped.
    Map<String, PersistedClusterStateService> lucenePersistedStateFactories = Stream.of(internalCluster().getNodeNames())
        .collect(Collectors.toMap(Function.identity(),
            nodeName -> internalCluster().getInstance(PersistedClusterStateService.class, nodeName)));
    final ClusterState clusterState = clusterStateBuilder.build();
    internalCluster().fullRestart(new RestartCallback(){
        @Override
        public Settings onNodeStopped(String nodeName) throws Exception {
            // Overwrite the stopped node's on-disk state with the broken one.
            final PersistedClusterStateService lucenePersistedStateFactory = lucenePersistedStateFactories.get(nodeName);
            try (PersistedClusterStateService.Writer writer = lucenePersistedStateFactory.createWriter()) {
                writer.writeFullStateAndCommit(clusterState.term(), clusterState);
            }
            return super.onNodeStopped(nodeName);
        }
    });
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.btree;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.dataflow.common.utils.TupleUtils;
import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Exercises {@link ISearchOperationCallback} semantics by interleaving a search task and
 * an insertion task on the same index. The two tasks coordinate through a single fair
 * lock/condition pair: the searcher blocks inside the callback's {@code reconcile} at
 * specific key values, the inserter mutates the index while the searcher is parked, and
 * the searcher then verifies what it observes after resuming.
 */
public abstract class AbstractSearchOperationCallbackTest extends AbstractOperationCallbackTest {
    // One searcher + one inserter.
    private static final int NUM_TASKS = 2;
    protected final Lock lock;
    protected final Condition condition;
    protected ExecutorService executor;
    // Set by InsertionTask under the lock; SearchTask waits until it is true.
    protected boolean insertTaskStarted;

    public AbstractSearchOperationCallbackTest() {
        // Fair lock so the two tasks alternate deterministically on signal/await.
        this.lock = new ReentrantLock(true);
        this.condition = lock.newCondition();
        this.insertTaskStarted = false;
    }

    @Override
    @Before
    public void setup() throws Exception {
        executor = Executors.newFixedThreadPool(NUM_TASKS);
        super.setup();
    }

    @Override
    @After
    public void tearDown() throws Exception {
        executor.shutdown();
        super.tearDown();
    }

    @Test
    public void searchCallbackTest() throws Exception {
        // Both tasks return true on success; get() propagates any assertion failure.
        Future<Boolean> insertFuture = executor.submit(new InsertionTask());
        Future<Boolean> searchFuture = executor.submit(new SearchTask());
        Assert.assertTrue(searchFuture.get());
        Assert.assertTrue(insertFuture.get());
    }

    /**
     * Scans the index from key 101 upward, deliberately blocking inside the search
     * callback at keys 101 and 151 so the inserter can modify the index mid-scan.
     */
    private class SearchTask implements Callable<Boolean> {
        private final ISearchOperationCallback cb;
        private final IIndexAccessor accessor;
        private final IIndexCursor cursor;
        private final RangePredicate predicate;
        private final ArrayTupleBuilder builder;
        private final ArrayTupleReference tuple;
        // State shared with the callback: whether the next reconcile should block,
        // the key we block on, and the key we expect to see after waking up.
        private boolean blockOnHigh;
        private int blockingValue;
        private int expectedAfterBlock;

        public SearchTask() throws HyracksDataException {
            this.cb = new SynchronizingSearchOperationCallback();
            this.accessor = index.createAccessor(NoOpOperationCallback.INSTANCE, cb);
            this.cursor = accessor.createSearchCursor(false);
            this.predicate = new RangePredicate();
            this.builder = new ArrayTupleBuilder(NUM_KEY_FIELDS);
            this.tuple = new ArrayTupleReference();
            this.blockOnHigh = false;
            this.blockingValue = -1;
            this.expectedAfterBlock = -1;
        }

        @Override
        public Boolean call() throws Exception {
            lock.lock();
            try {
                // Wait for the inserter to seed the index with [101, 200].
                if (!insertTaskStarted) {
                    condition.await();
                }
                // begin a search on [101, +inf), blocking on 101
                TupleUtils.createIntegerTuple(builder, tuple, 101);
                predicate.setLowKey(tuple, true);
                predicate.setHighKey(null, true);
                accessor.search(cursor, predicate);
                consumeIntTupleRange(101, 101, true, 101);
                // consume tuples [102, 152], blocking on 151
                consumeIntTupleRange(102, 151, true, 152);
                // consume tuples [153, 300]
                consumeIntTupleRange(153, 300, false, -1);
                cursor.close();
            } finally {
                lock.unlock();
            }
            return true;
        }

        /**
         * Consumes keys [begin, end] from the cursor, comparing each against the
         * expected value. When blockOnHigh is set, arms the callback to block when it
         * reconciles on {@code end}; after the block the cursor is expected to resume
         * at {@code expectedAfterBlock}.
         */
        private void consumeIntTupleRange(int begin, int end, boolean blockOnHigh, int expectedAfterBlock)
                throws Exception {
            if (end < begin) {
                throw new IllegalArgumentException("Invalid range: [" + begin + ", " + end + "]");
            }
            for (int i = begin; i <= end; i++) {
                // Arm the callback just before reading the last key of the range.
                if (blockOnHigh == true && i == end) {
                    this.blockOnHigh = true;
                    this.blockingValue = end;
                    this.expectedAfterBlock = expectedAfterBlock;
                }
                TupleUtils.createIntegerTuple(builder, tuple, i);
                if (!cursor.hasNext()) {
                    Assert.fail("Failed to consume entire tuple range since cursor is exhausted.");
                }
                cursor.next();
                // If the callback blocked (and possibly the blocked-on key changed under
                // us), compare against the post-block expectation instead of i.
                if (this.blockOnHigh) {
                    TupleUtils.createIntegerTuple(builder, tuple, expectedAfterBlock);
                }
                Assert.assertEquals(0, cmp.compare(tuple, cursor.getTuple()));
            }
        }

        /**
         * Callback that hands control to the inserter (signal + await) whenever the
         * search reconciles on an armed key.
         */
        private class SynchronizingSearchOperationCallback implements ISearchOperationCallback {
            @Override
            public boolean proceed(ITupleReference tuple) throws HyracksDataException {
                Assert.assertEquals(0, cmp.compare(SearchTask.this.tuple, tuple));
                // Always force the reconcile path.
                return false;
            }

            @Override
            public void reconcile(ITupleReference tuple) throws HyracksDataException {
                Assert.assertEquals(0, cmp.compare(SearchTask.this.tuple, tuple));
                if (blockOnHigh) {
                    try {
                        // Pre-load the expectation for when the search resumes.
                        TupleUtils.createIntegerTuple(builder, SearchTask.this.tuple, expectedAfterBlock);
                    } catch (HyracksDataException e) {
                        e.printStackTrace();
                    }
                    // Wake the inserter and park until it signals back.
                    condition.signal();
                    condition.awaitUninterruptibly();
                    blockOnHigh = false;
                }
            }

            @Override
            public void cancel(ITupleReference tuple) {
                // The reconciled key was deleted while we were blocked (key 151):
                // verify it is indeed the blocked-on key, then restore the post-block
                // expectation for the next tuple the cursor yields.
                try {
                    TupleUtils.createIntegerTuple(builder, SearchTask.this.tuple, blockingValue);
                    Assert.assertEquals(0, cmp.compare(tuple, SearchTask.this.tuple));
                    TupleUtils.createIntegerTuple(builder, SearchTask.this.tuple, expectedAfterBlock);
                } catch (HyracksDataException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void complete(ITupleReference tuple) throws HyracksDataException {
            }

            @Override
            public void before(ITupleReference tuple) throws HyracksDataException {
            }
        }
    }

    /**
     * Inserts three batches of keys, yielding to the searcher between batches, and
     * deletes key 151 while the searcher is blocked on it.
     */
    private class InsertionTask implements Callable<Boolean> {
        private final IIndexAccessor accessor;
        private final ArrayTupleBuilder builder;
        private final ArrayTupleReference tuple;

        public InsertionTask() throws HyracksDataException {
            this.accessor = index.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
            this.builder = new ArrayTupleBuilder(NUM_KEY_FIELDS);
            this.tuple = new ArrayTupleReference();
        }

        @Override
        public Boolean call() throws Exception {
            lock.lock();
            try {
                insertTaskStarted = true;
                // insert tuples [101, 200]
                insertIntTupleRange(101, 200);
                condition.signal();
                condition.await();
                // insert tuples [1, 100]
                insertIntTupleRange(1, 100);
                condition.signal();
                condition.await();
                // insert tuples [201, 300] and delete tuple 151
                insertIntTupleRange(201, 300);
                TupleUtils.createIntegerTuple(builder, tuple, 151);
                accessor.delete(tuple);
                condition.signal();
            } finally {
                lock.unlock();
            }
            return true;
        }

        private void insertIntTupleRange(int begin, int end) throws Exception {
            if (end < begin) {
                throw new IllegalArgumentException("Invalid range: [" + begin + ", " + end + "]");
            }
            for (int i = begin; i <= end; i++) {
                TupleUtils.createIntegerTuple(builder, tuple, i);
                accessor.insert(tuple);
            }
        }
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.curator.shaded.com.google.common.collect.ConcurrentHashMultiset;
import org.apache.curator.shaded.com.google.common.collect.Multiset;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.ProtobufCoprocessorService;
import org.apache.hadoop.hbase.ipc.DelegatingHBaseRpcController;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
/**
 * Verifies that a custom {@link RpcControllerFactory} configured via
 * {@code CUSTOM_CONTROLLER_CONF_KEY} is actually used by client RPCs, by counting how
 * often the controller's priority is set, and that an unresolvable factory class falls
 * back to the default factory.
 */
@Category({MediumTests.class, ClientTests.class})
public class TestRpcControllerFactory {

    /** Factory that wraps every controller it creates in a {@link CountingRpcController}. */
    public static class StaticRpcControllerFactory extends RpcControllerFactory {
        public StaticRpcControllerFactory(Configuration conf) {
            super(conf);
        }

        @Override
        public HBaseRpcController newController() {
            return new CountingRpcController(super.newController());
        }

        @Override
        public HBaseRpcController newController(final CellScanner cellScanner) {
            return new CountingRpcController(super.newController(cellScanner));
        }

        @Override
        public HBaseRpcController newController(final List<CellScannable> cellIterables) {
            return new CountingRpcController(super.newController(cellIterables));
        }
    }

    /** Controller that counts priority changes; counters are static so tests can read them. */
    public static class CountingRpcController extends DelegatingHBaseRpcController {

        // Multiset of the raw int priorities that were actually applied.
        private static Multiset<Integer> GROUPED_PRIORITY = ConcurrentHashMultiset.create();
        private static AtomicInteger INT_PRIORITY = new AtomicInteger();
        private static AtomicInteger TABLE_PRIORITY = new AtomicInteger();

        public CountingRpcController(HBaseRpcController delegate) {
            super(delegate);
        }

        @Override
        public void setPriority(int priority) {
            int oldPriority = getPriority();
            super.setPriority(priority);
            int newPriority = getPriority();
            // Only count calls that actually changed the effective priority.
            if (newPriority != oldPriority) {
                INT_PRIORITY.incrementAndGet();
                GROUPED_PRIORITY.add(priority);
            }
        }

        @Override
        public void setPriority(TableName tn) {
            super.setPriority(tn);
            // ignore counts for system tables - it could change and we really only want to check on what
            // the client should change
            if (tn != null && !tn.isSystemTable()) {
                TABLE_PRIORITY.incrementAndGet();
            }
        }
    }

    private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

    @Rule
    public TestName name = new TestName();

    @BeforeClass
    public static void setup() throws Exception {
        // load an endpoint so we have an endpoint to test - it doesn't matter which one, but
        // this is already in tests, so we can just use it.
        Configuration conf = UTIL.getConfiguration();
        conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
            ProtobufCoprocessorService.class.getName());
        UTIL.startMiniCluster();
    }

    @AfterClass
    public static void teardown() throws Exception {
        UTIL.shutdownMiniCluster();
    }

    /**
     * check some of the methods and make sure we are incrementing each time. It's a bit tedious to
     * cover all methods here and really is a bit brittle since we can always add new methods but
     * won't be sure to add them here. So we just can cover the major ones.
     * @throws Exception on failure
     */
    @Test
    public void testCountController() throws Exception {
        Configuration conf = new Configuration(UTIL.getConfiguration());
        // setup our custom controller
        conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY,
            StaticRpcControllerFactory.class.getName());
        final TableName tableName = TableName.valueOf(name.getMethodName());
        UTIL.createTable(tableName, fam1).close();
        // change one of the connection properties so we get a new Connection with our configuration
        conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT + 1);
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(tableName);
        byte[] row = Bytes.toBytes("row");
        Put p = new Put(row);
        p.addColumn(fam1, fam1, Bytes.toBytes("val0"));
        table.put(p);
        Integer counter = 1;
        counter = verifyCount(counter);
        Delete d = new Delete(row);
        d.addColumn(fam1, fam1);
        table.delete(d);
        counter = verifyCount(counter);
        Put p2 = new Put(row);
        p2.addColumn(fam1, Bytes.toBytes("qual"), Bytes.toBytes("val1"));
        table.batch(Lists.newArrayList(p, p2), null);
        // this only goes to a single server, so we don't need to change the count here
        counter = verifyCount(counter);
        Append append = new Append(row);
        append.addColumn(fam1, fam1, Bytes.toBytes("val2"));
        table.append(append);
        counter = verifyCount(counter);
        // and check the major lookup calls as well
        Get g = new Get(row);
        table.get(g);
        counter = verifyCount(counter);
        ResultScanner scan = table.getScanner(fam1);
        scan.next();
        scan.close();
        // a scan open + close touches the controller twice
        counter = verifyCount(counter + 1);
        Get g2 = new Get(row);
        table.get(Lists.newArrayList(g, g2));
        // same server, so same as above for not changing count
        counter = verifyCount(counter);
        // make sure all the scanner types are covered
        Scan scanInfo = new Scan(row);
        // regular small
        scanInfo.setSmall(true);
        counter = doScan(table, scanInfo, counter);
        // reversed, small
        scanInfo.setReversed(true);
        counter = doScan(table, scanInfo, counter);
        // reversed, regular
        scanInfo.setSmall(false);
        counter = doScan(table, scanInfo, counter + 1);
        // make sure we have no priority count
        verifyPriorityGroupCount(HConstants.ADMIN_QOS, 0);
        // lets set a custom priority on a get
        Get get = new Get(row);
        get.setPriority(HConstants.ADMIN_QOS);
        table.get(get);
        verifyPriorityGroupCount(HConstants.ADMIN_QOS, 1);
        table.close();
        connection.close();
    }

    /** Runs the given scan and verifies the table-priority count afterwards. */
    int doScan(Table table, Scan scan, int expectedCount) throws IOException {
        ResultScanner results = table.getScanner(scan);
        results.next();
        results.close();
        return verifyCount(expectedCount);
    }

    /**
     * Asserts that table-based priority has been set at least {@code counter} times and
     * that no raw int-priority changes occurred; returns the next expected count.
     */
    int verifyCount(Integer counter) {
        assertTrue(CountingRpcController.TABLE_PRIORITY.get() >= counter);
        assertEquals(0, CountingRpcController.INT_PRIORITY.get());
        return CountingRpcController.TABLE_PRIORITY.get() + 1;
    }

    void verifyPriorityGroupCount(int priorityLevel, int count) {
        assertEquals(count, CountingRpcController.GROUPED_PRIORITY.count(priorityLevel));
    }

    @Test
    public void testFallbackToDefaultRpcControllerFactory() {
        Configuration conf = new Configuration(UTIL.getConfiguration());
        conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY, "foo.bar.Baz");
        // Should not fail
        RpcControllerFactory factory = RpcControllerFactory.instantiate(conf);
        assertNotNull(factory);
        // JUnit convention: expected value first, actual second (was reversed before).
        assertEquals(RpcControllerFactory.class, factory.getClass());
    }
}
|
|
/******************************************************************************
* Copyright 2016-2018 Octavio Calleya *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
******************************************************************************/
package org.docopt;
import java.util.*;
import static org.docopt.Python.*;
/**
* Leaf/terminal node of a pattern tree.
*/
/**
 * Leaf/terminal node of a pattern tree.
 *
 * <p>A leaf pattern has a name and an optional value; matching consumes at most one
 * entry from the input list and accumulates results into the collected list, merging
 * counts/lists for repeated occurrences of the same name.
 */
abstract class LeafPattern extends Pattern {

    /** Result of {@link #singleMatch}: the position consumed and the matched leaf (or null). */
    static class SingleMatchResult {
        private final Integer position;
        private final LeafPattern match;

        public SingleMatchResult(final Integer position, final LeafPattern match) {
            this.position = position;
            this.match = match;
        }

        public Integer getPosition() {
            return position;
        }

        public LeafPattern getMatch() {
            return match;
        }

        @Override
        public String toString() {
            return String.format("%s(%d, %s)", getClass().getSimpleName(),
                position, match);
        }
    }

    private final String name;
    // Mutable: incremented (Integer) or appended-to (List) as repeated matches accumulate.
    private Object value;

    public LeafPattern(final String name, final Object value) {
        this.name = name;
        this.value = value;
    }

    public LeafPattern(final String name) {
        this(name, null);
    }

    @Override
    public String toString() {
        return String.format("%s(%s, %s)", getClass().getSimpleName(),
            repr(name), repr(value));
    }

    /** A leaf flattens to itself when no type filter is given or its type is listed. */
    @Override
    protected final List<Pattern> flat(final Class<?>... types) {
        // >>> [self] if not types or type(self) in types else []
        {
            if (!bool(types) || in(getClass(), types)) {
                return list((Pattern) this);
            }
            return list();
        }
    }

    /**
     * Attempts to match this leaf against {@code left}, returning a result whose left
     * list has the matched entry removed and whose collected list includes/merges the
     * match. Integer-valued patterns count occurrences; List-valued patterns append.
     */
    @Override
    protected MatchResult match(final List<LeafPattern> left,
            List<LeafPattern> collected) {
        // >>> collected = [] if collected is None else collected
        if (collected == null) {
            collected = list();
        }
        Integer pos;
        LeafPattern match;
        // >>> pos, match = self.single_match(left)
        {
            final SingleMatchResult m = singleMatch(left);
            pos = m.getPosition();
            match = m.getMatch();
        }
        if (match == null) {
            return new MatchResult(false, left, collected);
        }
        List<LeafPattern> left_;
        // >>> left_ = left[:pos] + left[pos + 1:]
        {
            left_ = list();
            left_.addAll(left.subList(0, pos));
            if ((pos + 1) < left.size()) {
                left_.addAll(left.subList(pos + 1, left.size()));
            }
        }
        List<LeafPattern> sameName;
        // >>> same_name = [a for a in collected if a.name == self.name]
        {
            sameName = list();
            for (final LeafPattern a : collected) {
                if (name.equals(a.getName())) {
                    sameName.add(a);
                }
            }
        }
        Object increment;
        if ((value instanceof Integer) || (value instanceof List)) {
            if (value instanceof Integer) {
                // Counting occurrences: each match adds one.
                increment = 1;
            }
            else {
                // Accumulating values: wrap a single String as a one-element list.
                final Object v = match.getValue();
                increment = (v instanceof String) ? list(v) : v;
            }
            if (sameName.isEmpty()) {
                match.setValue(increment);
                return new MatchResult(true, left_,
                    plus(collected, list(match)));
            }
            // >>> same_name[0].value += increment
            {
                final LeafPattern p = sameName.get(0);
                final Object v = p.getValue();
                if (v instanceof Integer) {
                    final Integer a = (Integer) v;
                    final Integer b = (Integer) increment;
                    p.setValue(a + b);
                }
                else if (v instanceof List) {
                    @SuppressWarnings("unchecked")
                    final List<LeafPattern> a = (List<LeafPattern>) v;
                    @SuppressWarnings("unchecked")
                    final List<LeafPattern> b = (List<LeafPattern>) increment;
                    a.addAll(b);
                }
            }
            // TODO: Should collected be copied to a new list?
            return new MatchResult(true, left_, collected);
        }
        return new MatchResult(true, left_, plus(collected, list(match)));
    }

    protected abstract SingleMatchResult singleMatch(List<LeafPattern> left);

    public String getName() {
        return name;
    }

    public Object getValue() {
        return value;
    }

    public void setValue(final Object value) {
        this.value = value;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + ((name == null) ? 0 : name.hashCode());
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        return result;
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        // Per the Object.equals contract, x.equals(null) must return false; the
        // previous version threw NullPointerException at obj.getClass() here.
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final LeafPattern other = (LeafPattern) obj;
        if (name == null) {
            if (other.name != null) {
                return false;
            }
        }
        else if (!name.equals(other.name)) {
            return false;
        }
        if (value == null) {
            if (other.value != null) {
                return false;
            }
        }
        else if (!value.equals(other.value)) {
            return false;
        }
        return true;
    }
}
|
|
package org.ethereum.core;
import org.ethereum.config.Constants;
import org.ethereum.config.SystemProperties;
import org.ethereum.crypto.HashUtil;
import org.ethereum.db.BlockStore;
import org.ethereum.listener.EthereumListener;
import org.ethereum.manager.AdminInfo;
import org.ethereum.net.BlockQueue;
import org.ethereum.net.server.ChannelManager;
import org.ethereum.trie.Trie;
import org.ethereum.trie.TrieImpl;
import org.ethereum.util.AdvancedDeviceUtils;
import org.ethereum.util.RLP;
import org.ethereum.vm.ProgramInvokeFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.util.encoders.Hex;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import org.springframework.util.FileSystemUtils;
import javax.annotation.Resource;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.*;
import static java.lang.Runtime.getRuntime;
import static org.ethereum.config.Constants.*;
import static org.ethereum.config.SystemProperties.CONFIG;
import static org.ethereum.core.Denomination.SZABO;
import static org.ethereum.core.ImportResult.*;
import static org.ethereum.util.BIUtil.isMoreThan;
/**
* The Ethereum blockchain is in many ways similar to the Bitcoin blockchain,
* although it does have some differences.
* <p>
* The main difference between Ethereum and Bitcoin with regard to the blockchain architecture
* is that, unlike Bitcoin, Ethereum blocks contain a copy of both the transaction list
* and the most recent state. Aside from that, two other values, the block number and
* the difficulty, are also stored in the block.
* </p>
* The block validation algorithm in Ethereum is as follows:
* <ol>
* <li>Check if the previous block referenced exists and is valid.</li>
* <li>Check that the timestamp of the block is greater than that of the referenced previous block and less than 15 minutes into the future</li>
* <li>Check that the block number, difficulty, transaction root, uncle root and gas limit (various low-level Ethereum-specific concepts) are valid.</li>
* <li>Check that the proof of work on the block is valid.</li>
* <li>Let S[0] be the STATE_ROOT of the previous block.</li>
* <li>Let TX be the block's transaction list, with n transactions.
 * For all i in 0...n-1, set S[i+1] = APPLY(S[i],TX[i]).
 * If any application returns an error, or if the total gas consumed in the block
* up until this point exceeds the GASLIMIT, return an error.</li>
* <li>Let S_FINAL be S[n], but adding the block reward paid to the miner.</li>
* <li>Check if S_FINAL is the same as the STATE_ROOT. If it is, the block is valid; otherwise, it is not valid.</li>
* </ol>
* See <a href="https://github.com/ethereum/wiki/wiki/White-Paper#blockchain-and-mining">Ethereum Whitepaper</a>
*
* @author Roman Mandeleil
* @author Nick Savers
* @since 20.05.2014
*/
@Component
public class BlockchainImpl implements Blockchain, org.ethereum.facade.Blockchain {
private static final Logger logger = LoggerFactory.getLogger("blockchain");
private static final Logger stateLogger = LoggerFactory.getLogger("state");

// to avoid using minGasPrice=0 from Genesis for the wallet
private static final long INITIAL_MIN_GAS_PRICE = 10 * SZABO.longValue();

// Transactions received from the network that are not yet included in a block.
@Resource
@Qualifier("pendingTransactions")
private Set<Transaction> pendingTransactions = new HashSet<>();

// World-state repository; swapped temporarily when evaluating a fork branch.
@Autowired
private Repository repository;
// Per-block change tracker obtained from repository.startTracking() in add().
private Repository track;

@Autowired
private BlockStore blockStore;

// Head of the chain this instance currently considers the main branch.
private Block bestBlock;
// Cumulative difficulty of the current main branch.
private BigInteger totalDifficulty = BigInteger.ZERO;

@Autowired
Wallet wallet;

@Autowired
private EthereumListener listener;

@Autowired
private BlockQueue blockQueue;

@Autowired
private ChannelManager channelManager;

// Ensures listener.onSyncDone() fires at most once.
private boolean syncDoneCalled = false;

@Autowired
ProgramInvokeFactory programInvokeFactory;

@Autowired
private AdminInfo adminInfo;

private List<Chain> altChains = new ArrayList<>();
private List<Block> garbage = new ArrayList<>();

// When set, the process exits once a block with a higher number is added (test hook).
long exitOn = Long.MAX_VALUE;
// Test hook: when true, add()/rebranch skip flushing to disk.
public boolean byTest = false;
/** Default constructor used by the DI container; dependencies are field-injected. */
public BlockchainImpl() {
}

//todo: autowire over constructor
/** Explicit-dependency constructor, mainly for tests that bypass the container. */
public BlockchainImpl(BlockStore blockStore, Repository repository,
                      Wallet wallet, AdminInfo adminInfo,
                      EthereumListener listener) {
    this.blockStore = blockStore;
    this.repository = repository;
    this.wallet = wallet;
    this.adminInfo = adminInfo;
    this.listener = listener;
}
/** Returns the hash of the current best (head) block. */
@Override
public byte[] getBestBlockHash() {
    return getBestBlock().getHash();
}
/** Returns the chain length: best block number plus one (block numbers start at 0). */
@Override
public long getSize() {
    return bestBlock.getNumber() + 1;
}
/** Returns the main-chain block with the given number, delegating to the block store. */
@Override
public Block getBlockByNumber(long blockNr) {
    return blockStore.getChainBlockByNumber(blockNr);
}
/** Not yet implemented; always throws {@link UnsupportedOperationException}. */
@Override
public TransactionReceipt getTransactionReceiptByHash(byte[] hash) {
    throw new UnsupportedOperationException("TODO: will be implemented soon "); // FIXME: go and fix me
}
/** Returns the block with the given hash, delegating to the block store. */
@Override
public Block getBlockByHash(byte[] hash) {
    return blockStore.getBlockByHash(hash);
}
/** Returns up to {@code qty} block hashes ending with the given hash, via the block store. */
@Override
public List<byte[]> getListOfHashesStartFrom(byte[] hash, int qty) {
    return blockStore.getListHashesEndWith(hash, qty);
}
/**
 * Computes the root hash of the transaction trie for a block: keys are the
 * RLP-encoded transaction indices, values the RLP-encoded transactions.
 *
 * @param transactions transactions of the block; may be null or empty
 * @return the trie root hash, or {@code HashUtil.EMPTY_TRIE_HASH} when there are none
 */
private byte[] calcTxTrie(List<Transaction> transactions) {
    // Guard first: the previous version allocated a TrieImpl before this check and
    // then discarded it unused on the empty path.
    if (transactions == null || transactions.isEmpty())
        return HashUtil.EMPTY_TRIE_HASH;
    Trie txsState = new TrieImpl(null);
    for (int i = 0; i < transactions.size(); i++) {
        txsState.update(RLP.encodeInt(i), transactions.get(i).getEncoded());
    }
    return txsState.getRootHash();
}
/**
 * Connects a block whose parent is not the current best block by temporarily
 * switching this blockchain onto the fork branch, adding the block there, and then
 * either re-branching (if the fork's total difficulty exceeds the saved main branch)
 * or restoring the saved main-branch state.
 *
 * @param block block whose parent exists in the store but is not the current head
 * @return {@code IMPORTED_BEST} if the fork became the main branch,
 *         {@code IMPORTED_NOT_BEST} otherwise
 */
public ImportResult tryConnectAndFork(Block block) {
    // Save the main-branch state so it can be restored if the fork loses.
    Repository savedRepo = this.repository;
    Block savedBest = this.bestBlock;
    BigInteger savedTD = this.totalDifficulty;

    // Rewind this instance onto the fork point (the new block's parent).
    this.bestBlock = blockStore.getBlockByHash(block.getParentHash());
    totalDifficulty = blockStore.getTotalDifficultyForHash(block.getParentHash());
    this.repository = this.repository.getSnapshotTo(this.bestBlock.getStateRoot());

    try {
        // FIXME: adding block with no option for flush
        add(block);
    } catch (Throwable th) {
        // Log through the framework instead of printStackTrace(); import continues so
        // the difficulty comparison below decides the outcome. TODO: rethrow/return a
        // failure result instead of best-effort logging.
        logger.error("Failed to add block {} on fork branch", block.getNumber(), th);
    }

    if (isMoreThan(this.totalDifficulty, savedTD)) {
        logger.info("Rebranching: {} ~> {}", savedBest.getShortHash(), block.getShortHash());

        // main branch become this branch
        // cause we proved that total difficulty
        // is greater
        blockStore.reBranch(block);

        // The main repository rebranch
        this.repository = savedRepo;
        this.repository.syncToRoot(block.getStateRoot());

        // flushing (skipped under test to keep runs fast and deterministic)
        if (!byTest) {
            repository.flush();
            blockStore.flush();
            System.gc();
        }

        return IMPORTED_BEST;
    } else {
        // Stay on previous branch
        this.repository = savedRepo;
        this.bestBlock = savedBest;
        this.totalDifficulty = savedTD;

        return IMPORTED_NOT_BEST;
    }
}
/**
 * Attempts to import a block, classifying the outcome: already known ({@code EXIST}),
 * direct child of the head ({@code IMPORTED_BEST}), child of a known non-head block
 * (fork import), or orphan ({@code NO_PARENT}).
 *
 * @param block candidate block received from the network or a test
 * @return the import result for this block
 */
public ImportResult tryToConnect(Block block) {
    if (logger.isInfoEnabled())
        logger.info("Try connect block hash: {}, number: {}",
            Hex.toHexString(block.getHash()).substring(0, 6),
            block.getNumber());
    // Cheap max-number check first; only then the (more expensive) existence lookup.
    if (blockStore.getMaxNumber() >= block.getNumber() &&
        blockStore.isBlockExist(block.getHash())) {
        if (logger.isDebugEnabled())
            logger.debug("Block already exist hash: {}, number: {}",
                Hex.toHexString(block.getHash()).substring(0, 6),
                block.getNumber());
        // retry of well known block
        return EXIST;
    }
    // The simple case got the block
    // to connect to the main chain
    if (bestBlock.isParentOf(block)) {
        add(block);
        recordBlock(block);
        return IMPORTED_BEST;
    } else {
        // Parent known but not the head: evaluate as a fork branch.
        if (blockStore.isBlockExist(block.getParentHash())) {
            ImportResult result = tryConnectAndFork(block);
            if (result == IMPORTED_BEST || result == IMPORTED_NOT_BEST) recordBlock(block);
            return result;
        }
    }
    return NO_PARENT;
}
/**
 * Validates and executes a block on top of the current best block, commits the
 * resulting state, stores the block with its receipts, and notifies listeners.
 * Silently returns when the block is null, invalid, or does not continue the chain.
 *
 * @param block block to append to the current main branch
 */
@Override
public void add(Block block) {
    // Guard against null FIRST. The previous version dereferenced the block in
    // exitOn/isValid checks before its null check, making that check dead code.
    if (block == null)
        return;

    // Test hook: terminate the process once the configured block number is passed.
    if (exitOn < block.getNumber()) {
        System.out.print("Exiting after block.number: " + getBestBlock().getNumber());
        System.exit(-1);
    }

    if (!isValid(block)) {
        logger.warn("Invalid block with number: {}", block.getNumber());
        return;
    }

    track = repository.startTracking();

    // keep chain continuity
    // NOTE(review): returning here leaves `track` neither committed nor rolled back —
    // looks like a leak of the tracking repository; confirm intended behavior.
    if (!Arrays.equals(getBestBlock().getHash(),
        block.getParentHash())) return;

    if (block.getNumber() >= CONFIG.traceStartBlock() && CONFIG.traceStartBlock() != -1) {
        AdvancedDeviceUtils.adjustDetailedTracing(block.getNumber());
    }

    List<TransactionReceipt> receipts = processBlock(block);

    // Sanity checks: mismatches are logged but (for now) do not abort the import.
    String receiptHash = Hex.toHexString(block.getReceiptsRoot());
    String receiptListHash = Hex.toHexString(calcReceiptsTrie(receipts));
    if (!receiptHash.equals(receiptListHash)) {
        logger.error("Block's given Receipt Hash doesn't match: {} != {}", receiptHash, receiptListHash);
        //return false;
    }

    String logBloomHash = Hex.toHexString(block.getLogBloom());
    String logBloomListHash = Hex.toHexString(calcLogBloom(receipts));
    if (!logBloomHash.equals(logBloomListHash)) {
        logger.error("Block's given logBloom Hash doesn't match: {} != {}", logBloomHash, logBloomListHash);
        //track.rollback();
        //return;
    }

    track.commit();
    storeBlock(block, receipts);

    if (needFlush(block)) {
        repository.flush();
        blockStore.flush();
        System.gc();
    }

    // Remove all wallet transactions as they already approved by the net
    wallet.removeTransactions(block.getTransactionsList());

    // Clear pending transaction from the mem
    clearPendingTransactions(block.getTransactionsList());

    listener.trace(String.format("Block chain size: [ %d ]", this.getSize()));
    listener.onBlock(block, receipts);

    // Fire onSyncDone exactly once, when the download queue drains.
    if (blockQueue != null &&
        blockQueue.size() == 0 &&
        !syncDoneCalled) {
        logger.info("Sync done");
        syncDoneCalled = true;
        listener.onSyncDone();
    }
}
/**
 * Decides whether caches should be flushed after adding {@code block}.
 * First configured trigger wins: explicit memory limit, then block
 * interval, then the default 70%-of-heap heuristic.
 */
private boolean needFlush(Block block) {
    if (CONFIG.cacheFlushMemory() > 0)
        return needFlushByMemory(CONFIG.cacheFlushMemory());
    if (CONFIG.cacheFlushBlocks() > 0)
        return block.getNumber() % CONFIG.cacheFlushBlocks() == 0;
    return needFlushByMemory(.7);
}
/**
 * True once free heap drops below (1 - maxMemoryPercents) of total heap,
 * i.e. once roughly maxMemoryPercents of the heap is in use.
 */
private boolean needFlushByMemory(double maxMemoryPercents) {
    double freeFraction = 1 - maxMemoryPercents;
    return getRuntime().freeMemory() < getRuntime().totalMemory() * freeFraction;
}
/**
 * Builds the receipts trie root: each receipt is keyed by its RLP-encoded
 * position in the block. Empty/absent receipts yield the empty-trie hash.
 */
private byte[] calcReceiptsTrie(List<TransactionReceipt> receipts) {
    //TODO Fix Trie hash for receipts - doesnt match cpp
    Trie trie = new TrieImpl(null);
    if (receipts == null || receipts.isEmpty())
        return HashUtil.EMPTY_TRIE_HASH;
    int index = 0;
    for (TransactionReceipt receipt : receipts) {
        trie.update(RLP.encodeInt(index), receipt.getEncoded());
        index++;
    }
    return trie.getRootHash();
}
/**
 * ORs every receipt's bloom filter into one block-level bloom.
 * A null/empty receipt list yields the empty bloom's data.
 */
private byte[] calcLogBloom(List<TransactionReceipt> receipts) {
    Bloom combined = new Bloom();
    if (receipts != null) {
        for (TransactionReceipt receipt : receipts) {
            combined.or(receipt.getBloomFilter());
        }
    }
    return combined.getData();
}
/** Looks up the parent block of {@code header} in the block store (null if unknown). */
public Block getParent(BlockHeader header) {
    return blockStore.getBlockByHash(header.getParentHash());
}
/**
 * Validates a block header against its parent: number continuity, gas
 * usage vs. limit, minimum difficulty, and (outside the frontier genesis)
 * gas-limit bounds.
 *
 * @return true when the header passes all checks; false (with a log line)
 *         otherwise, including when the parent block is unknown.
 */
public boolean isValid(BlockHeader header) {
    Block parentBlock = getParent(header);
    // A missing parent (e.g. an orphaned uncle header) previously caused an
    // NPE below; treat it as an invalid header instead.
    if (parentBlock == null) {
        logger.error("Block invalid: no parent block found for number: {}", header.getNumber());
        return false;
    }
    BigInteger parentDifficulty = parentBlock.getDifficultyBI();
    long parentTimestamp = parentBlock.getTimestamp();
    // Difficulty homeostasis: a fast block raises the floor, a slow one lowers it.
    BigInteger minDifficulty = header.getTimestamp() >= parentTimestamp + DURATION_LIMIT ?
            parentBlock.getDifficultyBI().subtract(parentDifficulty.divide(BigInteger.valueOf(Constants.DIFFICULTY_BOUND_DIVISOR))) :
            parentBlock.getDifficultyBI().add(parentDifficulty.divide(BigInteger.valueOf(Constants.DIFFICULTY_BOUND_DIVISOR)));
    BigInteger difficulty = new BigInteger(1, header.getDifficulty());
    if (header.getNumber() != (parentBlock.getNumber() + 1)) {
        logger.error("Block invalid: block number is not parentBlock number + 1, ");
        return false;
    }
    if (header.getGasLimit() < header.getGasUsed()) {
        logger.error("Block invalid: header.getGasLimit() < header.getGasUsed()");
        return false;
    }
    // compareTo only guarantees the sign, not -1/0/1 — compare against 0.
    if (difficulty.compareTo(minDifficulty) < 0) {
        logger.error("Block invalid: difficulty < minDifficulty");
        return false;
    }
    if (!CONFIG.genesisInfo().contains("frontier"))
        if (header.getGasLimit() < MIN_GAS_LIMIT) {
            logger.error("Block invalid: header.getGasLimit() < MIN_GAS_LIMIT");
            return false;
        }
    if (header.getExtraData() != null && header.getExtraData().length > MAXIMUM_EXTRA_DATA_SIZE) {
        logger.error("Block invalid: header.getExtraData().length > MAXIMUM_EXTRA_DATA_SIZE");
        return false;
    }
    if (!CONFIG.genesisInfo().contains("frontier"))
        if (header.getGasLimit() < Constants.MIN_GAS_LIMIT ||
                header.getGasLimit() < parentBlock.getGasLimit() * (GAS_LIMIT_BOUND_DIVISOR - 1) / GAS_LIMIT_BOUND_DIVISOR ||
                header.getGasLimit() > parentBlock.getGasLimit() * (GAS_LIMIT_BOUND_DIVISOR + 1) / GAS_LIMIT_BOUND_DIVISOR) {
            logger.error("Block invalid: gas limit exceeds parentBlock.getGasLimit() (+-) GAS_LIMIT_BOUND_DIVISOR");
            return false;
        }
    return true;
}
/**
 * This mechanism enforces a homeostasis in terms of the time between blocks;
 * a smaller period between the last two blocks results in an increase in the
 * difficulty level and thus additional computation required, lengthening the
 * likely next period. Conversely, if the period is too large, the difficulty,
 * and expected time to the next block, is reduced.
 */
private boolean isValid(Block block) {
    boolean isValid = true;
    // Genesis is axiomatically valid; everything else is checked.
    if (!block.isGenesis()) {
        isValid = isValid(block.getHeader());
        // Sanity checks: recompute the tx-trie root and compare with the header.
        String trieHash = Hex.toHexString(block.getTxTrieRoot());
        String trieListHash = Hex.toHexString(calcTxTrie(block.getTransactionsList()));
        if( !trieHash.equals(trieListHash) ) {
            logger.error("Block's given Trie Hash doesn't match: {} != {}", trieHash, trieListHash);
            // FIXME: temporary comment out tx.trie validation
            // return false;
        }
        // Uncle-list hash must match the header's commitment.
        String unclesHash = Hex.toHexString(block.getHeader().getUnclesHash());
        String unclesListHash = Hex.toHexString(HashUtil.sha3(block.getHeader().getUnclesEncoded(block.getUncleList())));
        if (!unclesHash.equals(unclesListHash)) {
            logger.error("Block's given Uncle Hash doesn't match: {} != {}", unclesHash, unclesListHash);
            return false;
        }
        if (block.getUncleList().size() > UNCLE_LIST_LIMIT) {
            logger.error("Uncle list to big: block.getUncleList().size() > UNCLE_LIST_LIMIT");
            return false;
        }
        for (BlockHeader uncle : block.getUncleList()) {
            // - They are valid headers (not necessarily valid blocks)
            if (!isValid(uncle)) return false;
            //if uncle's parent's number is not less than currentBlock - UNCLE_GEN_LIMIT, mark invalid
            isValid = !(getParent(uncle).getNumber() < (block.getNumber() - UNCLE_GENERATION_LIMIT));
            if (!isValid) {
                logger.error("Uncle too old: generationGap must be under UNCLE_GENERATION_LIMIT");
                return false;
            }
        }
    }
    return isValid;
}
/**
 * Applies the block's transactions and returns their receipts.
 * Genesis blocks and block-chain-only mode yield an empty receipt list.
 */
private List<TransactionReceipt> processBlock(Block block) {
    if (block.isGenesis() || CONFIG.blockChainOnly())
        return new ArrayList<>();
    wallet.addTransactions(block.getTransactionsList());
    List<TransactionReceipt> receipts = applyBlock(block);
    wallet.processBlock(block);
    return receipts;
}
/**
 * Executes every transaction in the block against the tracked repository,
 * building one receipt per transaction, then applies the miner/uncle
 * rewards and updates the cumulative difficulty.
 *
 * Each transaction is committed to the track individually; the receipt's
 * post-state is taken from the repository root after that commit.
 *
 * @return receipts in transaction order
 */
private List<TransactionReceipt> applyBlock(Block block) {
    logger.info("applyBlock: block: [{}] tx.list: [{}]", block.getNumber(), block.getTransactionsList().size());
    long saveTime = System.nanoTime();
    int i = 1;
    long totalGasUsed = 0;
    List<TransactionReceipt> receipts = new ArrayList<>();
    for (Transaction tx : block.getTransactionsList()) {
        stateLogger.info("apply block: [{}] tx: [{}] ", block.getNumber(), i);
        // Gas already consumed by earlier txs is threaded through so the
        // executor can enforce the block gas limit.
        TransactionExecutor executor = new TransactionExecutor(tx, block.getCoinbase(),
                track, blockStore,
                programInvokeFactory, block, listener, totalGasUsed);
        executor.init();
        executor.execute();
        executor.go();
        executor.finalization();
        totalGasUsed += executor.getGasUsed();
        track.commit();
        TransactionReceipt receipt = new TransactionReceipt();
        receipt.setCumulativeGas(totalGasUsed);
        receipt.setPostTxState(repository.getRoot());
        receipt.setTransaction(tx);
        receipt.setLogInfoList(executor.getVMLogs());
        stateLogger.info("block: [{}] executed tx: [{}] \n state: [{}]", block.getNumber(), i,
                Hex.toHexString(repository.getRoot()));
        stateLogger.info("[{}] ", receipt.toString());
        if (stateLogger.isInfoEnabled())
            stateLogger.info("tx[{}].receipt: [{}] ", i, Hex.toHexString(receipt.getEncoded()));
        // Optional per-tx state dump for debugging; i is advanced here only.
        if (block.getNumber() >= CONFIG.traceStartBlock())
            repository.dumpState(block, totalGasUsed, i++, tx.getHash());
        receipts.add(receipt);
    }
    addReward(block);
    updateTotalDifficulty(block);
    track.commit();
    stateLogger.info("applied reward for block: [{}] \n state: [{}]",
            block.getNumber(),
            Hex.toHexString(repository.getRoot()));
    if (block.getNumber() >= CONFIG.traceStartBlock())
        repository.dumpState(block, totalGasUsed, 0, null);
    long totalTime = System.nanoTime() - saveTime;
    adminInfo.addBlockExecTime(totalTime);
    logger.info("block: num: [{}] hash: [{}], executed after: [{}]nano", block.getNumber(), block.getShortHash(), totalTime);
    return receipts;
}
/**
 * Add reward to block- and every uncle coinbase
 * assuming the entire block is valid.
 *
 * Each uncle coinbase receives BLOCK_REWARD * (8 + uncleNumber - blockNumber) / 8,
 * and the block coinbase additionally collects INCLUSION_REWARD per uncle.
 *
 * @param block object containing the header and uncles
 */
private void addReward(Block block) {
    // Add standard block reward
    BigInteger totalBlockReward = Block.BLOCK_REWARD;
    // Add extra rewards based on number of uncles
    if (block.getUncleList().size() > 0) {
        for (BlockHeader uncle : block.getUncleList()) {
            // Static constant accessed via the class, not through the
            // instance as before (block.BLOCK_REWARD) — same value, clearer.
            track.addBalance(uncle.getCoinbase(),
                    new BigDecimal(Block.BLOCK_REWARD).multiply(BigDecimal.valueOf(8 + uncle.getNumber() - block.getNumber()).divide(new BigDecimal(8))).toBigInteger());
            totalBlockReward = totalBlockReward.add(Block.INCLUSION_REWARD);
        }
    }
    track.addBalance(block.getCoinbase(), totalBlockReward);
}
@Override
public void storeBlock(Block block, List<TransactionReceipt> receipts) {
    /* Debug check to see if the state is still as expected: the header's
     * state root must equal the repository's current root (skipped in
     * block-chain-only mode, where no state transition was executed). */
    String blockStateRootHash = Hex.toHexString(block.getStateRoot());
    String worldStateRootHash = Hex.toHexString(repository.getRoot());
    if (!SystemProperties.CONFIG.blockChainOnly())
        if (!blockStateRootHash.equals(worldStateRootHash)) {
            // Consensus divergence is logged/recorded but deliberately not
            // fatal here (rollback path is commented out below).
            stateLogger.error("BLOCK: STATE CONFLICT! block: {} worldstate {} mismatch", block.getNumber(), worldStateRootHash);
            // stateLogger.error("DO ROLLBACK !!!");
            adminInfo.lostConsensus();
            System.out.println("CONFLICT: BLOCK #" + block.getNumber() );
            // System.exit(1);
            // in case of rollback hard move the root
            // Block parentBlock = blockStore.getBlockByHash(block.getParentHash());
            // repository.syncToRoot(parentBlock.getStateRoot());
            // return false;
        }
    blockStore.saveBlock(block, totalDifficulty, true);
    setBestBlock(block);
    if (logger.isDebugEnabled())
        logger.debug("block added to the blockChain: index: [{}]", block.getNumber());
    // Progress heartbeat every 100 blocks.
    if (block.getNumber() % 100 == 0)
        logger.info("*** Last block added [ #{} ]", block.getNumber());
}
/** True when the block's parent is present in the block store. */
public boolean hasParentOnTheChain(Block block) {
    return getParent(block.getHeader()) != null;
}
@Override
public List<Chain> getAltChains() {
    // Side chains tracked alongside the main chain.
    return altChains;
}
@Override
public List<Block> getGarbage() {
    // Blocks discarded during chain maintenance.
    return garbage;
}
@Override
public BlockQueue getQueue() {
    // Download/import queue feeding this chain.
    return blockQueue;
}
@Override
public void setBestBlock(Block block) {
    // No validation here: callers are expected to pass an already-stored block.
    bestBlock = block;
}
@Override
public Block getBestBlock() {
    // Head of the current main chain.
    return bestBlock;
}
@Override
public void close() {
    // Only the queue is closed here; stores/repository are managed elsewhere.
    blockQueue.close();
}
@Override
public BigInteger getTotalDifficulty() {
    // Cumulative difficulty of the current main chain.
    return totalDifficulty;
}
@Override
public void updateTotalDifficulty(Block block) {
    // Accumulate the block's own difficulty into the chain total.
    totalDifficulty = totalDifficulty.add(block.getDifficultyBI());
}
@Override
public void setTotalDifficulty(BigInteger totalDifficulty) {
    // Overwrites the running total, e.g. when re-branching to a fork.
    this.totalDifficulty = totalDifficulty;
}
/**
 * Appends the RLP-encoded block (hex) to the dump file when block
 * recording is enabled. On block #1 the dump directory is wiped first;
 * when the best block is still genesis, genesis is written ahead of the
 * new block so the file replays from the chain start.
 */
private void recordBlock(Block block) {
    if (!CONFIG.recordBlocks()) return;
    if (block.getNumber() == 1) {
        FileSystemUtils.deleteRecursively(new File(CONFIG.dumpDir()));
    }
    String dir = CONFIG.dumpDir() + "/";
    File dumpFile = new File(System.getProperty("user.dir") + "/" + dir + "_blocks_rec.txt");
    try {
        dumpFile.getParentFile().mkdirs();
        if (!dumpFile.exists()) dumpFile.createNewFile();
        // try-with-resources replaces the original manual finally/close
        // (which swallowed close failures via printStackTrace) and cannot
        // leak the writers if write() throws.
        try (FileWriter fw = new FileWriter(dumpFile.getAbsoluteFile(), true);
             BufferedWriter bw = new BufferedWriter(fw)) {
            if (bestBlock.isGenesis()) {
                bw.write(Hex.toHexString(bestBlock.getEncoded()));
                bw.write("\n");
            }
            bw.write(Hex.toHexString(block.getEncoded()));
            bw.write("\n");
        }
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
    }
}
/**
 * Merges newly received transactions into the pending pool, notifying the
 * listener (if one is registered) first.
 */
public void addPendingTransactions(Set<Transaction> transactions) {
    logger.info("Pending transaction list added: size: [{}]", transactions.size());
    if (listener != null) {
        listener.onPendingTransactionsReceived(transactions);
    }
    pendingTransactions.addAll(transactions);
}
/**
 * Drops the given (now-mined) transactions from the pending pool,
 * logging each removal by hash.
 */
public void clearPendingTransactions(List<Transaction> receivedTransactions) {
    for (Transaction tx : receivedTransactions) {
        logger.info("Clear transaction, hash: [{}]", Hex.toHexString(tx.getHash()));
        pendingTransactions.remove(tx);
    }
}
/** Live view of the pending pool (not a defensive copy). */
public Set<Transaction> getPendingTransactions() {
    return pendingTransactions;
}
/** Injects the state repository this chain operates on. */
public void setRepository(Repository repository) {
    this.repository = repository;
}
/** Injects the factory used to build VM invocation contexts. */
public void setProgramInvokeFactory(ProgramInvokeFactory factory) {
    this.programInvokeFactory = factory;
}
/** Begins a new repository change-track; discards any reference to the previous one. */
public void startTracking() {
    track = repository.startTracking();
}
/** Commits the current change-track into the underlying repository. */
public void commitTracking() {
    track.commit();
}
/** Block number after which add() terminates the process (debug/benchmark aid). */
public void setExitOn(long exitOn) {
    this.exitOn = exitOn;
}
/** True when a block with this hash is already stored. */
public boolean isBlockExist(byte[] hash) {
    return blockStore.isBlockExist(hash);
}
}
|
|
/**
*
*/
package uk.co.jemos.podam.test.dto.annotations;
import java.io.Serializable;
import uk.co.jemos.podam.annotations.PodamIntValue;
import uk.co.jemos.podam.test.utils.PodamTestConstants;
/**
* POJO to test the {@link PodamIntValue} annotation
*
* @author mtedone
*
*/
/**
 * POJO exercising the {@code @PodamIntValue} annotation on primitive and
 * boxed int fields, covering min-only, max-only, min+max, and precise-value
 * strategies.
 */
public class IntegerValuePojo implements Serializable {

    // ------------------->> Constants

    private static final long serialVersionUID = 1L;

    // ------------------->> Instance / Static variables

    @PodamIntValue(minValue = PodamTestConstants.NUMBER_INT_MIN_VALUE)
    private int intFieldWithMinValueOnly;

    @PodamIntValue(numValue = PodamTestConstants.INTEGER_PRECISE_VALUE)
    private int intFieldWithPreciseValue;

    @PodamIntValue(maxValue = PodamTestConstants.NUMBER_INT_ONE_HUNDRED)
    private int intFieldWithMaxValueOnly;

    @PodamIntValue(minValue = PodamTestConstants.NUMBER_INT_MIN_VALUE, maxValue = PodamTestConstants.NUMBER_INT_MAX_VALUE)
    private int intFieldWithMinAndMaxValue;

    @PodamIntValue(minValue = PodamTestConstants.NUMBER_INT_MIN_VALUE)
    private Integer integerObjectFieldWithMinValueOnly;

    @PodamIntValue(maxValue = PodamTestConstants.NUMBER_INT_ONE_HUNDRED)
    private Integer integerObjectFieldWithMaxValueOnly;

    @PodamIntValue(minValue = PodamTestConstants.NUMBER_INT_MIN_VALUE, maxValue = PodamTestConstants.NUMBER_INT_MAX_VALUE)
    private Integer integerObjectFieldWithMinAndMaxValue;

    @PodamIntValue(numValue = PodamTestConstants.INTEGER_PRECISE_VALUE)
    private Integer integerObjectFieldWithPreciseValue;

    // ------------------->> Constructors

    // ------------------->> Public methods

    // ------------------->> Getters / Setters

    /** @return the intFieldWithMinValueOnly */
    public int getIntFieldWithMinValueOnly() {
        return intFieldWithMinValueOnly;
    }

    /** @param intFieldWithMinValueOnly the intFieldWithMinValueOnly to set */
    public void setIntFieldWithMinValueOnly(int intFieldWithMinValueOnly) {
        this.intFieldWithMinValueOnly = intFieldWithMinValueOnly;
    }

    /** @return the intFieldWithMaxValueOnly */
    public int getIntFieldWithMaxValueOnly() {
        return intFieldWithMaxValueOnly;
    }

    /** @param intFieldWithMaxValueOnly the intFieldWithMaxValueOnly to set */
    public void setIntFieldWithMaxValueOnly(int intFieldWithMaxValueOnly) {
        this.intFieldWithMaxValueOnly = intFieldWithMaxValueOnly;
    }

    /** @return the intFieldWithMinAndMaxValue */
    public int getIntFieldWithMinAndMaxValue() {
        return intFieldWithMinAndMaxValue;
    }

    /** @param intFieldWithMinAndMaxValue the intFieldWithMinAndMaxValue to set */
    public void setIntFieldWithMinAndMaxValue(int intFieldWithMinAndMaxValue) {
        this.intFieldWithMinAndMaxValue = intFieldWithMinAndMaxValue;
    }

    /** @return the integerObjectFieldWithMinValueOnly */
    public Integer getIntegerObjectFieldWithMinValueOnly() {
        return integerObjectFieldWithMinValueOnly;
    }

    /** @param integerObjectFieldWithMinValueOnly the integerObjectFieldWithMinValueOnly to set */
    public void setIntegerObjectFieldWithMinValueOnly(
            Integer integerObjectFieldWithMinValueOnly) {
        this.integerObjectFieldWithMinValueOnly = integerObjectFieldWithMinValueOnly;
    }

    /** @return the integerObjectFieldWithMaxValueOnly */
    public Integer getIntegerObjectFieldWithMaxValueOnly() {
        return integerObjectFieldWithMaxValueOnly;
    }

    /** @param integerObjectFieldWithMaxValueOnly the integerObjectFieldWithMaxValueOnly to set */
    public void setIntegerObjectFieldWithMaxValueOnly(
            Integer integerObjectFieldWithMaxValueOnly) {
        this.integerObjectFieldWithMaxValueOnly = integerObjectFieldWithMaxValueOnly;
    }

    /** @return the integerObjectFieldWithMinAndMaxValue */
    public Integer getIntegerObjectFieldWithMinAndMaxValue() {
        return integerObjectFieldWithMinAndMaxValue;
    }

    /** @param integerObjectFieldWithMinAndMaxValue the integerObjectFieldWithMinAndMaxValue to set */
    public void setIntegerObjectFieldWithMinAndMaxValue(
            Integer integerObjectFieldWithMinAndMaxValue) {
        this.integerObjectFieldWithMinAndMaxValue = integerObjectFieldWithMinAndMaxValue;
    }

    /** @return the intFieldWithPreciseValue */
    public int getIntFieldWithPreciseValue() {
        return intFieldWithPreciseValue;
    }

    /** @param intFieldWithPreciseValue the intFieldWithPreciseValue to set */
    public void setIntFieldWithPreciseValue(int intFieldWithPreciseValue) {
        this.intFieldWithPreciseValue = intFieldWithPreciseValue;
    }

    /** @return the integerObjectFieldWithPreciseValue */
    public Integer getIntegerObjectFieldWithPreciseValue() {
        return integerObjectFieldWithPreciseValue;
    }

    /** @param integerObjectFieldWithPreciseValue the integerObjectFieldWithPreciseValue to set */
    public void setIntegerObjectFieldWithPreciseValue(
            Integer integerObjectFieldWithPreciseValue) {
        this.integerObjectFieldWithPreciseValue = integerObjectFieldWithPreciseValue;
    }

    /**
     * Constructs a <code>String</code> with all attributes
     * in name = value format.
     *
     * @return a <code>String</code> representation
     *         of this object.
     */
    public String toString()
    {
        final String TAB = "    ";
        StringBuilder retValue = new StringBuilder();
        // Label fixed: used to read "IntegerRangeValuesPojo" (stale
        // copy/paste from a sibling class); this class is IntegerValuePojo.
        retValue.append("IntegerValuePojo ( ")
                .append("intFieldWithMinValueOnly = ").append(this.intFieldWithMinValueOnly).append(TAB)
                .append("intFieldWithPreciseValue = ").append(this.intFieldWithPreciseValue).append(TAB)
                .append("intFieldWithMaxValueOnly = ").append(this.intFieldWithMaxValueOnly).append(TAB)
                .append("intFieldWithMinAndMaxValue = ").append(this.intFieldWithMinAndMaxValue).append(TAB)
                .append("integerObjectFieldWithMinValueOnly = ").append(this.integerObjectFieldWithMinValueOnly).append(TAB)
                .append("integerObjectFieldWithMaxValueOnly = ").append(this.integerObjectFieldWithMaxValueOnly).append(TAB)
                .append("integerObjectFieldWithMinAndMaxValue = ").append(this.integerObjectFieldWithMinAndMaxValue).append(TAB)
                .append("integerObjectFieldWithPreciseValue = ").append(this.integerObjectFieldWithPreciseValue).append(TAB)
                .append(" )");
        return retValue.toString();
    }

    // ------------------->> Private methods

    // ------------------->> equals() / hashcode() / toString()

    // ------------------->> Inner classes
}
|
|
/*
* Copyright (c) 2003-2012 Fred Hutchinson Cancer Research Center
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fhcrc.cpl.viewer.gui;
import org.fhcrc.cpl.toolbox.proteomics.feature.Feature;
import org.fhcrc.cpl.toolbox.proteomics.feature.FeatureSet;
import org.fhcrc.cpl.viewer.util.SharedProperties;
import org.fhcrc.cpl.viewer.Localizer;
import org.fhcrc.cpl.toolbox.ApplicationContext;
import org.fhcrc.cpl.toolbox.TextProvider;
import org.fhcrc.cpl.toolbox.proteomics.Protein;
import org.fhcrc.cpl.toolbox.proteomics.PeptideGenerator;
import org.fhcrc.cpl.toolbox.proteomics.Peptide;
import org.fhcrc.cpl.toolbox.proteomics.filehandler.FastaLoader;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.table.AbstractTableModel;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.MutableTreeNode;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.*;
import java.util.List;
/**
* User: migra
* Date: Jun 29, 2004
* Time: 11:48:16 AM
*
*/
public class OpenFastaDialog extends JDialog implements PropertyChangeListener
{
private static final double _hMass = PeptideGenerator.AMINO_ACID_MONOISOTOPIC_MASSES[PeptideGenerator.H_ION_INDEX];
private static OpenFastaDialog _instance ;
static final NumberFormat massFormat = new DecimalFormat("#####.0000");
/**
 * Immutable descriptor of a residue mass modification: which residue it
 * applies to, the mass delta to add, a display name, and whether it is the
 * default selection in the combo box.
 */
private static class ResidueMod
{
    final double addMass;   // mass delta added to the residue's base mass
    final char residue;     // one-letter residue code, e.g. 'C'
    final String desc;      // human-readable modification name
    final boolean isDefault; // pre-selected in the cysteine combo box
    public ResidueMod(char residue, double addMass, String desc, boolean isDefault)
    {
        this.residue = residue;
        this.addMass = addMass;
        this.desc = desc;
        this.isDefault = isDefault;
    }
    /** Combo-box label, e.g. "Iodoacetamide (C +57.0214)". */
    public String toString()
    {
        return desc + " (" + residue + (addMass >= 0.0 ? " +" : " ") + addMass + ")";
    }
}
private static final ResidueMod[] residueMods = new ResidueMod[]{
new ResidueMod('C', 0.0, "Reduced form", false),
new ResidueMod('C', 58.004, "Iodoacetic Acid", false),
new ResidueMod('C', 57.0214, "Iodoacetamide", true), //IAM is our default
new ResidueMod('K', 6.020124, "SILAC", false),
};
private Container contentPanel;
private JTextField textMass;
private JTree treePeptide;
private JLabel labelStatus;
private File _fastaFile;
private JTextField textTolerance;
private JComboBox comboUnits;
private JComboBox comboMissedCleavages;
private JComboBox comboCysteine;
private JButton buttonSearch;
private JTable tablePeptides;
private JLabel labelSequence;
private JButton buttonFindProtein;
private JTextField textProteinName;
private JSplitPane splitPane;
private Protein _protein;
/**
 * Lazily creates and returns the shared dialog instance.
 * NOTE(review): not synchronized — presumably only ever called on the
 * Swing EDT; confirm before using from other threads.
 */
public static OpenFastaDialog getInstance()
{
    if (null == _instance)
        _instance = new OpenFastaDialog();
    return _instance;
}
//TODO: Pre-expand proteins since that's the only interesting thing
/**
 * Builds the dialog from its SwiXml layout and wires all listeners:
 * mass-search triggers, default-button switching per focused pane,
 * protein lookup, and feature-range change handling.
 */
public OpenFastaDialog()
{
    try
    {
        // Layout is rendered from an external SwiXml descriptor; the
        // component fields above are injected by name.
        contentPanel =
                Localizer.renderSwixml("org/fhcrc/cpl/viewer/gui/OpenFastaDialog.xml",
                        this);
    }
    catch (Exception x)
    {
        ApplicationContext.errorMessage(TextProvider.getText("ERROR_CREATING_DIALOG"), x);
        throw new RuntimeException(x);
    }
    setContentPane(contentPanel);
    MutableTreeNode rootNode = new DefaultMutableTreeNode("No Peptides");
    treePeptide.setModel(new DefaultTreeModel(rootNode));
    textTolerance.setText(".1");
    comboUnits.addItem("Daltons");
    comboUnits.addItem("PPM");
    // Populate residue modifications; select the one flagged as default.
    for (int i = 0; i < residueMods.length; i++)
    {
        comboCysteine.addItem(residueMods[i].toString());
        if ( residueMods[i].isDefault )
            comboCysteine.setSelectedIndex(i);
    }
    comboMissedCleavages.addItem("0");
    comboMissedCleavages.addItem("1");
    comboMissedCleavages.addItem("2");
    comboMissedCleavages.setSelectedIndex(0); // default: 0 missed cleavages (index == value here)
    tablePeptides.setModel(new PeptideTableModel(null));
    // Enter in the mass field or the Search button both run the search.
    ActionListener searchListener = new ActionListener(){
        public void actionPerformed(ActionEvent event)
        {
            doSearch();
        }
    };
    textMass.addActionListener(searchListener);
    buttonSearch.addActionListener(searchListener);
    buttonSearch.setDefaultCapable(true);
    tablePeptides.getSelectionModel().addListSelectionListener(new PeptideTableSelectionListener());
    ApplicationContext.addPropertyChangeListener(this);
    this.setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
    // Focusing any top-pane control makes Search the default button and
    // selects the text for quick overtyping.
    FocusListener topPaneFocusListener = new FocusListener()
    {
        public void focusGained(FocusEvent event)
        {
            getRootPane().setDefaultButton(buttonSearch);
            if (event.getSource() instanceof JTextField)
            {
                JTextField textField = (JTextField) event.getSource();
                textField.setSelectionStart(0);
                textField.setSelectionEnd(textField.getText().length());
            }
        }
        public void focusLost(FocusEvent event)
        {
        }
    };
    comboMissedCleavages.addFocusListener(topPaneFocusListener);
    textMass.addFocusListener(topPaneFocusListener);
    textTolerance.addFocusListener(topPaneFocusListener);
    comboUnits.addFocusListener(topPaneFocusListener);
    // Focusing the protein-name field instead makes Find Protein the default.
    textProteinName.addFocusListener(new FocusListener(){
        public void focusGained(FocusEvent event)
        {
            getRootPane().setDefaultButton(buttonFindProtein);
            textProteinName.setSelectionStart(0);
            textProteinName.setSelectionEnd(textProteinName.getText().length());
        }
        public void focusLost(FocusEvent event)
        {
        }
    });
    buttonFindProtein.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event)
        {
            _protein = findProtein(textProteinName.getText());
            showProtein(_protein, null);
        }
    });
    // Re-render the current protein (preserving the selected peptide) when
    // the displayed feature ranges change.
    ApplicationContext.addPropertyChangeListener(SharedProperties.FEATURE_RANGES, new PropertyChangeListener()
    {
        public void propertyChange(PropertyChangeEvent event)
        {
            if (null != _protein)
            {
                ListSelectionModel lsm = tablePeptides.getSelectionModel();
                Peptide pep = null;
                PeptideTableModel tableModel = (PeptideTableModel) tablePeptides.getModel();
                if (!lsm.isSelectionEmpty())
                    pep = tableModel.getPeptides()[lsm.getMinSelectionIndex()];
                showProtein(_protein, pep);
            }
        }
    });
    splitPane.setDividerLocation(200);
    this.setSize(530, 600);
}
/** @return the FASTA file currently backing this dialog (may be null). */
public File getFastaFile()
{
    return _fastaFile;
}
/**
 * Switches the dialog to a new FASTA file: clears the current protein and
 * peptide views, retitles the dialog, and publishes the file as the
 * "fastaFile" application property.
 */
public void setFastaFile(File fastaFile)
{
    _fastaFile = fastaFile;
    _protein = null;
    showProtein(null, null);
    treePeptide.setModel(new DefaultTreeModel(new DefaultMutableTreeNode("Peptides")));
    this.setTitle(fastaFile.getAbsolutePath());
    ApplicationContext.setProperty("fastaFile", fastaFile);
}
//UNDONE: Cache this and listen for change event
/**
 * Parses the tolerance field, converting PPM to Daltons relative to the
 * current mass when the units combo is on PPM (index 1).
 * Malformed input yields 0.
 */
public double getTolerance()
{
    String toleranceString = textTolerance.getText();
    double tolerance;
    try
    {
        tolerance = Double.parseDouble(toleranceString);
        if (comboUnits.getSelectedIndex() == 1)
            tolerance = (tolerance * getMass()) / 1000000.0; //PPM
    }
    catch (NumberFormatException x)
    {
        return 0;
    }
    return tolerance;
}
/**
 * Parses the mass text field as a double; malformed input yields 0.
 */
public double getMass()
{
    try
    {
        return Double.parseDouble(textMass.getText());
    }
    catch (NumberFormatException x)
    {
        return 0;
    }
}
/**
 * Kicks off an asynchronous peptide search over the current FASTA file for
 * masses within [mass - tolerance, mass + tolerance], using the selected
 * residue modification and missed-cleavage count. Results arrive via the
 * PeptideCollector listener; the UI is reset to a "Searching..." state.
 */
private void doSearch()
{
    double mass = getMass();
    double tolerance = getTolerance();
    PeptideGenerator pepGen = new PeptideGenerator();
    pepGen.setInputFileName(_fastaFile.getAbsolutePath());
    pepGen.setMinMass(mass - tolerance);
    pepGen.setMaxMass(mass + tolerance);
    pepGen.addListener(new PeptideCollector());
    double[] massTab = PeptideGenerator.getMasses(true);
    // Residue modifications: apply the selected cysteine (or other) mod.
    int i = comboCysteine.getSelectedIndex();
    massTab[residueMods[i].residue] += residueMods[i].addMass;
    pepGen.setMassTable(massTab);
    pepGen.setMaxMissedCleavages(comboMissedCleavages.getSelectedIndex());
    labelStatus.setText("Searching for masses " + String.valueOf(pepGen.getMinMass()) + "-" + String.valueOf(pepGen.getMaxMass()));
    treePeptide.setModel(new DefaultTreeModel(new DefaultMutableTreeNode("Searching...")));
    tablePeptides.setModel(new PeptideTableModel(null));
    ApplicationContext.setProperty(SharedProperties.HIGHLIGHT_FEATURES, null);
    // Run the generator off the EDT; it reports back through its listener.
    Thread t = new Thread(pepGen);
    t.start();
}
//UNDONE: Do this in the background?
/**
 * Finds a protein whose FASTA header contains {@code proteinText}
 * (case-insensitive). With multiple matches the user picks one from a
 * dialog; returns null when nothing matches or the user cancels.
 */
public Protein findProtein(String proteinText)
{
    proteinText = proteinText.trim().toLowerCase();
    FastaLoader fastaLoader = new FastaLoader(_fastaFile);
    FastaLoader.ProteinIterator iterator = (FastaLoader.ProteinIterator) fastaLoader.iterator();
    // Generified (was a raw List with casts at every use).
    List<Protein> proteins = new ArrayList<Protein>();
    while (iterator.hasNext())
    {
        Protein protein = (Protein) iterator.next();
        if (protein.getHeader().toLowerCase().contains(proteinText))
        {
            proteins.add(protein);
        }
    }
    if (proteins.isEmpty())
        return null;
    if (proteins.size() == 1)
        return proteins.get(0);
    Object[] arr = proteins.toArray();
    return (Protein) JOptionPane.showInputDialog(this, "Multiple proteins found, please pick one", "Select Protein", JOptionPane.PLAIN_MESSAGE, null, arr, arr[0]);
}
/**
 * Renders {@code protein} in the sequence pane and peptide table,
 * optionally pre-selecting {@code peptide}. A null protein clears all
 * views. Also publishes the matching features for chart highlighting.
 */
private void showProtein(Protein protein, Peptide peptide)
{
    if (null == protein)
    {
        // Clear everything back to the empty state.
        tablePeptides.setModel(new PeptideTableModel(null));
        ApplicationContext.setProperty(SharedProperties.HIGHLIGHT_FEATURES, null);
        labelSequence.setText("");
        textProteinName.setText("<no protein>");
        return;
    }
    PeptideGenerator pepGen = new PeptideGenerator();
    double[] massTab = getMassTab();
    pepGen.setMassTable(massTab);
    pepGen.setMaxMissedCleavages(comboMissedCleavages.getSelectedIndex());
    Peptide[] peptides = pepGen.digestProtein(protein);
    PeptideTableModel tableModel =new PeptideTableModel(peptides);
    labelSequence.setText(getProteinHtml(protein, tableModel, peptide));
    tablePeptides.setModel(tableModel);
    //UNDONE: This overwrites search string. Should put protein name elsewhere
    textProteinName.setText(protein.getHeader());
    int index = -1;
    if (null != peptide)
        index = tableModel.findPeptide(peptide);
    if (index != -1)
        tablePeptides.changeSelection(index, 0, true, false);
    //TODO: Put this on another thread
    ApplicationContext.setProperty(SharedProperties.HIGHLIGHT_FEATURES, tableModel.findHighlightedFeatures());
}
/**
 * Returns the monoisotopic mass table with the currently selected residue
 * modification applied.
 */
public double[] getMassTab()
{
    double[] masses = PeptideGenerator.getMasses(true);
    // Apply the residue modification chosen in the cysteine combo box.
    ResidueMod mod = residueMods[comboCysteine.getSelectedIndex()];
    masses[mod.residue] += mod.addMass;
    return masses;
}
/** Selected missed-cleavage count (combo index equals the value: 0, 1, or 2). */
public int getMissedCleavages()
{
    return comboMissedCleavages.getSelectedIndex();
}
private static final int PLAIN_RESIDUES = 0;
private static final int PEPTIDE_RESIDUES = 1; //Residues are covered by a peptide
private static final int FEATURE_PEPTIDE_RESIDUES = 2; //Match a feature within given tolerance
private static final int SELECTED_PEPTIDE_RESIDUES = 3; //UI selection is over this peptide
/**
 * Renders the protein sequence as HTML: residues outside any peptide in
 * lowercase, peptide residues in uppercase, feature-matched peptides in
 * orange and the selected peptide in magenta. Output is chunked into
 * 10-residue groups, 60 residues per line.
 */
public String getProteinHtml(Protein protein, PeptideTableModel tableModel, Peptide peptide)
{
    Peptide[] peptides = tableModel.getPeptides();
    String seq = protein.getSequenceAsString();
    // StringBuilder: this runs single-threaded, so the synchronized
    // StringBuffer used before bought nothing.
    StringBuilder sb = new StringBuilder();
    int chIndex = 0;
    sb.append("<html><pre>");
    int pepIndex = 0;
    int residueType;
    while(pepIndex < peptides.length)
    {
        int pepStart = peptides[pepIndex].getStart();
        if (chIndex < pepStart)
        {
            // Gap before the next peptide: emit lowercase, capped at the
            // current 10-residue group boundary.
            int len = Math.min(10 - (chIndex % 10), pepStart - chIndex);
            sb.append(seq.substring(chIndex, chIndex + len).toLowerCase());
            chIndex += len;
        }
        else
        {
            residueType = getResidueType(tableModel, pepIndex, peptide);
            int len = Math.min(10 - (chIndex % 10), pepStart + peptides[pepIndex].getLength() - chIndex);
            //If we find an equal or "more selected" peptide in the next spot, render it in preference
            //to this one.
            //TODO: This really should loop for the case where there are 3 peptides that start
            //at same place. First is FEATURE, second is nothing, third is SELECTED
            //Currently first will be rendered FEATURE and rest will be rendered SELECTED
            if (residueType != SELECTED_PEPTIDE_RESIDUES && pepIndex < peptides.length - 1)
            {
                int resTypeNext = getResidueType(tableModel, pepIndex + 1, peptide);
                if (resTypeNext >= residueType)
                    len = Math.min(len, peptides[pepIndex+1].getStart() - chIndex);
            }
            if (len <= 0)
            {
                pepIndex++;
                continue;
            }
            if (residueType == SELECTED_PEPTIDE_RESIDUES)
                sb.append("<font color=\"#FF00FF\">");
            else if (residueType == FEATURE_PEPTIDE_RESIDUES)
                sb.append("<font color=\"#FFA500\">");
            sb.append(seq.substring(chIndex, chIndex + len));
            if (residueType != PEPTIDE_RESIDUES)
                sb.append("</font>");
            chIndex += len;
        }
        // Layout: newline every 60 residues, space every 10.
        if (chIndex % 60 == 0)
            sb.append("\n");
        else if (chIndex % 10 == 0)
            sb.append(" ");
        // Skip peptides that have been fully rendered.
        while (pepIndex < peptides.length && chIndex >= peptides[pepIndex].getStart() + peptides[pepIndex].getLength())
            pepIndex++;
    }
    // Trailing sequence after the last peptide, lowercase.
    while (chIndex < seq.length())
    {
        int len = Math.min(10 - (chIndex % 10), seq.length() - chIndex);
        sb.append(seq.substring(chIndex, chIndex + len).toLowerCase());
        chIndex += len;
    }
    sb.append("</pre></html>");
    return sb.toString();
}
/**
 * Classifies the peptide at the given row for rendering purposes:
 * SELECTED_PEPTIDE_RESIDUES if it is the selected peptide,
 * FEATURE_PEPTIDE_RESIDUES if its nearest feature lies within the mass
 * tolerance, otherwise plain PEPTIDE_RESIDUES.
 */
private int getResidueType(PeptideTableModel tableModel, int row, Peptide selected)
{
    Peptide candidate = tableModel.getPeptides()[row];
    if (candidate.equals(selected))
        return SELECTED_PEPTIDE_RESIDUES;
    Feature nearest = tableModel.getNearestFeature(row);
    boolean withinTolerance =
        nearest != null && Math.abs(nearest.mass - candidate.getMass()) < getTolerance();
    return withinTolerance ? FEATURE_PEPTIDE_RESIDUES : PEPTIDE_RESIDUES;
}
/** Delegates to the peptide table's model to compute the highlighted features. */
public Feature[] findHighlightedFeatures()
{
    PeptideTableModel model = (PeptideTableModel) tablePeptides.getModel();
    return model.findHighlightedFeatures();
}
/**
 * Reacts to selected-point property changes while this component is visible:
 * when a Feature with a positive mass is selected, seeds the mass field with
 * it and re-runs the search.
 */
public void propertyChange(PropertyChangeEvent event)
{
    if (!this.isVisible() || !SharedProperties.SELECTED_POINT.equals(event.getPropertyName()))
        return;
    Object newValue = event.getNewValue();
    // instanceof is false for null, so no separate null check is needed.
    if (newValue instanceof Feature)
    {
        Feature feature = (Feature) newValue;
        if (feature.mass > 0)
        {
            textMass.setText("" + feature.mass);
            doSearch();
        }
    }
}
/**
 * Updates the sequence label whenever a peptide row is selected in the table.
 */
public class PeptideTableSelectionListener implements ListSelectionListener
{
    public void valueChanged(ListSelectionEvent e)
    {
        // Wait for the selection to settle before reacting.
        if (e.getValueIsAdjusting())
            return;
        ListSelectionModel selectionModel = (ListSelectionModel) e.getSource();
        if (selectionModel.isSelectionEmpty())
            return;
        PeptideTableModel model = (PeptideTableModel) tablePeptides.getModel();
        Peptide selected = model.getPeptides()[selectionModel.getMinSelectionIndex()];
        labelSequence.setText(getProteinHtml(selected.getProtein(), model, selected));
    }
}
/**
 * Table model listing candidate peptides.  When a feature selector is
 * configured (ApplicationContext property "featureSelector"), three extra
 * columns show each peptide's nearest displayed feature by mass.
 *
 * Fix: removed a dead per-iteration allocation of FeatureSet.FeatureSelector
 * inside findHighlightedFeatures() -- the object was never used.
 */
private class PeptideTableModel extends AbstractTableModel
{
    private final String[] colNamesBase = new String[] {"Peptide", "M (monoisotopic)", "[M + H]+", "[M + H]2+"};
    private final String[] colNamesFeatures = new String[] {"Peptide", "M (monoisotopic)", "[M + H]+", "[M + H]2+", "Nearest Feature", "Mass", "Scan"};
    private Peptide[] _peptides;
    private FeatureSet.FeatureSelector _featureSelector = null;
    private double _tolerance;
    private String[] _colNames = colNamesBase;
    // Lazily-filled per-row cache of nearest features; null unless a
    // feature selector is active.
    private Feature[] _nearestNeighbors = null;
    private Feature[] _highlightedFeatures;

    public PeptideTableModel(Peptide[] peptides)
    {
        _peptides = peptides;
        _featureSelector = (FeatureSet.FeatureSelector) ApplicationContext.getProperty("featureSelector");
        if (null != _featureSelector)
        {
            // Feature columns are only shown when a selector is configured.
            _colNames = colNamesFeatures;
            if (null != peptides)
                _nearestNeighbors = new Feature[peptides.length];
        }
        _tolerance = getTolerance();
    }

    public int getRowCount()
    {
        return _peptides == null ? 0 : _peptides.length;
    }

    public int getColumnCount()
    {
        return _colNames.length;
    }

    public Peptide[] getPeptides()
    {
        return _peptides;
    }

    /** Returns the row index of the given peptide, or -1 if not present. */
    public int findPeptide(Peptide peptide)
    {
        for (int i = 0; i < _peptides.length; i++)
            if (_peptides[i].equals(peptide))
                return i;
        return -1;
    }

    public Object getValueAt(int row, int col)
    {
        if (null == _peptides || row >= _peptides.length || col >= _colNames.length)
            return null;
        Feature nearestFeature = null;
        switch(col)
        {
            case 0:
                return _peptides[row].toString();
            case 1:
                return massFormat.format(_peptides[row].getMass());
            case 2:
                // Singly-protonated mass [M + H]+
                return massFormat.format(_peptides[row].getMass() + _hMass);
            case 3:
                // Doubly-protonated m/z [M + H]2+
                return massFormat.format(_peptides[row].getMass()/2 + _hMass);
            case 4:
                // Mass delta to the nearest feature
                nearestFeature = getNearestFeature(row);
                return null == nearestFeature ? null : massFormat.format(nearestFeature.mass - _peptides[row].getMass());
            case 5:
                nearestFeature = getNearestFeature(row);
                return null == nearestFeature ? null : massFormat.format(nearestFeature.mass);
            case 6:
                nearestFeature = getNearestFeature(row);
                return null == nearestFeature ? null : Integer.toString(nearestFeature.scan);
            default:
                return null;
        }
    }

    /**
     * Returns the displayed feature whose mass is closest to the peptide in
     * the given row, scanning all displayed feature sets.  Results are cached
     * in _nearestNeighbors; returns null when no selector is active or no
     * feature sets are available.
     */
    public Feature getNearestFeature(int row)
    {
        if (null == _nearestNeighbors)
            return null;
        Feature nearestFeature = _nearestNeighbors[row];
        if (null != nearestFeature)
            return nearestFeature;
        double distance = Double.MAX_VALUE;
        java.util.List featureSets = (java.util.List) ApplicationContext.getProperty(SharedProperties.FEATURE_RANGES);
        if (null == featureSets)
            return null;
        Iterator featureSetIterator = featureSets.iterator();
        while (featureSetIterator.hasNext())
        {
            FeatureSet fs = (FeatureSet) featureSetIterator.next();
            if (!fs.isDisplayed())
                continue;
            Feature[] features = fs.getFeatures();
            //Just find nearest by mass
            for (int i = 0; i < features.length; i++)
            {
                Feature feature = features[i];
                if (Math.abs(feature.mass - _peptides[row].getMass()) < distance)
                {
                    nearestFeature = feature;
                    distance = Math.abs(feature.mass - _peptides[row].getMass());
                }
            }
        }
        _nearestNeighbors[row] = nearestFeature;
        return nearestFeature;
    }

    //TODO: Make this use a property listener approach
    //TODO: Do this on a background thread

    public String getColumnName(int col)
    {
        return _colNames[col];
    }

    /**
     * Collects all displayed features whose mass lies within the tolerance of
     * any peptide in the model.  Features from every displayed set are merged,
     * sorted by mass, and binary-searched per peptide.
     *
     * @return matching features, or null when peptides/feature sets are absent
     */
    public Feature[] findHighlightedFeatures()
    {
        if (null == _peptides)
            return null;
        List featureSets = (List) ApplicationContext.getProperty(SharedProperties.FEATURE_RANGES);
        if (null == featureSets)
            return null;
        // First pass: count features in displayed sets so we can size the array.
        int size = 0;
        for (int i = 0; i < featureSets.size(); i++)
        {
            FeatureSet fs = (FeatureSet) featureSets.get(i);
            if (!fs.isDisplayed())
                continue;
            size += fs.getFeatures().length;
        }
        // Second pass: copy the displayed features into one flat array.
        Feature[] features = new Feature[size];
        int offset = 0;
        for (int i = 0; i < featureSets.size(); i++)
        {
            FeatureSet fs = (FeatureSet) featureSets.get(i);
            if (!fs.isDisplayed())
                continue;
            System.arraycopy(fs.getFeatures(), 0, features, offset, fs.getFeatures().length);
            offset += fs.getFeatures().length;
        }
        Comparator comp = new Feature.MassAscComparator();
        Arrays.sort(features, comp);
        ArrayList featureList = new ArrayList();
        double tolerance = getTolerance();
        for (int i = 0; i < _peptides.length; i++)
        {
            Peptide pep = _peptides[i];
            float minMass = (float) (pep.getMass() - tolerance);
            float maxMass = (float) (pep.getMass() + tolerance);
            // Probe with a throwaway feature; a negative binarySearch result
            // encodes the insertion point as -(point) - 1.
            Feature feat = new Feature();
            feat.mass = minMass;
            int indexStart = Arrays.binarySearch(features, feat, comp);
            if (indexStart < 0)
                indexStart = -indexStart - 1;
            feat.mass = maxMass;
            int indexEnd = Arrays.binarySearch(features, feat, comp);
            if (indexEnd < 0)
                indexEnd = -indexEnd - 1;
            if (indexEnd >= features.length)
                indexEnd = features.length -1;
            for (int j = indexStart; j <= indexEnd; j++)
            {
                //Still possible that nearest feature is not near enough
                if (features[j].mass >= minMass && features[j].mass <= maxMass)
                {
                    featureList.add(features[j]);
                }
            }
        }
        _highlightedFeatures = (Feature[]) featureList.toArray(new Feature[featureList.size()]);
        return _highlightedFeatures;
    }
}
/**
 * Collects peptides emitted by a PeptideGenerator run and, when generation
 * finishes, rebuilds the peptide tree on the EDT: one node per distinct
 * peptide with one child node per protein it occurs in.
 *
 * Fixes: handlePeptide duplicated the protein-add call in both branches;
 * handleDone captured an unused local (final double m = getMass()).
 */
private class PeptideCollector implements PeptideGenerator.PeptideListener
{
    // Maps each Peptide to the ArrayList of Proteins it was found in.
    HashMap hm = new HashMap();

    /** Records one peptide occurrence, accumulating its source protein. */
    public void handlePeptide(Peptide peptide)
    {
        ArrayList protList = (ArrayList) hm.get(peptide);
        if (null == protList)
        {
            protList = new ArrayList();
            hm.put(peptide, protList);
        }
        protList.add(peptide.getProtein());
    }

    /** Called when generation completes; builds and installs the tree model on the EDT. */
    public void handleDone()
    {
        final HashMap hm = this.hm;
        EventQueue.invokeLater(new Runnable()
        {
            public void run()
            {
                MutableTreeNode rootNode = new DefaultMutableTreeNode("Peptides");
                Collection peptides = hm.keySet();
                Iterator iter = peptides.iterator();
                while (iter.hasNext())
                {
                    Peptide pep = (Peptide) iter.next();
                    ArrayList proteinList = (ArrayList) hm.get(pep);
                    MutableTreeNode node = new DefaultMutableTreeNode(new PepDisplay(pep, proteinList.size()));
                    for(int i = 0; i < proteinList.size(); i++)
                    {
                        Protein protein = (Protein) proteinList.get(i);
                        MutableTreeNode proteinNode = new DefaultMutableTreeNode(protein);
                        node.insert(proteinNode, i);
                    }
                    rootNode.insert(node, rootNode.getChildCount());
                }
                treePeptide.setModel(new DefaultTreeModel(rootNode));
                treePeptide.addTreeSelectionListener(new TreeSelectionListener() {
                    public void valueChanged(TreeSelectionEvent e)
                    {
                        DefaultMutableTreeNode node = (DefaultMutableTreeNode) treePeptide.getLastSelectedPathComponent();
                        if (null == node)
                            return;
                        Object userObject = node.getUserObject();
                        if (userObject instanceof Protein)
                        {
                            // Selecting a protein shows that protein with its parent peptide highlighted.
                            DefaultMutableTreeNode pepNode = (DefaultMutableTreeNode) node.getParent();
                            Peptide pep = ((PepDisplay) pepNode.getUserObject()).peptide;
                            _protein = (Protein) userObject;
                            showProtein(_protein, pep);
                        }
                        else
                            labelSequence.setText("");
                    }
                });
                labelStatus.setText(String.valueOf(hm.size()) + " peptides found.");
            }
        });
    }
}
/**
 * Display wrapper for a peptide tree node: renders the peptide with its
 * mass, the delta to the searched mass, and its protein count.
 */
public class PepDisplay
{
    protected Peptide peptide;
    private int nProteins;

    public PepDisplay(Peptide peptide, int nProteins)
    {
        this.peptide = peptide;
        this.nProteins = nProteins;
    }

    public String toString()
    {
        StringBuffer text = new StringBuffer();
        text.append(peptide.toString());
        text.append(" m=").append(massFormat.format((double) peptide.getMass()));
        text.append(" (").append(massFormat.format(peptide.getMass() - getMass()));
        text.append(") - ").append(nProteins).append(" proteins");
        return text.toString();
    }
}
/**
 * Action that prompts for a FASTA file and opens the FASTA dialog on it.
 */
public static class OpenFastaAction extends AbstractAction
{
    JFileChooser chooser;

    public OpenFastaAction(JFileChooser chooser)
    {
        super(TextProvider.getText("OPEN_FASTA_DOTDOTDOT"));
        // Fall back to the workbench's chooser when none is supplied.
        this.chooser = (null == chooser) ? new WorkbenchFileChooser() : chooser;
    }

    public void actionPerformed(ActionEvent evt)
    {
        JFrame frame = ApplicationContext.getFrame();
        // If the user didn't hit OK, ignore.
        if (chooser.showOpenDialog(frame) != JFileChooser.APPROVE_OPTION)
            return;
        final File file = chooser.getSelectedFile();
        if (null == file)
            return;
        if (file.exists())
        {
            OpenFastaDialog openFastaDialog = new OpenFastaDialog();
            openFastaDialog.setFastaFile(file);
            openFastaDialog.setVisible(true);
            return;
        }
        ApplicationContext.errorMessage("Could not open file: " + file.getPath(), null);
    }
}
}
|
|
/*
Copyright 2006 by Sean Luke
Licensed under the Academic Free License version 3.0
See the file "LICENSE" for more information
*/
package ec.util;
import java.io.*;
import ec.*;
/*
* Code.java
*
* Created: Sat Oct 23 13:45:20 1999
* By: Sean Luke
*/
/**
* Code provides some simple wrapper functions for encoding and decoding
* basic data types for storage in a pseudo-Java source code strings
* format. This differs from just "printing"
* them to string in that the actual precision of the object is maintained.
* Code attempts to keep the representations as "Java-like" as possible --
* the exceptions being primarily floats and doubles, which are encoded as
* ints and longs. Encoding of objects and arrays is not supported. You'll
* have to handle that yourself. Strings are supported.
*
* <p>Everything is case-SENSITIVE. Here's the breakdown.
*
<p><table>
<tr><td><b>Type</b></td><td><b>Format</b></td></tr>
<tr><td>boolean</td><td><tt>true</tt> or <tt>false</tt> (old style, case sensitive) or <tt>T</tt> or <tt>F</tt> (new style, case sensitive)</td></tr>
<tr><td>byte</td><td><tt>b<i>byte</i>|</tt></td></tr>
<tr><td>short</td><td><tt>s<i>short</i>|</tt></td></tr>
<tr><td>int</td><td><tt>i<i>int</i>|</tt></td></tr>
<tr><td>long</td><td><tt>l<i>long</i>|</tt></td></tr>
<tr><td>float</td><td><tt>f<i>floatConvertedToIntForStorage</i>|<i>humanReadableFloat</i>|</tt> or (only for reading in) f|<i>humanReadableFloat</i>|</td></tr>
<tr><td>double</td><td><tt>d<i>doubleConvertedToLongForStorage</i>|<i>humanReadableDouble</i>|</tt> or (only for reading in) d|<i>humanReadableDouble</i>|</td></tr>
<tr><td>char</td><td>standard Java char, except that the only valid escape sequences are: \0 \t \n \b \' \" \ u <i>unicodeHex</i></td></tr>
<tr><td>string</td><td>standard Java string with \ u ...\ u Unicode escapes, except that the only other valid escape sequences are: \0 \t \n \b \' \"</td></tr>
</table>
*
*
* @author Sean Luke
* @version 1.0
*/
public class Code
{
    /** Encodes a boolean as "T" or "F". */
    public static String encode(final boolean b)
        // old style -- no longer used
        // { return b ? Boolean.TRUE.toString() : Boolean.FALSE.toString(); }
        { return b ? "T" : "F"; }

    /** Encodes a byte in the form b<i>byte</i>| */
    public static String encode(final byte b)
        { return "b" + Byte.toString(b) + "|"; }

    /** Encodes a character as a single-quoted, Java-style char literal. */
    public static String encode(final char c)
        {
        if (c >= 32 && c < 127 && c !='\\' &&
            c!= '\'') // we can safely print it
            return "'" + String.valueOf(c) + "'";
        else
            {
            // print it with an escape character
            if (c=='\b')
                return "'\\b'";
            else if (c=='\n')
                return "'\\n'";
            else if (c=='\t')
                return "'\\t'";
            else if (c=='\'')
                return "'\\''";
            else if (c=='\\')
                return "'\\\\'";
            else if (c=='\0')
                // BUG FIX: previously returned "'\\\\0" -- a doubled
                // backslash and a missing closing quote -- which produced
                // '\\0 and made decode() fail with "Bad char".  The correct
                // encoding of NUL is the literal '\0'.
                return "'\\0'";
            else
                {
                String s = Integer.toHexString((int)c);
                // pad with 0's -- Java's parser freaks out otherwise
                switch (s.length())
                    {
                    case 1: s = "'\\u000" + s + "'"; break;
                    case 2: s = "'\\u00" + s + "'"; break;
                    case 3: s = "'\\u0" + s + "'"; break;
                    case 4: s = "'\\u" + s + "'"; break;
                    }
                return s;
                }
            }
        }

    /** Encodes a short in the form s<i>short</i>| */
    public static String encode(final short s)
        { return "s" + Short.toString(s) + "|"; }

    /** Encodes an int in the form i<i>int</i>| */
    public static String encode(final int i)
        { return "i" + Integer.toString(i) + "|"; }

    /** Encodes a long in the form l<i>long</i>| */
    public static String encode(final long l)
        { return "l" + Long.toString(l) + "|"; }

    /** Encodes a float as f<i>intBits</i>|<i>humanReadable</i>| so the exact
        bit pattern is preserved alongside a readable form. */
    public static String encode(final float f)
        { return "f" + Integer.toString(Float.floatToIntBits(f))+ "|" + String.valueOf(f) + "|"; }

    /** Encodes a double as d<i>longBits</i>|<i>humanReadable</i>| so the exact
        bit pattern is preserved alongside a readable form. */
    public static String encode(final double d)
        { return "d" + Long.toString(Double.doubleToLongBits(d))+ "|" + String.valueOf(d) + "|"; }

    /** Encodes a String as a double-quoted literal.  Runs of non-printable
        characters are bracketed by \ u ... \ u and written as 4-digit hex. */
    public static String encode(final String s)
        {
        boolean inUnicode = false;
        int l = s.length();
        StringBuilder sb = new StringBuilder(l);
        sb.append("\"");
        for(int x=0;x<l;x++)
            {
            char c = s.charAt(x);
            if ( c >= 32 && c < 127 && c !='\\' && c!= '"') // we allow spaces
                // we can safely print it
                {
                if (inUnicode) { sb.append("\\u"); inUnicode=false; }
                sb.append(c);
                }
            else
                {
                // print it with an escape character
                if (c=='\b')
                    {
                    if (inUnicode) { sb.append("\\u"); inUnicode=false; }
                    sb.append("\\b");
                    }
                else if (c=='\n')
                    {
                    if (inUnicode) { sb.append("\\u"); inUnicode=false; }
                    sb.append("\\n");
                    }
                else if (c=='\t')
                    {
                    if (inUnicode) { sb.append("\\u"); inUnicode=false; }
                    sb.append("\\t");
                    }
                else if (c=='"')
                    {
                    if (inUnicode) { sb.append("\\u"); inUnicode=false; }
                    sb.append("\\\"");
                    }
                else if (c=='\\')
                    {
                    if (inUnicode) { sb.append("\\u"); inUnicode=false; }
                    sb.append("\\\\");
                    }
                else if (c=='\0')
                    {
                    if (inUnicode) { sb.append("\\u"); inUnicode=false; }
                    sb.append("\\0");
                    }
                else
                    {
                    // open a unicode run if not already in one
                    if (!inUnicode) {sb.append("\\u"); inUnicode=true; }
                    String ss = Integer.toHexString((int)c);
                    // pad with 0's -- Java's parser freaks out otherwise
                    switch (ss.length())
                        {
                        case 1: sb.append("000" + ss); break;
                        case 2: sb.append("00" + ss); break;
                        case 3: sb.append("0" + ss); break;
                        case 4: sb.append(ss); break;
                        }
                    }
                }
            }
        if (inUnicode) sb.append("\\u");  // close a trailing unicode run
        sb.append("\"");
        return sb.toString();
        }

    /** Decodes the next item out of a DecodeReturn and modifies the DecodeReturn
        to hold the results.  See DecodeReturn for more explanations about how to
        interpret the results. */
    public static void decode(DecodeReturn d)
        {
        String dat = d.data;
        int x = d.pos;
        int len = d.data.length();

        // skip leading whitespace
        for ( ; x<len; x++ )
            if (!Character.isWhitespace(dat.charAt(x))) break;

        // am I at the end of my rope?
        if (x==len) { d.type = DecodeReturn.T_ERROR; d.s = "Out of tokens"; return; }

        // dispatch on the token's first character
        switch(dat.charAt(x))
            {
            case 't': // boolean (true), old style
                if (x+3 < len && /* enough space */
                    dat.charAt(x+1)=='r' &&
                    dat.charAt(x+2)=='u' &&
                    dat.charAt(x+3)=='e')
                    { d.type = DecodeReturn.T_BOOLEAN; d.l = 1; d.pos = x+4; return; }
                else { d.type = DecodeReturn.T_ERROR; d.s = "Expected a (true) boolean"; return; }
                //break;

            case 'T': // boolean (true), new style
                { d.type = DecodeReturn.T_BOOLEAN; d.l = 1; d.pos = x+1; return; }
                //break;

            case 'F': // boolean (false), new style
                { d.type = DecodeReturn.T_BOOLEAN; d.l = 0; d.pos = x+1; return; }
                //break;

            case 'f': // float, or old-style boolean "false"
                if (x+4 < len && /* enough space */
                    dat.charAt(x+1)=='a' && dat.charAt(x+2)=='l' && dat.charAt(x+3)=='s' && dat.charAt(x+4)=='e' )
                    { d.type = DecodeReturn.T_BOOLEAN; d.l = 0; d.pos = x+5; return; }
                else
                    {
                    boolean readHuman = false;
                    String sf = null;
                    int initial = x+1;
                    // look for next '|'
                    for ( ; x < len; x++)
                        if (dat.charAt(x)=='|') break;
                    // an empty first field (f|...) means only the
                    // human-readable form is present
                    if (x==initial) readHuman=true;
                    if ( x >= len )
                        { d.type = DecodeReturn.T_ERROR; d.s = "Expected a float"; return; }
                    if (!readHuman)
                        sf = dat.substring(initial,x);
                    x++;
                    // look for next '|'
                    int initial2 = x; // x is now just past first |
                    for ( ; x < len; x++)
                        if (dat.charAt(x)=='|') break;
                    if ( x >= len )
                        { d.type = DecodeReturn.T_ERROR; d.s = "Expected a float"; return; }
                    if (readHuman)
                        sf = dat.substring(initial2,x);
                    float f;
                    try
                        {
                        if (readHuman) f = Float.parseFloat(sf);
                        else f = Float.intBitsToFloat(Integer.parseInt(sf));
                        }
                    catch (NumberFormatException e)
                        { d.type = DecodeReturn.T_ERROR; d.s = "Expected a float"; return; }
                    d.type = DecodeReturn.T_FLOAT;
                    d.d = f;
                    d.pos = x+1;
                    return;
                    }

            case 'd': // double
                {
                boolean readHuman = false;
                String sf = null;
                int initial = x+1;
                // look for next '|'
                for ( ; x < len; x++)
                    if (dat.charAt(x)=='|') break;
                // an empty first field (d|...) means only the
                // human-readable form is present
                if (x==initial) readHuman=true;
                if ( x >= len )
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected a double"; return; }
                if (!readHuman)
                    sf = dat.substring(initial,x);
                x++;
                // look for next '|'
                int initial2 = x; // x is now just past first |
                for ( ; x < len; x++)
                    if (dat.charAt(x)=='|') break;
                if ( x >= len )
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected a double"; return; }
                if (readHuman)
                    sf = dat.substring(initial2,x);
                double f;
                try
                    {
                    if (readHuman) f = Double.parseDouble(sf);
                    else f = Double.longBitsToDouble(Long.parseLong(sf));
                    }
                catch (NumberFormatException e)
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected a double"; return; }
                d.type = DecodeReturn.T_DOUBLE;
                d.d = f;
                d.pos = x+1;
                return;
                }
            // break;

            case 'b': // byte
                {
                int initial = x+1;
                // look for next '|'
                for ( ; x < len; x++)
                    if (dat.charAt(x)=='|') break;
                if ( x >= len )
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected a byte"; return; }
                String sf = dat.substring(initial,x);
                byte f;
                try
                    { f = Byte.parseByte(sf); }
                catch (NumberFormatException e)
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected a byte"; return; }
                d.type = DecodeReturn.T_BYTE;
                d.l = f;
                d.pos = x+1;
                return;
                }
            // break;

            case 's': // short
                {
                int initial = x+1;
                // look for next '|'
                for ( ; x < len; x++)
                    if (dat.charAt(x)=='|') break;
                if ( x >= len )
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected a short"; return; }
                String sf = dat.substring(initial,x);
                short f;
                try
                    { f = Short.parseShort(sf); }
                catch (NumberFormatException e)
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected a short"; return; }
                d.type = DecodeReturn.T_SHORT;
                d.l = f;
                d.pos = x+1;
                return;
                }
            // break;

            case 'i': // int
                {
                int initial = x+1;
                // look for next '|'
                for ( ; x < len; x++)
                    if (dat.charAt(x)=='|') break;
                if ( x >= len )
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected an int"; return; }
                String sf = dat.substring(initial,x);
                int f;
                try
                    { f = Integer.parseInt(sf); }
                catch (NumberFormatException e)
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected an int"; return; }
                d.type = DecodeReturn.T_INT;
                d.l = f;
                d.pos = x+1;
                return;
                }
            // break;

            case 'l': // long
                {
                int initial = x+1;
                // look for next '|'
                for ( ; x < len; x++)
                    if (dat.charAt(x)=='|') break;
                if ( x >= len )
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected a long"; return; }
                String sf = dat.substring(initial,x);
                long f;
                try
                    { f = Long.parseLong(sf); }
                catch (NumberFormatException e)
                    { d.type = DecodeReturn.T_ERROR; d.s = "Expected a long"; return; }
                d.type = DecodeReturn.T_LONG;
                d.l = f;
                d.pos = x+1;
                return;
                }
            // break;

            case '"': // string
                {
                StringBuilder sb = new StringBuilder();
                boolean inUnicode = false;

                x++;
                for ( ; x < len; x++)
                    {
                    char c = dat.charAt(x);
                    if (c=='"')
                        {
                        // done with the string
                        if (inUnicode) // uh oh
                            { d.type = DecodeReturn.T_ERROR; d.s = "Forgot to terminate Unicode with a '\\u' in the string"; return; }
                        d.type = DecodeReturn.T_STRING;
                        d.s = sb.toString();
                        d.pos = x+1;
                        return;
                        }
                    else if (c=='\\') // escape
                        {
                        x++;
                        if ( x >= len )
                            { d.type = DecodeReturn.T_ERROR; d.s = "Unterminated String"; return; }
                        if (dat.charAt(x)!='u' && inUnicode)
                            { d.type = DecodeReturn.T_ERROR; d.s = "Escape character in Unicode sequence"; return; }

                        switch (dat.charAt(x))
                            {
                            case 'u': inUnicode = !inUnicode; break;
                            case 'b': sb.append('\b'); break;
                            case 'n': sb.append('\n'); break;
                            case '"': sb.append('"'); break;
                            case '\'': sb.append('\''); break;
                            case 't': sb.append('\t'); break;
                            case '\\': sb.append('\\'); break;
                            case '0': sb.append('\0'); break;
                            default:
                                { d.type = DecodeReturn.T_ERROR; d.s = "Bad escape char in String"; return; }
                            }
                        }
                    else if (inUnicode)
                        {
                        // consume four hex digits at a time
                        if ( x + 3 >= len )
                            { d.type = DecodeReturn.T_ERROR; d.s = "Unterminated String"; return; }
                        try
                            {
                            sb.append((char)(Integer.decode("0x" + c +
                                        dat.charAt(x+1) +
                                        dat.charAt(x+2) +
                                        dat.charAt(x+3)).intValue()));
                            x+=3;
                            }
                        catch (NumberFormatException e)
                            { d.type = DecodeReturn.T_ERROR; d.s = "Bad Unicode in String"; return; }
                        }
                    else sb.append(c);
                    }
                d.type = DecodeReturn.T_ERROR; d.s = "Unterminated String"; return;
                }
            //break;

            case '\'': // char
                {
                x++;
                if ( x >= len )
                    { d.type = DecodeReturn.T_ERROR; d.s = "Unterminated char"; return; }
                char c = dat.charAt(x);
                if (c=='\\')
                    {
                    x++;
                    if (x>=len)
                        { d.type = DecodeReturn.T_ERROR; d.s = "Unterminated char"; return; }
                    switch (dat.charAt(x))
                        {
                        case 'u':
                            if ( x + 4 >= len )
                                { d.type = DecodeReturn.T_ERROR; d.s = "Unterminated char"; return; }
                            try
                                {
                                c = (char)(Integer.decode("0x" +
                                            dat.charAt(x+1) +
                                            dat.charAt(x+2) +
                                            dat.charAt(x+3) +
                                            dat.charAt(x+4)).intValue());
                                }
                            catch (NumberFormatException e)
                                { d.type = DecodeReturn.T_ERROR; d.s = "Bad Unicode in char"; return; }
                            x+=5;
                            break;
                        case 'b': c = '\b'; x++; break;
                        case 'n': c = '\n'; x++; break;
                        case '"': c = '"'; x++; break;
                        case '\'': c = '\''; x++; break;
                        case 't': c = '\t'; x++; break;
                        case '\\': c = '\\'; x++; break;
                        case '0': c = '\0'; x++; break;
                        default:
                            { d.type = DecodeReturn.T_ERROR; d.s = "Bad escape char in char"; return; }
                        }
                    if (dat.charAt(x)!='\'')
                        { d.type = DecodeReturn.T_ERROR; d.s = "Bad char"; return; }
                    d.type = DecodeReturn.T_CHAR;
                    d.l = c;
                    d.pos = x+1;
                    return;
                    }
                else
                    {
                    x++;
                    if ( x >= len )
                        { d.type = DecodeReturn.T_ERROR; d.s = "Unterminated char"; return; }
                    if (dat.charAt(x)!='\'')
                        { d.type = DecodeReturn.T_ERROR; d.s = "Bad char"; return; }
                    d.type = DecodeReturn.T_CHAR;
                    d.l = c;
                    d.pos = x + 1;
                    return;
                    }
                }
            //break;

            default:
                d.type = DecodeReturn.T_ERROR; d.s = "Unknown token"; return;
                // break;
            }
        }

    /** Finds the next nonblank line, then trims the line and checks the preamble.
        Returns a DecodeReturn on the line if successful, else posts a fatal error.
        Sets the DecodeReturn's line number.  The DecodeReturn has not yet been
        decoded.  You'll need to do that with Code.decode(...) */
    public static DecodeReturn checkPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        int linenumber = 0; // throw it away later
        try
            {
            // get non-blank line
            String s = "";
            while(s != null && s.trim().equals(""))
                {
                linenumber = reader.getLineNumber();
                s = reader.readLine();
                }

            // check the preamble
            // (message fix: added the missing space after "preamble.")
            if (s==null || !(s = s.trim()).startsWith(preamble)) // uh oh
                state.output.fatal("Line " + linenumber +
                    " has a bad preamble. Expected '" + preamble + "'\n-->" + s);
            DecodeReturn d = new DecodeReturn(s, preamble.length());
            d.lineNumber = linenumber;
            return d;
            }
        catch (IOException e)
            {
            state.output.fatal("On line " + linenumber + " an IO error occurred:\n\n" + e);
            return null; // never happens
            }
        }

    /** Finds the next nonblank line, skips past an expected preamble, and reads in a string if there is one, and returns it.
        Generates an error otherwise. */
    public static String readStringWithPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        DecodeReturn d = checkPreamble(preamble, state, reader);
        Code.decode(d);
        if (d.type!=DecodeReturn.T_STRING)
            state.output.fatal("Line " + d.lineNumber +
                " has no string after preamble '" + preamble + "'\n-->" + d.data);
        return (String)(d.s);
        }

    /** Finds the next nonblank line, skips past an expected preamble, and reads in a character if there is one, and returns it.
        Generates an error otherwise. */
    public static char readCharacterWithPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        DecodeReturn d = checkPreamble(preamble, state, reader);
        Code.decode(d);
        if (d.type!=DecodeReturn.T_CHAR)
            state.output.fatal("Line " + d.lineNumber +
                " has no character after preamble '" + preamble + "'\n-->" + d.data);
        return (char)(d.l);
        }

    /** Finds the next nonblank line, skips past an expected preamble, and reads in a byte if there is one, and returns it.
        Generates an error otherwise. */
    public static byte readByteWithPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        DecodeReturn d = checkPreamble(preamble, state, reader);
        Code.decode(d);
        if (d.type!=DecodeReturn.T_BYTE)
            state.output.fatal("Line " + d.lineNumber +
                " has no byte after preamble '" + preamble + "'\n-->" + d.data);
        return (byte)(d.l);
        }

    /** Finds the next nonblank line, skips past an expected preamble, and reads in a short if there is one, and returns it.
        Generates an error otherwise. */
    public static short readShortWithPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        DecodeReturn d = checkPreamble(preamble, state, reader);
        Code.decode(d);
        if (d.type!=DecodeReturn.T_SHORT)
            state.output.fatal("Line " + d.lineNumber +
                " has no short after preamble '" + preamble + "'\n-->" + d.data);
        return (short)(d.l);
        }

    /** Finds the next nonblank line, skips past an expected preamble, and reads in a long if there is one, and returns it.
        Generates an error otherwise. */
    public static long readLongWithPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        DecodeReturn d = checkPreamble(preamble, state, reader);
        Code.decode(d);
        if (d.type!=DecodeReturn.T_LONG)
            state.output.fatal("Line " + d.lineNumber +
                " has no long after preamble '" + preamble + "'\n-->" + d.data);
        return (long)(d.l);
        }

    /** Finds the next nonblank line, skips past an expected preamble, and reads in an integer if there is one, and returns it.
        Generates an error otherwise. */
    public static int readIntegerWithPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        DecodeReturn d = checkPreamble(preamble, state, reader);
        Code.decode(d);
        if (d.type!=DecodeReturn.T_INT)
            state.output.fatal("Line " + d.lineNumber +
                " has no integer after preamble '" + preamble + "'\n-->" + d.data);
        return (int)(d.l);
        }

    /** Finds the next nonblank line, skips past an expected preamble, and reads in a float if there is one, and returns it.
        Generates an error otherwise. */
    public static float readFloatWithPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        DecodeReturn d = checkPreamble(preamble, state, reader);
        Code.decode(d);
        if (d.type!=DecodeReturn.T_FLOAT)
            state.output.fatal("Line " + d.lineNumber +
                " has no floating point number after preamble '" + preamble + "'\n-->" + d.data);
        return (float)(d.d);
        }

    /** Finds the next nonblank line, skips past an expected preamble, and reads in a double if there is one, and returns it.
        Generates an error otherwise. */
    public static double readDoubleWithPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        DecodeReturn d = checkPreamble(preamble, state, reader);
        Code.decode(d);
        if (d.type!=DecodeReturn.T_DOUBLE)
            state.output.fatal("Line " + d.lineNumber +
                " has no double floating point number after preamble '" + preamble + "'. -->" + d.data);
        return d.d;
        }

    /** Finds the next nonblank line, skips past an expected preamble, and reads in a boolean value ("true" or "false") if there is one, and returns it.
        Generates an error otherwise. */
    public static boolean readBooleanWithPreamble(String preamble, final EvolutionState state,
        final LineNumberReader reader)
        {
        DecodeReturn d = checkPreamble(preamble, state, reader);
        Code.decode(d);
        if (d.type!=DecodeReturn.T_BOOLEAN)
            state.output.fatal("Line " + d.lineNumber +
                " has no boolean value ('true' or 'false') after preamble '" + preamble + "'\n-->" + d.data);
        return (d.l != 0);
        }
    }
/*
(BeanShell testing for decoding)
s = " true false s-12| i232342|b22|f234123|3234.1| d234111231|4342.31|"
s = "\"Hello\" true false s-12| i232342|b22|f234123|3234.1| d234111231|4342.31| ' ' '\\'' '\\n' \"Hello\\u0000\\uWorld\""
c = new ec.util.Code();
r = new ec.util.DecodeReturn(s);
c.decode(r);
System.out.println(r.type);
System.out.println(r.l);
System.out.println(r.d);
System.out.println(r.s);
*/
|
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.fileEditor.ex;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.EditorDataProvider;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.FileEditorProvider;
import com.intellij.openapi.fileEditor.impl.EditorComposite;
import com.intellij.openapi.fileEditor.impl.EditorWindow;
import com.intellij.openapi.fileEditor.impl.EditorsSplitters;
import com.intellij.openapi.fileEditor.impl.FileEditorManagerImpl;
import com.intellij.openapi.fileEditor.impl.text.AsyncEditorLoader;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.BusyObject;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.concurrency.Promise;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
 * Extended entry point into the file-editor subsystem. On top of the base
 * {@link FileEditorManager} contract this class exposes editor windows,
 * splitters, editor/provider pairs and extra editor data providers.
 */
public abstract class FileEditorManagerEx extends FileEditorManager implements BusyObject {
  // Providers added via registerExtraEditorDataProvider(); queried in registration order by getData().
  private final List<EditorDataProvider> myDataProviders = new ArrayList<>();

  /** Convenience accessor returning the project's manager already cast to this extended type. */
  public static FileEditorManagerEx getInstanceEx(@NotNull Project project) {
    return (FileEditorManagerEx)getInstance(project);
  }

  /**
   * @return {@code JComponent} which represent the place where all editors are located
   */
  public abstract JComponent getComponent();

  /**
   * @return preferred focused component inside myEditor tabbed container.
   * This method does similar things like {@link FileEditor#getPreferredFocusedComponent()}
   * but it also tracks (and remember) focus movement inside tabbed container.
   *
   * @see EditorComposite#getPreferredFocusedComponent()
   */
  @Nullable
  public abstract JComponent getPreferredFocusedComponent();

  /** @return all editors currently open for {@code file}, paired with the providers that created them. */
  @NotNull
  public abstract Pair<FileEditor[], FileEditorProvider[]> getEditorsWithProviders(@NotNull VirtualFile file);

  /** @return the file shown in {@code editor}, or {@code null} if the editor is not associated with a file. */
  @Nullable
  public abstract VirtualFile getFile(@NotNull FileEditor editor);

  /**
   * Refreshes the text, colors and icon of the editor tabs representing the specified file.
   *
   * @param file the file to refresh.
   */
  public abstract void updateFilePresentation(@NotNull VirtualFile file);

  /**
   * Synchronous version of {@link #getActiveWindow()}. Will return {@code null} if invoked not from EDT.
   * @return current window in splitters
   */
  public abstract EditorWindow getCurrentWindow();

  /**
   * Asynchronous version of {@link #getCurrentWindow()}. Execution happens after focus settle down. Can be invoked on any thread.
   */
  @NotNull
  public abstract Promise<EditorWindow> getActiveWindow();

  /** Makes {@code window} the current editor window. */
  public abstract void setCurrentWindow(EditorWindow window);

  /**
   * Closes editors for the file opened in particular window.
   *
   * @param file file to be closed. Cannot be null.
   */
  public abstract void closeFile(@NotNull VirtualFile file, @NotNull EditorWindow window);

  /** Removes the split containing the current window, merging its tabs back. */
  public abstract void unsplitWindow();

  /** Removes all splits, merging every window's tabs into one container. */
  public abstract void unsplitAllWindow();

  /** @return the number of editor windows produced by splitting. */
  public abstract int getWindowSplitCount();

  /** @return {@code true} if the editor area is split or has undocked windows. */
  public abstract boolean hasSplitOrUndockedWindows();

  /** @return all editor windows across every splitter. */
  @NotNull
  public abstract EditorWindow[] getWindows();

  /**
   * @return arrays of all files (including {@code file} itself) that belong
   * to the same tabbed container. The method returns empty array if {@code file}
   * is not open. The returned files have the same order as they have in the
   * tabbed container.
   */
  @NotNull
  public abstract VirtualFile[] getSiblings(@NotNull VirtualFile file);

  /** Splits {@code window} (or the current window when {@code null}) in the given orientation. */
  public abstract void createSplitter(int orientation, @Nullable EditorWindow window);

  /** Toggles the current splitter between horizontal and vertical orientation. */
  public abstract void changeSplitterOrientation();

  /** Toggles the tab presentation mode of the editor windows. */
  public abstract void flipTabs();

  /** @return {@code true} if editor tabs are currently shown. */
  public abstract boolean tabsMode();

  /** @return {@code true} if the current window is part of a split layout. */
  public abstract boolean isInSplitter();

  /** @return {@code true} if at least one file is open in the editor. */
  public abstract boolean hasOpenedFile ();

  /** @return the file of the currently selected editor, or {@code null} if nothing is open. */
  @Nullable
  public abstract VirtualFile getCurrentFile();

  /** @return the selected editor for {@code file} together with its provider, or {@code null} if the file is not open. */
  @Nullable
  public abstract Pair <FileEditor, FileEditorProvider> getSelectedEditorWithProvider(@NotNull VirtualFile file);

  /**
   * Closes all files IN ACTIVE SPLITTER (window).
   *
   * @see com.intellij.ui.docking.DockManager#getContainers()
   * @see com.intellij.ui.docking.DockContainer#closeAll()
   */
  public abstract void closeAllFiles();

  /** @return the splitters hierarchy hosting the editor windows. */
  @NotNull
  public abstract EditorsSplitters getSplitters();

  /** Opens {@code file} without searching other splitters for an already-open editor. */
  @Override
  @NotNull
  public FileEditor[] openFile(@NotNull final VirtualFile file, final boolean focusEditor) {
    return openFileWithProviders(file, focusEditor, false).getFirst ();
  }

  /** Opens {@code file}, optionally reusing an editor already open in another splitter. */
  @NotNull
  @Override
  public FileEditor[] openFile(@NotNull VirtualFile file, boolean focusEditor, boolean searchForOpen) {
    return openFileWithProviders(file, focusEditor, searchForOpen).getFirst();
  }

  /** Opens {@code file} and returns the created editors paired with their providers. */
  @NotNull
  public abstract Pair<FileEditor[],FileEditorProvider[]> openFileWithProviders(@NotNull VirtualFile file,
                                                                                boolean focusEditor,
                                                                                boolean searchForSplitter);

  /** Opens {@code file} in the given {@code window} and returns the created editors paired with their providers. */
  @NotNull
  public abstract Pair<FileEditor[],FileEditorProvider[]> openFileWithProviders(@NotNull VirtualFile file,
                                                                                boolean focusEditor,
                                                                                @NotNull EditorWindow window);

  /** @return {@code true} if the given composite has unsaved modifications. */
  public abstract boolean isChanged(@NotNull EditorComposite editor);

  /** @return the window after {@code window} in traversal order. */
  public abstract EditorWindow getNextWindow(@NotNull final EditorWindow window);

  /** @return the window before {@code window} in traversal order. */
  public abstract EditorWindow getPrevWindow(@NotNull final EditorWindow window);

  /** @return {@code true} while the manager is in the middle of an editor-composition change. */
  public abstract boolean isInsideChange();

  /**
   * Queries the registered extra data providers in registration order and
   * returns the first non-null value for {@code dataId}.
   */
  @Override
  @Nullable
  public final Object getData(@NotNull String dataId, @NotNull Editor editor, @NotNull Caret caret) {
    for (final EditorDataProvider dataProvider : myDataProviders) {
      final Object o = dataProvider.getData(dataId, editor, caret);
      if (o != null) return o;
    }
    return null;
  }

  /**
   * Registers an additional data provider consulted by {@link #getData}.
   * The provider is removed when {@code parentDisposable} is disposed.
   * NOTE(review): when {@code parentDisposable} is {@code null} the provider is
   * never removed — verify callers always pass a disposable.
   */
  @Override
  public void registerExtraEditorDataProvider(@NotNull final EditorDataProvider provider, Disposable parentDisposable) {
    myDataProviders.add(provider);
    if (parentDisposable != null) {
      Disposer.register(parentDisposable, () -> myDataProviders.remove(provider));
    }
  }

  /**
   * Re-renders the icon of every open file in every splitter.
   * Only has an effect when this instance is a {@code FileEditorManagerImpl}.
   */
  public void refreshIcons() {
    if (this instanceof FileEditorManagerImpl) {
      final FileEditorManagerImpl mgr = (FileEditorManagerImpl)this;
      Set<EditorsSplitters> splitters = mgr.getAllSplitters();
      for (EditorsSplitters each : splitters) {
        for (VirtualFile file : mgr.getOpenFiles()) {
          each.updateFileIcon(file);
        }
      }
    }
  }

  /** @return the splitters hierarchy that contains the given AWT component. */
  public abstract EditorsSplitters getSplittersFor(Component c);

  /** Runs {@code runnable} through the event publisher; the callback completes when delivery is done. */
  @NotNull
  public abstract ActionCallback notifyPublisher(@NotNull Runnable runnable);

  /** Defers {@code runnable} until the (possibly asynchronously loading) editor is fully loaded. */
  @Override
  public void runWhenLoaded(@NotNull Editor editor, @NotNull Runnable runnable) {
    AsyncEditorLoader.performWhenLoaded(editor, runnable);
  }
}
|
|
package io.mzb.Appbot;
import io.mzb.Appbot.commands.CommandManager;
import io.mzb.Appbot.events.EventListener;
import io.mzb.Appbot.events.EventManager;
import io.mzb.Appbot.log.AppbotLogger;
import io.mzb.Appbot.plugin.PluginManager;
import io.mzb.Appbot.threads.ShutdownThread;
import io.mzb.Appbot.threads.TaskManager;
import io.mzb.Appbot.twitch.Channel;
import io.mzb.Appbot.twitch.irc.IRCHandler;
import org.apache.commons.io.FileUtils;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import java.io.*;
import java.net.URISyntaxException;
import java.text.SimpleDateFormat;
import java.util.Date;
public class Appbot extends EventListener {

    /**
     * Template written to settings.json the first time the bot runs; the user
     * must replace the placeholders before the bot can connect.
     */
    private static final String defaultSettingsJson = "{\n" +
            " \"auth\": {\n" +
            " \"username\": \"your_bot_username\",\n" +
            " \"oauth\": \"your_bot_oauth_key\",\n" +
            " \"clientid\": \"your_client_id\"\n" +
            " },\n" +
            " \"connect\": {\n" +
            " \"channel\": \"your_stream_channel\"\n" +
            " }\n" +
            "}";

    /** Application version reported on startup. */
    private static final String VERSION = "beta-1";

    // Credentials loaded from the "auth" section of settings.json.
    private static String BOT_NAME, BOT_OAUTH, BOT_CLIENTID;
    // Default channel loaded from the "connect" section of settings.json.
    private static Channel CHANNEL;

    // Subsystem singletons created during startup, exposed via the static getters below.
    private static TaskManager taskManager;
    private static PluginManager pluginManager;
    private static IRCHandler ircHandler;
    private static EventManager eventManager;
    private static CommandManager commandManager;

    /**
     * Boots the bot: sets up logging, the plugins folder and the settings file,
     * loads settings and — once the configured channel validates — starts the
     * managers, plugins and the IRC connection.
     *
     * @throws IOException          a settings or log file could not be read/written
     * @throws InterruptedException the first-run warning sleep was interrupted
     * @throws ParseException       settings.json is not valid JSON
     */
    public Appbot() throws IOException, InterruptedException, ParseException {
        System.out.println("Starting Appbot version " + VERSION);
        System.out.println("Active location: " + getActiveLocation().toString());
        Runtime.getRuntime().addShutdownHook(new ShutdownThread());
        setupLogger();
        setupPluginsFolder();
        setupSettingsFile();
        taskManager = new TaskManager();
        loadSettings(() -> {
            if (CHANNEL.isValid()) {
                System.out.println("Loading managers");
                // Load only if a default channel is found and valid
                eventManager = new EventManager();
                commandManager = new CommandManager();
                System.out.println("Managers loaded");
                System.out.println("Loading plugins");
                // Plugins - only loaded if channel is valid!
                // Must be loaded after all other managers so plugins can use them
                pluginManager = new PluginManager(getPluginsFolder());
                System.out.println("Default channel loaded: " + CHANNEL.getName());
                System.out.println("Connecting to IRC");
                // Init the irc handler
                ircHandler = new IRCHandler();
                // Connect to the irc
                ircHandler.connect();
                // Send auth info
                ircHandler.sendAuth();
                // Join the channel on the irc
                CHANNEL.joinIrc();
                System.out.println("IRC connection complete");
            } else {
                // The channel is not valid (does not exist, can't connect to api, twitch returned error).
                // Terminate: nothing else can happen if there is no channel to connect to.
                System.out.println("Default channel is invalid: " + CHANNEL.getInvalidReason());
                System.out.println("Please make sure that you have typed the channel name correctly!");
                System.exit(1);
            }
        });
        taskManager.initMainThreadQueue();
    }

    /**
     * Setup a new log file and set the default system output to print to this file and the normal console
     *
     * @throws FileNotFoundException Log file can't be found
     */
    private void setupLogger() throws FileNotFoundException {
        // Get new log file
        File logFile = getNewLogFile();
        // File output stream for the log file; stays open for the life of the
        // process since System.out/err are redirected through it.
        FileOutputStream fileOutputStream = new FileOutputStream(logFile);
        PrintStream printStream = new PrintStream(fileOutputStream);
        // Tee output to both the log file and the original console stream.
        AppbotLogger logger = new AppbotLogger(printStream, System.out);
        System.setOut(logger);
        System.setErr(logger);
    }

    /**
     * Only needs to be called on startup.
     * Checks if the plugin folder exists; if not, creates it.
     */
    private void setupPluginsFolder() {
        if (!getPluginsFolder().exists()) {
            if (getPluginsFolder().mkdirs())
                System.out.println("Plugin folder made for the first time");
        }
    }

    /**
     * Only needs to be called on startup
     * Checks if a settings file exists, if not then create it
     * When the settings file is made for the first time it will be filled with the default
     * settings template and the application will sleep for 5 seconds then stop.
     * The user is warned that they should edit this file after is it made.
     *
     * @throws IOException Read/Write of file failed
     * @throws InterruptedException Read/Write was interrupted
     */
    private void setupSettingsFile() throws IOException, InterruptedException {
        // Setup settings file
        if (!getSettingsFile().exists()) {
            if (getSettingsFile().createNewFile()) {
                FileUtils.writeStringToFile(getSettingsFile(), defaultSettingsJson, "UTF-8");
                System.out.println("Settings file made for the first time");
                System.out.println("Please configure your settings file before starting this bot again!");
                Thread.sleep(5000);
            } else {
                System.out.println("Settings file failed to make!");
                System.exit(1);
            }
        }
    }

    /**
     * Loads the settings from the settings.json file.
     * If any required setting is missing an error is printed and the runnable is never invoked.
     * After all settings have been loaded the default channel is created.
     * In the case this is used for a settings file reload, the default channel should be killed first.
     *
     * @param runnable Will be passed to the default channel init.
     * @throws IOException Failed to read the file
     * @throws ParseException Failed to parse the file
     */
    private void loadSettings(Runnable runnable) throws IOException, ParseException {
        // Load settings
        JSONObject settings = (JSONObject) new JSONParser().parse(FileUtils.readFileToString(getSettingsFile(), "UTF-8"));
        if (settings == null) {
            // Settings file does not exist!
            System.err.println("Settings file null!");
            return;
        }
        // Get auth section of the settings
        JSONObject auth = (JSONObject) settings.get("auth");
        if (auth == null) {
            // Settings file does not contain auth section!
            System.err.println("Settings file does not contain auth!");
            return;
        }
        if (!auth.containsKey("username")) {
            // Auth settings does not contain username
            System.err.println("Settings auth does not contain username!");
            return;
        }
        // Set bot name from the auth
        BOT_NAME = auth.get("username").toString();
        if (!auth.containsKey("oauth")) {
            // Auth does not contain oauth token
            System.err.println("Settings auth does not contain oauth!");
            return;
        }
        // Set oauth token from auth
        BOT_OAUTH = auth.get("oauth").toString();
        if (!auth.containsKey("clientid")) {
            // Client Id not in auth settings
            System.err.println("Settings auth does not contain clientid!");
            return;
        }
        // Set client id from auth
        BOT_CLIENTID = auth.get("clientid").toString();
        // Get connection part of settings.
        // FIX: previously this section and its "channel" key were dereferenced
        // without a null/containsKey check, causing an NPE on malformed settings.
        JSONObject connection = (JSONObject) settings.get("connect");
        if (connection == null || !connection.containsKey("channel")) {
            System.err.println("Settings file does not contain connect/channel!");
            return;
        }
        String channelName = connection.get("channel").toString().toLowerCase();
        CHANNEL = new Channel(channelName, runnable);
    }

    /**
     * @return The application task manager. Allows for easy multi-threading.
     */
    public static TaskManager getTaskManager() {
        return taskManager;
    }

    /**
     * @return The application event manager. Allows for simple event systems for plugins.
     */
    public static EventManager getEventManager() {
        return eventManager;
    }

    /**
     * @return Application irc handler. Handles any irc related stuff.
     */
    public static IRCHandler getIrcHandler() {
        return ircHandler;
    }

    /**
     * @return Command manager, manages any commands that plugins may register
     */
    public static CommandManager getCommandManager() {
        return commandManager;
    }

    /**
     * @return Plugin manager, manages all plugins
     */
    public static PluginManager getPluginManager() {
        return pluginManager;
    }

    /**
     * @return The default channel connection specified in the settings.json
     */
    public static Channel getDefaultChannel() {
        return CHANNEL;
    }

    /**
     * @return The client id used to talk with the twitch api
     */
    public static String getClientId() {
        return BOT_CLIENTID;
    }

    /**
     * @return The twitch username that the bot is using in the irc
     */
    public static String getName() {
        return BOT_NAME;
    }

    /**
     * @return The token that is used to authenticate with the irc server
     */
    public static String getOAuthToken() {
        return BOT_OAUTH;
    }

    /**
     * @return The directory that the application is being ran from
     */
    private File getActiveLocation() {
        try {
            File activeLocation = new File(Appbot.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath());
            if (activeLocation.toString().endsWith(".jar")) {
                // Running from a jar: use the directory containing the jar.
                // FIX: the previous substring on File.separator broke on Windows,
                // where the URI-derived path uses '/' but File.separator is '\'
                // (it also accessed the static File.separator via an instance).
                return activeLocation.getParentFile();
            }
            return activeLocation;
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
        // NOTE(review): callers never null-check this, so startup will NPE if the
        // code-source location cannot be resolved; kept for compatibility.
        return null;
    }

    /**
     * @return The file location all plugins should be located under
     */
    private File getPluginsFolder() {
        return new File(getActiveLocation(), "plugins");
    }

    /**
     * @return The file that contains all of the settings for the application
     */
    private File getSettingsFile() {
        return new File(getActiveLocation(), "settings.json");
    }

    /**
     * @return The parent log folder that contains all text logs from the application
     */
    private File getLogsFile() {
        return new File(getActiveLocation(), "logs");
    }

    /**
     * @return A new plain text file that will contain logs for the current application instance.
     */
    private File getNewLogFile() {
        if (!getLogsFile().exists()) {
            getLogsFile().mkdirs();
        }
        // One log file per run, named by start timestamp.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH-mm-ss");
        File file = new File(getLogsFile(), sdf.format(new Date()) + ".txt");
        if (!file.exists()) {
            try {
                file.createNewFile();
            } catch (IOException e) {
                System.out.println("Failed to create new log file! Stopping!");
                e.printStackTrace();
                System.exit(1);
            }
        }
        return file;
    }

    // Program init
    public static void main(String[] args) {
        try {
            new Appbot();
        } catch (IOException | InterruptedException | ParseException e) {
            e.printStackTrace();
        }
    }
}
|
|
package org.netbeans.modules.atom.electron.options;
import org.netbeans.modules.atom.electron.glue.Preferences;
import org.netbeans.modules.atom.electron.cmd.Command;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import javax.swing.JFileChooser;
import javax.swing.SwingUtilities;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import org.openide.awt.HtmlBrowser.URLDisplayer;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import static org.netbeans.modules.atom.electron.cmd.AbstractCommandFactory.createCommand;
import org.netbeans.modules.atom.electron.glue.CommandType;
import org.netbeans.modules.atom.electron.glue.CommandType.Type;
/**
 * Options panel for the Electron integration: lets the user choose the Electron
 * executable, debug URL/port and whether to pause on the first line, validating
 * the executable path as the user types.
 */
final class ElectronPanel extends javax.swing.JPanel {
    // Localized error shown under the path field when the path is not a readable file.
    private final String invalidPathMessage;
    // Notified on every form change so the options dialog can enable Apply.
    private final ElectronOptionsPanelController controller;
    // Backing store the panel loads from and stores to.
    private final ElectronPreferences prefs;

    ElectronPanel(ElectronOptionsPanelController controller) {
        this.controller = controller;
        this.prefs = new ElectronPreferences();
        initComponents();
        // Re-validate the path and notify the controller on every keystroke.
        txtPath.getDocument().addDocumentListener(new DocListener());
        invalidPathMessage = NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.errLblPath.invalid");
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        lblPath = new javax.swing.JLabel();
        txtPath = new javax.swing.JTextField();
        btnBrowse = new javax.swing.JButton();
        errLblPath = new javax.swing.JLabel();
        lblCmd = new javax.swing.JLabel();
        txtRunCmd = new javax.swing.JTextField();
        lblDebugPort = new javax.swing.JLabel();
        txtDebugPort = new javax.swing.JTextField();
        chkPause = new javax.swing.JCheckBox();
        jSeparator1 = new javax.swing.JSeparator();
        lblReq = new javax.swing.JLabel();
        lblNodeInspec = new javax.swing.JLabel();
        lblDebugUrl = new javax.swing.JLabel();
        txtDebugUrl = new javax.swing.JTextField();

        lblPath.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
        org.openide.awt.Mnemonics.setLocalizedText(lblPath, org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.lblPath.text")); // NOI18N

        txtPath.setText(org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.txtPath.text")); // NOI18N

        org.openide.awt.Mnemonics.setLocalizedText(btnBrowse, org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.btnBrowse.text")); // NOI18N
        btnBrowse.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnBrowseActionPerformed(evt);
            }
        });

        errLblPath.setForeground(new java.awt.Color(255, 0, 0));
        org.openide.awt.Mnemonics.setLocalizedText(errLblPath, org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.errLblPath.text")); // NOI18N

        lblCmd.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
        org.openide.awt.Mnemonics.setLocalizedText(lblCmd, org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.lblCmd.text")); // NOI18N

        txtRunCmd.setEditable(false);
        txtRunCmd.setText(org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.txtRunCmd.text")); // NOI18N

        lblDebugPort.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
        org.openide.awt.Mnemonics.setLocalizedText(lblDebugPort, org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.lblDebugPort.text")); // NOI18N

        txtDebugPort.setText(org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.txtDebugPort.text")); // NOI18N

        org.openide.awt.Mnemonics.setLocalizedText(chkPause, org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.chkPause.text")); // NOI18N
        chkPause.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                chkPauseActionPerformed(evt);
            }
        });

        org.openide.awt.Mnemonics.setLocalizedText(lblReq, org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.lblReq.text")); // NOI18N

        lblNodeInspec.setForeground(new java.awt.Color(0, 0, 204));
        org.openide.awt.Mnemonics.setLocalizedText(lblNodeInspec, org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.lblNodeInspec.text")); // NOI18N
        lblNodeInspec.setCursor(new java.awt.Cursor(java.awt.Cursor.HAND_CURSOR));
        lblNodeInspec.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                lblNodeInspecMouseClicked(evt);
            }
        });

        org.openide.awt.Mnemonics.setLocalizedText(lblDebugUrl, org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.lblDebugUrl.text")); // NOI18N

        txtDebugUrl.setText(org.openide.util.NbBundle.getMessage(ElectronPanel.class, "ElectronPanel.txtDebugUrl.text")); // NOI18N

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(10, 10, 10)
                        .addComponent(lblReq)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(lblNodeInspec, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jSeparator1)
                            .addGroup(layout.createSequentialGroup()
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                                    .addComponent(lblDebugUrl)
                                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                                        .addComponent(lblPath, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                        .addComponent(lblDebugPort, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                        .addComponent(lblCmd, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))
                                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                    .addGroup(layout.createSequentialGroup()
                                        .addComponent(chkPause)
                                        .addGap(0, 0, Short.MAX_VALUE))
                                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                                            .addComponent(txtDebugUrl, javax.swing.GroupLayout.Alignment.LEADING)
                                            .addComponent(errLblPath, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                            .addComponent(txtPath, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 193, Short.MAX_VALUE)
                                            .addComponent(txtRunCmd, javax.swing.GroupLayout.Alignment.LEADING)
                                            .addComponent(txtDebugPort, javax.swing.GroupLayout.Alignment.LEADING))
                                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                                        .addComponent(btnBrowse)))))
                        .addContainerGap())))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(lblCmd)
                    .addComponent(txtRunCmd, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(18, 18, 18)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(lblPath)
                    .addComponent(txtPath, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(btnBrowse))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(errLblPath, javax.swing.GroupLayout.PREFERRED_SIZE, 20, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(1, 1, 1)
                .addComponent(jSeparator1, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(lblReq)
                    .addComponent(lblNodeInspec, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(lblDebugUrl)
                    .addComponent(txtDebugUrl, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(18, 18, 18)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(lblDebugPort)
                    .addComponent(txtDebugPort, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(chkPause)
                .addContainerGap(13, Short.MAX_VALUE))
        );
    }// </editor-fold>//GEN-END:initComponents

    // Lets the user pick the Electron executable via a file chooser; the path
    // field update triggers DocListener, which re-validates and notifies the controller.
    private void btnBrowseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnBrowseActionPerformed
        JFileChooser fileChooser = new JFileChooser();
        int retVal = fileChooser.showOpenDialog(this);
        if (retVal == JFileChooser.APPROVE_OPTION) {
            File file = fileChooser.getSelectedFile();
            txtPath.setText(file.getAbsolutePath());
        }
    }//GEN-LAST:event_btnBrowseActionPerformed

    // No immediate action needed; the checkbox state is read in store().
    private void chkPauseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_chkPauseActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_chkPauseActionPerformed

    // Opens the node-inspector quick-start page in the configured browser.
    private void lblNodeInspecMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_lblNodeInspecMouseClicked
        try {
            URL url = new URL("https://github.com/node-inspector/node-inspector#quick-start");
            URLDisplayer.getDefault().showURL(url);
        } catch (IOException e) {
            Exceptions.printStackTrace(e);
        }
    }//GEN-LAST:event_lblNodeInspecMouseClicked

    /** Populates the form fields from the stored preferences. */
    void load() {
        txtRunCmd.setText(buildCommandText());
        txtPath.setText(prefs.getExecutable());
        txtDebugUrl.setText(prefs.getDebugUrl());
        txtDebugPort.setText(prefs.getDebugPort());
        chkPause.setSelected(prefs.isBreakOnFirstLine());
    }

    /**
     * Builds the read-only "run command" preview text from the ELECTRON_RUN
     * command's executable and arguments, joined with single spaces.
     */
    private String buildCommandText() {
        StringBuilder builder = new StringBuilder();
        Command cmd = createCommand(new CommandType() {
            @Override
            public Type getType() {
                return Type.ELECTRON_RUN;
            }
            @Override
            public Preferences getPreferences() {
                // Preview only — no preferences are needed to render the command line.
                return null;
            }
        });
        builder.append(cmd.getExecutable()).append(" ");
        cmd.getArguments().forEach(s -> builder.append(s).append(" "));
        return builder.toString().trim();
    }

    /** Writes the form fields back to the stored preferences. */
    void store() {
        prefs.setExecutable(txtPath.getText());
        prefs.setBreakOnFirstLine(chkPause.isSelected());
        prefs.setDebugUrl(txtDebugUrl.getText());
        prefs.setDebugPort(txtDebugPort.getText());
    }

    /** @return {@code true} when the entered executable path is an existing, readable file. */
    boolean valid() {
        File f = new File(txtPath.getText());
        return f.exists() && f.canRead();
    }

    // Shows/clears the path error label; posted to the EDT because document
    // listeners may not mutate the document's component synchronously.
    private void updateErrors() {
        SwingUtilities.invokeLater(() -> {
            if (!valid()) {
                errLblPath.setText(invalidPathMessage);
            } else {
                errLblPath.setText("");
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton btnBrowse;
    private javax.swing.JCheckBox chkPause;
    private javax.swing.JLabel errLblPath;
    private javax.swing.JSeparator jSeparator1;
    private javax.swing.JLabel lblCmd;
    private javax.swing.JLabel lblDebugPort;
    private javax.swing.JLabel lblDebugUrl;
    private javax.swing.JLabel lblNodeInspec;
    private javax.swing.JLabel lblPath;
    private javax.swing.JLabel lblReq;
    private javax.swing.JTextField txtDebugPort;
    private javax.swing.JTextField txtDebugUrl;
    private javax.swing.JTextField txtPath;
    private javax.swing.JTextField txtRunCmd;
    // End of variables declaration//GEN-END:variables

    /** Funnels every kind of document change into one handler that re-validates the path. */
    private class DocListener implements DocumentListener {
        @Override
        public void insertUpdate(DocumentEvent e) {
            changedUpdate(e);
        }
        @Override
        public void removeUpdate(DocumentEvent e) {
            changedUpdate(e);
        }
        @Override
        public void changedUpdate(DocumentEvent e) {
            updateErrors();
            controller.changed();
        }
    }
}
|
|
package com.seven10.update_guy.server.repository;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import javax.ws.rs.core.Response.Status;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import com.google.gson.annotations.Expose;
import com.seven10.update_guy.common.FileFingerPrint;
import com.seven10.update_guy.common.GsonFactory;
import com.seven10.update_guy.server.exceptions.RepositoryException;
@XmlRootElement
public class RepositoryInfo
{
	/** Transport used to reach the repository. */
	public enum RepositoryType
	{
		@Expose
		@XmlElement
		local,
		@Expose
		@XmlElement
		ftp
	}

	/**
	 * The DNS-resolvable name or IP address for the repo. This should be 'localhost' for
	 * local repos
	 */
	@Expose
	public String repoAddress;
	/**
	 * The port to use for this repo
	 */
	@Expose
	public int port;
	/**
	 * The user account for this repo. This value is ignored for local repos
	 */
	@Expose
	public String user;
	/**
	 * The password for this repo. This value is ignored for local repos
	 * Note: This value should be filtered out or masked when serialized to a string
	 */
	@Expose
	public String password;
	/**
	 * The path on the repo where any manifests are stored
	 */
	@Expose
	public String manifestPath;
	/**
	 * a human readable description
	 */
	@Expose
	public String description;
	/**
	 * Repository type
	 */
	@Expose
	public RepositoryType repoType;
	/**
	 * order in which to look for information when autodetecting the correct repo
	 * (intentionally excluded from equals/hashCode/fingerprinting)
	 */
	@Expose
	public int priority;

	/** Creates an info object describing a local repository with placeholder values. */
	public RepositoryInfo()
	{
		repoAddress = "";
		port = 0;
		user = "";
		password = "";
		manifestPath = ".";
		repoType = RepositoryType.local;
		description = "unknown";
	}

	/**
	 * Serializes the identifying fields into one raw byte sequence for fingerprinting.
	 * NOTE(review): fields are concatenated without delimiters and via the
	 * platform-default charset; left untouched so previously computed hashes stay stable.
	 *
	 * @return the concatenated field bytes
	 * @throws RepositoryException if the in-memory stream unexpectedly fails to write
	 */
	private byte[] toByteArray() throws RepositoryException
	{
		ByteArrayOutputStream buffer = new ByteArrayOutputStream();
		// Same field order as the original hand-written writes.
		String[] parts = { description, manifestPath, password, repoAddress,
				repoType.toString(), user, Integer.toString(port) };
		try
		{
			for (String part : parts)
			{
				buffer.write(part.getBytes());
			}
		}
		catch (IOException e)
		{
			throw new RepositoryException(Status.INTERNAL_SERVER_ERROR, "Could not convert RepoInfo object to byte array. reason: %s", e.getMessage());
		}
		return buffer.toByteArray();
	}

	/**
	 * Hash over the same fields compared by {@link #equals(Object)};
	 * produces the identical value as the previous field-by-field expansion.
	 */
	@Override
	public int hashCode()
	{
		int hash = 1;
		Object[] fields = { description, manifestPath, password, repoAddress, repoType, user };
		for (Object field : fields)
		{
			hash = 31 * hash + (field == null ? 0 : field.hashCode());
		}
		return 31 * hash + port;
	}

	/** Null-safe equality helper for the field comparisons in {@link #equals(Object)}. */
	private static boolean eq(Object a, Object b)
	{
		return (a == null) ? (b == null) : a.equals(b);
	}

	/** Field-wise equality on everything except {@code priority}. */
	@Override
	public boolean equals(Object obj)
	{
		if (this == obj)
		{
			return true;
		}
		// instanceof is false for null, covering the explicit null check too.
		if (!(obj instanceof RepositoryInfo))
		{
			return false;
		}
		RepositoryInfo other = (RepositoryInfo) obj;
		return port == other.port
				&& repoType == other.repoType
				&& eq(description, other.description)
				&& eq(manifestPath, other.manifestPath)
				&& eq(password, other.password)
				&& eq(repoAddress, other.repoAddress)
				&& eq(user, other.user);
	}

	/**
	 * JSON rendering of this object.
	 * NOTE(review): includes the password field — avoid writing this to logs.
	 */
	@Override
	public String toString()
	{
		return GsonFactory.getGson().toJson(this);
	}

	/**
	 * @return the fingerprint of the identifying fields
	 * @throws RepositoryException if the digest could not be computed
	 */
	public String getShaHash() throws RepositoryException
	{
		try
		{
			return FileFingerPrint.create(toByteArray());
		}
		catch (IOException e)
		{
			throw new RepositoryException(Status.INTERNAL_SERVER_ERROR, "Could not create message digest for RepositoryInfo. reason: %s", e.getMessage());
		}
	}

	/** @return the configured manifest location as a {@link Path}. */
	public Path getRemoteManifestPath()
	{
		return Paths.get(manifestPath);
	}
}
|
|
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.features.go;
import com.facebook.buck.core.cell.CellPathResolver;
import com.facebook.buck.core.description.MetadataProvidingDescription;
import com.facebook.buck.core.description.arg.CommonDescriptionArg;
import com.facebook.buck.core.description.arg.HasContacts;
import com.facebook.buck.core.description.arg.HasDeclaredDeps;
import com.facebook.buck.core.description.arg.HasSrcs;
import com.facebook.buck.core.description.arg.HasTestTimeout;
import com.facebook.buck.core.description.attr.ImplicitDepsInferringDescription;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.Flavor;
import com.facebook.buck.core.model.FlavorDomain;
import com.facebook.buck.core.model.Flavored;
import com.facebook.buck.core.model.InternalFlavor;
import com.facebook.buck.core.model.targetgraph.BuildRuleCreationContextWithTargetGraph;
import com.facebook.buck.core.model.targetgraph.DescriptionWithTargetGraph;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleParams;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.rules.common.BuildableSupport;
import com.facebook.buck.core.rules.impl.NoopBuildRuleWithDeclaredAndExtraDeps;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolver;
import com.facebook.buck.core.sourcepath.resolver.impl.DefaultSourcePathResolver;
import com.facebook.buck.core.toolchain.ToolchainProvider;
import com.facebook.buck.core.toolchain.tool.Tool;
import com.facebook.buck.core.util.immutables.BuckStyleImmutable;
import com.facebook.buck.cxx.toolchain.CxxPlatforms;
import com.facebook.buck.cxx.toolchain.linker.Linker;
import com.facebook.buck.features.go.GoListStep.ListType;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.versions.Version;
import com.facebook.buck.versions.VersionRoot;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Optional;
import org.immutables.value.Value;
/**
 * Buck rule description for {@code go_test} targets. Builds a compiled test
 * library, a generated test-main binary, and wraps them in a {@link GoTest}
 * rule that the test runner executes.
 */
public class GoTestDescription
    implements DescriptionWithTargetGraph<GoTestDescriptionArg>,
        Flavored,
        MetadataProvidingDescription<GoTestDescriptionArg>,
        ImplicitDepsInferringDescription<GoTestDescription.AbstractGoTestDescriptionArg>,
        VersionRoot<GoTestDescriptionArg> {

  // Flavor identifying the intermediate compiled "test library" target.
  private static final Flavor TEST_LIBRARY_FLAVOR = InternalFlavor.of("test-library");

  private final GoBuckConfig goBuckConfig;
  private final ToolchainProvider toolchainProvider;

  public GoTestDescription(GoBuckConfig goBuckConfig, ToolchainProvider toolchainProvider) {
    this.goBuckConfig = goBuckConfig;
    this.toolchainProvider = toolchainProvider;
  }

  @Override
  public Class<GoTestDescriptionArg> getConstructorArgType() {
    return GoTestDescriptionArg.class;
  }

  /** A target is valid here if it names a known Go platform flavor or the test-library flavor. */
  @Override
  public boolean hasFlavors(ImmutableSet<Flavor> flavors) {
    return getGoToolchain().getPlatformFlavorDomain().containsAnyOf(flavors)
        || flavors.contains(TEST_LIBRARY_FLAVOR);
  }

  /**
   * Serves metadata requests for test-library-flavored targets: the
   * {@link GoLinkable} that maps the test package name to the compiled output,
   * or the transitive set of Go linkables. All other requests yield
   * {@link Optional#empty()}.
   */
  @Override
  public <U> Optional<U> createMetadata(
      BuildTarget buildTarget,
      ActionGraphBuilder graphBuilder,
      CellPathResolver cellRoots,
      GoTestDescriptionArg args,
      Optional<ImmutableMap<BuildTarget, Version>> selectedVersions,
      Class<U> metadataClass) {
    Optional<GoPlatform> platform =
        getGoToolchain().getPlatformFlavorDomain().getValue(buildTarget);
    if (metadataClass.isAssignableFrom(GoLinkable.class)
        && buildTarget.getFlavors().contains(TEST_LIBRARY_FLAVOR)) {
      // Linkable describing the compiled test package itself.
      Preconditions.checkState(platform.isPresent());
      Path packageName = getGoPackageName(graphBuilder, buildTarget, args);
      SourcePath output = graphBuilder.requireRule(buildTarget).getSourcePathToOutput();
      return Optional.of(
          metadataClass.cast(
              GoLinkable.builder().setGoLinkInput(ImmutableMap.of(packageName, output)).build()));
    } else if (buildTarget.getFlavors().contains(GoDescriptors.TRANSITIVE_LINKABLES_FLAVOR)
        && buildTarget.getFlavors().contains(TEST_LIBRARY_FLAVOR)) {
      Preconditions.checkState(platform.isPresent());
      ImmutableSet<BuildTarget> deps;
      if (args.getLibrary().isPresent()) {
        // Internal test: link against the union of the test's own deps and the
        // library-under-test's deps.
        GoLibraryDescriptionArg libraryArg =
            graphBuilder
                .requireMetadata(args.getLibrary().get(), GoLibraryDescriptionArg.class)
                .get();
        deps =
            ImmutableSortedSet.<BuildTarget>naturalOrder()
                .addAll(args.getDeps())
                .addAll(libraryArg.getDeps())
                .build();
      } else {
        deps = args.getDeps();
      }
      return Optional.of(
          metadataClass.cast(
              GoDescriptors.requireTransitiveGoLinkables(
                  buildTarget, graphBuilder, platform.get(), deps, /* includeSelf */ true)));
    } else {
      return Optional.empty();
    }
  }

  /**
   * Creates and registers the rule that generates the Go test-main source for
   * the given test sources, coverage variables and package.
   */
  private GoTestMain requireTestMainGenRule(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      ActionGraphBuilder graphBuilder,
      GoPlatform platform,
      ImmutableSet<SourcePath> srcs,
      ImmutableMap<Path, ImmutableMap<String, Path>> coverVariables,
      GoTestCoverStep.Mode coverageMode,
      Path packageName) {
    Tool testMainGenerator =
        GoDescriptors.getTestMainGenerator(
            goBuckConfig, platform, buildTarget, projectFilesystem, params, graphBuilder);
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(graphBuilder);
    GoTestMain generatedTestMain =
        new GoTestMain(
            buildTarget.withAppendedFlavors(InternalFlavor.of("test-main-src")),
            projectFilesystem,
            // The generator tool's own runtime deps must be declared deps of the
            // generated-source rule.
            params.withDeclaredDeps(
                ImmutableSortedSet.<BuildRule>naturalOrder()
                    .addAll(BuildableSupport.getDepsCollection(testMainGenerator, ruleFinder))
                    .build()),
            testMainGenerator,
            srcs,
            packageName,
            platform,
            coverVariables,
            coverageMode);
    graphBuilder.addToIndex(generatedTestMain);
    return generatedTestMain;
  }

  @Override
  public BuildRule createBuildRule(
      BuildRuleCreationContextWithTargetGraph context,
      BuildTarget buildTarget,
      BuildRuleParams params,
      GoTestDescriptionArg args) {
    GoPlatform platform = getGoPlatform(buildTarget, args);
    ActionGraphBuilder graphBuilder = context.getActionGraphBuilder();
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(graphBuilder);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    ProjectFilesystem projectFilesystem = context.getProjectFilesystem();
    GoTestCoverStep.Mode coverageMode;
    ImmutableSortedSet.Builder<BuildRule> extraDeps = ImmutableSortedSet.naturalOrder();
    ImmutableSet.Builder<SourcePath> srcs;
    ImmutableMap<String, Path> coverVariables;
    // Test sources plus, for internal tests, the library-under-test's sources.
    ImmutableSet.Builder<SourcePath> rawSrcs = ImmutableSet.builder();
    rawSrcs.addAll(args.getSrcs());
    if (args.getLibrary().isPresent()) {
      GoLibraryDescriptionArg libraryArg =
          graphBuilder
              .requireMetadata(args.getLibrary().get(), GoLibraryDescriptionArg.class)
              .get();
      rawSrcs.addAll(libraryArg.getSrcs());
    }
    if (args.getCoverageMode().isPresent()) {
      coverageMode = args.getCoverageMode().get();
      // Effectively-final copy so the lambda below can capture it.
      GoTestCoverStep.Mode coverage = coverageMode;
      GoTestCoverSource coverSource =
          (GoTestCoverSource)
              graphBuilder.computeIfAbsent(
                  buildTarget.withAppendedFlavors(InternalFlavor.of("gen-cover")),
                  target ->
                      new GoTestCoverSource(
                          target,
                          projectFilesystem,
                          ruleFinder,
                          pathResolver,
                          platform,
                          rawSrcs.build(),
                          platform.getCover(),
                          coverage));
      // When covering, compile the instrumented sources instead of the raw ones.
      coverVariables = coverSource.getVariables();
      srcs = ImmutableSet.builder();
      srcs.addAll(coverSource.getCoveredSources()).addAll(coverSource.getTestSources());
      extraDeps.add(coverSource);
    } else {
      srcs = rawSrcs;
      coverVariables = ImmutableMap.of();
      coverageMode = GoTestCoverStep.Mode.NONE;
    }
    if (buildTarget.getFlavors().contains(TEST_LIBRARY_FLAVOR)) {
      // Flavored request: build only the compiled test library.
      return createTestLibrary(
          buildTarget,
          projectFilesystem,
          params.copyAppendingExtraDeps(extraDeps.build()),
          graphBuilder,
          srcs.build(),
          args,
          platform);
    }
    GoBinary testMain =
        createTestMainRule(
            buildTarget,
            projectFilesystem,
            params.copyAppendingExtraDeps(extraDeps.build()),
            graphBuilder,
            srcs.build(),
            coverVariables,
            coverageMode,
            args,
            platform);
    graphBuilder.addToIndex(testMain);
    return new GoTest(
        buildTarget,
        projectFilesystem,
        params.withDeclaredDeps(ImmutableSortedSet.of(testMain)).withoutExtraDeps(),
        testMain,
        args.getLabels(),
        args.getContacts(),
        // Per-rule timeout wins; otherwise fall back to the config-wide default.
        args.getTestRuleTimeoutMs()
            .map(Optional::of)
            .orElse(goBuckConfig.getDelegate().getDefaultTestRuleTimeoutMs()),
        args.getRunTestSeparately(),
        args.getResources(),
        coverageMode);
  }

  /** Builds the {@link GoBinary} that actually runs the tests (test library + generated main). */
  private GoBinary createTestMainRule(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      ActionGraphBuilder graphBuilder,
      ImmutableSet<SourcePath> srcs,
      ImmutableMap<String, Path> coverVariables,
      GoTestCoverStep.Mode coverageMode,
      GoTestDescriptionArg args,
      GoPlatform platform) {
    Path packageName = getGoPackageName(graphBuilder, buildTarget, args);
    // Resources are only symlinked next to the binary for external test runners.
    boolean createResourcesSymlinkTree =
        goBuckConfig.getDelegate().getExternalTestRunner().isPresent();
    // Placeholder rule for the test-library flavor; the real compile happens via
    // the flavored target's own createBuildRule invocation.
    BuildRule testLibrary =
        new NoopBuildRuleWithDeclaredAndExtraDeps(
            buildTarget.withAppendedFlavors(TEST_LIBRARY_FLAVOR), projectFilesystem, params);
    graphBuilder.addToIndex(testLibrary);
    BuildRule generatedTestMain =
        requireTestMainGenRule(
            buildTarget,
            projectFilesystem,
            params,
            graphBuilder,
            platform,
            srcs,
            ImmutableMap.of(packageName, coverVariables),
            coverageMode,
            packageName);
    GoBinary testMain =
        GoDescriptors.createGoBinaryRule(
            buildTarget.withAppendedFlavors(InternalFlavor.of("test-main")),
            projectFilesystem,
            params
                .withDeclaredDeps(ImmutableSortedSet.of(testLibrary))
                .withExtraDeps(ImmutableSortedSet.of(generatedTestMain)),
            graphBuilder,
            goBuckConfig,
            args.getLinkStyle().orElse(Linker.LinkableDepType.STATIC_PIC),
            ImmutableSet.of(generatedTestMain.getSourcePathToOutput()),
            createResourcesSymlinkTree ? args.getResources() : ImmutableSortedSet.of(),
            args.getCompilerFlags(),
            args.getAssemblerFlags(),
            args.getLinkerFlags(),
            args.getExternalLinkerFlags(),
            platform);
    graphBuilder.addToIndex(testMain);
    return testMain;
  }

  /**
   * Resolves the Go package name for the test, validating the mutually
   * exclusive {@code library} / {@code package_name} arguments. With neither
   * set, the default package name gets an {@code _test} suffix (external test
   * package convention).
   */
  private Path getGoPackageName(
      ActionGraphBuilder graphBuilder, BuildTarget target, GoTestDescriptionArg args) {
    target = target.withFlavors(); // remove flavors.
    if (args.getLibrary().isPresent()) {
      Optional<GoLibraryDescriptionArg> libraryArg =
          graphBuilder.requireMetadata(args.getLibrary().get(), GoLibraryDescriptionArg.class);
      if (!libraryArg.isPresent()) {
        throw new HumanReadableException(
            "Library specified in %s (%s) is not a go_library rule.",
            target, args.getLibrary().get());
      }
      if (args.getPackageName().isPresent()) {
        throw new HumanReadableException(
            "Test target %s specifies both library and package_name - only one should be specified",
            target);
      }
      if (!libraryArg.get().getTests().contains(target)) {
        throw new HumanReadableException(
            "go internal test target %s is not listed in `tests` of library %s",
            target, args.getLibrary().get());
      }
      return libraryArg
          .get()
          .getPackageName()
          .map(Paths::get)
          .orElse(goBuckConfig.getDefaultPackageName(args.getLibrary().get()));
    } else if (args.getPackageName().isPresent()) {
      return Paths.get(args.getPackageName().get());
    } else {
      Path packageName = goBuckConfig.getDefaultPackageName(target);
      return packageName.resolveSibling(packageName.getFileName() + "_test");
    }
  }

  /** Compiles the test package; for internal tests, merges the library-under-test's sources/flags/deps. */
  private GoCompile createTestLibrary(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      ActionGraphBuilder graphBuilder,
      ImmutableSet<SourcePath> srcs,
      GoTestDescriptionArg args,
      GoPlatform platform) {
    Path packageName = getGoPackageName(graphBuilder, buildTarget, args);
    GoCompile testLibrary;
    if (args.getLibrary().isPresent()) {
      // We should have already type-checked the arguments in the base rule.
      GoLibraryDescriptionArg libraryArg =
          graphBuilder
              .requireMetadata(args.getLibrary().get(), GoLibraryDescriptionArg.class)
              .get();
      BuildRuleParams testTargetParams =
          params
              .withDeclaredDeps(
                  () ->
                      ImmutableSortedSet.<BuildRule>naturalOrder()
                          .addAll(params.getDeclaredDeps().get())
                          .addAll(graphBuilder.getAllRules(libraryArg.getDeps()))
                          .build())
              .withExtraDeps(
                  () -> {
                    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(graphBuilder);
                    return ImmutableSortedSet.<BuildRule>naturalOrder()
                        .addAll(params.getExtraDeps().get())
                        // Make sure to include dynamically generated sources as deps.
                        .addAll(ruleFinder.filterBuildRuleInputs(libraryArg.getSrcs()))
                        .build();
                  });
      testLibrary =
          GoDescriptors.createGoCompileRule(
              buildTarget,
              projectFilesystem,
              testTargetParams,
              graphBuilder,
              goBuckConfig,
              packageName,
              ImmutableSet.<SourcePath>builder().addAll(srcs).build(),
              ImmutableList.<String>builder()
                  .addAll(libraryArg.getCompilerFlags())
                  .addAll(args.getCompilerFlags())
                  .build(),
              ImmutableList.<String>builder()
                  .addAll(libraryArg.getAssemblerFlags())
                  .addAll(args.getAssemblerFlags())
                  .build(),
              platform,
              testTargetParams
                  .getDeclaredDeps()
                  .get()
                  .stream()
                  .map(BuildRule::getBuildTarget)
                  .collect(ImmutableList.toImmutableList()),
              ImmutableList.of(),
              // Internal test: no external (XTest) files expected in this package.
              Arrays.asList(ListType.GoFiles, ListType.TestGoFiles));
    } else {
      testLibrary =
          GoDescriptors.createGoCompileRule(
              buildTarget,
              projectFilesystem,
              params,
              graphBuilder,
              goBuckConfig,
              packageName,
              srcs,
              args.getCompilerFlags(),
              args.getAssemblerFlags(),
              platform,
              params
                  .getDeclaredDeps()
                  .get()
                  .stream()
                  .map(BuildRule::getBuildTarget)
                  .collect(ImmutableList.toImmutableList()),
              ImmutableList.of(),
              Arrays.asList(ListType.GoFiles, ListType.TestGoFiles, ListType.XTestGoFiles));
    }
    return testLibrary;
  }

  @Override
  public void findDepsForTargetFromConstructorArgs(
      BuildTarget buildTarget,
      CellPathResolver cellRoots,
      AbstractGoTestDescriptionArg constructorArg,
      ImmutableCollection.Builder<BuildTarget> extraDepsBuilder,
      ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) {
    // Add the C/C++ platform parse time deps.
    targetGraphOnlyDepsBuilder.addAll(
        CxxPlatforms.getParseTimeDeps(getGoPlatform(buildTarget, constructorArg).getCxxPlatform()));
  }

  private GoToolchain getGoToolchain() {
    return toolchainProvider.getByName(GoToolchain.DEFAULT_NAME, GoToolchain.class);
  }

  /** Platform resolution order: target flavor, then the arg's platform, then the toolchain default. */
  private GoPlatform getGoPlatform(BuildTarget target, AbstractGoTestDescriptionArg arg) {
    GoToolchain toolchain = getGoToolchain();
    FlavorDomain<GoPlatform> platforms = toolchain.getPlatformFlavorDomain();
    return platforms
        .getValue(target)
        .orElseGet(
            () ->
                arg.getPlatform()
                    .map(platforms::getValue)
                    .orElseGet(toolchain::getDefaultPlatform));
  }

  /** Constructor-arg schema for {@code go_test}; the immutable impl is generated from this. */
  @BuckStyleImmutable
  @Value.Immutable
  interface AbstractGoTestDescriptionArg
      extends CommonDescriptionArg, HasContacts, HasDeclaredDeps, HasSrcs, HasTestTimeout {
    Optional<Flavor> getPlatform();

    // Library under test, for go internal tests. Mutually exclusive with getPackageName().
    Optional<BuildTarget> getLibrary();

    Optional<String> getPackageName();

    Optional<GoTestCoverStep.Mode> getCoverageMode();

    Optional<Linker.LinkableDepType> getLinkStyle();

    ImmutableList<String> getCompilerFlags();

    ImmutableList<String> getAssemblerFlags();

    ImmutableList<String> getLinkerFlags();

    ImmutableList<String> getExternalLinkerFlags();

    @Value.Default
    default boolean getRunTestSeparately() {
      return false;
    }

    @Value.NaturalOrder
    ImmutableSortedSet<SourcePath> getResources();
  }
}
|
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.projectView.impl.nodes;
import com.intellij.ide.projectView.PresentationData;
import com.intellij.ide.projectView.NodeSortOrder;
import com.intellij.ide.projectView.NodeSortSettings;
import com.intellij.ide.projectView.ViewSettings;
import com.intellij.ide.projectView.impl.CompoundIconProvider;
import com.intellij.ide.projectView.impl.ProjectRootsUtil;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.idea.ActionsBundle;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleGrouperKt;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.OrderEntry;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.libraries.LibraryUtil;
import com.intellij.openapi.roots.ui.configuration.ProjectSettingsService;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.NavigatableWithText;
import com.intellij.projectImport.ProjectAttachProcessor;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.impl.file.PsiDirectoryFactory;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.IconUtil;
import com.intellij.util.PlatformUtils;
import com.intellij.util.containers.SmartHashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.Collection;
import java.util.Set;
/**
 * Project-view tree node representing a {@link PsiDirectory}. Responsible for
 * presentation (directory/module name, location string, icon), containment
 * checks, sort keys, and navigation to the relevant project settings when the
 * directory is a module/content/library root.
 */
public class PsiDirectoryNode extends BasePsiNode<PsiDirectory> implements NavigatableWithText {
  // the chain from a parent directory to this one usually contains only one virtual file
  private final Set<VirtualFile> chain = new SmartHashSet<>();

  // Optional filter restricting which files/directories this node shows or contains.
  private final PsiFileSystemItemFilter myFilter;

  public PsiDirectoryNode(Project project, @NotNull PsiDirectory value, ViewSettings viewSettings) {
    this(project, value, viewSettings, null);
  }

  public PsiDirectoryNode(Project project, @NotNull PsiDirectory value, ViewSettings viewSettings, @Nullable PsiFileSystemItemFilter filter) {
    super(project, value, viewSettings);
    myFilter = filter;
  }

  @Nullable
  public PsiFileSystemItemFilter getFilter() {
    return myFilter;
  }

  // Extension point: CIDR-based IDEs suppress the "[module]" suffix.
  protected boolean shouldShowModuleName() {
    return !PlatformUtils.isCidr();
  }

  protected boolean shouldShowSourcesRoot() {
    return true;
  }

  /**
   * Computes the node's presentation. Module content roots get bold/suffixed
   * names and an optional location string; ordinary directories get the name
   * computed by {@link ProjectViewDirectoryHelper}.
   */
  @Override
  protected void updateImpl(@NotNull PresentationData data) {
    Project project = getProject();
    assert project != null : this;
    PsiDirectory psiDirectory = getValue();
    assert psiDirectory != null : this;
    VirtualFile directoryFile = psiDirectory.getVirtualFile();
    Object parentValue = getParentValue();
    // Lazily cache the files between the parent node's directory and this one,
    // so canRepresent() can answer for collapsed intermediate directories.
    synchronized (chain) {
      if (chain.isEmpty()) {
        VirtualFile ancestor = getVirtualFile(parentValue);
        if (ancestor != null) {
          for (VirtualFile file = directoryFile; file != null && VfsUtilCore.isAncestor(ancestor, file, true); file = file.getParent()) {
            chain.add(file);
          }
        }
        if (chain.isEmpty()) chain.add(directoryFile);
      }
    }
    if (ProjectRootsUtil.isModuleContentRoot(directoryFile, project)) {
      ProjectFileIndex fi = ProjectRootManager.getInstance(project).getFileIndex();
      Module module = fi.getModuleForFile(directoryFile);
      data.setPresentableText(directoryFile.getName());
      if (module != null) {
        if (!(parentValue instanceof Module)) {
          if (ModuleType.isInternal(module) || !shouldShowModuleName()) {
            data.addText(directoryFile.getName() + " ", SimpleTextAttributes.REGULAR_ATTRIBUTES);
          }
          else if (moduleNameMatchesDirectoryName(module, directoryFile, fi)) {
            // Module name matches the directory: show just the name, in bold.
            data.addText(directoryFile.getName(), SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
          }
          else {
            // Otherwise append the module name in brackets.
            data.addText(directoryFile.getName() + " ", SimpleTextAttributes.REGULAR_ATTRIBUTES);
            data.addText("[" + module.getName() + "]", SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
          }
        }
        else {
          data.addText(directoryFile.getName(), SimpleTextAttributes.REGULAR_ATTRIBUTES);
        }
        boolean shouldShowUrl = getSettings().isShowURL() && (parentValue instanceof Module || parentValue instanceof Project);
        data.setLocationString(ProjectViewDirectoryHelper.getInstance(project).getLocationString(psiDirectory,
                                                                                                 shouldShowUrl,
                                                                                                 shouldShowSourcesRoot()));
        setupIcon(data, psiDirectory);
        return;
      }
    }
    String name = parentValue instanceof Project
                  ? psiDirectory.getVirtualFile().getPresentableUrl()
                  : ProjectViewDirectoryHelper.getInstance(psiDirectory.getProject()).getNodeName(getSettings(), parentValue, psiDirectory);
    if (name == null) {
      // No display name: invalidate this node.
      setValue(null);
      return;
    }
    data.setPresentableText(name);
    data.setLocationString(ProjectViewDirectoryHelper.getInstance(project).getLocationString(psiDirectory, false, false));
    setupIcon(data, psiDirectory);
  }

  protected static boolean canRealModuleNameBeHidden() {
    return Registry.is("ide.hide.real.module.name");
  }

  /**
   * True when the module name is equal to (ignoring case) the directory name,
   * or when qualified module names are enabled and the name is
   * "&lt;parent module&gt;.&lt;directory&gt;" for the parent content root's module.
   */
  private static boolean moduleNameMatchesDirectoryName(@NotNull Module module, @NotNull VirtualFile directoryFile, @NotNull ProjectFileIndex fileIndex) {
    if (canRealModuleNameBeHidden()) return true;
    String moduleName = module.getName();
    String directoryName = directoryFile.getName();
    if (moduleName.equalsIgnoreCase(directoryName)) {
      return true;
    }
    if (ModuleGrouperKt.isQualifiedModuleNamesEnabled(module.getProject()) && StringUtil.endsWithIgnoreCase(moduleName, directoryName)) {
      int parentPrefixLength = moduleName.length() - directoryName.length() - 1;
      // The char just before the directory-name suffix must be the '.' qualifier separator.
      if (parentPrefixLength > 0 && moduleName.charAt(parentPrefixLength) == '.') {
        VirtualFile parentDirectory = directoryFile.getParent();
        if (ProjectRootsUtil.isModuleContentRoot(parentDirectory, module.getProject())) {
          Module parentModule = fileIndex.getModuleForFile(parentDirectory);
          if (parentModule != null && parentModule.getName().length() == parentPrefixLength
              && moduleName.startsWith(parentModule.getName())) {
            return true;
          }
        }
      }
    }
    return false;
  }

  protected void setupIcon(PresentationData data, PsiDirectory psiDirectory) {
    final VirtualFile virtualFile = psiDirectory.getVirtualFile();
    if (PlatformUtils.isAppCode()) {
      // AppCode renders directory icons from the file itself rather than providers.
      final Icon icon = IconUtil.getIcon(virtualFile, 0, myProject);
      data.setIcon(icon);
    }
    else {
      Icon icon = CompoundIconProvider.findIcon(psiDirectory, 0);
      if (icon != null) data.setIcon(icon);
    }
  }

  @Override
  public Collection<AbstractTreeNode<?>> getChildrenImpl() {
    return ProjectViewDirectoryHelper.getInstance(myProject).getDirectoryChildren(getValue(), getSettings(), true, getFilter());
  }

  @Override
  @SuppressWarnings("deprecation")
  public String getTestPresentation() {
    return "PsiDirectory: " + getValue().getName();
  }

  public boolean isFQNameShown() {
    return ProjectViewDirectoryHelper.getInstance(getProject()).isShowFQName(getSettings(), getParentValue(), getValue());
  }

  /**
   * True when {@code file} lives under this directory and is not rejected by
   * the filter, the excluded-files registry setting, or the ignored-files list.
   */
  @Override
  public boolean contains(@NotNull VirtualFile file) {
    final PsiDirectory value = getValue();
    if (value == null) {
      return false;
    }
    VirtualFile directory = value.getVirtualFile();
    if (directory.getFileSystem() instanceof LocalFileSystem) {
      // Map jar:// etc. entries back to their local file before the ancestor check.
      file = VfsUtil.getLocalFile(file);
    }
    if (!VfsUtilCore.isAncestor(directory, file, false)) {
      return false;
    }
    final Project project = value.getProject();
    PsiFileSystemItemFilter filter = getFilter();
    if (filter != null) {
      PsiFile psiFile = PsiManager.getInstance(project).findFile(file);
      if (psiFile != null && !filter.shouldShow(psiFile)) return false;
      PsiDirectory psiDirectory = PsiManager.getInstance(project).findDirectory(file);
      if (psiDirectory != null && !filter.shouldShow(psiDirectory)) return false;
    }
    if (Registry.is("ide.hide.excluded.files")) {
      final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(project).getFileIndex();
      return !fileIndex.isExcluded(file);
    }
    else {
      return !FileTypeRegistry.getInstance().isFileIgnored(file);
    }
  }

  /**
   * @return a virtual file that identifies the given element
   */
  @Nullable
  private static VirtualFile getVirtualFile(Object element) {
    if (element instanceof PsiDirectory) {
      PsiDirectory directory = (PsiDirectory)element;
      return directory.getVirtualFile();
    }
    return element instanceof VirtualFile ? (VirtualFile)element : null;
  }

  @Override
  public boolean canRepresent(final Object element) {
    VirtualFile file = getVirtualFile(element);
    if (file != null) {
      // Fast path: element is one of the collapsed intermediate directories cached in updateImpl().
      synchronized (chain) {
        if (chain.contains(file)) return true;
      }
    }
    if (super.canRepresent(element)) return true;
    PsiDirectory directory = getValue();
    Object owner = getParentValue();
    if (file == null || directory == null || !(owner instanceof PsiDirectory)) return false;
    return ProjectViewDirectoryHelper.getInstance(getProject())
      .canRepresent(file, directory, (PsiDirectory)owner, getSettings());
  }

  @Override
  public boolean isValid() {
    if (!super.isValid()) return false;
    return ProjectViewDirectoryHelper.getInstance(getProject())
      .isValidDirectory(getValue(), getParentValue(), getSettings(), getFilter());
  }

  /** Navigable only when this directory is a module/source/library root and the matching settings page can open. */
  @Override
  public boolean canNavigate() {
    VirtualFile file = getVirtualFile();
    Project project = getProject();
    // NOTE(review): uses myProject here but the local 'project' above — presumably the
    // same instance; confirm before unifying.
    ProjectSettingsService service = ProjectSettingsService.getInstance(myProject);
    return file != null && (ProjectRootsUtil.isModuleContentRoot(file, project) && service.canOpenModuleSettings() ||
                            ProjectRootsUtil.isModuleSourceRoot(file, project) && service.canOpenContentEntriesSettings() ||
                            ProjectRootsUtil.isLibraryRoot(file, project) && service.canOpenModuleLibrarySettings());
  }

  @Override
  public boolean canNavigateToSource() {
    return false;
  }

  /** Opens the settings page appropriate to this root (module, library, or content entries). */
  @Override
  public void navigate(final boolean requestFocus) {
    Module module = ModuleUtilCore.findModuleForPsiElement(getValue());
    if (module != null) {
      // NOTE(review): getVirtualFile() is not null-checked here, unlike canNavigate() —
      // looks safe only because navigate() follows a canNavigate() check; confirm.
      final VirtualFile file = getVirtualFile();
      final Project project = getProject();
      ProjectSettingsService service = ProjectSettingsService.getInstance(myProject);
      if (ProjectRootsUtil.isModuleContentRoot(file, project)) {
        service.openModuleSettings(module);
      }
      else if (ProjectRootsUtil.isLibraryRoot(file, project)) {
        final OrderEntry orderEntry = LibraryUtil.findLibraryEntry(file, module.getProject());
        if (orderEntry != null) {
          service.openLibraryOrSdkSettings(orderEntry);
        }
      }
      else {
        service.openContentEntriesSettings(module);
      }
    }
  }

  @Override
  public String getNavigateActionText(boolean focusEditor) {
    VirtualFile file = getVirtualFile();
    Project project = getProject();
    if (file != null && project != null) {
      if (ProjectRootsUtil.isModuleContentRoot(file, project) || ProjectRootsUtil.isModuleSourceRoot(file, project)) {
        return ActionsBundle.message("action.ModuleSettings.navigate");
      }
      if (ProjectRootsUtil.isLibraryRoot(file, project)) {
        return ActionsBundle.message("action.LibrarySettings.navigate");
      }
    }
    return null;
  }

  @Override
  public int getWeight() {
    // Folders sort before files (weight 20) when "folders always on top" is set.
    ViewSettings settings = getSettings();
    if (settings == null || settings.isFoldersAlwaysOnTop()) {
      return 20;
    }
    return isFQNameShown() ? 70 : 0;
  }

  @Override
  public String getTitle() {
    final PsiDirectory directory = getValue();
    if (directory != null) {
      return PsiDirectoryFactory.getInstance(getProject()).getQualifiedName(directory, true);
    }
    return super.getTitle();
  }

  @Override
  public @NotNull NodeSortOrder getSortOrder(@NotNull NodeSortSettings settings) {
    return settings.isFoldersAlwaysOnTop() ? NodeSortOrder.FOLDER : super.getSortOrder(settings);
  }

  @Override
  public Comparable getSortKey() {
    if (ProjectAttachProcessor.canAttachToProject()) {
      // primary module is always on top; attached modules are sorted alphabetically
      // NOTE(review): Project.getBaseDir() is a deprecated API — candidate for
      // ProjectUtil.guessProjectDir() when this code is next touched.
      final VirtualFile file = getVirtualFile();
      if (Comparing.equal(file, myProject.getBaseDir())) {
        return ""; // sorts before any other name
      }
      return toString();
    }
    return null;
  }

  @Override
  public Comparable getTypeSortKey() {
    VirtualFile file = getVirtualFile();
    String extension = file == null ? null : file.getExtension();
    return extension == null ? null : new PsiFileNode.ExtensionSortKey(extension);
  }

  @Override
  public String getQualifiedNameSortKey() {
    final PsiDirectoryFactory factory = PsiDirectoryFactory.getInstance(getProject());
    return factory.getQualifiedName(getValue(), true);
  }

  @Override
  public int getTypeSortWeight(final boolean sortByType) {
    return 3;
  }

  @Override
  public boolean shouldDrillDownOnEmptyElement() {
    return true;
  }

  @Override
  public boolean isAlwaysShowPlus() {
    // Show the expander when children are unknown (null file) or present.
    final VirtualFile file = getVirtualFile();
    return file == null || file.getChildren().length > 0;
  }
}
|
|
/*
* Copyright 2018 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.spark;
import com.google.common.net.UrlEscapers;
import org.apache.commons.lang3.text.StrSubstitutor;
import static com.google.common.collect.ImmutableMap.of;
import static com.thoughtworks.go.CurrentGoCDVersion.apiDocsUrl;
public class Routes {
/** Route constants for the server backup API. */
public static class Backups {
    public static final String BASE = "/api/backups";
    public static final String DOC = apiDocsUrl("#backups");
    public static final String ID_PATH = "/:id";

    /** @return the concrete backup URL for {@code id}. */
    public static String serverBackup(String id) {
        // String.replace substitutes literally; replaceAll treated the id as a
        // regex *replacement string*, so ids containing '$' or '\' would throw
        // or be mangled (see Matcher.quoteReplacement).
        return BASE + ID_PATH.replace(":id", id);
    }
}
/** Route constants for enabling/disabling and querying server maintenance mode. */
public static class MaintenanceMode {
    public static final String BASE = "/api/admin/maintenance_mode";
    public static final String SPA_BASE = "/admin/maintenance_mode";
    public static final String ENABLE = "/enable";
    public static final String DISABLE = "/disable";
    public static final String INFO = "/info";
    public static final String INFO_DOC = apiDocsUrl("#maintenance-mode-info");
}
/** Route constant for the current-user API. */
public static class CurrentUser {
    public static final String BASE = "/api/current_user";
}
/** Route constant for the admin value-encryption API. */
public static class Encrypt {
    public static final String BASE = "/api/admin/encrypt";
}
/** Routes serving plugin icon images, addressed by plugin id and content hash. */
public static class PluginImages {
    public static final String BASE = "/api/plugin_images";
    public static final String PLUGIN_ID_HASH_PATH = "/:plugin_id/:hash";

    /** @return the image URL for the given plugin id and content hash. */
    public static String pluginImage(String pluginId, String hash) {
        // Literal substitution instead of replaceAll: regex replacement-string
        // semantics would break on '$' or '\' in either value.
        return BASE + PLUGIN_ID_HASH_PATH.replace(":plugin_id", pluginId).replace(":hash", hash);
    }
}
/** Route constant for the raw config XML view. */
public static class ConfigView {
    public static final String SELF = "/admin/config_xml";
}
/** Routes for config repositories: SPA, internal/operations endpoints, and the admin CRUD API. */
public static class ConfigRepos {
    public static final String SPA_BASE = "/admin/config_repos";
    public static final String INTERNAL_BASE = "/api/internal/config_repos";
    public static final String OPERATIONS_BASE = "/api/admin/config_repo_ops";
    public static final String PREFLIGHT_PATH = "/preflight";
    public static final String STATUS_PATH = "/:id/status";
    public static final String TRIGGER_UPDATE_PATH = "/:id/trigger_update";
    public static final String BASE = "/api/admin/config_repos";
    public static final String DOC = apiDocsUrl("#config-repos");
    public static final String INDEX_PATH = "";
    public static final String REPO_PATH = "/:id";
    public static final String CREATE_PATH = INDEX_PATH;
    public static final String UPDATE_PATH = REPO_PATH;
    public static final String DELETE_PATH = REPO_PATH;

    // For building _links entry in API response
    public static String find() {
        return BASE + REPO_PATH;
    }

    /** @return the concrete repo URL for {@code id}. */
    public static String id(String id) {
        // Literal substitution: replaceAll would misinterpret '$'/'\' in the id
        // as regex replacement-string metacharacters.
        return find().replace(":id", id);
    }
}
/** Routes for exporting pipeline definitions. */
public static class Export {
    public static final String BASE = "/api/admin/export";
    public static final String PIPELINES_PATH = "/pipelines/:pipeline_name";

    /** @return the export URL for the named pipeline. */
    public static String pipeline(String name) {
        // Literal substitution: replaceAll treats the name as a regex
        // replacement string, so names containing '$' or '\' would throw.
        return (BASE + PIPELINES_PATH).replace(":pipeline_name", name);
    }
}
/** Routes for the security roles API and SPA. */
public static class Roles {
    public static final String BASE = "/api/admin/security/roles";
    public static final String SPA_BASE = "/admin/security/roles";
    public static final String DOC = apiDocsUrl("#roles");
    public static final String NAME_PATH = "/:role_name";

    public static String find() {
        return BASE + NAME_PATH;
    }

    /** @return the concrete role URL for {@code name}. */
    public static String name(String name) {
        // Literal substitution instead of regex-based replaceAll ('$'/'\' safety).
        return BASE + NAME_PATH.replace(":role_name", name);
    }
}
/** Route constants for the system-admins API. */
public static class SystemAdmins {
    public static final String BASE = "/api/admin/security/system_admins";
    public static final String DOC = apiDocsUrl("#system-admins");
}
/** Route constants for the dashboard API. */
public static class Dashboard {
    public static final String SELF = "/api/dashboard";
    public static final String DOC = "https://api.go.cd/current/#dashboard";
}
public static class Materials {
public static String vsm(String materialFingerprint, String revision) {
return StrSubstitutor.replace("/materials/value_stream_map/${material_fingerprint}/${revision}", of(
"material_fingerprint", materialFingerprint,
"revision", revision));
}
}
/** Route constants for the read-only pipeline-groups API. */
public static class PipelineGroup {
    public static final String DOC = "https://api.go.cd/current/#pipeline-groups";
    public static final String SELF = "/api/config/pipeline_groups";
}
/** Routes for the admin pipeline-group-config API. */
public static class PipelineGroupsAdmin {
    public static final String DOC = "https://api.go.cd/current/#pipeline-group-config";
    public static final String BASE = "/api/admin/pipeline_groups";
    public static final String NAME_PATH = "/:group_name";

    public static String find() {
        return BASE + NAME_PATH;
    }

    /** @return the concrete group URL for {@code name}. */
    public static String name(String name) {
        // Literal substitution instead of regex-based replaceAll ('$'/'\' safety).
        return BASE + NAME_PATH.replace(":group_name", name);
    }
}
/** Routes for a single environment's config. */
public static class EnvironmentConfig {
    public static final String DOC = apiDocsUrl("#environment-config");
    static final String NAME = "/api/admin/environments/:name";

    /** @return the concrete environment-config URL for {@code name}. */
    public static String name(String name) {
        // Literal substitution instead of regex-based replaceAll ('$'/'\' safety).
        return NAME.replace(":name", name);
    }
}
/** Routes for the environments API. */
public static class Environments {
    public static final String DOC = "https://api.go.cd/current/#environment-config";
    public static final String BASE = "/api/admin/environments";
    public static final String NAME = "/:name";

    public static String find() {
        return BASE + NAME;
    }

    /** @return the concrete environment URL for {@code name}. */
    public static String name(String name) {
        // Literal substitution instead of regex-based replaceAll ('$'/'\' safety).
        return BASE + NAME.replace(":name", name);
    }
}
    /** Routes for the data-sharing (usage reporting) APIs. */
    public static class DataSharing {
        public static final String USAGE_DATA_PATH = "/api/internal/data_sharing/usagedata";
        public static final String SETTINGS_PATH = "/api/data_sharing/settings";
        public static final String REPORTING_PATH = "/api/internal/data_sharing/reporting";
        public static final String SETTINGS_DOC = "https://api.go.cd/current/#data_sharing_settings";
    }
public static class Pipeline {
public static final String BASE = "/api/pipelines";
public static final String DOC = "https://api.go.cd/current/#pipelines";
public static final String DOC_TRIGGER_OPTIONS = "https://api.go.cd/current/#pipeline-trigger-options";
public static final String PAUSE_PATH = "/:pipeline_name/pause";
public static final String UNPAUSE_PATH = "/:pipeline_name/unpause";
public static final String UNLOCK_PATH = "/:pipeline_name/unlock";
public static final String TRIGGER_OPTIONS_PATH = "/:pipeline_name/trigger_options";
public static final String SCHEDULE_PATH = "/:pipeline_name/schedule";
public static final String HISTORY_PATH = "/:pipeline_name/history";
public static final String INSTANCE_PATH = "/:pipeline_name/instance/:pipeline_counter";
public static String history(String pipelineName) {
return BASE + HISTORY_PATH.replaceAll(":pipeline_name", pipelineName);
}
public static String triggerOptions(String pipelineName) {
return BASE + TRIGGER_OPTIONS_PATH.replaceAll(":pipeline_name", pipelineName);
}
public static String schedule(String pipelineName) {
return BASE + SCHEDULE_PATH.replaceAll(":pipeline_name", pipelineName);
}
public static String pause(String pipelineName) {
return BASE + PAUSE_PATH.replaceAll(":pipeline_name", pipelineName);
}
public static String unpause(String pipelineName) {
return BASE + UNPAUSE_PATH.replaceAll(":pipeline_name", pipelineName);
}
public static String unlock(String pipelineName) {
return BASE + UNLOCK_PATH.replaceAll(":pipeline_name", pipelineName);
}
public static String instance(String pipelineName, int pipelineCounter) {
return BASE + INSTANCE_PATH
.replaceAll(":pipeline_name", pipelineName)
.replaceAll(":pipeline_counter", String.valueOf(pipelineCounter));
}
}
    /** Routes for pages about a single pipeline run. */
    public static class PipelineInstance {
        // Value-stream-map page for one pipeline run; substitution via the project's StrSubstitutor.
        public static String vsm(String pipelineName, int pipelineCounter) {
            return StrSubstitutor.replace("/pipelines/value_stream_map/${pipeline_name}/${pipeline_counter}",
                    of("pipeline_name", pipelineName, "pipeline_counter", pipelineCounter));
        }
    }

    /** Routes for the stages API and stage-detail pages. */
    public static class Stage {
        public static final String BASE = "/api/stages";
        public static final String TRIGGER_STAGE_PATH = "/:pipeline_name/:pipeline_counter/:stage_name/run";
        public static final String TRIGGER_FAILED_JOBS_PATH = "/:pipeline_name/:pipeline_counter/:stage_name/:stage_counter/run-failed-jobs";
        public static final String TRIGGER_SELECTED_JOBS_PATH = "/:pipeline_name/:pipeline_counter/:stage_name/:stage_counter/run-selected-jobs";
        public static final String INSTANCE_BY_COUNTER = "/:pipeline_name/:stage_name/instance/:pipeline_counter/:stage_counter";
        public static final String STAGE_HISTORY = "/:pipeline_name/:stage_name/history";
        public static final String STAGE_HISTORY_OFFSET = "/:pipeline_name/:stage_name/history/:offset";

        /** API route for one stage run. Counters are passed as strings by callers. */
        public static String self(String pipelineName, String pipelineCounter, String stageName, String stageCounter) {
            return StrSubstitutor.replace("/api/stages/${pipeline_name}/${pipeline_counter}/${stage_name}/${stage_counter}", of(
                    "pipeline_name", pipelineName,
                    "pipeline_counter", pipelineCounter,
                    "stage_name", stageName,
                    "stage_counter", stageCounter));
        }

        /** UI route for the stage-detail tab of one stage run. */
        public static String stageDetailTab(String pipelineName, int pipelineCounter, String stageName, int stageCounter) {
            return StrSubstitutor.replace("/pipelines/${pipeline_name}/${pipeline_counter}/${stage_name}/${stage_counter}", of(
                    "pipeline_name", pipelineName,
                    "pipeline_counter", pipelineCounter,
                    "stage_name", stageName,
                    "stage_counter", stageCounter));
        }
    }

    /** Routes for the users API. Note BASE already ends with '/'. */
    public static class UserSummary {
        public static final String DOC = apiDocsUrl("#users");
        public static final String CURRENT_USER = "/api/current_user";
        public static final String BASE = "/api/users/";

        /** API route for one user, keyed by login name. */
        public static String self(String loginName) {
            return StrSubstitutor.replace(BASE + "${loginName}", of("loginName", loginName));
        }

        /** Route pattern with the ":login_name" placeholder (yields "/api/users/:login_name"). */
        public static String find() {
            return BASE + ":login_name";
        }
    }

    /** Routes for the user-search API. */
    public static class UserSearch {
        public static final String BASE = "/api/user_search";
        public static final String DOC = apiDocsUrl("#user-search");

        // The search term is form-escaped before being placed in the query string.
        public static String self(String searchTerm) {
            return StrSubstitutor.replace(BASE + "?q=${searchTerm}", of("searchTerm", UrlEscapers.urlFormParameterEscaper().escape(searchTerm)));
        }

        public static String find() {
            return BASE + "?q=:search_term";
        }
    }
public static class ArtifactStoreConfig {
public static final String BASE = "/api/admin/artifact_stores";
public static final String ID = "/:id";
public static final String DOC = apiDocsUrl("#artifact-store");
public static String find() {
return BASE + ID;
}
public static String id(String id) {
return find().replaceAll(":id", id);
}
}
public static class PipelineConfig {
public static final String SPA_CREATE = "/admin/pipelines/create";
public static final String BASE = "/api/admin/pipelines";
public static final String NAME = "/:pipeline_name";
public static final String DOC = apiDocsUrl("#pipeline-config");
public static String find() {
return BASE + NAME;
}
public static String name(String name) {
return find().replaceAll(":pipeline_name", name);
}
}
public static class PipelineTemplateConfig {
public static final String BASE = "/api/admin/templates";
public static final String NAME = "/:template_name";
public static final String DOC = apiDocsUrl("#template-config");
public static String find() {
return BASE + NAME;
}
public static String name(String name) {
return find().replaceAll(":template_name", name);
}
}
public static class ElasticProfileAPI {
public static final String BASE = "/api/elastic/profiles";
public static final String INTERNAL_BASE = "/api/internal/elastic/profiles";
public static final String ID = "/:profile_id";
public static final String DOC = apiDocsUrl("#elastic-agent-profiles");
public static final String USAGES = "/usages";
public static String find() {
return BASE + ID;
}
public static String id(String id) {
return find().replaceAll(":profile_id", id);
}
}
public static class SecretConfigsAPI {
public static final String BASE = "/api/admin/secret_configs";
public static final String ID = "/:config_id";
public static final String DOC = apiDocsUrl("#secret-configs");
public static String find() {
return BASE + ID;
}
public static String id(String id) {
return find().replaceAll(":config_id", id);
}
}
    /** SPA route for the secret-configs admin page. */
    public static class SecretConfigs {
        public static final String SPA_BASE = "/admin/secret_configs";
    }
public static class ClusterProfilesAPI {
public static final String BASE = "/api/admin/elastic/cluster_profiles";
public static final String ID = "/:cluster_id";
public static final String DOC = apiDocsUrl("#cluster-profiles");
public static String find() {
return BASE + ID;
}
public static String id(String id) {
return find().replaceAll(":cluster_id", id);
}
}
public static class PluginInfoAPI {
public static final String BASE = "/api/admin/plugin_info";
public static final String ID = "/:id";
public static final String DOC = apiDocsUrl("#plugin-info");
public static String find() {
return BASE + ID;
}
public static String id(String id) {
return find().replaceAll(":id", id);
}
}
public static class AgentsAPI {
public static final String BASE = "/api/agents";
public static final String UUID = "/:uuid";
public static final String DOC = apiDocsUrl("#agents");
public static String find() {
return BASE + UUID;
}
public static String uuid(String uuid) {
return find().replaceAll(":uuid", uuid);
}
}
    // NOTE(review): the classes below are non-static inner classes, unlike the static route
    // holders above. They contain only compile-time String constants, so this compiles, but
    // declaring them static would match the rest of the file — confirm before changing.

    /** Route for the server-health-messages API. */
    public class ServerHealthMessages {
        public static final String BASE = "/api/server_health_messages";
    }

    /** Route for the internal material-search API. */
    public class MaterialSearch {
        public static final String BASE = "/api/internal/material_search";
    }

    /** Route for dependency-material autocomplete suggestions. */
    public class DependencyMaterialAutocomplete {
        public static final String BASE = "/api/internal/dependency_material/autocomplete_suggestions";
    }

    /** SPA route for the roles admin page. */
    public class RolesSPA {
        public static final String BASE = "/admin/security/roles";
    }

    /** Routes for the internal pipeline-selection API. */
    public class PipelineSelection {
        public static final String BASE = "/api/internal/pipeline_selection";
        public static final String PIPELINES_DATA = "/pipelines_data";
    }

    /** Routes for the internal build-cause API. */
    public class BuildCause {
        public static final String BASE = "/api/internal/build_cause";
        public static final String PATH = "/:pipeline_name/:pipeline_counter";
    }

    /** SPA route for the agents page. */
    public class AgentsSPA {
        public static final String BASE = "/agents";
    }

    /** SPA routes for analytics pages. */
    public class AnalyticsSPA {
        public static final String BASE = "/analytics";
        public static final String SHOW_PATH = ":plugin_id/:type/:id";
    }

    /** SPA route for the elastic-profiles admin page. */
    public class ElasticProfilesSPA {
        public static final String BASE = "/admin/elastic_profiles";
    }

    /** SPA route for the new dashboard. */
    public class NewDashboardSPA {
        public static final String BASE = "/dashboard";
    }

    /** SPA route for the plugins admin page. */
    public class PluginsSPA {
        public static final String BASE = "/admin/plugins";
    }

    /** SPA route for the data-sharing settings admin page. */
    public class DataSharingSettingsSPA {
        public static final String BASE = "/admin/data_sharing/settings";
    }

    /** SPA route for the backups admin page. */
    public class BackupsSPA {
        public static final String BASE = "/admin/backup";
    }

    /** Route for the server-health check endpoint. */
    public class ServerHealth {
        public static final String BASE = "/api/v1/health";
    }

    /** SPA route for the kitchen-sink demo page. */
    public class KitchenSink {
        public static final String SPA_BASE = "/kitchen-sink";
    }

    /** Routes and links for the version API. */
    public static class Version {
        public static final String BASE = "/api/version";
        public static final String DOC = apiDocsUrl("#version");
        public static final String COMMIT_URL = "https://github.com/gocd/gocd/commit/";
    }

    /** SPA route for the auth-configs admin page. */
    public class AuthConfigs {
        public static final String SPA_BASE = "/admin/security/auth_configs";
    }

    /** Routes for the users admin API and SPA. */
    public static class Users {
        public static final String BASE = "/api/users";
        public static final String USER_NAME = "/:login_name";
        public static final String SPA_BASE = "/admin/users";
        public static final String DOC = apiDocsUrl("#users");
        public static final String USER_STATE = "/operations/state";
    }

    /** SPA route for the artifact-stores admin page. */
    public class ArtifactStores {
        public static final String SPA_BASE = "/admin/artifact_stores";
    }

    /**
     * Common shape for API route builders: a placeholder pattern ({@link #find()}), a concrete
     * URL for one entity ({@link #find(Object)}), plus documentation and base URLs.
     */
    public interface FindUrlBuilder<Identifier> {
        String find();
        String find(Identifier id);
        String doc();
        String base();
    }
public static class CurrentUserAccessToken implements FindUrlBuilder<Long> {
public static final String BASE = "/api/current_user/access_tokens";
public static final String ID = "/:id";
public static final String REVOKE = ID + "/revoke";
private static final String DOC = apiDocsUrl("#access-tokens");
@Override
public String find() {
return BASE + ID;
}
@Override
public String find(Long id) {
return find().replaceAll(":id", String.valueOf(id));
}
@Override
public String doc() {
return DOC;
}
@Override
public String base() {
return BASE;
}
}
public static class AdminUserAccessToken implements FindUrlBuilder<Long> {
public static final String BASE = "/api/admin/access_tokens";
public static final String ID = "/:id";
public static final String REVOKE = ID + "/revoke";
private static final String DOC = apiDocsUrl("#access-tokens");
@Override
public String find() {
return BASE + ID;
}
@Override
public String find(Long id) {
return find().replaceAll(":id", String.valueOf(id));
}
@Override
public String doc() {
return DOC;
}
@Override
public String base() {
return BASE;
}
}
    /** SPA route for the current user's access-tokens page. */
    public class AccessTokens {
        public static final String SPA_BASE = "/access_tokens";
    }

    /** SPA route for the admin access-tokens page. */
    public class AdminAccessTokens {
        public static final String SPA_BASE = "/admin/admin_access_tokens";
    }

    /** Route for the CCTray XML feed. */
    public class CCTray {
        public static final String BASE = "/cctray.xml";
    }

    /** SPA route for the login page. */
    public class LoginPage {
        public static final String SPA_BASE = "/auth/login";
    }

    /** SPA route for the logout page. */
    public class LogoutPage {
        public static final String SPA_BASE = "/auth/logout";
    }

    /** Route for the support/api-support endpoint. */
    public class Support {
        public static final String BASE = "/api/support";
    }

    /** SPA route for the cluster-profiles admin page. */
    public class ClusterProfiles {
        public static final String SPA_BASE = "/admin/cluster_profiles";
    }
}
|
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.android.DxStep.Option;
import com.facebook.buck.android.toolchain.AndroidPlatformTarget;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.build.execution.context.ExecutionContext;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.io.BuildCellRelativePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.step.StepExecutionResults;
import com.facebook.buck.step.StepFailedException;
import com.facebook.buck.step.StepRunner;
import com.facebook.buck.step.fs.RmStep;
import com.facebook.buck.step.fs.WriteFileStep;
import com.facebook.buck.step.fs.XzStep;
import com.facebook.buck.util.MoreSuppliers;
import com.facebook.buck.util.concurrent.MoreFutures;
import com.facebook.buck.util.sha1.Sha1HashCode;
import com.facebook.buck.util.types.Unit;
import com.facebook.buck.util.zip.ZipCompressionLevel;
import com.facebook.buck.zip.RepackZipEntriesStep;
import com.facebook.buck.zip.ZipScrubberStep;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableMultimap.Builder;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.google.common.io.Files;
import com.google.common.util.concurrent.ListeningExecutorService;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.function.Supplier;
import java.util.stream.Stream;
import javax.annotation.Nullable;
/**
* Optimized dx command runner which can invoke multiple dx commands in parallel and also avoid
* doing unnecessary dx invocations in the first place.
*
* <p>This is most appropriately represented as a build rule itself (which depends on individual dex
* rules) however this would require significant refactoring of AndroidBinaryRule that would be
* disruptive to other initiatives in flight (namely, ApkBuilder). It is also debatable that it is
* even the right course of action given that it would require dynamically modifying the DAG.
*/
public class SmartDexingStep implements Step {
  // Short name reported for this step in build logs.
  public static final String SHORT_NAME = "smart_dex";
  // Extension of the solid-compressed blob of concatenated secondary dex jars.
  private static final String SECONDARY_SOLID_DEX_EXTENSION = ".dex.jar.xzs";

  /** Supplies a SHA-1 per dex input path; used to decide whether a dx invocation is cached. */
  public interface DexInputHashesProvider {
    ImmutableMap<Path, Sha1HashCode> getDexInputHashes();
  }
  private final AndroidPlatformTarget androidPlatformTarget;
  private final BuildContext buildContext;
  private final ProjectFilesystem filesystem;
  private final boolean desugarInterfaceMethods;
  // Memoized map of output dex artifact -> input paths (primary + secondary); see constructor.
  private final Supplier<Multimap<Path, Path>> outputToInputsSupplier;
  private final Optional<Path> secondaryOutputDir;
  private final DexInputHashesProvider dexInputHashesProvider;
  // Directory where per-output input-hash files are written for cache checks.
  private final Path successDir;
  private final EnumSet<DxStep.Option> dxOptions;
  // Pool on which the individual dx invocations run in parallel.
  private final ListeningExecutorService executorService;
  private final int xzCompressionLevel;
  private final Optional<String> dxMaxHeapSize;
  private final String dexTool;
  private final boolean useDexBuckedId;
  // Extra classpath entries for desugaring; field name typo ("additonal") kept as-is.
  private final Optional<Set<Path>> additonalDesugarDeps;
  private final BuildTarget buildTarget;
  private final Optional<Integer> minSdkVersion;
  /**
   * @param primaryOutputPath Path for the primary dex artifact.
   * @param primaryInputsToDex Set of paths to include as inputs for the primary dex artifact.
   * @param secondaryOutputDir Directory path for the secondary dex artifacts, if there are any.
   *     Note that this directory will be pruned such that only those secondary outputs generated by
   *     this command will remain in the directory!
   * @param secondaryInputsToDex List of paths to input jar files, to use as dx input, keyed by the
   *     corresponding output dex file. Note that for each output file (key), a separate dx
   *     invocation will be started with the corresponding jar files (value) as the input.
   * @param successDir Directory where success artifacts are written.
   * @param executorService The thread pool to execute the dx command on.
   * @param minSdkVersion Minimum Android SDK version to pass to the dexer, if any.
   */
  public SmartDexingStep(
      AndroidPlatformTarget androidPlatformTarget,
      BuildContext buildContext,
      ProjectFilesystem filesystem,
      Optional<Path> primaryOutputPath,
      Optional<Supplier<Set<Path>>> primaryInputsToDex,
      Optional<Path> secondaryOutputDir,
      Optional<Supplier<Multimap<Path, Path>>> secondaryInputsToDex,
      DexInputHashesProvider dexInputHashesProvider,
      Path successDir,
      EnumSet<Option> dxOptions,
      ListeningExecutorService executorService,
      int xzCompressionLevel,
      Optional<String> dxMaxHeapSize,
      String dexTool,
      boolean desugarInterfaceMethods,
      boolean useDexBuckedId,
      Optional<Set<Path>> additonalDesugarDeps,
      BuildTarget buildTarget,
      Optional<Integer> minSdkVersion) {
    this.androidPlatformTarget = androidPlatformTarget;
    this.buildContext = buildContext;
    this.filesystem = filesystem;
    this.desugarInterfaceMethods = desugarInterfaceMethods;
    // Lazily build the combined output->inputs map; the input suppliers may be expensive, and
    // memoizing keeps execute() and getDescription() consistent with each other.
    // NOTE(review): primaryOutputPath.get() is only reached when primaryInputsToDex is present —
    // callers are presumed to supply both together; confirm at call sites.
    this.outputToInputsSupplier =
        MoreSuppliers.memoize(
            () -> {
              Builder<Path, Path> map = ImmutableMultimap.builder();
              if (primaryInputsToDex.isPresent()) {
                map.putAll(primaryOutputPath.get(), primaryInputsToDex.get().get());
              }
              if (secondaryInputsToDex.isPresent()) {
                map.putAll(secondaryInputsToDex.get().get());
              }
              return map.build();
            });
    this.secondaryOutputDir = secondaryOutputDir;
    this.dexInputHashesProvider = dexInputHashesProvider;
    this.successDir = successDir;
    this.dxOptions = dxOptions;
    this.executorService = executorService;
    this.xzCompressionLevel = xzCompressionLevel;
    this.dxMaxHeapSize = dxMaxHeapSize;
    this.dexTool = dexTool;
    this.useDexBuckedId = useDexBuckedId;
    this.additonalDesugarDeps = additonalDesugarDeps;
    this.buildTarget = buildTarget;
    this.minSdkVersion = minSdkVersion;
  }
/**
* @return Optimal (in terms of both memory and performance) number of parallel threads to run
* dexer. The implementation uses running machine hardware characteristics to determine this.
*/
public static int determineOptimalThreadCount() {
// Most processors these days have hyperthreading that multiplies the amount of logical
// processors reported by Java. So in case of 1 CPU, 2 physical cores with hyperthreading, the
// call to Runtime.getRuntime().availableProcessors() would return 1*2*2 = 4, assuming 2 hyper
// threads per core, which is common but in fact may be more than that.
// Using hyper threads does not help to dex faster, but consumes a lot of memory, so it makes
// sense to base heuristics on the number of physical cores.
// Unfortunately there is no good way to detect the number of physical cores in pure Java,
// so we just divide the total number of logical processors by two to cover the majority of
// cases.
// TODO(buck_team): Implement cross-platform hardware capabilities detection and use it here
return Math.max(Runtime.getRuntime().availableProcessors() / 2, 1);
}
  @Override
  public StepExecutionResult execute(ExecutionContext context)
      throws IOException, InterruptedException {
    try {
      // Run every dx invocation (in parallel), then post-process secondary outputs.
      Multimap<Path, Path> outputToInputs = outputToInputsSupplier.get();
      runDxCommands(context, outputToInputs);
      if (secondaryOutputDir.isPresent()) {
        removeExtraneousSecondaryArtifacts(
            secondaryOutputDir.get(), outputToInputs.keySet(), filesystem);
        // Concatenate if solid compression is specified.
        // create a mapping of the xzs file target and the dex.jar files that go into it
        ImmutableMultimap.Builder<Path, Path> secondaryDexJarsMultimapBuilder =
            ImmutableMultimap.builder();
        for (Path p : outputToInputs.keySet()) {
          if (DexStore.XZS.matchesPath(p)) {
            // Group by the prefix before the first '-' in the file name; each group is folded
            // into one "<prefix>.dex.jar.xzs" blob next to its members.
            String[] matches = p.getFileName().toString().split("-");
            Path output = p.getParent().resolve(matches[0].concat(SECONDARY_SOLID_DEX_EXTENSION));
            secondaryDexJarsMultimapBuilder.put(output, p);
          }
        }
        ImmutableMultimap<Path, Path> secondaryDexJarsMultimap =
            secondaryDexJarsMultimapBuilder.build();
        if (!secondaryDexJarsMultimap.isEmpty()) {
          for (Map.Entry<Path, Collection<Path>> entry :
              secondaryDexJarsMultimap.asMap().entrySet()) {
            Path secondaryCompressedBlobOutput = entry.getKey();
            Collection<Path> secondaryDexJars = entry.getValue();
            // Construct the output path for our solid blob and its compressed form.
            Path secondaryBlobOutput =
                secondaryCompressedBlobOutput.getParent().resolve("uncompressed.dex.blob");
            // Concatenate the jars into a blob and compress it.
            Step concatStep =
                new ConcatStep(
                    filesystem, ImmutableList.copyOf(secondaryDexJars), secondaryBlobOutput);
            Step xzStep =
                new XzStep(
                    filesystem,
                    secondaryBlobOutput,
                    secondaryCompressedBlobOutput,
                    xzCompressionLevel);
            // Concat must complete before compression; these run sequentially on purpose.
            StepRunner.runStep(context, concatStep, Optional.of(buildTarget));
            StepRunner.runStep(context, xzStep, Optional.of(buildTarget));
          }
        }
      }
    } catch (StepFailedException e) {
      context.logError(e, "There was an error in smart dexing step.");
      return StepExecutionResults.ERROR;
    }
    return StepExecutionResults.SUCCESS;
  }
  /**
   * Runs one sequence of steps per output dex on the executor service, waiting for all to finish.
   *
   * @throws StepFailedException if any individual dx step failed.
   */
  private void runDxCommands(ExecutionContext context, Multimap<Path, Path> outputToInputs)
      throws StepFailedException, InterruptedException {
    // Invoke dx commands in parallel for maximum thread utilization. In testing, dx revealed
    // itself to be CPU (and not I/O) bound making it a good candidate for parallelization.
    Stream<ImmutableList<Step>> dxSteps = generateDxCommands(filesystem, outputToInputs);
    ImmutableList<Callable<Unit>> callables =
        dxSteps
            .map(
                steps ->
                    (Callable<Unit>)
                        () -> {
                          // Steps for a single output run sequentially within one callable.
                          for (Step step : steps) {
                            StepRunner.runStep(context, step, Optional.of(buildTarget));
                          }
                          return Unit.UNIT;
                        })
            .collect(ImmutableList.toImmutableList());
    try {
      MoreFutures.getAll(executorService, callables);
    } catch (ExecutionException e) {
      // Unwrap and rethrow the step failure; anything else is a programming error.
      Throwable cause = e.getCause();
      Throwables.throwIfInstanceOf(cause, StepFailedException.class);
      // Programmer error.  Boo-urns.
      throw new RuntimeException(cause);
    }
  }
/**
* Prune the secondary output directory of any files that we didn't generate. This is needed
* because we crudely add all files in this directory to the final APK, but the number may have
* been reduced due to split-zip having less code to process.
*
* <p>This is also a defensive measure to cleanup extraneous artifacts left behind due to changes
* to buck itself.
*/
private void removeExtraneousSecondaryArtifacts(
Path secondaryOutputDir, Set<Path> producedArtifacts, ProjectFilesystem projectFilesystem)
throws IOException {
secondaryOutputDir = secondaryOutputDir.normalize();
for (Path secondaryOutput : projectFilesystem.getDirectoryContents(secondaryOutputDir)) {
if (!producedArtifacts.contains(secondaryOutput)
&& !secondaryOutput.getFileName().toString().endsWith(".meta")) {
projectFilesystem.deleteRecursivelyIfExists(secondaryOutput);
}
}
}
  @Override
  public String getShortName() {
    // Stable identifier used in build logs for this step.
    return SHORT_NAME;
  }
@Override
public String getDescription(ExecutionContext context) {
StringBuilder b = new StringBuilder();
b.append(getShortName());
minSdkVersion.ifPresent(minSdk -> b.append("--min-sdk-version ").append(minSdk));
Multimap<Path, Path> outputToInputs = outputToInputsSupplier.get();
for (Path output : outputToInputs.keySet()) {
b.append(" -out ");
b.append(output);
b.append(" -in ");
Joiner.on(':').appendTo(b, Iterables.transform(outputToInputs.get(output), Object::toString));
}
return b.toString();
}
  /**
   * Once the {@code .class} files have been split into separate zip files, each must be converted
   * to a {@code .dex} file.
   *
   * @return one list of steps per output artifact that is not already cached; each list is run
   *     sequentially by {@link #runDxCommands}.
   */
  private Stream<ImmutableList<Step>> generateDxCommands(
      ProjectFilesystem filesystem, Multimap<Path, Path> outputToInputs) {
    ImmutableMap<Path, Sha1HashCode> dexInputHashes = dexInputHashesProvider.getDexInputHashes();
    ImmutableSet<Path> allDexInputPaths = ImmutableSet.copyOf(outputToInputs.values());
    return outputToInputs.asMap().entrySet().stream()
        .map(
            outputInputsPair ->
                new DxPseudoRule(
                    androidPlatformTarget,
                    buildContext,
                    filesystem,
                    dexInputHashes,
                    ImmutableSet.copyOf(outputInputsPair.getValue()),
                    outputInputsPair.getKey(),
                    // Hash file lives in successDir under the output's file name.
                    successDir.resolve(outputInputsPair.getKey().getFileName()),
                    dxOptions,
                    xzCompressionLevel,
                    dxMaxHeapSize,
                    dexTool,
                    // For desugaring, the classpath is every other dex input (all minus own
                    // inputs) plus any extra desugar deps; null disables desugar classpath.
                    desugarInterfaceMethods
                        ? Sets.union(
                            Sets.difference(
                                allDexInputPaths, ImmutableSet.copyOf(outputInputsPair.getValue())),
                            additonalDesugarDeps.orElse(ImmutableSet.of()))
                        : null,
                    useDexBuckedId,
                    minSdkVersion))
        // checkIsCached also primes the rule's input hash, which buildInternal requires.
        .filter(dxPseudoRule -> !dxPseudoRule.checkIsCached())
        .map(
            dxPseudoRule -> {
              ImmutableList.Builder<Step> steps = ImmutableList.builder();
              dxPseudoRule.buildInternal(steps);
              return steps.build();
            });
  }
  /**
   * Internally designed to simulate a dexing buck rule so that once refactored more broadly as such
   * it should be straightforward to convert this code.
   *
   * <p>This pseudo rule does not use the normal .success file model but instead checksums its
   * inputs. This is because the input zip files are guaranteed to have changed on the filesystem
   * (ZipSplitter will always write them out even if the same), but the contents contained in the
   * zip may not have changed.
   *
   * <p>Usage protocol: call {@link #checkIsCached()} first (it also computes and stores the
   * current input hash), then {@link #buildInternal} only when not cached.
   */
  @VisibleForTesting
  static class DxPseudoRule {
    private final AndroidPlatformTarget androidPlatformTarget;
    private final BuildContext buildContext;
    private final ProjectFilesystem filesystem;
    // SHA-1 per input path; every member of srcs must have an entry here.
    private final Map<Path, Sha1HashCode> dexInputHashes;
    private final Set<Path> srcs;
    private final Path outputPath;
    // File holding the input hash of the last successful run, for cache checks.
    private final Path outputHashPath;
    private final EnumSet<Option> dxOptions;
    // Set by checkIsCached(); read by buildInternal(). Null until checkIsCached() runs.
    @Nullable private String newInputsHash;
    private final int xzCompressionLevel;
    private final Optional<String> dxMaxHeapSize;
    private final String dexTool;
    @Nullable private final Collection<Path> classpathFiles;
    private final boolean useDexBuckedId;
    private final Optional<Integer> minSdkVersion;

    public DxPseudoRule(
        AndroidPlatformTarget androidPlatformTarget,
        BuildContext buildContext,
        ProjectFilesystem filesystem,
        Map<Path, Sha1HashCode> dexInputHashes,
        Set<Path> srcs,
        Path outputPath,
        Path outputHashPath,
        EnumSet<Option> dxOptions,
        int xzCompressionLevel,
        Optional<String> dxMaxHeapSize,
        String dexTool,
        @Nullable Collection<Path> classpathFiles,
        boolean useDexBuckedId,
        Optional<Integer> minSdkVersion) {
      this.androidPlatformTarget = androidPlatformTarget;
      this.buildContext = buildContext;
      this.filesystem = filesystem;
      // Defensive immutable copies; iteration order of srcs is fixed at construction.
      this.dexInputHashes = ImmutableMap.copyOf(dexInputHashes);
      this.srcs = ImmutableSet.copyOf(srcs);
      this.outputPath = outputPath;
      this.outputHashPath = outputHashPath;
      this.dxOptions = dxOptions;
      this.xzCompressionLevel = xzCompressionLevel;
      this.dxMaxHeapSize = dxMaxHeapSize;
      this.dexTool = dexTool;
      this.classpathFiles = classpathFiles;
      this.useDexBuckedId = useDexBuckedId;
      this.minSdkVersion = minSdkVersion;
    }

    /**
     * Read the previous run's hash from the filesystem.
     *
     * @return Previous hash if there was one; null otherwise.
     */
    @Nullable
    private String getPreviousInputsHash() {
      // Returning null will trigger the dx command to run again.
      return filesystem.readFirstLine(outputHashPath).orElse(null);
    }

    /** Folds the per-input SHA-1s (in srcs order) into one hash representing all inputs. */
    @VisibleForTesting
    String hashInputs() {
      Hasher hasher = Hashing.sha1().newHasher();
      for (Path src : srcs) {
        Preconditions.checkState(
            dexInputHashes.containsKey(src), "no hash key exists for path %s", src.toString());
        Sha1HashCode hash = Objects.requireNonNull(dexInputHashes.get(src));
        hash.update(hasher);
      }
      return hasher.hash().toString();
    }

    /**
     * @return true if the output and hash file exist and the recorded hash matches the current
     *     inputs. Side effect: stores the current hash in {@link #newInputsHash}.
     */
    public boolean checkIsCached() {
      newInputsHash = hashInputs();
      if (!filesystem.exists(outputHashPath) || !filesystem.exists(outputPath)) {
        return false;
      }
      // Verify input hashes.
      String currentInputsHash = getPreviousInputsHash();
      return newInputsHash.equals(currentInputsHash);
    }

    /** Appends the dx steps plus a final step recording the input hash. Call checkIsCached first. */
    private void buildInternal(ImmutableList.Builder<Step> steps) {
      Preconditions.checkState(newInputsHash != null, "Must call checkIsCached first!");
      createDxStepForDxPseudoRule(
          androidPlatformTarget,
          steps,
          buildContext,
          filesystem,
          srcs,
          outputPath,
          dxOptions,
          xzCompressionLevel,
          dxMaxHeapSize,
          dexTool,
          classpathFiles,
          useDexBuckedId,
          minSdkVersion);
      steps.add(
          new WriteFileStep(filesystem, newInputsHash, outputHashPath, /* executable */ false));
    }
  }
  /**
   * The step to produce the .dex file will be determined by the file extension of outputPath, much
   * as {@code dx} itself chooses whether to embed the dex inside a jar/zip based on the destination
   * file passed to it. We also create a ".meta" file that contains information about the compressed
   * and uncompressed size of the dex; this information is useful later, in applications, when
   * unpacking.
   *
   * @param steps Builder the generated steps are appended to, in execution order.
   * @param outputPath Destination whose extension selects the XZ / XZS / JAR-RAW branch below.
   * @throws IllegalArgumentException if outputPath matches no known DexStore type.
   */
  static void createDxStepForDxPseudoRule(
      AndroidPlatformTarget androidPlatformTarget,
      ImmutableList.Builder<Step> steps,
      BuildContext context,
      ProjectFilesystem filesystem,
      Collection<Path> filesToDex,
      Path outputPath,
      EnumSet<Option> dxOptions,
      int xzCompressionLevel,
      Optional<String> dxMaxHeapSize,
      String dexTool,
      @Nullable Collection<Path> classpathFiles,
      boolean useDexBuckedId,
      Optional<Integer> minSdkVersion) {
    Optional<String> buckedId = Optional.empty();
    String output = outputPath.toString();
    String fileName = Files.getNameWithoutExtension(output);
    if (useDexBuckedId && fileName.startsWith("classes")) {
      // We know what the output file name is ("classes.dex" or "classesN.dex") as these
      // are generated in SplitZipStep and passed around as part of a multi-map - it is
      // simply easier and cleaner to extract the dex file number to be used as unique
      // identifier rather than creating another map and pass it around
      String[] tokens = fileName.split("classes");
      String id = tokens.length == 0 ? "" /* primary */ : tokens[1] /* secondary */;
      buckedId = Optional.of(id);
    }
    if (DexStore.XZ.matchesPath(outputPath)) {
      // XZ branch: dex to a temp jar, repack with classes.dex STOREd, analyze, then xz-compress.
      Path tempDexJarOutput = Paths.get(output.replaceAll("\\.jar\\.xz$", ".tmp.jar"));
      steps.add(
          new DxStep(
              filesystem,
              androidPlatformTarget,
              tempDexJarOutput,
              filesToDex,
              dxOptions,
              dxMaxHeapSize,
              dexTool,
              false,
              classpathFiles,
              buckedId,
              minSdkVersion));
      // We need to make sure classes.dex is STOREd in the .dex.jar file, otherwise .XZ
      // compression won't be effective.
      Path repackedJar = Paths.get(output.replaceAll("\\.xz$", ""));
      steps.add(
          new RepackZipEntriesStep(
              filesystem,
              tempDexJarOutput,
              repackedJar,
              ImmutableSet.of("classes.dex"),
              ZipCompressionLevel.NONE));
      steps.add(
          RmStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), filesystem, tempDexJarOutput)));
      steps.add(
          new DexJarAnalysisStep(
              filesystem,
              repackedJar,
              repackedJar.resolveSibling(repackedJar.getFileName() + ".meta")));
      steps.add(new XzStep(filesystem, repackedJar, xzCompressionLevel));
    } else if (DexStore.XZS.matchesPath(outputPath)) {
      // Essentially the same logic as the XZ case above, except we compress later.
      // The differences in output file names make it worth separating into a different case.
      // Ensure classes.dex is stored.
      Path tempDexJarOutput = Paths.get(output.replaceAll("\\.jar\\.xzs\\.tmp~$", ".tmp.jar"));
      steps.add(
          new DxStep(
              filesystem,
              androidPlatformTarget,
              tempDexJarOutput,
              filesToDex,
              dxOptions,
              dxMaxHeapSize,
              dexTool,
              false,
              classpathFiles,
              buckedId,
              minSdkVersion));
      steps.add(
          new RepackZipEntriesStep(
              filesystem,
              tempDexJarOutput,
              outputPath,
              ImmutableSet.of("classes.dex"),
              ZipCompressionLevel.NONE));
      steps.add(
          RmStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), filesystem, tempDexJarOutput)));
      // Write a .meta file.
      steps.add(
          new DexJarAnalysisStep(
              filesystem,
              outputPath,
              outputPath.resolveSibling(outputPath.getFileName() + ".meta")));
    } else if (DexStore.JAR.matchesPath(outputPath)
        || DexStore.RAW.matchesPath(outputPath)
        || output.endsWith("classes.dex")) {
      // JAR/RAW branch: dex directly to the final destination; jars also get a .meta and a scrub.
      steps.add(
          new DxStep(
              filesystem,
              androidPlatformTarget,
              outputPath,
              filesToDex,
              dxOptions,
              dxMaxHeapSize,
              dexTool,
              false,
              classpathFiles,
              buckedId,
              minSdkVersion));
      if (DexStore.JAR.matchesPath(outputPath)) {
        steps.add(
            new DexJarAnalysisStep(
                filesystem,
                outputPath,
                outputPath.resolveSibling(outputPath.getFileName() + ".meta")));
        steps.add(ZipScrubberStep.of(filesystem.resolve(outputPath)));
      }
    } else {
      throw new IllegalArgumentException(
          String.format("Suffix of %s does not have a corresponding DexStore type.", outputPath));
    }
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.io;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.hops.OptimizerUtils;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.matrix.data.CSVFileFormatProperties;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
/**
 * Parallel version of ReaderTextCSV.java. To summarize, we do two passes in
 * order to compute row offsets and the actual read. We accordingly create count
 * and read tasks and use fixed-size thread pools to execute these tasks. If the
 * target matrix is dense, the inserts are done lock-free. In contrast to
 * textcell parallel read, we also do lock-free inserts if the matrix is
 * sparse, because splits contain row-partitioned lines and hence there is no
 * danger of lost updates. Note, there is also no sorting of sparse rows
 * required because data comes in sorted order per row.
 *
 */
public class ReaderTextCSVParallel extends MatrixReader
{
	// CSV format properties (header flag, delimiter, fill flag/value)
	private CSVFileFormatProperties _props = null;
	// degree of parallelism for both the count pass and the read pass
	private int _numThreads = 1;
	// per-split row offsets/lengths computed in the first (count) pass,
	// consumed by the read tasks of the second pass
	private SplitOffsetInfos _offsets = null;
	/**
	 * Creates a parallel CSV reader; the degree of parallelism comes from
	 * the optimizer configuration, not from the properties object.
	 */
	public ReaderTextCSVParallel(CSVFileFormatProperties props) {
		_numThreads = OptimizerUtils.getParallelTextReadParallelism();
		_props = props;
	}
	/**
	 * Reads a CSV matrix from HDFS in two passes: pass 1 counts rows per
	 * split (in parallel) to compute row offsets and allocate the output
	 * block; pass 2 parses the values and inserts them lock-free.
	 *
	 * @param fname  HDFS file (or directory) name
	 * @param rlen   expected number of rows; actual dimensions are
	 *               determined in the first pass
	 * @param clen   expected number of columns
	 * @param brlen  block size in rows (not used by the CSV read itself)
	 * @param bclen  block size in columns (not used by the CSV read itself)
	 * @param estnnz estimated number of non-zeros, negative if unknown
	 * @return the fully read matrix block with nnz maintained
	 */
	@Override
	public MatrixBlock readMatrixFromHDFS(String fname, long rlen, long clen,
			int brlen, int bclen, long estnnz)
		throws IOException, DMLRuntimeException
	{
		// prepare file access
		JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
		Path path = new Path(fname);
		FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
		FileInputFormat.addInputPath(job, path);
		TextInputFormat informat = new TextInputFormat();
		informat.configure(job);
		// request one split per thread; sort splits for deterministic offsets
		InputSplit[] splits = informat.getSplits(job, _numThreads);
		splits = IOUtilFunctions.sortInputSplits(splits);
		// check existence and non-empty file
		checkValidInputFile(fs, path);
		// First Read Pass (count rows/cols, determine offsets, allocate matrix block)
		MatrixBlock ret = computeCSVSizeAndCreateOutputMatrixBlock(splits,
				path, job, _props.hasHeader(), _props.getDelim(), estnnz);
		rlen = ret.getNumRows();
		clen = ret.getNumColumns();
		// Second Read Pass (read, parse strings, append to matrix block)
		readCSVMatrixFromHDFS(splits, path, job, ret, rlen, clen, brlen, bclen,
				_props.hasHeader(), _props.getDelim(), _props.isFill(),
				_props.getFillValue());
		//post-processing (representation-specific, change of sparse/dense block representation)
		// - no sorting required for CSV because it is read in sorted order per row
		// - nnz explicitly maintained in parallel for the individual splits
		ret.examSparsity();
		// sanity check for parallel row count (since determined internally)
		// NOTE(review): rlen was overwritten with ret.getNumRows() above, so
		// this condition can never fire; it presumably should compare against
		// the caller-provided rlen — TODO confirm intended semantics
		if (rlen > 0 && rlen != ret.getNumRows())
			throw new DMLRuntimeException("Read matrix inconsistent with given meta data: "
					+ "expected nrow="+ rlen + ", real nrow=" + ret.getNumRows());
		return ret;
	}
	/** Stream-based read is not parallelized; falls back to the sequential reader. */
	@Override
	public MatrixBlock readMatrixFromInputStream(InputStream is, long rlen, long clen, int brlen, int bclen, long estnnz)
		throws IOException, DMLRuntimeException
	{
		//not implemented yet, fallback to sequential reader
		return new ReaderTextCSV(_props)
			.readMatrixFromInputStream(is, rlen, clen, brlen, bclen, estnnz);
	}
	/**
	 * Second pass: spawns one read task per input split; each task parses
	 * its lines and writes into the shared destination block at the row
	 * offsets computed in the first pass (hence lock-free).
	 *
	 * @throws IOException if any read task failed or the pool was interrupted
	 */
	private void readCSVMatrixFromHDFS(InputSplit[] splits, Path path, JobConf job,
			MatrixBlock dest, long rlen, long clen, int brlen, int bclen,
			boolean hasHeader, String delim, boolean fill, double fillValue)
		throws IOException
	{
		FileInputFormat.addInputPath(job, path);
		TextInputFormat informat = new TextInputFormat();
		informat.configure(job);
		ExecutorService pool = Executors.newFixedThreadPool(_numThreads);
		try
		{
			// create read tasks for all splits
			ArrayList<CSVReadTask> tasks = new ArrayList<>();
			int splitCount = 0;
			for (InputSplit split : splits) {
				tasks.add( new CSVReadTask(split, _offsets, informat, job, dest,
					rlen, clen, hasHeader, delim, fill, fillValue, splitCount++) );
			}
			// invokeAll blocks until all tasks have completed
			pool.invokeAll(tasks);
			pool.shutdown();
			// check return codes and aggregate nnz
			long lnnz = 0;
			for (CSVReadTask rt : tasks) {
				lnnz += rt.getPartialNnz();
				if (!rt.getReturnCode()) {
					Exception err = rt.getException();
					throw new IOException("Read task for csv input failed: "+ err.toString(), err);
				}
			}
			dest.setNonZeros(lnnz);
		}
		catch (Exception e) {
			// NOTE(review): if this path is reached before shutdown(), the
			// pool threads are not released — consider shutdown in a finally
			throw new IOException("Threadpool issue, while parallel read.", e);
		}
	}
	/**
	 * First pass: determines the number of columns from the first record,
	 * counts rows per split in parallel, derives per-split row offsets
	 * (stored in _offsets), and allocates the output block accordingly.
	 */
	private MatrixBlock computeCSVSizeAndCreateOutputMatrixBlock(
			InputSplit[] splits, Path path, JobConf job, boolean hasHeader,
			String delim, long estnnz) throws IOException, DMLRuntimeException
	{
		int nrow = 0;
		int ncol = 0;
		FileInputFormat.addInputPath(job, path);
		TextInputFormat informat = new TextInputFormat();
		informat.configure(job);
		// count no of entities in the first non-header row
		LongWritable key = new LongWritable();
		Text oneLine = new Text();
		RecordReader<LongWritable, Text> reader = informat
				.getRecordReader(splits[0], job, Reporter.NULL);
		try {
			if (reader.next(key, oneLine)) {
				String cellStr = oneLine.toString().trim();
				// number of columns = delimiter occurrences + 1
				ncol = StringUtils.countMatches(cellStr, delim) + 1;
			}
		}
		finally {
			IOUtilFunctions.closeSilently(reader);
		}
		// count rows in parallel per split
		try
		{
			ExecutorService pool = Executors.newFixedThreadPool(_numThreads);
			ArrayList<CountRowsTask> tasks = new ArrayList<>();
			for (InputSplit split : splits) {
				tasks.add(new CountRowsTask(split, informat, job, hasHeader));
				// only the first split can contain the header line
				hasHeader = false;
			}
			pool.invokeAll(tasks);
			pool.shutdown();
			// collect row counts for offset computation
			// early error notify in case not all tasks successful
			_offsets = new SplitOffsetInfos(tasks.size());
			for (CountRowsTask rt : tasks) {
				if (!rt.getReturnCode())
					throw new IOException("Count task for csv input failed: "+ rt.getErrMsg());
				// offset of split i = sum of row counts of splits 0..i-1
				_offsets.setOffsetPerSplit(tasks.indexOf(rt), nrow);
				_offsets.setLenghtPerSplit(tasks.indexOf(rt), rt.getRowCount());
				nrow = nrow + rt.getRowCount();
			}
		}
		catch (Exception e) {
			throw new IOException("Threadpool Error " + e.getMessage(), e);
		}
		// allocate target matrix block based on given size;
		// need to allocate sparse as well since lock-free insert into target
		long estnnz2 = (estnnz < 0) ? (long)nrow * ncol : estnnz;
		return createOutputMatrixBlock(nrow, ncol, nrow, ncol, estnnz2, true, true);
	}
	/**
	 * Holder for the starting row offset and the number of rows of each
	 * input split. (The "Lenght" spelling is historical and kept as-is
	 * since it is part of the internal accessor names.)
	 */
	private static class SplitOffsetInfos {
		// offset & length info per split
		private int[] offsetPerSplit = null;
		private int[] lenghtPerSplit = null;
		public SplitOffsetInfos(int numSplits) {
			lenghtPerSplit = new int[numSplits];
			offsetPerSplit = new int[numSplits];
		}
		public int getLenghtPerSplit(int split) {
			return lenghtPerSplit[split];
		}
		public void setLenghtPerSplit(int split, int r) {
			lenghtPerSplit[split] = r;
		}
		public int getOffsetPerSplit(int split) {
			return offsetPerSplit[split];
		}
		public void setOffsetPerSplit(int split, int o) {
			offsetPerSplit[split] = o;
		}
	}
	/**
	 * First-pass task: counts the data rows of a single input split,
	 * skipping the header line if this split carries it.
	 */
	private static class CountRowsTask implements Callable<Object>
	{
		private InputSplit _split = null;
		private TextInputFormat _informat = null;
		private JobConf _job = null;
		private boolean _rc = true;     // false on error
		private String _errMsg = null;  // error message if _rc == false
		private int _nrows = -1;        // counted data rows
		private boolean _hasHeader = false;
		public CountRowsTask(InputSplit split, TextInputFormat informat,
				JobConf job, boolean hasHeader) {
			_split = split;
			_informat = informat;
			_job = job;
			_nrows = 0;
			_hasHeader = hasHeader;
		}
		public boolean getReturnCode() {
			return _rc;
		}
		public int getRowCount() {
			return _nrows;
		}
		public String getErrMsg() {
			return _errMsg;
		}
		@Override
		public Object call()
			throws Exception
		{
			RecordReader<LongWritable, Text> reader = _informat.getRecordReader(_split, _job, Reporter.NULL);
			LongWritable key = new LongWritable();
			Text oneLine = new Text();
			try {
				// count rows from the first non-header row
				if (_hasHeader) {
					reader.next(key, oneLine);
				}
				while (reader.next(key, oneLine)) {
					_nrows++;
				}
			}
			catch (Exception e) {
				_rc = false;
				// NOTE(review): split and exception text are concatenated
				// without a separator — kept as-is (behavioral string)
				_errMsg = "RecordReader error CSV format. split: "+ _split.toString() + e.getMessage();
				throw new IOException(_errMsg);
			}
			finally {
				IOUtilFunctions.closeSilently(reader);
			}
			return null;
		}
	}
	/**
	 * Second-pass task: parses the lines of one split and writes the values
	 * into the shared destination block starting at this split's row offset.
	 * Inserts are lock-free: dense writes target disjoint cells, and sparse
	 * appends target disjoint (row-partitioned) rows.
	 */
	private static class CSVReadTask implements Callable<Object>
	{
		private InputSplit _split = null;
		private SplitOffsetInfos _splitoffsets = null;
		private boolean _sparse = false;  // destination representation
		private TextInputFormat _informat = null;
		private JobConf _job = null;
		private MatrixBlock _dest = null;
		private long _rlen = -1;
		private long _clen = -1;
		private boolean _isFirstSplit = false;
		private boolean _hasHeader = false;
		private boolean _fill = false;        // fill empty cells with _fillValue?
		private double _fillValue = 0;
		private String _delim = null;
		private int _splitCount = 0;
		private boolean _rc = true;           // false on error
		private Exception _exception = null;  // cause if _rc == false
		private long _nnz;                    // partial nnz of this split
		public CSVReadTask(InputSplit split, SplitOffsetInfos offsets,
				TextInputFormat informat, JobConf job, MatrixBlock dest,
				long rlen, long clen, boolean hasHeader, String delim,
				boolean fill, double fillValue, int splitCount)
		{
			_split = split;
			_splitoffsets = offsets; // new SplitOffsetInfos(offsets);
			_sparse = dest.isInSparseFormat();
			_informat = informat;
			_job = job;
			_dest = dest;
			_rlen = rlen;
			_clen = clen;
			_isFirstSplit = (splitCount == 0);
			_hasHeader = hasHeader;
			_fill = fill;
			_fillValue = fillValue;
			_delim = delim;
			_rc = true;
			_splitCount = splitCount;
		}
		public boolean getReturnCode() {
			return _rc;
		}
		public Exception getException() {
			return _exception;
		}
		public long getPartialNnz() {
			return _nnz;
		}
		@Override
		public Object call()
			throws Exception
		{
			int row = 0;
			int col = 0;
			double cellValue = 0;
			long lnnz = 0;
			try
			{
				RecordReader<LongWritable, Text> reader = _informat.getRecordReader(_split, _job, Reporter.NULL);
				LongWritable key = new LongWritable();
				Text value = new Text();
				// skip the header line
				if (_isFirstSplit && _hasHeader) {
					reader.next(key, value);
				}
				// tracks whether an empty cell occurred while fill is disabled
				boolean noFillEmpty = false;
				// start at this split's precomputed row offset
				row = _splitoffsets.getOffsetPerSplit(_splitCount);
				try {
					if (_sparse) // SPARSE<-value
					{
						while (reader.next(key, value)) // foreach line
						{
							String cellStr = value.toString().trim();
							String[] parts = IOUtilFunctions.split(cellStr, _delim);
							col = 0;
							for (String part : parts) // foreach cell
							{
								part = part.trim();
								if (part.isEmpty()) {
									noFillEmpty |= !_fill;
									cellValue = _fillValue;
								}
								else {
									cellValue = IOUtilFunctions.parseDoubleParallel(part);
								}
								// sparse: only append non-zeros
								if( cellValue != 0 ) {
									_dest.appendValue(row, col, cellValue);
									lnnz++;
								}
								col++;
							}
							// sanity checks (number of columns, fill values)
							IOUtilFunctions.checkAndRaiseErrorCSVEmptyField(cellStr, _fill, noFillEmpty);
							IOUtilFunctions.checkAndRaiseErrorCSVNumColumns(_split.toString(), cellStr, parts, _clen);
							row++;
						}
					}
					else // DENSE<-value
					{
						while (reader.next(key, value)) // foreach line
						{
							String cellStr = value.toString().trim();
							String[] parts = IOUtilFunctions.split(cellStr, _delim);
							col = 0;
							for (String part : parts) // foreach cell
							{
								part = part.trim();
								if (part.isEmpty()) {
									noFillEmpty |= !_fill;
									cellValue = _fillValue;
								}
								else {
									cellValue = IOUtilFunctions.parseDoubleParallel(part);
								}
								// dense: unsafe set is race-free because splits write disjoint rows
								if( cellValue != 0 ) {
									_dest.setValueDenseUnsafe(row, col, cellValue);
									lnnz++;
								}
								col++;
							}
							// sanity checks (number of columns, fill values)
							IOUtilFunctions.checkAndRaiseErrorCSVEmptyField(cellStr, _fill, noFillEmpty);
							IOUtilFunctions.checkAndRaiseErrorCSVNumColumns(_split.toString(), cellStr, parts, _clen);
							row++;
						}
					}
					// sanity checks (number of rows)
					if (row != (_splitoffsets.getOffsetPerSplit(_splitCount) + _splitoffsets.getLenghtPerSplit(_splitCount)) )
					{
						throw new IOException("Incorrect number of rows ("+ row+ ") found in delimited file ("
								+ (_splitoffsets.getOffsetPerSplit(_splitCount)
								+ _splitoffsets.getLenghtPerSplit(_splitCount))+ "): " + value);
					}
				}
				finally {
					IOUtilFunctions.closeSilently(reader);
				}
			}
			catch (Exception ex) {
				// central error handling (return code, message)
				_rc = false;
				_exception = ex;
				// post-mortem error handling and bounds checking
				if (row < 0 || row + 1 > _rlen || col < 0 || col + 1 > _clen) {
					String errMsg = "CSV cell [" + (row + 1) + "," + (col + 1)+ "] " +
							"out of overall matrix range [1:" + _rlen+ ",1:" + _clen + "]. " + ex.getMessage();
					throw new IOException(errMsg, _exception);
				}
				else {
					String errMsg = "Unable to read matrix in text CSV format. "+ ex.getMessage();
					throw new IOException(errMsg, _exception);
				}
			}
			//post processing
			_nnz = lnnz;
			return null;
		}
	}
}
|
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.upgrades;
import org.apache.http.HttpHost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.core.IndexerState;
import org.elasticsearch.client.transform.GetTransformStatsResponse;
import org.elasticsearch.client.transform.transforms.DestConfig;
import org.elasticsearch.client.transform.transforms.SourceConfig;
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.TransformStats;
import org.elasticsearch.client.transform.transforms.pivot.GroupConfig;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.core.Booleans;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.xcontent.DeprecationHandler;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.xpack.test.rest.XPackRestTestConstants.TRANSFORM_INTERNAL_INDEX_PREFIX;
import static org.elasticsearch.xpack.test.rest.XPackRestTestConstants.TRANSFORM_INTERNAL_INDEX_PREFIX_DEPRECATED;
import static org.elasticsearch.xpack.test.rest.XPackRestTestConstants.TRANSFORM_TASK_NAME;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.oneOf;
public class TransformSurvivesUpgradeIT extends AbstractUpgradeTestCase {
    private static final String TRANSFORM_ENDPOINT = "/_transform/";
    private static final String CONTINUOUS_TRANSFORM_ID = "continuous-transform-upgrade-job";
    private static final String CONTINUOUS_TRANSFORM_SOURCE = "transform-upgrade-continuous-source";
    // entities "user_1".."user_5" used as group-by keys of the pivot
    private static final List<String> ENTITIES = Stream.iterate(1, n -> n + 1).limit(5).map(v -> "user_" + v).collect(Collectors.toList());
    // time offsets of 1..5 minutes in the past used to spread source documents
    private static final List<TimeValue> BUCKETS = Stream.iterate(1, n -> n + 1)
        .limit(5)
        .map(TimeValue::timeValueMinutes)
        .collect(Collectors.toList());
    /** Waits until no transform-related tasks remain pending on the cluster. */
    protected static void waitForPendingTransformTasks() throws Exception {
        waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(TRANSFORM_TASK_NAME) == false);
    }
    @Override
    protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException {
        RestClientBuilder builder = RestClient.builder(hosts);
        configureClient(builder, settings);
        // deprecation warnings are expected against mixed-version clusters
        builder.setStrictDeprecationMode(false);
        return builder.build();
    }
    /**
     * The purpose of this test is to ensure that when a transform is running through a rolling upgrade it
     * keeps working and does not fail
     */
    public void testTransformRollingUpgrade() throws Exception {
        // raise logging verbosity for the transform machinery to aid debugging
        Request adjustLoggingLevels = new Request("PUT", "/_cluster/settings");
        adjustLoggingLevels.setJsonEntity(
            "{\"persistent\": {"
                + "\"logger.org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer\": \"trace\","
                + "\"logger.org.elasticsearch.xpack.dataframe\": \"trace\","
                + "\"logger.org.elasticsearch.xpack.transform\": \"trace\""
                + "}}"
        );
        client().performRequest(adjustLoggingLevels);
        Request waitForYellow = new Request("GET", "/_cluster/health");
        waitForYellow.addParameter("wait_for_nodes", "3");
        waitForYellow.addParameter("wait_for_status", "yellow");
        // each branch corresponds to one stage of the rolling upgrade
        switch (CLUSTER_TYPE) {
            case OLD:
                client().performRequest(waitForYellow);
                createAndStartContinuousTransform();
                break;
            case MIXED:
                client().performRequest(waitForYellow);
                // the MIXED stage runs twice; checkpoint expectations differ per round
                long lastCheckpoint = 1;
                if (Booleans.parseBoolean(System.getProperty("tests.first_round")) == false) {
                    lastCheckpoint = 2;
                }
                verifyContinuousTransformHandlesData(lastCheckpoint);
                verifyUpgradeFailsIfMixedCluster();
                break;
            case UPGRADED:
                client().performRequest(waitForYellow);
                verifyContinuousTransformHandlesData(3);
                verifyUpgrade();
                cleanUpTransforms();
                break;
            default:
                throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]");
        }
    }
    /** Stops and deletes the continuous transform and waits for its tasks to drain. */
    private void cleanUpTransforms() throws Exception {
        stopTransform(CONTINUOUS_TRANSFORM_ID);
        deleteTransform(CONTINUOUS_TRANSFORM_ID);
        waitForPendingTransformTasks();
    }
    /**
     * OLD-cluster stage: creates the source index, seeds it with data, then
     * creates and starts a continuous pivot transform and waits until its
     * first checkpoint has processed all seeded documents.
     */
    private void createAndStartContinuousTransform() throws Exception {
        createIndex(CONTINUOUS_TRANSFORM_SOURCE);
        long totalDocsWrittenSum = 0;
        for (TimeValue bucket : BUCKETS) {
            int docs = randomIntBetween(1, 25);
            putData(CONTINUOUS_TRANSFORM_SOURCE, docs, bucket, ENTITIES);
            // putData writes one doc per entity per iteration
            totalDocsWrittenSum += docs * ENTITIES.size();
        }
        long totalDocsWritten = totalDocsWrittenSum;
        TransformConfig config = TransformConfig.builder()
            .setSyncConfig(TimeSyncConfig.builder().setField("timestamp").setDelay(TimeValue.timeValueSeconds(1)).build())
            .setPivotConfig(
                PivotConfig.builder()
                    .setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("stars").field("stars")))
                    .setGroups(GroupConfig.builder().groupBy("user_id", TermsGroupSource.builder().setField("user_id").build()).build())
                    .build()
            )
            .setDest(DestConfig.builder().setIndex(CONTINUOUS_TRANSFORM_ID + "_idx").build())
            .setSource(SourceConfig.builder().setIndex(CONTINUOUS_TRANSFORM_SOURCE).build())
            .setId(CONTINUOUS_TRANSFORM_ID)
            .setFrequency(TimeValue.timeValueSeconds(1))
            .build();
        putTransform(CONTINUOUS_TRANSFORM_ID, config);
        startTransform(CONTINUOUS_TRANSFORM_ID);
        waitUntilAfterCheckpoint(CONTINUOUS_TRANSFORM_ID, 0L);
        assertBusy(() -> {
            TransformStats stateAndStats = getTransformStats(CONTINUOUS_TRANSFORM_ID);
            assertThat(stateAndStats.getIndexerStats().getDocumentsIndexed(), equalTo((long) ENTITIES.size()));
            assertThat(stateAndStats.getIndexerStats().getDocumentsProcessed(), equalTo(totalDocsWritten));
            // Even if we get back to started, we may periodically get set back to `indexing` when triggered.
            // Though short lived due to no changes on the source indices, it could result in flaky test behavior
            assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING));
        }, 120, TimeUnit.SECONDS);
        // We want to make sure our latest state is written before we turn the node off, this makes the testing more reliable
        awaitWrittenIndexerState(CONTINUOUS_TRANSFORM_ID, IndexerState.STARTED.value());
    }
    /**
     * MIXED/UPGRADED stage: verifies the transform resumes, feeds it new data
     * for a previously unseen entity, waits for the next checkpoint, and
     * asserts that both in-memory and persisted stats advanced.
     */
    @SuppressWarnings("unchecked")
    private void verifyContinuousTransformHandlesData(long expectedLastCheckpoint) throws Exception {
        // A continuous transform should automatically become started when it gets assigned to a node
        // if it was assigned to the node that was removed from the cluster
        assertBusy(() -> {
            TransformStats stateAndStats = getTransformStats(CONTINUOUS_TRANSFORM_ID);
            assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING));
        }, 120, TimeUnit.SECONDS);
        TransformStats previousStateAndStats = getTransformStats(CONTINUOUS_TRANSFORM_ID);
        // Add a new user and write data to it
        // This is so we can have more reliable data counts, as writing to existing entities requires
        // rescanning the past data
        List<String> entities = new ArrayList<>(1);
        // string concatenation yields e.g. "user_51" for checkpoint 1 — a fresh,
        // unique entity per round (presumably intentional, not arithmetic — TODO confirm)
        entities.add("user_" + ENTITIES.size() + expectedLastCheckpoint);
        int docs = 5;
        // Index the data
        // The frequency and delay should see the data once its indexed
        putData(CONTINUOUS_TRANSFORM_SOURCE, docs, TimeValue.timeValueSeconds(0), entities);
        waitUntilAfterCheckpoint(CONTINUOUS_TRANSFORM_ID, expectedLastCheckpoint);
        assertBusy(
            () -> assertThat(
                getTransformStats(CONTINUOUS_TRANSFORM_ID).getIndexerStats().getDocumentsProcessed(),
                greaterThanOrEqualTo(docs + previousStateAndStats.getIndexerStats().getDocumentsProcessed())
            ),
            120,
            TimeUnit.SECONDS
        );
        TransformStats stateAndStats = getTransformStats(CONTINUOUS_TRANSFORM_ID);
        assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING));
        awaitWrittenIndexerState(CONTINUOUS_TRANSFORM_ID, (responseBody) -> {
            Map<String, Object> indexerStats = (Map<String, Object>) ((List<?>) XContentMapValues.extractValue(
                "hits.hits._source.stats",
                responseBody
            )).get(0);
            assertThat(
                (Integer) indexerStats.get("documents_indexed"),
                greaterThan(Long.valueOf(previousStateAndStats.getIndexerStats().getDocumentsIndexed()).intValue())
            );
            assertThat(
                (Integer) indexerStats.get("documents_processed"),
                greaterThan(Long.valueOf(previousStateAndStats.getIndexerStats().getDocumentsProcessed()).intValue())
            );
        });
    }
    /** Asserts that the transform _upgrade API is rejected while nodes differ in version. */
    private void verifyUpgradeFailsIfMixedCluster() {
        // upgrade tests by design are also executed with the same version, this check must be skipped in this case, see gh#39102.
        if (UPGRADE_FROM_VERSION.equals(Version.CURRENT)) {
            return;
        }
        final Request upgradeTransformRequest = new Request("POST", getTransformEndpoint() + "_upgrade");
        Exception ex = expectThrows(Exception.class, () -> client().performRequest(upgradeTransformRequest));
        assertThat(ex.getMessage(), containsString("All nodes must be the same version"));
    }
    /** Asserts that the transform _upgrade API succeeds on a fully upgraded cluster. */
    private void verifyUpgrade() throws IOException {
        final Request upgradeTransformRequest = new Request("POST", getTransformEndpoint() + "_upgrade");
        Response response = client().performRequest(upgradeTransformRequest);
        assertEquals(200, response.getStatusLine().getStatusCode());
    }
    /**
     * Polls the transform internal indices (current and deprecated prefixes)
     * for the latest persisted state/stats document of the given transform and
     * runs the supplied assertion against the search response body.
     */
    private void awaitWrittenIndexerState(String id, Consumer<Map<?, ?>> responseAssertion) throws Exception {
        Request getStatsDocsRequest = new Request(
            "GET",
            TRANSFORM_INTERNAL_INDEX_PREFIX + "*," + TRANSFORM_INTERNAL_INDEX_PREFIX_DEPRECATED + "*" + "/_search"
        );
        getStatsDocsRequest.setJsonEntity(
            "{\n"
                + "   \"query\": {\n"
                + "      \"bool\": {\n"
                + "         \"filter\": \n"
                + "            {\"term\": {\n"
                + "               \"_id\": \"data_frame_transform_state_and_stats-"
                + id
                + "\"\n"
                + "            }}\n"
                + "      }\n"
                + "   },\n"
                + "   \"sort\": [\n"
                + "      {\n"
                + "         \"_index\": {\n"
                + "            \"order\": \"desc\"\n"
                + "         }\n"
                + "      }\n"
                + "   ],\n"
                + "   \"size\": 1\n"
                + "}"
        );
        assertBusy(() -> {
            // Want to make sure we get the latest docs
            client().performRequest(new Request("POST", TRANSFORM_INTERNAL_INDEX_PREFIX + "*/_refresh"));
            client().performRequest(new Request("POST", TRANSFORM_INTERNAL_INDEX_PREFIX_DEPRECATED + "*/_refresh"));
            Response response = client().performRequest(getStatsDocsRequest);
            assertEquals(200, response.getStatusLine().getStatusCode());
            Map<String, Object> responseBody = entityAsMap(response);
            assertEquals("expected only 1 hit, got: " + responseBody, 1, XContentMapValues.extractValue("hits.total.value", responseBody));
            responseAssertion.accept(responseBody);
        }, 60, TimeUnit.SECONDS);
    }
    /** Convenience overload asserting the persisted indexer state equals the given value. */
    private void awaitWrittenIndexerState(String id, String indexerState) throws Exception {
        awaitWrittenIndexerState(id, (responseBody) -> {
            String storedState = ((List<?>) XContentMapValues.extractValue("hits.hits._source.state.indexer_state", responseBody)).get(0)
                .toString();
            assertThat(storedState, equalTo(indexerState));
        });
    }
    private String getTransformEndpoint() {
        return TRANSFORM_ENDPOINT;
    }
    /** PUTs the given transform config under the given id. */
    private void putTransform(String id, TransformConfig config) throws IOException {
        final Request createDataframeTransformRequest = new Request("PUT", getTransformEndpoint() + id);
        createDataframeTransformRequest.setJsonEntity(Strings.toString(config));
        Response response = client().performRequest(createDataframeTransformRequest);
        assertEquals(200, response.getStatusLine().getStatusCode());
    }
    private void deleteTransform(String id) throws IOException {
        Response response = client().performRequest(new Request("DELETE", getTransformEndpoint() + id));
        assertEquals(200, response.getStatusLine().getStatusCode());
    }
    private void startTransform(String id) throws IOException {
        final Request startDataframeTransformRequest = new Request("POST", getTransformEndpoint() + id + "/_start");
        Response response = client().performRequest(startDataframeTransformRequest);
        assertEquals(200, response.getStatusLine().getStatusCode());
    }
    /** Stops the transform synchronously (wait_for_completion=true). */
    private void stopTransform(String id) throws IOException {
        final Request stopDataframeTransformRequest = new Request("POST", getTransformEndpoint() + id + "/_stop?wait_for_completion=true");
        Response response = client().performRequest(stopDataframeTransformRequest);
        assertEquals(200, response.getStatusLine().getStatusCode());
    }
    /** Fetches and parses the stats of the single transform with the given id. */
    private TransformStats getTransformStats(String id) throws IOException {
        final Request getStats = new Request("GET", getTransformEndpoint() + id + "/_stats");
        Response response = client().performRequest(getStats);
        assertEquals(200, response.getStatusLine().getStatusCode());
        XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue());
        try (
            XContentParser parser = xContentType.xContent()
                .createParser(
                    NamedXContentRegistry.EMPTY,
                    DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                    response.getEntity().getContent()
                )
        ) {
            GetTransformStatsResponse resp = GetTransformStatsResponse.fromXContent(parser);
            assertThat(resp.getTransformsStats(), hasSize(1));
            return resp.getTransformsStats().get(0);
        }
    }
    /** Blocks until the transform's last checkpoint is strictly greater than the given one. */
    private void waitUntilAfterCheckpoint(String id, long currentCheckpoint) throws Exception {
        assertBusy(
            () -> assertThat(getTransformStats(id).getCheckpointingInfo().getLast().getCheckpoint(), greaterThan(currentCheckpoint)),
            60,
            TimeUnit.SECONDS
        );
    }
    /** Creates the source index with timestamp/user_id/stars mappings. */
    private void createIndex(String indexName) throws IOException {
        // create mapping
        try (XContentBuilder builder = jsonBuilder()) {
            builder.startObject();
            {
                builder.startObject("mappings")
                    .startObject("properties")
                    .startObject("timestamp")
                    .field("type", "date")
                    .endObject()
                    .startObject("user_id")
                    .field("type", "keyword")
                    .endObject()
                    .startObject("stars")
                    .field("type", "integer")
                    .endObject()
                    .endObject()
                    .endObject();
            }
            builder.endObject();
            final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
            Request req = new Request("PUT", indexName);
            req.setEntity(entity);
            assertThat(client().performRequest(req).getStatusLine().getStatusCode(), equalTo(200));
        }
    }
    /**
     * Bulk-indexes numDocs documents per entity into indexName, all stamped
     * fromTime in the past, with an immediate refresh so the transform can
     * see them.
     */
    private void putData(String indexName, int numDocs, TimeValue fromTime, List<String> entityIds) throws IOException {
        long timeStamp = Instant.now().toEpochMilli() - fromTime.getMillis();
        // build the NDJSON bulk request body
        final StringBuilder bulk = new StringBuilder();
        for (int i = 0; i < numDocs; i++) {
            for (String entity : entityIds) {
                bulk.append("{\"index\":{\"_index\":\"" + indexName + "\"}}\n")
                    .append("{\"user_id\":\"")
                    .append(entity)
                    .append("\",\"stars\":")
                    .append(randomLongBetween(0, 5))
                    .append(",\"timestamp\":")
                    .append(timeStamp)
                    .append("}\n");
            }
        }
        bulk.append("\r\n");
        final Request bulkRequest = new Request("POST", "/_bulk");
        bulkRequest.addParameter("refresh", "true");
        bulkRequest.setJsonEntity(bulk.toString());
        entityAsMap(client().performRequest(bulkRequest));
    }
}
|
|
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.hyracks.storage.am.btree.impls;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
public class FieldPrefixSlotManager implements IPrefixSlotManager {
public static final int TUPLE_UNCOMPRESSED = 0xFF;
public static final int MAX_PREFIX_SLOTS = 0xFE;
public static final int GREATEST_KEY_INDICATOR = 0x00FFFFFF;
public static final int ERROR_INDICATOR = 0x00FFFFFE;
private static final int slotSize = 4;
private ByteBuffer buf;
private BTreeFieldPrefixNSMLeafFrame frame;
private MultiComparator cmp;
public int decodeFirstSlotField(int slot) {
return (slot & 0xFF000000) >>> 24;
}
public int decodeSecondSlotField(int slot) {
return slot & 0x00FFFFFF;
}
public int encodeSlotFields(int firstField, int secondField) {
return ((firstField & 0x000000FF) << 24) | (secondField & 0x00FFFFFF);
}
// returns prefix slot number, or TUPLE_UNCOMPRESSED of no match was found
public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple) throws HyracksDataException {
int prefixMid;
int prefixBegin = 0;
int prefixEnd = frame.getPrefixTupleCount() - 1;
if (frame.getPrefixTupleCount() > 0) {
while (prefixBegin <= prefixEnd) {
prefixMid = (prefixBegin + prefixEnd) / 2;
framePrefixTuple.resetByTupleIndex(frame, prefixMid);
int cmpVal = cmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
if (cmpVal < 0)
prefixEnd = prefixMid - 1;
else if (cmpVal > 0)
prefixBegin = prefixMid + 1;
else
return prefixMid;
}
}
return FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
}
@Override
public int findSlot(ITupleReference searchKey, ITreeIndexTupleReference frameTuple,
ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
FindTupleNoExactMatchPolicy matchPolicy) throws HyracksDataException {
if (frame.getTupleCount() <= 0)
encodeSlotFields(TUPLE_UNCOMPRESSED, GREATEST_KEY_INDICATOR);
int prefixMid;
int prefixBegin = 0;
int prefixEnd = frame.getPrefixTupleCount() - 1;
int prefixMatch = TUPLE_UNCOMPRESSED;
// bounds are inclusive on both ends
int tuplePrefixSlotNumLbound = prefixBegin;
int tuplePrefixSlotNumUbound = prefixEnd;
// binary search on the prefix slots to determine upper and lower bounds
// for the prefixSlotNums in tuple slots
while (prefixBegin <= prefixEnd) {
prefixMid = (prefixBegin + prefixEnd) / 2;
framePrefixTuple.resetByTupleIndex(frame, prefixMid);
int cmp = multiCmp.fieldRangeCompare(searchKey, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
if (cmp < 0) {
prefixEnd = prefixMid - 1;
tuplePrefixSlotNumLbound = prefixMid - 1;
} else if (cmp > 0) {
prefixBegin = prefixMid + 1;
tuplePrefixSlotNumUbound = prefixMid + 1;
} else {
if (mode == FindTupleMode.EXCLUSIVE) {
if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY)
prefixBegin = prefixMid + 1;
else
prefixEnd = prefixMid - 1;
} else {
tuplePrefixSlotNumLbound = prefixMid;
tuplePrefixSlotNumUbound = prefixMid;
prefixMatch = prefixMid;
}
break;
}
}
int tupleMid = -1;
int tupleBegin = 0;
int tupleEnd = frame.getTupleCount() - 1;
// binary search on tuples, guided by the lower and upper bounds on prefixSlotNum
while (tupleBegin <= tupleEnd) {
tupleMid = (tupleBegin + tupleEnd) / 2;
int tupleSlotOff = getTupleSlotOff(tupleMid);
int tupleSlot = buf.getInt(tupleSlotOff);
int prefixSlotNum = decodeFirstSlotField(tupleSlot);
int cmp = 0;
if (prefixSlotNum == TUPLE_UNCOMPRESSED) {
frameTuple.resetByTupleIndex(frame, tupleMid);
cmp = multiCmp.compare(searchKey, frameTuple);
} else {
if (prefixSlotNum < tuplePrefixSlotNumLbound)
cmp = 1;
else if (prefixSlotNum > tuplePrefixSlotNumUbound)
cmp = -1;
else {
frameTuple.resetByTupleIndex(frame, tupleMid);
cmp = multiCmp.compare(searchKey, frameTuple);
}
}
if (cmp < 0)
tupleEnd = tupleMid - 1;
else if (cmp > 0)
tupleBegin = tupleMid + 1;
else {
if (mode == FindTupleMode.EXCLUSIVE) {
if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY)
tupleBegin = tupleMid + 1;
else
tupleEnd = tupleMid - 1;
} else {
if (mode == FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS) {
return encodeSlotFields(prefixMatch, ERROR_INDICATOR);
} else {
return encodeSlotFields(prefixMatch, tupleMid);
}
}
}
}
if (mode == FindTupleMode.EXACT)
return encodeSlotFields(prefixMatch, ERROR_INDICATOR);
// do final comparison to determine whether the search key is greater
// than all keys or in between some existing keys
if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
if (tupleBegin > frame.getTupleCount() - 1)
return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
frameTuple.resetByTupleIndex(frame, tupleBegin);
if (multiCmp.compare(searchKey, frameTuple) < 0)
return encodeSlotFields(prefixMatch, tupleBegin);
else
return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
} else {
if (tupleEnd < 0)
return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
frameTuple.resetByTupleIndex(frame, tupleEnd);
if (multiCmp.compare(searchKey, frameTuple) > 0)
return encodeSlotFields(prefixMatch, tupleEnd);
else
return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
}
}
public int getPrefixSlotStartOff() {
return buf.capacity() - slotSize;
}
public int getPrefixSlotEndOff() {
return buf.capacity() - slotSize * frame.getPrefixTupleCount();
}
public int getTupleSlotStartOff() {
return getPrefixSlotEndOff() - slotSize;
}
public int getTupleSlotEndOff() {
return buf.capacity() - slotSize * (frame.getPrefixTupleCount() + frame.getTupleCount());
}
public int getSlotSize() {
return slotSize;
}
public void setSlot(int offset, int value) {
frame.getBuffer().putInt(offset, value);
}
public int insertSlot(int slot, int tupleOff) {
int slotNum = decodeSecondSlotField(slot);
if (slotNum == ERROR_INDICATOR) {
System.out.println("WOW BIG PROBLEM!");
}
if (slotNum == GREATEST_KEY_INDICATOR) {
int slotOff = getTupleSlotEndOff() - slotSize;
int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
setSlot(slotOff, newSlot);
return newSlot;
} else {
int slotEndOff = getTupleSlotEndOff();
int slotOff = getTupleSlotOff(slotNum);
int length = (slotOff - slotEndOff) + slotSize;
System.arraycopy(frame.getBuffer().array(), slotEndOff, frame.getBuffer().array(), slotEndOff - slotSize,
length);
int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
setSlot(slotOff, newSlot);
return newSlot;
}
}
public int getPrefixSlotOff(int tupleIndex) {
return getPrefixSlotStartOff() - tupleIndex * slotSize;
}
public int getTupleSlotOff(int tupleIndex) {
return getTupleSlotStartOff() - tupleIndex * slotSize;
}
public void setPrefixSlot(int tupleIndex, int slot) {
buf.putInt(getPrefixSlotOff(tupleIndex), slot);
}
@Override
public int getGreatestKeyIndicator() {
return GREATEST_KEY_INDICATOR;
}
@Override
public int getErrorIndicator() {
return ERROR_INDICATOR;
}
@Override
public void setFrame(ITreeIndexFrame frame) {
this.frame = (BTreeFieldPrefixNSMLeafFrame) frame;
this.buf = frame.getBuffer();
}
@Override
public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public int getSlotStartOff() {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public int getSlotEndOff() {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public int getTupleOff(int slotOff) {
throw new UnsupportedOperationException("Not implemented.");
}
@Override
public int getSlotOff(int tupleIndex) {
throw new UnsupportedOperationException("Not implemented.");
}
public void setMultiComparator(MultiComparator cmp) {
this.cmp = cmp;
}
}
|
|
package org.ovirt.engine.core.bll;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.ovirt.engine.core.common.action.MoveOrCopyImageGroupParameters;
import org.ovirt.engine.core.common.businessentities.DiskImage;
import org.ovirt.engine.core.common.businessentities.ImageOperation;
import org.ovirt.engine.core.common.businessentities.StorageDomainStatus;
import org.ovirt.engine.core.common.businessentities.StorageType;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.VmDevice;
import org.ovirt.engine.core.common.businessentities.VmDeviceId;
import org.ovirt.engine.core.common.businessentities.VmEntityType;
import org.ovirt.engine.core.common.businessentities.storage_domains;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.dal.VdcBllMessages;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogableBaseMockUtils;
import org.ovirt.engine.core.dao.DiskImageDAO;
import org.ovirt.engine.core.dao.StorageDomainDAO;
import org.ovirt.engine.core.dao.VmDAO;
import org.ovirt.engine.core.dao.VmDeviceDAO;
import org.ovirt.engine.core.dao.VmTemplateDAO;
@RunWith(MockitoJUnitRunner.class)
/**
 * Unit tests for the canDoAction validation of {@code MoveOrCopyDiskCommand}.
 * Each test wires up only the mocks needed to reach the validation branch under
 * test, then asserts that canDoAction fails with the expected message.
 */
public class MoveOrCopyDiskCommandTest {
    private final Guid diskImageGuid = Guid.NewGuid();
    // Not final: canDoActionSameSourceAndDest overwrites it with srcStorageId.
    private Guid destStorageId = Guid.NewGuid();
    private final Guid srcStorageId = Guid.NewGuid();
    @Mock
    private DiskImageDAO diskImageDao;
    @Mock
    private StorageDomainDAO storageDomainDao;
    @Mock
    private VmDAO vmDao;
    @Mock
    private VmTemplateDAO vmTemplateDao;
    @Mock
    private VmDeviceDAO vmDeviceDao;
    /**
     * The command under test.
     */
    protected MoveOrCopyDiskCommand<MoveOrCopyImageGroupParameters> command;
    // Moving a disk whose image cannot be found (neither active nor snapshot) must fail.
    @Test
    public void canDoActionImageNotFound() throws Exception {
        initializeCommand(ImageOperation.Move);
        when(diskImageDao.get(any(Guid.class))).thenReturn(null);
        when(diskImageDao.getSnapshotById(any(Guid.class))).thenReturn(null);
        assertFalse(command.canDoAction());
        assertTrue(command.getReturnValue()
                .getCanDoActionMessages()
                .contains(VdcBllMessages.ACTION_TYPE_FAILED_DISK_NOT_EXIST.toString()));
    }
    // Move is only valid for VM disks; a template disk must be rejected.
    @Test
    public void canDoActionWrongDiskImageTypeTemplate() throws Exception {
        initializeCommand(ImageOperation.Move);
        initTemplateDiskImage();
        assertFalse(command.canDoAction());
        assertTrue(command.getReturnValue()
                .getCanDoActionMessages()
                .contains(VdcBllMessages.ACTION_TYPE_FAILED_DISK_IS_NOT_VM_DISK.toString()));
    }
    // Copy is only valid for template disks; a VM disk must be rejected.
    @Test
    public void canDoActionWrongDiskImageTypeVm() throws Exception {
        initializeCommand(ImageOperation.Copy);
        initVmDiskImage();
        doReturn(vmTemplateDao).when(command).getVmTemplateDAO();
        assertFalse(command.canDoAction());
        assertTrue(command.getReturnValue()
                .getCanDoActionMessages()
                .contains(VdcBllMessages.ACTION_TYPE_FAILED_DISK_IS_NOT_TEMPLATE_DISK.toString()));
    }
    // Copying a template disk whose template no longer exists must fail.
    @Test
    public void canDoActionCanNotFindTemplet() throws Exception {
        initializeCommand(ImageOperation.Copy);
        initTemplateDiskImage();
        doReturn(vmTemplateDao).when(command).getVmTemplateDAO();
        when(vmTemplateDao.get(any(Guid.class))).thenReturn(null);
        assertFalse(command.canDoAction());
        assertTrue(command.getReturnValue()
                .getCanDoActionMessages()
                .contains(VdcBllMessages.ACTION_TYPE_FAILED_TEMPLATE_DOES_NOT_EXIST.toString()));
    }
    // Moving a disk onto the storage domain it already lives on must fail.
    @Test
    public void canDoActionSameSourceAndDest() throws Exception {
        destStorageId = srcStorageId;
        initializeCommand(ImageOperation.Move);
        initVmDiskImage();
        initVm();
        initSrcStorageDomain();
        assertFalse(command.canDoAction());
        assertTrue(command.getReturnValue()
                .getCanDoActionMessages()
                .contains(VdcBllMessages.ACTION_TYPE_FAILED_SOURCE_AND_TARGET_SAME.toString()));
    }
    // A plugged disk on a VM that is not down cannot be moved.
    @Test
    public void canDoActionVmIsNotDown() throws Exception {
        initializeCommand(ImageOperation.Move);
        initVmDiskImage();
        initVm();
        initSrcStorageDomain();
        initDestStorageDomain();
        initVmDevice();
        doReturn(vmDeviceDao).when(command).getVmDeviceDAO();
        assertFalse(command.canDoAction());
        assertTrue(command.getReturnValue()
                .getCanDoActionMessages()
                .contains(VdcBllMessages.ACTION_TYPE_FAILED_VM_IS_NOT_DOWN.toString()));
    }
    // Stubs the VM DAO with a powered-down VM and a disk-to-VMs listing.
    protected void initVm() {
        VM vm = new VM();
        vm.setstatus(VMStatus.PoweredDown);
        AuditLogableBaseMockUtils.mockVmDao(command, vmDao);
        when(vmDao.get(any(Guid.class))).thenReturn(vm);
        mockGetVmsListForDisk();
    }
    // Two VMs (PoweredDown and Down) share the disk in the mocked listing.
    private void mockGetVmsListForDisk() {
        List<VM> vmList = new ArrayList<VM>();
        VM vm1 = new VM();
        vm1.setstatus(VMStatus.PoweredDown);
        VM vm2 = new VM();
        vm2.setstatus(VMStatus.Down);
        vmList.add(vm1);
        vmList.add(vm2);
        when(vmDao.getVmsListForDisk(any(Guid.class))).thenReturn(vmList);
    }
    // Stubs the source storage domain as Active via the storage-domain DAO.
    private void initSrcStorageDomain() {
        storage_domains stDomain = new storage_domains();
        stDomain.setstatus(StorageDomainStatus.Active);
        doReturn(storageDomainDao).when(command).getStorageDomainDAO();
        when(storageDomainDao.getForStoragePool(any(Guid.class), any(Guid.class))).thenReturn(stDomain);
    }
    // Stubs the destination storage domain as an Active NFS domain.
    private void initDestStorageDomain() {
        storage_domains destDomain = new storage_domains();
        destDomain.setstatus(StorageDomainStatus.Active);
        destDomain.setstorage_type(StorageType.NFS);
        doReturn(destDomain).when(command).getStorageDomain();
    }
    // Stubs the disk's VM device as plugged (needed for the VM-not-down check).
    private void initVmDevice() {
        VmDevice vmDevice = new VmDevice();
        vmDevice.setIsPlugged(true);
        when(vmDeviceDao.get(any(VmDeviceId.class))).thenReturn(vmDevice);
    }
    // Creates the spied command and stubs away storage/image checks that are
    // not under test, so each test only exercises its own validation branch.
    @SuppressWarnings("unchecked")
    protected void initializeCommand(ImageOperation operation) {
        command = spy(new MoveOrCopyDiskCommandDummy(new MoveOrCopyImageGroupParameters(diskImageGuid,
                srcStorageId,
                destStorageId,
                operation)));
        // Spy away the storage domain checker methods
        doReturn(true).when(command).isStorageDomainSpaceBelowThresholds();
        doReturn(true).when(command).doesStorageDomainHaveSpaceForRequest(anyLong());
        // Spy away the image handler methods
        doReturn(true).when(command).checkImageConfiguration(any(List.class));
        doReturn(Collections.emptyList()).when(command).getAllImageSnapshots();
        doReturn(false).when(command).acquireLock();
    }
    // Makes the disk-image DAO return a TEMPLATE-typed disk image.
    private void initTemplateDiskImage() {
        DiskImage diskImage = new DiskImage();
        diskImage.setVmEntityType(VmEntityType.TEMPLATE);
        when(diskImageDao.get(any(Guid.class))).thenReturn(diskImage);
    }
    // Makes the disk-image DAO return a VM-typed disk image.
    private void initVmDiskImage() {
        DiskImage diskImage = new DiskImage();
        diskImage.setVmEntityType(VmEntityType.VM);
        when(diskImageDao.get(any(Guid.class))).thenReturn(diskImage);
    }
    /**
     * The following class is created in order to allow to use a mock diskImageDao in constructor
     */
    private class MoveOrCopyDiskCommandDummy extends MoveOrCopyDiskCommand<MoveOrCopyImageGroupParameters> {
        private static final long serialVersionUID = -1781827271090649224L;
        public MoveOrCopyDiskCommandDummy(MoveOrCopyImageGroupParameters parameters) {
            super(parameters);
        }
        @Override
        protected DiskImageDAO getDiskImageDao() {
            return diskImageDao;
        }
        // Bypass real locking so tests don't touch the engine lock manager.
        @Override
        protected boolean acquireLockInternal() {
            return true;
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.transforms;
import org.apache.kafka.common.cache.Cache;
import org.apache.kafka.common.cache.LRUCache;
import org.apache.kafka.common.cache.SynchronizedCache;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.data.ConnectSchema;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.errors.DataException;
import org.apache.kafka.connect.transforms.util.SchemaUtil;
import org.apache.kafka.connect.transforms.util.SimpleConfig;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.apache.kafka.connect.transforms.util.Requirements.requireMap;
import static org.apache.kafka.connect.transforms.util.Requirements.requireStructOrNull;
/**
 * Single Message Transform that flattens a nested data structure, naming each
 * flattened field by joining the ancestor field names with a configurable
 * delimiter. Operates on {@link Struct} values when a schema is present and on
 * {@link Map} values for schemaless data; array fields and their contents are
 * passed through unmodified. Use the {@link Key} or {@link Value} subclass to
 * target the record key or value respectively.
 */
public abstract class Flatten<R extends ConnectRecord<R>> implements Transformation<R> {
    public static final String OVERVIEW_DOC =
            "Flatten a nested data structure, generating names for each field by concatenating the field names at each "
                    + "level with a configurable delimiter character. Applies to Struct when schema present, or a Map "
                    + "in the case of schemaless data. Array fields and their contents are not modified. The default delimiter is '.'."
                    + "<p/>Use the concrete transformation type designed for the record key (<code>" + Key.class.getName() + "</code>) "
                    + "or value (<code>" + Value.class.getName() + "</code>).";
    private static final String DELIMITER_CONFIG = "delimiter";
    private static final String DELIMITER_DEFAULT = ".";
    public static final ConfigDef CONFIG_DEF = new ConfigDef()
            .define(DELIMITER_CONFIG, ConfigDef.Type.STRING, DELIMITER_DEFAULT, ConfigDef.Importance.MEDIUM,
                    "Delimiter to insert between field names from the input record when generating field names for the "
                            + "output record");
    private static final String PURPOSE = "flattening";
    private String delimiter;
    // Caches flattened schemas per input schema so repeated records with the
    // same schema don't rebuild the translation.
    private Cache<Schema, Schema> schemaUpdateCache;

    @Override
    public void configure(Map<String, ?> props) {
        final SimpleConfig config = new SimpleConfig(CONFIG_DEF, props);
        delimiter = config.getString(DELIMITER_CONFIG);
        schemaUpdateCache = new SynchronizedCache<>(new LRUCache<>(16));
    }

    /** Dispatches to the schemaless or schema-aware path; null payloads pass through. */
    @Override
    public R apply(R record) {
        if (operatingValue(record) == null) {
            return record;
        } else if (operatingSchema(record) == null) {
            return applySchemaless(record);
        } else {
            return applyWithSchema(record);
        }
    }

    @Override
    public void close() {
    }

    @Override
    public ConfigDef config() {
        return CONFIG_DEF;
    }

    /** Schema of the record part (key or value) this transform operates on. */
    protected abstract Schema operatingSchema(R record);

    /** The record part (key or value) this transform operates on. */
    protected abstract Object operatingValue(R record);

    /** Builds a new record with the operated-on part replaced. */
    protected abstract R newRecord(R record, Schema updatedSchema, Object updatedValue);

    // Entry point for schemaless records: flattens the Map payload.
    private R applySchemaless(R record) {
        final Map<String, Object> value = requireMap(operatingValue(record), PURPOSE);
        final Map<String, Object> newValue = new LinkedHashMap<>();
        applySchemaless(value, "", newValue);
        return newRecord(record, null, newValue);
    }

    /**
     * Recursively copies {@code originalRecord} into {@code newRecord}, prefixing
     * each key with the delimiter-joined ancestor names. Nested maps recurse;
     * primitives and arrays are copied as-is; other types are rejected.
     */
    private void applySchemaless(Map<String, Object> originalRecord, String fieldNamePrefix, Map<String, Object> newRecord) {
        for (Map.Entry<String, Object> entry : originalRecord.entrySet()) {
            // Compute the flattened name once and reuse it below; the original
            // redundantly recomputed fieldName(...) in several branches.
            final String fieldName = fieldName(fieldNamePrefix, entry.getKey());
            final Object value = entry.getValue();
            if (value == null) {
                newRecord.put(fieldName, null);
                continue;
            }
            Schema.Type inferredType = ConnectSchema.schemaType(value.getClass());
            if (inferredType == null) {
                throw new DataException("Flatten transformation was passed a value of type " + value.getClass()
                        + " which is not supported by Connect's data API");
            }
            switch (inferredType) {
                case INT8:
                case INT16:
                case INT32:
                case INT64:
                case FLOAT32:
                case FLOAT64:
                case BOOLEAN:
                case STRING:
                case BYTES:
                case ARRAY:
                    newRecord.put(fieldName, value);
                    break;
                case MAP:
                    final Map<String, Object> fieldValue = requireMap(value, PURPOSE);
                    applySchemaless(fieldValue, fieldName, newRecord);
                    break;
                default:
                    throw new DataException("Flatten transformation does not support " + value.getClass()
                            + " for record without schemas (for field " + fieldName + ").");
            }
        }
    }

    // Entry point for schema-aware records: translates the schema (with caching)
    // and then copies the Struct contents into the flattened Struct.
    private R applyWithSchema(R record) {
        final Struct value = requireStructOrNull(operatingValue(record), PURPOSE);
        Schema schema = operatingSchema(record);
        Schema updatedSchema = schemaUpdateCache.get(schema);
        if (updatedSchema == null) {
            final SchemaBuilder builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());
            Struct defaultValue = (Struct) schema.defaultValue();
            buildUpdatedSchema(schema, "", builder, schema.isOptional(), defaultValue);
            updatedSchema = builder.build();
            schemaUpdateCache.put(schema, updatedSchema);
        }
        if (value == null) {
            return newRecord(record, updatedSchema, null);
        } else {
            final Struct updatedValue = new Struct(updatedSchema);
            buildWithSchema(value, "", updatedValue);
            return newRecord(record, updatedSchema, updatedValue);
        }
    }

    /**
     * Build an updated Struct Schema which flattens all nested fields into a single struct, handling cases where
     * optionality and default values of the flattened fields are affected by the optionality and default values of
     * parent/ancestor schemas (e.g. flattened field is optional because the parent schema was optional, even if the
     * schema itself is marked as required).
     * @param schema the schema to translate
     * @param fieldNamePrefix the prefix to use on field names, i.e. the delimiter-joined set of ancestor field names
     * @param newSchema the flattened schema being built
     * @param optional true if any ancestor schema is optional
     * @param defaultFromParent the default value, if any, included via the parent/ancestor schemas
     */
    private void buildUpdatedSchema(Schema schema, String fieldNamePrefix, SchemaBuilder newSchema, boolean optional, Struct defaultFromParent) {
        for (Field field : schema.fields()) {
            final String fieldName = fieldName(fieldNamePrefix, field.name());
            final boolean fieldIsOptional = optional || field.schema().isOptional();
            // A field's own default wins over one inherited from an ancestor default Struct.
            Object fieldDefaultValue = null;
            if (field.schema().defaultValue() != null) {
                fieldDefaultValue = field.schema().defaultValue();
            } else if (defaultFromParent != null) {
                fieldDefaultValue = defaultFromParent.get(field);
            }
            switch (field.schema().type()) {
                case INT8:
                case INT16:
                case INT32:
                case INT64:
                case FLOAT32:
                case FLOAT64:
                case BOOLEAN:
                case STRING:
                case BYTES:
                case ARRAY:
                    newSchema.field(fieldName, convertFieldSchema(field.schema(), fieldIsOptional, fieldDefaultValue));
                    break;
                case STRUCT:
                    buildUpdatedSchema(field.schema(), fieldName, newSchema, fieldIsOptional, (Struct) fieldDefaultValue);
                    break;
                default:
                    throw new DataException("Flatten transformation does not support " + field.schema().type()
                            + " for record with schemas (for field " + fieldName + ").");
            }
        }
    }

    /**
     * Convert the schema for a field of a Struct with a primitive schema to the schema to be used for the flattened
     * version, taking into account that we may need to override optionality and default values in the flattened version
     * to take into account the optionality and default values of parent/ancestor schemas
     * @param orig the original schema for the field
     * @param optional whether the new flattened field should be optional
     * @param defaultFromParent the default value either taken from the existing field or provided by the parent
     */
    private Schema convertFieldSchema(Schema orig, boolean optional, Object defaultFromParent) {
        // Note that we don't use the schema translation cache here. It might save us a bit of effort, but we really
        // only care about caching top-level schema translations.
        final SchemaBuilder builder = SchemaUtil.copySchemaBasics(orig);
        if (optional)
            builder.optional();
        if (defaultFromParent != null)
            builder.defaultValue(defaultFromParent);
        return builder.build();
    }

    /** Recursively copies Struct contents into the flattened Struct, mirroring buildUpdatedSchema. */
    private void buildWithSchema(Struct record, String fieldNamePrefix, Struct newRecord) {
        if (record == null) {
            return;
        }
        for (Field field : record.schema().fields()) {
            final String fieldName = fieldName(fieldNamePrefix, field.name());
            switch (field.schema().type()) {
                case INT8:
                case INT16:
                case INT32:
                case INT64:
                case FLOAT32:
                case FLOAT64:
                case BOOLEAN:
                case STRING:
                case BYTES:
                case ARRAY:
                    newRecord.put(fieldName, record.get(field));
                    break;
                case STRUCT:
                    buildWithSchema(record.getStruct(field.name()), fieldName, newRecord);
                    break;
                default:
                    throw new DataException("Flatten transformation does not support " + field.schema().type()
                            + " for record with schemas (for field " + fieldName + ").");
            }
        }
    }

    /** Joins a (possibly empty) ancestor prefix with a field name using the configured delimiter. */
    private String fieldName(String prefix, String fieldName) {
        return prefix.isEmpty() ? fieldName : (prefix + delimiter + fieldName);
    }

    /** Flatten applied to the record key. */
    public static class Key<R extends ConnectRecord<R>> extends Flatten<R> {
        @Override
        protected Schema operatingSchema(R record) {
            return record.keySchema();
        }
        @Override
        protected Object operatingValue(R record) {
            return record.key();
        }
        @Override
        protected R newRecord(R record, Schema updatedSchema, Object updatedValue) {
            return record.newRecord(record.topic(), record.kafkaPartition(), updatedSchema, updatedValue, record.valueSchema(), record.value(), record.timestamp());
        }
    }

    /** Flatten applied to the record value. */
    public static class Value<R extends ConnectRecord<R>> extends Flatten<R> {
        @Override
        protected Schema operatingSchema(R record) {
            return record.valueSchema();
        }
        @Override
        protected Object operatingValue(R record) {
            return record.value();
        }
        @Override
        protected R newRecord(R record, Schema updatedSchema, Object updatedValue) {
            return record.newRecord(record.topic(), record.kafkaPartition(), record.keySchema(), record.key(), updatedSchema, updatedValue, record.timestamp());
        }
    }
}
|
|
/*
Licensed to the Court of the University of Edinburgh (UofE) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The UofE licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.pathwayeditor.visualeditor.ui;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import javax.swing.BorderFactory;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JColorChooser;
import javax.swing.JComboBox;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.pathwayeditor.businessobjects.drawingprimitives.attributes.Colour;
import org.pathwayeditor.businessobjects.drawingprimitives.attributes.RGB;
import org.pathwayeditor.visualeditor.commands.ChangeShapeFillPropertyChange;
import org.pathwayeditor.visualeditor.commands.ChangeShapeLinePropertyChange;
import org.pathwayeditor.visualeditor.commands.ChangeShapeLineWidth;
import org.pathwayeditor.visualeditor.commands.CompoundCommand;
import org.pathwayeditor.visualeditor.commands.ICommand;
import org.pathwayeditor.visualeditor.commands.ICompoundCommand;
import org.pathwayeditor.visualeditor.controller.IShapeController;
public class ShapeFormatDialog extends JDialog implements ActionListener, FocusListener {
private static final long serialVersionUID = 1L;
private static final String OK_CMD = "ok_cmd";
private static final String CANCEL_CMD = "cancel_cmd";
private static final Object[] LINE_WIDTH_OPTION = new Integer[] { new Integer(1), new Integer(2), new Integer(4), new Integer(6), new Integer(8), new Integer(10) };
private static final double MIN_LINE_WIDTH = 1.0;
private static final int MIN_TRANS = 0;
private static final int MAX_TRANS = 100;
private static final int TRANS_INIT = MAX_TRANS;
private final JPanel fillPanel = new JPanel();
private final JPanel linePanel = new JPanel();
private final JPanel buttonPanel = new JPanel();
private final JButton okButton = new JButton("OK");
private final JButton cancelButton = new JButton("Cancel");
private IShapeController selectedShape;
private JPanel lineColourLabel;
private JPanel fillColourLabel;
private ICompoundCommand latestCommand;
private JComboBox lineWidthCombo;
private JSlider lineTransSlider;
private JSlider fillTransSlider;
    /**
     * Creates the modal "Format Shape" dialog, stacking the line panel, fill
     * panel and OK/Cancel buttons vertically.
     *
     * @param frame the parent frame; the dialog is modal with respect to it
     */
    public ShapeFormatDialog(JFrame frame){
        super(frame, true);
        setTitle("Format Shape");
        this.setLayout(new BoxLayout(this.getContentPane(), BoxLayout.PAGE_AXIS));
        layoutLinePanel();
        layoutFillColourPanel();
        this.okButton.addActionListener(this);
        this.okButton.setActionCommand(OK_CMD);
        this.cancelButton.addActionListener(this);
        this.cancelButton.setActionCommand(CANCEL_CMD);
        this.buttonPanel.add(okButton);
        this.buttonPanel.add(cancelButton);
        this.add(linePanel);
        this.add(fillPanel);
        this.add(buttonPanel);
        this.addComponentListener(new ComponentListener(){
            @Override
            public void componentHidden(ComponentEvent e) {
            }
            @Override
            public void componentMoved(ComponentEvent arg0) {
            }
            @Override
            public void componentResized(ComponentEvent arg0) {
            }
            // Start a fresh compound command each time the dialog is shown, so
            // edits from a previous session are not replayed.
            @Override
            public void componentShown(ComponentEvent arg0) {
                latestCommand = new CompoundCommand();
            }
        });
        this.pack();
    }
    /**
     * Builds the "Line" panel: a colour swatch with a chooser button, a
     * transparency slider, and a line-width combo box, laid out on a 3-row grid.
     */
    private void layoutLinePanel(){
        linePanel.setBorder(BorderFactory.createCompoundBorder(
                BorderFactory.createTitledBorder("Line"),
                BorderFactory.createEmptyBorder(5,5,5,5)));
        linePanel.setLayout(new GridBagLayout());
        JLabel colourLabel = new JLabel("Colour");
        // The swatch is a plain JPanel whose background colour carries the
        // current line colour (including alpha).
        lineColourLabel = new JPanel();
        lineColourLabel.setPreferredSize(new Dimension(100, 20));
        JButton colorDialogButton = new JButton("...");
        colorDialogButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                Color lineColour = JColorChooser.showDialog(ShapeFormatDialog.this, "Line Colour", lineColourLabel.getBackground());
                // Keep the previous alpha: the chooser only picks RGB, while
                // transparency is controlled by the slider below.
                // NOTE(review): if the chooser is cancelled it returns null —
                // presumably unhandled here; confirm and guard upstream.
                Color origColour = lineColourLabel.getBackground();
                lineColourLabel.setBackground(new Color(lineColour.getRed(), lineColour.getGreen(), lineColour.getBlue(), origColour.getAlpha()));
                linePanel.repaint();
            }
        });
        // Row 0: colour label, swatch, chooser button.
        GridBagConstraints c1 = new GridBagConstraints();
        c1.gridx = 0;
        c1.gridy = 0;
        c1.ipadx = 6;
        linePanel.add(colourLabel, c1);
        GridBagConstraints c2 = new GridBagConstraints();
        c2.gridx = 1;
        c2.gridy = 0;
        linePanel.add(lineColourLabel, c2);
        GridBagConstraints c3 = new GridBagConstraints();
        c3.gridx = 2;
        c3.gridy = 0;
        c3.ipadx = 6;
        linePanel.add(colorDialogButton, c3);
        JLabel transparencyLabel = new JLabel("Transparency");
        this.lineTransSlider = new JSlider(JSlider.HORIZONTAL, MIN_TRANS, MAX_TRANS, TRANS_INIT);
        this.lineTransSlider.setMajorTickSpacing(25);
        this.lineTransSlider.setMinorTickSpacing(10);
        this.lineTransSlider.setPaintTicks(true);
        this.lineTransSlider.setPaintLabels(true);
        this.lineTransSlider.setPaintTrack(true);
        this.lineTransSlider.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider)e.getSource();
                // Only act on the final value, not intermediate drag events.
                if(!source.getValueIsAdjusting()){
                    float alpha = source.getValue();
                    // Map the 0-100 slider percentage onto the colour model's alpha range.
                    int currentAlpha = Math.round((alpha/100.0f) * (float)Colour.OPAQUE);
                    Color col = lineColourLabel.getBackground();
                    Color newCol = new Color(col.getRed(), col.getGreen(), col.getBlue(), currentAlpha);
                    lineColourLabel.setBackground(newCol);
                    linePanel.repaint();
                }
            }
        });
        // Row 1: transparency label and slider (slider spans remaining columns).
        GridBagConstraints c6 = new GridBagConstraints();
        c6.gridx = 0;
        c6.gridy = 1;
        linePanel.add(transparencyLabel, c6);
        GridBagConstraints c7 = new GridBagConstraints();
        c7.gridx = 1;
        c7.gridy = 1;
        c7.gridwidth = GridBagConstraints.REMAINDER;
        linePanel.add(lineTransSlider, c7);
        // Row 2: fixed-choice line width combo.
        JLabel lineWidthLabel = new JLabel("Width");
        lineWidthCombo = new JComboBox(LINE_WIDTH_OPTION);
        GridBagConstraints c4 = new GridBagConstraints();
        c4.gridx = 0;
        c4.gridy = 2;
        linePanel.add(lineWidthLabel, c4);
        GridBagConstraints c5 = new GridBagConstraints();
        c5.gridx = 1;
        c5.gridy = 2;
        c5.fill = GridBagConstraints.HORIZONTAL;
        linePanel.add(lineWidthCombo, c5);
        lineWidthCombo.setEditable(false);
    }
/**
 * Lays out the "Fill" group: a colour swatch, a button that opens a colour
 * chooser, and a transparency slider that edits the swatch's alpha channel.
 */
private void layoutFillColourPanel(){
    fillPanel.setBorder(BorderFactory.createCompoundBorder(
            BorderFactory.createTitledBorder("Fill"),
            BorderFactory.createEmptyBorder(5, 5, 5, 5)));
    fillPanel.setLayout(new GridBagLayout());
    JLabel colourLabel = new JLabel("Colour");
    fillColourLabel = new JPanel();
    fillColourLabel.setPreferredSize(new Dimension(100, 20));
    JButton colorDialogButton = new JButton("...");
    colorDialogButton.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            Color fillColour = JColorChooser.showDialog(ShapeFormatDialog.this, "Fill Colour", fillColourLabel.getBackground());
            // showDialog() returns null when the user cancels; keep the current colour then.
            if (fillColour != null) {
                // Preserve the alpha previously chosen via the transparency slider.
                fillColourLabel.setBackground(new Color(fillColour.getRed(), fillColour.getGreen(), fillColour.getBlue(), fillColourLabel.getBackground().getAlpha()));
                fillPanel.repaint();
            }
        }
    });
    GridBagConstraints c1 = new GridBagConstraints();
    c1.gridx = 0;
    c1.gridy = 0;
    c1.ipadx = 6;
    fillPanel.add(colourLabel, c1);
    GridBagConstraints c2 = new GridBagConstraints();
    c2.gridx = 1;
    c2.gridy = 0;
    fillPanel.add(fillColourLabel, c2);
    GridBagConstraints c3 = new GridBagConstraints();
    c3.gridx = 2;
    c3.gridy = 0;
    c3.ipadx = 6;
    fillPanel.add(colorDialogButton, c3);
    JLabel transparencyLabel = new JLabel("Transparency");
    this.fillTransSlider = new JSlider(JSlider.HORIZONTAL, MIN_TRANS, MAX_TRANS, TRANS_INIT);
    this.fillTransSlider.setMajorTickSpacing(25);
    this.fillTransSlider.setMinorTickSpacing(10);
    this.fillTransSlider.setPaintTicks(true);
    this.fillTransSlider.setPaintLabels(true);
    this.fillTransSlider.setPaintTrack(true);
    this.fillTransSlider.addChangeListener(new ChangeListener() {
        @Override
        public void stateChanged(ChangeEvent e) {
            JSlider source = (JSlider) e.getSource();
            // Only react once the user releases the slider knob.
            if (!source.getValueIsAdjusting()) {
                float alpha = source.getValue();
                // Map the 0-100 slider range onto the 0-Colour.OPAQUE alpha range.
                int currentAlpha = Math.round((alpha / 100.0f) * (float) Colour.OPAQUE);
                Color col = fillColourLabel.getBackground();
                Color newCol = new Color(col.getRed(), col.getGreen(), col.getBlue(), currentAlpha);
                fillColourLabel.setBackground(newCol);
                fillPanel.repaint();
            }
        }
    });
    GridBagConstraints c6 = new GridBagConstraints();
    c6.gridx = 0;
    c6.gridy = 1;
    fillPanel.add(transparencyLabel, c6);
    GridBagConstraints c7 = new GridBagConstraints();
    c7.gridx = 1;
    c7.gridy = 1;
    c7.gridwidth = GridBagConstraints.REMAINDER;
    fillPanel.add(fillTransSlider, c7);
}
/**
 * Populates the dialog controls (fill colour, line colour, transparency
 * sliders and line-width combo) from the given shape's current attributes.
 *
 * @param shape the shape whose format is edited by this dialog
 */
public void setSelectedShape(IShapeController shape) {
    this.selectedShape = shape;
    Colour fillCol = this.selectedShape.getAssociatedAttribute().getFillColour();
    RGB fillRGB = fillCol.getRgb();
    Color fillColour = new Color(fillRGB.getRed(), fillRGB.getGreen(), fillRGB.getBlue(), fillCol.getAlpha());
    fillColourLabel.setBackground(fillColour);
    setFillTransparency(fillColour);
    Colour lineCol = this.selectedShape.getAssociatedAttribute().getLineColour();
    RGB lineRGB = lineCol.getRgb();
    Color lineColour = new Color(lineRGB.getRed(), lineRGB.getGreen(), lineRGB.getBlue(), lineCol.getAlpha());
    lineColourLabel.setBackground(lineColour);
    setLineTransparency(lineColour);
    // Clamp to MIN_LINE_WIDTH so the combo box always gets a valid entry.
    double lineWidth = Math.max(MIN_LINE_WIDTH, this.selectedShape.getAssociatedAttribute().getLineWidth());
    // Integer.valueOf instead of the deprecated new Integer(...) constructor.
    this.lineWidthCombo.setSelectedItem(Integer.valueOf((int) Math.round(lineWidth)));
}
/** Positions the line-transparency slider to reflect the alpha of the given colour. */
private void setLineTransparency(Color lineColour) {
    // Fraction of full opacity, scaled onto the slider's 0..MAX_TRANS range.
    final float fraction = lineColour.getAlpha() / (float) Colour.OPAQUE;
    this.lineTransSlider.setValue(Math.round(fraction * MAX_TRANS));
}
/** Positions the fill-transparency slider to reflect the alpha of the given colour. */
private void setFillTransparency(Color fillColour) {
    // Fraction of full opacity, scaled onto the slider's 0..MAX_TRANS range.
    final float fraction = fillColour.getAlpha() / (float) Colour.OPAQUE;
    this.fillTransSlider.setValue(Math.round(fraction * MAX_TRANS));
}
@Override
public void focusGained(FocusEvent arg0) {
    // Intentionally empty: the dialog takes no action on focus gain.
}
@Override
public void focusLost(FocusEvent arg0) {
    // Intentionally empty: the dialog takes no action on focus loss.
}
/**
 * Handles the OK and Cancel buttons. On OK, queues one undoable change for
 * each property (fill colour, line colour, line width) that actually differs
 * from the shape's current attribute, then hides the dialog.
 */
@Override
public void actionPerformed(ActionEvent e) {
    final String command = e.getActionCommand();
    if (command.equals(OK_CMD)) {
        Color chosenFill = this.fillColourLabel.getBackground();
        Colour newFill = new Colour(chosenFill.getRed(), chosenFill.getGreen(), chosenFill.getBlue(), chosenFill.getAlpha());
        if (!this.selectedShape.getAssociatedAttribute().getFillColour().equals(newFill)) {
            this.latestCommand.addCommand(new ChangeShapeFillPropertyChange(this.selectedShape.getAssociatedAttribute(), newFill));
        }
        Color chosenLine = this.lineColourLabel.getBackground();
        Colour newLine = new Colour(chosenLine.getRed(), chosenLine.getGreen(), chosenLine.getBlue(), chosenLine.getAlpha());
        if (!this.selectedShape.getAssociatedAttribute().getLineColour().equals(newLine)) {
            this.latestCommand.addCommand(new ChangeShapeLinePropertyChange(this.selectedShape.getAssociatedAttribute(), newLine));
        }
        Integer chosenWidth = (Integer) this.lineWidthCombo.getSelectedItem();
        if (this.selectedShape.getAssociatedAttribute().getLineWidth() != chosenWidth.doubleValue()) {
            this.latestCommand.addCommand(new ChangeShapeLineWidth(this.selectedShape.getAssociatedAttribute(), chosenWidth.doubleValue()));
        }
        this.setVisible(false);
    } else if (command.equals(CANCEL_CMD)) {
        this.setVisible(false);
    }
}
/** @return the command accumulating the format changes made via this dialog */
public ICommand getCommand() {
    return latestCommand;
}
/** @return true when at least one format change has been queued */
public boolean hasFormatChanged() {
    return !latestCommand.isEmpty();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.runtime;
import static org.apache.hyracks.util.ThreadDumpUtil.takeDumpJSONString;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.asterix.app.external.ExternalUDFLibrarian;
import org.apache.asterix.common.config.ClusterProperties;
import org.apache.asterix.common.library.ILibraryManager;
import org.apache.asterix.common.utils.StorageConstants;
import org.apache.asterix.test.common.TestExecutor;
import org.apache.asterix.testframework.context.TestCaseContext;
import org.apache.commons.lang.SystemUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.util.ThreadDumpUtil;
import org.apache.hyracks.control.nc.NodeControllerService;
/**
 * Utils for running SQL++ or AQL runtime tests: cluster setup/teardown, test
 * enumeration from XML suites, repeated execution, and post-run leak checks.
 */
public class LangExecutionUtil {

    // Where actual test output is written, relative to the module root.
    private static final String PATH_ACTUAL = "target" + File.separator + "rttest" + File.separator;
    // Root of the runtime test-suite resources.
    private static final String PATH_BASE =
            StringUtils.join(new String[] { "src", "test", "resources", "runtimets" }, File.separator);

    private static final boolean cleanupOnStart = true;
    private static final boolean cleanupOnStop = true;
    // Names of test cases that left data behind; reported during tearDown().
    private static final List<String> badTestCases = new ArrayList<>();

    private static TestExecutor testExecutor;
    private static ExternalUDFLibrarian librarian;
    // Global per-test repeat count, settable via -Dtest.repeat=N.
    private static final int repeat = Integer.getInteger("test.repeat", 1);

    /**
     * Boots the test cluster and prepares the executor and UDF librarian.
     *
     * @param configFile cluster configuration file to start with
     * @param executor   executor used to run the individual test cases
     */
    public static void setUp(String configFile, TestExecutor executor) throws Exception {
        testExecutor = executor;
        File outdir = new File(PATH_ACTUAL);
        outdir.mkdirs();
        List<ILibraryManager> libraryManagers = ExecutionTestUtil.setUp(cleanupOnStart, configFile);
        ExternalUDFLibrarian.removeLibraryDir();
        librarian = new ExternalUDFLibrarian(libraryManagers);
        testExecutor.setLibrarian(librarian);
        if (repeat != 1) {
            System.out.println("FYI: each test will be run " + repeat + " times.");
        }
    }

    /**
     * Runs leak checks, then shuts the cluster down and reports any test
     * cases that left data behind. The shutdown happens even when a leak
     * check throws.
     */
    public static void tearDown() throws Exception {
        try {
            // Check whether there are leaked open run file handles.
            checkOpenRunFileLeaks();
            // Check whether there are leaked threads.
            checkThreadLeaks();
        } finally {
            ExternalUDFLibrarian.removeLibraryDir();
            ExecutionTestUtil.tearDown(cleanupOnStop);
            ExecutionTestUtil.integrationUtil.removeTestStorageFiles();
            if (!badTestCases.isEmpty()) {
                System.out.println("The following test cases left some data");
                for (String testCase : badTestCases) {
                    System.out.println(testCase);
                }
            }
        }
    }

    /**
     * Builds the parameter list from {@code onlyFilePath}, falling back to
     * the full suite in {@code suiteFilePath} when the "only" file selects
     * no tests.
     */
    public static Collection<Object[]> tests(String onlyFilePath, String suiteFilePath) throws Exception {
        Collection<Object[]> testArgs = buildTestsInXml(onlyFilePath);
        if (testArgs.isEmpty()) {
            testArgs = buildTestsInXml(suiteFilePath);
        }
        return testArgs;
    }

    // Parses the given XML suite file into one Object[]{TestCaseContext} per test case.
    protected static Collection<Object[]> buildTestsInXml(String xmlfile) throws Exception {
        Collection<Object[]> testArgs = new ArrayList<>();
        TestCaseContext.Builder b = new TestCaseContext.Builder();
        for (TestCaseContext ctx : b.build(new File(PATH_BASE), xmlfile)) {
            testArgs.add(new Object[] { ctx });
        }
        return testArgs;
    }

    public static void test(TestCaseContext tcCtx) throws Exception {
        test(testExecutor, tcCtx);
    }

    /**
     * Runs a single test case, repeating it as configured both globally and
     * per test case, and verifies storage-file distribution after each run.
     */
    public static void test(TestExecutor testExecutor, TestCaseContext tcCtx) throws Exception {
        // Effective repeat count: global count times the per-case count.
        // (Renamed from "repeat" to avoid shadowing the static field.)
        int times = LangExecutionUtil.repeat * tcCtx.getRepeat();
        try {
            for (int i = 1; i <= times; i++) {
                if (times > 1) {
                    System.err.print("[" + i + "/" + times + "] ");
                }
                if (librarian != null) {
                    librarian.cleanup();
                }
                testExecutor.executeTest(PATH_ACTUAL, tcCtx, null, false, ExecutionTestUtil.FailedGroup);
                try {
                    checkStorageFiles();
                } finally {
                    testExecutor.cleanup(tcCtx.toString(), badTestCases);
                }
            }
        } finally {
            System.err.flush();
        }
    }

    // Checks whether data files are uniformly distributed among io devices.
    private static void checkStorageFiles() throws Exception {
        NodeControllerService[] ncs = ExecutionTestUtil.integrationUtil.ncs;
        // Checks that dataset files are uniformly distributed across each io device.
        for (NodeControllerService nc : ncs) {
            checkNcStore(nc);
        }
    }

    // For each NC, check whether data files are uniformly distributed among io devices.
    private static void checkNcStore(NodeControllerService nc) throws Exception {
        List<IODeviceHandle> ioDevices = nc.getIoManager().getIODevices();
        int expectedPartitionNum = -1;
        for (IODeviceHandle ioDevice : ioDevices) {
            File[] dataDirs = ioDevice.getMount().listFiles();
            for (File dataDir : dataDirs) {
                String dirName = dataDir.getName();
                if (!dirName.equals(StorageConstants.STORAGE_ROOT_DIR_NAME)) {
                    // Skips non-storage directories.
                    continue;
                }
                int numPartitions = getNumResidentPartitions(dataDir.listFiles());
                if (expectedPartitionNum < 0) {
                    // Sets the expected number of partitions to the number of partitions on the first io device.
                    expectedPartitionNum = numPartitions;
                } else if (expectedPartitionNum != numPartitions) {
                    // The number of partitions of the current io device is not the expected one.
                    throw new Exception("Non-uniform data distribution on io devices: " + dataDir.getAbsolutePath()
                            + " number of partitions: " + numPartitions + " expected number of partitions: "
                            + expectedPartitionNum);
                }
            }
        }
    }

    // Gets the number of partitions on each io device.
    private static int getNumResidentPartitions(File[] partitions) {
        int num = 0;
        for (File partition : partitions) {
            File[] dataverses = partition.listFiles();
            for (File dv : dataverses) {
                String dvName = dv.getName();
                // If a partition only contains the Metadata dataverse, it's not counted.
                if (!dvName.equals("Metadata")) {
                    num++;
                    break;
                }
            }
        }
        return num;
    }

    /** Fails if the thread dump still contains execution-engine worker threads. */
    public static void checkThreadLeaks() throws IOException {
        String threadDump = ThreadDumpUtil.takeDumpJSONString();
        // Currently we only do sanity check for threads used in the execution engine.
        // Later we should check if there are leaked storage threads as well.
        if (threadDump.contains("Operator") || threadDump.contains("SuperActivity")
                || threadDump.contains("PipelinedPartition")) {
            System.out.print(threadDump);
            throw new AssertionError("There are leaked threads in the execution engine.");
        }
    }

    /**
     * Fails if this JVM still has open run ("waf") file handles. No-op on
     * Windows, where lsof is unavailable.
     */
    public static void checkOpenRunFileLeaks() throws IOException {
        if (SystemUtils.IS_OS_WINDOWS) {
            // Only run the check on Linux and MacOS; lsof does not exist on Windows.
            return;
        }
        RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
        String processName = runtimeMXBean.getName();
        // The runtime MX bean name has the form "pid@host".
        String processId = processName.split("@")[0];
        // Checks whether there are leaked run files from operators.
        Process process =
                Runtime.getRuntime().exec(new String[] { "bash", "-c", "lsof -p " + processId + "|grep waf|wc -l" });
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            int runFileCount = Integer.parseInt(reader.readLine().trim());
            if (runFileCount != 0) {
                // Qualified call for consistency with checkThreadLeaks().
                System.out.print(ThreadDumpUtil.takeDumpJSONString());
                outputLeakedOpenFiles(processId);
                throw new AssertionError("There are " + runFileCount + " leaked run files.");
            }
        }
    }

    // Prints every leaked run-file handle of the given process to stderr.
    private static void outputLeakedOpenFiles(String processId) throws IOException {
        Process process =
                Runtime.getRuntime().exec(new String[] { "bash", "-c", "lsof -p " + processId + "|grep waf" });
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.err.println(line);
            }
        }
    }
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.fieldcomparator;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import java.io.IOException;
/**
* Sorts by field's natural Term sort order, using
* ordinals. This is functionally equivalent to {@link
* org.apache.lucene.search.FieldComparator.TermValComparator}, but it first resolves the string
* to their relative ordinal positions (using the index
* returned by {@link org.apache.lucene.search.FieldCache#getTermsIndex}), and
* does most comparisons using the ordinals. For medium
* to large results, this comparator will be much faster
* than {@link org.apache.lucene.search.FieldComparator.TermValComparator}. For very small
* result sets it may be slower.
*
* Internally this comparator multiplies ordinals by 4 so that virtual ordinals can be inserted in-between the original field data ordinals.
* Thanks to this, an ordinal for the missing value and the bottom value can be computed and all ordinals are directly comparable. For example,
* if the field data ordinals are (a,1), (b,2) and (c,3), they will be internally stored as (a,4), (b,8), (c,12). Then the ordinal for the
* missing value will be computed by binary searching. For example, if the missing value is 'ab', it will be assigned 6 as an ordinal (between
 * 'a' and 'b'). And if the bottom value is 'ac', it will be assigned 7 as an ordinal (between 'ab' and 'b').
*/
public final class BytesRefOrdValComparator extends NestedWrappableComparator<BytesRef> {

    // Per-segment source of ordinals and term bytes for the sort field.
    final IndexFieldData.WithOrdinals<?> indexFieldData;
    // Substitute value for documents without a value; may be null.
    final BytesRef missingValue;

    /* Ords for each slot, times 4.
       @lucene.internal */
    final long[] ords;
    // How multiple ordinals per document are reduced to one (MIN or MAX).
    final SortMode sortMode;

    /* Values for each slot.
       @lucene.internal */
    final BytesRef[] values;

    /* Which reader last copied a value into the slot. When
       we compare two slots, we just compare-by-ord if the
       readerGen is the same; else we must compare the
       values (slower).
       @lucene.internal */
    final int[] readerGen;

    /* Gen of current reader we are on.
       @lucene.internal */
    int currentReaderGen = -1;

    /* Current reader's doc ord/values.
       @lucene.internal */
    BytesValues.WithOrdinals termsIndex;

    // Shifted ("times 4") ordinal assigned to missingValue in the current segment.
    long missingOrd;

    /* Bottom slot, or -1 if queue isn't full yet
       @lucene.internal */
    int bottomSlot = -1;

    /* Bottom ord (same as ords[bottomSlot] once bottomSlot
       is set). Cached for faster compares.
       @lucene.internal */
    long bottomOrd;

    // Top value (for paging) and its shifted ordinal in the current segment.
    BytesRef top;
    long topOrd;

    public BytesRefOrdValComparator(IndexFieldData.WithOrdinals<?> indexFieldData, int numHits, SortMode sortMode, BytesRef missingValue) {
        this.indexFieldData = indexFieldData;
        this.sortMode = sortMode;
        this.missingValue = missingValue;
        ords = new long[numHits];
        values = new BytesRef[numHits];
        readerGen = new int[numHits];
    }

    @Override
    public int compare(int slot1, int slot2) {
        // Same reader generation: shifted ordinals are directly comparable.
        if (readerGen[slot1] == readerGen[slot2]) {
            return LongValuesComparator.compare(ords[slot1], ords[slot2]);
        }
        // Slots filled from different segments: compare materialized values,
        // with null (missing) sorting first.
        final BytesRef val1 = values[slot1];
        final BytesRef val2 = values[slot2];
        if (val1 == null) {
            if (val2 == null) {
                return 0;
            }
            return -1;
        } else if (val2 == null) {
            return 1;
        }
        return val1.compareTo(val2);
    }

    // The top-level comparator never compares documents directly; per-doc work
    // is delegated to the PerSegmentComparator returned by setNextReader().
    @Override
    public int compareBottom(int doc) {
        throw new UnsupportedOperationException();
    }

    @Override
    public int compareTop(int doc) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public int compareBottomMissing() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void copy(int slot, int doc) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void missing(int slot) {
        throw new UnsupportedOperationException();
    }

    @Override
    public int compareTopMissing() {
        throw new UnsupportedOperationException();
    }

    /**
     * Segment-local comparator that compares by shifted ordinal while sharing
     * the slot state (ords/values/readerGen) of the enclosing comparator.
     */
    class PerSegmentComparator extends NestedWrappableComparator<BytesRef> {
        final Ordinals.Docs readerOrds;
        final BytesValues.WithOrdinals termsIndex;

        public PerSegmentComparator(BytesValues.WithOrdinals termsIndex) {
            this.readerOrds = termsIndex.ordinals();
            this.termsIndex = termsIndex;
            // Ordinals get multiplied by 4 below, so they must fit when shifted.
            if (readerOrds.getNumOrds() > Long.MAX_VALUE / 4) {
                throw new IllegalStateException("Current terms index pretends it has more than " + (Long.MAX_VALUE / 4) + " ordinals, which is unsupported by this impl");
            }
        }

        @Override
        public FieldComparator<BytesRef> setNextReader(AtomicReaderContext context) throws IOException {
            return BytesRefOrdValComparator.this.setNextReader(context);
        }

        @Override
        public int compare(int slot1, int slot2) {
            return BytesRefOrdValComparator.this.compare(slot1, slot2);
        }

        @Override
        public void setBottom(final int bottom) {
            BytesRefOrdValComparator.this.setBottom(bottom);
        }

        @Override
        public void setTopValue(BytesRef value) {
            BytesRefOrdValComparator.this.setTopValue(value);
        }

        @Override
        public BytesRef value(int slot) {
            return BytesRefOrdValComparator.this.value(slot);
        }

        @Override
        public int compareValues(BytesRef val1, BytesRef val2) {
            // null (missing) sorts before any real value.
            if (val1 == null) {
                if (val2 == null) {
                    return 0;
                }
                return -1;
            } else if (val2 == null) {
                return 1;
            }
            return val1.compareTo(val2);
        }

        // Unshifted field-data ordinal for a doc; overridden for multi-valued fields.
        protected long getOrd(int doc) {
            return readerOrds.getOrd(doc);
        }

        @Override
        public int compareBottom(int doc) {
            assert bottomSlot != -1;
            final long docOrd = getOrd(doc);
            // Shift by 2 (times 4) into the same space as bottomOrd/missingOrd.
            final long comparableOrd = docOrd == Ordinals.MISSING_ORDINAL ? missingOrd : docOrd << 2;
            return LongValuesComparator.compare(bottomOrd, comparableOrd);
        }

        @Override
        public int compareTop(int doc) throws IOException {
            final long ord = getOrd(doc);
            if (ord == Ordinals.MISSING_ORDINAL) {
                return compareTopMissing();
            } else {
                final long comparableOrd = ord << 2;
                return LongValuesComparator.compare(topOrd, comparableOrd);
            }
        }

        @Override
        public int compareBottomMissing() {
            assert bottomSlot != -1;
            return LongValuesComparator.compare(bottomOrd, missingOrd);
        }

        @Override
        public int compareTopMissing() {
            int cmp = LongValuesComparator.compare(topOrd, missingOrd);
            if (cmp == 0) {
                // Equal shifted ordinals: tie-break on the actual bytes.
                return compareValues(top, missingValue);
            } else {
                return cmp;
            }
        }

        @Override
        public void copy(int slot, int doc) {
            final long ord = getOrd(doc);
            if (ord == Ordinals.MISSING_ORDINAL) {
                ords[slot] = missingOrd;
                values[slot] = missingValue;
            } else {
                assert ord > 0;
                ords[slot] = ord << 2;
                // Reuse the slot's BytesRef unless it still aliases missingValue.
                if (values[slot] == null || values[slot] == missingValue) {
                    values[slot] = new BytesRef();
                }
                values[slot].copyBytes(termsIndex.getValueByOrd(ord));
            }
            readerGen[slot] = currentReaderGen;
        }

        @Override
        public void missing(int slot) {
            ords[slot] = missingOrd;
            values[slot] = missingValue;
        }
    }

    // for assertions
    private boolean consistentInsertedOrd(BytesValues.WithOrdinals termsIndex, long ord, BytesRef value) {
        assert ord >= 0 : ord;
        assert (ord == 0) == (value == null) : "ord=" + ord + ", value=" + value;
        final long previousOrd = ord >>> 2;
        final long nextOrd = previousOrd + 1;
        final BytesRef previous = previousOrd == 0 ? null : termsIndex.getValueByOrd(previousOrd);
        if ((ord & 3) == 0) { // there was an existing ord with the inserted value
            assert compareValues(previous, value) == 0;
        } else {
            assert compareValues(previous, value) < 0;
        }
        if (nextOrd < termsIndex.ordinals().getMaxOrd()) {
            final BytesRef next = termsIndex.getValueByOrd(nextOrd);
            assert compareValues(value, next) < 0;
        }
        return true;
    }

    // find where to insert an ord in the current terms index
    private long ordInCurrentReader(BytesValues.WithOrdinals termsIndex, BytesRef value) {
        final long docOrd = binarySearch(termsIndex, value);
        assert docOrd != -1; // would mean smaller than null
        final long ord;
        if (docOrd >= 0) {
            // value exists in the current segment
            ord = docOrd << 2;
        } else {
            // value doesn't exist, use the ord between the previous and the next term
            ord = ((-2 - docOrd) << 2) + 2;
        }
        // Inserted ordinals are always even; odd values are reserved for the
        // bottom-value tie-break in setBottom().
        assert (ord & 1) == 0;
        return ord;
    }

    @Override
    public FieldComparator<BytesRef> setNextReader(AtomicReaderContext context) throws IOException {
        termsIndex = indexFieldData.load(context).getBytesValues(false);
        assert termsIndex.ordinals() != null && termsIndex.ordinals().ordinals() != null;
        if (missingValue == null) {
            missingOrd = Ordinals.MISSING_ORDINAL;
        } else {
            // Compute the virtual (shifted) ordinal of the missing value in this segment.
            missingOrd = ordInCurrentReader(termsIndex, missingValue);
            assert consistentInsertedOrd(termsIndex, missingOrd, missingValue);
        }
        FieldComparator<BytesRef> perSegComp = null;
        assert termsIndex.ordinals() != null && termsIndex.ordinals().ordinals() != null;
        if (termsIndex.isMultiValued()) {
            // Multi-valued field: reduce each doc's ordinals via sortMode.
            perSegComp = new PerSegmentComparator(termsIndex) {
                @Override
                protected long getOrd(int doc) {
                    return getRelevantOrd(readerOrds, doc, sortMode);
                }
            };
        } else {
            perSegComp = new PerSegmentComparator(termsIndex);
        }
        currentReaderGen++;
        if (bottomSlot != -1) {
            perSegComp.setBottom(bottomSlot);
        }
        if (top != null) {
            perSegComp.setTopValue(top);
            topOrd = ordInCurrentReader(termsIndex, top);
        } else {
            topOrd = missingOrd;
        }
        return perSegComp;
    }

    @Override
    public void setBottom(final int bottom) {
        bottomSlot = bottom;
        final BytesRef bottomValue = values[bottomSlot];
        if (bottomValue == null) {
            bottomOrd = Ordinals.MISSING_ORDINAL;
        } else if (currentReaderGen == readerGen[bottomSlot]) {
            bottomOrd = ords[bottomSlot];
        } else {
            // insert an ord
            bottomOrd = ordInCurrentReader(termsIndex, bottomValue);
            if (bottomOrd == missingOrd) {
                // bottomValue and missingValue are in-between the same field data values -> tie-break
                // this is why we multiply ords by 4
                assert missingValue != null;
                final int cmp = bottomValue.compareTo(missingValue);
                if (cmp < 0) {
                    --bottomOrd;
                } else if (cmp > 0) {
                    ++bottomOrd;
                }
            }
            assert consistentInsertedOrd(termsIndex, bottomOrd, bottomValue);
        }
        readerGen[bottomSlot] = currentReaderGen;
    }

    @Override
    public void setTopValue(BytesRef value) {
        this.top = value;
    }

    @Override
    public BytesRef value(int slot) {
        return values[slot];
    }

    // Binary search over all real ordinals (1..numOrds); returns the matching
    // ordinal, or -(insertionPoint + 1) when the key is absent.
    final protected static long binarySearch(BytesValues.WithOrdinals a, BytesRef key) {
        return binarySearch(a, key, 1, a.ordinals().getNumOrds());
    }

    final protected static long binarySearch(BytesValues.WithOrdinals a, BytesRef key, long low, long high) {
        assert low != Ordinals.MISSING_ORDINAL;
        assert high == Ordinals.MISSING_ORDINAL || (a.getValueByOrd(high) == null | a.getValueByOrd(high) != null); // make sure we actually can get these values
        assert low == high + 1 || a.getValueByOrd(low) == null | a.getValueByOrd(low) != null;
        while (low <= high) {
            // Unsigned shift avoids overflow for large (low + high).
            long mid = (low + high) >>> 1;
            BytesRef midVal = a.getValueByOrd(mid);
            int cmp;
            if (midVal != null) {
                cmp = midVal.compareTo(key);
            } else {
                // null ordinal sorts before any key.
                cmp = -1;
            }
            if (cmp < 0)
                low = mid + 1;
            else if (cmp > 0)
                high = mid - 1;
            else
                return mid;
        }
        return -(low + 1);
    }

    // Reduces a multi-valued doc's ordinals to a single one according to sortMode.
    static long getRelevantOrd(Ordinals.Docs readerOrds, int docId, SortMode sortMode) {
        int length = readerOrds.setDocument(docId);
        long relevantVal = sortMode.startLong();
        long result = 0;
        assert sortMode == SortMode.MAX || sortMode == SortMode.MIN;
        for (int i = 0; i < length; i++) {
            result = relevantVal = sortMode.apply(readerOrds.nextOrd(), relevantVal);
        }
        assert result >= 0;
        assert result <= readerOrds.getMaxOrd();
        return result;
        // Enable this when the api can tell us that the ords per doc are ordered
        /*if (reversed) {
            IntArrayRef ref = readerOrds.getOrds(docId);
            if (ref.isEmpty()) {
                return 0;
            } else {
                return ref.values[ref.end - 1]; // last element is the highest value.
            }
        } else {
            return readerOrds.getOrd(docId); // returns the lowest value
        }*/
    }
}
|
|
/*
Copyright 2022 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.kubernetes.client.openapi.models;
/** Generated */
public class V1CapabilitiesFluentImpl<
A extends io.kubernetes.client.openapi.models.V1CapabilitiesFluent<A>>
extends io.kubernetes.client.fluent.BaseFluent<A>
implements io.kubernetes.client.openapi.models.V1CapabilitiesFluent<A> {
public V1CapabilitiesFluentImpl() {}
public V1CapabilitiesFluentImpl(io.kubernetes.client.openapi.models.V1Capabilities instance) {
this.withAdd(instance.getAdd());
this.withDrop(instance.getDrop());
}
private java.util.List<java.lang.String> add;
private java.util.List<java.lang.String> drop;
public A addToAdd(java.lang.Integer index, java.lang.String item) {
if (this.add == null) {
this.add = new java.util.ArrayList<java.lang.String>();
}
this.add.add(index, item);
return (A) this;
}
public A setToAdd(java.lang.Integer index, java.lang.String item) {
if (this.add == null) {
this.add = new java.util.ArrayList<java.lang.String>();
}
this.add.set(index, item);
return (A) this;
}
public A addToAdd(java.lang.String... items) {
if (this.add == null) {
this.add = new java.util.ArrayList<java.lang.String>();
}
for (java.lang.String item : items) {
this.add.add(item);
}
return (A) this;
}
public A addAllToAdd(java.util.Collection<java.lang.String> items) {
if (this.add == null) {
this.add = new java.util.ArrayList<java.lang.String>();
}
for (java.lang.String item : items) {
this.add.add(item);
}
return (A) this;
}
public A removeFromAdd(java.lang.String... items) {
for (java.lang.String item : items) {
if (this.add != null) {
this.add.remove(item);
}
}
return (A) this;
}
public A removeAllFromAdd(java.util.Collection<java.lang.String> items) {
for (java.lang.String item : items) {
if (this.add != null) {
this.add.remove(item);
}
}
return (A) this;
}
public java.util.List<java.lang.String> getAdd() {
return this.add;
}
public java.lang.String getAdd(java.lang.Integer index) {
return this.add.get(index);
}
public java.lang.String getFirstAdd() {
return this.add.get(0);
}
public java.lang.String getLastAdd() {
return this.add.get(add.size() - 1);
}
public java.lang.String getMatchingAdd(java.util.function.Predicate<java.lang.String> predicate) {
for (java.lang.String item : add) {
if (predicate.test(item)) {
return item;
}
}
return null;
}
public java.lang.Boolean hasMatchingAdd(
java.util.function.Predicate<java.lang.String> predicate) {
for (java.lang.String item : add) {
if (predicate.test(item)) {
return true;
}
}
return false;
}
public A withAdd(java.util.List<java.lang.String> add) {
if (add != null) {
this.add = new java.util.ArrayList();
for (java.lang.String item : add) {
this.addToAdd(item);
}
} else {
this.add = null;
}
return (A) this;
}
public A withAdd(java.lang.String... add) {
if (this.add != null) {
this.add.clear();
}
if (add != null) {
for (java.lang.String item : add) {
this.addToAdd(item);
}
}
return (A) this;
}
public java.lang.Boolean hasAdd() {
return add != null && !add.isEmpty();
}
public A addNewAdd(java.lang.String original) {
return (A) addToAdd(new String(original));
}
public A addToDrop(java.lang.Integer index, java.lang.String item) {
if (this.drop == null) {
this.drop = new java.util.ArrayList<java.lang.String>();
}
this.drop.add(index, item);
return (A) this;
}
public A setToDrop(java.lang.Integer index, java.lang.String item) {
if (this.drop == null) {
this.drop = new java.util.ArrayList<java.lang.String>();
}
this.drop.set(index, item);
return (A) this;
}
public A addToDrop(java.lang.String... items) {
if (this.drop == null) {
this.drop = new java.util.ArrayList<java.lang.String>();
}
for (java.lang.String item : items) {
this.drop.add(item);
}
return (A) this;
}
public A addAllToDrop(java.util.Collection<java.lang.String> items) {
if (this.drop == null) {
this.drop = new java.util.ArrayList<java.lang.String>();
}
for (java.lang.String item : items) {
this.drop.add(item);
}
return (A) this;
}
public A removeFromDrop(java.lang.String... items) {
for (java.lang.String item : items) {
if (this.drop != null) {
this.drop.remove(item);
}
}
return (A) this;
}
public A removeAllFromDrop(java.util.Collection<java.lang.String> items) {
for (java.lang.String item : items) {
if (this.drop != null) {
this.drop.remove(item);
}
}
return (A) this;
}
public java.util.List<java.lang.String> getDrop() {
return this.drop;
}
public java.lang.String getDrop(java.lang.Integer index) {
return this.drop.get(index);
}
public java.lang.String getFirstDrop() {
return this.drop.get(0);
}
public java.lang.String getLastDrop() {
return this.drop.get(drop.size() - 1);
}
public java.lang.String getMatchingDrop(
java.util.function.Predicate<java.lang.String> predicate) {
for (java.lang.String item : drop) {
if (predicate.test(item)) {
return item;
}
}
return null;
}
public java.lang.Boolean hasMatchingDrop(
java.util.function.Predicate<java.lang.String> predicate) {
for (java.lang.String item : drop) {
if (predicate.test(item)) {
return true;
}
}
return false;
}
public A withDrop(java.util.List<java.lang.String> drop) {
if (drop != null) {
this.drop = new java.util.ArrayList();
for (java.lang.String item : drop) {
this.addToDrop(item);
}
} else {
this.drop = null;
}
return (A) this;
}
public A withDrop(java.lang.String... drop) {
if (this.drop != null) {
this.drop.clear();
}
if (drop != null) {
for (java.lang.String item : drop) {
this.addToDrop(item);
}
}
return (A) this;
}
public java.lang.Boolean hasDrop() {
return drop != null && !drop.isEmpty();
}
public A addNewDrop(java.lang.String original) {
return (A) addToDrop(new String(original));
}
public boolean equals(java.lang.Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
V1CapabilitiesFluentImpl that = (V1CapabilitiesFluentImpl) o;
if (add != null ? !add.equals(that.add) : that.add != null) return false;
if (drop != null ? !drop.equals(that.drop) : that.drop != null) return false;
return true;
}
// Hash over add, drop and the superclass hash.
// NOTE(review): equals() does not consult super.equals(), so including
// super.hashCode() here may violate the equals/hashCode contract when the
// superclass carries state -- verify before changing generated output.
public int hashCode() {
    return java.util.Objects.hash(add, drop, super.hashCode());
}
}
|
|
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// This file is a part of the 'esoco-business' project.
// Copyright 2017 Elmar Sonnenschein, esoco GmbH, Flensburg, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
package de.esoco.entity;
import de.esoco.lib.expression.predicate.ElementPredicate;
import de.esoco.lib.manage.TransactionException;
import de.esoco.lib.property.SortDirection;
import de.esoco.storage.StorageException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.obrel.core.RelationType;
import org.obrel.core.RelationTypes;
import org.obrel.type.MetaTypes;
import static de.esoco.entity.EntityPredicates.hasExtraAttribute;
import static de.esoco.entity.EntityPredicates.ifAttribute;
import static de.esoco.entity.ExtraAttributes.newExtraAttribute;
import static de.esoco.entity.TestContact.CONTACT_VALUE;
import static de.esoco.entity.TestPerson.AGE;
import static de.esoco.entity.TestPerson.CITY;
import static de.esoco.entity.TestPerson.CONTACTS;
import static de.esoco.entity.TestPerson.FORENAME;
import static de.esoco.entity.TestPerson.LASTNAME;
import static de.esoco.entity.TestPerson.PARENT;
import static de.esoco.lib.expression.CollectionPredicates.elementOf;
import static de.esoco.lib.expression.Predicates.alwaysTrue;
import static de.esoco.lib.expression.Predicates.equalTo;
import static de.esoco.lib.expression.Predicates.greaterOrEqual;
import static de.esoco.lib.expression.Predicates.greaterThan;
import static de.esoco.lib.expression.Predicates.lessOrEqual;
import static de.esoco.storage.StoragePredicates.like;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/********************************************************************
 * Test for the JDBC storage implementation. The whole suite runs once for
 * each cache-size configuration returned by {@link #cacheSizes()}.
 *
 * @author eso
 */
@RunWith(Parameterized.class)
@SuppressWarnings("boxing")
public class EntityStorageTest extends AbstractEntityStorageTest
{
    //~ Static fields/initializers ---------------------------------------------

    // Extra attributes covering the supported value types
    private static final RelationType<String> XA1 = newExtraAttribute();
    private static final RelationType<String> XA2 = newExtraAttribute();
    private static final RelationType<Integer> XA_INT = newExtraAttribute();
    private static final RelationType<Boolean> XA_FLAG = newExtraAttribute();
    private static final RelationType<Date> XA_DATE = newExtraAttribute();
    private static final RelationType<List<String>> XA_LIST = newExtraAttribute();

    private static final Date TEST_DATE = new Date();

    private static final int TEST_DATA_SIZE = 5;
    private static final String[][] TEST_DATA;

    static
    {
        RelationTypes.init(EntityStorageTest.class);

        TEST_DATA = new String[TEST_DATA_SIZE][];

        for (int i = 1; i <= TEST_DATA_SIZE; i++)
        {
            // the last test person intentionally has no email address
            String sEmail =
                i < TEST_DATA_SIZE ? "test" + i + "@test.com" : null;

            TEST_DATA[i - 1] =
                new String[]
                {
                    "Test" + i, "First" + (TEST_DATA_SIZE - i + 1),
                    "Street" + i, "Postal" + i, "City" + i, "4" + i, sEmail,
                    i + "23-456789"
                };
        }
    }

    //~ Constructors -----------------------------------------------------------

    /***************************************
     * Creates a new instance.
     *
     * @param nCacheLevel1 Size of the first cache level
     * @param nCacheLevel2 Size of the second cache level
     * @param nCacheLevel3 Size of the third cache level
     */
    public EntityStorageTest(int nCacheLevel1,
                             int nCacheLevel2,
                             int nCacheLevel3)
    {
        EntityManager.setCacheCapacity(nCacheLevel1,
                                       nCacheLevel2,
                                       nCacheLevel3);
    }

    //~ Static methods ---------------------------------------------------------

    /***************************************
     * Returns the cache size parameters for different test runs.
     *
     * @return The list of cache sizes
     */
    @Parameters
    public static List<Object[]> cacheSizes()
    {
        return Arrays.asList(new Object[][]
            {
                { 0, 0, 0 },
                { 1, 1, 1 },
                { 1, 2, 3 },
                { 2, 2, 2 },
                { 5, 10, 15 },
                { 3, 2, 1 },
                { 0, 2, 2 },
                { 2, 0, 2 },
                { 2, 2, 0 },
            });
    }

    //~ Methods ----------------------------------------------------------------

    /***************************************
     * Invalidates the entity cache after each test.
     */
    @After
    public void afterTest()
    {
        EntityManager.invalidateCache();
    }

    /***************************************
     * Initializes the storage with the static test person data.
     *
     * @throws Exception On errors
     */
    @Before
    @Override
    public void setUp() throws Exception
    {
        super.setUp();

        List<TestPerson> aInitData = new ArrayList<TestPerson>();
        TestPerson aPerson;

        for (String[] rTestPersonData : TEST_DATA)
        {
            aPerson = createPerson(rTestPersonData);
            aInitData.add(aPerson);
            EntityManager.storeEntity(aPerson, null);
        }
    }

    /***************************************
     * Tests queries by extra attributes.
     *
     * @throws StorageException
     * @throws TransactionException
     */
    @Test
    public void testExtraAttributeQuery() throws StorageException,
        TransactionException
    {
        setupExtraAttributes();

        Entity rEntity =
            EntityManager.queryEntityByExtraAttribute(XA_INT, 42, true);

        assertEquals(TestPerson.class, rEntity.getClass());
        assertEquals("Test1", rEntity.get(LASTNAME));
        assertEquals("XA1-Test", rEntity.getExtraAttribute(XA1, null));
        assertEquals(TEST_DATE, rEntity.getExtraAttribute(XA_DATE, null));
        assertEquals(Integer.valueOf(42),
                     rEntity.getExtraAttribute(XA_INT, null));

        rEntity =
            EntityManager.queryEntityByExtraAttribute(XA_FLAG, false, true);

        assertEquals("Test" + TEST_DATA_SIZE, rEntity.get(LASTNAME));

        // XA2 is set on two entities; with bFailOnMultiple the query
        // must reject the ambiguous result
        try
        {
            EntityManager.queryEntityByExtraAttribute(XA2, "XA2-Test", true);
            fail("Expected IllegalStateException for ambiguous result");
        }
        catch (IllegalStateException e)
        {
            // this should happen
        }

        Collection<? extends Entity> rEntities =
            EntityManager.queryEntitiesByExtraAttribute(XA1,
                                                        "XA1-Test",
                                                        Integer.MAX_VALUE);

        assertEquals(2, rEntities.size());

        rEntities =
            EntityManager.queryEntitiesByExtraAttribute(XA_LIST,
                                                        Arrays.asList("L1",
                                                                      "L2"),
                                                        Integer.MAX_VALUE);

        assertEquals(1, rEntities.size());
        assertEquals("Test" + TEST_DATA_SIZE,
                     rEntities.iterator().next().get(LASTNAME));

        rEntities =
            EntityManager.queryEntitiesByExtraAttribute(TestPerson.class,
                                                        XA1,
                                                        "XA1-Test",
                                                        Integer.MAX_VALUE);

        assertEquals(2, rEntities.size());
    }

    /***************************************
     * Test of querying entities through predicates on their extra attributes.
     *
     * @throws StorageException
     * @throws TransactionException
     */
    @Test
    public void testExtraAttributeReference() throws StorageException,
        TransactionException
    {
        setupExtraAttributes();

        List<TestPerson> rEntities =
            EntityManager.queryEntities(TestPerson.class,
                                        hasExtraAttribute(TestPerson.class,
                                                          ExtraAttribute.KEY.is(equalTo(XA1))
                                                          .and(ExtraAttribute
                                                               .VALUE.is(elementOf("XA1-Test",
                                                                                   "XA2-Test")))),
                                        10);

        assertEquals(2, rEntities.size());

        rEntities =
            EntityManager.queryEntities(TestPerson.class,
                                        hasExtraAttribute(TestPerson.class,
                                                          ExtraAttribute.VALUE
                                                          .is(like("%-Test"))),
                                        10);

        assertEquals(2, rEntities.size());

        rEntities =
            EntityManager.queryEntities(TestPerson.class,
                                        hasExtraAttribute(TestPerson.class,
                                                          ExtraAttribute.VALUE
                                                          .is(equalTo("42"))),
                                        10);

        assertEquals(1, rEntities.size());
    }

    /***************************************
     * Test of storing, updating and accessing extra entity attributes.
     *
     * @throws StorageException
     * @throws TransactionException
     */
    @Test
    public void testExtraAttributes() throws StorageException,
        TransactionException
    {
        setupExtraAttributes();

        Entity rPerson = queryPersonByLastName("Test1");

        assertEquals("XA1-Test", rPerson.getExtraAttribute(XA1, null));
        assertEquals("XA2-Test", rPerson.getExtraAttribute(XA2, null));
        assertEquals(Integer.valueOf(42),
                     rPerson.getExtraAttribute(XA_INT, null));
        assertTrue(rPerson.getExtraAttribute(XA_FLAG, null));
        assertEquals(3, rPerson.getExtraAttribute(XA_LIST, null).size());
        assertEquals(Arrays.asList("L1", "L2", "L3"),
                     rPerson.getExtraAttribute(XA_LIST, null));

        // update the attributes and verify the changes after re-query
        rPerson.setExtraAttribute(XA1, "XA1-Updated");
        rPerson.setExtraAttribute(XA_INT, -42);
        rPerson.setExtraAttribute(XA_FLAG, false);
        rPerson.setExtraAttribute(XA_LIST, Arrays.asList("L1", "L2"));
        EntityManager.storeEntity(rPerson, null);

        rPerson = queryPersonByLastName("Test1");

        assertEquals("XA1-Updated", rPerson.getExtraAttribute(XA1, null));
        assertEquals("XA2-Test", rPerson.getExtraAttribute(XA2, null));
        assertEquals(Integer.valueOf(-42),
                     rPerson.getExtraAttribute(XA_INT, null));
        assertFalse(rPerson.getExtraAttribute(XA_FLAG, null));
        assertEquals(2, rPerson.getExtraAttribute(XA_LIST, null).size());
        assertEquals(Arrays.asList("L1", "L2"),
                     rPerson.getExtraAttribute(XA_LIST, null));
    }

    /***************************************
     * Test of parent-child entity hierarchies.
     *
     * @throws StorageException
     * @throws TransactionException
     */
    @Test
    public void testHierarchy() throws StorageException, TransactionException
    {
        createHierarchy(1);

        TestPerson rPerson;

        rPerson = queryPersonByLastName("Test1");

        TestPerson rSubPerson = queryPersonByLastName("SubTest11");

        assertEquals(rPerson, rSubPerson.get(PARENT));
        assertEquals("SubFirst11", rSubPerson.get(FORENAME));
        assertEquals(2, rSubPerson.get(CONTACTS).size());
    }

    /***************************************
     * Test of queries.
     *
     * @throws StorageException
     */
    @Test
    public void testQuery() throws StorageException
    {
        assertEquals(TEST_DATA_SIZE, executePersonQuery(null).size());
        assertEquals(TEST_DATA_SIZE - 1,
                     executePersonQuery(ifAttribute(AGE, greaterOrEqual(42)))
                     .size());
        assertEquals(TEST_DATA_SIZE - 2,
                     executePersonQuery(ifAttribute(AGE, greaterThan(42)))
                     .size());
        assertEquals(2,
                     executePersonQuery(ifAttribute(AGE, greaterThan(42)).and(ifAttribute(AGE,
                                                                                          lessOrEqual(44))))
                     .size());
        assertEquals(1,
                     executePersonQuery(ifAttribute(CITY, like("%2"))).size());
        assertEquals(2,
                     executePersonQuery(ifAttribute(LASTNAME,
                                                    elementOf("Test1",
                                                              "Test2")))
                     .size());
        assertEquals(2, queryPersonByLastName("Test1").get(CONTACTS).size());
        assertEquals(1,
                     queryPersonByLastName("Test" + TEST_DATA_SIZE).get(CONTACTS)
                     .size());
    }

    /***************************************
     * Test of sorted queries.
     *
     * @throws StorageException
     */
    @Test
    public void testSortedQuery() throws StorageException
    {
        ElementPredicate<Entity, String> aSortPredicate =
            ifAttribute(FORENAME, alwaysTrue());

        aSortPredicate.set(MetaTypes.SORT_DIRECTION, SortDirection.ASCENDING);

        List<TestPerson> aEntities = executePersonQuery(aSortPredicate);

        assertEquals("First1", aEntities.get(0).get(FORENAME));

        aSortPredicate.set(MetaTypes.SORT_DIRECTION, SortDirection.DESCENDING);
        aEntities = executePersonQuery(aSortPredicate);

        assertEquals("First" + TEST_DATA_SIZE,
                     aEntities.get(0).get(FORENAME));
    }

    /***************************************
     * Test the sub-hierarchy of contacts.
     *
     * @throws StorageException
     * @throws TransactionException
     */
    @Test
    public void testSubContacts() throws StorageException, TransactionException
    {
        Entity rPerson = queryPersonByLastName("Test1");

        addContacts(rPerson.get(CONTACTS).get(0), "[email protected]", "12345");
        assertEquals(2,
                     rPerson.get(CONTACTS)
                     .get(0)
                     .get(TestContact.CHILDREN)
                     .size());

        EntityManager.storeEntity(rPerson, null);
        assertEquals(2,
                     rPerson.get(CONTACTS)
                     .get(0)
                     .get(TestContact.CHILDREN)
                     .size());

        rPerson = queryPersonByLastName("Test1");

        assertEquals(2,
                     rPerson.get(CONTACTS)
                     .get(0)
                     .get(TestContact.CHILDREN)
                     .size());
        assertEquals("[email protected]",
                     rPerson.get(CONTACTS)
                     .get(0)
                     .get(TestContact.CHILDREN)
                     .get(0)
                     .get(CONTACT_VALUE));
        assertEquals("12345",
                     rPerson.get(CONTACTS)
                     .get(0)
                     .get(TestContact.CHILDREN)
                     .get(1)
                     .get(CONTACT_VALUE));
    }

    /***************************************
     * Test of storage updates.
     *
     * @throws StorageException
     * @throws TransactionException
     */
    @Test
    public void testUpdate() throws StorageException, TransactionException
    {
        Entity rPerson = queryPersonByLastName("Test1");

        rPerson.set(AGE, 24);
        EntityManager.storeEntity(rPerson, null);

        rPerson = queryPersonByLastName("Test1");

        // Integer.valueOf instead of the deprecated new Integer(24)
        assertEquals(Integer.valueOf(24), rPerson.get(AGE));
    }

    /***************************************
     * Test of storage updates for child entities.
     *
     * @throws StorageException
     * @throws TransactionException
     */
    @Test
    public void testUpdateChildren() throws StorageException,
        TransactionException
    {
        Entity rPerson = queryPersonByLastName("Test1");

        rPerson.get(CONTACTS).get(0).set(CONTACT_VALUE, "[email protected]");
        EntityManager.storeEntity(rPerson, null);

        rPerson = queryPersonByLastName("Test1");

        assertEquals("[email protected]",
                     rPerson.get(CONTACTS).get(0).get(CONTACT_VALUE));
    }

    /***************************************
     * Query an entity by it's last name.
     *
     * @param sName The name
     *
     * @return The entity
     *
     * @throws StorageException
     */
    protected TestPerson queryPersonByLastName(String sName)
        throws StorageException
    {
        return EntityManager.queryEntity(TestPerson.class,
                                         ifAttribute(LASTNAME, equalTo(sName)),
                                         true);
    }

    /***************************************
     * Creates a hierarchy of test persons below the given parent.
     *
     * @param nParent Index of the parent test person
     *
     * @throws StorageException
     * @throws TransactionException
     */
    private void createHierarchy(int nParent) throws StorageException,
        TransactionException
    {
        TestPerson rPerson = queryPersonByLastName("Test" + nParent);
        TestPerson aSubPerson =
            createPerson(new String[]
                {
                    "SubTest1" + nParent, "SubFirst1" + nParent,
                    "SubStreet1" + nParent, "SubPostal1" + nParent,
                    "SubCity1" + nParent, "1" + nParent,
                    "subtest1" + nParent + "@test.com",
                    "111-222333-" + nParent
                });

        rPerson.addChild(TestPerson.CHILDREN, aSubPerson);
        EntityManager.storeEntity(rPerson, null);
    }

    /***************************************
     * Initializes test extra attributes on the first and last test person.
     *
     * @throws StorageException
     * @throws TransactionException
     */
    private void setupExtraAttributes() throws StorageException,
        TransactionException
    {
        Entity rPerson = queryPersonByLastName("Test1");

        assertFalse(rPerson.hasExtraAttribute(XA_FLAG));

        rPerson.setExtraAttribute(XA1, "XA1-Test");
        rPerson.setExtraAttribute(XA2, "XA2-Test");
        rPerson.setExtraAttribute(XA_INT, 42);
        rPerson.setExtraAttribute(XA_FLAG, true);
        rPerson.setExtraAttribute(XA_DATE, TEST_DATE);
        rPerson.setExtraAttribute(XA_LIST, Arrays.asList("L1", "L2", "L3"));
        rStorage.store(rPerson);

        rPerson = queryPersonByLastName("Test" + TEST_DATA_SIZE);

        assertFalse(rPerson.hasExtraAttribute(XA_FLAG));

        rPerson.setExtraAttribute(XA1, "XA1-Test");
        rPerson.setExtraAttribute(XA2, "XA2-Test");
        rPerson.setExtraAttribute(XA_INT, 99);
        rPerson.setExtraAttribute(XA_FLAG, false);
        rPerson.setExtraAttribute(XA_DATE, TEST_DATE);
        rPerson.setExtraAttribute(XA_LIST, Arrays.asList("L1", "L2"));
        rStorage.store(rPerson);
    }
}
|
|
/*
* $HeadURL: http://svn.apache.org/repos/asf/httpcomponents/httpclient/trunk/module-client/src/main/java/org/apache/http/impl/conn/AbstractPoolEntry.java $
* $Revision: 658775 $
* $Date: 2008-05-21 10:30:45 -0700 (Wed, 21 May 2008) $
*
* ====================================================================
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.impl.conn;
import java.io.IOException;
import org.apache.http.HttpHost;
import org.apache.http.params.HttpParams;
import org.apache.http.protocol.HttpContext;
import org.apache.http.conn.routing.HttpRoute;
import org.apache.http.conn.routing.RouteTracker;
import org.apache.http.conn.ClientConnectionOperator;
import org.apache.http.conn.OperatedClientConnection;
/**
* A pool entry for use by connection manager implementations.
* Pool entries work in conjunction with an
* {@link AbstractClientConnAdapter adapter}.
* The adapter is handed out to applications that obtain a connection.
* The pool entry stores the underlying connection and tracks the
* {@link HttpRoute route} established.
* The adapter delegates methods for establishing the route to
* it's pool entry.
* <br/>
* If the managed connections is released or revoked, the adapter
* gets disconnected, but the pool entry still contains the
* underlying connection and the established route.
*
* @author <a href="mailto:rolandw at apache.org">Roland Weber</a>
* @author <a href="mailto:[email protected]">Michael Becke</a>
*
*
* <!-- empty lines to avoid svn diff problems -->
* @version $Revision: 658775 $
*
* @since 4.0
*/
public abstract class AbstractPoolEntry {
    /** The connection operator. */
    protected final ClientConnectionOperator connOperator;
    /** The underlying connection being pooled or used. */
    protected final OperatedClientConnection connection;
    /** The route for which this entry gets allocated. */
    //@@@ currently accessed from connection manager(s) as attribute
    //@@@ avoid that, derived classes should decide whether update is allowed
    //@@@ SCCM: yes, TSCCM: no
    protected volatile HttpRoute route;
    /** Connection state object. */
    protected volatile Object state;
    /** The tracked route, or <code>null</code> before tracking starts. */
    protected volatile RouteTracker tracker;
    /**
     * Creates a new pool entry.
     *
     * @param connOperator the Connection Operator for this entry
     * @param route the planned route for the connection,
     * or <code>null</code>
     */
    protected AbstractPoolEntry(ClientConnectionOperator connOperator,
                                HttpRoute route) {
        super();
        if (connOperator == null) {
            throw new IllegalArgumentException("Connection operator may not be null");
        }
        this.connOperator = connOperator;
        this.connection = connOperator.createConnection();
        this.route = route;
        this.tracker = null;
    }
    /**
     * Returns the state object associated with this pool entry.
     *
     * @return The state object
     */
    public Object getState() {
        return state;
    }
    /**
     * Assigns a state object to this pool entry.
     *
     * @param state The state object
     */
    public void setState(final Object state) {
        this.state = state;
    }
    /**
     * Opens the underlying connection.
     *
     * @param route the route along which to open the connection
     * @param context the context for opening the connection
     * @param params the parameters for opening the connection
     *
     * @throws IOException in case of a problem
     */
    public void open(HttpRoute route,
                     HttpContext context, HttpParams params)
        throws IOException {
        if (route == null) {
            throw new IllegalArgumentException
                ("Route must not be null.");
        }
        //@@@ is context allowed to be null? depends on operator?
        if (params == null) {
            throw new IllegalArgumentException
                ("Parameters must not be null.");
        }
        if ((this.tracker != null) && this.tracker.isConnected()) {
            throw new IllegalStateException("Connection already open.");
        }
        // - collect the arguments
        // - call the operator
        // - update the tracking data
        // In this order, we can be sure that only a successful
        // opening of the connection will be tracked.
        //@@@ verify route against planned route?
        this.tracker = new RouteTracker(route);
        final HttpHost proxy = route.getProxyHost();
        connOperator.openConnection
            (this.connection,
             (proxy != null) ? proxy : route.getTargetHost(),
             route.getLocalAddress(),
             context, params);
        RouteTracker localTracker = tracker; // capture volatile
        // If this tracker was reset while connecting,
        // fail early. (shutdownEntry() nulls the volatile tracker field,
        // which is how a concurrent abort is detected here.)
        if (localTracker == null) {
            throw new IOException("Request aborted");
        }
        if (proxy == null) {
            localTracker.connectTarget(this.connection.isSecure());
        } else {
            localTracker.connectProxy(proxy, this.connection.isSecure());
        }
    } // open
    /**
     * Tracks tunnelling of the connection to the target.
     * The tunnel has to be established outside by sending a CONNECT
     * request to the (last) proxy.
     *
     * @param secure <code>true</code> if the tunnel should be
     * considered secure, <code>false</code> otherwise
     * @param params the parameters for tunnelling the connection
     *
     * @throws IOException in case of a problem
     */
    public void tunnelTarget(boolean secure, HttpParams params)
        throws IOException {
        if (params == null) {
            throw new IllegalArgumentException
                ("Parameters must not be null.");
        }
        //@@@ check for proxy in planned route?
        if ((this.tracker == null) || !this.tracker.isConnected()) {
            throw new IllegalStateException("Connection not open.");
        }
        if (this.tracker.isTunnelled()) {
            throw new IllegalStateException
                ("Connection is already tunnelled.");
        }
        // LOG.debug?
        this.connection.update(null, tracker.getTargetHost(),
                               secure, params);
        this.tracker.tunnelTarget(secure);
    } // tunnelTarget
    /**
     * Tracks tunnelling of the connection to a chained proxy.
     * The tunnel has to be established outside by sending a CONNECT
     * request to the previous proxy.
     *
     * @param next the proxy to which the tunnel was established.
     * See {@link org.apache.http.conn.ManagedClientConnection#tunnelProxy
     * ManagedClientConnection.tunnelProxy}
     * for details.
     * @param secure <code>true</code> if the tunnel should be
     * considered secure, <code>false</code> otherwise
     * @param params the parameters for tunnelling the connection
     *
     * @throws IOException in case of a problem
     */
    public void tunnelProxy(HttpHost next, boolean secure, HttpParams params)
        throws IOException {
        if (next == null) {
            throw new IllegalArgumentException
                ("Next proxy must not be null.");
        }
        if (params == null) {
            throw new IllegalArgumentException
                ("Parameters must not be null.");
        }
        //@@@ check for proxy in planned route?
        if ((this.tracker == null) || !this.tracker.isConnected()) {
            throw new IllegalStateException("Connection not open.");
        }
        // LOG.debug?
        this.connection.update(null, next, secure, params);
        this.tracker.tunnelProxy(next, secure);
    } // tunnelProxy
    /**
     * Layers a protocol on top of an established tunnel.
     *
     * @param context the context for layering
     * @param params the parameters for layering
     *
     * @throws IOException in case of a problem
     */
    public void layerProtocol(HttpContext context, HttpParams params)
        throws IOException {
        //@@@ is context allowed to be null? depends on operator?
        if (params == null) {
            throw new IllegalArgumentException
                ("Parameters must not be null.");
        }
        if ((this.tracker == null) || !this.tracker.isConnected()) {
            throw new IllegalStateException("Connection not open.");
        }
        if (!this.tracker.isTunnelled()) {
            //@@@ allow this?
            throw new IllegalStateException
                ("Protocol layering without a tunnel not supported.");
        }
        if (this.tracker.isLayered()) {
            throw new IllegalStateException
                ("Multiple protocol layering not supported.");
        }
        // - collect the arguments
        // - call the operator
        // - update the tracking data
        // In this order, we can be sure that only a successful
        // layering on top of the connection will be tracked.
        final HttpHost target = tracker.getTargetHost();
        connOperator.updateSecureConnection(this.connection, target,
                                            context, params);
        this.tracker.layerProtocol(this.connection.isSecure());
    } // layerProtocol
    /**
     * Shuts down the entry.
     *
     * If {@link #open(HttpRoute, HttpContext, HttpParams)} is in progress,
     * this will cause that open to possibly throw an {@link IOException}.
     */
    protected void shutdownEntry() {
        // nulling the volatile tracker signals an abort to a concurrent open()
        tracker = null;
    }
} // class AbstractPoolEntry
|
|
package uk.org.ponder.saxalizer.support;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Map;
import uk.org.ponder.arrayutil.ArrayEnumeration;
import uk.org.ponder.beanutil.BeanLocator;
import uk.org.ponder.beanutil.PropertyAccessor;
import uk.org.ponder.beanutil.WriteableBeanLocator;
import uk.org.ponder.beanutil.support.BeanLocatorPropertyAccessor;
import uk.org.ponder.beanutil.support.IndexedPropertyAccessor;
import uk.org.ponder.beanutil.support.MapPropertyAccessor;
import uk.org.ponder.errorutil.PropertyException;
import uk.org.ponder.reflect.ClassGetter;
import uk.org.ponder.saxalizer.AccessMethod;
import uk.org.ponder.saxalizer.DeSAXalizable;
import uk.org.ponder.saxalizer.DeSAXalizableAttrs;
import uk.org.ponder.saxalizer.SAMSList;
import uk.org.ponder.saxalizer.SAXAccessMethodSpec;
import uk.org.ponder.saxalizer.SAXalizable;
import uk.org.ponder.saxalizer.SAXalizableAttrs;
import uk.org.ponder.saxalizer.SAXalizerMappingContext;
import uk.org.ponder.saxalizer.WBLAccessMethod;
import uk.org.ponder.saxalizer.mapping.SAXalizerMapperEntry;
import uk.org.ponder.util.Logger;
import uk.org.ponder.util.UniversalRuntimeException;
/**
* One instance of a MethodAnalyser is stored for each SAXalizable class that
* the SAXalizer discovers; this instance is returned when a call is made to
* <code>getMethodAnalyser</code> with an object of the SAXalizable class as
* argument. MethodAnalysers are cached in a static hashtable indexed by the
* SAXalizable class.
* <p>
* Some "bean-sense" has been retroactively blown into this class, which dates
* from the dinosaur SAXalizer days of 2000, with the retrofit of the
* <code>PropertyAccessor</code> interface. Its structure still needs a little
* work though, since it still maintains separate collections for "tag" and
* "attribute" methods.
*/
public class MethodAnalyser implements PropertyAccessor {
public Class targetclass;
/**
* Each of the four types of SAXAccessMethods supported, being get and set
* methods for subtags and attributes.
*/
public SAXAccessMethodHash tagmethods;
public SAXAccessMethodHash attrmethods;
public SAXAccessMethod bodymethod;
/** A flat array of ALL accessors for the target class. This will be the most
* efficient means of using introspection information, and is populated on
* construction of this MethodAnalyser.
*/
public SAXAccessMethod[] allgetters;
// Flattens the tag, attribute and body accessors into the single
// allgetters array that serialisation iterates over.
private void assembleGetters() {
    ArrayList accumulate = new ArrayList();
    for (SAMIterator tagget = tagmethods.getGetEnumeration(); tagget.valid(); tagget
        .next()) {
        accumulate.add(tagget.get());
    }
    for (SAMIterator tagget = attrmethods.getGetEnumeration(); tagget.valid(); tagget
        .next()) {
        accumulate.add(tagget.get());
    }
    if (bodymethod != null) {
        // BUG FIX: the previous code added the (still null) allgetters field
        // here instead of the body accessor, losing the body method and
        // planting a null element in the resulting array.
        accumulate.add(bodymethod);
    }
    allgetters = new SAXAccessMethod[accumulate.size()];
    for (int i = 0; i < accumulate.size(); ++i) {
        allgetters[i] = (SAXAccessMethod) accumulate.get(i);
    }
}
// Resolves the accessor for a tag/property name: subtag methods first,
// then attribute methods, finally a dynamic WBL accessor for writeable
// bean locators. Returns null when nothing matches.
public AccessMethod getAccessMethod(String tagname) {
    SAXAccessMethod found = tagmethods.get(tagname);
    if (found == null) {
        found = attrmethods.get(tagname);
    }
    if (found != null) {
        return found;
    }
    if (WriteableBeanLocator.class.isAssignableFrom(targetclass)) {
        return new WBLAccessMethod(WriteableBeanLocator.class, tagname);
    }
    return null;
}
// ****** Begin implementation of PropertyAccessor interface
// A property is writeable when an accessor exists and supports setting.
public boolean canSet(String name) {
    AccessMethod method = getAccessMethod(name);
    if (method == null) {
        return false;
    }
    return method.canSet();
}
// Writes a property value through its accessor, failing loudly when the
// property is unknown or read-only.
public void setProperty(Object parent, String name, Object value) {
    AccessMethod method = getAccessMethod(name);
    if (method == null) {
        throw UniversalRuntimeException.accumulate(new PropertyException(),
            "Property " + name + " of object " + parent.getClass()
                + " not found");
    }
    if (!method.canSet()) {
        throw UniversalRuntimeException.accumulate(new PropertyException(),
            "Property " + name + " of object " + parent.getClass()
                + " is not writeable");
    }
    method.setChildObject(parent, value);
}
// "Unlinks" a property by writing null through its accessor.
public void unlink(Object parent, String name) {
    AccessMethod method = getAccessMethod(name);
    if (method == null) {
        throw UniversalRuntimeException.accumulate(new PropertyException(),
            "Property " + name + " of object " + parent.getClass()
                + " not found");
    }
    method.setChildObject(parent, null);
}
// A property is readable when an accessor exists and supports getting.
public boolean canGet(String name) {
    AccessMethod method = getAccessMethod(name);
    return method != null && method.canGet();
}
// Reads a property value through its accessor; unknown names fail.
public Object getProperty(Object parent, String name) {
    AccessMethod method = getAccessMethod(name);
    if (method != null) {
        return method.getChildObject(parent);
    }
    throw UniversalRuntimeException.accumulate(new PropertyException(),
        "Property " + name + " of object " + parent.getClass()
            + " not found");
}
// Returns the declared type of the named property.
public Class getPropertyType(Object parent, String name) {
    AccessMethod method = getAccessMethod(name);
    if (method == null) {
        // note: the original error message ends with a trailing space
        throw UniversalRuntimeException.accumulate(new PropertyException(),
            "Property " + name + " of " + targetclass + " not found ");
    }
    return method.getAccessedType();
}
// Reports whether the named property is denumerable (multi-valued).
public boolean isMultiple(Object parent, String name) {
    AccessMethod method = getAccessMethod(name);
    if (method == null) {
        throw UniversalRuntimeException.accumulate(new PropertyException(),
            "Property " + name + " of " + targetclass + " not found");
    }
    return method.isDenumerable();
}
// ****** End implementation of PropertyAccessor interface.
/**
* Given an object to be serialised/deserialised, return a MethodAnalyser
* object containing a hash of Method and Field accessors. The
* <code>context</code> stores a hash of these analysers so they are only
* ever computed once per context per object class analysed.
*
* @param objclass Either an object instance to be investigated, or an object class.
* If a class is specified and no analyser is registered, a new
* object will be created using newInstance() to be queried.
* @param context the context
* @return a MethodAnalyser
*/
// Looks up any dynamic mapper entry for the class and builds a fresh
// MethodAnalyser from it, wrapping failures with the offending class.
public static MethodAnalyser constructMethodAnalyser(Class objclass,
    SAXalizerMappingContext context) {
    SAXalizerMapperEntry entry = context.mapper.byClass(objclass);
    try {
        return new MethodAnalyser(objclass, entry, context);
    }
    catch (Exception e) {
        throw UniversalRuntimeException.accumulate(e,
            "Error constructing method analyser for " + objclass);
    }
}
// Chooses the PropertyAccessor implementation for the runtime type of
// the target object: BeanLocator and Map get shared singletons, indexed
// types get the context's indexed accessor, everything else a cached
// MethodAnalyser.
public static PropertyAccessor getPropertyAccessor(Object o,
    SAXalizerMappingContext context) {
    Class targetclazz = o.getClass();
    if (o instanceof BeanLocator) {
        return BeanLocatorPropertyAccessor.instance;
    }
    if (o instanceof Map) {
        return MapPropertyAccessor.instance;
    }
    if (IndexedPropertyAccessor.isIndexed(targetclazz)) {
        return context.getIndexedPropertyAccessor();
    }
    return context.getAnalyser(targetclazz);
}
// Merges a stream of accessor specs (filtered to one xmlform: tag or
// attribute) into the existing list, fusing a get spec and a set spec
// that share the same XML name into a single combined entry.
private void condenseMethods(SAMSList existingmethods,
    Enumeration newmethods, String xmlform) {
    while (newmethods.hasMoreElements()) {
        SAXAccessMethodSpec nextentry = (SAXAccessMethodSpec) newmethods
            .nextElement();
        // fuse together any pairs of methods that refer to the same tag/property
        // name(xmlname)
        // as getters and setters.
        if (nextentry.xmlform.equals(xmlform)) {
            SAXAccessMethodSpec previous = existingmethods
                .byXMLName(nextentry.xmlname);
            if (previous != null) {
                // determine which of the two specs carries the set method,
                // rejecting the case where both do
                SAXAccessMethodSpec setmethod = null;
                if (previous.setmethodname != null)
                    setmethod = previous;
                if (nextentry.setmethodname != null) {
                    if (setmethod != null) {
                        throw new UniversalRuntimeException(
                            "Duplicate set method specification for tag "
                                + previous.xmlname + " with java type " + previous.clazz);
                    }
                    setmethod = nextentry;
                    previous.setmethodname = nextentry.setmethodname;
                }
                if (setmethod == null) {
                    throw new UniversalRuntimeException("Neither of specifications "
                        + previous + " and " + nextentry + " defines a set method");
                }
                // The "set" method will in general have a more precise argument type.
                previous.clazz = setmethod.clazz;
                if (nextentry.getmethodname != null) {
                    previous.getmethodname = nextentry.getmethodname;
                }
            }
            else {
                existingmethods.add(nextentry);
            }
        }
    }
}
  // Holds the single body-text access method spec discovered during analysis;
  // written as a side-effect of checkBodyMethodSpec and cleared again at the
  // end of the constructor once converted into an accessor.
  SAXAccessMethodSpec bodymethodspec = null;
  /**
   * Records the supplied spec as this analyser's body method if it targets
   * XML body text; specs with any other xmlform are ignored.
   *
   * @param bodymethodspec candidate specification to inspect
   * @throws UniversalRuntimeException if a body method spec was already seen
   */
  public void checkBodyMethodSpec(SAXAccessMethodSpec bodymethodspec) {
    if (bodymethodspec.xmlform.equals(SAXAccessMethodSpec.XML_BODY)) {
      if (this.bodymethodspec != null) {
        throw new UniversalRuntimeException("Duplicate body method spec "
            + bodymethodspec);
      }
      this.bodymethodspec = bodymethodspec;
    }
  }
  // Note that these two methods have side-effect on bodymethodspec
  /**
   * Merges a statically declared array of access method specs into the tag
   * and attribute method lists, then records any body-text spec.
   *
   * @param setmethods specs to absorb; may be null. NOTE(review): a null
   *          array is still handed to ArrayEnumeration before the explicit
   *          null guard below - presumably ArrayEnumeration tolerates null
   *          and yields nothing; confirm before reordering.
   * @param tagMethods accumulator for specs mapped onto XML tags
   * @param attrMethods accumulator for specs mapped onto XML attributes
   */
  private void absorbSAMSArray(SAXAccessMethodSpec[] setmethods,
      SAMSList tagMethods, SAMSList attrMethods) {
    condenseMethods(tagMethods, new ArrayEnumeration(setmethods),
        SAXAccessMethodSpec.XML_TAG);
    condenseMethods(attrMethods, new ArrayEnumeration(setmethods),
        SAXAccessMethodSpec.XML_ATTRIBUTE);
    if (setmethods != null) {
      for (int i = 0; i < setmethods.length; ++i) {
        checkBodyMethodSpec(setmethods[i]);
      }
    }
  }
private void absorbSAMSList(SAXalizerMapperEntry entry, SAMSList tagMethods,
SAMSList attrMethods) {
condenseMethods(tagMethods, Collections.enumeration(entry.getSAMSList()),
SAXAccessMethodSpec.XML_TAG);
condenseMethods(attrMethods, Collections.enumeration(entry.getSAMSList()),
SAXAccessMethodSpec.XML_ATTRIBUTE);
for (int i = 0; i < entry.size(); ++i) {
checkBodyMethodSpec(entry.specAt(i));
}
}
  /**
   * This constructor locates SAXAccessMethodSpec objects for objects of the
   * supplied class from all available static and dynamic sources, sorts them
   * into tag and attribute methods while condensing together set and get
   * specifications into single entries, and returns a MethodAnalyser object
   * with the specs resolved into Method and Field accessors ready for use.
   *
   * @param objclass The class of the object to be inspected.
   * @param o Either the object to be inspected for accessors, or its class in
   *          the case construction is to be deferred until the last possible
   *          moment (it implements SAXalizable &c)
   * @param entry A SAXalizerMapperEntry object already determined from dynamic
   *          sources.
   * @param context The global mapping context.
   */
  MethodAnalyser(Class objclass, SAXalizerMapperEntry entry,
      SAXalizerMappingContext context) {
    targetclass = objclass;
    bodymethodspec = null;
    SAMSList tagMethods = new SAMSList();
    SAMSList attrMethods = new SAMSList();
    // An entry is "default inferrible" when the inferrer says so for the
    // class, or when the dynamic entry itself is marked defaultible. Note
    // this can only be true when context.inferrer is non-null, which the
    // source-3 branch below relies on.
    boolean defaultinferrible = context.inferrer != null
        && (context.inferrer.isDefaultInferrible(objclass) || entry != null
            && entry.defaultible);
    // source 1: dynamic info from mapper file takes precendence
    if (entry != null) {
      // do not absorb entry if defaultinferrible, since it will be done again
      // later.
      if (!defaultinferrible) {
        absorbSAMSList(entry, tagMethods, attrMethods);
      }
    }
    else {
      if (SAXalizable.class.isAssignableFrom(objclass)
          || SAXalizableAttrs.class.isAssignableFrom(objclass)
          || DeSAXalizable.class.isAssignableFrom(objclass)
          || DeSAXalizableAttrs.class.isAssignableFrom(objclass)) {
        // this branch will become gradually more deprecated - info should move
        // into mapping files or else be default.
        Object o = ClassGetter.construct(objclass);
        // source 2: static info from interfaces is second choice
        // System.out.println("MethodAnalyser called for object "+o);
        if (o instanceof SAXalizable) {
          SAXalizable so = (SAXalizable) o;
          SAXAccessMethodSpec[] setMethods = so.getSAXSetMethods();
          SAXAccessMethodSpec.convertToSetSpec(setMethods);
          absorbSAMSArray(setMethods, tagMethods, attrMethods);
        }
        if (o instanceof SAXalizableAttrs) { // now do the same for attributes
          SAXalizableAttrs sao = (SAXalizableAttrs) o;
          SAXAccessMethodSpec[] setAttrMethods = sao.getSAXSetAttrMethods();
          if (setAttrMethods != null) {
            Logger.println("MethodAnalyser found " + setAttrMethods.length
                + " setattr methods for " + o.getClass(),
                Logger.DEBUG_INFORMATIONAL);
          }
          SAXAccessMethodSpec.convertToAttrSpec(setAttrMethods);
          SAXAccessMethodSpec.convertToSetSpec(setAttrMethods);
          absorbSAMSArray(setAttrMethods, tagMethods, attrMethods);
        }
        if (o instanceof DeSAXalizable) {
          // construct array of SAXAccessMethods for DeSAXalizable objects
          DeSAXalizable doz = (DeSAXalizable) o;
          SAXAccessMethodSpec[] getMethods = doz.getSAXGetMethods();
          absorbSAMSArray(getMethods, tagMethods, attrMethods);
        }
        if (o instanceof DeSAXalizableAttrs) { // now do the same for
          // attributes
          DeSAXalizableAttrs sao = (DeSAXalizableAttrs) o;
          SAXAccessMethodSpec[] getAttrMethods = sao.getSAXGetAttrMethods();
          if (getAttrMethods != null) {
            SAXAccessMethodSpec.convertToAttrSpec(getAttrMethods);
            Logger.println("MethodAnalyser found " + getAttrMethods.length
                + " getattr methods for " + o, Logger.DEBUG_INFORMATIONAL);
          }
          absorbSAMSArray(getAttrMethods, tagMethods, attrMethods);
        }
      }
    }
    // Source 3: if no accessors have so far been discovered, try to infer some
    // using an inferrer if one is set.
    // (defaultinferrible implies context.inferrer != null, so the unguarded
    // inferrer dereference below is safe despite the operator precedence.)
    if (context.inferrer != null
        && (tagMethods.size() == 0 && attrMethods.size() == 0)
        || defaultinferrible) {
      entry = context.inferrer.inferEntry(objclass, entry);
      absorbSAMSList(entry, tagMethods, attrMethods);
    }
    // Resolve the collected specs into concrete accessors.
    tagmethods = new SAXAccessMethodHash(tagMethods, objclass);
    attrmethods = new SAXAccessMethodHash(attrMethods, objclass);
    if (bodymethodspec != null) {
      bodymethod = new SAXAccessMethod(bodymethodspec, objclass);
    }
    // Clear the temporary side-effect field now that it has been consumed.
    bodymethodspec = null;
    assembleGetters();
  }
}
|
|
package net.qiujuer.genius.graphics;
import android.graphics.Bitmap;
/**
 * Utilities for blurring a {@link Bitmap}.
 * Supported bitmap configurations:
 * {@link android.graphics.Bitmap.Config#RGB_565},
 * {@link android.graphics.Bitmap.Config#ARGB_8888}
 */
@SuppressWarnings("JavaDoc")
public final class Blur {
    /**
     * Validates the blur parameters and the source bitmap.
     *
     * @param original source bitmap; must be non-null, not recycled, and in
     *                 ARGB_8888 or RGB_565 config
     * @param radius   blur radius; must be in [1, 256]
     * @return the validated bitmap (the same instance as {@code original})
     * @throws RuntimeException     on an invalid radius, recycled bitmap, or
     *                              unsupported config
     * @throws NullPointerException if {@code original} is null
     */
    private static Bitmap checkSource(Bitmap original, int radius) {
        // Fix: the error message has always promised radius >= 1, but the old
        // check ("radius < 0") let radius 0 slip through to the native blur.
        if (radius < 1 || radius > 256) {
            throw new RuntimeException("Blur bitmap radius must >= 1 and <=256.");
        }
        // First we should check the original
        if (original == null) {
            throw new NullPointerException("Blur bitmap original isn't null.");
        }
        if (original.isRecycled()) {
            throw new RuntimeException("Blur bitmap can't blur a recycled bitmap.");
        }
        Bitmap.Config config = original.getConfig();
        if (config != Bitmap.Config.ARGB_8888 && config != Bitmap.Config.RGB_565) {
            throw new RuntimeException("Blur bitmap only supported Bitmap.Config.ARGB_8888 and Bitmap.Config.RGB_565.");
        }
        return (original);
    }
    /**
     * StackBlur By Jni Bitmap. Blurs {@code original} in place.
     *
     * @param original Original Image
     * @param radius   Blur radius, [1, 256]; 1 returns the bitmap unblurred
     * @return Image Bitmap (the same instance as {@code original})
     */
    public static Bitmap onStackBlur(Bitmap original, int radius) {
        Bitmap bitmap = checkSource(original, radius);
        // Radius 1 is a no-op; skip the native call entirely.
        if (radius == 1) {
            return bitmap;
        }
        // Jni BitMap Blur
        nativeStackBlurBitmap(bitmap, radius);
        return (bitmap);
    }
    /**
     * StackBlur By Jni Bitmap.
     * The source bitmap is cut into horizontal strips that are blurred one by
     * one, reducing peak memory consumption for large images.
     *
     * @param original Original Image
     * @param radius   Blur radius, [1, 256]
     * @return Image Bitmap (the same instance as {@code original})
     */
    public static Bitmap onStackBlurClip(Bitmap original, int radius) {
        Bitmap bitmap = checkSource(original, radius);
        // Radius 1 is a no-op.
        if (radius == 1) {
            return bitmap;
        }
        int h = bitmap.getHeight();
        int w = bitmap.getWidth();
        // Aim for at most ~256K pixels per processed strip.
        final int clipPixels = 1024 * 256;
        // Fix: use floating-point division. The old "(h * w) / clipPixels"
        // performed integer division and truncated the ratio, so images just
        // under a whole multiple of clipPixels skipped clipping entirely.
        float clipScale = (h * w) / (float) clipPixels;
        // Each strip must be tall enough to blur with this radius plus margin.
        int minLen = radius + radius + 50;
        if (clipScale >= 2) {
            float itemLen = h / clipScale;
            itemLen = itemLen < minLen ? minLen : itemLen;
            clipScale = h / itemLen;
        }
        if (clipScale < 2) {
            // Image is small enough to blur in one native pass.
            nativeStackBlurBitmap(bitmap, radius);
        } else {
            // Cap at the maximum supported part count.
            if (clipScale > 12)
                clipScale = 12;
            // Jni BitMap Blur, strip by strip
            onStackBlurClip(bitmap, radius, (int) clipScale);
        }
        return (bitmap);
    }
    /**
     * StackBlur By Jni Bitmap.
     * The source bitmap is cut into {@code parts} horizontal strips that are
     * blurred one by one, reducing peak memory consumption.
     *
     * @param original Original Image
     * @param radius   Blur radius, [1, 256]
     * @param parts    Blur cut parts count, [2, 12]; each strip must still be
     *                 at least 2*radius pixels tall
     * @return Image Bitmap (the same instance as {@code original})
     */
    public static Bitmap onStackBlurClip(Bitmap original, int radius, int parts) {
        Bitmap bitmap = checkSource(original, radius);
        if (parts < 2 || parts > 12) {
            throw new RuntimeException("Blur bitmap parts must >= 2 and <=12.");
        }
        if (original.getHeight() / parts < radius + radius) {
            throw new RuntimeException("Blur bitmap height/partsCount must > radius+radius values.");
        }
        // Jni BitMap Blur
        nativeStackBlurBitmapClip(bitmap, radius, parts);
        return (bitmap);
    }
    /**
     * StackBlur By Jni Pixels. Blurs the pixel array in place.
     *
     * @param pix Original Image pixels, you can call:
     *            <p>
     *            int w = bitmap.getWidth();
     *            int h = bitmap.getHeight();
     *            int[] pix = new int[w * h];
     *            bitmap.getPixels(pix, 0, w, 0, 0, w, h);
     *            <p>
     *            // Jni Pixels Blur
     *            onStackBlurPixels(pix, w, h, radius);
     *            <p>
     *            bitmap.setPixels(pix, 0, w, 0, 0, w, h);
     *            <p>
     * @param w      image width in pixels
     * @param h      image height in pixels
     * @param radius Blur radius, [1, 256]
     * @return the same {@code pix} array, blurred in place
     */
    public static int[] onStackBlurPixels(int[] pix, int w, int h, int radius) {
        // Fix: match the documented contract (radius >= 1), as in checkSource.
        if (radius < 1 || radius > 256) {
            throw new RuntimeException("Blur bitmap radius must >= 1 and <=256.");
        }
        if (pix == null) {
            throw new RuntimeException("Blur bitmap pix isn't null.");
        }
        if (pix.length < w * h) {
            throw new RuntimeException("Blur bitmap pix length must >= w * h.");
        }
        // Jni Pixels Blur
        nativeStackBlurPixels(pix, w, h, radius);
        return (pix);
    }
    /**
     * StackBlur By Java Bitmap - pure-Java fallback, blurs in place.
     *
     * @param original Original Image
     * @param radius   Blur radius, [1, 256]; 1 returns the bitmap unblurred
     * @return Image Bitmap (the same instance as {@code original})
     */
    public static Bitmap onStackBlurJava(Bitmap original, int radius) {
        // Stack Blur v1.0 from
        // http://www.quasimondo.com/StackBlurForCanvas/StackBlurDemo.html
        //
        // Java Author: Mario Klingemann <mario at quasimondo.com>
        // http://incubator.quasimondo.com
        // created Feburary 29, 2004
        // Android port : Yahel Bouaziz <yahel at kayenko.com>
        // http://www.kayenko.com
        // ported april 5th, 2012
        // This is a compromise between Gaussian Blur and Box blur
        // It creates much better looking blurs than Box Blur, but is
        // 7x faster than my Gaussian Blur implementation.
        //
        // I called it Stack Blur because this describes best how this
        // filter works internally: it creates a kind of moving stack
        // of colors whilst scanning through the image. Thereby it
        // just has to add one new block of color to the right side
        // of the stack and remove the leftmost color. The remaining
        // colors on the topmost layer of the stack are either added on
        // or reduced by one, depending on if they are on the right or
        // on the left side of the stack.
        //
        // If you are using this algorithm in your code please add
        // the following line:
        //
        // Stack Blur Algorithm by Mario Klingemann <[email protected]>
        Bitmap bitmap = checkSource(original, radius);
        // Return this none blur
        if (radius == 1) {
            return bitmap;
        }
        int w = bitmap.getWidth();
        int h = bitmap.getHeight();
        int[] pix = new int[w * h];
        // get array
        bitmap.getPixels(pix, 0, w, 0, 0, w, h);
        // run Blur
        int wm = w - 1;
        int hm = h - 1;
        int wh = w * h;
        int div = radius + radius + 1;
        // Per-pixel channel buffers for the intermediate horizontal pass.
        short r[] = new short[wh];
        short g[] = new short[wh];
        short b[] = new short[wh];
        int rSum, gSum, bSum, x, y, i, p, yp, yi, yw;
        int vMin[] = new int[Math.max(w, h)];
        // Precomputed division table: dv[v] == v / divSum without per-pixel division.
        int divSum = (div + 1) >> 1;
        divSum *= divSum;
        short dv[] = new short[256 * divSum];
        for (i = 0; i < 256 * divSum; i++) {
            dv[i] = (short) (i / divSum);
        }
        yw = yi = 0;
        // The moving "stack" of the last div pixels' RGB values.
        int[][] stack = new int[div][3];
        int stackPointer;
        int stackStart;
        int[] sir;
        int rbs;
        int r1 = radius + 1;
        int routSum, goutSum, boutSum;
        int rinSum, ginSum, binSum;
        // Horizontal pass: blur each row into the r/g/b buffers.
        for (y = 0; y < h; y++) {
            rinSum = ginSum = binSum = routSum = goutSum = boutSum = rSum = gSum = bSum = 0;
            // Prime the stack with the edge-clamped window around x == 0.
            for (i = -radius; i <= radius; i++) {
                p = pix[yi + Math.min(wm, Math.max(i, 0))];
                sir = stack[i + radius];
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);
                rbs = r1 - Math.abs(i);
                rSum += sir[0] * rbs;
                gSum += sir[1] * rbs;
                bSum += sir[2] * rbs;
                if (i > 0) {
                    rinSum += sir[0];
                    ginSum += sir[1];
                    binSum += sir[2];
                } else {
                    routSum += sir[0];
                    goutSum += sir[1];
                    boutSum += sir[2];
                }
            }
            stackPointer = radius;
            for (x = 0; x < w; x++) {
                r[yi] = dv[rSum];
                g[yi] = dv[gSum];
                b[yi] = dv[bSum];
                rSum -= routSum;
                gSum -= goutSum;
                bSum -= boutSum;
                stackStart = stackPointer - radius + div;
                sir = stack[stackStart % div];
                routSum -= sir[0];
                goutSum -= sir[1];
                boutSum -= sir[2];
                if (y == 0) {
                    vMin[x] = Math.min(x + radius + 1, wm);
                }
                p = pix[yw + vMin[x]];
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);
                rinSum += sir[0];
                ginSum += sir[1];
                binSum += sir[2];
                rSum += rinSum;
                gSum += ginSum;
                bSum += binSum;
                stackPointer = (stackPointer + 1) % div;
                sir = stack[(stackPointer) % div];
                routSum += sir[0];
                goutSum += sir[1];
                boutSum += sir[2];
                rinSum -= sir[0];
                ginSum -= sir[1];
                binSum -= sir[2];
                yi++;
            }
            yw += w;
        }
        // Vertical pass: blur each column of the intermediate buffers back into pix.
        for (x = 0; x < w; x++) {
            rinSum = ginSum = binSum = routSum = goutSum = boutSum = rSum = gSum = bSum = 0;
            yp = -radius * w;
            for (i = -radius; i <= radius; i++) {
                yi = Math.max(0, yp) + x;
                sir = stack[i + radius];
                sir[0] = r[yi];
                sir[1] = g[yi];
                sir[2] = b[yi];
                rbs = r1 - Math.abs(i);
                rSum += r[yi] * rbs;
                gSum += g[yi] * rbs;
                bSum += b[yi] * rbs;
                if (i > 0) {
                    rinSum += sir[0];
                    ginSum += sir[1];
                    binSum += sir[2];
                } else {
                    routSum += sir[0];
                    goutSum += sir[1];
                    boutSum += sir[2];
                }
                if (i < hm) {
                    yp += w;
                }
            }
            yi = x;
            stackPointer = radius;
            for (y = 0; y < h; y++) {
                // Preserve alpha channel: ( 0xff000000 & pix[yi] )
                pix[yi] = (0xff000000 & pix[yi]) | (dv[rSum] << 16) | (dv[gSum] << 8) | dv[bSum];
                rSum -= routSum;
                gSum -= goutSum;
                bSum -= boutSum;
                stackStart = stackPointer - radius + div;
                sir = stack[stackStart % div];
                routSum -= sir[0];
                goutSum -= sir[1];
                boutSum -= sir[2];
                if (x == 0) {
                    vMin[y] = Math.min(y + r1, hm) * w;
                }
                p = x + vMin[y];
                sir[0] = r[p];
                sir[1] = g[p];
                sir[2] = b[p];
                rinSum += sir[0];
                ginSum += sir[1];
                binSum += sir[2];
                rSum += rinSum;
                gSum += ginSum;
                bSum += binSum;
                stackPointer = (stackPointer + 1) % div;
                sir = stack[stackPointer];
                routSum += sir[0];
                goutSum += sir[1];
                boutSum += sir[2];
                rinSum -= sir[0];
                ginSum -= sir[1];
                binSum -= sir[2];
                yi += w;
            }
        }
        // set Bitmap
        bitmap.setPixels(pix, 0, w, 0, 0, w, h);
        return (bitmap);
    }
    static {
        System.loadLibrary("genius_graphics");
    }
    /**
     * Blur Image By Pixels
     *
     * @param pixels Img pixel array
     * @param w      Img width
     * @param h      Img height
     * @param r      Blur radius
     * @hide
     */
    private static native void nativeStackBlurPixels(int[] pixels, int w, int h, int r);
    /**
     * Blur Image By Bitmap
     *
     * @param bitmap Img Bitmap
     * @param r      Blur radius
     * @hide
     */
    private static native void nativeStackBlurBitmap(Bitmap bitmap, int r);
    /**
     * Blur image by bitmap,
     * in this we will cut the source bitmap to some parts.
     * We'll deal(blur) with it one by one. This will reduce the memory consumption.
     *
     * @param bitmap Img Bitmap
     * @param r      Blur radius
     * @param parts  Cut the image to parts by the vertical orientation
     * @hide
     */
    private static native void nativeStackBlurBitmapClip(Bitmap bitmap, int r, int parts);
}
|
|
/*
(c) Copyright 2002, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
[See end of file]
$Id: DBReifier.java,v 1.1 2009/06/29 08:55:37 castagna Exp $
*/
package com.hp.hpl.jena.db.impl;
import com.hp.hpl.jena.db.*;
/**
* Implementation of Reifier for graphs stored in a database.
*
* @author csayers based in part on SimpleReifier by kers.
*/
import java.util.List;
import com.hp.hpl.jena.graph.*;
import com.hp.hpl.jena.util.iterator.*;
import com.hp.hpl.jena.shared.*;
public class DBReifier implements Reifier
{
    // Parent graph whose reified triples this reifier exposes.
    protected GraphRDB m_parent = null;
    // Lazily-built view over the hidden reifiers; see getReificationTriples().
    protected Graph m_hiddenTriples = null;
    protected List<SpecializedGraphReifier> m_reifiers = null;
    protected List<SpecializedGraphReifier> m_hidden_reifiers = null;
    // For now, we just deal with a single specializedGraphReifier,
    // but in the future we could replace this with a list of
    // those and operate much as the GraphRDB implementation
    // does with its list of SpecializedGraphs.
    protected SpecializedGraphReifier m_reifier = null;
    protected ReificationStyle m_style;
    /**
     * Construct a reifier for GraphRDB's.
     *
     * @param parent the Graph for which we will expose reified triples.
     * @param style the reification style governing visibility of reified triples.
     * @param allReifiers a List of SpecializedGraphReifiers which reifiy triples in that graph.
     * @param hiddenReifiers the subset of allReifiers whose triples are hidden when querying the parent graph.
     * @throws BrokenException if allReifiers does not contain exactly one element.
     */
    public DBReifier(GraphRDB parent, ReificationStyle style,
                List<SpecializedGraphReifier> allReifiers,
                List<SpecializedGraphReifier> hiddenReifiers ) {
        m_parent = parent;
        m_reifiers = allReifiers;
        m_hidden_reifiers = hiddenReifiers;
        m_style = style;
        // For now, just take the first specializedGraphReifier
        if (m_reifiers.size() != 1)
            throw new BrokenException("Internal error - DBReifier requires exactly one SpecializedGraphReifier");
        m_reifier = m_reifiers.get(0);
    }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#getParentGraph()
     */
    public Graph getParentGraph() {
        return m_parent; }
    public ReificationStyle getStyle()
        { return m_style; }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#getHiddenTriples()
     */
    // NOTE(review): lazy initialisation is unsynchronized - presumed
    // single-threaded use; confirm before sharing a DBReifier across threads.
    private Graph getReificationTriples() {
        if( m_hiddenTriples == null)
            m_hiddenTriples = new DBReifierGraph(m_parent, m_hidden_reifiers);
        return m_hiddenTriples;
    }
    public ExtendedIterator<Triple> find( TripleMatch m )
        { return getReificationTriples().find( m ); }
    public ExtendedIterator<Triple> findExposed( TripleMatch m )
        { return getReificationTriples().find( m ); }
    // Answers matches only when the requested visibility agrees with the
    // style's concealment; otherwise yields the empty iterator.
    public ExtendedIterator<Triple> findEither( TripleMatch m, boolean showHidden )
        { return showHidden == m_style.conceals() ? getReificationTriples().find( m ) : Triple.None; }
    public int size()
        { return m_style.conceals() ? 0 : getReificationTriples().size(); }
    /**
        Utility method useful for its short name: answer a new CompletionFlag
        initialised to false.
    */
    protected static SpecializedGraph.CompletionFlag newComplete()
        { return new SpecializedGraph.CompletionFlag(); }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#reifyAs(com.hp.hpl.jena.graph.Node, com.hp.hpl.jena.graph.Triple)
     */
    public Node reifyAs( Node n, Triple t ) {
        m_reifier.add( n, t, newComplete() );
        return n;
    }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#hasTriple(com.hp.hpl.jena.graph.Node)
     */
    public boolean hasTriple(Node n) {
        return m_reifier.findReifiedTriple( n, newComplete() ) != null;
    }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#hasTriple(com.hp.hpl.jena.graph.Triple)
     */
    public boolean hasTriple( Triple t ) {
        return m_reifier.findReifiedNodes(t, newComplete() ).hasNext();
    }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#allNodes()
     */
    public ExtendedIterator<Node> allNodes() {
        return m_reifier.findReifiedNodes( null, newComplete() );
    }
    /**
        All the nodes reifying triple <code>t</code>, using the matching code
        from SimpleReifier.
    */
    public ExtendedIterator<Node> allNodes( Triple t )
        { return m_reifier.findReifiedNodes( t, newComplete() ); }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#remove(com.hp.hpl.jena.graph.Node, com.hp.hpl.jena.graph.Triple)
     */
    public void remove( Node n, Triple t ) {
        m_reifier.delete( n, t, newComplete() );
    }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#remove(com.hp.hpl.jena.graph.Triple)
     */
    public void remove( Triple t ) {
        m_reifier.delete(null,t, newComplete() );
    }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#handledAdd(com.hp.hpl.jena.graph.Triple)
     */
    public boolean handledAdd(Triple t) {
        SpecializedGraph.CompletionFlag complete = newComplete();
        m_reifier.add(t, complete);
        return complete.isDone();
    }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.Reifier#handledRemove(com.hp.hpl.jena.graph.Triple)
     */
    public boolean handledRemove(Triple t) {
        SpecializedGraph.CompletionFlag complete = newComplete();
        m_reifier.delete(t, complete);
        return complete.isDone();
    }
    /* (non-Javadoc)
     * @see com.hp.hpl.jena.graph.GetTriple#getTriple(com.hp.hpl.jena.graph.Node)
     */
    public Triple getTriple(Node n) {
        return m_reifier.findReifiedTriple(n, newComplete() );
    }
    public void close() {
        // TODO anything useful for a close operation
    }
}
/*
(c) Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
|
|
package de.halfreal.spezi.gdx.framework;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.ui.WidgetGroup;
/**
 * Static helpers for positioning scene2d {@link Actor}s relative to one
 * another or to the screen bounds defined by {@link AbstractScreen}.
 * Every method mutates and returns the supplied actor for call chaining.
 */
public class RelativeLayout {
	/** Places {@code actor} with its bottom edge on the top edge of {@code group}. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignAbove(
			ASSIGNEE actor, RELATIVE group) {
		return alignAbove(actor, group, 0f);
	}
	/** Places {@code actor} {@code yOffset} above the top edge of {@code group}. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignAbove(
			ASSIGNEE actor, RELATIVE group, float yOffset) {
		float groupTop = group.getY() + group.getHeight();
		actor.setY(groupTop + yOffset);
		return actor;
	}
	/** Places {@code actor} with its top edge on the bottom edge of {@code group}. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignBelow(
			ASSIGNEE actor, RELATIVE group) {
		return alignBelow(actor, group, 0f);
	}
	/** Places {@code actor} {@code yOffset} below the bottom edge of {@code group}. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignBelow(
			ASSIGNEE actor, RELATIVE group, float yOffset) {
		float belowGroup = group.getY() - actor.getHeight() - yOffset;
		actor.setY(belowGroup);
		return actor;
	}
	/** Aligns {@code actor} to the bottom of its coordinate system. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignBottom(
			ASSIGNEE actor, RELATIVE group) {
		return alignBottom(actor, group, 0f);
	}
	/**
	 * Sets {@code actor}'s y to {@code yOffset}. Note the {@code group}
	 * parameter is currently unused: alignment is relative to the parent's
	 * bottom edge (y == 0), matching the original behaviour.
	 */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignBottom(
			ASSIGNEE actor, RELATIVE group, float yOffset) {
		actor.setY(yOffset);
		return actor;
	}
	/** Centers {@code actor} within the bounds of {@code group} (both axes). */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignCenter(
			ASSIGNEE actor, RELATIVE group) {
		float centeredX = (group.getWidth() - actor.getWidth()) / 2f;
		float centeredY = (group.getHeight() - actor.getHeight()) / 2f;
		actor.setPosition(centeredX, centeredY);
		return actor;
	}
	/**
	 * Centers {@code actor} horizontally on {@code group}: within the group's
	 * local bounds when the group is the actor's parent, otherwise on the
	 * group's position in the shared coordinate system.
	 */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignCenterHorizontal(
			ASSIGNEE actor, RELATIVE group) {
		float half = group.getWidth() / 2f - actor.getWidth() / 2f;
		if (actor.getParent() == group) {
			actor.setX(half);
		} else {
			actor.setX(group.getX() + half);
		}
		return actor;
	}
	/** Centers {@code actor} on {@code group}'s absolute position (both axes). */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignCenterOf(
			ASSIGNEE actor, RELATIVE group) {
		float centerX = group.getX() + group.getWidth() / 2f - actor.getWidth() / 2f;
		float centerY = group.getY() + group.getHeight() / 2f - actor.getHeight() / 2f;
		actor.setPosition(centerX, centerY);
		return actor;
	}
	/** Centers {@code actor} vertically within {@code group}'s local height. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignCenterVertical(
			ASSIGNEE actor, RELATIVE group) {
		actor.setY(group.getHeight() / 2f - actor.getHeight() / 2f);
		return actor;
	}
	/** Aligns {@code actor}'s left edge with {@code group}'s left edge. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignLeft(
			ASSIGNEE actor, RELATIVE group) {
		return alignLeft(actor, group, 0f);
	}
	/**
	 * Add Actor to group before layouting!
	 *
	 * Aligns {@code actor}'s left edge {@code xOffset} from {@code group}'s
	 * left edge; when the group is not the actor's parent the group's own x
	 * is added in.
	 *
	 * @param actor
	 * @param group
	 * @param xOffset
	 * @return the actor, for chaining
	 */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignLeft(
			ASSIGNEE actor, RELATIVE group, float xOffset) {
		// TODO change aligns in all methods to support inside / outside group
		// alignment
		float targetX = xOffset;
		if (actor.getParent() != group) {
			targetX += group.getX();
		}
		actor.setX(targetX);
		return actor;
	}
	/** Places {@code actor} with its right edge on {@code group}'s left edge. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignLeftOf(
			ASSIGNEE actor, RELATIVE group) {
		return alignLeftOf(actor, group, 0f);
	}
	/** Places {@code actor} {@code xOffset} to the left of {@code group}. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignLeftOf(
			ASSIGNEE actor, RELATIVE group, float xOffset) {
		float leftOfGroup = group.getX() - actor.getWidth() - xOffset;
		actor.setX(leftOfGroup);
		return actor;
	}
	/** Aligns {@code actor}'s right edge with {@code group}'s local right edge. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignRight(
			ASSIGNEE actor, RELATIVE group) {
		return alignRight(actor, group, 0f);
	}
	/** Aligns {@code actor} {@code xOffset} in from {@code group}'s local right edge. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignRight(
			ASSIGNEE actor, RELATIVE group, float xOffset) {
		float insetFromRight = group.getWidth() - actor.getWidth() - xOffset;
		actor.setX(insetFromRight);
		return actor;
	}
	/** Places {@code actor} with its left edge on {@code group}'s right edge. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignRightOf(
			ASSIGNEE actor, RELATIVE group) {
		return alignRightOf(actor, group, 0f);
	}
	/** Places {@code actor} {@code xOffset} to the right of {@code group}. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignRightOf(
			ASSIGNEE actor, RELATIVE group, float xOffset) {
		float rightOfGroup = group.getX() + group.getWidth() + xOffset;
		actor.setX(rightOfGroup);
		return actor;
	}
	/** Moves {@code actor} to exactly {@code group}'s position. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignSame(
			ASSIGNEE actor, RELATIVE group) {
		actor.setPosition(group.getX(), group.getY());
		return actor;
	}
	/** Aligns {@code actor}'s top edge with {@code group}'s local top edge. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignTop(
			ASSIGNEE actor, RELATIVE group) {
		return alignTop(actor, group, 0f);
	}
	/** Aligns {@code actor} {@code yOffset} down from {@code group}'s local top edge. */
	public static <RELATIVE extends Actor, ASSIGNEE extends Actor> ASSIGNEE alignTop(
			ASSIGNEE actor, RELATIVE group, float yOffset) {
		float insetFromTop = group.getHeight() - actor.getHeight() - yOffset;
		actor.setY(insetFromTop);
		return actor;
	}
	/** Centers {@code actor} on the screen (both axes). */
	public static <ASSIGNEE extends Actor> ASSIGNEE center(ASSIGNEE actor) {
		float screenCenterX = AbstractScreen.WIDTH / 2f - actor.getWidth() / 2f;
		float screenCenterY = AbstractScreen.HEIGHT / 2f - actor.getHeight() / 2f;
		actor.setPosition(screenCenterX, screenCenterY);
		return actor;
	}
	/** Centers {@code actor} horizontally on the screen. */
	public static <ASSIGNEE extends Actor> ASSIGNEE centerHorizontal(
			ASSIGNEE actor) {
		actor.setX(AbstractScreen.WIDTH / 2f - actor.getWidth() / 2f);
		return actor;
	}
	/** Centers {@code actor} horizontally on {@code relative}'s absolute position. */
	public static <ASSIGNEE extends Actor, RELATIVE extends Actor> ASSIGNEE centerHorizontal(
			ASSIGNEE actor, RELATIVE relative) {
		float relativeCenter = relative.getX() + relative.getWidth() / 2;
		actor.setX(relativeCenter - actor.getWidth() / 2f);
		return actor;
	}
	/** Centers {@code actor} vertically on the screen. */
	public static <ASSIGNEE extends Actor> ASSIGNEE centerVertical(
			ASSIGNEE actor) {
		actor.setY(AbstractScreen.HEIGHT / 2f - actor.getHeight() / 2f);
		return actor;
	}
	/**
	 * Builds a {@link WidgetGroup} of the given fixed size (reported as its
	 * preferred size) containing the supplied actors, if any.
	 */
	public static WidgetGroup group(final float width, final float height,
			Actor... actors) {
		WidgetGroup container = new WidgetGroup() {
			@Override
			public float getPrefHeight() {
				return height;
			}
			@Override
			public float getPrefWidth() {
				return width;
			}
		};
		container.setWidth(width);
		container.setHeight(height);
		if (actors != null) {
			for (Actor child : actors) {
				container.addActor(child);
			}
		}
		return container;
	}
	/** Shifts {@code actor} up by {@code margin} (away from the bottom edge). */
	public static <ASSIGNEE extends Actor> ASSIGNEE marginBottom(
			ASSIGNEE actor, float margin) {
		actor.setY(actor.getY() + margin);
		return actor;
	}
	/** Shifts each actor up by {@code margin}. */
	public static void marginBottom(float margin, Actor... actors) {
		for (Actor each : actors) {
			marginBottom(each, margin);
		}
	}
	/** Shifts {@code actor} right by {@code margin} (away from the left edge). */
	public static <ASSIGNEE extends Actor> ASSIGNEE marginLeft(ASSIGNEE actor,
			float margin) {
		actor.setX(actor.getX() + margin);
		return actor;
	}
	/** Shifts each actor right by {@code margin}. */
	public static void marginLeft(float margin, Actor... actors) {
		for (Actor each : actors) {
			marginLeft(each, margin);
		}
	}
	/** Shifts {@code actor} left by {@code margin} (away from the right edge). */
	public static <ASSIGNEE extends Actor> ASSIGNEE marginRight(ASSIGNEE actor,
			float margin) {
		return marginLeft(actor, -margin);
	}
	/** Shifts each actor left by {@code margin}. */
	public static void marginRight(float margin, Actor... actors) {
		marginLeft(-margin, actors);
	}
	/** Shifts {@code actor} down by {@code margin} (away from the top edge). */
	public static <ASSIGNEE extends Actor> ASSIGNEE marginTop(ASSIGNEE actor,
			float margin) {
		return marginBottom(actor, -margin);
	}
	/** Shifts each actor down by {@code margin}. */
	public static void marginTop(float margin, Actor... actors) {
		marginBottom(-margin, actors);
	}
	/** Offsets {@code actor} by {@code padX}/{@code padY} from its current position. */
	public static <ASSIGNEE extends Actor> ASSIGNEE pad(ASSIGNEE actor,
			float padX, float padY) {
		actor.setPosition(actor.getX() + padX, actor.getY() + padY);
		return actor;
	}
	/** Aligns {@code actor} flush with the right screen edge. */
	public static <ASSIGNEE extends Actor> ASSIGNEE right(ASSIGNEE actor) {
		return right(actor, 0f);
	}
	/** Aligns {@code actor} {@code rightMargin} in from the right screen edge. */
	public static <ASSIGNEE extends Actor> ASSIGNEE right(ASSIGNEE actor,
			float rightMargin) {
		actor.setX(AbstractScreen.WIDTH - actor.getWidth() - rightMargin);
		return actor;
	}
	/** Aligns {@code actor} flush with the top screen edge. */
	public static <ASSIGNEE extends Actor> ASSIGNEE top(ASSIGNEE actor) {
		return top(actor, 0f);
	}
	/** Aligns {@code actor} {@code topPadding} down from the top screen edge. */
	public static <ASSIGNEE extends Actor> ASSIGNEE top(ASSIGNEE actor,
			float topPadding) {
		actor.setY(AbstractScreen.HEIGHT - actor.getHeight() - topPadding);
		return actor;
	}
}
|
|
package org.apache.maven.project;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.maven.RepositoryUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.DependencyManagement;
import org.apache.maven.model.Exclusion;
import org.apache.maven.model.InputLocation;
import org.apache.maven.model.InputSource;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.util.StringUtils;
import org.sonatype.aether.RepositorySystem;
import org.sonatype.aether.RepositorySystemSession;
import org.sonatype.aether.RequestTrace;
import org.sonatype.aether.artifact.ArtifactType;
import org.sonatype.aether.artifact.ArtifactTypeRegistry;
import org.sonatype.aether.collection.CollectRequest;
import org.sonatype.aether.collection.DependencyCollectionException;
import org.sonatype.aether.graph.DependencyFilter;
import org.sonatype.aether.graph.DependencyNode;
import org.sonatype.aether.graph.DependencyVisitor;
import org.sonatype.aether.resolution.ArtifactResult;
import org.sonatype.aether.resolution.DependencyRequest;
import org.sonatype.aether.util.DefaultRequestTrace;
import org.sonatype.aether.util.artifact.ArtifacIdUtils;
import org.sonatype.aether.util.artifact.ArtifactProperties;
import org.sonatype.aether.util.artifact.JavaScopes;
/**
 * Resolves the transitive dependencies of a {@link MavenProject} through the
 * repository system: builds a {@link CollectRequest} from the project's
 * (possibly pre-resolved) dependencies and dependency management, collects the
 * dependency graph, and then resolves the corresponding artifacts. Errors are
 * reported via {@link DependencyResolutionException}, which still carries the
 * partially populated result for best-effort consumers.
 *
 * @author Benjamin Bentmann
 */
@Component( role = ProjectDependenciesResolver.class )
public class DefaultProjectDependenciesResolver
    implements ProjectDependenciesResolver
{
    @Requirement
    private Logger logger;
    @Requirement
    private RepositorySystem repoSystem;
    /**
     * Collects and resolves the dependencies of the project in the given request.
     *
     * @param request The resolution request, must not be {@code null}.
     * @return The resolution result, never {@code null}; also embedded in any thrown exception.
     * @throws DependencyResolutionException If the dependency graph could not be collected
     *             or any of the requested artifacts could not be resolved.
     */
    public DependencyResolutionResult resolve( DependencyResolutionRequest request )
        throws DependencyResolutionException
    {
        RequestTrace trace = DefaultRequestTrace.newChild( null, request );
        DefaultDependencyResolutionResult result = new DefaultDependencyResolutionResult();
        MavenProject project = request.getMavenProject();
        RepositorySystemSession session = request.getRepositorySession();
        DependencyFilter filter = request.getResolutionFilter();
        ArtifactTypeRegistry stereotypes = session.getArtifactTypeRegistry();
        CollectRequest collect = new CollectRequest();
        collect.setRequestContext( "project" );
        collect.setRepositories( project.getRemoteProjectRepositories() );
        // Two population strategies: if the project has no pre-resolved dependency
        // artifacts, collect straight from the model's dependency declarations ...
        if ( project.getDependencyArtifacts() == null )
        {
            for ( Dependency dependency : project.getDependencies() )
            {
                if ( StringUtils.isEmpty( dependency.getGroupId() ) || StringUtils.isEmpty( dependency.getArtifactId() )
                    || StringUtils.isEmpty( dependency.getVersion() ) )
                {
                    // guard against case where best-effort resolution for invalid models is requested
                    continue;
                }
                collect.addDependency( RepositoryUtils.toDependency( dependency, stereotypes ) );
            }
        }
        // ... otherwise honor the already-resolved artifacts, but re-associate each
        // with its model declaration so exclusions are not lost.
        else
        {
            // Index model dependencies by versionless id so artifacts can be matched back.
            Map<String, Dependency> dependencies = new HashMap<String, Dependency>();
            for ( Dependency dependency : project.getDependencies() )
            {
                String classifier = dependency.getClassifier();
                if ( classifier == null )
                {
                    // Fall back to the classifier implied by the dependency's type stereotype.
                    ArtifactType type = stereotypes.get( dependency.getType() );
                    if ( type != null )
                    {
                        classifier = type.getClassifier();
                    }
                }
                String key =
                    ArtifacIdUtils.toVersionlessId( dependency.getGroupId(), dependency.getArtifactId(),
                                                    dependency.getType(), classifier );
                dependencies.put( key, dependency );
            }
            for ( Artifact artifact : project.getDependencyArtifacts() )
            {
                String key = artifact.getDependencyConflictId();
                Dependency dependency = dependencies.get( key );
                // dependency may be null for artifacts injected outside the model (no exclusions then).
                Collection<Exclusion> exclusions = dependency != null ? dependency.getExclusions() : null;
                org.sonatype.aether.graph.Dependency dep = RepositoryUtils.toDependency( artifact, exclusions );
                if ( !JavaScopes.SYSTEM.equals( dep.getScope() ) && dep.getArtifact().getFile() != null )
                {
                    // enable re-resolution: drop the cached file and revert to the base
                    // version so the repository system resolves the artifact afresh
                    org.sonatype.aether.artifact.Artifact art = dep.getArtifact();
                    art = art.setFile( null ).setVersion( art.getBaseVersion() );
                    dep = dep.setArtifact( art );
                }
                collect.addDependency( dep );
            }
        }
        // Propagate dependency management so transitive dependencies are managed too.
        DependencyManagement depMngt = project.getDependencyManagement();
        if ( depMngt != null )
        {
            for ( Dependency dependency : depMngt.getDependencies() )
            {
                collect.addManagedDependency( RepositoryUtils.toDependency( dependency, stereotypes ) );
            }
        }
        DependencyRequest depRequest = new DependencyRequest( collect, filter );
        depRequest.setTrace( trace );
        DependencyNode node;
        try
        {
            collect.setTrace( DefaultRequestTrace.newChild( trace, depRequest ) );
            node = repoSystem.collectDependencies( session, collect ).getRoot();
            result.setDependencyGraph( node );
        }
        catch ( DependencyCollectionException e )
        {
            // Keep whatever partial graph was collected so callers can still inspect it.
            result.setDependencyGraph( e.getResult().getRoot() );
            result.setCollectionErrors( e.getResult().getExceptions() );
            throw new DependencyResolutionException( result, "Could not resolve dependencies for project "
                + project.getId() + ": " + e.getMessage(), e );
        }
        depRequest.setRoot( node );
        // Surface relocations of direct dependencies, which users frequently miss.
        if ( logger.isWarnEnabled() )
        {
            for ( DependencyNode child : node.getChildren() )
            {
                if ( !child.getRelocations().isEmpty() )
                {
                    logger.warn( "The artifact " + child.getRelocations().get( 0 ) + " has been relocated to "
                        + child.getDependency().getArtifact() );
                }
            }
        }
        if ( logger.isDebugEnabled() )
        {
            node.accept( new GraphLogger( project ) );
        }
        try
        {
            process( result, repoSystem.resolveDependencies( session, depRequest ).getArtifactResults() );
        }
        catch ( org.sonatype.aether.resolution.DependencyResolutionException e )
        {
            // Record the partial results before rethrowing (best-effort resolution).
            process( result, e.getResult().getArtifactResults() );
            throw new DependencyResolutionException( result, "Could not resolve dependencies for project "
                + project.getId() + ": " + e.getMessage(), e );
        }
        return result;
    }
    /**
     * Transfers per-artifact resolution outcomes into the result: resolved artifacts
     * become resolved dependencies, failed ones are recorded with their exceptions.
     */
    private void process( DefaultDependencyResolutionResult result, Collection<ArtifactResult> results )
    {
        for ( ArtifactResult ar : results )
        {
            DependencyNode node = ar.getRequest().getDependencyNode();
            if ( ar.isResolved() )
            {
                result.addResolvedDependency( node.getDependency() );
            }
            else
            {
                result.setResolutionErrors( node.getDependency(), ar.getExceptions() );
            }
        }
    }
    /**
     * Debug-level visitor that logs the dependency tree, annotating nodes whose
     * scope or version was changed by dependency management with the model id
     * the management entry came from. Stateful (indent) — not thread-safe.
     */
    class GraphLogger
        implements DependencyVisitor
    {
        private final MavenProject project;
        // Current indentation; grown/shrunk by 3 spaces per tree level.
        private String indent = "";
        // Lazily built index of managed dependencies by management key.
        private Map<String, Dependency> managed;
        public GraphLogger( MavenProject project )
        {
            this.project = project;
        }
        /** Logs one node (or the project itself for the synthetic root) and descends. */
        public boolean visitEnter( DependencyNode node )
        {
            StringBuilder buffer = new StringBuilder( 128 );
            buffer.append( indent );
            org.sonatype.aether.graph.Dependency dep = node.getDependency();
            if ( dep != null )
            {
                org.sonatype.aether.artifact.Artifact art = dep.getArtifact();
                buffer.append( art );
                buffer.append( ':' ).append( dep.getScope() );
                if ( node.getPremanagedScope() != null && !node.getPremanagedScope().equals( dep.getScope() ) )
                {
                    buffer.append( " (scope managed from " ).append( node.getPremanagedScope() );
                    appendManagementSource( buffer, art, "scope" );
                    buffer.append( ")" );
                }
                if ( node.getPremanagedVersion() != null && !node.getPremanagedVersion().equals( art.getVersion() ) )
                {
                    buffer.append( " (version managed from " ).append( node.getPremanagedVersion() );
                    appendManagementSource( buffer, art, "version" );
                    buffer.append( ")" );
                }
            }
            else
            {
                // Root node carries no dependency; print the project coordinates instead.
                buffer.append( project.getGroupId() );
                buffer.append( ':' ).append( project.getArtifactId() );
                buffer.append( ':' ).append( project.getPackaging() );
                buffer.append( ':' ).append( project.getVersion() );
            }
            logger.debug( buffer.toString() );
            indent += "   ";
            return true;
        }
        /** Pops one indentation level (the 3 spaces pushed in visitEnter). */
        public boolean visitLeave( DependencyNode node )
        {
            indent = indent.substring( 0, indent.length() - 3 );
            return true;
        }
        /**
         * Appends " by &lt;modelId&gt;" identifying which model's dependencyManagement
         * supplied the given field ("scope" or "version"), if it can be traced.
         */
        private void appendManagementSource( StringBuilder buffer, org.sonatype.aether.artifact.Artifact artifact,
                                             String field )
        {
            if ( managed == null )
            {
                managed = new HashMap<String, Dependency>();
                if ( project.getDependencyManagement() != null )
                {
                    for ( Dependency dep : project.getDependencyManagement().getDependencies() )
                    {
                        managed.put( dep.getManagementKey(), dep );
                    }
                }
            }
            String key =
                ArtifacIdUtils.toVersionlessId( artifact.getGroupId(), artifact.getArtifactId(),
                                                artifact.getProperty( ArtifactProperties.TYPE, "jar" ),
                                                artifact.getClassifier() );
            Dependency dependency = managed.get( key );
            if ( dependency != null )
            {
                InputLocation location = dependency.getLocation( field );
                if ( location != null )
                {
                    InputSource source = location.getSource();
                    if ( source != null )
                    {
                        buffer.append( " by " ).append( source.getModelId() );
                    }
                }
            }
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.record;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.apache.drill.exec.store.mock.MockSubScanPOP;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.drill.categories.VectorTest;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.util.DrillFileUtils;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
import org.apache.drill.exec.ops.FragmentContextImpl;
import org.apache.drill.exec.ops.OpProfileDef;
import org.apache.drill.exec.ops.OperatorStats;
import org.apache.drill.exec.ops.OperatorUtilities;
import org.apache.drill.exec.physical.PhysicalPlan;
import org.apache.drill.exec.physical.base.FragmentRoot;
import org.apache.drill.exec.physical.base.PhysicalOperator;
import org.apache.drill.exec.physical.impl.ImplCreator;
import org.apache.drill.exec.physical.impl.SimpleRootExec;
import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
import org.apache.drill.exec.pop.PopUnitTestBase;
import org.apache.drill.exec.planner.PhysicalPlanReader;
import org.apache.drill.exec.proto.BitControl;
import org.apache.drill.exec.rpc.UserClientConnection;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.vector.ValueVector;
import org.junit.Test;
import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
import org.apache.drill.shaded.guava.com.google.common.io.Files;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import java.util.List;
/**
 * Tests {@link RecordIterator} against the plan in
 * {@code /record/test_recorditerator.json}, which produces batches of sizes
 * 1, 100, 10, 10000, 1, 1000 (11112 records total). Per {@link #checkValues},
 * every int column is expected to hold {@link Integer#MIN_VALUE} at even
 * positions and {@link Integer#MAX_VALUE} at odd positions.
 */
@Category(VectorTest.class)
public class TestRecordIterator extends PopUnitTestBase {
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestRecordIterator.class);
  DrillConfig c = DrillConfig.create();
  /**
   * Drains the iterator with mark/reset disabled: all values must match,
   * no batches may be cached, and mark()/reset() must throw.
   */
  @Test
  public void testSimpleIterator() throws Throwable {
    final DrillbitContext bitContext = mockDrillbitContext();
    final UserClientConnection connection = Mockito.mock(UserClientConnection.class);
    final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
    final String planStr = Files.asCharSource(DrillFileUtils.getResourceAsFile("/record/test_recorditerator.json"), Charsets.UTF_8).read();
    final PhysicalPlan plan = reader.readPhysicalPlan(planStr);
    final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
    final FragmentContextImpl context = new FragmentContextImpl(bitContext, BitControl.PlanFragment.getDefaultInstance(), connection, registry);
    final List<PhysicalOperator> operatorList = plan.getSortedOperators(false);
    SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) operatorList.iterator().next()));
    RecordBatch singleBatch = exec.getIncoming();
    PhysicalOperator dummyPop = operatorList.iterator().next();
    OpProfileDef def = new OpProfileDef(dummyPop.getOperatorId(), MockSubScanPOP.OPERATOR_TYPE,
        OperatorUtilities.getChildCount(dummyPop));
    OperatorStats stats = exec.getContext().getStats().newOperatorStats(def, exec.getContext().getAllocator());
    // enableMarkAndReset == false: plain forward iteration only.
    RecordIterator iter = new RecordIterator(singleBatch, null, exec.getContext().newOperatorContext(dummyPop, stats), 0, false, null);
    int totalRecords = 0;
    List<ValueVector> vectors = null;
    while (true) {
      iter.next();
      if (iter.finished()) {
        break;
      }
      // Capture the value vectors on the first record; the iterator reuses them.
      if (vectors == null) {
        vectors = Lists.newArrayList();
        for (VectorWrapper<?> vw : iter) {
          vectors.add(vw.getValueVector());
        }
      }
      // The original branched on position parity, but both branches were
      // identical; a single assertion covers both cases.
      assertTrue(checkValues(vectors, iter.getCurrentPosition()));
      totalRecords++;
      // With mark/reset disabled the iterator must never cache batches.
      assertEquals(0, iter.cachedBatches().size());
    }
    assertEquals(11112, totalRecords);
    try {
      iter.mark();
      fail();
    } catch (UnsupportedOperationException e) {
      // expected: mark() is unsupported when caching is disabled
    }
    try {
      iter.reset();
      fail();
    } catch (UnsupportedOperationException e) {
      // expected: reset() is unsupported when caching is disabled
    }
  }
  /**
   * Exercises mark()/reset() across batch boundaries, verifying total/inner
   * record counts, positions, and that mark() releases batches preceding the
   * marked one while close() releases the rest.
   */
  @Test
  public void testMarkResetIterator() throws Throwable {
    final DrillbitContext bitContext = mockDrillbitContext();
    final UserClientConnection connection = Mockito.mock(UserClientConnection.class);
    final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
    final String planStr = Files.asCharSource(DrillFileUtils.getResourceAsFile("/record/test_recorditerator.json"), Charsets.UTF_8).read();
    final PhysicalPlan plan = reader.readPhysicalPlan(planStr);
    final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
    final FragmentContextImpl context = new FragmentContextImpl(bitContext, BitControl.PlanFragment.getDefaultInstance(), connection, registry);
    final List<PhysicalOperator> operatorList = plan.getSortedOperators(false);
    SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) operatorList.iterator().next()));
    RecordBatch singleBatch = exec.getIncoming();
    PhysicalOperator dummyPop = operatorList.iterator().next();
    OpProfileDef def = new OpProfileDef(dummyPop.getOperatorId(), MockSubScanPOP.OPERATOR_TYPE,
        OperatorUtilities.getChildCount(dummyPop));
    OperatorStats stats = exec.getContext().getStats().newOperatorStats(def, exec.getContext().getAllocator());
    RecordIterator iter = new RecordIterator(singleBatch, null, exec.getContext().newOperatorContext(dummyPop, stats), 0, null);
    List<ValueVector> vectors;
    // batch sizes
    // 1, 100, 10, 10000, 1, 1000
    // total = 11112
    // BATCH 1 : 1, starting outerposition: 0
    iter.next();
    assertFalse(iter.finished());
    assertEquals(1, iter.getTotalRecordCount());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(0, iter.getOuterPosition());
    assertEquals(1, iter.cachedBatches().size());
    vectors = Lists.newArrayList();
    for (VectorWrapper<?> vw : iter) {
      vectors.add(vw.getValueVector());
    }
    // mark at position 0
    iter.mark();
    assertTrue(checkValues(vectors, 0));
    // BATCH 2: 100, starting outerposition: 1
    iter.next();
    assertFalse(iter.finished());
    // NOTE: was assertEquals(101, ..., 101) — the accidental delta overload
    // made this assertion nearly vacuous; pin the exact count instead.
    assertEquals(101, iter.getTotalRecordCount());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(100, iter.getInnerRecordCount());
    assertEquals(1, iter.getOuterPosition());
    assertEquals(2, iter.cachedBatches().size());
    for (int i = 0; i < 100; i++) {
      assertTrue(checkValues(vectors, i));
      iter.next();
    }
    // BATCH 3 :10, starting outerposition: 101
    assertFalse(iter.finished());
    assertEquals(111, iter.getTotalRecordCount());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(10, iter.getInnerRecordCount());
    assertEquals(101, iter.getOuterPosition());
    assertEquals(3, iter.cachedBatches().size());
    for (int i = 0; i < 10; i++) {
      assertTrue(checkValues(vectors, i));
      iter.next();
    }
    // BATCH 4 : 10000, starting outerposition: 111
    assertFalse(iter.finished());
    assertEquals(10111, iter.getTotalRecordCount());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(10000, iter.getInnerRecordCount());
    assertEquals(111, iter.getOuterPosition());
    assertEquals(4, iter.cachedBatches().size());
    for (int i = 0; i < 10000; i++) {
      assertTrue(checkValues(vectors, i));
      iter.next();
    }
    // BATCH 5 : 1, starting outerposition: 10111
    assertFalse(iter.finished());
    assertEquals(10112, iter.getTotalRecordCount());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(1, iter.getInnerRecordCount());
    assertEquals(10111, iter.getOuterPosition());
    assertEquals(5, iter.cachedBatches().size());
    assertTrue(checkValues(vectors, 0));
    iter.next();
    // BATCH 6 : 1000, starting outerposition: 10112
    assertFalse(iter.finished());
    assertEquals(11112, iter.getTotalRecordCount());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(1000, iter.getInnerRecordCount());
    assertEquals(10112, iter.getOuterPosition());
    assertEquals(6, iter.cachedBatches().size());
    for (int i = 0; i < 1000; i++) {
      assertTrue(checkValues(vectors, i));
      iter.next();
    }
    assertTrue(iter.finished());
    assertEquals(6, iter.cachedBatches().size());
    // back to batch 1
    iter.reset();
    assertFalse(iter.finished());
    assertEquals(11112, iter.getTotalRecordCount());
    assertEquals(6, iter.cachedBatches().size());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(1, iter.getInnerRecordCount());
    assertTrue(checkValues(vectors, 0));
    iter.next();
    // mark start of batch 2; batches before the mark become releasable
    iter.mark();
    assertFalse(iter.finished());
    assertEquals(11112, iter.getTotalRecordCount());
    assertEquals(5, iter.cachedBatches().size());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(100, iter.getInnerRecordCount());
    for (int i = 0; i < 100; i++) {
      iter.next();
    }
    // mark start of batch 3
    iter.mark();
    assertFalse(iter.finished());
    assertEquals(11112, iter.getTotalRecordCount());
    assertEquals(4, iter.cachedBatches().size());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(10, iter.getInnerRecordCount());
    for (int i = 0; i < 10; i++) {
      iter.next();
    }
    // jump into middle of largest batch #4.
    for (int i = 0; i < 5000; i++) {
      iter.next();
    }
    assertEquals(4, iter.cachedBatches().size());
    iter.mark();
    assertEquals(3, iter.cachedBatches().size());
    for (int i = 0; i < 5000; i++) {
      iter.next();
    }
    // mark start of batch 5
    iter.mark();
    assertFalse(iter.finished());
    assertEquals(11112, iter.getTotalRecordCount());
    assertEquals(2, iter.cachedBatches().size());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(1, iter.getInnerRecordCount());
    // move to last batch
    iter.next();
    // skip to the middle of last batch
    for (int i = 0; i < 500; i++) {
      iter.next();
    }
    assertTrue(checkValues(vectors, 499));
    assertTrue(checkValues(vectors, 500));
    iter.reset();
    assertTrue(checkValues(vectors, 0));
    assertFalse(iter.finished());
    assertEquals(11112, iter.getTotalRecordCount());
    assertEquals(2, iter.cachedBatches().size());
    assertEquals(0, iter.getCurrentPosition());
    assertEquals(1, iter.getInnerRecordCount());
    // move to last batch
    iter.next();
    assertEquals(0, iter.getCurrentPosition());
    for (int i = 0; i < 500; i++) {
      iter.next();
    }
    // This should free 5th batch.
    iter.mark();
    assertFalse(iter.finished());
    assertEquals(11112, iter.getTotalRecordCount());
    assertEquals(1, iter.cachedBatches().size());
    assertEquals(500, iter.getCurrentPosition());
    assertEquals(1000, iter.getInnerRecordCount());
    // go to the end of iterator
    for (int i = 0; i < 500; i++) {
      iter.next();
    }
    assertTrue(iter.finished());
    iter.reset();
    assertFalse(iter.finished());
    assertEquals(11112, iter.getTotalRecordCount());
    assertEquals(1, iter.cachedBatches().size());
    assertEquals(500, iter.getCurrentPosition());
    assertEquals(1000, iter.getInnerRecordCount());
    // close() must release every remaining cached batch
    iter.close();
    assertEquals(0, iter.cachedBatches().size());
  }
  /**
   * Checks that every vector holds the expected sentinel at {@code position}:
   * Integer.MIN_VALUE at even positions, Integer.MAX_VALUE at odd ones.
   *
   * @return true iff all vectors carry the expected int value
   */
  private static boolean checkValues(List<ValueVector> vectors, int position) {
    boolean result = true;
    final int expected = (position % 2 == 0)? Integer.MIN_VALUE : Integer.MAX_VALUE;
    for (ValueVector vv : vectors) {
      final Object o = vv.getAccessor().getObject(position);
      if (o instanceof Integer) {
        final Integer v = (Integer)o;
        result &= (v == expected);
      } else {
        // guard against NPE when the accessor yields null
        logger.error("Found wrong type {} at position {}", (o == null ? "null" : o.getClass()), position);
        result = false;
        break;
      }
    }
    return result;
  }
}
|
// (removed: non-code residue — dataset-viewer pagination text accidentally appended to the file)