/* Copyright 2009 NEERC team Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // $Id$ /** * Date: 24.10.2005 */ package ru.ifmo.neerc.chat.client; import javax.swing.*; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.table.AbstractTableModel; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.TableCellRenderer; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * @author Matvey Kazakov */ public class MyJTable<T> extends JPanel { private List<T> data; private MyJTableModel tableModel; private MyJTableScrollBar scrollBar; private JTable table; private TableCellRenderer cellRenderer = new DefaultTableCellRenderer(); private boolean appendToTheEnd = false; public MyJTable(List<T> data) { super(new BorderLayout()); this.data = data; tableModel = new MyJTableModel(); table = new JTable(tableModel); table.setShowGrid(false); table.setRowHeight(24); scrollBar = new MyJTableScrollBar(); add(table, BorderLayout.CENTER); add(scrollBar, BorderLayout.EAST); addComponentListener(new ComponentAdapter() { public void componentResized(ComponentEvent e) { tableModel.updateRowSizes(); } }); } public void setAppendToTheEnd(boolean appendToTheEnd) { this.appendToTheEnd = appendToTheEnd; } public JTable getTable() { return table; } protected void setRenderer(TableCellRenderer renderer) { cellRenderer = renderer; table.getColumnModel().getColumn(0).setCellRenderer(cellRenderer); } protected void addElement(T element) { tableModel.addElement(element); } public Dimension getMinimumSize() { Dimension minimumSize = super.getMinimumSize(); minimumSize.height = 10; return minimumSize; } class MyJTableScrollBar extends JScrollBar { public MyJTableScrollBar() { super(JScrollBar.VERTICAL, 0, 1, 0, Math.max(1, tableModel.maxStart() + 1)); final BoundedRangeModel model = getModel(); model.addChangeListener(new ChangeListener() { public void stateChanged(ChangeEvent e) { tableModel.setStart(model.getValue()); } }); } private void changeTableParams() { int newMax = tableModel.maxStart() + 1; int newValue = tableModel.start; if (newMax != getMaximum()) { setMaximum(newMax); } if (newValue != getValue()) { setValue(newValue); } } } class MyJTableModel extends AbstractTableModel { private static final long serialVersionUID = 2818294672596122659L; int start = 0; int length = 0; int maxLength = 0; int size = 0; /* length = min(size, maxLength) start >= 0 start + length - 1 < size <=> start <= size - length size, maxLength -> length, start length -> start */ public void setMaxLength(int newMaxLength) { boolean skipPolicy = size != start + length; int diff = maxLength - newMaxLength; maxLength = newMaxLength; updateLength(diff, skipPolicy); } private void updateLength(int diff, boolean skipPolicy) { if (setLength(diff, skipPolicy)) { fireTableDataChanged(); } 
scrollBar.changeTableParams(); } public void setSize(int newSize) { boolean skipPolicy = size != start + length; int diff = newSize - size; size = newSize; updateLength(diff, skipPolicy); } private boolean setLength(int diff, boolean skipPolicy) { int newLength = Math.min(size, maxLength); boolean updateNeeded = length != newLength; length = newLength; if (!skipPolicy && appendToTheEnd) { start += diff; if (start < 0) { start = 0; updateNeeded = true; } } if (start > size - length) { start = size - length; updateNeeded = true; } return updateNeeded; } private void setStart(int newStart) { if (start != newStart && start >= 0 && start <= size - length) { start = newStart; fireTableDataChanged(); scrollBar.changeTableParams(); } } public int getRowCount() { return length; } public int getColumnCount() { return 1; } public String getColumnName(int columnIndex) { return "Row"; } public Object getValueAt(int rowIndex, int columnIndex) { return start + rowIndex < data.size() ? data.get(start + rowIndex) : null; } boolean tableDataChangedLock = false; public void fireTableDataChanged() { if (!tableDataChangedLock) { tableDataChangedLock = true; int savedStart; int savedLength; do { savedStart = start; savedLength = length; updateRowSizes(); } while (savedLength != length || savedStart != start); super.fireTableDataChanged(); resizeTableRows(); tableDataChangedLock = false; } } public int maxStart() { return Math.max(size - length, 0); } public void addElement(T element) { data.add(element); setSize(data.size()); } public void updateRowSizes() { tableModel.setMaxLength(calculateNewLength()); } Map<Integer, Integer> rowsHeights = new HashMap<Integer, Integer>(); private int calculateNewLength() { int direction = appendToTheEnd ? -1 : 1; int start = appendToTheEnd ? tableModel.getRowCount() - 1 : 0; int h = getHeight(); int i = start; while (true) { if (i < 0 || i > tableModel.getRowCount() - 1) { break; } Component tableCellRendererComponent = cellRenderer.getTableCellRendererComponent(table, tableModel.getValueAt(i, 0), false, false, i, 0); tableCellRendererComponent.setSize(getWidth(), tableCellRendererComponent.getHeight()); // receive its preffered height int height = tableCellRendererComponent.getPreferredSize().height; rowsHeights.put(i, height); // int oldHeight = table.getRowHeight(i); // if (oldHeight != height) { // table.setRowHeight(i, height); // } if (h < height) { // this row won't fit break; } h -= height; i += direction; } return (i - start) / direction + h / table.getRowHeight(); } /** * Correctly resizes chat area in order to correctly support auto-wrapping in cells. 
         */
        private void resizeTableRows() {
            // resize all rows, from the top row down to the bottom
            for (int i = 0; i < table.getRowCount(); i++) {
                int height = rowsHeights.get(i);
                // get the old row height
                int oldHeight = table.getRowHeight(i);
                if (height != oldHeight) {
                    // change the height of the row
                    table.setRowHeight(i, height);
                }
            }
        }
    }

    public static void main(String[] args) {
        JFrame frame = new JFrame("My table");
        ArrayList<String> data = new ArrayList<String>();
//        for (int i = 0; i < 1000; i++) {
//            data.add("Row#" + String.valueOf(i));
//        }
        final MyJTable<String> table = new MyJTable<String>(data);
        table.setAppendToTheEnd(true);
        frame.getContentPane().add(table, BorderLayout.CENTER);
        JButton button = new JButton("Add");
        final int[] i = new int[]{0};
        button.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                table.addElement("Row #" + (++i[0]));
            }
        });
        frame.getContentPane().add(button, BorderLayout.SOUTH);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.pack();
        frame.setLocationRelativeTo(null);
        frame.setVisible(true);
    }
}
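The resizeTableRows()/calculateNewLength() pair only produces variable row heights when a renderer with a width-dependent preferred height is installed through the protected setRenderer() hook; the demo in main() uses the default single-line renderer, so wrapping never happens. Below is a minimal sketch, not part of the original sources, of such a subclass; the class name WrappingJTable and the JTextArea-based renderer are illustrative choices, and the sketch assumes it can see MyJTable (same package or an import).

// Illustrative sketch only: WrappingJTable and its renderer are not in the original sources.
import java.awt.Component;
import java.util.ArrayList;
import javax.swing.JTable;
import javax.swing.JTextArea;
import javax.swing.table.TableCellRenderer;

public class WrappingJTable extends MyJTable<String> {

    public WrappingJTable() {
        super(new ArrayList<String>());
        // A JTextArea wraps long values; after calculateNewLength() calls
        // setSize(width, ...) on it, getPreferredSize().height reflects the
        // wrapped line count, which resizeTableRows() then applies per row.
        setRenderer(new TableCellRenderer() {
            private final JTextArea area = new JTextArea();
            {
                area.setLineWrap(true);
                area.setWrapStyleWord(true);
                area.setOpaque(true);
            }
            public Component getTableCellRendererComponent(JTable table, Object value,
                    boolean isSelected, boolean hasFocus, int row, int column) {
                area.setText(value == null ? "" : value.toString());
                return area;
            }
        });
    }

    // addElement(T) is protected in MyJTable, so expose it for callers here.
    public void append(String line) {
        addElement(line);
    }
}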
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.http.localserver; import java.net.Socket; import org.apache.http.HttpHost; import org.apache.http.HttpVersion; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.scheme.SocketFactory; import org.apache.http.impl.DefaultHttpClientConnection; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpParams; import org.apache.http.params.HttpProtocolParams; import org.apache.http.protocol.BasicHttpContext; import org.apache.http.protocol.BasicHttpProcessor; import org.apache.http.protocol.HttpRequestExecutor; import org.apache.http.protocol.RequestConnControl; import org.apache.http.protocol.RequestContent; /** * Base class for tests using {@link LocalTestServer LocalTestServer}. * Note that the test server will be {@link #setUp set up} before each * individual tests and {@link #tearDown teared down} afterwards. * Use this base class <i>exclusively</i> for tests that require the * server. If you have some tests that require the server and others * that don't, split them in two different classes. */ public abstract class ServerTestBase extends BasicServerTestBase { /** The available schemes. */ protected SchemeRegistry supportedSchemes; /** The default parameters for the client side. */ protected HttpParams defaultParams; /** The HTTP processor for the client side. */ protected BasicHttpProcessor httpProcessor; /** The default context for the client side. */ protected BasicHttpContext httpContext; /** The request executor for the client side. */ protected HttpRequestExecutor httpExecutor; protected ServerTestBase(String testName) { super(testName); } /** * Prepares the local server for testing. * Derived classes that override this method MUST call * the implementation here. That SHOULD be done at the * beginning of the overriding method. * <br/> * Derived methods can modify for example the default parameters * being set up, or the interceptors. * <p> * This method will re-use the helper objects from a previous run * if they are still available. For example, the local test server * will be re-started rather than re-created. * {@link #httpContext httpContext} will always be re-created. 
* Tests that modify the other helper objects should afterwards * set the respective attributes to <code>null</code> in a * <code>finally{}</code> block to force re-creation for * subsequent tests. * Of course that shouldn't be done with the test server, * or only after shutting that down. * * @throws Exception in case of a problem */ @Override protected void setUp() throws Exception { if (defaultParams == null) { defaultParams = new BasicHttpParams(); HttpProtocolParams.setVersion (defaultParams, HttpVersion.HTTP_1_1); HttpProtocolParams.setContentCharset (defaultParams, "UTF-8"); HttpProtocolParams.setUserAgent (defaultParams, "TestAgent/1.1"); HttpProtocolParams.setUseExpectContinue (defaultParams, false); } if (supportedSchemes == null) { supportedSchemes = new SchemeRegistry(); SocketFactory sf = PlainSocketFactory.getSocketFactory(); supportedSchemes.register(new Scheme("http", sf, 80)); } if (httpProcessor == null) { httpProcessor = new BasicHttpProcessor(); httpProcessor.addInterceptor(new RequestContent()); httpProcessor.addInterceptor(new RequestConnControl()); // optional } // the context is created each time, it may get modified by test cases httpContext = new BasicHttpContext(null); if (httpExecutor == null) { httpExecutor = new HttpRequestExecutor(); } if (localServer == null) { localServer = new LocalTestServer(null, null); localServer.registerDefaultHandlers(); } localServer.start(); } // setUp /** * Unprepares the local server for testing. * This stops the test server. All helper objects, including the * test server, remain stored in the attributes for the next test. * * @see #setUp setUp() */ @Override protected void tearDown() throws Exception { localServer.stop(); } /** * Opens a connection to the given target using * {@link #defaultParams default parameters}. * Maps to {@link #connectTo(HttpHost,HttpParams) * connectTo(target,defaultParams)}. * * @param target the target to connect to * * @return a new connection opened to the target * * @throws Exception in case of a problem */ protected DefaultHttpClientConnection connectTo(HttpHost target) throws Exception { return connectTo(target, defaultParams); } /** * Opens a connection to the given target using the given parameters. * * @param target the target to connect to * * @return a new connection opened to the target * * @throws Exception in case of a problem */ protected DefaultHttpClientConnection connectTo(HttpHost target, HttpParams params) throws Exception { Scheme schm = supportedSchemes.get(target.getSchemeName()); int port = schm.resolvePort(target.getPort()); DefaultHttpClientConnection conn = new DefaultHttpClientConnection(); Socket sock = schm.getSocketFactory().connectSocket (null, target.getHostName(), port, null, 0, params); conn.bind(sock, params); return conn; } } // class ServerTestBase
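Since the class above is meant to be subclassed rather than run directly, a sketch of a concrete test may help. The following is only an illustration, not part of HttpClient's own test suite: it assumes BasicServerTestBase ultimately extends junit.framework.TestCase (as the String constructor and setUp()/tearDown() overrides suggest), that LocalTestServer exposes its bound address via getServiceAddress(), and that registerDefaultHandlers() registers the /random/* resource.

package org.apache.http.localserver;

import java.net.InetSocketAddress;

import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.impl.DefaultHttpClientConnection;
import org.apache.http.message.BasicHttpRequest;

// Hypothetical example subclass; names and the target path are assumptions.
public class ExampleGetTest extends ServerTestBase {

    public ExampleGetTest(String testName) {
        super(testName);
    }

    public void testSimpleGet() throws Exception {
        // Assumption: LocalTestServer reports its ephemeral port this way.
        InetSocketAddress address = localServer.getServiceAddress();
        HttpHost target = new HttpHost(address.getHostName(), address.getPort(), "http");

        DefaultHttpClientConnection conn = connectTo(target);
        try {
            // The minimal client-side processor above (RequestContent,
            // RequestConnControl) does not add a Host header, so set it here.
            BasicHttpRequest request = new BasicHttpRequest("GET", "/random/16");
            request.addHeader("Host", target.getHostName() + ":" + target.getPort());
            request.setParams(defaultParams);

            httpExecutor.preProcess(request, httpProcessor, httpContext);
            HttpResponse response = httpExecutor.execute(request, conn, httpContext);
            httpExecutor.postProcess(response, httpProcessor, httpContext);

            assertEquals(200, response.getStatusLine().getStatusCode());
        } finally {
            conn.close();
        }
    }
}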
package nam.ui.src.main.webapp.admin; import nam.model.Element; import nam.model.Information; import nam.model.ModelLayerHelper; import nam.model.Module; import nam.model.Project; import aries.codegen.util.Buf; import aries.generation.engine.GenerationContext; import aries.generation.model.ModelFile; public class ElementListToolbarXhtmlBuilder extends AbstractCompositionXHTMLBuilder { public ElementListToolbarXhtmlBuilder(GenerationContext context) { super(context); } public void initialize(Project project, Module module) { super.initialize(project, module); } public ModelFile buildFile(Information information, Element element) throws Exception { String elementName = ModelLayerHelper.getElementNameUncapped(element); String folderName = ModelLayerHelper.getElementWebappFolder(element); String fileName = elementName + "ListToolbar.xhtml"; ModelFile modelFile = createMainWebappFile(folderName, fileName); modelFile.setTextContent(getFileContent(element)); return modelFile; } public String getFileContent(Element element) { String elementClassName = ModelLayerHelper.getElementClassName(element); String elementNameUncapped = ModelLayerHelper.getElementNameUncapped(element); Buf buf = new Buf(); buf.putLine("<!DOCTYPE composition PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">"); buf.putLine(""); buf.putLine("<ui:composition"); buf.putLine(" xmlns:aries=\"http://aries.org/jsf\""); buf.putLine(" xmlns:a4j=\"http://richfaces.org/a4j\""); buf.putLine(" xmlns:f=\"http://xmlns.jcp.org/jsf/core\""); buf.putLine(" xmlns:ui=\"http://xmlns.jcp.org/jsf/facelets\">"); buf.putLine(" "); buf.putLine(" <a4j:outputPanel"); buf.putLine(" id=\""+elementNameUncapped+"ListToolbar\">"); buf.putLine(" "); buf.putLine(" <aries:toolbar"); buf.putLine(" type=\"table\">"); buf.putLine(" "); buf.putLine(" <aries:toolbarGroup"); buf.putLine(" location=\"left\">"); buf.putLine(" "); buf.putLine(" <aries:toolButton"); buf.putLine(" id=\""+elementNameUncapped+"ListNewButton\""); buf.putLine(" value=\"New "+elementClassName+"...\""); buf.putLine(" icon=\"/icons/common/New16.gif\""); buf.putLine(" iconDisabled=\"/icons/common/NewDisabled16.gif\""); buf.putLine(" tooltip=\"Create new "+elementClassName+" Record\""); buf.putLine(" enabled=\"#{securityGuard.canCreate('"+elementNameUncapped+"')}\""); buf.putLine(" mode=\"client\""); buf.putLine(" execute=\"@none\""); buf.putLine(" onmouseup=\"processNewElement(event, '"+elementClassName+"')\""); buf.putLine(" offset=\"true\" />"); buf.putLine(" "); buf.putLine(" <aries:toolButton"); buf.putLine(" id=\""+elementNameUncapped+"ListEditButton\""); buf.putLine(" value=\"Edit "+elementClassName+"...\""); buf.putLine(" icon=\"/icons/common/Edit16.gif\""); buf.putLine(" iconDisabled=\"/icons/common/EditDisabled16.gif\""); buf.putLine(" tooltip=\"Open selected "+elementClassName+" record for edit...\""); buf.putLine(" enabled=\"#{"+elementNameUncapped+"ListManager.hasSelection() and securityGuard.canUpdate('"+elementNameUncapped+"')}\""); buf.putLine(" mode=\"client\""); buf.putLine(" execute=\"@none\""); buf.putLine(" onmouseup=\"processEditElement(event, '"+elementClassName+"')\""); buf.putLine(" offset=\"true\" />"); buf.putLine(" "); buf.putLine(" <aries:toolButton"); buf.putLine(" id=\""+elementNameUncapped+"ListRemoveButton\""); buf.putLine(" value=\"Remove "+elementClassName+"...\""); buf.putLine(" icon=\"/icons/common/Remove16.gif\""); buf.putLine(" iconDisabled=\"/icons/common/RemoveDisabled16.gif\""); buf.putLine(" 
tooltip=\"Remove selected "+elementClassName+" record from system...\""); buf.putLine(" enabled=\"#{"+elementNameUncapped+"ListManager.hasSelection() and securityGuard.canDelete('"+elementNameUncapped+"')}\""); buf.putLine(" mode=\"client\""); buf.putLine(" execute=\"@none\""); buf.putLine(" onmouseup=\"processRemoveElement(event, '"+elementClassName+"')\""); buf.putLine(" offset=\"true\" />"); // buf.putLine(" "); // buf.putLine(" <aries:toolButton"); // buf.putLine(" id=\""+elementNameUncapped+"ListRemoveButton\""); // buf.putLine(" value=\""+elementClassName+"...\""); // buf.putLine(" icon=\"/icons/common/Remove16.gif\""); // buf.putLine(" iconDisabled=\"/icons/common/RemoveDisabled16.gif\""); // buf.putLine(" tooltip=\"Remove selected "+elementClassName+" Record\""); // buf.putLine(" enabled=\"#{"+elementNameUncapped+"ListManager.hasSelection() and securityGuard.canDelete('"+elementNameUncapped+"')}\""); // buf.putLine(" mode=\"ajax\""); // buf.putLine(" execute=\"@this\""); // buf.putLine(" immediate=\"true\""); // buf.putLine(" bypassUpdates=\"true\""); // buf.putLine(" limitRender=\"true\""); // buf.putLine(" manager=\"#{"+elementNameUncapped+"ListManager}\""); // buf.putLine(" action=\"remove"+elementClassName+"\""); // buf.putLine(" onclickXX=\"alert('#{domain}')\""); // buf.putLine(" onclick=\"popupPrompt('"+elementClassName+" List', 'Remove selected "+elementClassName+" record from System', 'Do you wish to continue?', 'org.aries.remove"+elementClassName+"', '#{section}"+elementClassName+"ListPane')\""); // buf.putLine(" rendered=\"#{empty domain}\""); // buf.putLine(" offset=\"true\" />"); // buf.putLine(" "); // buf.putLine(" <aries:toolButton"); // buf.putLine(" id=\""+elementNameUncapped+"ListSelectButton\""); // buf.putLine(" value=\""+elementClassName+"...\""); // buf.putLine(" icon=\"/icons/common/Search16.gif\""); // buf.putLine(" iconDisabled=\"/icons/common/SearchDisabled16.gif\""); // buf.putLine(" tooltip=\"Select "+elementClassName+" Record(s)\""); // buf.putLine(" enabled=\"#{securityGuard.canOpen('"+elementNameUncapped+"')}\""); // buf.putLine(" mode=\"client\""); // buf.putLine(" execute=\"@none\""); // buf.putLine(" immediate=\"true\""); // buf.putLine(" bypassUpdates=\"true\""); // buf.putLine(" limitRender=\"true\""); // buf.putLine(" onclickXX=\"alert('#{domain}')\""); // buf.putLine(" onclick=\"show#{domain}"+elementClassName+"SelectDialog()\""); // buf.putLine(" render=\"#{domain}"+elementClassName+"SelectDialog\""); // buf.putLine(" rendered=\"#{domain eq '"+elementClassName+"Dialog'}\""); // buf.putLine(" offset=\"true\" />"); buf.putLine(" </aries:toolbarGroup>"); buf.putLine(" "); buf.putLine(" <aries:toolbarGroup"); buf.putLine(" location=\"right\">"); buf.putLine(" "); buf.putLine(" <aries:toolButton"); buf.putLine(" id=\""+elementNameUncapped+"ListRefreshButton\""); buf.putLine(" icon=\"/icons/common/Refresh16.gif\""); buf.putLine(" iconDisabled=\"/icons/common/RefreshDisabled16.gif\""); buf.putLine(" tooltip=\"Re-read "+elementClassName+" information from server\""); buf.putLine(" enabled=\"#{securityGuard.canOpen('"+elementNameUncapped+"')}\""); buf.putLine(" mode=\"client\""); buf.putLine(" execute=\"@none\""); buf.putLine(" onmouseup=\"refresh"+elementClassName+"List(event)\""); buf.putLine(" offset=\"true\" />"); buf.putLine(" </aries:toolbarGroup>"); buf.putLine(" </aries:toolbar>"); buf.putLine(" </a4j:outputPanel>"); buf.putLine("</ui:composition>"); /* <!-- <aries:toolButton id="#{domain}UserListNewUserButton" value="User..." 
tooltip="Create new User Record" icon="/icons/common/Role16.gif" iconDisabled="/icons/common/NewDisabled16.gif" enabled="#{securityGuard.canCreate('user')}" mode="ajax" execute="@this" immediate="true" bypassUpdates="true" limitRender="true" manager="#{itemManager}" action="newUser" onclickXX="alert('#{userInfoDialogs}')" onclick="setCursorWait(); showProgress('', 'User Records', 'Creating new record...')" oncomplete="setCursorDefault(this); hideProgress(); launch#{domain}UserDialog()" render="#{domain}UserDialog, #{domain}UserPersonNameDialog, #{domain}UserEmailAddressDialog, #{domain}UserPhoneNumberDialog, #{domain}UserStreetAddressDialog, #{domain}UserRoleSelectDialog, #{domain}UserPermissionInfoDialog, #{domain}UserPermissionInfoActionSelectDialog" offset="true" /> --> <!-- <aries:toolButton id="#{domain}UserListEditUserButton" value="User..." tooltip="View/Edit selected User Record" icon="/icons/common/Edit16.gif" iconDisabled="/icons/common/EditDisabled16.gif" enabled="#{"+elementNameUncapped+"ListManager.hasSelection() and securityGuard.canOpen('user')}" mode="ajax" execute="@this" immediate="true" bypassUpdates="true" limitRender="true" manager="#{"+elementNameUncapped+"ListManager}" action="editUser" onclickXX="alert('#{domain}')" onclick="setCursorWait(); showProgress('', 'User Records', 'Finding selected User information...')" oncomplete="setCursorDefault(this); hideProgress(); show#{domain}UserDialog()" render="#{domain}UserDialog, #{domain}UserPersonNameDialog, #{domain}UserEmailAddressDialog, #{domain}UserPhoneNumberDialog, #{domain}UserStreetAddressDialog, #{domain}UserRoleSelectDialog, #{domain}UserPermissionInfoDialog, #{domain}UserPermissionInfoActionSelectDialog" rendered="#{empty domain}" offset="true" /> --> <!-- <a4j:region renderRegionOnly="true" selfRendered ="true"> <h:commandButton value="Refresh"> <f:ajax execute="@this" immediate="true" listener="#{"+elementNameUncapped+"ListManager.refresh}" render="UserListPane" /> </h:commandButton> </a4j:region> --> <!-- <a4j:commandButton id="#{domain}UserListRefreshButtonXXX" disabled="#{!securityGuard.canOpen('user')}" tooltip="Re-read information from server" mode="ajax" execute="@this" immediate="true" bypassUpdates="true" limitRender="true" action="#{"+elementNameUncapped+"ListManager.refresh}" onclickXX="alert('#{domain}')" onclick="setCursorWait(this); showProgress('', 'User Service', 'Refreshing User List...')" oncomplete="setCursorDefault(this); hideProgress()" render="#{domain}UserListPane" rendered="#{true}"/> --> <!-- <aries:toolButton id="#{domain}UserListRefreshButton" icon="/icons/common/Refresh16.gif" iconDisabled="/icons/common/RefreshDisabled16.gif" enabled="#{securityGuard.canOpen('user')}" tooltip="Re-read information from server" mode="ajax" execute="@this" immediate="true" bypassUpdates="true" limitRender="true" manager="#{"+elementNameUncapped+"ListManager}" action="refresh" onclickXX="alert('#{domain}')" onclick="setCursorWait(this); showProgress('', 'User Service', 'Refreshing User List...')" oncomplete="setCursorDefault(this); hideProgress()" render="#{domain}UserListPane" rendered="#{true}" linkClass="text16 link"/> --> */ return buf.get(); } }
/* * Copyright 2012-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.build.bom; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.util.function.Consumer; import org.gradle.testkit.runner.BuildResult; import org.gradle.testkit.runner.GradleRunner; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.springframework.boot.build.DeployedPlugin; import org.springframework.boot.build.assertj.NodeAssert; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link BomPlugin}. * * @author Andy Wilkinson */ class BomPluginIntegrationTests { private File projectDir; private File buildFile; @BeforeEach void setup(@TempDir File projectDir) throws IOException { this.projectDir = projectDir; this.buildFile = new File(this.projectDir, "build.gradle"); } @Test void libraryModulesAreIncludedInDependencyManagementOfGeneratedPom() throws IOException { try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) { out.println("plugins {"); out.println(" id 'org.springframework.boot.bom'"); out.println("}"); out.println("bom {"); out.println(" library('ActiveMQ', '5.15.10') {"); out.println(" group('org.apache.activemq') {"); out.println(" modules = ["); out.println(" 'activemq-amqp',"); out.println(" 'activemq-blueprint'"); out.println(" ]"); out.println(" }"); out.println(" }"); out.println("}"); } generatePom((pom) -> { assertThat(pom).textAtPath("//properties/activemq.version").isEqualTo("5.15.10"); NodeAssert dependency = pom.nodeAtPath("//dependencyManagement/dependencies/dependency[1]"); assertThat(dependency).textAtPath("groupId").isEqualTo("org.apache.activemq"); assertThat(dependency).textAtPath("artifactId").isEqualTo("activemq-amqp"); assertThat(dependency).textAtPath("version").isEqualTo("${activemq.version}"); assertThat(dependency).textAtPath("scope").isNullOrEmpty(); assertThat(dependency).textAtPath("type").isNullOrEmpty(); dependency = pom.nodeAtPath("//dependencyManagement/dependencies/dependency[2]"); assertThat(dependency).textAtPath("groupId").isEqualTo("org.apache.activemq"); assertThat(dependency).textAtPath("artifactId").isEqualTo("activemq-blueprint"); assertThat(dependency).textAtPath("version").isEqualTo("${activemq.version}"); assertThat(dependency).textAtPath("scope").isNullOrEmpty(); assertThat(dependency).textAtPath("type").isNullOrEmpty(); }); } @Test void libraryPluginsAreIncludedInPluginManagementOfGeneratedPom() throws IOException { try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) { out.println("plugins {"); out.println(" id 'org.springframework.boot.bom'"); out.println("}"); out.println("bom {"); out.println(" library('Flyway', '6.0.8') {"); out.println(" group('org.flywaydb') {"); out.println(" plugins = ["); out.println(" 'flyway-maven-plugin'"); out.println(" ]"); out.println(" }"); out.println(" }"); out.println("}"); } 
generatePom((pom) -> { assertThat(pom).textAtPath("//properties/flyway.version").isEqualTo("6.0.8"); NodeAssert plugin = pom.nodeAtPath("//pluginManagement/plugins/plugin"); assertThat(plugin).textAtPath("groupId").isEqualTo("org.flywaydb"); assertThat(plugin).textAtPath("artifactId").isEqualTo("flyway-maven-plugin"); assertThat(plugin).textAtPath("version").isEqualTo("${flyway.version}"); assertThat(plugin).textAtPath("scope").isNullOrEmpty(); assertThat(plugin).textAtPath("type").isNullOrEmpty(); }); } @Test void libraryImportsAreIncludedInDependencyManagementOfGeneratedPom() throws Exception { try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) { out.println("plugins {"); out.println(" id 'org.springframework.boot.bom'"); out.println("}"); out.println("bom {"); out.println(" library('Jackson Bom', '2.10.0') {"); out.println(" group('com.fasterxml.jackson') {"); out.println(" imports = ["); out.println(" 'jackson-bom'"); out.println(" ]"); out.println(" }"); out.println(" }"); out.println("}"); } generatePom((pom) -> { assertThat(pom).textAtPath("//properties/jackson-bom.version").isEqualTo("2.10.0"); NodeAssert dependency = pom.nodeAtPath("//dependencyManagement/dependencies/dependency"); assertThat(dependency).textAtPath("groupId").isEqualTo("com.fasterxml.jackson"); assertThat(dependency).textAtPath("artifactId").isEqualTo("jackson-bom"); assertThat(dependency).textAtPath("version").isEqualTo("${jackson-bom.version}"); assertThat(dependency).textAtPath("scope").isEqualTo("import"); assertThat(dependency).textAtPath("type").isEqualTo("pom"); }); } @Test void moduleExclusionsAreIncludedInDependencyManagementOfGeneratedPom() throws IOException { try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) { out.println("plugins {"); out.println(" id 'org.springframework.boot.bom'"); out.println("}"); out.println("bom {"); out.println(" library('MySQL', '8.0.18') {"); out.println(" group('mysql') {"); out.println(" modules = ["); out.println(" 'mysql-connector-java' {"); out.println(" exclude group: 'com.google.protobuf', module: 'protobuf-java'"); out.println(" }"); out.println(" ]"); out.println(" }"); out.println(" }"); out.println("}"); } generatePom((pom) -> { assertThat(pom).textAtPath("//properties/mysql.version").isEqualTo("8.0.18"); NodeAssert dependency = pom.nodeAtPath("//dependencyManagement/dependencies/dependency"); assertThat(dependency).textAtPath("groupId").isEqualTo("mysql"); assertThat(dependency).textAtPath("artifactId").isEqualTo("mysql-connector-java"); assertThat(dependency).textAtPath("version").isEqualTo("${mysql.version}"); assertThat(dependency).textAtPath("scope").isNullOrEmpty(); assertThat(dependency).textAtPath("type").isNullOrEmpty(); NodeAssert exclusion = dependency.nodeAtPath("exclusions/exclusion"); assertThat(exclusion).textAtPath("groupId").isEqualTo("com.google.protobuf"); assertThat(exclusion).textAtPath("artifactId").isEqualTo("protobuf-java"); }); } @Test void moduleTypesAreIncludedInDependencyManagementOfGeneratedPom() throws IOException { try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) { out.println("plugins {"); out.println(" id 'org.springframework.boot.bom'"); out.println("}"); out.println("bom {"); out.println(" library('Elasticsearch', '7.15.2') {"); out.println(" group('org.elasticsearch.distribution.integ-test-zip') {"); out.println(" modules = ["); out.println(" 'elasticsearch' {"); out.println(" type = 'zip'"); out.println(" }"); out.println(" ]"); out.println(" }"); out.println(" 
}"); out.println("}"); } generatePom((pom) -> { assertThat(pom).textAtPath("//properties/elasticsearch.version").isEqualTo("7.15.2"); NodeAssert dependency = pom.nodeAtPath("//dependencyManagement/dependencies/dependency"); assertThat(dependency).textAtPath("groupId").isEqualTo("org.elasticsearch.distribution.integ-test-zip"); assertThat(dependency).textAtPath("artifactId").isEqualTo("elasticsearch"); assertThat(dependency).textAtPath("version").isEqualTo("${elasticsearch.version}"); assertThat(dependency).textAtPath("scope").isNullOrEmpty(); assertThat(dependency).textAtPath("type").isEqualTo("zip"); assertThat(dependency).nodeAtPath("exclusions").isNull(); }); } @Test void libraryNamedSpringBootHasNoVersionProperty() throws IOException { try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) { out.println("plugins {"); out.println(" id 'org.springframework.boot.bom'"); out.println("}"); out.println("bom {"); out.println(" library('Spring Boot', '1.2.3') {"); out.println(" group('org.springframework.boot') {"); out.println(" modules = ["); out.println(" 'spring-boot'"); out.println(" ]"); out.println(" }"); out.println(" }"); out.println("}"); } generatePom((pom) -> { assertThat(pom).textAtPath("//properties/spring-boot.version").isEmpty(); NodeAssert dependency = pom.nodeAtPath("//dependencyManagement/dependencies/dependency[1]"); assertThat(dependency).textAtPath("groupId").isEqualTo("org.springframework.boot"); assertThat(dependency).textAtPath("artifactId").isEqualTo("spring-boot"); assertThat(dependency).textAtPath("version").isEqualTo("1.2.3"); assertThat(dependency).textAtPath("scope").isNullOrEmpty(); assertThat(dependency).textAtPath("type").isNullOrEmpty(); }); } // @Test // void versionAlignmentIsVerified() throws IOException { // try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) { // out.println("plugins {"); // out.println(" id 'org.springframework.boot.bom'"); // out.println("}"); // out.println("bom {"); // out.println(" library('OAuth2 OIDC SDK', '8.36.1') {"); // out.println(" alignedWith('Spring Security') {"); // out.println( // " // source('https://github.com/spring-projects/spring-security/blob/${libraryVersion}/config/gradle/dependency-locks/optional.lockfile')"); // out.println(" pattern('com.nimbusds:oauth2-oidc-sdk:(.+)')"); // out.println(" }"); // out.println(" group('com.nimbusds') {"); // out.println(" modules = ["); // out.println(" 'oauth2-oidc-sdk'"); // out.println(" ]"); // out.println(" }"); // out.println(" }"); // out.println(" library('Spring Security', '5.4.7') {"); // out.println(" }"); // out.println("}"); // } // System.out.println(runGradle(DeployedPlugin.GENERATE_POM_TASK_NAME, // "-s").getOutput()); // } private BuildResult runGradle(String... args) { return GradleRunner.create().withDebug(true).withProjectDir(this.projectDir).withArguments(args) .withPluginClasspath().build(); } private void generatePom(Consumer<NodeAssert> consumer) { runGradle(DeployedPlugin.GENERATE_POM_TASK_NAME, "-s"); File generatedPomXml = new File(this.projectDir, "build/publications/maven/pom-default.xml"); assertThat(generatedPomXml).isFile(); consumer.accept(new NodeAssert(generatedPomXml)); } }
/*- * See the file LICENSE for redistribution information. * * Copyright (c) 2000, 2012 Oracle and/or its affiliates. All rights reserved. * */ package com.sleepycat.util; /** * Static methods for reading and writing packed integers. * * <p>Most applications should use the classes in the {@link * com.sleepycat.bind.tuple} package rather than using this class directly.</p> * * @see <a href="../bind/tuple/package-summary.html#integerFormats">Integer Formats</a> */ public class PackedInteger { /** * The maximum number of bytes needed to store an int value (5). */ public static final int MAX_LENGTH = 5; /** * The maximum number of bytes needed to store a long value (9). */ public static final int MAX_LONG_LENGTH = 9; /** * Reads a packed integer at the given buffer offset and returns it. * * @param buf the buffer to read from. * * @param off the offset in the buffer at which to start reading. * * @return the integer that was read. */ public static int readInt(byte[] buf, int off) { boolean negative; int byteLen; int b1 = buf[off++]; if (b1 < -119) { negative = true; byteLen = -b1 - 119; } else if (b1 > 119) { negative = false; byteLen = b1 - 119; } else { return b1; } int value = buf[off++] & 0xFF; if (byteLen > 1) { value |= (buf[off++] & 0xFF) << 8; if (byteLen > 2) { value |= (buf[off++] & 0xFF) << 16; if (byteLen > 3) { value |= (buf[off++] & 0xFF) << 24; } } } return negative ? (-value - 119) : (value + 119); } /** * Reads a packed long integer at the given buffer offset and returns it. * * @param buf the buffer to read from. * * @param off the offset in the buffer at which to start reading. * * @return the long integer that was read. */ public static long readLong(byte[] buf, int off) { boolean negative; int byteLen; int b1 = buf[off++]; if (b1 < -119) { negative = true; byteLen = -b1 - 119; } else if (b1 > 119) { negative = false; byteLen = b1 - 119; } else { return b1; } long value = buf[off++] & 0xFFL; if (byteLen > 1) { value |= (buf[off++] & 0xFFL) << 8; if (byteLen > 2) { value |= (buf[off++] & 0xFFL) << 16; if (byteLen > 3) { value |= (buf[off++] & 0xFFL) << 24; if (byteLen > 4) { value |= (buf[off++] & 0xFFL) << 32; if (byteLen > 5) { value |= (buf[off++] & 0xFFL) << 40; if (byteLen > 6) { value |= (buf[off++] & 0xFFL) << 48; if (byteLen > 7) { value |= (buf[off++] & 0xFFL) << 56; } } } } } } } return negative ? (-value - 119) : (value + 119); } /** * Returns the number of bytes that would be read by {@link #readInt}. * * <p>Because the length is stored in the first byte, this method may be * called with only the first byte of the packed integer in the given * buffer. This method only accesses one byte at the given offset.</p> * * @param buf the buffer to read from. * * @param off the offset in the buffer at which to start reading. * * @return the number of bytes that would be read. */ public static int getReadIntLength(byte[] buf, int off) { int b1 = buf[off]; if (b1 < -119) { return -b1 - 119 + 1; } else if (b1 > 119) { return b1 - 119 + 1; } else { return 1; } } /** * Returns the number of bytes that would be read by {@link #readLong}. * * <p>Because the length is stored in the first byte, this method may be * called with only the first byte of the packed integer in the given * buffer. This method only accesses one byte at the given offset.</p> * * @param buf the buffer to read from. * * @param off the offset in the buffer at which to start reading. * * @return the number of bytes that would be read. 
*/ public static int getReadLongLength(byte[] buf, int off) { /* The length is stored in the same way for int and long. */ return getReadIntLength(buf, off); } /** * Writes a packed integer starting at the given buffer offset and returns * the next offset to be written. * * @param buf the buffer to write to. * * @param offset the offset in the buffer at which to start writing. * * @param value the integer to be written. * * @return the offset past the bytes written. */ public static int writeInt(byte[] buf, int offset, int value) { int byte1Off = offset; boolean negative; if (value < -119) { negative = true; value = -value - 119; } else if (value > 119) { negative = false; value = value - 119; } else { buf[offset++] = (byte) value; return offset; } offset++; buf[offset++] = (byte) value; if ((value & 0xFFFFFF00) == 0) { buf[byte1Off] = negative ? (byte) -120 : (byte) 120; return offset; } buf[offset++] = (byte) (value >>> 8); if ((value & 0xFFFF0000) == 0) { buf[byte1Off] = negative ? (byte) -121 : (byte) 121; return offset; } buf[offset++] = (byte) (value >>> 16); if ((value & 0xFF000000) == 0) { buf[byte1Off] = negative ? (byte) -122 : (byte) 122; return offset; } buf[offset++] = (byte) (value >>> 24); buf[byte1Off] = negative ? (byte) -123 : (byte) 123; return offset; } /** * Writes a packed long integer starting at the given buffer offset and * returns the next offset to be written. * * @param buf the buffer to write to. * * @param offset the offset in the buffer at which to start writing. * * @param value the long integer to be written. * * @return the offset past the bytes written. */ public static int writeLong(byte[] buf, int offset, long value) { int byte1Off = offset; boolean negative; if (value < -119) { negative = true; value = -value - 119; } else if (value > 119) { negative = false; value = value - 119; } else { buf[offset++] = (byte) value; return offset; } offset++; buf[offset++] = (byte) value; if ((value & 0xFFFFFFFFFFFFFF00L) == 0) { buf[byte1Off] = negative ? (byte) -120 : (byte) 120; return offset; } buf[offset++] = (byte) (value >>> 8); if ((value & 0xFFFFFFFFFFFF0000L) == 0) { buf[byte1Off] = negative ? (byte) -121 : (byte) 121; return offset; } buf[offset++] = (byte) (value >>> 16); if ((value & 0xFFFFFFFFFF000000L) == 0) { buf[byte1Off] = negative ? (byte) -122 : (byte) 122; return offset; } buf[offset++] = (byte) (value >>> 24); if ((value & 0xFFFFFFFF00000000L) == 0) { buf[byte1Off] = negative ? (byte) -123 : (byte) 123; return offset; } buf[offset++] = (byte) (value >>> 32); if ((value & 0xFFFFFF0000000000L) == 0) { buf[byte1Off] = negative ? (byte) -124 : (byte) 124; return offset; } buf[offset++] = (byte) (value >>> 40); if ((value & 0xFFFF000000000000L) == 0) { buf[byte1Off] = negative ? (byte) -125 : (byte) 125; return offset; } buf[offset++] = (byte) (value >>> 48); if ((value & 0xFF00000000000000L) == 0) { buf[byte1Off] = negative ? (byte) -126 : (byte) 126; return offset; } buf[offset++] = (byte) (value >>> 56); buf[byte1Off] = negative ? (byte) -127 : (byte) 127; return offset; } /** * Returns the number of bytes that would be written by {@link #writeInt}. * * @param value the integer to be written. * * @return the number of bytes that would be used to write the given * integer. 
*/ public static int getWriteIntLength(int value) { if (value < -119) { value = -value - 119; } else if (value > 119) { value = value - 119; } else { return 1; } if ((value & 0xFFFFFF00) == 0) { return 2; } if ((value & 0xFFFF0000) == 0) { return 3; } if ((value & 0xFF000000) == 0) { return 4; } return 5; } /** * Returns the number of bytes that would be written by {@link #writeLong}. * * @param value the long integer to be written. * * @return the number of bytes that would be used to write the given long * integer. */ public static int getWriteLongLength(long value) { if (value < -119) { value = -value - 119; } else if (value > 119) { value = value - 119; } else { return 1; } if ((value & 0xFFFFFFFFFFFFFF00L) == 0) { return 2; } if ((value & 0xFFFFFFFFFFFF0000L) == 0) { return 3; } if ((value & 0xFFFFFFFFFF000000L) == 0) { return 4; } if ((value & 0xFFFFFFFF00000000L) == 0) { return 5; } if ((value & 0xFFFFFF0000000000L) == 0) { return 6; } if ((value & 0xFFFF000000000000L) == 0) { return 7; } if ((value & 0xFF00000000000000L) == 0) { return 8; } return 9; } /** * Reads a sorted packed integer at the given buffer offset and returns it. * * @param buf the buffer to read from. * * @param off the offset in the buffer at which to start reading. * * @return the integer that was read. */ public static int readSortedInt(byte[] buf, int off) { int byteLen; boolean negative; /* The first byte of the buf stores the length of the value part. */ int b1 = buf[off++] & 0xff; /* Adjust the byteLen to the real length of the value part. */ if (b1 < 0x08) { byteLen = 0x08 - b1; negative = true; } else if (b1 > 0xf7) { byteLen = b1 - 0xf7; negative = false; } else { return b1 - 127; } /* * The following bytes on the buf store the value as a big endian * integer. We extract the significant bytes from the buf and put them * into the value in big endian order. */ int value; if (negative) { value = 0xFFFFFFFF; } else { value = 0; } if (byteLen > 3) { value = (value << 8) | (buf[off++] & 0xFF); } if (byteLen > 2) { value = (value << 8) | (buf[off++] & 0xFF); } if (byteLen > 1) { value = (value << 8) | (buf[off++] & 0xFF); } value = (value << 8) | (buf[off++] & 0xFF); /* * After get the adjusted value, we have to adjust it back to the * original value. */ if (negative) { value -= 119; } else { value += 121; } return value; } /** * Reads a sorted packed long integer at the given buffer offset and * returns it. * * @param buf the buffer to read from. * * @param off the offset in the buffer at which to start reading. * * @return the long integer that was read. */ public static long readSortedLong(byte[] buf, int off) { int byteLen; boolean negative; /* The first byte of the buf stores the length of the value part. */ int b1 = buf[off++] & 0xff; /* Adjust the byteLen to the real length of the value part. */ if (b1 < 0x08) { byteLen = 0x08 - b1; negative = true; } else if (b1 > 0xf7) { byteLen = b1 - 0xf7; negative = false; } else { return b1 - 127; } /* * The following bytes on the buf store the value as a big endian * integer. We extract the significant bytes from the buf and put them * into the value in big endian order. 
*/ long value; if (negative) { value = 0xFFFFFFFFFFFFFFFFL; } else { value = 0; } if (byteLen > 7) { value = (value << 8) | (buf[off++] & 0xFF); } if (byteLen > 6) { value = (value << 8) | (buf[off++] & 0xFF); } if (byteLen > 5) { value = (value << 8) | (buf[off++] & 0xFF); } if (byteLen > 4) { value = (value << 8) | (buf[off++] & 0xFF); } if (byteLen > 3) { value = (value << 8) | (buf[off++] & 0xFF); } if (byteLen > 2) { value = (value << 8) | (buf[off++] & 0xFF); } if (byteLen > 1) { value = (value << 8) | (buf[off++] & 0xFF); } value = (value << 8) | (buf[off++] & 0xFF); /* * After obtaining the adjusted value, we have to adjust it back to the * original value. */ if (negative) { value -= 119; } else { value += 121; } return value; } /** * Returns the number of bytes that would be read by {@link * #readSortedInt}. * * <p>Because the length is stored in the first byte, this method may be * called with only the first byte of the packed integer in the given * buffer. This method only accesses one byte at the given offset.</p> * * @param buf the buffer to read from. * * @param off the offset in the buffer at which to start reading. * * @return the number of bytes that would be read. */ public static int getReadSortedIntLength(byte[] buf, int off) { /* The first byte of the buf stores the length of the value part. */ int b1 = buf[off] & 0xff; if (b1 < 0x08) { return 1 + 0x08 - b1; } if (b1 > 0xf7) { return 1 + b1 - 0xf7; } return 1; } /** * Returns the number of bytes that would be read by {@link * #readSortedLong}. * * <p>Because the length is stored in the first byte, this method may be * called with only the first byte of the packed integer in the given * buffer. This method only accesses one byte at the given offset.</p> * * @param buf the buffer to read from. * * @param off the offset in the buffer at which to start reading. * * @return the number of bytes that would be read. */ public static int getReadSortedLongLength(byte[] buf, int off) { /* The length is stored in the same way for int and long. */ return getReadSortedIntLength(buf, off); } /** * Writes a packed sorted integer starting at the given buffer offset and * returns the next offset to be written. * * @param buf the buffer to write to. * * @param offset the offset in the buffer at which to start writing. * * @param value the integer to be written. * * @return the offset past the bytes written. */ public static int writeSortedInt(byte[] buf, int offset, int value) { /* * Values in the inclusive range [-119,120] are stored in a single * byte. For values outside that range, the first byte stores the * number of additional bytes. The additional bytes store * (value + 119 for negative and value - 121 for positive) as an * unsigned big endian integer. */ int byte1Off = offset; offset++; if (value < -119) { /* * If the value < -119, then first adjust the value by adding 119. * Then the adjusted value is stored as an unsigned big endian * integer. */ value += 119; /* * Store the adjusted value as an unsigned big endian integer. * For an negative integer, from left to right, the first * significant byte is the byte which is not equal to 0xFF. Also * please note that, because the adjusted value is stored in big * endian integer, we extract the significant byte from left to * right. * * In the left to right order, if the first byte of the adjusted * value is a significant byte, it will be stored in the 2nd byte * of the buf. 
Then we will look at the 2nd byte of the adjusted * value to see if this byte is the significant byte, if yes, this * byte will be stored in the 3rd byte of the buf, and the like. */ if ((value | 0x00FFFFFF) != 0xFFFFFFFF) { buf[offset++] = (byte) (value >> 24); } if ((value | 0x0000FFFF) != 0xFFFFFFFF) { buf[offset++] = (byte) (value >> 16); } if ((value | 0x000000FF) != 0xFFFFFFFF) { buf[offset++] = (byte) (value >> 8); } buf[offset++] = (byte) value; /* * valueLen is the length of the value part stored in buf. Because * the first byte of buf is used to stored the length, so we need * to minus one. */ int valueLen = offset - byte1Off - 1; /* * The first byte stores the number of additional bytes. Here we * store the result of 0x08 - valueLen, rather than directly store * valueLen. The reason is to implement nature sort order for * byte-by-byte comparison. */ buf[byte1Off] = (byte) (0x08 - valueLen); } else if (value > 120) { /* * If the value > 120, then first adjust the value by subtracting * 119. Then the adjusted value is stored as an unsigned big endian * integer. */ value -= 121; /* * Store the adjusted value as an unsigned big endian integer. * For a positive integer, from left to right, the first * significant byte is the byte which is not equal to 0x00. * * In the left to right order, if the first byte of the adjusted * value is a significant byte, it will be stored in the 2nd byte * of the buf. Then we will look at the 2nd byte of the adjusted * value to see if this byte is the significant byte, if yes, this * byte will be stored in the 3rd byte of the buf, and the like. */ if ((value & 0xFF000000) != 0) { buf[offset++] = (byte) (value >> 24); } if ((value & 0xFFFF0000) != 0) { buf[offset++] = (byte) (value >> 16); } if ((value & 0xFFFFFF00) != 0) { buf[offset++] = (byte) (value >> 8); } buf[offset++] = (byte) value; /* * valueLen is the length of the value part stored in buf. Because * the first byte of buf is used to stored the length, so we need * to minus one. */ int valueLen = offset - byte1Off - 1; /* * The first byte stores the number of additional bytes. Here we * store the result of 0xF7 + valueLen, rather than directly store * valueLen. The reason is to implement nature sort order for * byte-by-byte comparison. */ buf[byte1Off] = (byte) (0xF7 + valueLen); } else { /* * If -119 <= value <= 120, only one byte is needed to store the * value. The stored value is the original value adds 127. */ buf[byte1Off] = (byte) (value + 127); } return offset; } /** * Writes a packed sorted long integer starting at the given buffer offset * and returns the next offset to be written. * * @param buf the buffer to write to. * * @param offset the offset in the buffer at which to start writing. * * @param value the long integer to be written. * * @return the offset past the bytes written. */ public static int writeSortedLong(byte[] buf, int offset, long value) { /* * Values in the inclusive range [-119,120] are stored in a single * byte. For values outside that range, the first byte stores the * number of additional bytes. The additional bytes store * (value + 119 for negative and value - 121 for positive) as an * unsigned big endian integer. */ int byte1Off = offset; offset++; if (value < -119) { /* * If the value < -119, then first adjust the value by adding 119. * Then the adjusted value is stored as an unsigned big endian * integer. */ value += 119; /* * Store the adjusted value as an unsigned big endian integer. 
* For an negative integer, from left to right, the first * significant byte is the byte which is not equal to 0xFF. Also * please note that, because the adjusted value is stored in big * endian integer, we extract the significant byte from left to * right. * * In the left to right order, if the first byte of the adjusted * value is a significant byte, it will be stored in the 2nd byte * of the buf. Then we will look at the 2nd byte of the adjusted * value to see if this byte is the significant byte, if yes, this * byte will be stored in the 3rd byte of the buf, and the like. */ if ((value | 0x00FFFFFFFFFFFFFFL) != 0xFFFFFFFFFFFFFFFFL) { buf[offset++] = (byte) (value >> 56); } if ((value | 0x0000FFFFFFFFFFFFL) != 0xFFFFFFFFFFFFFFFFL) { buf[offset++] = (byte) (value >> 48); } if ((value | 0x000000FFFFFFFFFFL) != 0xFFFFFFFFFFFFFFFFL) { buf[offset++] = (byte) (value >> 40); } if ((value | 0x00000000FFFFFFFFL) != 0xFFFFFFFFFFFFFFFFL) { buf[offset++] = (byte) (value >> 32); } if ((value | 0x0000000000FFFFFFL) != 0xFFFFFFFFFFFFFFFFL) { buf[offset++] = (byte) (value >> 24); } if ((value | 0x000000000000FFFFL) != 0xFFFFFFFFFFFFFFFFL) { buf[offset++] = (byte) (value >> 16); } if ((value | 0x00000000000000FFL) != 0xFFFFFFFFFFFFFFFFL) { buf[offset++] = (byte) (value >> 8); } buf[offset++] = (byte) value; /* * valueLen is the length of the value part stored in buf. Because * the first byte of buf is used to stored the length, so we need * to minus one. */ int valueLen = offset - byte1Off - 1; /* * The first byte stores the number of additional bytes. Here we * store the result of 0x08 - valueLen, rather than directly store * valueLen. The reason is to implement nature sort order for * byte-by-byte comparison. */ buf[byte1Off] = (byte) (0x08 - valueLen); } else if (value > 120) { /* * If the value > 120, then first adjust the value by subtracting * 119. Then the adjusted value is stored as an unsigned big endian * integer. */ value -= 121; /* * Store the adjusted value as an unsigned big endian integer. * For a positive integer, from left to right, the first * significant byte is the byte which is not equal to 0x00. * * In the left to right order, if the first byte of the adjusted * value is a significant byte, it will be stored in the 2nd byte * of the buf. Then we will look at the 2nd byte of the adjusted * value to see if this byte is the significant byte, if yes, this * byte will be stored in the 3rd byte of the buf, and the like. */ if ((value & 0xFF00000000000000L) != 0L) { buf[offset++] = (byte) (value >> 56); } if ((value & 0xFFFF000000000000L) != 0L) { buf[offset++] = (byte) (value >> 48); } if ((value & 0xFFFFFF0000000000L) != 0L) { buf[offset++] = (byte) (value >> 40); } if ((value & 0xFFFFFFFF00000000L) != 0L) { buf[offset++] = (byte) (value >> 32); } if ((value & 0xFFFFFFFFFF000000L) != 0L) { buf[offset++] = (byte) (value >> 24); } if ((value & 0xFFFFFFFFFFFF0000L) != 0L) { buf[offset++] = (byte) (value >> 16); } if ((value & 0xFFFFFFFFFFFFFF00L) != 0L) { buf[offset++] = (byte) (value >> 8); } buf[offset++] = (byte) value; /* * valueLen is the length of the value part stored in buf. Because * the first byte of buf is used to stored the length, so we need * to minus one. */ int valueLen = offset - byte1Off - 1; /* * The first byte stores the number of additional bytes. Here we * store the result of 0xF7 + valueLen, rather than directly store * valueLen. The reason is to implement nature sort order for * byte-by-byte comparison. 
*/ buf[byte1Off] = (byte) (0xF7 + valueLen); } else { /* * If -119 <= value <= 120, only one byte is needed to store the * value. The stored value is the original value adds 127. */ buf[byte1Off] = (byte) (value + 127); } return offset; } /** * Returns the number of bytes that would be written by {@link * #writeSortedInt}. * * @param value the integer to be written. * * @return the number of bytes that would be used to write the given * integer. */ public static int getWriteSortedIntLength(int value) { if (value < -119) { /* Adjust the value. */ value += 119; /* * Find the left most significant byte of the adjusted value, and * return the length accordingly. */ if ((value | 0x000000FF) == 0xFFFFFFFF) { return 2; } if ((value | 0x0000FFFF) == 0xFFFFFFFF) { return 3; } if ((value | 0x00FFFFFF) == 0xFFFFFFFF) { return 4; } } else if (value > 120) { /* Adjust the value. */ value -= 121; /* * Find the left most significant byte of the adjusted value, and * return the length accordingly. */ if ((value & 0xFFFFFF00) == 0) { return 2; } if ((value & 0xFFFF0000) == 0) { return 3; } if ((value & 0xFF000000) == 0) { return 4; } } else { /* * If -119 <= value <= 120, only one byte is needed to store the * value. */ return 1; } return 5; } /** * Returns the number of bytes that would be written by {@link * #writeSortedLong}. * * @param value the long integer to be written. * * @return the number of bytes that would be used to write the given long * integer. */ public static int getWriteSortedLongLength(long value) { if (value < -119) { /* Adjust the value. */ value += 119; /* * Find the left most significant byte of the adjusted value, and * return the length accordingly. */ if ((value | 0x00000000000000FFL) == 0xFFFFFFFFFFFFFFFFL) { return 2; } if ((value | 0x000000000000FFFFL) == 0xFFFFFFFFFFFFFFFFL) { return 3; } if ((value | 0x0000000000FFFFFFL) == 0xFFFFFFFFFFFFFFFFL) { return 4; } if ((value | 0x00000000FFFFFFFFL) == 0xFFFFFFFFFFFFFFFFL) { return 5; } if ((value | 0x000000FFFFFFFFFFL) == 0xFFFFFFFFFFFFFFFFL) { return 6; } if ((value | 0x0000FFFFFFFFFFFFL) == 0xFFFFFFFFFFFFFFFFL) { return 7; } if ((value | 0x00FFFFFFFFFFFFFFL) == 0xFFFFFFFFFFFFFFFFL) { return 8; } } else if (value > 120) { /* Adjust the value. */ value -= 121; /* * Find the left most significant byte of the adjusted value, and * return the length accordingly. */ if ((value & 0xFFFFFFFFFFFFFF00L) == 0L) { return 2; } if ((value & 0xFFFFFFFFFFFF0000L) == 0L) { return 3; } if ((value & 0xFFFFFFFFFF000000L) == 0L) { return 4; } if ((value & 0xFFFFFFFF00000000L) == 0L) { return 5; } if ((value & 0xFFFFFF0000000000L) == 0L) { return 6; } if ((value & 0xFFFF000000000000L) == 0L) { return 7; } if ((value & 0xFF00000000000000L) == 0L) { return 8; } } else { /* * If -119 <= value <= 120, only one byte is needed to store the * value. */ return 1; } return 9; } }
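A short worked example may make the encoding above easier to follow: values in [-119, 119] fit in a single byte for writeInt/readInt, anything larger stores a length byte followed by the significant bytes of the adjusted value, and the sorted variants trade that format for one whose unsigned byte-by-byte order matches numeric order. The demo class below is illustrative only, not part of the library, and uses just the public API shown above.

import com.sleepycat.util.PackedInteger;

// Illustrative demo, not part of the original library.
public class PackedIntegerDemo {

    public static void main(String[] args) {
        byte[] buf = new byte[PackedInteger.MAX_LONG_LENGTH];

        // Small value: stored directly in one byte.
        int next = PackedInteger.writeInt(buf, 0, 42);
        System.out.println(next);                                    // 1
        System.out.println(PackedInteger.readInt(buf, 0));           // 42

        // Larger value: one length byte plus the significant bytes of (value - 119).
        next = PackedInteger.writeInt(buf, 0, 100000);
        System.out.println(next);                                    // 4
        System.out.println(PackedInteger.getWriteIntLength(100000)); // 4
        System.out.println(PackedInteger.readInt(buf, 0));           // 100000

        // Sorted variant: comparing the encodings as unsigned bytes preserves
        // numeric order, which the plain variant does not guarantee.
        byte[] a = new byte[PackedInteger.MAX_LENGTH];
        byte[] b = new byte[PackedInteger.MAX_LENGTH];
        PackedInteger.writeSortedInt(a, 0, -5000);
        PackedInteger.writeSortedInt(b, 0, 7);
        System.out.println((a[0] & 0xFF) < (b[0] & 0xFF));           // true, matching -5000 < 7
    }
}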
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.manager;

import java.text.DateFormat;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import org.apache.catalina.Session;
import org.apache.catalina.manager.util.SessionUtils;

/**
 * Helper JavaBean for JSPs, because JSTL 1.1/EL 2.0 is too dumb to
 * do what I need (call methods with parameters), or I am too dumb to use it correctly. :)
 * @author C&eacute;drik LIME
 */
public class JspHelper {

    private static final String DATE_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss";

    /**
     * Private constructor, so that this utility class cannot be instantiated.
     */
    private JspHelper() {
        super();
    }

    /**
     * Try to get user locale from the session, if possible.
     * IMPLEMENTATION NOTE: this method has explicit support for Tapestry 3 and Struts 1.x
     * @param in_session
     * @return String
     */
    public static String guessDisplayLocaleFromSession(Session in_session) {
        return localeToString(SessionUtils.guessLocaleFromSession(in_session));
    }

    private static String localeToString(Locale locale) {
        if (locale != null) {
            return escapeXml(locale.toString());//locale.getDisplayName();
        } else {
            return "";
        }
    }

    /**
     * Try to get user name from the session, if possible.
* @param in_session * @return String */ public static String guessDisplayUserFromSession(Session in_session) { Object user = SessionUtils.guessUserFromSession(in_session); return escapeXml(user); } public static String getDisplayCreationTimeForSession(Session in_session) { try { if (in_session.getCreationTime() == 0) { return ""; } DateFormat formatter = new SimpleDateFormat(DATE_TIME_FORMAT); return formatter.format(new Date(in_session.getCreationTime())); } catch (IllegalStateException ise) { //ignore: invalidated session return ""; } } public static String getDisplayLastAccessedTimeForSession(Session in_session) { try { if (in_session.getLastAccessedTime() == 0) { return ""; } DateFormat formatter = new SimpleDateFormat(DATE_TIME_FORMAT); return formatter.format(new Date(in_session.getLastAccessedTime())); } catch (IllegalStateException ise) { //ignore: invalidated session return ""; } } public static String getDisplayUsedTimeForSession(Session in_session) { try { if (in_session.getCreationTime() == 0) { return ""; } } catch (IllegalStateException ise) { //ignore: invalidated session return ""; } return secondsToTimeString(SessionUtils.getUsedTimeForSession(in_session)/1000); } public static String getDisplayTTLForSession(Session in_session) { try { if (in_session.getCreationTime() == 0) { return ""; } } catch (IllegalStateException ise) { //ignore: invalidated session return ""; } return secondsToTimeString(SessionUtils.getTTLForSession(in_session)/1000); } public static String getDisplayInactiveTimeForSession(Session in_session) { try { if (in_session.getCreationTime() == 0) { return ""; } } catch (IllegalStateException ise) { //ignore: invalidated session return ""; } return secondsToTimeString(SessionUtils.getInactiveTimeForSession(in_session)/1000); } public static String secondsToTimeString(long in_seconds) { StringBuilder buff = new StringBuilder(9); if (in_seconds < 0) { buff.append('-'); in_seconds = -in_seconds; } long rest = in_seconds; long hour = rest / 3600; rest = rest % 3600; long minute = rest / 60; rest = rest % 60; long second = rest; if (hour < 10) { buff.append('0'); } buff.append(hour); buff.append(':'); if (minute < 10) { buff.append('0'); } buff.append(minute); buff.append(':'); if (second < 10) { buff.append('0'); } buff.append(second); return buff.toString(); } /* * Following copied from org.apache.taglibs.standard.tag.common.core.Util v1.1.2 */ private static final int HIGHEST_SPECIAL = '>'; private static char[][] specialCharactersRepresentation = new char[HIGHEST_SPECIAL + 1][]; static { specialCharactersRepresentation['&'] = "&amp;".toCharArray(); specialCharactersRepresentation['<'] = "&lt;".toCharArray(); specialCharactersRepresentation['>'] = "&gt;".toCharArray(); specialCharactersRepresentation['"'] = "&#034;".toCharArray(); specialCharactersRepresentation['\''] = "&#039;".toCharArray(); } public static String escapeXml(Object obj) { String value = null; try { value = (obj == null) ? null : obj.toString(); } catch (Exception e) { // Ignore } return escapeXml(value); } /** * Performs the following substring replacements * (to facilitate output to XML/HTML pages): * * & -> &amp; * < -> &lt; * > -> &gt; * " -> &#034; * ' -> &#039; * * See also OutSupport.writeEscapedXml(). 
*/ @SuppressWarnings("null") // escapedBuffer cannot be null public static String escapeXml(String buffer) { if (buffer == null) { return ""; } int start = 0; int length = buffer.length(); char[] arrayBuffer = buffer.toCharArray(); StringBuilder escapedBuffer = null; for (int i = 0; i < length; i++) { char c = arrayBuffer[i]; if (c <= HIGHEST_SPECIAL) { char[] escaped = specialCharactersRepresentation[c]; if (escaped != null) { // create StringBuilder to hold escaped xml string if (start == 0) { escapedBuffer = new StringBuilder(length + 5); } // add unescaped portion if (start < i) { escapedBuffer.append(arrayBuffer,start,i-start); } start = i + 1; // add escaped xml escapedBuffer.append(escaped); } } } // no xml escaping was necessary if (start == 0) { return buffer; } // add rest of unescaped portion if (start < length) { escapedBuffer.append(arrayBuffer,start,length-start); } return escapedBuffer.toString(); } public static String formatNumber(long number) { return NumberFormat.getNumberInstance().format(number); } }
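
/*
 * A small usage sketch for the helpers above, assuming JspHelper is on the
 * classpath; the expected outputs in the comments are inferred from the code
 * above rather than from running Tomcat itself.
 */
class JspHelperUsageSketch {
    public static void main(String[] args) {
        // The five special characters are replaced; other text passes through untouched.
        System.out.println(org.apache.catalina.manager.JspHelper.escapeXml("<b>Tom & Jerry</b>"));
        // Expected: &lt;b&gt;Tom &amp; Jerry&lt;/b&gt;

        // Durations are rendered as zero-padded HH:MM:SS, with a leading '-' for negatives.
        System.out.println(org.apache.catalina.manager.JspHelper.secondsToTimeString(3661)); // 01:01:01
        System.out.println(org.apache.catalina.manager.JspHelper.secondsToTimeString(-61));  // -00:01:01
    }
}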
package com.alicode.game.dogedash.models; import com.alicode.game.dogedash.Assets; import com.alicode.game.dogedash.Consts; import com.alicode.game.dogedash.DogeDashCore; import com.alicode.game.dogedash.Statics; import com.alicode.game.dogedash.Statics.GameState; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.audio.Sound; import com.badlogic.gdx.graphics.g2d.Animation; import com.badlogic.gdx.graphics.g2d.ParticleEffect; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.math.Interpolation; import com.badlogic.gdx.math.Rectangle; import com.badlogic.gdx.scenes.scene2d.Action; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.actions.Actions; import com.badlogic.gdx.utils.Array; public class MotherDoge extends Actor { public static float playerX, playerY, playerZ; private final Animation dogeWalkAnimation; private final Animation dogeWalkHitAnimation; private final Animation dogeGotHit; private final Animation dogeSuperD; private final Animation dogeSuperDEffect; private float dogeWalkAnimationState; private Array<TextureRegion> dogeWalk, dogeWalkHit, dogeHit, dogeSuper, dogeSuperEffect; private Rectangle bounds = new Rectangle(); private TextureRegion frame; private ParticleEffect waterDrops, mudDrops; private boolean reverseControls = false; public MotherDoge() { setWidth(Assets.character.getRegionWidth()); setHeight(Assets.character.getRegionHeight()); dogeWalk = new Array<TextureRegion>(); dogeWalkHit = new Array<TextureRegion>(); dogeSuper = new Array<TextureRegion>(); dogeHit = new Array<TextureRegion>(); dogeSuperEffect = new Array<TextureRegion>(); dogeSuperEffect.add(Assets.energy1); dogeSuperEffect.add(Assets.energy2); dogeSuperEffect.add(Assets.energy3); dogeWalk.add(Assets.character); dogeWalk.add(Assets.character2); waterDrops = new ParticleEffect(); waterDrops.load(Gdx.files.internal("particles/waterDrops"), Gdx.files.internal("particles")); mudDrops = new ParticleEffect(); mudDrops.load(Gdx.files.internal("particles/mudDrops"), Gdx.files.internal("particles")); dogeWalkHit.add(Assets.character); dogeWalkHit.add(Assets.character2); dogeWalkHit.add(Assets.characterHit); dogeWalkHit.add(Assets.characterHit2); dogeHit.add(Assets.characterHit); dogeHit.add(Assets.characterHit2); dogeSuper.add(Assets.dogsuper1); dogeSuper.add(Assets.dogsuper2); this.dogeWalkAnimation = new Animation(0.15f, dogeWalk); this.dogeWalkHitAnimation = new Animation(0.15f, dogeWalkHit); this.dogeGotHit = new Animation(0.05f, dogeHit); this.dogeSuperD = new Animation(0.15f, dogeSuper); this.dogeSuperDEffect = new Animation(0.15f, dogeSuperEffect); setPosition(Consts.GAMEWIDTH / 8, Consts.GAMEHEIGHT / 2); setOrigin(Assets.character.getRegionWidth() / 2, Assets.character.getRegionHeight() / 2); // if (Statics.gameLevel == 2) // setColor(0.15f, 0.15f, 0.4f, 1.0f); } @Override public void act(float delta) { if (Statics.state == Statics.GameState.Running) { super.act(delta); updateBounds(); jumpUpdate(); updatePlayerStatus(); dogeHitByLog(); } } private void updatePlayerStatus() { } @Override public void draw(SpriteBatch batch, float parentAlpha) { batch.setColor(getColor().r, getColor().g, getColor().b, getColor().a); if (!Statics.isSuperD) { frame = dogeWalkAnimation.getKeyFrame(dogeWalkAnimationState += Gdx.graphics.getDeltaTime() / 2, true); if (Statics.enemiesOnPlayer > 2) frame = dogeWalkHitAnimation.getKeyFrame(dogeWalkAnimationState += Gdx.graphics.getDeltaTime() / 2, true); if 
(Statics.playerHitByBee || Statics.playerHitAnimation) { addAction(Actions.sequence(Actions.parallel(Actions.fadeOut(0.2f), Actions.fadeIn(0.2f)))); frame = dogeGotHit.getKeyFrame(dogeWalkAnimationState += Gdx.graphics.getDeltaTime() / 2, true); } if (Statics.playerJump && Statics.state == GameState.Running) { frame = Assets.characterJump; } if (Statics.state == GameState.GameOver) { frame = Assets.characterDie; } batch.draw(frame, getX(), getY(), frame.getRegionWidth() / 2, frame.getRegionHeight() / 2, frame.getRegionWidth(), frame.getRegionHeight(), 1, 1, getRotation()); if (Statics.playerHitByPuddle) updateWaterParticles(batch); if (Statics.playerHitByMud) updateMudParticles(batch); } if (Statics.isSuperD) { Statics.cleanseEnemies = true; frame = dogeSuperD.getKeyFrame(dogeWalkAnimationState += Gdx.graphics.getDeltaTime() / 2, true); batch.draw(frame, getX(), getY(), frame.getRegionWidth() / 2, frame.getRegionHeight() / 2, frame.getRegionWidth(), frame.getRegionHeight(), 1, 1, getRotation()); frame = dogeSuperDEffect.getKeyFrame(dogeWalkAnimationState += Gdx.graphics.getDeltaTime() / 2, true); batch.draw(frame, getX() - 170, getY() - 100, frame.getRegionWidth() / 2, frame.getRegionHeight() / 2, frame.getRegionWidth(), frame.getRegionHeight(), 1, 1, getRotation()); } if (Statics.gameLevel == 2 && Statics.dogeLampActive) { frame = Assets.shield; batch.draw(frame, getX() - 60, getY() - 100, frame.getRegionWidth() / 2, frame.getRegionHeight() / 2, frame.getRegionWidth(), frame.getRegionHeight(), 1, 1, getRotation()); } } private void updateMudParticles(SpriteBatch batch) { mudDrops.start(); mudDrops.setPosition(getX() + Assets.character.getRegionWidth(), getY()); for (int i = 0; i < mudDrops.getEmitters().size; i++) { mudDrops.getEmitters().get(i).getAngle().setLow(180); mudDrops.getEmitters().get(i).getAngle().setHigh(180); } mudDrops.draw(batch); mudDrops.update(Gdx.graphics.getDeltaTime()); } private void updateWaterParticles(SpriteBatch batch) { waterDrops.start(); waterDrops.setPosition(getX() + Assets.character.getRegionWidth(), getY()); for (int i = 0; i < waterDrops.getEmitters().size; i++) { waterDrops.getEmitters().get(i).getAngle().setLow(180); waterDrops.getEmitters().get(i).getAngle().setHigh(180); } waterDrops.draw(batch); waterDrops.update(Gdx.graphics.getDeltaTime()); } public void startJump() { if (!Statics.isSuperD && !Statics.playerJump && Statics.playerJumpCooldown < 0) { playerZ = 80; Statics.playerJump = true; } } private void jumpUpdate() { if (playerZ >= 0) { Statics.playerJump = true; playerZ--; Statics.playerJumpCooldown = Consts.JUMP_CD; } else { Statics.playerJump = false; Statics.playerJumpCooldown--; } } private void updateBounds() { bounds.set(getX(), getY(), getWidth(), getHeight()); } public void changeAnimation() { } public void normalDogeMovement(float playerX, float playerY) { if (!Statics.playerJump) { MotherDoge.playerX = playerX; MotherDoge.playerY = playerY; if (Statics.playerHitByLog) { MotherDoge.playerX = playerX; MotherDoge.playerY = -playerY + 480; } if (MotherDoge.playerY >= 390) MotherDoge.playerY = 390; if (MotherDoge.playerY <= 0) MotherDoge.playerY = 0; if (MotherDoge.playerX > 110) MotherDoge.playerX--; if (MotherDoge.playerX <= 110) MotherDoge.playerX++; } addAction(Actions.parallel(Actions.moveTo(MotherDoge.playerX, MotherDoge.playerY, 0.5f))); } public void dogeHitByLog() { if (!Statics.isSuperD) { if (!reverseControls && Statics.playerHitByLog) { Action completeAction = new Action() { public boolean act(float delta) { 
                        Statics.playerHitAnimation = false;
                        reverseControls = true;
                        return true;
                    }
                };
                addAction(Actions.parallel(Actions.sequence(Actions.rotateTo(-180, 1), completeAction)));
            }
            if (reverseControls && !Statics.playerHitByLog) {
                reverseControls = false;
            }
        }
    }

    public Rectangle getBounds() {
        return bounds;
    }
}
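
/*
 * A dependency-free sketch (illustrative names, not part of the game code
 * above) of the position handling in normalDogeMovement: Y is clamped to
 * [0, 390] and X is stepped one unit per call toward the anchor column near
 * x = 110. It mirrors the two consecutive checks in the original, including
 * the fact that a value exactly at 110 is stepped back up.
 */
final class DogePositionSketch {

    static float clampY(float y) {
        // Matches: if (playerY >= 390) playerY = 390; if (playerY <= 0) playerY = 0;
        if (y >= 390f) {
            return 390f;
        }
        if (y <= 0f) {
            return 0f;
        }
        return y;
    }

    static float stepXTowardAnchor(float x) {
        // Matches: if (playerX > 110) playerX--; if (playerX <= 110) playerX++;
        if (x > 110f) {
            x--;
        }
        if (x <= 110f) {
            x++;
        }
        return x;
    }

    public static void main(String[] args) {
        System.out.println(clampY(500f));            // 390.0
        System.out.println(stepXTowardAnchor(200f)); // 199.0
        System.out.println(stepXTowardAnchor(110f)); // 111.0
    }
}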
package com.matt.forgehax.mods; import static com.matt.forgehax.util.spam.SpamTokens.MESSAGE; import static com.matt.forgehax.util.spam.SpamTokens.PLAYER_NAME; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.util.concurrent.FutureCallback; import com.matt.forgehax.Helper; import com.matt.forgehax.events.ChatMessageEvent; import com.matt.forgehax.events.PlayerConnectEvent; import com.matt.forgehax.mods.services.SpamService; import com.matt.forgehax.util.ArrayHelper; import com.matt.forgehax.util.command.CommandHelper; import com.matt.forgehax.util.command.Options; import com.matt.forgehax.util.command.Setting; import com.matt.forgehax.util.common.PriorityEnum; import com.matt.forgehax.util.entity.PlayerInfo; import com.matt.forgehax.util.entity.PlayerInfoHelper; import com.matt.forgehax.util.entry.CustomMessageEntry; import com.matt.forgehax.util.mod.Category; import com.matt.forgehax.util.mod.ToggleMod; import com.matt.forgehax.util.mod.loader.RegisterMod; import com.matt.forgehax.util.spam.SpamMessage; import com.matt.forgehax.util.spam.SpamTokens; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicLong; import javax.annotation.Nullable; import joptsimple.internal.Strings; import net.minecraft.client.entity.EntityPlayerSP; import net.minecraft.util.text.Style; import net.minecraft.util.text.TextFormatting; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; /** * Created on 7/21/2017 by fr1kin */ @RegisterMod public class JoinMessage extends ToggleMod { private static final SpamTokens[] SPAM_TOKENS = new SpamTokens[]{PLAYER_NAME, MESSAGE}; private final Options<CustomMessageEntry> messages = getCommandStub() .builders() .<CustomMessageEntry>newOptionsBuilder() .name("messages") .description("Custom messages") .factory(CustomMessageEntry::new) .supplier(Sets::newConcurrentHashSet) .build(); private final Setting<String> keyword = getCommandStub() .builders() .<String>newSettingBuilder() .name("keyword") .description("Keyword for the join message") .defaultTo("!joinmessage") .build(); private final Setting<String> format = getCommandStub() .builders() .<String>newSettingBuilder() .name("format") .description( "Join message format (Use {PLAYER_NAME} for the player joining, {MESSAGE} for the set message)") .defaultTo("<{PLAYER_NAME}> {MESSAGE}") .build(); private final Setting<Long> delay = getCommandStub() .builders() .<Long>newSettingBuilder() .name("delay") .description("Delay between each message in ms") .defaultTo(15000L) .build(); private final Setting<Integer> message_length = getCommandStub() .builders() .<Integer>newSettingBuilder() .name("message_length") .description("Maximum length of a custom message") .defaultTo(25) .build(); private final Setting<Boolean> use_offline = getCommandStub() .builders() .<Boolean>newSettingBuilder() .name("use_offline") .description("Allows non-authenticated player names to be added") .defaultTo(false) .build(); private final Setting<Long> set_cooldown = getCommandStub() .builders() .<Long>newSettingBuilder() .name("set_cooldown") .description("Setting cooldown for individual players in ms") .defaultTo(15000L) .build(); private final Setting<Integer> max_player_messages = getCommandStub() .builders() .<Integer>newSettingBuilder() .name("max_player_messages") .description("Maximum number of messages per individual player") .defaultTo(5) .min(1) .max(Integer.MAX_VALUE) .changed( cb -> { messages.forEach(e -> e.setSize(cb.getTo())); 
messages.serialize(); }) .build(); private final Setting<Boolean> debug_messages = getCommandStub() .builders() .<Boolean>newSettingBuilder() .name("debug_messages") .description("Displays messages in chat if a player fails to use the command properly") .defaultTo(false) .build(); private final Map<UUID, AtomicLong> cooldowns = Maps.newConcurrentMap(); public JoinMessage() { super(Category.MISC, "JoinMessage", false, "Allows players to add custom join messages"); } private void debugMessage(String str) { if (debug_messages.get()) { Helper.printMessageNaked( Strings.EMPTY, str, new Style().setItalic(true).setColor(TextFormatting.GRAY)); } } private void setJoinMessage(UUID target, UUID setter, String message) { CustomMessageEntry entry = messages.get(target); if (entry == null) { entry = new CustomMessageEntry(target); messages.add(entry); } String replyMessage = "Join message changed."; if (!entry.containsEntry(setter)) { entry.setSize(max_player_messages.get() - 1); // evict a random message replyMessage = "Join message set."; } entry.addMessage(setter, message); // correct size now // set cooldown cooldowns .computeIfAbsent(setter, s -> new AtomicLong(0L)) .set(System.currentTimeMillis() + set_cooldown.get()); messages.serialize(); SpamService.send( new SpamMessage(replyMessage, "JOIN_MESSAGE_REPLY", 0, null, PriorityEnum.HIGHEST)); } @SubscribeEvent public void onPlayerChat(ChatMessageEvent event) { String[] args = event.getMessage().split(" "); if (args.length < 3) { return; // not enough arguments } final String keyword = ArrayHelper.getOrDefault(args, 0, Strings.EMPTY); if (!this.keyword.get().equalsIgnoreCase(keyword)) { return; } final String target = ArrayHelper.getOrDefault(args, 1, Strings.EMPTY); if (target.length() > PlayerInfoHelper.MAX_NAME_LENGTH) { debugMessage("Input name over valid length"); return; } if (target.equalsIgnoreCase(event.getSender().getName())) { debugMessage("Cannot set own join message"); return; } final String message = CommandHelper.join(args, " ", 2, args.length); if (Strings.isNullOrEmpty(message)) { debugMessage("Invalid message (null or empty)"); return; } if (message.length() > message_length.get()) { debugMessage("Message over maximum specified by JoinMessage.message_length"); return; } // setter is not in cooldown if (System.currentTimeMillis() < cooldowns.getOrDefault(event.getSender().getId(), new AtomicLong(0L)).get()) { debugMessage("Player is currently in a cooldown"); return; } if (use_offline.get()) { // use offline ID setJoinMessage(EntityPlayerSP.getOfflineUUID(target), event.getSender().getId(), message); return; // join message set, stop here } PlayerInfoHelper.registerWithCallback( target, new FutureCallback<PlayerInfo>() { @Override public void onSuccess(@Nullable PlayerInfo result) { if (result != null && !result.isOfflinePlayer()) { setJoinMessage(result.getId(), event.getSender().getId(), message); } } @Override public void onFailure(Throwable t) { } }); } @SubscribeEvent public void onPlayerConnect(PlayerConnectEvent.Join event) { CustomMessageEntry entry = messages.get(event.getPlayerInfo().getId()); if (entry != null) { // resize if needed if (entry.getSize() > max_player_messages.get()) { entry.setSize(max_player_messages.get()); } SpamService.send( new SpamMessage( SpamTokens.fillAll( format.get(), SPAM_TOKENS, event.getPlayerInfo().getName(), entry.getRandomMessage()), "JOIN_MESSAGE", delay.get(), null, PriorityEnum.HIGH)); } } }
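
/*
 * A dependency-free sketch of the chat-command parsing performed by
 * onPlayerChat above: "<keyword> <targetName> <message ...>". Only the
 * argument handling is mirrored here; cooldowns, UUID resolution and the
 * spam queue are intentionally omitted, and the class name is illustrative.
 */
final class JoinMessageCommandSketch {

    /** Returns {target, message} on success, or null if the line is not a valid command. */
    static String[] parse(String chatLine, String keyword) {
        String[] args = chatLine.split(" ");
        if (args.length < 3) {
            return null; // not enough arguments
        }
        if (!keyword.equalsIgnoreCase(args[0])) {
            return null; // different command
        }
        String target = args[1];
        // Everything after the target name is the message, re-joined on spaces.
        String message = String.join(" ", java.util.Arrays.copyOfRange(args, 2, args.length));
        return new String[] {target, message};
    }

    public static void main(String[] args) {
        String[] parsed = parse("!joinmessage SomePlayer welcome back", "!joinmessage");
        System.out.println(parsed[0] + " -> " + parsed[1]); // SomePlayer -> welcome back
    }
}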
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators; import java.io.IOException; import java.util.List; import java.util.LinkedList; import org.apache.hadoop.mapreduce.Counter; import org.apache.pig.PigException; import org.apache.pig.SortInfo; import org.apache.pig.StoreFuncInterface; import org.apache.pig.backend.executionengine.ExecException; import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReducePOStoreImpl; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhyPlanVisitor; import org.apache.pig.data.Tuple; import org.apache.pig.impl.PigContext; import org.apache.pig.impl.io.FileSpec; import org.apache.pig.impl.logicalLayer.schema.Schema; import org.apache.pig.impl.plan.OperatorKey; import org.apache.pig.impl.plan.VisitorException; import org.apache.pig.impl.util.IdentityHashSet; import org.apache.pig.pen.util.ExampleTuple; import org.apache.pig.pen.util.LineageTracer; /** * The store operator which is used in two ways: * 1) As a local operator it can be used to store files * 2) In the Map Reduce setting, it is used to create jobs * from MapReduce operators which keep the loads and * stores in the Map and Reduce Plans till the job is created * */ public class POStore extends PhysicalOperator { private static final long serialVersionUID = 1L; private static Result empty = new Result(POStatus.STATUS_NULL, null); transient private StoreFuncInterface storer; transient private POStoreImpl impl; private FileSpec sFile; private Schema schema; transient private Counter outputRecordCounter = null; // flag to distinguish user stores from MRCompiler stores. private boolean isTmpStore; // flag to distinguish single store from multiquery store. private boolean isMultiStore; // flag to indicate if the custom counter should be disabled. private boolean disableCounter = false; // the index of multiquery store to track counters private int index; // If we know how to reload the store, here's how. The lFile // FileSpec is set in PigServer.postProcess. It can be used to // reload this store, if the optimizer has the need. 
private FileSpec lFile; // if the predecessor of store is Sort (order by) // then sortInfo will have information of the sort // column names and the asc/dsc info private SortInfo sortInfo; private String signature; public POStore(OperatorKey k) { this(k, -1, null); } public POStore(OperatorKey k, int rp) { this(k, rp, null); } public POStore(OperatorKey k, int rp, List<PhysicalOperator> inp) { super(k, rp, inp); } /** * Set up the storer * @throws IOException */ public void setUp() throws IOException{ if (impl != null) { try{ storer = impl.createStoreFunc(this); if (!isTmpStore && !disableCounter && impl instanceof MapReducePOStoreImpl) { outputRecordCounter = ((MapReducePOStoreImpl) impl).createRecordCounter(this); } }catch (IOException ioe) { int errCode = 2081; String msg = "Unable to setup the store function."; throw new ExecException(msg, errCode, PigException.BUG, ioe); } } } /** * Called at the end of processing for clean up. * @throws IOException */ public void tearDown() throws IOException{ if (impl != null) { impl.tearDown(); } } /** * To perform cleanup when there is an error. * @throws IOException */ public void cleanUp() throws IOException{ if (impl != null) { impl.cleanUp(); } } @Override public Result getNext(Tuple t) throws ExecException { Result res = processInput(); try { switch (res.returnStatus) { case POStatus.STATUS_OK: if (illustrator == null) { storer.putNext((Tuple)res.result); } else illustratorMarkup(res.result, res.result, 0); res = empty; if (outputRecordCounter != null) { outputRecordCounter.increment(1); } break; case POStatus.STATUS_EOP: break; case POStatus.STATUS_ERR: case POStatus.STATUS_NULL: default: break; } } catch (IOException ioe) { int errCode = 2135; String msg = "Received error from store function." + ioe.getMessage(); throw new ExecException(msg, errCode, ioe); } return res; } @Override public String name() { return (sFile != null) ? 
getAliasString() + "Store" + "(" + sFile.toString() + ")" + " - " + mKey.toString() : getAliasString() + "Store" + "(" + "DummyFil:DummyLdr" + ")" + " - " + mKey.toString(); } @Override public boolean supportsMultipleInputs() { return false; } @Override public boolean supportsMultipleOutputs() { return true; } @Override public void visit(PhyPlanVisitor v) throws VisitorException { v.visitStore(this); } public FileSpec getSFile() { return sFile; } public void setSFile(FileSpec sFile) { this.sFile = sFile; } public void setInputSpec(FileSpec lFile) { this.lFile = lFile; } public FileSpec getInputSpec() { return lFile; } public void setIsTmpStore(boolean tmp) { isTmpStore = tmp; } public boolean isTmpStore() { return isTmpStore; } public void setStoreImpl(POStoreImpl impl) { this.impl = impl; } public void setSchema(Schema schema) { this.schema = schema; } public Schema getSchema() { return schema; } public StoreFuncInterface getStoreFunc() { if(storer == null){ storer = (StoreFuncInterface)PigContext.instantiateFuncFromSpec(sFile.getFuncSpec()); storer.setStoreFuncUDFContextSignature(signature); } return storer; } /** * @param sortInfo the sortInfo to set */ public void setSortInfo(SortInfo sortInfo) { this.sortInfo = sortInfo; } /** * @return the sortInfo */ public SortInfo getSortInfo() { return sortInfo; } public String getSignature() { return signature; } public void setSignature(String signature) { this.signature = signature; } public void setMultiStore(boolean isMultiStore) { this.isMultiStore = isMultiStore; } public boolean isMultiStore() { return isMultiStore; } @Override public Tuple illustratorMarkup(Object in, Object out, int eqClassIndex) { if(illustrator != null) { ExampleTuple tIn = (ExampleTuple) in; LineageTracer lineage = illustrator.getLineage(); lineage.insert(tIn); if (!isTmpStore) illustrator.getEquivalenceClasses().get(eqClassIndex).add(tIn); illustrator.addData((Tuple) out); } return (Tuple) out; } public void setIndex(int index) { this.index = index; } public int getIndex() { return index; } public void setDisableCounter(boolean disableCounter) { this.disableCounter = disableCounter; } public boolean disableCounter() { return disableCounter; } }
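
/*
 * A dependency-free sketch of the result handling in POStore.getNext above:
 * an OK result is written through the store function and then reported as
 * the shared NULL result, while EOP/ERR/NULL results pass through unchanged.
 * The int constants and the Consumer stand in for the real Pig types.
 */
final class StoreResultHandlingSketch {

    static final int STATUS_OK = 0;
    static final int STATUS_EOP = 1;
    static final int STATUS_ERR = 2;
    static final int STATUS_NULL = 3;

    static int handle(int returnStatus, Object tuple, java.util.function.Consumer<Object> putNext) {
        switch (returnStatus) {
            case STATUS_OK:
                putNext.accept(tuple); // hand the tuple to the storage function
                return STATUS_NULL;    // POStore reports NULL after a successful write
            case STATUS_EOP:
            case STATUS_ERR:
            case STATUS_NULL:
            default:
                return returnStatus;   // end-of-plan and errors flow through untouched
        }
    }

    public static void main(String[] args) {
        System.out.println(handle(STATUS_OK, "row-1", row -> System.out.println("stored " + row)));
        System.out.println(handle(STATUS_EOP, null, row -> { }));
    }
}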
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.contrib.index.main; import java.io.IOException; import java.text.NumberFormat; import java.util.Arrays; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.contrib.index.mapred.IndexUpdateConfiguration; import org.apache.hadoop.contrib.index.mapred.IIndexUpdater; import org.apache.hadoop.contrib.index.mapred.Shard; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.util.ReflectionUtils; /** * A distributed "index" is partitioned into "shards". Each shard corresponds * to a Lucene instance. This class contains the main() method which uses a * Map/Reduce job to analyze documents and update Lucene instances in parallel. * * The main() method in UpdateIndex requires the following information for * updating the shards: * - Input formatter. This specifies how to format the input documents. * - Analysis. This defines the analyzer to use on the input. The analyzer * determines whether a document is being inserted, updated, or deleted. * For inserts or updates, the analyzer also converts each input document * into a Lucene document. * - Input paths. This provides the location(s) of updated documents, * e.g., HDFS files or directories, or HBase tables. * - Shard paths, or index path with the number of shards. Either specify * the path for each shard, or specify an index path and the shards are * the sub-directories of the index directory. * - Output path. When the update to a shard is done, a message is put here. * - Number of map tasks. * * All of the information can be specified in a configuration file. All but * the first two can also be specified as command line options. Check out * conf/index-config.xml.template for other configurable parameters. * * Note: Because of the parallel nature of Map/Reduce, the behaviour of * multiple inserts, deletes or updates to the same document is undefined. 
*/ public class UpdateIndex { public static final Log LOG = LogFactory.getLog(UpdateIndex.class); private static final NumberFormat NUMBER_FORMAT = NumberFormat.getInstance(); static { NUMBER_FORMAT.setMinimumIntegerDigits(5); NUMBER_FORMAT.setGroupingUsed(false); } private static long now() { return System.currentTimeMillis(); } private static void printUsage(String cmd) { System.err.println("Usage: java " + UpdateIndex.class.getName() + "\n" + " -inputPaths <inputPath,inputPath>\n" + " -outputPath <outputPath>\n" + " -shards <shardDir,shardDir>\n" + " -indexPath <indexPath>\n" + " -numShards <num>\n" + " -numMapTasks <num>\n" + " -conf <confPath>\n" + "Note: Do not use both -shards option and -indexPath option."); } private static String getIndexPath(Configuration conf) { return conf.get("sea.index.path"); } private static int getNumShards(Configuration conf) { return conf.getInt("sea.num.shards", 1); } private static Shard[] createShards(String indexPath, int numShards, Configuration conf) throws IOException { String parent = Shard.normalizePath(indexPath) + Path.SEPARATOR; long versionNumber = -1; long generation = -1; FileSystem fs = FileSystem.get(conf); Path path = new Path(indexPath); if (fs.exists(path)) { FileStatus[] fileStatus = fs.listStatus(path); String[] shardNames = new String[fileStatus.length]; int count = 0; for (int i = 0; i < fileStatus.length; i++) { if (fileStatus[i].isDir()) { shardNames[count] = fileStatus[i].getPath().getName(); count++; } } Arrays.sort(shardNames, 0, count); Shard[] shards = new Shard[count >= numShards ? count : numShards]; for (int i = 0; i < count; i++) { shards[i] = new Shard(versionNumber, parent + shardNames[i], generation); } int number = count; for (int i = count; i < numShards; i++) { String shardPath; while (true) { shardPath = parent + NUMBER_FORMAT.format(number++); if (!fs.exists(new Path(shardPath))) { break; } } shards[i] = new Shard(versionNumber, shardPath, generation); } return shards; } else { Shard[] shards = new Shard[numShards]; for (int i = 0; i < shards.length; i++) { shards[i] = new Shard(versionNumber, parent + NUMBER_FORMAT.format(i), generation); } return shards; } } /** * The main() method * @param argv */ public static void main(String[] argv) { if (argv.length == 0) { printUsage(""); System.exit(-1); } String inputPathsString = null; Path outputPath = null; String shardsString = null; String indexPath = null; int numShards = -1; int numMapTasks = -1; Configuration conf = new Configuration(); String confPath = null; // parse the command line for (int i = 0; i < argv.length; i++) { // parse command line if (argv[i].equals("-inputPaths")) { inputPathsString = argv[++i]; } else if (argv[i].equals("-outputPath")) { outputPath = new Path(argv[++i]); } else if (argv[i].equals("-shards")) { shardsString = argv[++i]; } else if (argv[i].equals("-indexPath")) { indexPath = argv[++i]; } else if (argv[i].equals("-numShards")) { numShards = Integer.parseInt(argv[++i]); } else if (argv[i].equals("-numMapTasks")) { numMapTasks = Integer.parseInt(argv[++i]); } else if (argv[i].equals("-conf")) { // add as a local FS resource confPath = argv[++i]; conf.addResource(new Path(confPath)); } else { System.out.println("Unknown option " + argv[i] + " w/ value " + argv[++i]); } } LOG.info("inputPaths = " + inputPathsString); LOG.info("outputPath = " + outputPath); LOG.info("shards = " + shardsString); LOG.info("indexPath = " + indexPath); LOG.info("numShards = " + numShards); LOG.info("numMapTasks= " + numMapTasks); LOG.info("confPath = " + 
confPath); Path[] inputPaths = null; Shard[] shards = null; JobConf jobConf = new JobConf(conf); IndexUpdateConfiguration iconf = new IndexUpdateConfiguration(jobConf); if (inputPathsString != null) { jobConf.set("mapred.input.dir", inputPathsString); } inputPaths = FileInputFormat.getInputPaths(jobConf); if (inputPaths.length == 0) { inputPaths = null; } if (outputPath == null) { outputPath = FileOutputFormat.getOutputPath(jobConf); } if (inputPaths == null || outputPath == null) { System.err.println("InputPaths and outputPath must be specified."); printUsage(""); System.exit(-1); } if (shardsString != null) { iconf.setIndexShards(shardsString); } shards = Shard.getIndexShards(iconf); if (shards != null && shards.length == 0) { shards = null; } if (indexPath == null) { indexPath = getIndexPath(conf); } if (numShards <= 0) { numShards = getNumShards(conf); } if (shards == null && indexPath == null) { System.err.println("Either shards or indexPath must be specified."); printUsage(""); System.exit(-1); } if (numMapTasks <= 0) { numMapTasks = jobConf.getNumMapTasks(); } try { // create shards and set their directories if necessary if (shards == null) { shards = createShards(indexPath, numShards, conf); } long startTime = now(); try { IIndexUpdater updater = (IIndexUpdater) ReflectionUtils.newInstance( iconf.getIndexUpdaterClass(), conf); LOG.info("sea.index.updater = " + iconf.getIndexUpdaterClass().getName()); updater.run(conf, inputPaths, outputPath, numMapTasks, shards); LOG.info("Index update job is done"); } finally { long elapsedTime = now() - startTime; LOG.info("Elapsed time is " + (elapsedTime / 1000) + "s"); System.out.println("Elapsed time is " + (elapsedTime / 1000) + "s"); } } catch (Exception e) { e.printStackTrace(System.err); } } }
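
/*
 * A small sketch of the shard-naming convention used by createShards above:
 * a NumberFormat with five minimum integer digits and grouping disabled
 * produces zero-padded, lexicographically sortable directory names under the
 * index path (the "index/" prefix here is illustrative).
 */
final class ShardNameSketch {
    public static void main(String[] args) {
        java.text.NumberFormat fmt = java.text.NumberFormat.getInstance();
        fmt.setMinimumIntegerDigits(5);
        fmt.setGroupingUsed(false);
        for (int i = 0; i < 3; i++) {
            System.out.println("index/" + fmt.format(i)); // index/00000, index/00001, index/00002
        }
    }
}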
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.cluster.impl; import com.codahale.metrics.Timer; import com.codahale.metrics.Timer.Context; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onlab.metrics.MetricsService; import org.onosproject.cluster.ClusterEvent; import org.onosproject.cluster.ClusterEventListener; import org.onosproject.cluster.ClusterService; import org.onosproject.cluster.ControllerNode; import org.onosproject.cluster.NodeId; import org.onosproject.cluster.RoleInfo; import org.onosproject.core.MetricsHelper; import org.onosproject.event.AbstractListenerRegistry; import org.onosproject.event.EventDeliveryService; import org.onosproject.mastership.MastershipAdminService; import org.onosproject.mastership.MastershipEvent; import org.onosproject.mastership.MastershipListener; import org.onosproject.mastership.MastershipService; import org.onosproject.mastership.MastershipStore; import org.onosproject.mastership.MastershipStoreDelegate; import org.onosproject.mastership.MastershipTerm; import org.onosproject.mastership.MastershipTermService; import org.onosproject.net.DeviceId; import org.onosproject.net.MastershipRole; import org.slf4j.Logger; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.Lists.newArrayList; import static org.onlab.metrics.MetricsUtil.startTimer; import static org.onlab.metrics.MetricsUtil.stopTimer; import static org.onosproject.cluster.ControllerNode.State.ACTIVE; import static org.onosproject.net.MastershipRole.MASTER; import static org.slf4j.LoggerFactory.getLogger; @Component(immediate = true) @Service public class MastershipManager implements MastershipService, MastershipAdminService, MastershipTermService, MetricsHelper { private static final String NODE_ID_NULL = "Node ID cannot be null"; private static final String DEVICE_ID_NULL = "Device ID cannot be null"; private static final String ROLE_NULL = "Mastership role cannot be null"; private final Logger log = getLogger(getClass()); protected final AbstractListenerRegistry<MastershipEvent, MastershipListener> listenerRegistry = new AbstractListenerRegistry<>(); private final MastershipStoreDelegate delegate = new InternalDelegate(); @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected MastershipStore store; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected EventDeliveryService eventDispatcher; @Reference(cardinality = 
ReferenceCardinality.MANDATORY_UNARY) protected ClusterService clusterService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected MetricsService metricsService; private ClusterEventListener clusterListener = new InternalClusterEventListener(); private Timer requestRoleTimer; @Activate public void activate() { requestRoleTimer = createTimer("Mastership", "requestRole", "responseTime"); eventDispatcher.addSink(MastershipEvent.class, listenerRegistry); clusterService.addListener(clusterListener); store.setDelegate(delegate); log.info("Started"); } @Deactivate public void deactivate() { eventDispatcher.removeSink(MastershipEvent.class); clusterService.removeListener(clusterListener); store.unsetDelegate(delegate); log.info("Stopped"); } @Override public void setRole(NodeId nodeId, DeviceId deviceId, MastershipRole role) { checkNotNull(nodeId, NODE_ID_NULL); checkNotNull(deviceId, DEVICE_ID_NULL); checkNotNull(role, ROLE_NULL); MastershipEvent event = null; switch (role) { case MASTER: event = store.setMaster(nodeId, deviceId); break; case STANDBY: event = store.setStandby(nodeId, deviceId); break; case NONE: event = store.relinquishRole(nodeId, deviceId); break; default: log.info("Unknown role; ignoring"); return; } if (event != null) { post(event); } } @Override public MastershipRole getLocalRole(DeviceId deviceId) { checkNotNull(deviceId, DEVICE_ID_NULL); return store.getRole(clusterService.getLocalNode().id(), deviceId); } @Override public void relinquishMastership(DeviceId deviceId) { MastershipEvent event = null; event = store.relinquishRole( clusterService.getLocalNode().id(), deviceId); if (event != null) { post(event); } } @Override public MastershipRole requestRoleFor(DeviceId deviceId) { checkNotNull(deviceId, DEVICE_ID_NULL); final Context timer = startTimer(requestRoleTimer); try { return store.requestRole(deviceId); } finally { stopTimer(timer); } } @Override public NodeId getMasterFor(DeviceId deviceId) { checkNotNull(deviceId, DEVICE_ID_NULL); return store.getMaster(deviceId); } @Override public Set<DeviceId> getDevicesOf(NodeId nodeId) { checkNotNull(nodeId, NODE_ID_NULL); return store.getDevices(nodeId); } @Override public RoleInfo getNodesFor(DeviceId deviceId) { checkNotNull(deviceId, DEVICE_ID_NULL); return store.getNodes(deviceId); } @Override public MastershipTerm getMastershipTerm(DeviceId deviceId) { return store.getTermFor(deviceId); } @Override public void addListener(MastershipListener listener) { checkNotNull(listener); listenerRegistry.addListener(listener); } @Override public void removeListener(MastershipListener listener) { checkNotNull(listener); listenerRegistry.removeListener(listener); } @Override public MetricsService metricsService() { return metricsService; } @Override public void balanceRoles() { List<ControllerNode> nodes = newArrayList(clusterService.getNodes()); Map<ControllerNode, Set<DeviceId>> controllerDevices = new HashMap<>(); int deviceCount = 0; // Create buckets reflecting current ownership. for (ControllerNode node : nodes) { if (clusterService.getState(node.id()) == ACTIVE) { Set<DeviceId> devicesOf = new HashSet<>(getDevicesOf(node.id())); deviceCount += devicesOf.size(); controllerDevices.put(node, devicesOf); log.info("Node {} has {} devices.", node.id(), devicesOf.size()); } } // Now re-balance the buckets until they are roughly even. int rounds = controllerDevices.keySet().size(); for (int i = 0; i < rounds; i++) { // Iterate over the buckets and find the smallest and the largest. 
ControllerNode smallest = findBucket(true, controllerDevices); ControllerNode largest = findBucket(false, controllerDevices); balanceBuckets(smallest, largest, controllerDevices, deviceCount); } } private ControllerNode findBucket(boolean min, Map<ControllerNode, Set<DeviceId>> controllerDevices) { int xSize = min ? Integer.MAX_VALUE : -1; ControllerNode xNode = null; for (ControllerNode node : controllerDevices.keySet()) { int size = controllerDevices.get(node).size(); if ((min && size < xSize) || (!min && size > xSize)) { xSize = size; xNode = node; } } return xNode; } private void balanceBuckets(ControllerNode smallest, ControllerNode largest, Map<ControllerNode, Set<DeviceId>> controllerDevices, int deviceCount) { Collection<DeviceId> minBucket = controllerDevices.get(smallest); Collection<DeviceId> maxBucket = controllerDevices.get(largest); int bucketCount = controllerDevices.keySet().size(); int delta = (maxBucket.size() - minBucket.size()) / 2; delta = Math.min(deviceCount / bucketCount, delta); if (delta > 0) { log.info("Attempting to move {} nodes from {} to {}...", delta, largest.id(), smallest.id()); int i = 0; Iterator<DeviceId> it = maxBucket.iterator(); while (it.hasNext() && i < delta) { DeviceId deviceId = it.next(); log.info("Setting {} as the master for {}", smallest.id(), deviceId); setRole(smallest.id(), deviceId, MASTER); controllerDevices.get(smallest).add(deviceId); it.remove(); i++; } } } // Posts the specified event to the local event dispatcher. private void post(MastershipEvent event) { if (event != null && eventDispatcher != null) { eventDispatcher.post(event); } } //callback for reacting to cluster events private class InternalClusterEventListener implements ClusterEventListener { // A notion of a local maximum cluster size, used to tie-break. // Think of a better way to do this. private AtomicInteger clusterSize; InternalClusterEventListener() { clusterSize = new AtomicInteger(0); } @Override public void event(ClusterEvent event) { switch (event.type()) { case INSTANCE_ADDED: case INSTANCE_ACTIVATED: clusterSize.incrementAndGet(); log.info("instance {} added/activated", event.subject()); break; case INSTANCE_REMOVED: case INSTANCE_DEACTIVATED: ControllerNode node = event.subject(); if (node.equals(clusterService.getLocalNode())) { //If we are in smaller cluster, relinquish and return for (DeviceId device : getDevicesOf(node.id())) { if (!isInMajority()) { //own DeviceManager should catch event and tell switch store.relinquishRole(node.id(), device); } } log.info("broke off from cluster, relinquished devices"); break; } // if we are the larger one and the removed node(s) are brain dead, // force relinquish on behalf of disabled node. // check network channel to do this? for (DeviceId device : getDevicesOf(node.id())) { //some things to check: // 1. we didn't break off as well while we're at it // 2. others don't pile in and try too - maybe a lock if (isInMajority()) { store.relinquishRole(node.id(), device); } } clusterSize.decrementAndGet(); log.info("instance {} removed/deactivated", event.subject()); break; default: log.warn("unknown cluster event {}", event); } } private boolean isInMajority() { if (clusterService.getNodes().size() > (clusterSize.intValue() / 2)) { return true; } //FIXME: break tie for equal-sized clusters, return false; } } public class InternalDelegate implements MastershipStoreDelegate { @Override public void notify(MastershipEvent event) { log.trace("dispatching mastership event {}", event); eventDispatcher.post(event); } } }
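
/*
 * A dependency-free sketch of the arithmetic used by balanceBuckets above:
 * each round moves at most half of the size difference between the largest
 * and smallest buckets, capped at the average bucket size
 * (deviceCount / bucketCount). Class and method names are illustrative.
 */
final class BalanceDeltaSketch {

    static int devicesToMove(int smallestSize, int largestSize, int deviceCount, int bucketCount) {
        int delta = (largestSize - smallestSize) / 2;
        return Math.min(deviceCount / bucketCount, delta);
    }

    public static void main(String[] args) {
        // Three controllers owning 10, 2 and 0 devices: move min(12 / 3, (10 - 0) / 2) = 4
        // devices from the largest bucket to the smallest in this round.
        System.out.println(devicesToMove(0, 10, 12, 3)); // 4
    }
}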
package com.bazaarvoice.emodb.sor.db.astyanax; import com.bazaarvoice.emodb.common.api.Ttls; import com.bazaarvoice.emodb.common.cassandra.CassandraKeyspace; import com.bazaarvoice.emodb.sor.api.Compaction; import com.bazaarvoice.emodb.sor.api.DeltaSizeLimitException; import com.bazaarvoice.emodb.sor.api.History; import com.bazaarvoice.emodb.sor.api.ReadConsistency; import com.bazaarvoice.emodb.sor.api.WriteConsistency; import com.bazaarvoice.emodb.sor.core.HistoryStore; import com.bazaarvoice.emodb.sor.db.DAOUtils; import com.bazaarvoice.emodb.sor.db.DataWriterDAO; import com.bazaarvoice.emodb.sor.db.RecordUpdate; import com.bazaarvoice.emodb.sor.delta.Delta; import com.bazaarvoice.emodb.sor.delta.Literal; import com.bazaarvoice.emodb.sor.delta.MapDelta; import com.bazaarvoice.emodb.table.db.Table; import com.bazaarvoice.emodb.table.db.astyanax.AstyanaxStorage; import com.bazaarvoice.emodb.table.db.astyanax.AstyanaxTable; import com.bazaarvoice.emodb.table.db.astyanax.DataPurgeDAO; import com.bazaarvoice.emodb.table.db.astyanax.FullConsistencyTimeProvider; import com.bazaarvoice.emodb.table.db.consistency.HintsConsistencyTimeProvider; import com.codahale.metrics.Meter; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.annotation.Timed; import com.google.common.base.Function; import com.google.common.base.Objects; import com.google.common.base.Optional; import com.google.common.base.Predicates; import com.google.common.base.Throwables; import com.google.common.collect.Collections2; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.ByteStreams; import com.google.common.io.CountingOutputStream; import com.google.inject.Inject; import com.netflix.astyanax.ColumnListMutation; import com.netflix.astyanax.Execution; import com.netflix.astyanax.MutationBatch; import com.netflix.astyanax.connectionpool.OperationResult; import com.netflix.astyanax.connectionpool.exceptions.ConnectionException; import com.netflix.astyanax.model.ConsistencyLevel; import com.netflix.astyanax.serializers.StringSerializer; import com.netflix.astyanax.thrift.AbstractThriftMutationBatchImpl; import org.apache.cassandra.thrift.Cassandra; import org.apache.commons.lang3.StringUtils; import org.apache.thrift.TException; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.transport.TIOStreamTransport; import org.apache.thrift.transport.TTransportException; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Collection; import java.util.EnumSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; /** * Cassandra implementation of {@link DataWriterDAO} that uses the Netflix Astyanax client library. */ public class AstyanaxDataWriterDAO implements DataWriterDAO, DataPurgeDAO { private static final int MAX_BATCH_SIZE = 100; private static final int MAX_PENDING_SIZE = 200; // Must match thrift_framed_transport_size_in_mb value from cassandra.yaml private static final int MAX_THRIFT_FRAMED_TRANSPORT_SIZE = 15 * 1024 * 1024; // Because of the thrift framed transport size conservatively limit the size of deltas // to allow ample room for additional metadata and protocol overhead. 
private static final int MAX_DELTA_SIZE = 10 * 1024 * 1024; // 10 MB delta limit, measured in UTF-8 bytes private final AstyanaxKeyScanner _keyScanner; private final DataWriterDAO _cqlWriterDAO; private final ChangeEncoder _changeEncoder; private final Meter _updateMeter; private final Meter _oversizeUpdateMeter; private final FullConsistencyTimeProvider _fullConsistencyTimeProvider; private final DAOUtils _daoUtils; private final int _deltaBlockSize; private final String _deltaPrefix; private final int _deltaPrefixLength; private final boolean _writeToLegacyDeltaTable; private final boolean _writeToBlockedDeltaTable; // The difference between full consistency and "raw" consistency provider is that full consistency also includes // a minimum lag of 5 minutes, whereas "raw" consistency timestamp just gives us the last known good FCT which could be less than 5 minutes. // We use this for efficiency reasons, the only use case right now is to delete "compaction-owned" deltas, once we // know that compaction is within FCT. private final HintsConsistencyTimeProvider _rawConsistencyTimeProvider; private final HistoryStore _historyStore; @Inject public AstyanaxDataWriterDAO(@AstyanaxWriterDAODelegate DataWriterDAO delegate, AstyanaxKeyScanner keyScanner, FullConsistencyTimeProvider fullConsistencyTimeProvider, HistoryStore historyStore, HintsConsistencyTimeProvider rawConsistencyTimeProvider, ChangeEncoder changeEncoder, MetricRegistry metricRegistry, DAOUtils daoUtils, @BlockSize int deltaBlockSize, @PrefixLength int deltaPrefixLength, @WriteToLegacyDeltaTable boolean writeToLegacyDeltaTable, @WriteToBlockedDeltaTable boolean writeToBlockedDeltaTable) { checkArgument(writeToLegacyDeltaTable || writeToBlockedDeltaTable, "writeToLegacyDeltaTable and writeToBlockedDeltaTables cannot both be false"); _cqlWriterDAO = checkNotNull(delegate, "delegate"); _keyScanner = checkNotNull(keyScanner, "keyScanner"); _fullConsistencyTimeProvider = checkNotNull(fullConsistencyTimeProvider, "fullConsistencyTimeProvider"); _rawConsistencyTimeProvider = checkNotNull(rawConsistencyTimeProvider, "rawConsistencyTimeProvider"); _historyStore = checkNotNull(historyStore, "historyStore"); _changeEncoder = checkNotNull(changeEncoder, "changeEncoder"); _updateMeter = metricRegistry.meter(getMetricName("updates")); _oversizeUpdateMeter = metricRegistry.meter(getMetricName("oversizeUpdates")); _daoUtils = daoUtils; _deltaBlockSize = deltaBlockSize; _deltaPrefix = StringUtils.repeat('0', deltaPrefixLength); _deltaPrefixLength = deltaPrefixLength; _writeToLegacyDeltaTable = writeToLegacyDeltaTable; _writeToBlockedDeltaTable = writeToBlockedDeltaTable; } private String getMetricName(String name) { return MetricRegistry.name("bv.emodb.sor", "AstyanaxDataWriterDAO", name); } @Override public long getFullConsistencyTimestamp(Table tbl) { return getFullConsistencyTimestamp((AstyanaxTable)tbl, _fullConsistencyTimeProvider); } @Override public long getRawConsistencyTimestamp(Table tbl) { return getFullConsistencyTimestamp((AstyanaxTable)tbl, _rawConsistencyTimeProvider); } private long getFullConsistencyTimestamp(AstyanaxTable tbl, FullConsistencyTimeProvider fullConsistencyTimeProvider) { // Compaction runs off the "read" storage. If there are multiple back-end write storage configurations, // we don't care whether the secondary is falling behind, only the primary that we read from matters. 
DeltaPlacement placement = (DeltaPlacement) tbl.getReadStorage().getPlacement(); String clusterName = placement.getKeyspace().getClusterName(); return fullConsistencyTimeProvider.getMaxTimeStamp(clusterName); } @Timed(name = "bv.emodb.sor.AstyanaxDataWriterDAO.updateAll", absolute = true) @Override public void updateAll(Iterator<RecordUpdate> updates, UpdateListener listener) { Map<BatchKey, List<BatchUpdate>> batchMap = Maps.newLinkedHashMap(); int numPending = 0; // Group the updates by distinct placement and consistency since a Cassandra mutation only works // with a single keyspace and consistency at a time. while (updates.hasNext()) { RecordUpdate update = updates.next(); AstyanaxTable table = (AstyanaxTable) update.getTable(); for (AstyanaxStorage storage : table.getWriteStorage()) { DeltaPlacement placement = (DeltaPlacement) storage.getPlacement(); BatchKey batchKey = new BatchKey(placement, update.getConsistency()); List<BatchUpdate> batch = batchMap.get(batchKey); if (batch == null) { batchMap.put(batchKey, batch = Lists.newArrayList()); } batch.add(new BatchUpdate(storage, update)); numPending++; // Flush this batch if it's bigger than the maximum mutation we want to send to Cassandra. Alternatively, // don't queue more than MAX_PENDING_SIZE updates in memory at a time, to keep max mem usage down. Go // ahead and flush all the batches at once, even if some are still small, in order to avoid potentially // extreme re-ordering of writes (say a batch contains 1 record in placement A followed by 100k records in // placement B, we shouldn't delay writing A until after all B records). if (batch.size() >= MAX_BATCH_SIZE || numPending >= MAX_PENDING_SIZE) { writeAll(batchMap, listener); batchMap.clear(); numPending = 0; } } } // Flush final batches. writeAll(batchMap, listener); } private void writeAll(Map<BatchKey, List<BatchUpdate>> batchMap, UpdateListener listener) { for (Map.Entry<BatchKey, List<BatchUpdate>> entry : batchMap.entrySet()) { write(entry.getKey(), entry.getValue(), listener); } } private void putBlockedDeltaColumn(ColumnListMutation mutation, UUID changeId, ByteBuffer encodedDelta) { List<ByteBuffer> blocks = _daoUtils.getDeltaBlocks(encodedDelta); for (int i = 0; i < blocks.size(); i++) { mutation.putColumn(new DeltaKey(changeId, i), blocks.get(i)); } } private void write(BatchKey batchKey, List<BatchUpdate> updates, UpdateListener listener) { // Invoke the configured listener. This is used to write events to the databus. listener.beforeWrite(Collections2.transform(updates, BatchUpdate::getUpdate)); DeltaPlacement placement = batchKey.getPlacement(); MutationBatch mutation = placement.getKeyspace().prepareMutationBatch(batchKey.getConsistency()); int approxMutationSize = 0; int updateCount = 0; for (BatchUpdate batchUpdate : updates) { AstyanaxStorage storage = batchUpdate.getStorage(); RecordUpdate update = batchUpdate.getUpdate(); ByteBuffer rowKey = storage.getRowKey(update.getKey()); Delta delta = update.getDelta(); String deltaString = delta.toString(); Set<String> tags = update.getTags(); // Set any change flags which may make reading this delta back more efficient. Currently the only case // for this is for a literal map delta. 
EnumSet<ChangeFlag> changeFlags = EnumSet.noneOf(ChangeFlag.class); if (delta.isConstant()) { changeFlags.add(ChangeFlag.CONSTANT_DELTA); } if (delta instanceof MapDelta || (delta instanceof Literal && ((Literal) delta).getValue() instanceof Map)) { changeFlags.add(ChangeFlag.MAP_DELTA); } // Regardless of migration stage, we will still encode both deltas versions // The values are encoded in a flexible format that allows versioning of the strings ByteBuffer encodedBlockDelta = stringToByteBuffer(_changeEncoder.encodeDelta(deltaString, changeFlags, tags, new StringBuilder(_deltaPrefix)).toString()); ByteBuffer encodedDelta = encodedBlockDelta.duplicate(); encodedDelta.position(encodedDelta.position() + _deltaPrefixLength); int deltaSize = _writeToLegacyDeltaTable ? encodedDelta.remaining(): 0; int blockDeltaSize = _writeToBlockedDeltaTable ? encodedBlockDelta.remaining() : 0; UUID changeId = update.getChangeId(); // Validate sizes of individual deltas if (deltaSize > MAX_DELTA_SIZE) { _oversizeUpdateMeter.mark(); throw new DeltaSizeLimitException("Delta exceeds size limit of " + MAX_DELTA_SIZE + ": " + deltaSize, deltaSize); } // Perform a quick validation that the size of the mutation batch as a whole won't exceed the thrift threshold. // This validation is inexact and overly-conservative but it is cheap and fast. if (!mutation.isEmpty() && approxMutationSize + deltaSize + blockDeltaSize > MAX_DELTA_SIZE) { // Adding the next row may exceed the Thrift threshold. Check definitively now. This is fairly expensive // which is why we don't do it unless the cheap check above passes. MutationBatch potentiallyOversizeMutation = placement.getKeyspace().prepareMutationBatch(batchKey.getConsistency()); potentiallyOversizeMutation.mergeShallow(mutation); if (_writeToLegacyDeltaTable) { //this will be removed in the next version potentiallyOversizeMutation.withRow(placement.getDeltaColumnFamily(), rowKey).putColumn(changeId, encodedDelta, null); } if (_writeToBlockedDeltaTable) { putBlockedDeltaColumn(potentiallyOversizeMutation.withRow(placement.getBlockedDeltaColumnFamily(), rowKey), changeId, encodedBlockDelta); } if (getMutationBatchSize(potentiallyOversizeMutation) >= MAX_THRIFT_FRAMED_TRANSPORT_SIZE) { // Execute the mutation batch now. As a side-effect this empties the mutation batch // so we can continue using the same instance. execute(mutation, "batch update %d records in placement %s", updateCount, placement.getName()); approxMutationSize = 0; updateCount = 0; } } // this will be removed in the next version if (_writeToLegacyDeltaTable) { mutation.withRow(placement.getDeltaColumnFamily(), rowKey).putColumn(changeId, encodedDelta, null); approxMutationSize += deltaSize; } if (deltaSize + blockDeltaSize >= MAX_THRIFT_FRAMED_TRANSPORT_SIZE) { execute(mutation, "update large record in old table in placement %s", placement.getName()); approxMutationSize = 0; } if (_writeToBlockedDeltaTable) { putBlockedDeltaColumn(mutation.withRow(placement.getBlockedDeltaColumnFamily(), rowKey), changeId, encodedBlockDelta); approxMutationSize += blockDeltaSize; } updateCount += 1; } execute(mutation, "batch update %d records in placement %s", updateCount, placement.getName()); // Invoke the configured listener. This is used to write audits. 
        listener.afterWrite(Collections2.transform(updates, BatchUpdate::getUpdate));

        _updateMeter.mark(updates.size());
    }

    private ByteBuffer stringToByteBuffer(String str) {
        return StringSerializer.get().toByteBuffer(str);
    }

    /**
     * We need to make sure that compaction is written *before* the compacted deltas are deleted.
     * This should be a synchronous operation.
     */
    @Timed(name = "bv.emodb.sor.AstyanaxDataWriterDAO.compact", absolute = true)
    @Override
    public void compact(Table tbl, String key, UUID compactionKey, Compaction compaction, UUID changeId, Delta delta,
                        Collection<UUID> changesToDelete, List<History> historyList, WriteConsistency consistency) {
        // delegate to CQL Writer for double compaction writing
        _cqlWriterDAO.compact(tbl, key, compactionKey, compaction, changeId, delta, changesToDelete, historyList, consistency);
    }

    @Timed(name = "bv.emodb.sor.AstyanaxDataWriterDAO.storeCompactedDeltas", absolute = true)
    @Override
    public void storeCompactedDeltas(Table tbl, String key, List<History> histories, WriteConsistency consistency) {
        checkNotNull(tbl, "table");
        checkNotNull(key, "key");
        checkNotNull(histories, "histories");
        checkNotNull(consistency, "consistency");

        AstyanaxTable table = (AstyanaxTable) tbl;
        for (AstyanaxStorage storage : table.getWriteStorage()) {
            DeltaPlacement placement = (DeltaPlacement) storage.getPlacement();
            CassandraKeyspace keyspace = placement.getKeyspace();

            ByteBuffer rowKey = storage.getRowKey(key);
            MutationBatch mutation = keyspace.prepareMutationBatch(SorConsistencies.toAstyanax(consistency));
            ColumnListMutation<UUID> rowMutation = mutation.withRow(placement.getDeltaHistoryColumnFamily(), rowKey);
            for (History history : histories) {
                rowMutation.putColumn(history.getChangeId(), _changeEncoder.encodeHistory(history),
                        Ttls.toSeconds(_historyStore.getHistoryTtl(), 1, null));
            }
            execute(mutation, "store %d compacted deltas for placement %s, table %s, key %s",
                    histories.size(), placement.getName(), table.getName(), key);
        }
    }

    @Timed(name = "bv.emodb.sor.AstyanaxDataWriterDAO.purgeUnsafe", absolute = true)
    @Override
    public void purgeUnsafe(Table tbl) {
        checkNotNull(tbl, "table");

        AstyanaxTable table = (AstyanaxTable) tbl;
        for (AstyanaxStorage storage : table.getWriteStorage()) {
            purge(storage, noop());
        }
    }

    // DataPurgeDAO
    @Override
    public void purge(AstyanaxStorage storage, Runnable progress) {
        DeltaPlacement placement = (DeltaPlacement) storage.getPlacement();
        CassandraKeyspace keyspace = placement.getKeyspace();

        // Scan all the shards and delete all the rows we find.
        MutationBatch mutation = keyspace.prepareMutationBatch(SorConsistencies.toAstyanax(WriteConsistency.STRONG));
        Iterator<String> keyIter = _keyScanner.scanKeys(storage, ReadConsistency.STRONG);
        while (keyIter.hasNext()) {
            ByteBuffer rowKey = storage.getRowKey(keyIter.next());
            mutation.withRow(placement.getDeltaColumnFamily(), rowKey).delete();
            mutation.withRow(placement.getBlockedDeltaColumnFamily(), rowKey).delete();
            if (mutation.getRowCount() >= 100) {
                progress.run();
                execute(mutation, "purge %d records from placement %s", mutation.getRowCount(), placement.getName());
                mutation.discardMutations();
            }
        }
        if (!mutation.isEmpty()) {
            progress.run();
            execute(mutation, "purge %d records from placement %s", mutation.getRowCount(), placement.getName());
        }
    }

    private <R> R execute(Execution<R> execution, String operation, Object... operationArguments) {
        OperationResult<R> operationResult;
        try {
            operationResult = execution.execute();
        } catch (ConnectionException e) {
            String message = String.format(operation, operationArguments);
            if (isThriftFramedTransportSizeOverrun(execution, e)) {
                throw new ThriftFramedTransportSizeException("Thrift request too large to " + message, e);
            }
            throw new RuntimeException("Failed to " + message, e);
        }
        return operationResult.getResult();
    }

    private boolean isThriftFramedTransportSizeOverrun(Execution<?> execution, ConnectionException exception) {
        // Thrift framed transport size overruns don't have an explicit exception, but they fall under the general
        // umbrella of "unknown" thrift transport exceptions.
        Optional<Throwable> thriftException = Iterables.tryFind(Throwables.getCausalChain(exception),
                Predicates.instanceOf(TTransportException.class));
        //noinspection ThrowableResultOfMethodCallIgnored
        if (!thriftException.isPresent() ||
                ((TTransportException) thriftException.get()).getType() != TTransportException.UNKNOWN) {
            return false;
        }

        return execution instanceof MutationBatch &&
                getMutationBatchSize((MutationBatch) execution) >= MAX_THRIFT_FRAMED_TRANSPORT_SIZE;
    }

    private int getMutationBatchSize(MutationBatch mutation) {
        assert mutation instanceof AbstractThriftMutationBatchImpl : "MutationBatch is not an instance of AbstractThriftMutationBatchImpl";
        try (CountingOutputStream countingOut = new CountingOutputStream(ByteStreams.nullOutputStream())) {
            TIOStreamTransport transport = new TIOStreamTransport(countingOut);
            Cassandra.batch_mutate_args args = new Cassandra.batch_mutate_args();
            args.setMutation_map(((AbstractThriftMutationBatchImpl) mutation).getMutationMap());
            args.write(new TBinaryProtocol(transport));
            return (int) countingOut.getCount();
        } catch (TException | IOException e) {
            throw Throwables.propagate(e);
        }
    }

    private Runnable noop() {
        return new Runnable() {
            @Override
            public void run() {
                // Do nothing
            }
        };
    }

    /** Key used for grouping batches of update operations for execution. */
    private static class BatchKey {
        private final DeltaPlacement _placement;
        private final ConsistencyLevel _consistency;

        BatchKey(DeltaPlacement placement, WriteConsistency consistency) {
            _placement = placement;
            _consistency = SorConsistencies.toAstyanax(consistency);
        }

        DeltaPlacement getPlacement() {
            return _placement;
        }

        ConsistencyLevel getConsistency() {
            return _consistency;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof BatchKey)) {
                return false;
            }
            BatchKey batchKey = (BatchKey) o;
            return _consistency == batchKey.getConsistency() && _placement.equals(batchKey.getPlacement());
        }

        @Override
        public int hashCode() {
            return Objects.hashCode(_placement, _consistency);
        }
    }

    /** Value used for grouping batches of update operations for execution. */
    private static class BatchUpdate {
        private final AstyanaxStorage _storage;
        private final RecordUpdate _update;

        BatchUpdate(AstyanaxStorage storage, RecordUpdate record) {
            _storage = storage;
            _update = record;
        }

        AstyanaxStorage getStorage() {
            return _storage;
        }

        RecordUpdate getUpdate() {
            return _update;
        }
    }
}
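/*
 * Aside (illustration only, not part of the file above): updateAll() groups incoming updates by
 * (placement, consistency) and flushes once any single batch or the total pending count crosses a
 * threshold, bounding memory use and avoiding extreme re-ordering of writes across placements.
 * The minimal sketch below shows the same group-then-flush idea using only JDK collections;
 * BatchingSketch, Flusher, drain() and the two threshold constants are hypothetical names chosen
 * for this sketch, not values from the DAO.
 */
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class BatchingSketch<K, V> {
    private static final int MAX_BATCH_SIZE = 250;    // illustrative threshold, not the DAO's value
    private static final int MAX_PENDING_SIZE = 1000; // illustrative threshold, not the DAO's value

    interface Flusher<K, V> {
        void flush(Map<K, List<V>> batches);
    }

    void drain(Iterator<Map.Entry<K, V>> items, Flusher<K, V> flusher) {
        Map<K, List<V>> batchMap = new LinkedHashMap<>();
        int numPending = 0;
        while (items.hasNext()) {
            Map.Entry<K, V> item = items.next();
            List<V> batch = batchMap.computeIfAbsent(item.getKey(), k -> new ArrayList<>());
            batch.add(item.getValue());
            numPending++;
            // Flush *all* batches as soon as any one batch or the overall pending count gets too
            // large, so writes for small batches are not held back behind a huge one.
            if (batch.size() >= MAX_BATCH_SIZE || numPending >= MAX_PENDING_SIZE) {
                flusher.flush(batchMap);
                batchMap.clear();
                numPending = 0;
            }
        }
        if (!batchMap.isEmpty()) {
            flusher.flush(batchMap); // final partial flush
        }
    }
}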
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.xbean.osgi.bundle.util;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.List;

import org.osgi.framework.Bundle;
import org.osgi.framework.BundleReference;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.framework.wiring.BundleWire;
import org.osgi.framework.wiring.BundleWiring;
import org.osgi.service.packageadmin.ExportedPackage;
import org.osgi.service.packageadmin.PackageAdmin;

/**
 * @version $Rev$ $Date$
 */
public class BundleUtils {

    private static final boolean isOSGi43 = isOSGi43();

    private static boolean isOSGi43() {
        try {
            Class.forName("org.osgi.framework.wiring.BundleWiring");
            return true;
        } catch (Throwable e) {
            return false;
        }
    }

    public final static String REFERENCE_SCHEME = "reference:";

    public final static String FILE_SCHEMA = "file:";

    public final static String REFERENCE_FILE_SCHEMA = "reference:file:";

    /**
     * Checks the bundle state against the constant field values: any state greater than or equal to
     * RESOLVED means the framework has resolved the bundle.
     * @param bundle
     * @return true if the bundle is resolved, or false if not.
     */
    public static boolean isResolved(Bundle bundle) {
        return bundle.getState() >= Bundle.RESOLVED;
    }

    /**
     * Attempts to load Object.class from the bundle, which triggers a resolve request to the OSGi framework.
     * @param bundle
     */
    public static void resolve(Bundle bundle) {
        if (isFragment(bundle)) {
            return;
        }
        try {
            bundle.loadClass(Object.class.getName());
        } catch (Exception e) {
        }
    }

    /**
     * A bundle can be started if it fulfills the conditions below:
     * a. Not in the UNINSTALLED status.
     * b. Not in the STARTING status.
     * c. Not a fragment bundle.
     * @param bundle
     * @return
     */
    public static boolean canStart(Bundle bundle) {
        return (bundle.getState() != Bundle.UNINSTALLED) && (bundle.getState() != Bundle.STARTING) && (!isFragment(bundle));
    }

    /**
     * A bundle can be stopped if it fulfills the conditions below:
     * a. Not in the UNINSTALLED status.
     * b. Not in the STOPPING status.
     * c. Not a fragment bundle.
     * @param bundle
     * @return
     */
    public static boolean canStop(Bundle bundle) {
        return (bundle.getState() != Bundle.UNINSTALLED) && (bundle.getState() != Bundle.STOPPING) && (!isFragment(bundle));
    }

    /**
     * A bundle can be uninstalled if it fulfills the condition below:
     * a. Not in the UNINSTALLED status.
     * @param bundle
     * @return
     */
    public static boolean canUninstall(Bundle bundle) {
        return bundle.getState() != Bundle.UNINSTALLED;
    }

    public static boolean isFragment(Bundle bundle) {
        Dictionary headers = bundle.getHeaders();
        return (headers != null && headers.get(Constants.FRAGMENT_HOST) != null);
    }

    /**
     * Returns the bundle (if any) associated with the current thread's context classloader.
     * Invoking this method is equivalent to getBundle(Thread.currentThread().getContextClassLoader(), unwrap)
     */
    public static Bundle getContextBundle(boolean unwrap) {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        return classLoader == null ? null : getBundle(classLoader, unwrap);
    }

    /**
     * Returns the bundle (if any) associated with the classloader.
     * @param classLoader
     * @param unwrap if true and if the bundle associated with the context classloader is a
     *               {@link DelegatingBundle}, this function will return the main application bundle
     *               backing the {@link DelegatingBundle}. Otherwise, the bundle associated with
     *               the context classloader is returned as is. See {@link BundleClassLoader#getBundle(boolean)}
     *               for more information.
     * @return The bundle associated with the classloader. Might be null.
     */
    public static Bundle getBundle(ClassLoader classLoader, boolean unwrap) {
        if (classLoader instanceof DelegatingBundleReference) {
            return ((DelegatingBundleReference) classLoader).getBundle(unwrap);
        } else if (classLoader instanceof BundleReference) {
            return ((BundleReference) classLoader).getBundle();
        } else {
            return null;
        }
    }

    /**
     * If the given bundle is a {@link DelegatingBundle} this function will return the main
     * application bundle backing the {@link DelegatingBundle}. Otherwise, the bundle
     * passed in is returned as is.
     */
    public static Bundle unwrapBundle(Bundle bundle) {
        if (bundle instanceof DelegatingBundle) {
            return ((DelegatingBundle) bundle).getMainBundle();
        }
        return bundle;
    }

    /**
     * Works like {@link Bundle#getEntryPaths(String)} but also returns paths
     * in attached fragment bundles.
     *
     * @param bundle
     * @param name
     * @return
     */
    public static Enumeration<String> getEntryPaths(Bundle bundle, String name) {
        Enumeration<URL> entries = bundle.findEntries(name, null, false);
        if (entries == null) {
            return null;
        }
        LinkedHashSet<String> paths = new LinkedHashSet<String>();
        while (entries.hasMoreElements()) {
            URL url = entries.nextElement();
            String path = url.getPath();
            if (path.startsWith("/")) {
                path = path.substring(1);
            }
            paths.add(path);
        }
        return Collections.enumeration(paths);
    }

    /**
     * 1. If the bundle was installed in reference directory mode, returns the file URL directly.
     * 2. For a traditional packaged bundle, works like {@link Bundle#getEntry(String)}.
     *
     * In addition to the searching above, it also checks attached fragment bundles for the given entry.
* * @param bundle * @param name * @return * @throws MalformedURLException */ public static URL getEntry(Bundle bundle, String name) throws MalformedURLException { if (name.endsWith("/")) { name = name.substring(0, name.length() - 1); } File bundleFile = toFile(bundle); if (bundleFile != null && bundleFile.isDirectory()) { File entryFile = new File(bundleFile, name); if (entryFile.exists()) { return entryFile.toURI().toURL(); } } if (name.equals("/")) { return bundle.getEntry(name); } String path; String pattern; int pos = name.lastIndexOf("/"); if (pos == -1) { path = "/"; pattern = name; } else if (pos == 0) { path = "/"; pattern = name.substring(1); } else { path = name.substring(0, pos); pattern = name.substring(pos + 1); } Enumeration<URL> entries = bundle.findEntries(path, pattern, false); if (entries != null && entries.hasMoreElements()) { return entries.nextElement(); } else { return null; } } public static URL getNestedEntry(Bundle bundle, String jarEntryName, String subEntryName) throws MalformedURLException { File bundleFile = toFile(bundle); if (bundleFile != null && bundleFile.isDirectory()) { File entryFile = new File(bundleFile, jarEntryName); if (entryFile.exists()) { if (entryFile.isFile()) { return new URL("jar:" + entryFile.toURI().toURL() + "!/" + subEntryName); } else { return new File(entryFile, subEntryName).toURI().toURL(); } } return null; } return new URL("jar:" + bundle.getEntry(jarEntryName).toString() + "!/" + subEntryName); } public static File toFile(Bundle bundle) { return toFile(bundle.getLocation()); } public static File toFile(URL url) { return toFile(url.toExternalForm()); } /** * Translate the reference:file:// style URL to the underlying file instance * @param url * @return */ public static File toFile(String url) { if (url !=null && url.startsWith(REFERENCE_FILE_SCHEMA)) { File file = null; try { file = new File(new URL(url.substring(REFERENCE_SCHEME.length())).toURI()); if (file.exists()) { return file; } } catch (Exception e) { // If url includes special chars: { } [ ] % < > # ^ ? 
// URISyntaxException or MalformedURLException will be thrown, // so try to use File(String) directly file = new File(url.substring(REFERENCE_FILE_SCHEMA.length())); if (file.exists()) { return file; } } } return null; } public static String toReferenceFileLocation(File file) throws IOException { if (!file.exists()) { throw new IOException("file not exist " + file.getAbsolutePath()); } return REFERENCE_SCHEME + file.toURI(); } public static LinkedHashSet<Bundle> getWiredBundles(Bundle bundle) { if (isOSGi43) { return getWiredBundles43(bundle); } else { return getWiredBundles42(bundle); } } private static LinkedHashSet<Bundle> getWiredBundles42(Bundle bundle) { ServiceReference reference = bundle.getBundleContext().getServiceReference(PackageAdmin.class.getName()); PackageAdmin packageAdmin = (PackageAdmin) bundle.getBundleContext().getService(reference); try { return getWiredBundles(packageAdmin, bundle); } finally { bundle.getBundleContext().ungetService(reference); } } public static LinkedHashSet<Bundle> getWiredBundles(PackageAdmin packageAdmin, Bundle bundle) { BundleDescription description = new BundleDescription(bundle.getHeaders()); // handle static wire via Import-Package List<BundleDescription.ImportPackage> imports = description.getExternalImports(); LinkedHashSet<Bundle> wiredBundles = new LinkedHashSet<Bundle>(); for (BundleDescription.ImportPackage packageImport : imports) { ExportedPackage[] exports = packageAdmin.getExportedPackages(packageImport.getName()); Bundle wiredBundle = getWiredBundle(bundle, exports); if (wiredBundle != null) { wiredBundles.add(wiredBundle); } } // handle dynamic wire via DynamicImport-Package if (!description.getDynamicImportPackage().isEmpty()) { for (Bundle b : bundle.getBundleContext().getBundles()) { if (!wiredBundles.contains(b)) { ExportedPackage[] exports = packageAdmin.getExportedPackages(b); Bundle wiredBundle = getWiredBundle(bundle, exports); if (wiredBundle != null) { wiredBundles.add(wiredBundle); } } } } return wiredBundles; } static Bundle getWiredBundle(Bundle bundle, ExportedPackage[] exports) { if (exports != null) { for (ExportedPackage exportedPackage : exports) { Bundle[] importingBundles = exportedPackage.getImportingBundles(); if (importingBundles != null) { for (Bundle importingBundle : importingBundles) { if (importingBundle == bundle) { return exportedPackage.getExportingBundle(); } } } } } return null; } // OSGi 4.3 API private static LinkedHashSet<Bundle> getWiredBundles43(Bundle bundle) { LinkedHashSet<Bundle> wiredBundles = new LinkedHashSet<Bundle>(); BundleWiring wiring = bundle.adapt(BundleWiring.class); if (wiring != null) { List<BundleWire> wires; wires = wiring.getRequiredWires(BundleRevision.PACKAGE_NAMESPACE); for (BundleWire wire : wires) { wiredBundles.add(wire.getProviderWiring().getBundle()); } wires = wiring.getRequiredWires(BundleRevision.BUNDLE_NAMESPACE); for (BundleWire wire : wires) { wiredBundles.add(wire.getProviderWiring().getBundle()); } } return wiredBundles; } }
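/*
 * Aside (illustration only, not part of BundleUtils): toFile(String) above turns a
 * "reference:file:" bundle location into a File, falling back to a plain path when URL/URI
 * parsing fails on special characters. The minimal, self-contained sketch below mirrors that
 * fallback using only the JDK; ReferenceLocationSketch and the example location are made up
 * for this sketch.
 */
import java.io.File;
import java.net.URL;

class ReferenceLocationSketch {
    public static void main(String[] args) {
        String location = "reference:file:/tmp/my-bundle"; // made-up example location
        File file = null;
        if (location.startsWith("reference:file:")) {
            try {
                // Strip the "reference:" prefix and let URL/URI handle any escaping.
                file = new File(new URL(location.substring("reference:".length())).toURI());
            } catch (Exception e) {
                // Characters such as { } [ ] % < > # ^ ? can break URL/URI parsing,
                // so fall back to treating the remainder as a plain file system path.
                file = new File(location.substring("reference:file:".length()));
            }
        }
        System.out.println(file != null && file.exists() ? file.getAbsolutePath() : "not found");
    }
}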
package org.jetbrains.plugins.cucumber.java.run; import cucumber.api.TestCase; import cucumber.api.TestStep; import cucumber.api.event.*; import cucumber.api.formatter.Formatter; import gherkin.events.PickleEvent; import java.lang.reflect.Field; import java.util.HashMap; import java.util.Map; import static cucumber.api.Result.Type.*; import static org.jetbrains.plugins.cucumber.java.run.CucumberJvmSMFormatterUtil.*; @SuppressWarnings("unused") public class CucumberJvm2SMFormatter implements Formatter { private static final String EXAMPLES_CAPTION = "Examples:"; private static final String SCENARIO_OUTLINE_CAPTION = "Scenario: Line: "; private final Map<String, String> pathToDescription = new HashMap<String, String>(); private String currentFilePath; private int currentScenarioOutlineLine; private String currentScenarioOutlineName; public CucumberJvm2SMFormatter() { outCommand(String.format(TEMPLATE_ENTER_THE_MATRIX, getCurrentTime())); outCommand(String.format(TEMPLATE_SCENARIO_COUNTING_STARTED, 0, getCurrentTime())); } private final EventHandler<TestCaseStarted> testCaseStartedHandler = new EventHandler<TestCaseStarted>() { public void receive(TestCaseStarted event) { CucumberJvm2SMFormatter.this.handleTestCaseStarted(event); } }; private final EventHandler<TestCaseFinished> testCaseFinishedHandler = new EventHandler<TestCaseFinished>() { public void receive(TestCaseFinished event) { handleTestCaseFinished(event); } }; private final EventHandler<TestRunFinished> testRunFinishedHandler = new EventHandler<TestRunFinished>() { public void receive(TestRunFinished event) { CucumberJvm2SMFormatter.this.handleTestRunFinished(event); } }; private final EventHandler<TestStepStarted> testStepStartedHandler = new EventHandler<TestStepStarted>() { public void receive(TestStepStarted event) { handleTestStepStarted(event); } }; private final EventHandler<TestStepFinished> testStepFinishedHandler = new EventHandler<TestStepFinished>() { public void receive(TestStepFinished event) { handleTestStepFinished(event); } }; private final EventHandler<TestSourceRead> testSourceReadHandler = new EventHandler<TestSourceRead>() { public void receive(TestSourceRead event) { CucumberJvm2SMFormatter.this.handleTestSourceRead(event); } }; @Override public void setEventPublisher(EventPublisher publisher) { publisher.registerHandlerFor(TestCaseStarted.class, this.testCaseStartedHandler); publisher.registerHandlerFor(TestCaseFinished.class, this.testCaseFinishedHandler); publisher.registerHandlerFor(TestStepStarted.class, this.testStepStartedHandler); publisher.registerHandlerFor(TestStepFinished.class, this.testStepFinishedHandler); publisher.registerHandlerFor(TestSourceRead.class, this.testSourceReadHandler); publisher.registerHandlerFor(TestRunFinished.class, this.testRunFinishedHandler); } private void handleTestCaseStarted(TestCaseStarted event) { if (currentFilePath == null) { outCommand(String.format(TEMPLATE_TEST_SUITE_STARTED, getCurrentTime(), event.testCase.getUri(), getFeatureFileDescription(event.testCase.getUri()))); } else if (!event.testCase.getUri().equals(currentFilePath)) { closeCurrentScenarioOutline(); outCommand(String.format(TEMPLATE_TEST_SUITE_FINISHED, getCurrentTime(), getFeatureFileDescription(currentFilePath))); outCommand(String.format(TEMPLATE_TEST_SUITE_STARTED, getCurrentTime(), event.testCase.getUri(), getFeatureFileDescription(event.testCase.getUri()))); } if (isScenarioOutline(event.testCase)) { int mainScenarioLine = getScenarioOutlineLine(event.testCase); if 
(currentScenarioOutlineLine != mainScenarioLine || currentFilePath == null || !currentFilePath.equals(event.testCase.getUri())) { closeCurrentScenarioOutline(); currentScenarioOutlineLine = mainScenarioLine; currentScenarioOutlineName = event.testCase.getName(); outCommand(String.format(TEMPLATE_TEST_SUITE_STARTED, getCurrentTime(), event.testCase.getUri() + ":" + currentScenarioOutlineLine, currentScenarioOutlineName)); outCommand(String.format(TEMPLATE_TEST_SUITE_STARTED, getCurrentTime(), "", EXAMPLES_CAPTION)); } } else { closeCurrentScenarioOutline(); } currentFilePath = event.testCase.getUri(); outCommand(String.format(TEMPLATE_TEST_SUITE_STARTED, getCurrentTime(), event.testCase.getUri() + ":" + event.testCase.getLine(), getScenarioName(event.testCase))); } private static void handleTestCaseFinished(TestCaseFinished event) { outCommand(String.format(TEMPLATE_TEST_SUITE_FINISHED, getCurrentTime(), getScenarioName(event.testCase))); } private void handleTestRunFinished(TestRunFinished event) { closeCurrentScenarioOutline(); outCommand(String.format(TEMPLATE_TEST_SUITE_FINISHED, getCurrentTime(), getFeatureFileDescription(currentFilePath))); } private static void handleTestStepStarted(TestStepStarted event) { if (event.testStep.isHook()) { return; } outCommand(String.format(TEMPLATE_TEST_STARTED, getCurrentTime(), getStepLocation(event.testStep), getStepName(event.testStep))); } private static void handleTestStepFinished(TestStepFinished event) { if (event.testStep.isHook()) { return; } if (event.result.getStatus() == PASSED) { // write nothing } else if (event.result.getStatus() == SKIPPED || event.result.getStatus() == PENDING) { outCommand(String.format(TEMPLATE_TEST_PENDING, getStepName(event.testStep), getCurrentTime())); } else { outCommand(String.format(TEMPLATE_TEST_FAILED, getCurrentTime(), "", escape(event.result.getErrorMessage()), getStepName(event.testStep), "")); } Long duration = event.result.getDuration() != null ? 
event.result.getDuration() / 1000000: 0; outCommand(String.format(TEMPLATE_TEST_FINISHED, getCurrentTime(), duration, getStepName(event.testStep))); } private String getFeatureFileDescription(String uri) { if (pathToDescription.containsKey(uri)) { return pathToDescription.get(uri); } return uri; } private void handleTestSourceRead(TestSourceRead event) { closeCurrentScenarioOutline(); String[] lines = event.source.split("\n"); if (lines.length > 0) { pathToDescription.put(event.uri, lines[0]); } else { pathToDescription.put(event.uri, event.source); } } private void closeCurrentScenarioOutline() { if (currentScenarioOutlineLine > 0) { outCommand(String.format(TEMPLATE_TEST_SUITE_FINISHED, getCurrentTime(), EXAMPLES_CAPTION)); outCommand(String.format(TEMPLATE_TEST_SUITE_FINISHED, getCurrentTime(), currentScenarioOutlineName)); currentScenarioOutlineLine = 0; currentScenarioOutlineName = null; } } private static String getStepLocation(TestStep step) { return step.getStepLocation() + ":" + step.getStepLine(); } private static String getStepName(TestStep step) { return escape(step.getStepText()); } private static void outCommand(String s) { //noinspection UseOfSystemOutOrSystemErr System.out.println(s); } private static PickleEvent getPickleEvent(TestCase testCase) { try { Field pickleEventField = TestCase.class.getDeclaredField("pickleEvent"); pickleEventField.setAccessible(true); return (PickleEvent)pickleEventField.get(testCase); } catch (Exception ignored) { } return null; } private static boolean isScenarioOutline(TestCase testCase) { PickleEvent pickleEvent = getPickleEvent(testCase); return pickleEvent != null && pickleEvent.pickle.getLocations().size() > 1; } private static int getScenarioOutlineLine(TestCase testCase) { PickleEvent pickleEvent = getPickleEvent(testCase); if (pickleEvent != null) { return pickleEvent.pickle.getLocations().get(pickleEvent.pickle.getLocations().size() - 1).getLine(); } return 0; } private static String getScenarioName(TestCase testCase) { if (isScenarioOutline(testCase)) { return SCENARIO_OUTLINE_CAPTION + testCase.getLine(); } return escape(testCase.getName()); } }
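/*
 * Aside (illustration only, not part of the formatter above): CucumberJvm2SMFormatter implements
 * cucumber.api.formatter.Formatter, so it can be attached to a run like any other Cucumber-JVM 2.x
 * plugin. The sketch below assumes Cucumber-JVM 2.x with the JUnit 4 runner on the classpath; the
 * features path, glue package and runner class name are placeholders, not values from the plugin.
 */
import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;
import org.junit.runner.RunWith;

@RunWith(Cucumber.class)
@CucumberOptions(
        plugin = {"org.jetbrains.plugins.cucumber.java.run.CucumberJvm2SMFormatter"},
        features = "src/test/resources/features", // assumed feature file location
        glue = "com.example.steps")               // assumed step-definition package
public class RunWithSmFormatterSketch {
    // Running this JUnit class would route Cucumber events through the formatter above,
    // which prints service messages to stdout for a consuming test console.
}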
/*- * -\-\- * Helios Tools * -- * Copyright (C) 2016 Spotify AB * -- * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * -/-/- */ package com.spotify.helios.cli.command; import static com.google.common.util.concurrent.Futures.immediateFuture; import static com.spotify.helios.common.descriptors.DeploymentGroup.RollingUpdateReason.MANUAL; import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.ListenableFuture; import com.spotify.helios.client.HeliosClient; import com.spotify.helios.common.Json; import com.spotify.helios.common.descriptors.DeploymentGroup; import com.spotify.helios.common.descriptors.HostSelector; import com.spotify.helios.common.descriptors.JobId; import com.spotify.helios.common.descriptors.RolloutOptions; import com.spotify.helios.common.descriptors.TaskStatus; import com.spotify.helios.common.protocol.DeploymentGroupStatusResponse; import com.spotify.helios.common.protocol.RollingUpdateResponse; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintStream; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import net.sourceforge.argparse4j.ArgumentParsers; import net.sourceforge.argparse4j.inf.Namespace; import org.junit.Before; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; public class RollingUpdateCommandTest { private static final String GROUP_NAME = "my_group"; private static final JobId JOB_ID = new JobId("foo", "2", "1212121212121212121"); private static final JobId OLD_JOB_ID = new JobId("foo", "1", "3232323232323232323"); private static final JobId NEW_JOB_ID = new JobId("foo", "3", "4242424242424242424"); private static final int PARALLELISM = 1; private static final long TIMEOUT = 300; private static final String TOKEN = "my_token"; private static final RolloutOptions OPTIONS = RolloutOptions.newBuilder() .setTimeout(TIMEOUT) .setParallelism(PARALLELISM) .setToken(TOKEN) .build(); private final Namespace options = mock(Namespace.class); private final HeliosClient client = mock(HeliosClient.class); private final ByteArrayOutputStream baos = new ByteArrayOutputStream(); private final PrintStream out = new PrintStream(baos); private final TimeUtil timeUtil = new TimeUtil(); private final RollingUpdateCommand command = new RollingUpdateCommand( ArgumentParsers.newArgumentParser("test").addSubparsers().addParser("rolling-update"), timeUtil, timeUtil); @Before public void before() { // Default CLI argument stubs when(options.getString("deployment-group-name")).thenReturn(GROUP_NAME); when(options.getInt("parallelism")).thenReturn(PARALLELISM); 
when(options.getLong("timeout")).thenReturn(TIMEOUT); when(options.getLong("rollout_timeout")).thenReturn(10L); when(options.getBoolean("async")).thenReturn(false); when(options.getBoolean("migrate")).thenReturn(false); when(options.getBoolean("overlap")).thenReturn(false); when(options.getString("token")).thenReturn(TOKEN); } private static DeploymentGroupStatusResponse.HostStatus makeHostStatus( final String host, final JobId jobId, final TaskStatus.State state) { return new DeploymentGroupStatusResponse.HostStatus(host, jobId, state); } private static DeploymentGroupStatusResponse statusResponse( final DeploymentGroupStatusResponse.Status status, final String error, DeploymentGroupStatusResponse.HostStatus... args) { return statusResponse(status, JOB_ID, error, args); } private static DeploymentGroupStatusResponse statusResponse( final DeploymentGroupStatusResponse.Status status, final JobId jobId, final String error, DeploymentGroupStatusResponse.HostStatus... args) { return new DeploymentGroupStatusResponse( DeploymentGroup.newBuilder() .setName(GROUP_NAME) .setHostSelectors(Collections.<HostSelector>emptyList()) .setJobId(jobId) .setRolloutOptions(RolloutOptions.newBuilder().build()) .setRollingUpdateReason(MANUAL) .build(), status, error, Arrays.asList(args), null); } @Test public void testRollingUpdate() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer( statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null), statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", null, null), makeHostStatus("host2", OLD_JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host3", OLD_JOB_ID, TaskStatus.State.RUNNING)), statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host2", JOB_ID, TaskStatus.State.PULLING_IMAGE), makeHostStatus("host3", OLD_JOB_ID, TaskStatus.State.RUNNING)), statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host2", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host3", JOB_ID, TaskStatus.State.CREATING)), statusResponse(DeploymentGroupStatusResponse.Status.ACTIVE, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host2", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host3", JOB_ID, TaskStatus.State.RUNNING)) )); final int ret = command.runWithJobId(options, client, out, false, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(0, ret); final String expected = "Rolling update started: my_group -> foo:2:1212121 (parallelism=1, timeout=300, " + "overlap=false, token=" + TOKEN + ", ignoreFailures=false)\n" + "\n" + "host1 -> RUNNING (1/3)\n" + "host2 -> RUNNING (2/3)\n" + "host3 -> RUNNING (3/3)\n" + "\n" + "Done.\n" + "Duration: 4.00 s\n"; assertEquals(expected, output.replaceAll("\\p{Blank}+|(?:\\p{Blank})$", " ")); } @Test public void testRollingUpdateAsync() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(options.getBoolean("async")).thenReturn(true); final int ret 
= command.runWithJobId(options, client, out, false, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(0, ret); final String expected = "Rolling update (async) started: my_group -> foo:2:1212121 (parallelism=1, timeout=300, " + "overlap=false, token=" + TOKEN + ", ignoreFailures=false)\n"; assertEquals(expected, output); } @Test public void testRollingUpdateFailsIfJobIdChangedDuringRollout() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer( statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", null, null), makeHostStatus("host2", OLD_JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host3", OLD_JOB_ID, TaskStatus.State.RUNNING)), statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host2", JOB_ID, TaskStatus.State.PULLING_IMAGE), makeHostStatus("host3", OLD_JOB_ID, TaskStatus.State.RUNNING)), statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, NEW_JOB_ID, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host2", JOB_ID, TaskStatus.State.STARTING), makeHostStatus("host3", OLD_JOB_ID, TaskStatus.State.RUNNING)) )); final int ret = command.runWithJobId(options, client, out, false, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(1, ret); final String expected = "Rolling update started: my_group -> foo:2:1212121 (parallelism=1, timeout=300, " + "overlap=false, token=" + TOKEN + ", ignoreFailures=false)\n" + "\n" + "host1 -> RUNNING (1/3)\n" + "\n" + "Failed: Deployment-group job id changed during rolling-update\n" + "Duration: 2.00 s\n"; assertEquals(expected, output.replaceAll("\\p{Blank}+|(?:\\p{Blank})$", " ")); } @Test public void testRollingUpdateFailsOnRolloutTimeout() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer( statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", null, null), makeHostStatus("host2", null, null)), statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.PULLING_IMAGE), makeHostStatus("host2", null, null)) )); final int ret = command.runWithJobId(options, client, out, false, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(1, ret); final String expected = "Rolling update started: my_group -> foo:2:1212121 (parallelism=1, timeout=300, " + "overlap=false, token=" + TOKEN + ", ignoreFailures=false)\n" + "\n" + "\n" + "Timed out! 
(rolling-update still in progress)\n" + "Duration: 601.00 s\n"; assertEquals(expected, output.replaceAll("\\p{Blank}+|(?:\\p{Blank})$", " ")); } @Test public void testRollingUpdateFailed() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer( statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.PULLING_IMAGE), makeHostStatus("host2", null, null)), statusResponse(DeploymentGroupStatusResponse.Status.FAILED, "foobar", makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host2", null, null)) )); final int ret = command.runWithJobId(options, client, out, false, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(1, ret); final String expected = "Rolling update started: my_group -> foo:2:1212121 (parallelism=1, timeout=300, " + "overlap=false, token=" + TOKEN + ", ignoreFailures=false)\n" + "\n" + "host1 -> RUNNING (1/2)\n" + "\n" + "Failed: foobar\n" + "Duration: 1.00 s\n"; assertEquals(expected, output.replaceAll("\\p{Blank}+|(?:\\p{Blank})$", " ")); } // ---------------------------- @Test public void testRollingUpdateJson() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer( statusResponse(DeploymentGroupStatusResponse.Status.ACTIVE, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host2", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host3", JOB_ID, TaskStatus.State.RUNNING)) )); final int ret = command.runWithJobId(options, client, out, true, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(0, ret); assertJsonOutputEquals(output, ImmutableMap.<String, Object>builder() .put("status", "DONE") .put("duration", 0.00) .put("parallelism", PARALLELISM) .put("timeout", TIMEOUT) .put("overlap", false) .put("token", TOKEN) .put("ignoreFailures", false) .build()); } @Test public void testRollingUpdateAsyncJson() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(options.getBoolean("async")).thenReturn(true); final int ret = command.runWithJobId(options, client, out, true, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(0, ret); assertJsonOutputEquals(output, ImmutableMap.<String, Object>builder() .put("status", "OK") .put("parallelism", PARALLELISM) .put("timeout", TIMEOUT) .put("overlap", false) .put("token", TOKEN) .put("ignoreFailures", false) .build() ); } @Test public void testRollingUpdateFailsIfJobIdChangedDuringRolloutJson() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer( statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", 
null, null), makeHostStatus("host2", OLD_JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host3", OLD_JOB_ID, TaskStatus.State.RUNNING)), statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, NEW_JOB_ID, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host2", JOB_ID, TaskStatus.State.STARTING), makeHostStatus("host3", OLD_JOB_ID, TaskStatus.State.RUNNING)) )); final int ret = command.runWithJobId(options, client, out, true, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(1, ret); assertJsonOutputEquals(output, ImmutableMap.<String, Object>builder() .put("status", "FAILED") .put("error", "Deployment-group job id changed during rolling-update") .put("duration", 1.00) .put("parallelism", PARALLELISM) .put("timeout", TIMEOUT) .put("overlap", false) .put("token", TOKEN) .put("ignoreFailures", false) .build()); } @Test public void testRollingUpdateFailsOnRolloutTimeoutJson() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer( statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", null, null), makeHostStatus("host2", null, null)), statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.PULLING_IMAGE), makeHostStatus("host2", null, null)) )); final int ret = command.runWithJobId(options, client, out, true, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(1, ret); assertJsonOutputEquals(output, ImmutableMap.<String, Object>builder() .put("status", "TIMEOUT") .put("duration", 601.00) .put("parallelism", PARALLELISM) .put("timeout", TIMEOUT) .put("overlap", false) .put("token", TOKEN) .put("ignoreFailures", false) .build()); } @Test public void testRollingUpdateFailedJson() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer( statusResponse(DeploymentGroupStatusResponse.Status.ROLLING_OUT, null, makeHostStatus("host1", JOB_ID, TaskStatus.State.PULLING_IMAGE), makeHostStatus("host2", null, null)), statusResponse(DeploymentGroupStatusResponse.Status.FAILED, "foobar", makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING), makeHostStatus("host2", null, null)) )); final int ret = command.runWithJobId(options, client, out, true, JOB_ID, null); final String output = baos.toString(); verify(client).rollingUpdate(GROUP_NAME, JOB_ID, OPTIONS); assertEquals(1, ret); assertJsonOutputEquals(output, ImmutableMap.<String, Object>builder() .put("status", "FAILED") .put("error", "foobar") .put("duration", 1.00) .put("parallelism", PARALLELISM) .put("timeout", TIMEOUT) .put("overlap", false) .put("token", TOKEN) .put("ignoreFailures", false) .build() ); } @Test public void testRollingUpdateMigrateJson() throws Exception { when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class))) .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK))); when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer( 
                statusResponse(DeploymentGroupStatusResponse.Status.ACTIVE, null,
                        makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING))
        ));
        when(options.getBoolean("migrate")).thenReturn(true);

        final int ret = command.runWithJobId(options, client, out, true, JOB_ID, null);
        final String output = baos.toString();

        // Verify that rollingUpdate() was called with migrate=true
        final RolloutOptions rolloutOptions = RolloutOptions.newBuilder()
                .setTimeout(TIMEOUT)
                .setParallelism(PARALLELISM)
                .setMigrate(true)
                .setToken(TOKEN)
                .build();
        verify(client).rollingUpdate(GROUP_NAME, JOB_ID, rolloutOptions);
        assertEquals(0, ret);

        assertJsonOutputEquals(output, ImmutableMap.<String, Object>builder()
                .put("status", "DONE")
                .put("duration", 0.00)
                .put("parallelism", PARALLELISM)
                .put("timeout", TIMEOUT)
                .put("overlap", false)
                .put("token", TOKEN)
                .put("ignoreFailures", false)
                .build());
    }

    @Test
    public void testRollingUpdateOverlapJson() throws Exception {
        when(client.rollingUpdate(anyString(), any(JobId.class), any(RolloutOptions.class)))
                .thenReturn(immediateFuture(new RollingUpdateResponse(RollingUpdateResponse.Status.OK)));
        when(client.deploymentGroupStatus(GROUP_NAME)).then(new ResponseAnswer(
                statusResponse(DeploymentGroupStatusResponse.Status.ACTIVE, null,
                        makeHostStatus("host1", JOB_ID, TaskStatus.State.RUNNING))
        ));
        when(options.getBoolean("overlap")).thenReturn(true);

        final int ret = command.runWithJobId(options, client, out, true, JOB_ID, null);
        final String output = baos.toString();

        // Verify that rollingUpdate() was called with overlap=true
        final RolloutOptions rolloutOptions = RolloutOptions.newBuilder()
                .setTimeout(TIMEOUT)
                .setParallelism(PARALLELISM)
                .setOverlap(true)
                .setToken(TOKEN)
                .build();
        verify(client).rollingUpdate(GROUP_NAME, JOB_ID, rolloutOptions);
        assertEquals(0, ret);

        assertJsonOutputEquals(output, ImmutableMap.<String, Object>builder()
                .put("status", "DONE")
                .put("duration", 0.00)
                .put("parallelism", PARALLELISM)
                .put("timeout", TIMEOUT)
                .put("overlap", true)
                .put("token", TOKEN)
                .put("ignoreFailures", false)
                .build()
        );
    }

    private static class TimeUtil implements RollingUpdateCommand.SleepFunction, Supplier<Long> {
        private long currentTimeMillis = 0;

        @Override
        public void sleep(final long millis) throws InterruptedException {
            advanceTime(millis);
        }

        public void advanceTime(final long millis) {
            currentTimeMillis += millis;
        }

        @Override
        public Long get() {
            return currentTimeMillis;
        }
    }

    private static class ResponseAnswer implements Answer<ListenableFuture<DeploymentGroupStatusResponse>> {
        private final List<DeploymentGroupStatusResponse> responses;
        private int index = 0;

        public ResponseAnswer(final DeploymentGroupStatusResponse... responses) {
            this(Arrays.asList(responses));
        }

        public ResponseAnswer(final List<DeploymentGroupStatusResponse> responses) {
            this.responses = responses;
        }

        @Override
        public ListenableFuture<DeploymentGroupStatusResponse> answer(final InvocationOnMock ignored) {
            return immediateFuture(responses.get(index++ % responses.size()));
        }
    }

    private static void assertJsonOutputEquals(
            final String actual, final Map<String, Object> expected) throws IOException {
        // * Long(2) != Integer(2)
        // * Json serializing a Long and then parsing it makes it into an Integer (in some cases?)
// * => Can't easily compare a map with a json-deserialized map // * => Serialize and deserialize the expected value map, and compare against that final TypeReference<Map<String, Object>> typeRef = new TypeReference<Map<String, Object>>() {}; final Map<String, Object> actualMap = Json.read(actual, typeRef); final Map<String, Object> expectedMap = Json.read(Json.asString(expected), typeRef); assertEquals(expectedMap, actualMap); } }
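/*
 * Aside (illustration only, not part of the test above): ResponseAnswer hands back a different
 * canned DeploymentGroupStatusResponse on each call, cycling through its list. When cycling is not
 * needed, Mockito's built-in consecutive stubbing gives similar "next response per call"
 * behaviour; StatusService and the string values below are made up for this sketch.
 */
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class ConsecutiveStubbingSketch {
    interface StatusService {
        String status();
    }

    public static void main(String[] args) {
        StatusService service = mock(StatusService.class);
        // Each call returns the next value; the last one repeats once the list is exhausted
        // (unlike ResponseAnswer, which wraps around via index++ % responses.size()).
        when(service.status()).thenReturn("ROLLING_OUT", "ROLLING_OUT", "ACTIVE");
        System.out.println(service.status()); // ROLLING_OUT
        System.out.println(service.status()); // ROLLING_OUT
        System.out.println(service.status()); // ACTIVE
        System.out.println(service.status()); // ACTIVE (last stubbed value repeats)
    }
}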
/* * Copyright 2018 The Bazel Authors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.idea.blaze.skylark.debugger.impl; import static com.google.common.base.Preconditions.checkState; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.ContinueExecutionRequest; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.DebugEvent; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.DebugEvent.PayloadCase; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.DebugRequest; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.EvaluateRequest; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.Location; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.PauseReason; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.PauseThreadRequest; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.PausedThread; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.SetBreakpointsRequest; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.StartDebuggingRequest; import com.google.devtools.build.lib.starlarkdebugging.StarlarkDebuggingProtos.Stepping; import com.intellij.execution.ExecutionResult; import com.intellij.execution.process.ProcessAdapter; import com.intellij.execution.process.ProcessEvent; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.ui.ExecutionConsole; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Ref; import com.intellij.util.concurrency.AppExecutorUtil; import com.intellij.xdebugger.XDebugProcess; import com.intellij.xdebugger.XDebugSession; import com.intellij.xdebugger.breakpoints.XBreakpoint; import com.intellij.xdebugger.breakpoints.XBreakpointHandler; import com.intellij.xdebugger.breakpoints.XBreakpointProperties; import com.intellij.xdebugger.breakpoints.XLineBreakpoint; import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider; import com.intellij.xdebugger.evaluation.XDebuggerEvaluator.XEvaluationCallback; import com.intellij.xdebugger.frame.XExecutionStack; import com.intellij.xdebugger.frame.XSuspendContext; import java.util.Collection; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Future; 
import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import javax.annotation.Nullable; /** * Core class controlling skylark debugging behavior. Interfaces with an {@link XDebugSession}, * responsible for the UI and some other state. */ public class SkylarkDebugProcess extends XDebugProcess { private static final Logger logger = Logger.getInstance(SkylarkDebugProcess.class); private final Project project; private final ExecutionResult executionResult; private final DebugClientTransport transport; // TODO(brendandouglas): Here for backwards-compatibility, remove in v2018.10+ private volatile boolean debuggingStarted = false; // SkylarkLineBreakpointType extends from XLineBreakpointTypeBase which uses raw // XBreakpointProperties. The raw use of XBreakpointProperties needs to propagate to all affected // classes. Check XLineBreakpointTypeBase again after #api212. @SuppressWarnings("rawtypes") // state shared with debug server private final ConcurrentMap<Location, XLineBreakpoint<XBreakpointProperties>> lineBreakpoints = new ConcurrentHashMap<>(); private final ConcurrentMap<Long, PausedThreadState> pausedThreads = new ConcurrentHashMap<>(); // the currently-stepping thread gets priority in the UI -- we always grab focus when it's paused private volatile long currentlySteppingThreadId = 0; public SkylarkDebugProcess(XDebugSession session, ExecutionResult executionResult, int port) { super(session); this.project = session.getProject(); this.executionResult = executionResult; this.transport = new DebugClientTransport(this, port); session.setPauseActionSupported(true); } Collection<PausedThreadState> getPausedThreads() { return ImmutableList.copyOf(pausedThreads.values()); } @Override public XBreakpointHandler<?>[] getBreakpointHandlers() { return new XBreakpointHandler<?>[] {new SkylarkLineBreakpointHandler(this)}; } @Override public XDebuggerEditorsProvider getEditorsProvider() { return new SkylarkDebuggerEditorsProvider(); } @Override protected ProcessHandler doGetProcessHandler() { return executionResult.getProcessHandler(); } @Override public ExecutionConsole createConsole() { return executionResult.getExecutionConsole(); } @Override public void sessionInitialized() { waitForConnection(); ProcessHandler processHandler = getSession().getRunContentDescriptor().getProcessHandler(); processHandler.addProcessListener( new ProcessAdapter() { @Override public void processWillTerminate(ProcessEvent event, boolean willBeDestroyed) { if (transport.isConnected()) { // unset breakpoints and resume all threads prior to stopping debugger, otherwise the // interrupt signal won't be properly handled clearBreakpoints(); startStepping(null, Stepping.NONE); } } }); } /** Returns false if the blaze process is terminating or already terminated. */ boolean isProcessAlive() { ProcessHandler handler = getProcessHandler(); return !handler.isProcessTerminated() && !handler.isProcessTerminating(); } /** Returns true if the transport socket still appears to be connected. 
*/ private boolean isConnected() { return transport.isConnected(); } @Override public void stop() { transport.close(); } private void waitForConnection() { ProgressManager.getInstance() .run( new Task.Backgroundable(project, "Connecting To Debugger", /* canBeCanceled= */ false) { @Override public void run(ProgressIndicator indicator) { indicator.setText("Waiting for connection..."); boolean success = transport.waitForConnection(); if (!success) { reportError("Failed to connect to the debugger"); transport.close(); getSession().stop(); return; } init(); } }); } private void init() { getSession().rebuildViews(); registerBreakpoints(); boolean started = assertResponseType( transport.sendRequest( DebugRequest.newBuilder() .setStartDebugging(StartDebuggingRequest.newBuilder().build())), PayloadCase.START_DEBUGGING); if (started) { debuggingStarted = true; } else { // abort the debugging session entirely if we couldn't successfully initialize transport.close(); getSession().stop(); } } private void registerBreakpoints() { SetBreakpointsRequest.Builder request = SetBreakpointsRequest.newBuilder(); lineBreakpoints.forEach((l, b) -> request.addBreakpoint(getBreakpointProto(l, b))); assertResponseType( transport.sendRequest(DebugRequest.newBuilder().setSetBreakpoints(request)), PayloadCase.SET_BREAKPOINTS); } private void clearBreakpoints() { lineBreakpoints.clear(); registerBreakpoints(); } // Check XLineBreakpointTypeBase for raw use of XBreakpointProperties after #api212. @SuppressWarnings("rawtypes") private static StarlarkDebuggingProtos.Breakpoint getBreakpointProto( Location location, XLineBreakpoint<XBreakpointProperties> breakpoint) { StarlarkDebuggingProtos.Breakpoint.Builder builder = StarlarkDebuggingProtos.Breakpoint.newBuilder().setLocation(location); String condition = getConditionExpression(breakpoint); if (condition != null) { builder.setExpression(condition); } return builder.build(); } @Nullable private static String getConditionExpression(XBreakpoint<?> breakpoint) { return breakpoint.getConditionExpression() == null ? null : breakpoint.getConditionExpression().getExpression(); } private void reportError(StarlarkDebuggingProtos.Error error) { reportError(error.getMessage()); } private void reportError(String message) { if (isConnected() && isProcessAlive()) { getSession().reportError(message); } } /** * If the response doesn't match the expected type, or is an error event, log an error. * * @return false if no response was received, or the response type doesn't match that expected. */ private boolean assertResponseType(@Nullable DebugEvent response, PayloadCase expectedType) { if (response == null) { reportError(String.format("No '%s' response received from the debugger", expectedType)); return false; } if (expectedType.equals(response.getPayloadCase())) { return true; } if (response.hasError()) { reportError(response.getError()); } else { String message = String.format( "Expected response type '%s', but got '%s'", expectedType, response.getPayloadCase()); reportError(message); } return false; } // Check XLineBreakpointTypeBase for raw use of XBreakpointProperties after #api212. @SuppressWarnings("rawtypes") private Location convertLocation(XLineBreakpoint<XBreakpointProperties> breakpoint) { // TODO(brendandouglas): handle local changes? 
return Location.newBuilder() .setLineNumber(breakpoint.getLine() + 1) .setPath(breakpoint.getPresentableFilePath()) .build(); } @Override public void startStepOver(@Nullable XSuspendContext context) { startStepping(context, Stepping.OVER); } @Override public void startStepInto(@Nullable XSuspendContext context) { startStepping(context, Stepping.INTO); } @Override public void startStepOut(@Nullable XSuspendContext context) { startStepping(context, Stepping.OUT); } @Override public void resume(@Nullable XSuspendContext context) { // unpausing only a single thread isn't well supported by the debugging API, so pass through a // null suspend context, indicating all threads should be unpaused startStepping(null, Stepping.NONE); } private void startStepping(@Nullable XSuspendContext context, Stepping stepping) { if (!isConnected()) { return; } ApplicationManager.getApplication() .executeOnPooledThread(() -> doStartStepping(context, stepping)); } /** Blocks waiting for a response from the debugger, so must be called on a worker thread. */ private void doStartStepping(@Nullable XSuspendContext context, Stepping stepping) { long threadId = getThreadId(context); if (threadId == 0 && stepping != Stepping.NONE) { // TODO(brendandouglas): cache suspended threads here, and apply stepping behavior to all? return; } transport.sendRequest( DebugRequest.newBuilder() .setContinueExecution( ContinueExecutionRequest.newBuilder() .setThreadId(threadId) .setStepping(stepping) .build())); currentlySteppingThreadId = threadId; // this is necessary because we aren't yet reliably informed of thread death. If the thread // finishes without triggering the stepping condition, we need to remind the debugger that there // could still be suspended threads remaining. scheduleWakeupIfNecessary(2000); } private void scheduleWakeupIfNecessary(int delayMillis) { if (pausedThreads.size() < 2) { // if no other threads are paused, the UI state will already be up-to-date return; } @SuppressWarnings({"unused", "nullness"}) Future<?> possiblyIgnoredError = AppExecutorUtil.getAppScheduledExecutorService() .schedule(this::wakeUpUiIfNecessary, delayMillis, TimeUnit.MILLISECONDS); } @Override public void startPausing() { if (!isConnected()) { return; } // pause all threads transport.sendRequest( DebugRequest.newBuilder().setPauseThread(PauseThreadRequest.newBuilder())); } private long getThreadId(@Nullable XSuspendContext context) { if (context instanceof SkylarkSuspendContext) { return ((SkylarkSuspendContext) context).getActiveExecutionStack().getThreadId(); } return 0; } // Check XLineBreakpointTypeBase for raw use of XBreakpointProperties after #api212. @SuppressWarnings("rawtypes") void addBreakpoint(XLineBreakpoint<XBreakpointProperties> breakpoint) { lineBreakpoints.put(convertLocation(breakpoint), breakpoint); if (isConnected()) { registerBreakpoints(); } } // Check XLineBreakpointTypeBase for raw use of XBreakpointProperties after #api212. @SuppressWarnings("rawtypes") void removeBreakpoint(XLineBreakpoint<XBreakpointProperties> breakpoint) { boolean changed = lineBreakpoints.remove(convertLocation(breakpoint)) != null; if (changed && isConnected()) { registerBreakpoints(); } } /** * Sends an {@link EvaluateRequest} to the debug server for the current frame, passing the * response on to the {@link XEvaluationCallback}.
*/ void evaluate(String expression, XEvaluationCallback callback) { try { evaluate(currentFrame(), expression, callback); } catch (SkylarkDebuggerException e) { callback.errorOccurred(e.getMessage()); } } private void evaluate(SkylarkStackFrame frame, String expression, XEvaluationCallback callback) { EvaluateRequest request = EvaluateRequest.newBuilder().setThreadId(frame.threadId).setStatement(expression).build(); doEvaluate(request, frame, callback); } private SkylarkStackFrame currentFrame() throws SkylarkDebuggerException { if (!isConnected()) { getSession().stop(); throw new SkylarkDebuggerException("Disconnected"); } SkylarkStackFrame frame = (SkylarkStackFrame) getSession().getCurrentStackFrame(); if (frame == null) { throw new RuntimeException("Process is running"); } return frame; } private void doEvaluate( EvaluateRequest request, SkylarkStackFrame frame, XEvaluationCallback callback) { DebugEvent response = transport.sendRequest(DebugRequest.newBuilder().setEvaluate(request)); if (response == null) { callback.errorOccurred("No response from the Skylark debugger"); return; } if (response.hasError()) { callback.errorOccurred(response.getError().getMessage()); return; } checkState(response.getPayloadCase() == PayloadCase.EVALUATE); callback.evaluated(SkylarkDebugValue.fromProto(frame, response.getEvaluate().getResult())); } @Nullable List<StarlarkDebuggingProtos.Value> getChildren( long threadId, StarlarkDebuggingProtos.Value value) { PausedThreadState threadState = pausedThreads.get(threadId); if (threadState == null) { return null; } return threadState.childCache.getChildren(transport, value); } void listFrames(long threadId, XExecutionStack.XStackFrameContainer container) { DebugEvent response = transport.sendRequest( DebugRequest.newBuilder() .setListFrames( StarlarkDebuggingProtos.ListFramesRequest.newBuilder().setThreadId(threadId))); if (response == null) { container.errorOccurred("No frames data received from the Skylark debugger"); return; } if (response.hasError()) { container.errorOccurred(response.getError().getMessage()); return; } checkState(response.getPayloadCase() == PayloadCase.LIST_FRAMES); List<StarlarkDebuggingProtos.Frame> frames = response.getListFrames().getFrameList(); container.addStackFrames( frames.stream().map(f -> convert(threadId, f)).collect(Collectors.toList()), true); } private SkylarkStackFrame convert(long threadId, StarlarkDebuggingProtos.Frame frame) { return new SkylarkStackFrame(this, threadId, frame); } void handleEvent(StarlarkDebuggingProtos.DebugEvent event) { switch (event.getPayloadCase()) { case ERROR: reportError(event.getError()); return; case THREAD_PAUSED: handleThreadPausedEvent(event.getThreadPaused().getThread()); return; case THREAD_CONTINUED: pausedThreads.remove(event.getThreadContinued().getThreadId()); return; case LIST_FRAMES: case EVALUATE: case SET_BREAKPOINTS: case CONTINUE_EXECUTION: case START_DEBUGGING: case PAUSE_THREAD: case GET_CHILDREN: logger.error("Can't handle a response event without the associated request"); return; case PAYLOAD_NOT_SET: break; // intentional fall through to error reporting } reportError( "Unrecognized or unset skylark debugger response type. Try upgrading to a newer " + "version of the plugin."); } /** * The debugger UI doesn't handle multiple concurrently paused threads well. If the 'primary' * thread dies, or never resumes, we need to manually remind it that there may be other suspended * threads remaining. * * <p>See RemoteDebugger#processThreadEvent for an upstream example. 
*/ private void wakeUpUiIfNecessary() { if (getSession().isSuspended()) { // we already have an active thread return; } PausedThreadState state = Iterables.getFirst(getPausedThreads(), null); if (state != null) { notifyThreadPaused(state, true); } } // Check XLineBreakpointTypeBase for raw use of XBreakpointProperties after #api212. @SuppressWarnings("rawtypes") private void handleThreadPausedEvent(PausedThread thread) { // ignore threads paused during initialization if (!debuggingStarted && thread.getPauseReason() == PauseReason.ALL_THREADS_PAUSED) { // Temporary backwards-compatibility code. TODO(brendandouglas): remove in v2018.10+ return; } if (thread.getPauseReason() == PauseReason.INITIALIZING) { return; } if (thread.getPauseReason() != PauseReason.CONDITIONAL_BREAKPOINT_ERROR) { notifyThreadPaused(thread); return; } XLineBreakpoint<XBreakpointProperties> breakpoint = lineBreakpoints.get(thread.getLocation().toBuilder().setColumnNumber(0).build()); if (breakpoint == null) { notifyThreadPaused(thread); } else { handleConditionalBreakpointError(breakpoint, thread); } } private void notifyThreadPaused(PausedThread thread) { notifyThreadPaused(new PausedThreadState(thread), /* alwaysNotify= */ false); } // Check XLineBreakpointTypeBase for raw use of XBreakpointProperties after #api212. @SuppressWarnings("rawtypes") private void notifyThreadPaused(PausedThreadState threadState, boolean alwaysNotify) { pausedThreads.put(threadState.thread.getId(), threadState); XLineBreakpoint<XBreakpointProperties> breakpoint = lineBreakpoints.get( threadState.thread.getLocation().toBuilder().setColumnNumber(0).build()); boolean isSuspended = getSession().isSuspended(); if (!alwaysNotify && isSuspended && breakpoint != null) { // don't notify for subsequent breakpoint hits when we're already suspended. Otherwise we can // get 100s of threads stopping at a single breakpoint, kicking off a listFrames for each return; } SkylarkSuspendContext suspendContext = new SkylarkSuspendContext(this, threadState); if (breakpoint != null) { getSession().breakpointReached(breakpoint, null, suspendContext); } else if (alwaysNotify || threadState.thread.getId() == currentlySteppingThreadId || !isSuspended || individualThreadPausedByUser(threadState.thread.getPauseReason())) { getSession().positionReached(suspendContext); } } private boolean individualThreadPausedByUser(StarlarkDebuggingProtos.PauseReason reason) { switch (reason) { case STEPPING: case PAUSE_THREAD_REQUEST: case HIT_BREAKPOINT: case CONDITIONAL_BREAKPOINT_ERROR: return true; case INITIALIZING: case ALL_THREADS_PAUSED: case UNSET: return false; case UNRECOGNIZED: } reportError("Unrecognized pause reason. Try upgrading to a newer version of the plugin."); // default to returning true, so we don't leave the debugger in an unusable state return true; } // Check XLineBreakpointTypeBase for raw use of XBreakpointProperties after #api212. 
@SuppressWarnings("rawtypes") private void handleConditionalBreakpointError( XLineBreakpoint<XBreakpointProperties> breakpoint, PausedThread thread) { // TODO(brendandouglas): also navigate to the problematic breakpoint String error = Preconditions.checkNotNull(thread.getConditionalBreakpointError().getMessage()); String title = "Breakpoint Condition Error"; String message = String.format( "Breakpoint: %s\nError: %s\nWould you like to stop at the breakpoint?", breakpoint.getType().getDisplayText(breakpoint), error); Ref<Boolean> stop = new Ref<>(true); ApplicationManager.getApplication() .invokeAndWait( () -> stop.set( Messages.showYesNoDialog(project, message, title, Messages.getQuestionIcon()) == Messages.YES)); if (stop.get()) { notifyThreadPaused(thread); return; } // else resume the thread transport.sendRequest( DebugRequest.newBuilder() .setContinueExecution( ContinueExecutionRequest.newBuilder() .setThreadId(thread.getId()) .setStepping(Stepping.NONE) .build())); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; import java.util.Timer; import java.util.TimerTask; import org.apache.hadoop.mapred.StatisticsCollector.Stat.TimeStat; /** * Collects the statistics in time windows. */ class StatisticsCollector { private static final int DEFAULT_PERIOD = 5; static final TimeWindow SINCE_START = new TimeWindow("Since Start", -1, -1); static final TimeWindow LAST_WEEK = new TimeWindow("Last Week", 7 * 24 * 60 * 60, 60 * 60); static final TimeWindow LAST_DAY = new TimeWindow("Last Day", 24 * 60 * 60, 60 * 60); static final TimeWindow LAST_HOUR = new TimeWindow("Last Hour", 60 * 60, 60); static final TimeWindow LAST_MINUTE = new TimeWindow("Last Minute", 60, 10); static final TimeWindow[] DEFAULT_COLLECT_WINDOWS = { StatisticsCollector.SINCE_START, StatisticsCollector.LAST_DAY, StatisticsCollector.LAST_HOUR }; private final int period; private boolean started; private final Map<TimeWindow, StatUpdater> updaters = new LinkedHashMap<TimeWindow, StatUpdater>(); private final Map<String, Stat> statistics = new HashMap<String, Stat>(); StatisticsCollector() { this(DEFAULT_PERIOD); } StatisticsCollector(int period) { this.period = period; } synchronized void start() { if (started) { return; } Timer timer = new Timer("Timer thread for monitoring ", true); TimerTask task = new TimerTask() { public void run() { update(); } }; long millis = period * 1000; timer.scheduleAtFixedRate(task, millis, millis); started = true; } protected synchronized void update() { for (StatUpdater c : updaters.values()) { c.update(); } } Map<TimeWindow, StatUpdater> getUpdaters() { return Collections.unmodifiableMap(updaters); } Map<String, Stat> getStatistics() { return Collections.unmodifiableMap(statistics); } synchronized Stat createStat(String name) { return createStat(name, DEFAULT_COLLECT_WINDOWS); } synchronized Stat createStat(String name, TimeWindow[] windows) { if (statistics.get(name) != null) { throw new RuntimeException("Stat with name "+ name + " is already defined"); } Map<TimeWindow, TimeStat> timeStats = new LinkedHashMap<TimeWindow, TimeStat>(); for (TimeWindow window : windows) { StatUpdater collector = updaters.get(window); if (collector == null) { if(SINCE_START.equals(window)) { collector = new StatUpdater(); } else { collector = new TimeWindowStatUpdater(window, period); } updaters.put(window, collector); } TimeStat timeStat = new TimeStat(); collector.addTimeStat(name, timeStat); timeStats.put(window, timeStat); } Stat stat = new Stat(name, timeStats); statistics.put(name, stat); return stat; } synchronized Stat removeStat(String name) { Stat stat = statistics.remove(name); if 
(stat != null) { for (StatUpdater collector : updaters.values()) { collector.removeTimeStat(name); } } return stat; } static class TimeWindow { final String name; final int windowSize; final int updateGranularity; TimeWindow(String name, int windowSize, int updateGranularity) { if (updateGranularity > windowSize) { throw new RuntimeException( "Invalid TimeWindow: updateGranularity > windowSize"); } this.name = name; this.windowSize = windowSize; this.updateGranularity = updateGranularity; } public int hashCode() { return name.hashCode() + updateGranularity + windowSize; } public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; final TimeWindow other = (TimeWindow) obj; if (name == null) { if (other.name != null) return false; } else if (!name.equals(other.name)) return false; if (updateGranularity != other.updateGranularity) return false; if (windowSize != other.windowSize) return false; return true; } } static class Stat { final String name; private Map<TimeWindow, TimeStat> timeStats; private Stat(String name, Map<TimeWindow, TimeStat> timeStats) { this.name = name; this.timeStats = timeStats; } public synchronized void inc(int incr) { for (TimeStat ts : timeStats.values()) { ts.inc(incr); } } public synchronized void inc() { inc(1); } public synchronized Map<TimeWindow, TimeStat> getValues() { return Collections.unmodifiableMap(timeStats); } static class TimeStat { private final LinkedList<Integer> buckets = new LinkedList<Integer>(); private int value; private int currentValue; private int updates; public synchronized int getValue() { return value; } private synchronized void inc(int i) { currentValue += i; } private synchronized void addBucket() { buckets.addLast(currentValue); setValueToCurrent(); } private synchronized void setValueToCurrent() { value += currentValue; currentValue = 0; } private synchronized void removeBucket() { int removed = buckets.removeFirst(); value -= removed; } } } private static class StatUpdater { protected final Map<String, TimeStat> statToCollect = new HashMap<String, TimeStat>(); synchronized void addTimeStat(String name, TimeStat s) { statToCollect.put(name, s); } synchronized TimeStat removeTimeStat(String name) { return statToCollect.remove(name); } synchronized void update() { for (TimeStat stat : statToCollect.values()) { stat.setValueToCurrent(); } } } /** * Updates TimeWindow statistics in buckets. * */ private static class TimeWindowStatUpdater extends StatUpdater{ final int collectBuckets; final int updatesPerBucket; TimeWindowStatUpdater(TimeWindow w, int updatePeriod) { if (updatePeriod > w.updateGranularity) { throw new RuntimeException( "Invalid conf: updatePeriod > updateGranularity"); } collectBuckets = w.windowSize / w.updateGranularity; updatesPerBucket = w.updateGranularity / updatePeriod; } synchronized void update() { for (TimeStat stat : statToCollect.values()) { stat.updates++; if (stat.updates == updatesPerBucket) { stat.addBucket(); stat.updates = 0; } if (stat.buckets.size() > collectBuckets) { stat.removeBucket(); } } } } }
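/*
 * Illustrative usage sketch, not part of the Hadoop class above (same package assumed, since
 * StatisticsCollector and its nested types are package-private). "maps.completed" is a made-up
 * stat name. With the default 5s period, the LAST_HOUR window (3600s size, 60s granularity)
 * keeps 3600 / 60 = 60 buckets and rolls one bucket every 60 / 5 = 12 updates.
 */
class StatisticsCollectorUsageSketch {
  static void sketch() {
    StatisticsCollector collector = new StatisticsCollector(); // default period = 5 seconds
    StatisticsCollector.Stat completed = collector.createStat("maps.completed");
    collector.start();  // starts the timer that rolls the time-window buckets
    completed.inc();    // counted once in every configured window
    // default windows are SINCE_START, LAST_DAY and LAST_HOUR
    completed.getValues().forEach(
        (window, timeStat) -> System.out.println(window.name + " = " + timeStat.getValue()));
  }
}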
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.util.indexing.events; import com.intellij.concurrency.ConcurrentCollectionFactory; import com.intellij.history.LocalHistory; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.application.impl.LaterInvocator; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.roots.ContentIterator; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.vfs.*; import com.intellij.openapi.vfs.newvfs.events.VFileEvent; import com.intellij.psi.PsiManager; import com.intellij.util.ConcurrencyUtil; import com.intellij.util.SystemProperties; import com.intellij.util.concurrency.BoundedTaskExecutor; import com.intellij.util.concurrency.SequentialTaskExecutor; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.IntObjectMap; import com.intellij.util.indexing.*; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.TestOnly; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.Stream; @ApiStatus.Internal public final class ChangedFilesCollector extends IndexedFilesListener { private static final Logger LOG = Logger.getInstance(ChangedFilesCollector.class); public static final boolean CLEAR_NON_INDEXABLE_FILE_DATA = SystemProperties.getBooleanProperty("idea.indexes.clear.non.indexable.file.data", true); private final IntObjectMap<VirtualFile> myFilesToUpdate = ConcurrentCollectionFactory.createConcurrentIntObjectMap(); private final AtomicInteger myProcessedEventIndex = new AtomicInteger(); private final Phaser myWorkersFinishedSync = new Phaser() { @Override protected boolean onAdvance(int phase, int registeredParties) { return false; } }; private final Executor myVfsEventsExecutor = SequentialTaskExecutor.createSequentialApplicationPoolExecutor("FileBasedIndex Vfs Event Processor"); private final AtomicInteger myScheduledVfsEventsWorkers = new AtomicInteger(); private final FileBasedIndexImpl myFileBasedIndex = (FileBasedIndexImpl)FileBasedIndex.getInstance(); @Override protected void iterateIndexableFiles(@NotNull VirtualFile file, @NotNull ContentIterator iterator) { if (myFileBasedIndex.belongsToIndexableFiles(file)) { VfsUtilCore.visitChildrenRecursively(file, new VirtualFileVisitor<Void>() { @Override public boolean visitFile(@NotNull VirtualFile file11) { if (!myFileBasedIndex.belongsToIndexableFiles(file11)) return false; iterator.processFile(file11); return true; } }); } } public void scheduleForUpdate(@NotNull VirtualFile file) { int fileId = FileBasedIndex.getFileId(file); if (!(file instanceof DeletedVirtualFileStub)) { Set<Project> projects = myFileBasedIndex.getContainingProjects(file); if (projects.isEmpty()) { removeNonIndexableFileData(file, fileId); return; } } 
VfsEventsMerger.tryLog("ADD_TO_UPDATE", file); myFilesToUpdate.put(fileId, file); } public void removeScheduledFileFromUpdate(VirtualFile file) { int fileId = FileBasedIndex.getFileId(file); VirtualFile alreadyScheduledFile = myFilesToUpdate.get(fileId); if (!(alreadyScheduledFile instanceof DeletedVirtualFileStub)) { VfsEventsMerger.tryLog("PULL_OUT_FROM_UPDATE", file); myFilesToUpdate.remove(fileId); } } public void removeFileIdFromFilesScheduledForUpdate(int fileId) { myFilesToUpdate.remove(fileId); } public boolean containsFileId(int fileId) { return myFilesToUpdate.containsKey(fileId); } public Stream<VirtualFile> getFilesToUpdate() { return myFilesToUpdate.values().stream(); } public Collection<VirtualFile> getAllFilesToUpdate() { ensureUpToDate(); if (myFilesToUpdate.isEmpty()) { return Collections.emptyList(); } return new ArrayList<>(myFilesToUpdate.values()); } // it's important here to don't load any extension here, so we don't check scopes. public Collection<VirtualFile> getAllPossibleFilesToUpdate() { ReadAction.run(() -> { processFilesInReadAction(info -> { myFilesToUpdate.put(info.getFileId(), info.isFileRemoved() ? new DeletedVirtualFileStub(((VirtualFileWithId)info.getFile())) : info.getFile()); return true; }); }); return new ArrayList<>(myFilesToUpdate.values()); } public void clearFilesToUpdate() { myFilesToUpdate.clear(); } @Override @NotNull public AsyncFileListener.ChangeApplier prepareChange(@NotNull List<? extends @NotNull VFileEvent> events) { boolean shouldCleanup = ContainerUtil.exists(events, ChangedFilesCollector::memoryStorageCleaningNeeded); ChangeApplier superApplier = super.prepareChange(events); return new ChangeApplier() { @Override public void beforeVfsChange() { if (shouldCleanup) { myFileBasedIndex.cleanupMemoryStorage(false); } superApplier.beforeVfsChange(); } @Override public void afterVfsChange() { superApplier.afterVfsChange(); RegisteredIndexes registeredIndexes = myFileBasedIndex.getRegisteredIndexes(); if (registeredIndexes != null && registeredIndexes.isInitialized()) ensureUpToDateAsync(); } }; } private void removeNonIndexableFileData(@NotNull VirtualFile file, int fileId) { if (CLEAR_NON_INDEXABLE_FILE_DATA) { List<ID<?, ?>> extensions = getIndexedContentDependentExtensions(fileId); if (!extensions.isEmpty()) { myFileBasedIndex.removeDataFromIndicesForFile(fileId, file, "non_indexable_file"); } IndexingFlag.cleanProcessingFlag(file); } else if (ApplicationManager.getApplication().isInternal() && !ApplicationManager.getApplication().isUnitTestMode()) { checkNotIndexedByContentBasedIndexes(file, fileId); } } private static boolean memoryStorageCleaningNeeded(@NotNull VFileEvent event) { Object requestor = event.getRequestor(); return requestor instanceof FileDocumentManager || requestor instanceof PsiManager || requestor == LocalHistory.VFS_EVENT_REQUESTOR; } public boolean isScheduledForUpdate(VirtualFile file) { return myFilesToUpdate.containsKey(FileBasedIndex.getFileId(file)); } public void ensureUpToDate() { if (!IndexUpToDateCheckIn.isUpToDateCheckEnabled()) { return; } //assert ApplicationManager.getApplication().isReadAccessAllowed() || ShutDownTracker.isShutdownHookRunning(); myFileBasedIndex.waitUntilIndicesAreInitialized(); if (ApplicationManager.getApplication().isReadAccessAllowed()) { processFilesToUpdateInReadAction(); } else { processFilesInReadActionWithYieldingToWriteAction(); } } public void ensureUpToDateAsync() { if (getEventMerger().getApproximateChangesCount() >= 20 && myScheduledVfsEventsWorkers.compareAndSet(0,1)) { 
myVfsEventsExecutor.execute(() -> { try { processFilesInReadActionWithYieldingToWriteAction(); } finally { myScheduledVfsEventsWorkers.decrementAndGet(); } }); if (Registry.is("try.starting.dumb.mode.where.many.files.changed")) { Runnable startDumbMode = () -> { for (Project project : ProjectManager.getInstance().getOpenProjects()) { FileBasedIndexProjectHandler.scheduleReindexingInDumbMode(project); } }; Application app = ApplicationManager.getApplication(); if (!app.isHeadlessEnvironment() /*avoid synchronous ensureUpToDate to prevent deadlock*/ && app.isDispatchThread() && !LaterInvocator.isInModalContext()) { startDumbMode.run(); } else { app.invokeLater(startDumbMode, ModalityState.NON_MODAL); } } } } public void processFilesToUpdateInReadAction() { processFilesInReadAction(info -> { int fileId = info.getFileId(); VirtualFile file = info.getFile(); if (info.isTransientStateChanged()) myFileBasedIndex.doTransientStateChangeForFile(fileId, file); if (info.isContentChanged()) myFileBasedIndex.scheduleFileForIndexing(fileId, file, true); if (info.isFileRemoved()) myFileBasedIndex.doInvalidateIndicesForFile(fileId, file); if (info.isFileAdded()) myFileBasedIndex.scheduleFileForIndexing(fileId, file, false); return true; }); } private void processFilesInReadAction(@NotNull VfsEventsMerger.VfsEventProcessor processor) { assert ApplicationManager.getApplication().isReadAccessAllowed(); // no vfs events -> event processing code can finish int publishedEventIndex = getEventMerger().getPublishedEventIndex(); int processedEventIndex = myProcessedEventIndex.get(); if (processedEventIndex == publishedEventIndex) { return; } myWorkersFinishedSync.register(); int phase = myWorkersFinishedSync.getPhase(); try { myFileBasedIndex.waitUntilIndicesAreInitialized(); getEventMerger().processChanges(info -> ConcurrencyUtil.withLock(myFileBasedIndex.myWriteLock, () -> { try { ProgressManager.getInstance().executeNonCancelableSection(() -> { processor.process(info); }); } finally { IndexingStamp.flushCache(info.getFileId()); } return true; }) ); } finally { myWorkersFinishedSync.arriveAndDeregister(); } try { myWorkersFinishedSync.awaitAdvance(phase); } catch (RejectedExecutionException e) { LOG.warn(e); throw new ProcessCanceledException(e); } if (getEventMerger().getPublishedEventIndex() == publishedEventIndex) { myProcessedEventIndex.compareAndSet(processedEventIndex, publishedEventIndex); } } private void processFilesInReadActionWithYieldingToWriteAction() { while (getEventMerger().hasChanges()) { ReadAction.nonBlocking(() -> processFilesToUpdateInReadAction()).executeSynchronously(); } } private void checkNotIndexedByContentBasedIndexes(@NotNull VirtualFile file, int fileId) { List<ID<?, ?>> contentDependentIndexes = getIndexedContentDependentExtensions(fileId); if (!contentDependentIndexes.isEmpty()) { LOG.error("indexes " + contentDependentIndexes + " will not be updated for file = " + file + ", id = " + fileId); } } private @NotNull List<ID<?, ?>> getIndexedContentDependentExtensions(int fileId) { List<ID<?, ?>> indexedStates = IndexingStamp.getNontrivialFileIndexedStates(fileId); RegisteredIndexes registeredIndexes = myFileBasedIndex.getRegisteredIndexes(); List<ID<?, ?>> contentDependentIndexes; if (registeredIndexes == null) { Set<? 
extends ID<?, ?>> allContentDependentIndexes = FileBasedIndexExtension .EXTENSION_POINT_NAME .extensions() .filter(ex -> ex.dependsOnFileContent()) .map(ex -> ex.getName()) .collect(Collectors.toSet()); contentDependentIndexes = ContainerUtil.filter(indexedStates, id -> !allContentDependentIndexes.contains(id)); } else { contentDependentIndexes = ContainerUtil.filter(indexedStates, id -> { return registeredIndexes.isContentDependentIndex(id); }); } return contentDependentIndexes; } @TestOnly public void waitForVfsEventsExecuted(long timeout, @NotNull TimeUnit unit) throws Exception { ApplicationManager.getApplication().assertIsDispatchThread(); long deadline = System.nanoTime() + unit.toNanos(timeout); while (System.nanoTime() < deadline) { try { ((BoundedTaskExecutor)myVfsEventsExecutor).waitAllTasksExecuted(100, TimeUnit.MILLISECONDS); return; } catch (TimeoutException e) { UIUtil.dispatchAllInvocationEvents(); } } } }
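/*
 * Minimal, self-contained sketch of the Phaser handshake used by processFilesInReadAction()
 * above: every caller registers before processing, deregisters when done, then waits until all
 * parties registered for the same phase have arrived, so bookkeeping such as
 * myProcessedEventIndex is only advanced once no concurrent processing remains. The worker body
 * here is a stand-in.
 */
class PhaserHandshakeSketch {
  private final java.util.concurrent.Phaser workersFinishedSync =
      new java.util.concurrent.Phaser() {
        @Override
        protected boolean onAdvance(int phase, int registeredParties) {
          return false; // never terminate; the phaser is reused for every batch of VFS events
        }
      };

  void processBatch(Runnable processEvents) {
    workersFinishedSync.register();
    int phase = workersFinishedSync.getPhase();
    try {
      processEvents.run(); // stand-in for draining the event merger
    } finally {
      workersFinishedSync.arriveAndDeregister();
    }
    // blocks until every party registered in this phase has arrived (returns at once if done)
    workersFinishedSync.awaitAdvance(phase);
  }
}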
/* ----------------------------------------------------------------------------- Cogaen - Component-based Game Engine V3 ----------------------------------------------------------------------------- This software is developed by the Cogaen Development Team. Please have a look at our project home page for further details: http://www.cogaen.org - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Copyright (c) 2010-2011 Roman Divotkey Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ package org.cogaen.util; import java.util.Arrays; /** * <p>A multiset container (also known as <em>bag</em>) that can be iterated and modified at the same time. * It is safe to add and remove elements while iterating this bag.
However, this container does not follow the * Java Collections Framework API.</p> * * <p>Another limitation is that the sequence of inserted elements might not be preserved during different * iterations.</p> * * <p> * Example usage: * <pre> * Bag<Integer> myBag = new Bag<Integer>(); * myBag.add(1); * myBag.add(2); * myBag.add(3); * * for (myBag.reset(); myBag.hasNext(); ) { * System.out.println("element: " + myBag.next()); * } * </pre> * </p> * * * @param <E> the type of elements maintained by this bag * @author Roman Divotkey */ public class Bag<E> { private static final int DEFAULT_CAPACITY = 10; private Object[] elements; private int size; private int remaining; private int pos; private boolean elementRemoved = false; public Bag(int initialCapacity) { this.elements = new Object[initialCapacity]; } public Bag() { this(DEFAULT_CAPACITY); } public int size() { return this.size; } public boolean isEmpty() { return this.size == 0; } public boolean add(E e) { int length = this.elements.length; for (int i = this.pos; i < length; ++i) { if (this.elements[i] == null) { this.elements[i] = e; ++this.size; ++this.remaining; return true; } } ensureCapacity(length + 1); this.elements[length] = e; ++this.size; ++this.remaining; return true; } public boolean contains(E e) { return find(e) != -1; } public boolean remove(E e) { int idx = find(e); if (idx != -1) { this.elements[idx] = null; --this.size; this.elementRemoved = true; if (idx >= this.pos) { --this.remaining; } return true; } else { return false; } } public void reset() { this.pos = 0; this.remaining = this.size; if (this.elementRemoved) { optimize(); } } private void optimize() { int length = this.elements.length; int cnt = 0; int idx = length - 1; for (int i = 0; i < length && cnt < this.size; ++i) { if (this.elements[i] == null) { while (this.elements[idx] == null) { --idx; } this.elements[i] = this.elements[idx]; this.elements[idx--] = null; } ++cnt; } } public boolean hasNext() { return this.remaining > 0; } public E get(int idx) { int j = 0; for (int i = 0; i < this.size; ++i) { if (elements[i] != null && j == idx) { break; } else { j++; } } if (j < this.size && this.elements[j] != null) { @SuppressWarnings("unchecked") E result = (E) this.elements[j]; return result; } throw new IndexOutOfBoundsException(); } public E next() { if (this.remaining <= 0) { return null; } while (this.elements[pos] == null) { ++pos; } --this.remaining; @SuppressWarnings("unchecked") E result = (E) this.elements[this.pos++]; return result; } private int find(E e) { int size = this.elements.length; for (int i = 0; i < size; ++i) { if (e.equals(this.elements[i])) { return i; } } return -1; } private void ensureCapacity(int minCapacity) { int oldCapacity = this.elements.length; if (minCapacity > oldCapacity) { int newCapacity = (oldCapacity * 3) / 2 + 1; if (newCapacity < minCapacity) { newCapacity = minCapacity; } this.elements = Arrays.copyOf(this.elements, newCapacity); } } }
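/*
 * Illustrative sketch, not part of the class above (assumes org.cogaen.util.Bag is on the
 * classpath). It shows the point of the reset()/hasNext()/next() protocol: elements may be
 * removed while the bag is being iterated, where an ArrayList iterator would typically fail
 * with a ConcurrentModificationException.
 */
class BagRemovalSketch {
  static void sketch() {
    Bag<String> bag = new Bag<String>();
    bag.add("keep");
    bag.add("drop");
    bag.add("keep too");
    for (bag.reset(); bag.hasNext(); ) {
      String element = bag.next();
      if (element.startsWith("drop")) {
        bag.remove(element); // safe: the remaining/pos bookkeeping tolerates in-flight removal
      }
    }
    System.out.println(bag.size()); // prints 2; element order may differ on later iterations
  }
}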
package com.farwolf.view; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Bitmap; import android.graphics.BitmapShader; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.ColorFilter; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.RectF; import android.graphics.Shader; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.net.Uri; import android.support.annotation.ColorRes; import android.support.annotation.DrawableRes; import android.util.AttributeSet; import android.widget.ImageView; import com.farwolf.libary.R; public class CircleImageView extends ImageView { private static final ScaleType SCALE_TYPE = ScaleType.CENTER_CROP; private static final Bitmap.Config BITMAP_CONFIG = Bitmap.Config.ARGB_8888; private static final int COLORDRAWABLE_DIMENSION = 2; private static final int DEFAULT_BORDER_WIDTH = 0; private static final int DEFAULT_BORDER_COLOR = Color.BLACK; private static final boolean DEFAULT_BORDER_OVERLAY = false; private final RectF mDrawableRect = new RectF(); private final RectF mBorderRect = new RectF(); private final Matrix mShaderMatrix = new Matrix(); private final Paint mBitmapPaint = new Paint(); private final Paint mBorderPaint = new Paint(); private int mBorderColor = DEFAULT_BORDER_COLOR; private int mBorderWidth = DEFAULT_BORDER_WIDTH; private Bitmap mBitmap; private BitmapShader mBitmapShader; private int mBitmapWidth; private int mBitmapHeight; private float mDrawableRadius; private float mBorderRadius; private ColorFilter mColorFilter; private boolean mReady; private boolean mSetupPending; private boolean mBorderOverlay; public CircleImageView(Context context) { super(context); init(); } public CircleImageView(Context context, AttributeSet attrs) { this(context, attrs, 0); } public CircleImageView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CircleImageView, defStyle, 0); mBorderWidth = a.getDimensionPixelSize(R.styleable.CircleImageView_border_width, DEFAULT_BORDER_WIDTH); mBorderColor = a.getColor(R.styleable.CircleImageView_border_color, DEFAULT_BORDER_COLOR); mBorderOverlay = a.getBoolean(R.styleable.CircleImageView_border_overlay, DEFAULT_BORDER_OVERLAY); a.recycle(); init(); } private void init() { super.setScaleType(SCALE_TYPE); mReady = true; if (mSetupPending) { setup(); mSetupPending = false; } } @Override public ScaleType getScaleType() { return SCALE_TYPE; } @Override public void setScaleType(ScaleType scaleType) { if (scaleType != SCALE_TYPE) { throw new IllegalArgumentException(String.format("ScaleType %s not supported.", scaleType)); } } @Override public void setAdjustViewBounds(boolean adjustViewBounds) { if (adjustViewBounds) { throw new IllegalArgumentException("adjustViewBounds not supported."); } } @Override protected void onDraw(Canvas canvas) { if (getDrawable() == null) { return; } canvas.drawCircle(getWidth() / 2, getHeight() / 2, mDrawableRadius, mBitmapPaint); if (mBorderWidth != 0) { canvas.drawCircle(getWidth() / 2, getHeight() / 2, mBorderRadius, mBorderPaint); } } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); setup(); } public int getBorderColor() { return mBorderColor; } public void setBorderColor(int borderColor) { if (borderColor == 
mBorderColor) { return; } mBorderColor = borderColor; mBorderPaint.setColor(mBorderColor); invalidate(); } public void setBorderColorResource(@ColorRes int borderColorRes) { setBorderColor(getContext().getResources().getColor(borderColorRes)); } public int getBorderWidth() { return mBorderWidth; } public void setBorderWidth(int borderWidth) { if (borderWidth == mBorderWidth) { return; } mBorderWidth = borderWidth; setup(); } public boolean isBorderOverlay() { return mBorderOverlay; } public void setBorderOverlay(boolean borderOverlay) { if (borderOverlay == mBorderOverlay) { return; } mBorderOverlay = borderOverlay; setup(); } @Override public void setImageBitmap(Bitmap bm) { super.setImageBitmap(bm); mBitmap = bm; setup(); } @Override public void setImageDrawable(Drawable drawable) { super.setImageDrawable(drawable); mBitmap = getBitmapFromDrawable(drawable); setup(); } @Override public void setImageResource(@DrawableRes int resId) { super.setImageResource(resId); mBitmap = getBitmapFromDrawable(getDrawable()); setup(); } @Override public void setImageURI(Uri uri) { super.setImageURI(uri); mBitmap = getBitmapFromDrawable(getDrawable()); setup(); } @Override public void setColorFilter(ColorFilter cf) { if (cf == mColorFilter) { return; } mColorFilter = cf; mBitmapPaint.setColorFilter(mColorFilter); invalidate(); } private Bitmap getBitmapFromDrawable(Drawable drawable) { if (drawable == null) { return null; } if (drawable instanceof BitmapDrawable) { return ((BitmapDrawable) drawable).getBitmap(); } try { Bitmap bitmap; if (drawable instanceof ColorDrawable) { bitmap = Bitmap.createBitmap(COLORDRAWABLE_DIMENSION, COLORDRAWABLE_DIMENSION, BITMAP_CONFIG); } else { bitmap = Bitmap.createBitmap(drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight(), BITMAP_CONFIG); } Canvas canvas = new Canvas(bitmap); drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight()); drawable.draw(canvas); return bitmap; } catch (OutOfMemoryError e) { return null; } } private void setup() { if (!mReady) { mSetupPending = true; return; } if (mBitmap == null) { return; } mBitmapShader = new BitmapShader(mBitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); mBitmapPaint.setAntiAlias(true); mBitmapPaint.setShader(mBitmapShader); mBorderPaint.setStyle(Paint.Style.STROKE); mBorderPaint.setAntiAlias(true); mBorderPaint.setColor(mBorderColor); mBorderPaint.setStrokeWidth(mBorderWidth); mBitmapHeight = mBitmap.getHeight(); mBitmapWidth = mBitmap.getWidth(); mBorderRect.set(0, 0, getWidth(), getHeight()); mBorderRadius = Math.min((mBorderRect.height() - mBorderWidth) / 2, (mBorderRect.width() - mBorderWidth) / 2); mDrawableRect.set(mBorderRect); if (!mBorderOverlay) { mDrawableRect.inset(mBorderWidth, mBorderWidth); } mDrawableRadius = Math.min(mDrawableRect.height() / 2, mDrawableRect.width() / 2); updateShaderMatrix(); invalidate(); } private void updateShaderMatrix() { float scale; float dx = 0; float dy = 0; mShaderMatrix.set(null); if (mBitmapWidth * mDrawableRect.height() > mDrawableRect.width() * mBitmapHeight) { scale = mDrawableRect.height() / (float) mBitmapHeight; dx = (mDrawableRect.width() - mBitmapWidth * scale) * 0.5f; } else { scale = mDrawableRect.width() / (float) mBitmapWidth; dy = (mDrawableRect.height() - mBitmapHeight * scale) * 0.5f; } mShaderMatrix.setScale(scale, scale); mShaderMatrix.postTranslate((int) (dx + 0.5f) + mDrawableRect.left, (int) (dy + 0.5f) + mDrawableRect.top); mBitmapShader.setLocalMatrix(mShaderMatrix); } }
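/*
 * Stand-alone sketch of the center-crop arithmetic in updateShaderMatrix() above: pick the
 * uniform scale that makes the bitmap cover the destination rect, then split the overflow
 * evenly so the crop is centered. Inputs are arbitrary example dimensions.
 */
class CenterCropMathSketch {
  /** Returns {scale, dx, dy} for drawing a bitmapW x bitmapH bitmap into a rectW x rectH rect. */
  static float[] centerCrop(int bitmapW, int bitmapH, float rectW, float rectH) {
    float scale;
    float dx = 0f;
    float dy = 0f;
    if (bitmapW * rectH > rectW * bitmapH) {
      // bitmap is proportionally wider than the rect: match heights, crop left and right
      scale = rectH / bitmapH;
      dx = (rectW - bitmapW * scale) * 0.5f;
    } else {
      // bitmap is proportionally taller: match widths, crop top and bottom
      scale = rectW / bitmapW;
      dy = (rectH - bitmapH * scale) * 0.5f;
    }
    return new float[] {scale, dx, dy};
  }
}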
/** * */ package com.qshuttle.passenger; import java.util.Calendar; import java.util.Date; import org.json.JSONArray; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.util.Log; import android.view.View; import android.view.Window; import android.view.View.OnClickListener; import android.widget.DatePicker; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.ProgressBar; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; /** * @author wangpeifeng * */ public class ActivityOrderInput extends Activity{ ///////////////////////////////////////////////// // PROPERTIES, PUBLIC ///////////////////////////////////////////////// ///////////////////////////////////////////////// // PROPERTIES, PROTECTED ///////////////////////////////////////////////// ///////////////////////////////////////////////// // PROPERTIES, PRIVATE ///////////////////////////////////////////////// private Context context; private ProgressBar progressBar; private int hub_serial,area_serial; private String hub_name,area_name,line_type, from, to; private Date date; private String instance_date; private Calendar calendar; private LinearLayout lytPassenger,lytAccount; private RelativeLayout lytPay; private long clickPayStamp; private TextView tvPassengers,tvAccount,tvUserName; private TextView tvHubName, tvLineType, tvInstanceDate, tvDepartureTime, tvArriveTime, tvPrice, tvPriceTotal, tvFrom,tvTo; ///////////////////////////////////////////////// // CONSTANTS ///////////////////////////////////////////////// private static final int HANDLER_MSG_SUCCESS = 1; private static final int HANDLER_MSG_FAILED = 1 + HANDLER_MSG_SUCCESS; private static final int HANDLER_MSG_NULL = 1 + HANDLER_MSG_FAILED; private static final int HANDLER_MSG_WAITING = 1 + HANDLER_MSG_NULL; private static final int HANDLER_MSG_RUNNING = 1 + HANDLER_MSG_WAITING; private static final String BUNDLE_KEY_TOAST = "toast"; private static final int ACTIVITY_PASSENGER_SELECT = 1; private static final int ACTIVITY_LOGIN = 1 + ACTIVITY_PASSENGER_SELECT; private static final int CLICK_DEBOUNCE = 1000; private static final int HANDLER_MSG_ORDER_SUCCESS = 1; private static final int HANDLER_MSG_ORDER_FAILED = 1 + HANDLER_MSG_ORDER_SUCCESS; private boolean waiting = false; /* * PROPERTIES, BEHAVIAR */ public OnClickListener lsrPassenger = new OnClickListener(){ public void onClick(View v) { // TODO Auto-generated method stub Intent intent = new Intent(); intent.setClass(getApplicationContext(), ActivityPassengerSelect.class); startActivityForResult(intent,ACTIVITY_PASSENGER_SELECT); } }; public OnClickListener lsrAccount = new OnClickListener(){ public void onClick(View v) { // TODO Auto-generated method stub Intent intent = new Intent(); intent.setClass(getApplicationContext(), ActivityLogin.class); startActivityForResult(intent,ACTIVITY_LOGIN); } }; public OnClickListener lsrPay = new OnClickListener(){ public void onClick(View v) { // TODO Auto-generated method stub if(!waiting && System.currentTimeMillis() - clickPayStamp > CLICK_DEBOUNCE){ if(PrefProxy.getMyOrderPriceTotal(context) > PrefProxy.getMyInstancePrice(context)*PrefProxy.getMyInstanceTickets(context)){ Toast.makeText(context, context.getResources().getString(R.string.too_much_passenger), Toast.LENGTH_SHORT).show(); } else{ WebApi webApi = new WebApi(context); 
webApi.setOnHttpResponse(onHttpResponsePay); webApi.commitOrder(PrefProxy.getMyOrder(context)); clickPayStamp = System.currentTimeMillis(); handler.sendEmptyMessage(HANDLER_MSG_WAITING); } } } }; private OnHttpResponse onHttpResponsePay = new OnHttpResponse(){ @Override public void doHttpResponse(String response) { // TODO Auto-generated method stub if (WebApi.isRespSuccess(response)){ Message msg = new Message(); msg.what = HANDLER_MSG_ORDER_SUCCESS; Bundle bundle = new Bundle(); bundle.putString(BUNDLE_KEY_TOAST, new WebApi(context).getRespMsg(response)); msg.setData(bundle); handler.sendMessage(msg); } else{ Message msg = new Message(); msg.what = HANDLER_MSG_ORDER_FAILED; Bundle bundle = new Bundle(); bundle.putString(BUNDLE_KEY_TOAST, new WebApi(context).getRespMsg(response)); msg.setData(bundle); handler.sendMessage(msg); } handler.sendEmptyMessage(HANDLER_MSG_RUNNING); } }; private Handler handler = new Handler(){ /* (non-Javadoc) * @see android.os.Handler#handleMessage(android.os.Message) */ @Override public void handleMessage(Message msg) { // TODO Auto-generated method stub switch(msg.what){ case HANDLER_MSG_ORDER_SUCCESS: String toast = msg.getData().getString(BUNDLE_KEY_TOAST); Toast.makeText(context, toast, Toast.LENGTH_LONG).show(); setResult(Activity.RESULT_OK,null); finish(); break; case HANDLER_MSG_ORDER_FAILED: toast = msg.getData().getString(BUNDLE_KEY_TOAST); Toast.makeText(context, toast, Toast.LENGTH_LONG).show(); break; case HANDLER_MSG_WAITING: progressBar.setVisibility(View.VISIBLE); waiting = true; break; case HANDLER_MSG_RUNNING: progressBar.setVisibility(View.INVISIBLE); waiting = false; } super.handleMessage(msg); } }; /* (non-Javadoc) * @see android.app.Activity#onCreate(android.os.Bundle) */ @Override protected void onCreate(Bundle savedInstanceState) { // TODO Auto-generated method stub super.onCreate(savedInstanceState); context = this; this.requestWindowFeature(Window.FEATURE_NO_TITLE); setContentView(R.layout.booking_order); tvHubName = (TextView)findViewById(R.id.textViewHub); tvHubName.setText(PrefProxy.getMyInstanceHubName(context)); tvLineType = (TextView)findViewById(R.id.textViewLineType); tvLineType.setText(PrefProxy.getMyInstanceLineType(context)); tvDepartureTime = (TextView)findViewById(R.id.textViewTimeDeparture); tvDepartureTime.setText(PrefProxy.getMyInstanceDepartureTime(context)); tvArriveTime = (TextView)findViewById(R.id.textViewTimeArrive); tvArriveTime.setText(PrefProxy.getMyInstanceArriveTime(context)); tvPrice = (TextView)findViewById(R.id.textViewPrice); tvPrice.setText(""+PrefProxy.getMyInstancePrice(context)); ((TextView)findViewById(R.id.textViewTicket)).setText(""+PrefProxy.getMyInstanceTickets(context)); tvPriceTotal = (TextView)findViewById(R.id.textViewPriceTotal); tvPriceTotal.setText(""+PrefProxy.getMyOrderPriceTotal(context)); tvFrom = (TextView)findViewById(R.id.textViewFrom); tvFrom.setText(PrefProxy.getMyInstanceFrom(context)); tvTo = (TextView)findViewById(R.id.textViewTo); tvTo.setText(PrefProxy.getMyInstanceTo(context)); tvInstanceDate = (TextView)findViewById(R.id.textViewDate); tvInstanceDate.setText(PrefProxy.getMyInstanceDate(context)); lytPassenger = (LinearLayout)findViewById(R.id.layoutPassenger); lytPassenger.setOnClickListener(lsrPassenger); lytAccount = (LinearLayout)findViewById(R.id.layoutContact); lytAccount.setOnClickListener(lsrAccount); lytPay = (RelativeLayout)findViewById(R.id.layoutPay); lytPay.setOnClickListener(lsrPay); clickPayStamp = System.currentTimeMillis(); tvAccount = 
(TextView)findViewById(R.id.textViewAccountATOrder); tvAccount.setText(PrefProxy.getAccount(context)); tvUserName = (TextView)findViewById(R.id.textViewUserName); tvUserName.setText(PrefProxy.getUserName(context)); tvPassengers = (TextView)findViewById(R.id.textViewPassengerInfo); progressBar = (ProgressBar) findViewById(R.id.progressBar); } /* (non-Javadoc) * @see android.app.Activity#onActivityResult(int, int, android.content.Intent) */ @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { // TODO Auto-generated method stub if(resultCode==Activity.RESULT_OK){ switch(requestCode){ case ACTIVITY_LOGIN: tvAccount.setText(PrefProxy.getAccount(context)); tvUserName.setText(PrefProxy.getUserName(context)); break; case ACTIVITY_PASSENGER_SELECT: String strPassengers = data.getStringExtra(ActivityPassengerSelect.EXTRA_PASSENGERS); tvPassengers.setText(strPassengers); tvPriceTotal.setText(""+PrefProxy.getMyOrderPriceTotal(context)); if(PrefProxy.getMyOrderPriceTotal(context) > PrefProxy.getMyInstancePrice(context)*PrefProxy.getMyInstanceTickets(context)){ Toast.makeText(context, context.getResources().getString(R.string.too_much_passenger), Toast.LENGTH_SHORT).show(); } break; } } //super.onActivityResult(requestCode, resultCode, data); } }
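/*
 * Minimal sketch of the click-debounce guard used by lsrPay above: a tap is ignored while a
 * request is still in flight, or if it arrives within CLICK_DEBOUNCE milliseconds of the last
 * accepted tap. Field and constant names mirror the activity; the class itself is illustrative.
 */
class ClickDebounceSketch {
  private static final int CLICK_DEBOUNCE = 1000; // milliseconds
  private long clickPayStamp;
  private boolean waiting;

  boolean shouldHandleClick() {
    long now = System.currentTimeMillis();
    if (waiting || now - clickPayStamp <= CLICK_DEBOUNCE) {
      return false; // still waiting for the server, or the previous tap was too recent
    }
    clickPayStamp = now; // accept this tap and restart the debounce window
    return true;
  }
}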
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.codec.http.multipart; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufInputStream; import io.netty.buffer.ByteBufUtil; import io.netty.buffer.Unpooled; import io.netty.util.internal.PlatformDependent; import org.junit.jupiter.api.Test; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.nio.charset.Charset; import java.security.SecureRandom; import java.util.Arrays; import java.util.Random; import java.util.UUID; import static io.netty.util.CharsetUtil.*; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; /** {@link AbstractMemoryHttpData} test cases. */ public class AbstractMemoryHttpDataTest { @Test public void testSetContentFromFile() throws Exception { TestHttpData test = new TestHttpData("test", UTF_8, 0); try { File tmpFile = PlatformDependent.createTempFile(UUID.randomUUID().toString(), ".tmp", null); tmpFile.deleteOnExit(); FileOutputStream fos = new FileOutputStream(tmpFile); byte[] bytes = new byte[4096]; PlatformDependent.threadLocalRandom().nextBytes(bytes); try { fos.write(bytes); fos.flush(); } finally { fos.close(); } test.setContent(tmpFile); ByteBuf buf = test.getByteBuf(); assertEquals(buf.readerIndex(), 0); assertEquals(buf.writerIndex(), bytes.length); assertArrayEquals(bytes, test.get()); assertArrayEquals(bytes, ByteBufUtil.getBytes(buf)); } finally { //release the ByteBuf test.delete(); } } @Test public void testRenameTo() throws Exception { TestHttpData test = new TestHttpData("test", UTF_8, 0); try { File tmpFile = PlatformDependent.createTempFile(UUID.randomUUID().toString(), ".tmp", null); tmpFile.deleteOnExit(); final int totalByteCount = 4096; byte[] bytes = new byte[totalByteCount]; PlatformDependent.threadLocalRandom().nextBytes(bytes); ByteBuf content = Unpooled.wrappedBuffer(bytes); test.setContent(content); boolean succ = test.renameTo(tmpFile); assertTrue(succ); FileInputStream fis = new FileInputStream(tmpFile); try { byte[] buf = new byte[totalByteCount]; int count = 0; int offset = 0; int size = totalByteCount; while ((count = fis.read(buf, offset, size)) > 0) { offset += count; size -= count; if (offset >= totalByteCount || size <= 0) { break; } } assertArrayEquals(bytes, buf); assertEquals(0, fis.available()); } finally { fis.close(); } } finally { //release the ByteBuf in AbstractMemoryHttpData test.delete(); } } /** * Provide content into HTTP data with input stream. * * @throws Exception In case of any exception. 
*/ @Test public void testSetContentFromStream() throws Exception { // definedSize=0 TestHttpData test = new TestHttpData("test", UTF_8, 0); String contentStr = "foo_test"; ByteBuf buf = Unpooled.wrappedBuffer(contentStr.getBytes(UTF_8)); buf.markReaderIndex(); ByteBufInputStream is = new ByteBufInputStream(buf); try { test.setContent(is); assertFalse(buf.isReadable()); assertEquals(test.getString(UTF_8), contentStr); buf.resetReaderIndex(); assertTrue(ByteBufUtil.equals(buf, test.getByteBuf())); } finally { is.close(); } Random random = new SecureRandom(); for (int i = 0; i < 20; i++) { // Generate input data bytes. int size = random.nextInt(Short.MAX_VALUE); byte[] bytes = new byte[size]; random.nextBytes(bytes); // Generate parsed HTTP data block. TestHttpData data = new TestHttpData("name", UTF_8, 0); data.setContent(new ByteArrayInputStream(bytes)); // Validate stored data. ByteBuf buffer = data.getByteBuf(); assertEquals(0, buffer.readerIndex()); assertEquals(bytes.length, buffer.writerIndex()); assertArrayEquals(bytes, Arrays.copyOf(buffer.array(), bytes.length)); assertArrayEquals(bytes, data.get()); } } /** Memory-based HTTP data implementation for test purposes. */ private static final class TestHttpData extends AbstractMemoryHttpData { /** * Constructs HTTP data for tests. * * @param name Name of parsed data block. * @param charset Used charset for data decoding. * @param size Expected data block size. */ private TestHttpData(String name, Charset charset, long size) { super(name, charset, size); } @Override public InterfaceHttpData.HttpDataType getHttpDataType() { throw reject(); } @Override public HttpData copy() { throw reject(); } @Override public HttpData duplicate() { throw reject(); } @Override public HttpData retainedDuplicate() { throw reject(); } @Override public HttpData replace(ByteBuf content) { return null; } @Override public int compareTo(InterfaceHttpData o) { throw reject(); } @Override public int hashCode() { return super.hashCode(); } @Override public boolean equals(Object obj) { return super.equals(obj); } private static UnsupportedOperationException reject() { throw new UnsupportedOperationException("Should never be called."); } } }
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package hyn.com.lib; import java.util.HashMap; import java.util.regex.Pattern; /** * Two-way map that maps MIME-types to file extensions and vice versa. */ public final class MimeTypeMap { public static final String DEFAULT_MIME_TYPE = "application/octet-stream"; /** * Singleton MIME-type map instance: */ private static MimeTypeMap sMimeTypeMap; /** * MIME-type to file extension mapping: */ private HashMap<String, String> mMimeTypeToExtensionMap; /** * File extension to MIME type mapping: */ private HashMap<String, String> mExtensionToMimeTypeMap; /** * Creates a new MIME-type map. */ private MimeTypeMap() { mMimeTypeToExtensionMap = new HashMap<String, String>(); mExtensionToMimeTypeMap = new HashMap<String, String>(); } /** * Returns the file extension or an empty string iff there is no * extension. This method is a convenience method for obtaining the * extension of a url and has undefined results for other Strings. * @param url * @return The file extension of the given url. */ public static String getFileExtensionFromUrl(String url) { if (url != null && url.length() > 0) { int query = url.lastIndexOf('?'); if (query > 0) { url = url.substring(0, query); } int filenamePos = url.lastIndexOf('/'); String filename = 0 <= filenamePos ? url.substring(filenamePos + 1) : url; // if the filename contains special characters, we don't // consider it valid for our matching purposes: if (filename.length() > 0 && Pattern.matches("[a-zA-Z_0-9\\.\\-\\(\\)\\%]+", filename)) { int dotPos = filename.lastIndexOf('.'); if (0 <= dotPos) { return filename.substring(dotPos + 1); } } } return ""; } /** * Load an entry into the map. This does not check if the item already * exists, it trusts the caller! */ private void loadEntry(String mimeType, String extension) { // // if we have an existing x --> y mapping, we do not want to // override it with another mapping x --> ? // this is mostly because of the way the mime-type map below // is constructed (if a mime type maps to several extensions // the first extension is considered the most popular and is // added first; we do not want to overwrite it later). // if (!mMimeTypeToExtensionMap.containsKey(mimeType)) { mMimeTypeToExtensionMap.put(mimeType, extension); } mExtensionToMimeTypeMap.put(extension, mimeType); } /** * Return true if the given MIME type has an entry in the map. * @param mimeType A MIME type (i.e. text/plain) * @return True iff there is a mimeType entry in the map. */ public boolean hasMimeType(String mimeType) { if (mimeType != null && mimeType.length() > 0) { return mMimeTypeToExtensionMap.containsKey(mimeType); } return false; } /** * Return the MIME type for the given extension. * @param extension A file extension without the leading '.' * @return The MIME type for the given extension or null iff there is none. 
*/ public String getMimeTypeFromExtension(String extension) { if (extension != null && extension.length() > 0) { return mExtensionToMimeTypeMap.get(extension); } return null; } // Static method called by jni. @SuppressWarnings("unused") private static String mimeTypeFromExtension(String extension) { return getSingleton().getMimeTypeFromExtension(extension); } /** * Return true if the given extension has a registered MIME type. * @param extension A file extension without the leading '.' * @return True iff there is an extension entry in the map. */ public boolean hasExtension(String extension) { if (extension != null && extension.length() > 0) { return mExtensionToMimeTypeMap.containsKey(extension); } return false; } /** * Return the registered extension for the given MIME type. Note that some * MIME types map to multiple extensions. This call will return the most * common extension for the given MIME type. * @param mimeType A MIME type (i.e. text/plain) * @return The extension for the given MIME type or null iff there is none. */ public String getExtensionFromMimeType(String mimeType) { if (mimeType != null && mimeType.length() > 0) { return mMimeTypeToExtensionMap.get(mimeType); } return null; } /** * Get the singleton instance of MimeTypeMap. * @return The singleton instance of the MIME-type map. */ public static MimeTypeMap getSingleton() { if (sMimeTypeMap == null) { sMimeTypeMap = new MimeTypeMap(); // The following table is based on /etc/mime.types data minus // chemical/* MIME types and MIME types that don't map to any // file extensions. We also exclude top-level domain names to // deal with cases like: // // mail.google.com/a/google.com // // and "active" MIME types (due to potential security issues). sMimeTypeMap.loadEntry("application/andrew-inset", "ez"); sMimeTypeMap.loadEntry("application/dsptype", "tsp"); sMimeTypeMap.loadEntry("application/futuresplash", "spl"); sMimeTypeMap.loadEntry("application/hta", "hta"); sMimeTypeMap.loadEntry("application/mac-binhex40", "hqx"); sMimeTypeMap.loadEntry("application/mac-compactpro", "cpt"); sMimeTypeMap.loadEntry("application/mathematica", "nb"); sMimeTypeMap.loadEntry("application/msaccess", "mdb"); sMimeTypeMap.loadEntry("application/oda", "oda"); sMimeTypeMap.loadEntry("application/ogg", "ogg"); sMimeTypeMap.loadEntry("application/pdf", "pdf"); sMimeTypeMap.loadEntry("application/pgp-keys", "key"); sMimeTypeMap.loadEntry("application/pgp-signature", "pgp"); sMimeTypeMap.loadEntry("application/pics-rules", "prf"); sMimeTypeMap.loadEntry("application/rar", "rar"); sMimeTypeMap.loadEntry("application/rdf+xml", "rdf"); sMimeTypeMap.loadEntry("application/rss+xml", "rss"); sMimeTypeMap.loadEntry("application/zip", "zip"); sMimeTypeMap.loadEntry("application/vnd.android.package-archive", "apk"); sMimeTypeMap.loadEntry("application/vnd.cinderella", "cdy"); sMimeTypeMap.loadEntry("application/vnd.ms-pki.stl", "stl"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.database", "odb"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.formula", "odf"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.graphics", "odg"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.graphics-template", "otg"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.image", "odi"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.spreadsheet", "ods"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.spreadsheet-template", "ots"); sMimeTypeMap.loadEntry( 
"application/vnd.oasis.opendocument.text", "odt"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.text-master", "odm"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.text-template", "ott"); sMimeTypeMap.loadEntry( "application/vnd.oasis.opendocument.text-web", "oth"); sMimeTypeMap.loadEntry("application/msword", "doc"); sMimeTypeMap.loadEntry("application/msword", "dot"); sMimeTypeMap.loadEntry( "application/vnd.openxmlformats-officedocument.wordprocessingml.document", "docx"); sMimeTypeMap.loadEntry( "application/vnd.openxmlformats-officedocument.wordprocessingml.template", "dotx"); sMimeTypeMap.loadEntry("application/vnd.ms-excel", "xls"); sMimeTypeMap.loadEntry("application/vnd.ms-excel", "xlt"); sMimeTypeMap.loadEntry( "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", "xlsx"); sMimeTypeMap.loadEntry( "application/vnd.openxmlformats-officedocument.spreadsheetml.template", "xltx"); sMimeTypeMap.loadEntry("application/vnd.ms-powerpoint", "ppt"); sMimeTypeMap.loadEntry("application/vnd.ms-powerpoint", "pot"); sMimeTypeMap.loadEntry("application/vnd.ms-powerpoint", "pps"); sMimeTypeMap.loadEntry( "application/vnd.openxmlformats-officedocument.presentationml.presentation", "pptx"); sMimeTypeMap.loadEntry( "application/vnd.openxmlformats-officedocument.presentationml.template", "potx"); sMimeTypeMap.loadEntry( "application/vnd.openxmlformats-officedocument.presentationml.slideshow", "ppsx"); sMimeTypeMap.loadEntry("application/vnd.rim.cod", "cod"); sMimeTypeMap.loadEntry("application/vnd.smaf", "mmf"); sMimeTypeMap.loadEntry("application/vnd.stardivision.calc", "sdc"); sMimeTypeMap.loadEntry("application/vnd.stardivision.draw", "sda"); sMimeTypeMap.loadEntry( "application/vnd.stardivision.impress", "sdd"); sMimeTypeMap.loadEntry( "application/vnd.stardivision.impress", "sdp"); sMimeTypeMap.loadEntry("application/vnd.stardivision.math", "smf"); sMimeTypeMap.loadEntry("application/vnd.stardivision.writer", "sdw"); sMimeTypeMap.loadEntry("application/vnd.stardivision.writer", "vor"); sMimeTypeMap.loadEntry( "application/vnd.stardivision.writer-global", "sgl"); sMimeTypeMap.loadEntry("application/vnd.sun.xml.calc", "sxc"); sMimeTypeMap.loadEntry( "application/vnd.sun.xml.calc.template", "stc"); sMimeTypeMap.loadEntry("application/vnd.sun.xml.draw", "sxd"); sMimeTypeMap.loadEntry( "application/vnd.sun.xml.draw.template", "std"); sMimeTypeMap.loadEntry("application/vnd.sun.xml.impress", "sxi"); sMimeTypeMap.loadEntry( "application/vnd.sun.xml.impress.template", "sti"); sMimeTypeMap.loadEntry("application/vnd.sun.xml.math", "sxm"); sMimeTypeMap.loadEntry("application/vnd.sun.xml.writer", "sxw"); sMimeTypeMap.loadEntry( "application/vnd.sun.xml.writer.global", "sxg"); sMimeTypeMap.loadEntry( "application/vnd.sun.xml.writer.template", "stw"); sMimeTypeMap.loadEntry("application/vnd.visio", "vsd"); sMimeTypeMap.loadEntry("application/x-abiword", "abw"); sMimeTypeMap.loadEntry("application/x-apple-diskimage", "dmg"); sMimeTypeMap.loadEntry("application/x-bcpio", "bcpio"); sMimeTypeMap.loadEntry("application/x-bittorrent", "torrent"); sMimeTypeMap.loadEntry("application/x-cdf", "cdf"); sMimeTypeMap.loadEntry("application/x-cdlink", "vcd"); sMimeTypeMap.loadEntry("application/x-chess-pgn", "pgn"); sMimeTypeMap.loadEntry("application/x-cpio", "cpio"); sMimeTypeMap.loadEntry("application/x-debian-package", "deb"); sMimeTypeMap.loadEntry("application/x-debian-package", "udeb"); sMimeTypeMap.loadEntry("application/x-director", "dcr"); 
sMimeTypeMap.loadEntry("application/x-director", "dir"); sMimeTypeMap.loadEntry("application/x-director", "dxr"); sMimeTypeMap.loadEntry("application/x-dms", "dms"); sMimeTypeMap.loadEntry("application/x-doom", "wad"); sMimeTypeMap.loadEntry("application/x-dvi", "dvi"); sMimeTypeMap.loadEntry("application/x-flac", "flac"); sMimeTypeMap.loadEntry("application/x-font", "pfa"); sMimeTypeMap.loadEntry("application/x-font", "pfb"); sMimeTypeMap.loadEntry("application/x-font", "gsf"); sMimeTypeMap.loadEntry("application/x-font", "pcf"); sMimeTypeMap.loadEntry("application/x-font", "pcf.Z"); sMimeTypeMap.loadEntry("application/x-freemind", "mm"); sMimeTypeMap.loadEntry("application/x-futuresplash", "spl"); sMimeTypeMap.loadEntry("application/x-gnumeric", "gnumeric"); sMimeTypeMap.loadEntry("application/x-go-sgf", "sgf"); sMimeTypeMap.loadEntry("application/x-graphing-calculator", "gcf"); sMimeTypeMap.loadEntry("application/x-gtar", "gtar"); sMimeTypeMap.loadEntry("application/x-gtar", "tgz"); sMimeTypeMap.loadEntry("application/x-gtar", "taz"); sMimeTypeMap.loadEntry("application/x-hdf", "hdf"); sMimeTypeMap.loadEntry("application/x-ica", "ica"); sMimeTypeMap.loadEntry("application/x-internet-signup", "ins"); sMimeTypeMap.loadEntry("application/x-internet-signup", "isp"); sMimeTypeMap.loadEntry("application/x-iphone", "iii"); sMimeTypeMap.loadEntry("application/x-iso9660-image", "iso"); sMimeTypeMap.loadEntry("application/x-jmol", "jmz"); sMimeTypeMap.loadEntry("application/x-kchart", "chrt"); sMimeTypeMap.loadEntry("application/x-killustrator", "kil"); sMimeTypeMap.loadEntry("application/x-koan", "skp"); sMimeTypeMap.loadEntry("application/x-koan", "skd"); sMimeTypeMap.loadEntry("application/x-koan", "skt"); sMimeTypeMap.loadEntry("application/x-koan", "skm"); sMimeTypeMap.loadEntry("application/x-kpresenter", "kpr"); sMimeTypeMap.loadEntry("application/x-kpresenter", "kpt"); sMimeTypeMap.loadEntry("application/x-kspread", "ksp"); sMimeTypeMap.loadEntry("application/x-kword", "kwd"); sMimeTypeMap.loadEntry("application/x-kword", "kwt"); sMimeTypeMap.loadEntry("application/x-latex", "latex"); sMimeTypeMap.loadEntry("application/x-lha", "lha"); sMimeTypeMap.loadEntry("application/x-lzh", "lzh"); sMimeTypeMap.loadEntry("application/x-lzx", "lzx"); sMimeTypeMap.loadEntry("application/x-maker", "frm"); sMimeTypeMap.loadEntry("application/x-maker", "maker"); sMimeTypeMap.loadEntry("application/x-maker", "frame"); sMimeTypeMap.loadEntry("application/x-maker", "fb"); sMimeTypeMap.loadEntry("application/x-maker", "book"); sMimeTypeMap.loadEntry("application/x-maker", "fbdoc"); sMimeTypeMap.loadEntry("application/x-mif", "mif"); sMimeTypeMap.loadEntry("application/x-ms-wmd", "wmd"); sMimeTypeMap.loadEntry("application/x-ms-wmz", "wmz"); sMimeTypeMap.loadEntry("application/x-msi", "msi"); sMimeTypeMap.loadEntry("application/x-ns-proxy-autoconfig", "pac"); sMimeTypeMap.loadEntry("application/x-nwc", "nwc"); sMimeTypeMap.loadEntry("application/x-object", "o"); sMimeTypeMap.loadEntry("application/x-oz-application", "oza"); sMimeTypeMap.loadEntry("application/x-pkcs12", "p12"); sMimeTypeMap.loadEntry("application/x-pkcs7-certreqresp", "p7r"); sMimeTypeMap.loadEntry("application/x-pkcs7-crl", "crl"); sMimeTypeMap.loadEntry("application/x-quicktimeplayer", "qtl"); sMimeTypeMap.loadEntry("application/x-shar", "shar"); sMimeTypeMap.loadEntry("application/x-shockwave-flash", "swf"); sMimeTypeMap.loadEntry("application/x-stuffit", "sit"); sMimeTypeMap.loadEntry("application/x-sv4cpio", "sv4cpio"); 
sMimeTypeMap.loadEntry("application/x-sv4crc", "sv4crc"); sMimeTypeMap.loadEntry("application/x-tar", "tar"); sMimeTypeMap.loadEntry("application/x-texinfo", "texinfo"); sMimeTypeMap.loadEntry("application/x-texinfo", "texi"); sMimeTypeMap.loadEntry("application/x-troff", "t"); sMimeTypeMap.loadEntry("application/x-troff", "roff"); sMimeTypeMap.loadEntry("application/x-troff-man", "man"); sMimeTypeMap.loadEntry("application/x-ustar", "ustar"); sMimeTypeMap.loadEntry("application/x-wais-source", "src"); sMimeTypeMap.loadEntry("application/x-wingz", "wz"); sMimeTypeMap.loadEntry("application/x-webarchive", "webarchive"); sMimeTypeMap.loadEntry("application/x-x509-ca-cert", "crt"); sMimeTypeMap.loadEntry("application/x-x509-user-cert", "crt"); sMimeTypeMap.loadEntry("application/x-xcf", "xcf"); sMimeTypeMap.loadEntry("application/x-xfig", "fig"); sMimeTypeMap.loadEntry("application/xhtml+xml", "xhtml"); sMimeTypeMap.loadEntry("audio/3gpp", "3gpp"); sMimeTypeMap.loadEntry("audio/amr", "amr"); sMimeTypeMap.loadEntry("audio/basic", "snd"); sMimeTypeMap.loadEntry("audio/midi", "mid"); sMimeTypeMap.loadEntry("audio/midi", "midi"); sMimeTypeMap.loadEntry("audio/midi", "kar"); sMimeTypeMap.loadEntry("audio/midi", "xmf"); sMimeTypeMap.loadEntry("audio/mobile-xmf", "mxmf"); sMimeTypeMap.loadEntry("audio/mpeg", "mpga"); sMimeTypeMap.loadEntry("audio/mpeg", "mpega"); sMimeTypeMap.loadEntry("audio/mpeg", "mp2"); sMimeTypeMap.loadEntry("audio/mpeg", "mp3"); sMimeTypeMap.loadEntry("audio/mpeg", "m4a"); sMimeTypeMap.loadEntry("audio/mpegurl", "m3u"); sMimeTypeMap.loadEntry("audio/prs.sid", "sid"); sMimeTypeMap.loadEntry("audio/x-aiff", "aif"); sMimeTypeMap.loadEntry("audio/x-aiff", "aiff"); sMimeTypeMap.loadEntry("audio/x-aiff", "aifc"); sMimeTypeMap.loadEntry("audio/x-gsm", "gsm"); sMimeTypeMap.loadEntry("audio/x-mpegurl", "m3u"); sMimeTypeMap.loadEntry("audio/x-ms-wma", "wma"); sMimeTypeMap.loadEntry("audio/x-ms-wax", "wax"); sMimeTypeMap.loadEntry("audio/x-pn-realaudio", "ra"); sMimeTypeMap.loadEntry("audio/x-pn-realaudio", "rm"); sMimeTypeMap.loadEntry("audio/x-pn-realaudio", "ram"); sMimeTypeMap.loadEntry("audio/x-realaudio", "ra"); sMimeTypeMap.loadEntry("audio/x-scpls", "pls"); sMimeTypeMap.loadEntry("audio/x-sd2", "sd2"); sMimeTypeMap.loadEntry("audio/x-wav", "wav"); sMimeTypeMap.loadEntry("image/bmp", "bmp"); sMimeTypeMap.loadEntry("image/gif", "gif"); sMimeTypeMap.loadEntry("image/ico", "cur"); sMimeTypeMap.loadEntry("image/ico", "ico"); sMimeTypeMap.loadEntry("image/ief", "ief"); sMimeTypeMap.loadEntry("image/jpeg", "jpeg"); sMimeTypeMap.loadEntry("image/jpeg", "jpg"); sMimeTypeMap.loadEntry("image/jpeg", "jpe"); sMimeTypeMap.loadEntry("image/pcx", "pcx"); sMimeTypeMap.loadEntry("image/png", "png"); sMimeTypeMap.loadEntry("image/svg+xml", "svg"); sMimeTypeMap.loadEntry("image/svg+xml", "svgz"); sMimeTypeMap.loadEntry("image/tiff", "tiff"); sMimeTypeMap.loadEntry("image/tiff", "tif"); sMimeTypeMap.loadEntry("image/vnd.djvu", "djvu"); sMimeTypeMap.loadEntry("image/vnd.djvu", "djv"); sMimeTypeMap.loadEntry("image/vnd.wap.wbmp", "wbmp"); sMimeTypeMap.loadEntry("image/x-cmu-raster", "ras"); sMimeTypeMap.loadEntry("image/x-coreldraw", "cdr"); sMimeTypeMap.loadEntry("image/x-coreldrawpattern", "pat"); sMimeTypeMap.loadEntry("image/x-coreldrawtemplate", "cdt"); sMimeTypeMap.loadEntry("image/x-corelphotopaint", "cpt"); sMimeTypeMap.loadEntry("image/x-icon", "ico"); sMimeTypeMap.loadEntry("image/x-jg", "art"); sMimeTypeMap.loadEntry("image/x-jng", "jng"); sMimeTypeMap.loadEntry("image/x-ms-bmp", "bmp"); 
sMimeTypeMap.loadEntry("image/x-photoshop", "psd"); sMimeTypeMap.loadEntry("image/x-portable-anymap", "pnm"); sMimeTypeMap.loadEntry("image/x-portable-bitmap", "pbm"); sMimeTypeMap.loadEntry("image/x-portable-graymap", "pgm"); sMimeTypeMap.loadEntry("image/x-portable-pixmap", "ppm"); sMimeTypeMap.loadEntry("image/x-rgb", "rgb"); sMimeTypeMap.loadEntry("image/x-xbitmap", "xbm"); sMimeTypeMap.loadEntry("image/x-xpixmap", "xpm"); sMimeTypeMap.loadEntry("image/x-xwindowdump", "xwd"); sMimeTypeMap.loadEntry("model/iges", "igs"); sMimeTypeMap.loadEntry("model/iges", "iges"); sMimeTypeMap.loadEntry("model/mesh", "msh"); sMimeTypeMap.loadEntry("model/mesh", "mesh"); sMimeTypeMap.loadEntry("model/mesh", "silo"); sMimeTypeMap.loadEntry("text/calendar", "ics"); sMimeTypeMap.loadEntry("text/calendar", "icz"); sMimeTypeMap.loadEntry("text/comma-separated-values", "csv"); sMimeTypeMap.loadEntry("text/css", "css"); sMimeTypeMap.loadEntry("text/html", "htm"); sMimeTypeMap.loadEntry("text/html", "html"); sMimeTypeMap.loadEntry("text/h323", "323"); sMimeTypeMap.loadEntry("text/iuls", "uls"); sMimeTypeMap.loadEntry("text/mathml", "mml"); // add it first so it will be the default for ExtensionFromMimeType sMimeTypeMap.loadEntry("text/plain", "txt"); sMimeTypeMap.loadEntry("text/plain", "asc"); sMimeTypeMap.loadEntry("text/plain", "text"); sMimeTypeMap.loadEntry("text/plain", "diff"); sMimeTypeMap.loadEntry("text/plain", "po"); // reserve "pot" for vnd.ms-powerpoint sMimeTypeMap.loadEntry("text/richtext", "rtx"); sMimeTypeMap.loadEntry("text/rtf", "rtf"); sMimeTypeMap.loadEntry("text/texmacs", "ts"); sMimeTypeMap.loadEntry("text/text", "phps"); sMimeTypeMap.loadEntry("text/tab-separated-values", "tsv"); sMimeTypeMap.loadEntry("text/xml", "xml"); sMimeTypeMap.loadEntry("text/x-bibtex", "bib"); sMimeTypeMap.loadEntry("text/x-boo", "boo"); sMimeTypeMap.loadEntry("text/x-c++hdr", "h++"); sMimeTypeMap.loadEntry("text/x-c++hdr", "hpp"); sMimeTypeMap.loadEntry("text/x-c++hdr", "hxx"); sMimeTypeMap.loadEntry("text/x-c++hdr", "hh"); sMimeTypeMap.loadEntry("text/x-c++src", "c++"); sMimeTypeMap.loadEntry("text/x-c++src", "cpp"); sMimeTypeMap.loadEntry("text/x-c++src", "cxx"); sMimeTypeMap.loadEntry("text/x-chdr", "h"); sMimeTypeMap.loadEntry("text/x-component", "htc"); sMimeTypeMap.loadEntry("text/x-csh", "csh"); sMimeTypeMap.loadEntry("text/x-csrc", "c"); sMimeTypeMap.loadEntry("text/x-dsrc", "d"); sMimeTypeMap.loadEntry("text/x-haskell", "hs"); sMimeTypeMap.loadEntry("text/x-java", "java"); sMimeTypeMap.loadEntry("text/x-literate-haskell", "lhs"); sMimeTypeMap.loadEntry("text/x-moc", "moc"); sMimeTypeMap.loadEntry("text/x-pascal", "p"); sMimeTypeMap.loadEntry("text/x-pascal", "pas"); sMimeTypeMap.loadEntry("text/x-pcs-gcd", "gcd"); sMimeTypeMap.loadEntry("text/x-setext", "etx"); sMimeTypeMap.loadEntry("text/x-tcl", "tcl"); sMimeTypeMap.loadEntry("text/x-tex", "tex"); sMimeTypeMap.loadEntry("text/x-tex", "ltx"); sMimeTypeMap.loadEntry("text/x-tex", "sty"); sMimeTypeMap.loadEntry("text/x-tex", "cls"); sMimeTypeMap.loadEntry("text/x-vcalendar", "vcs"); sMimeTypeMap.loadEntry("text/x-vcard", "vcf"); sMimeTypeMap.loadEntry("video/3gpp", "3gpp"); sMimeTypeMap.loadEntry("video/3gpp", "3gp"); sMimeTypeMap.loadEntry("video/3gpp", "3g2"); sMimeTypeMap.loadEntry("video/dl", "dl"); sMimeTypeMap.loadEntry("video/dv", "dif"); sMimeTypeMap.loadEntry("video/dv", "dv"); sMimeTypeMap.loadEntry("video/fli", "fli"); sMimeTypeMap.loadEntry("video/m4v", "m4v"); sMimeTypeMap.loadEntry("video/mpeg", "mpeg"); 
sMimeTypeMap.loadEntry("video/mpeg", "mpg"); sMimeTypeMap.loadEntry("video/mpeg", "mpe"); sMimeTypeMap.loadEntry("video/mp4", "mp4"); sMimeTypeMap.loadEntry("video/mpeg", "VOB"); sMimeTypeMap.loadEntry("video/quicktime", "qt"); sMimeTypeMap.loadEntry("video/quicktime", "mov"); sMimeTypeMap.loadEntry("video/vnd.mpegurl", "mxu"); sMimeTypeMap.loadEntry("video/x-la-asf", "lsf"); sMimeTypeMap.loadEntry("video/x-la-asf", "lsx"); sMimeTypeMap.loadEntry("video/x-mng", "mng"); sMimeTypeMap.loadEntry("video/x-ms-asf", "asf"); sMimeTypeMap.loadEntry("video/x-ms-asf", "asx"); sMimeTypeMap.loadEntry("video/x-ms-wm", "wm"); sMimeTypeMap.loadEntry("video/x-ms-wmv", "wmv"); sMimeTypeMap.loadEntry("video/x-ms-wmx", "wmx"); sMimeTypeMap.loadEntry("video/x-ms-wvx", "wvx"); sMimeTypeMap.loadEntry("video/x-msvideo", "avi"); sMimeTypeMap.loadEntry("video/x-sgi-movie", "movie"); sMimeTypeMap.loadEntry("x-conference/x-cooltalk", "ice"); sMimeTypeMap.loadEntry("x-epoc/x-sisx-app", "sisx"); // Some more mime-pairs sMimeTypeMap.loadEntry("video/vnd.rn-realmedia", "rmvb"); sMimeTypeMap.loadEntry("video/vnd.rn-realmedia", "rm"); sMimeTypeMap.loadEntry("video/vnd.rn-realvideo", "rv"); sMimeTypeMap.loadEntry("video/x-flv", "flv"); sMimeTypeMap.loadEntry("video/x-flv", "hlv"); sMimeTypeMap.loadEntry("video/x-matroska", "mkv"); sMimeTypeMap.loadEntry("audio/vnd.rn-realaudio", "ra"); sMimeTypeMap.loadEntry("audio/vnd.rn-realaudio", "ram"); sMimeTypeMap.loadEntry("text/plain", "lrc"); sMimeTypeMap.loadEntry("application/json", "json"); // End more mime-pairs } return sMimeTypeMap; } }
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.repository.dialog; import java.util.regex.Pattern; import org.eclipse.swt.SWT; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Dialog; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.eclipse.swt.widgets.ToolBar; import org.eclipse.swt.widgets.ToolItem; import org.eclipse.swt.widgets.Tree; import org.eclipse.swt.widgets.TreeColumn; import org.eclipse.swt.widgets.TreeItem; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.repository.RepositoryDirectory; import org.pentaho.di.repository.RepositoryDirectoryInterface; import org.pentaho.di.repository.RepositoryElementMetaInterface; import org.pentaho.di.repository.RepositoryObjectType; import org.pentaho.di.ui.core.ConstUI; import org.pentaho.di.ui.core.PropsUI; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.core.gui.WindowProperty; import org.pentaho.di.ui.repository.RepositoryDirectoryUI; import org.pentaho.di.ui.trans.step.BaseStepDialog; /** * Allows the user to make a selection of an Object in the repository * * @author Matt * @since 19-06-2003 */ public class SelectObjectDialog extends Dialog { private static Class<?> PKG = RepositoryDialogInterface.class; // for // i18n // purposes, // needed // by // Translator2!! 
// $NON-NLS-1$ private Label wlTree; private Tree wTree; private FormData fdlTree, fdTree, fdexpandAll; private Button wOK, wCancel; private Listener lsOK, lsCancel; private Shell shell; private SelectionAdapter lsDef; private String shellText; private String lineText; private PropsUI props; private Color dircolor; private Repository rep; private String objectName; private RepositoryDirectoryInterface objectDirectory; private TreeColumn nameColumn; private TreeColumn userColumn; private TreeColumn changedColumn; private int sortColumn; private boolean ascending; private TreeColumn typeColumn; private RepositoryObjectType objectType; private boolean showTrans; private boolean showJobs; private TreeColumn descriptionColumn; private ToolItem expandAll, collapseAll, goSearch, wfilter; private String filterString = null; private Text searchText = null; private Pattern pattern = null; private RepositoryDirectoryInterface directoryTree; // private RepositoryCapabilities capabilities; private boolean includeDeleted; private ToolItem wbRegex; private RepositoryElementMetaInterface repositoryObject; private ObjectId objectId; public SelectObjectDialog(Shell parent, Repository rep) { this(parent, rep, true, true); } public SelectObjectDialog(Shell parent, Repository rep, boolean showTransformations, boolean showJobs) { super(parent, SWT.NONE); this.props = PropsUI.getInstance(); this.rep = rep; this.showTrans = showTransformations; this.showJobs = showJobs; // TODO: make this a configuration option in the dialog! // this.includeDeleted = false; shellText = BaseMessages.getString(PKG, "SelectObjectDialog.Dialog.Main.Title"); //$NON-NLS-1$ lineText = BaseMessages.getString(PKG, "SelectObjectDialog.Dialog.Object.Title"); //$NON-NLS-1$ objectName = null; objectDirectory = null; sortColumn = 0; ascending = false; } public String open() { Shell parent = getParent(); dircolor = GUIResource.getInstance().getColorDirectory(); shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.APPLICATION_MODAL | SWT.SHEET| SWT.RESIZE | SWT.MIN | SWT.MAX); props.setLook(shell); shell.setImage(GUIResource.getInstance().getImageFolderConnections()); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout(formLayout); shell.setText(shellText); int margin = Const.MARGIN; ToolBar treeTb = new ToolBar(shell, SWT.HORIZONTAL | SWT.FLAT); wfilter = new ToolItem(treeTb, SWT.SEPARATOR); searchText = new Text(treeTb, SWT.SEARCH | SWT.CANCEL); searchText.setToolTipText(BaseMessages.getString(PKG, "RepositoryExplorerDialog.Search.FilterString.ToolTip")); wfilter.setControl(searchText); wfilter.setWidth(100); wbRegex = new ToolItem(treeTb, SWT.CHECK); wbRegex.setImage(GUIResource.getInstance().getImageRegexSmall()); wbRegex.setToolTipText(BaseMessages.getString(PKG, "RepositoryExplorerDialog.Search.UseRegex")); goSearch = new ToolItem(treeTb, SWT.PUSH); goSearch.setImage(GUIResource.getInstance().getImageSearchSmall()); goSearch.setToolTipText(BaseMessages.getString(PKG, "RepositoryExplorerDialog.Search.Run")); expandAll = new ToolItem(treeTb, SWT.PUSH); expandAll.setImage(GUIResource.getInstance().getImageExpandAll()); collapseAll = new ToolItem(treeTb, SWT.PUSH); collapseAll.setImage(GUIResource.getInstance().getImageCollapseAll()); fdexpandAll = new FormData(); fdexpandAll.right = new FormAttachment(100, -margin); fdexpandAll.top = new FormAttachment(0, margin); treeTb.setLayoutData(fdexpandAll); // From step line wlTree = new Label(shell, SWT.NONE); 
wlTree.setText(lineText); props.setLook(wlTree); fdlTree = new FormData(); fdlTree.left = new FormAttachment(0, 0); fdlTree.top = new FormAttachment(0, margin); wlTree.setLayoutData(fdlTree); wTree = new Tree(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL); wTree.setHeaderVisible(true); props.setLook(wTree); // Add some columns to it as well... nameColumn = new TreeColumn(wTree, SWT.LEFT); nameColumn.setText(BaseMessages.getString(PKG, "RepositoryExplorerDialog.Column.Name")); //$NON-NLS-1$ nameColumn.setWidth(350); nameColumn.addListener(SWT.Selection, new Listener() { public void handleEvent(Event e) { setSort(0); } }); // No sorting on the type column just yet. typeColumn = new TreeColumn(wTree, SWT.LEFT); typeColumn.setText(BaseMessages.getString(PKG, "RepositoryExplorerDialog.Column.Type")); //$NON-NLS-1$ typeColumn.setWidth(100); typeColumn.addListener(SWT.Selection, new Listener() { public void handleEvent(Event e) { setSort(1); } }); userColumn = new TreeColumn(wTree, SWT.LEFT); userColumn.setText(BaseMessages.getString(PKG, "RepositoryExplorerDialog.Column.User")); //$NON-NLS-1$ userColumn.setWidth(100); userColumn.addListener(SWT.Selection, new Listener() { public void handleEvent(Event e) { setSort(2); } }); changedColumn = new TreeColumn(wTree, SWT.LEFT); changedColumn.setText(BaseMessages.getString(PKG, "RepositoryExplorerDialog.Column.Changed")); //$NON-NLS-1$ changedColumn.setWidth(120); changedColumn.addListener(SWT.Selection, new Listener() { public void handleEvent(Event e) { setSort(3); } }); descriptionColumn = new TreeColumn(wTree, SWT.LEFT); descriptionColumn.setText(BaseMessages.getString(PKG, "RepositoryExplorerDialog.Column.Description")); //$NON-NLS-1$ descriptionColumn.setWidth(120); descriptionColumn.addListener(SWT.Selection, new Listener() { public void handleEvent(Event e) { setSort(4); } }); props.setLook(wTree); fdTree = new FormData(); fdTree.left = new FormAttachment(0, 0); fdTree.right = new FormAttachment(100, 0); fdTree.top = new FormAttachment(treeTb, margin); fdTree.bottom = new FormAttachment(100, -30); wTree.setLayoutData(fdTree); // Some buttons wOK = new Button(shell, SWT.PUSH); wOK.setText(BaseMessages.getString(PKG, "System.Button.OK"));//$NON-NLS-1$ lsOK = new Listener() { public void handleEvent(Event e) { ok(); } }; wOK.addListener(SWT.Selection, lsOK); wOK.setEnabled(false); wCancel = new Button(shell, SWT.PUSH); wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel")); //$NON-NLS-1$ lsCancel = new Listener() { public void handleEvent(Event e) { cancel(); } }; wCancel.addListener(SWT.Selection, lsCancel); BaseStepDialog.positionBottomButtons(shell, new Button[] { wOK, wCancel }, margin, null); // Add listeners lsDef = new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e) { ok(); } }; wTree.addSelectionListener(lsDef); wTree.addKeyListener(new KeyAdapter() { public void keyPressed(KeyEvent e) { if (e.character == SWT.CR) ok(); } }); wTree.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { wOK.setEnabled(!wTree.getSelection()[0].getForeground().equals(dircolor)); } }); expandAll.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent event) { expandAllItems(wTree.getItems(), true); } }); collapseAll.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent event) { expandAllItems(wTree.getItems(), false); } }); goSearch.addSelectionListener(new SelectionAdapter() { public void 
widgetSelected(SelectionEvent event) { updateFilter(); } }); searchText.addSelectionListener(new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e) { updateFilter(); } }); // Detect [X] or ALT-F4 or something that kills this window... shell.addShellListener(new ShellAdapter() { public void shellClosed(ShellEvent e) { cancel(); } }); try { directoryTree = rep.loadRepositoryDirectoryTree(); } catch (KettleException e) { new ErrorDialog(shell, BaseMessages.getString(PKG, "SelectObjectDialog.Dialog.ErrorRefreshingDirectoryTree.Title"), BaseMessages.getString(PKG, "SelectObjectDialog.Dialog.ErrorRefreshingDirectoryTree.Message"), e); //$NON-NLS-1$ //$NON-NLS-2$ } getData(); wTree.setFocus(); BaseStepDialog.setSize(shell); shell.open(); while (!shell.isDisposed()) { if (!shell.getDisplay().readAndDispatch()) shell.getDisplay().sleep(); } return objectName; } private void expandAllItems(TreeItem[] treeitems, boolean expand) { for (TreeItem item : treeitems) { item.setExpanded(expand); if (item.getItemCount() > 0) expandAllItems(item.getItems(), expand); } } protected void updateFilter() { pattern = null; filterString = null; if (searchText != null && !searchText.isDisposed() && !Const.isEmpty(searchText.getText())) { if (wbRegex.getSelection()) { pattern = Pattern.compile(searchText.getText()); } else { filterString = searchText.getText().toUpperCase(); } } refreshTree(); if ((wbRegex.getSelection() && pattern != null) || (!wbRegex.getSelection() && filterString != null)) { while (getNrEmptyFolders(wTree.getItems()) > 0) { removeEmptyFolders(wTree.getItems()); try { Thread.sleep(0, 1); } catch (InterruptedException e) { } } expandAllItems(wTree.getItems(), true); } } private void removeEmptyFolders(TreeItem[] treeitems) { for (TreeItem item : treeitems) { if (item.getImage().equals(GUIResource.getInstance().getImageArrow()) && item.getItemCount() == 0) item.dispose(); else removeEmptyFolders(item.getItems()); } } private int getNrEmptyFolders(TreeItem[] treeitems) { int retval = 0; for (TreeItem item : treeitems) { if (item.getImage().equals(GUIResource.getInstance().getImageArrow()) && item.getItemCount() == 0) retval++; else retval += getNrEmptyFolders(item.getItems()); } return retval; } protected void setSort(int i) { if (sortColumn == i) { ascending = !ascending; } else { sortColumn = i; ascending = true; } if (sortColumn >= 0 && sortColumn < 5) { TreeColumn column = wTree.getColumn(sortColumn); wTree.setSortColumn(column); wTree.setSortDirection(ascending ? 
SWT.UP : SWT.DOWN); } refreshTree(); } private void refreshTree() { try { wTree.removeAll(); TreeItem ti = null; // If the directory is a root directory and is visible to the user we will // display that on the UI otherwise we will hide it if (directoryTree.isRoot() && directoryTree.isVisible()) { ti = new TreeItem(wTree, SWT.NONE); ti.setImage(GUIResource.getInstance().getImageFolderConnections()); ti.setExpanded(true); RepositoryDirectoryUI.getTreeWithNames(ti, rep, dircolor, sortColumn, includeDeleted, ascending, showTrans, showJobs, directoryTree, filterString, pattern); } else { for (int i = 0; i < directoryTree.getNrSubdirectories(); i++) { RepositoryDirectory subdir = directoryTree.getSubdirectory(i); ti = new TreeItem(wTree, SWT.NONE); ti.setImage(GUIResource.getInstance().getImageArrow()); RepositoryDirectoryUI.getTreeWithNames(ti, rep, dircolor, sortColumn, includeDeleted, ascending, showTrans, showJobs, subdir, filterString, pattern); } } } catch (KettleException e) { new ErrorDialog(shell, BaseMessages.getString(PKG, "SelectObjectDialog.Dialog.UnexpectedError.Title"), BaseMessages.getString(PKG, "SelectObjectDialog.Dialog.UnexpectedError.Message"), e); //$NON-NLS-1$ //$NON-NLS-2$ } } public void dispose() { props.setScreen(new WindowProperty(shell)); shell.dispose(); } public void getData() { setSort(0); } private void cancel() { objectName = null; dispose(); } private void ok() { // Something has to be selected! if (wTree.getSelectionCount() > 0) { TreeItem ti = wTree.getSelection()[0]; // No directory! if (!ti.getForeground().equals(dircolor)) { int level = ConstUI.getTreeLevel(ti); if (level > 0) { repositoryObject = (RepositoryElementMetaInterface) ti.getData(); if (repositoryObject != null) { objectName = repositoryObject.getName(); objectDirectory = repositoryObject.getRepositoryDirectory(); objectId = repositoryObject.getObjectId(); objectType = repositoryObject.getObjectType(); } else { // For backward compatibility, we should rarely end up here... // String path[] = ConstUI.getTreeStrings(ti.getParentItem()); objectName = ti.getText(0); objectType = null; for (RepositoryObjectType type : RepositoryObjectType.values()) { if (type.getTypeDescription().equalsIgnoreCase(ti.getText(1))) { objectType = type; break; } } objectDirectory = directoryTree.findDirectory(path); } if (objectDirectory != null) { dispose(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); mb.setMessage(BaseMessages.getString(PKG, "SelectObjectDialog.Dialog.DirectoryNotFound.Message")); //$NON-NLS-1$ mb.setText(BaseMessages.getString(PKG, "SelectObjectDialog.Dialog.DirectoryNotFound.Title")); //$NON-NLS-1$ mb.open(); } } } } } public RepositoryDirectoryInterface getDirectory() { return objectDirectory; } /** * @return the objectType */ public RepositoryObjectType getObjectType() { return objectType; } /** * @return the objectName */ public String getObjectName() { return objectName; } /** * @return the repositoryObject */ public RepositoryElementMetaInterface getRepositoryObject() { return repositoryObject; } /** * @param repositoryObject * the repositoryObject to set */ public void setRepositoryObject(RepositoryElementMetaInterface repositoryObject) { this.repositoryObject = repositoryObject; } /** * @return the objectId */ public ObjectId getObjectId() { return objectId; } /** * @param objectId * the objectId to set */ public void setObjectId(ObjectId objectId) { this.objectId = objectId; } }
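/*
 * Illustrative usage sketch (not part of SelectObjectDialog.java above). It shows how caller
 * code typically drives the dialog: open() blocks until the user picks a repository object or
 * cancels (returning null), and the getters expose the selection. The "parentShell" and
 * "repository" parameters are assumed to come from the surrounding Spoon UI code.
 */
import org.eclipse.swt.widgets.Shell;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryObjectType;
import org.pentaho.di.ui.repository.dialog.SelectObjectDialog;

class SelectObjectDialogUsageSketch {
    /** Returns the chosen object name, or null if the user cancelled. */
    static String pickTransformation(Shell parentShell, Repository repository) {
        // Show transformations only, hide jobs.
        SelectObjectDialog dialog = new SelectObjectDialog(parentShell, repository, true, false);
        String objectName = dialog.open();
        if (objectName != null) {
            RepositoryDirectoryInterface directory = dialog.getDirectory();
            ObjectId objectId = dialog.getObjectId();
            RepositoryObjectType objectType = dialog.getObjectType();
            // The caller would now load the object, e.g. via repository.loadTransformation(...).
        }
        return objectName;
    }
}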
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ package org.apache.ofbiz.accounting.thirdparty.worldpay; import java.io.IOException; import java.math.BigDecimal; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.base.util.UtilDateTime; import org.apache.ofbiz.base.util.UtilFormatOut; import org.apache.ofbiz.base.util.UtilHttp; import org.apache.ofbiz.base.util.UtilMisc; import org.apache.ofbiz.base.util.UtilProperties; import org.apache.ofbiz.base.util.UtilValidate; import org.apache.ofbiz.entity.Delegator; import org.apache.ofbiz.entity.GenericEntityException; import org.apache.ofbiz.entity.GenericValue; import org.apache.ofbiz.entity.transaction.GenericTransactionException; import org.apache.ofbiz.entity.transaction.TransactionUtil; import org.apache.ofbiz.entity.util.EntityQuery; import org.apache.ofbiz.entity.util.EntityUtilProperties; import org.apache.ofbiz.order.order.OrderChangeHelper; import org.apache.ofbiz.product.store.ProductStoreWorker; import org.apache.ofbiz.service.GenericServiceException; import org.apache.ofbiz.service.LocalDispatcher; import org.apache.ofbiz.service.ModelService; /** * WorldPay Select Junior Integration Events/Services */ public class WorldPayEvents { public static final String resource = "AccountingUiLabels"; public static final String resourceErr = "AccountingErrorUiLabels"; public static final String commonResource = "CommonUiLabels"; public static final String module = WorldPayEvents.class.getName(); public static String worldPayRequest(HttpServletRequest request, HttpServletResponse response) { Locale locale = UtilHttp.getLocale(request); Delegator delegator = (Delegator) request.getAttribute("delegator"); // get the orderId from the request, stored by previous event(s) String orderId = (String) request.getAttribute("orderId"); // get the order header GenericValue orderHeader = null; try { orderHeader = EntityQuery.use(delegator).from("OrderHeader").where("orderId", orderId).queryOne(); } catch (GenericEntityException e) { Debug.logError(e, "Cannot get the order header for order: " + orderId, module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.problemsGettingOrderHeader", locale)); return "error"; } // get the order total String orderTotal = orderHeader.getBigDecimal("grandTotal").toPlainString(); // get the product store GenericValue productStore = ProductStoreWorker.getProductStore(request); 
if (productStore == null) { Debug.logError("ProductStore is null", module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.problemsGettingMerchantConfiguration", locale)); return "error"; } // get the payment properties file GenericValue paymentConfig = ProductStoreWorker.getProductStorePaymentSetting(delegator, productStore.getString("productStoreId"), "EXT_WORLDPAY", null, true); String configString = null; String paymentGatewayConfigId = null; if (paymentConfig != null) { paymentGatewayConfigId = paymentConfig.getString("paymentGatewayConfigId"); configString = paymentConfig.getString("paymentPropertiesPath"); } if (configString == null) { configString = "payment.properties"; } String redirectURL = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "redirectUrl", configString, "payment.worldpay.redirectUrl", ""); String instId = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "instId", configString, "payment.worldpay.instId", "NONE"); String authMode = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "authMode", configString, "payment.worldpay.authMode", "A"); String fixContact = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "fixContact", configString, "payment.worldpay.fixContact", "N"); String hideContact = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "hideContact", configString, "payment.worldpay.hideContact", "N"); String hideCurrency = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "hideCurrency", configString, "payment.worldpay.hideCurrency", "N"); String langId = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "langId", configString, "payment.worldpay.langId", ""); String noLanguageMenu = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "noLanguageMenu", configString, "payment.worldpay.noLanguageMenu", "N"); String withDelivery = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "withDelivery", configString, "payment.worldpay.withDelivery", "N"); String testMode = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, "testMode", configString, "payment.worldpay.testMode", "100"); // get the contact address to pass over GenericValue contactAddress = null; GenericValue contactAddressShip = null; GenericValue addressOcm = null; GenericValue shippingAddress = null; try { addressOcm = EntityQuery.use(delegator).from("OrderContactMech").where("orderId", orderId, "contactMechPurposeTypeId", "BILLING_LOCATION").queryFirst(); shippingAddress = EntityQuery.use(delegator).from("OrderContactMech").where("orderId", orderId, "contactMechPurposeTypeId", "SHIPPING_LOCATION").queryFirst(); if (addressOcm == null) { addressOcm = shippingAddress; } contactAddress = EntityQuery.use(delegator).from("PostalAddress").where("contactMechId", addressOcm.getString("contactMechId")).queryOne(); } catch (GenericEntityException e) { Debug.logWarning(e, "Problems getting order contact information", module); } // get the country geoID GenericValue countryGeo = null; String country = ""; if (contactAddress != null) { try { countryGeo = contactAddress.getRelatedOne("CountryGeo", false); if (countryGeo != null) { country = countryGeo.getString("geoCode"); } } catch (GenericEntityException e) { Debug.logWarning(e, "Problems getting country geo entity", module); } } // string of customer's name String name = ""; if (contactAddress != null) { if (UtilValidate.isNotEmpty(contactAddress.getString("attnName"))) name = 
contactAddress.getString("attnName"); else if (UtilValidate.isNotEmpty(contactAddress.getString("toName"))) name = contactAddress.getString("toName"); } // build an address string StringBuilder address = new StringBuilder(); String postalCode = ""; if (contactAddress != null) { if (contactAddress.get("address1") != null) { address.append(contactAddress.getString("address1").trim()); } if (contactAddress.get("address2") != null) { if (address.length() > 0) { address.append("&#10;"); } address.append(contactAddress.getString("address2").trim()); } if (contactAddress.get("city") != null) { if (address.length() > 0) { address.append("&#10;"); } address.append(contactAddress.getString("city").trim()); } if (contactAddress.get("stateProvinceGeoId") != null) { if (contactAddress.get("city") != null) { address.append(", "); } address.append(contactAddress.getString("stateProvinceGeoId").trim()); } if (contactAddress.get("postalCode") != null) { postalCode = contactAddress.getString("postalCode"); } } // get the email address to pass over String emailAddress = null; GenericValue emailContact = null; try { GenericValue emailOcm = EntityQuery.use(delegator).from("OrderContactMech").where("orderId", orderId, "contactMechPurposeTypeId", "ORDER_EMAIL").queryFirst(); emailContact = emailOcm.getRelatedOne("ContactMech", false); emailAddress = emailContact.getString("infoString"); } catch (GenericEntityException e) { Debug.logWarning(e, "Problems getting order email address", module); } // build an shipping address string StringBuilder shipAddress = new StringBuilder(); String shipPostalCode = ""; String shipName = ""; if (shippingAddress != null) { try { contactAddressShip = EntityQuery.use(delegator).from("PostalAddress").where("contactMechId", shippingAddress.get("contactMechId")).queryOne(); if (UtilValidate.isNotEmpty(contactAddressShip)) { if (UtilValidate.isNotEmpty(contactAddressShip.getString("attnName"))) { shipName = contactAddressShip.getString("attnName"); } else if (UtilValidate.isNotEmpty(contactAddressShip.getString("toName"))) { shipName = contactAddressShip.getString("toName"); } if (contactAddressShip.get("address1") != null) { shipAddress.append(contactAddressShip.getString("address1").trim()); } if (contactAddressShip.get("address2") != null) { if (shipAddress.length() > 0) { shipAddress.append("&#10;"); } shipAddress.append(contactAddressShip.getString("address2").trim()); } if (contactAddressShip.get("city") != null) { if (shipAddress.length() > 0) { shipAddress.append("&#10;"); } shipAddress.append(contactAddressShip.getString("city").trim()); } if (contactAddressShip.get("stateProvinceGeoId") != null) { if (contactAddressShip.get("city") != null) { shipAddress.append(", "); } shipAddress.append(contactAddressShip.getString("stateProvinceGeoId").trim()); } if (contactAddressShip.get("postalCode") != null) { shipPostalCode = contactAddressShip.getString("postalCode"); } } } catch (GenericEntityException e) { Debug.logWarning(e, "Problems getting shipping address", module); } } // get the company name String company = UtilFormatOut.checkEmpty(productStore.getString("companyName"), ""); // get the currency String defCur = UtilFormatOut.checkEmpty(productStore.getString("defaultCurrencyUomId"), "USD"); // order description String description = UtilProperties.getMessage(resource, "AccountingOrderNr", locale) + orderId + " " + (company != null ? 
UtilProperties.getMessage(commonResource, "CommonFrom", locale) + " "+ company : ""); // check the instId - very important if (instId == null || instId.equals("NONE")) { Debug.logError("Worldpay InstId not found, cannot continue", module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.problemsGettingInstId", locale)); return "error"; } try { Integer.parseInt(instId); } catch (NumberFormatException nfe) { Debug.logError(nfe, "Problem converting instId string to integer", module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.problemsGettingInstIdToInteger", locale)); return "error"; } // check the testMode if (testMode != null) { try { Integer.parseInt(testMode); } catch (NumberFormatException nfe) { Debug.logWarning(nfe, "Problems getting the testMode value, setting to 0", module); } } // create the redirect string Map<String, Object> parameters = new HashMap<String, Object>(); parameters.put("instId", instId); parameters.put("cartId", orderId); parameters.put("currency", defCur); parameters.put("amount", orderTotal); parameters.put("desc", description); parameters.put("testMode", testMode); parameters.put("authMode", authMode); parameters.put("name", name); parameters.put("address", address.toString()); parameters.put("country", country); parameters.put("postcode", postalCode); parameters.put("email", emailAddress); if (UtilValidate.isNotEmpty(shipName)) { parameters.put("M_shipping_name", shipName); if (UtilValidate.isNotEmpty(shipAddress.toString())) { parameters.put("M_shipping_address", shipAddress.toString()); } if (UtilValidate.isNotEmpty(shipPostalCode)) { parameters.put("M_shipping_postcode", shipPostalCode); } } if ("Y".equals(fixContact)) { parameters.put("fixContact", ""); } if ("Y".equals(hideContact)) { parameters.put("hideContact", ""); } if ("Y".equals(hideCurrency)) { parameters.put("hideCurrency", ""); } if ("Y".equals(noLanguageMenu)) { parameters.put("noLanguageMenu", ""); } if ("Y".equals(withDelivery)) { parameters.put("withDelivery", ""); } if (UtilValidate.isNotEmpty(langId)) { parameters.put("langId", langId); } // create the redirect URL String encodedParameters = UtilHttp.urlEncodeArgs(parameters, false); String redirectString = redirectURL + "?" 
+ encodedParameters; // redirect to WorldPay try { response.sendRedirect(redirectString); } catch (IOException e) { Debug.logError(e, "Problems redirecting to WorldPay", module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.problemsConnectingWithWorldPay", locale)); return "error"; } return "success"; } /** WorldPay notification */ public static String worldPayNotify(HttpServletRequest request, HttpServletResponse response) { Locale locale = UtilHttp.getLocale(request); Delegator delegator = (Delegator) request.getAttribute("delegator"); LocalDispatcher dispatcher = (LocalDispatcher) request.getAttribute("dispatcher"); GenericValue userLogin = (GenericValue) request.getSession().getAttribute("userLogin"); Map <String, Object> parametersMap = UtilHttp.getParameterMap(request); String orderId = request.getParameter("cartId"); for (String name : parametersMap.keySet()) { String value = request.getParameter(name); Debug.logError("### Param: " + name + " => " + value, module); } // get the user if (userLogin == null) { String userLoginId = "system"; try { userLogin = EntityQuery.use(delegator).from("UserLogin").where("userLoginId", userLoginId).queryOne(); } catch (GenericEntityException e) { Debug.logError(e, "Cannot get UserLogin for: " + userLoginId + "; cannot continue", module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.problemsGettingAuthenticationUser", locale)); return "error"; } } // get the order header GenericValue orderHeader = null; if (UtilValidate.isNotEmpty(orderId)) { try { orderHeader = EntityQuery.use(delegator).from("OrderHeader").where("orderId", orderId).queryOne(); } catch (GenericEntityException e) { Debug.logError(e, "Cannot get the order header for order: " + orderId, module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.problemsGettingOrderHeader", locale)); return "error"; } } else { Debug.logError("WorldPay did not callback with a valid orderId!", module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.noValidOrderIdReturned", locale)); return "error"; } if (orderHeader == null) { Debug.logError("Cannot get the order header for order: " + orderId, module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.problemsGettingOrderHeader", locale)); return "error"; } // get the transaction status String paymentStatus = request.getParameter("transStatus"); // attempt to start a transaction boolean okay = true; boolean beganTransaction = false; try { beganTransaction = TransactionUtil.begin(); // authorized if ("Y".equals(paymentStatus)) { okay = OrderChangeHelper.approveOrder(dispatcher, userLogin, orderId); // cancelled } else if ("C".equals(paymentStatus)) { okay = OrderChangeHelper.cancelOrder(dispatcher, userLogin, orderId); } if (okay) { // set the payment preference okay = setPaymentPreferences(delegator, dispatcher, userLogin, orderId, request); } } catch (Exception e) { String errMsg = "Error handling WorldPay notification"; Debug.logError(e, errMsg, module); try { TransactionUtil.rollback(beganTransaction, errMsg, e); } catch (GenericTransactionException gte2) { Debug.logError(gte2, "Unable to rollback transaction", module); } } finally { if (!okay) { try { TransactionUtil.rollback(beganTransaction, "Failure in processing WorldPay callback", null); } catch (GenericTransactionException gte) { Debug.logError(gte, "Unable to 
rollback transaction", module); } } else { try { TransactionUtil.commit(beganTransaction); } catch (GenericTransactionException gte) { Debug.logError(gte, "Unable to commit transaction", module); } } } if (okay) { // attempt to release the offline hold on the order (workflow) OrderChangeHelper.releaseInitialOrderHold(dispatcher, orderId); // call the email confirm service Map<String, Object> emailContext = UtilMisc.toMap("orderId", orderId, "userLogin", userLogin); try { dispatcher.runSync("sendOrderConfirmation", emailContext); } catch (GenericServiceException e) { Debug.logError(e, "Problems sending email confirmation", module); } } return "success"; } private static boolean setPaymentPreferences(Delegator delegator, LocalDispatcher dispatcher, GenericValue userLogin, String orderId, HttpServletRequest request) { Debug.logVerbose("Setting payment preferences..", module); List<GenericValue> paymentPrefs = null; try { paymentPrefs = EntityQuery.use(delegator).from("OrderPaymentPreference") .where("orderId", orderId, "statusId", "PAYMENT_NOT_RECEIVED").queryList(); } catch (GenericEntityException e) { Debug.logError(e, "Cannot get payment preferences for order #" + orderId, module); return false; } if (paymentPrefs.size() > 0) { for (GenericValue pref : paymentPrefs) { boolean okay = setPaymentPreference(dispatcher, userLogin, pref, request); if (!okay) { return false; } } } return true; } private static boolean setPaymentPreference(LocalDispatcher dispatcher, GenericValue userLogin, GenericValue paymentPreference, HttpServletRequest request) { Locale locale = UtilHttp.getLocale(request); String paymentStatus = request.getParameter("transStatus"); String paymentAmount = request.getParameter("authAmount"); Long paymentDate = new Long(request.getParameter("transTime")); String transactionId = request.getParameter("transId"); String gatewayFlag = request.getParameter("rawAuthCode"); String avs = request.getParameter("AVS"); List<GenericValue> toStore = new LinkedList<GenericValue>(); java.sql.Timestamp authDate = null; try { authDate = new java.sql.Timestamp(paymentDate.longValue()); } catch (Exception e) { Debug.logError(e, "Cannot create date from long: " + paymentDate, module); authDate = UtilDateTime.nowTimestamp(); } paymentPreference.set("maxAmount", new BigDecimal(paymentAmount)); if ("Y".equals(paymentStatus)) { paymentPreference.set("statusId", "PAYMENT_RECEIVED"); } else if ("C".equals(paymentStatus)) { paymentPreference.set("statusId", "PAYMENT_CANCELLED"); } else { paymentPreference.set("statusId", "PAYMENT_NOT_RECEIVED"); } toStore.add(paymentPreference); Delegator delegator = paymentPreference.getDelegator(); // create the PaymentGatewayResponse String responseId = delegator.getNextSeqId("PaymentGatewayResponse"); GenericValue response = delegator.makeValue("PaymentGatewayResponse"); response.set("paymentGatewayResponseId", responseId); response.set("paymentServiceTypeEnumId", "PRDS_PAY_EXTERNAL"); response.set("orderPaymentPreferenceId", paymentPreference.get("orderPaymentPreferenceId")); response.set("paymentMethodTypeId", paymentPreference.get("paymentMethodTypeId")); response.set("paymentMethodId", paymentPreference.get("paymentMethodId")); // set the auth info response.set("amount", new BigDecimal(paymentAmount)); response.set("referenceNum", transactionId); response.set("gatewayCode", paymentStatus); response.set("gatewayFlag", gatewayFlag); response.set("transactionDate", authDate); response.set("gatewayAvsResult", avs); response.set("gatewayCvResult", avs.substring(0, 
1)); toStore.add(response); try { delegator.storeAll(toStore); } catch (GenericEntityException e) { Debug.logError(e, "Cannot set payment preference/payment info", module); return false; } // create a payment record too Map<String, Object> results = null; try { String comment = UtilProperties.getMessage(resource, "AccountingPaymentReceiveViaWorldPay", locale); results = dispatcher.runSync("createPaymentFromPreference", UtilMisc.toMap("userLogin", userLogin, "orderPaymentPreferenceId", paymentPreference.get("orderPaymentPreferenceId"), "comments", comment)); } catch (GenericServiceException e) { Debug.logError(e, "Failed to execute service createPaymentFromPreference", module); request.setAttribute("_ERROR_MESSAGE_", UtilProperties.getMessage(resourceErr, "worldPayEvents.failedToExecuteServiceCreatePaymentFromPreference", locale)); return false; } if ((results == null) || (results.get(ModelService.RESPONSE_MESSAGE).equals(ModelService.RESPOND_ERROR))) { Debug.logError((String) results.get(ModelService.ERROR_MESSAGE), module); request.setAttribute("_ERROR_MESSAGE_", (String) results.get(ModelService.ERROR_MESSAGE)); return false; } return true; } private static String getPaymentGatewayConfigValue(Delegator delegator, String paymentGatewayConfigId, String paymentGatewayConfigParameterName, String resource, String parameterName) { String returnValue = ""; if (UtilValidate.isNotEmpty(paymentGatewayConfigId)) { try { GenericValue worldPay = EntityQuery.use(delegator).from("PaymentGatewayWorldPay").where("paymentGatewayConfigId", paymentGatewayConfigId).queryOne(); if (UtilValidate.isNotEmpty(worldPay)) { Object worldPayField = worldPay.get(paymentGatewayConfigParameterName); if (worldPayField != null) { returnValue = worldPayField.toString().trim(); } } } catch (GenericEntityException e) { Debug.logError(e, module); } } else { String value = EntityUtilProperties.getPropertyValue(resource, parameterName, delegator); if (value != null) { returnValue = value.trim(); } } return returnValue; } private static String getPaymentGatewayConfigValue(Delegator delegator, String paymentGatewayConfigId, String paymentGatewayConfigParameterName, String resource, String parameterName, String defaultValue) { String returnValue = getPaymentGatewayConfigValue(delegator, paymentGatewayConfigId, paymentGatewayConfigParameterName, resource, parameterName); if (UtilValidate.isEmpty(returnValue)) { returnValue = defaultValue; } return returnValue; } }
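/*
 * Illustrative sketch (not part of WorldPayEvents.java above). It mirrors how the tail of
 * worldPayRequest(...) assembles the redirect query string with UtilHttp.urlEncodeArgs.
 * Every literal below is a placeholder; in the real flow the values come from the order,
 * the product store and the payment gateway configuration, and redirectURL comes from the
 * "redirectUrl" gateway setting.
 */
import java.util.HashMap;
import java.util.Map;

import org.apache.ofbiz.base.util.UtilHttp;

class WorldPayRedirectSketch {
    static String buildRedirect(String redirectURL) {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("instId", "12345");    // placeholder installation id
        parameters.put("cartId", "WS10000");  // placeholder order id
        parameters.put("currency", "USD");
        parameters.put("amount", "49.99");
        parameters.put("testMode", "100");    // placeholder test-mode flag, as read from config above
        // Same encoding call the event handler uses above.
        return redirectURL + "?" + UtilHttp.urlEncodeArgs(parameters, false);
    }
}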
/* * Copyright 2011 The Kuali Foundation. * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.module.endow.batch.service.impl; import java.sql.Date; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.kuali.kfs.module.endow.EndowPropertyConstants; import org.kuali.kfs.module.endow.batch.service.TicklerDeliveryService; import org.kuali.kfs.module.endow.businessobject.Tickler; import org.kuali.kfs.module.endow.businessobject.TicklerDeliveryStatisticsReportDetailTableRow; import org.kuali.kfs.module.endow.businessobject.TicklerRecipientGroup; import org.kuali.kfs.module.endow.businessobject.TicklerRecipientPrincipal; import org.kuali.kfs.module.endow.document.service.KEMService; import org.kuali.kfs.sys.KFSConstants; import org.kuali.kfs.sys.context.SpringContext; import org.kuali.kfs.sys.service.ReportWriterService; import org.kuali.kfs.sys.service.UniversityDateService; import org.kuali.rice.kew.api.KewApiConstants; import org.kuali.rice.kew.api.exception.WorkflowException; import org.kuali.rice.kns.document.MaintenanceDocument; import org.kuali.rice.kns.maintenance.KualiMaintainableImpl; import org.kuali.rice.kns.service.MaintenanceDocumentDictionaryService; import org.kuali.rice.krad.bo.AdHocRoutePerson; import org.kuali.rice.krad.bo.AdHocRouteRecipient; import org.kuali.rice.krad.bo.AdHocRouteWorkgroup; import org.kuali.rice.krad.rules.rule.event.SendAdHocRequestsEvent; import org.kuali.rice.krad.service.BusinessObjectService; import org.kuali.rice.krad.service.DocumentService; import org.kuali.rice.krad.service.KualiRuleService; import org.kuali.rice.krad.util.KRADConstants; import org.kuali.rice.krad.util.ObjectUtils; import org.springframework.transaction.annotation.Transactional; @Transactional public class TicklerDeliveryServiceImpl implements TicklerDeliveryService { private static org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(TicklerDeliveryServiceImpl.class); private BusinessObjectService businessObjectService; private KEMService kemService; private UniversityDateService universityDateService; private KualiRuleService kualiRuleService; private DocumentService documentService; private Date currentDate; private ReportWriterService ticklerDeliveryStatisticsReportsWriterService; private TicklerDeliveryStatisticsReportDetailTableRow ticklerDeliveryStatisticsReportDetailTableRow; public TicklerDeliveryServiceImpl(){ //statistics report... 
ticklerDeliveryStatisticsReportDetailTableRow = new TicklerDeliveryStatisticsReportDetailTableRow(); } public boolean generateTicklerNotices() { //set current date currentDate = kemService.getCurrentDate(); LOG.info("Begin generateTicklerNotices() with notification date of " + currentDate); //get tickler documents ArrayList<Tickler>ticklerBOs = new ArrayList<Tickler>(getTicklerBusinessObjects()); //route tickler delivery notice routeTicklerDeliveryNotice(ticklerBOs); writeStatisticsReport(); LOG.info("End generateTicklerNotices() with notification date of " + currentDate); return false; } /** * Retrieves a list of tickler BOs where the next review date is * equal to today, the term date is null or greater than today, and the record is active. * * @param currentDate * @return */ protected List<Tickler> getTicklerBusinessObjects(){ Map<String, Object> queryCriteria = new HashMap<String, Object>(); queryCriteria.put(EndowPropertyConstants.TICKLER_NEXT_DUE_DATE, currentDate); queryCriteria.put(EndowPropertyConstants.TICKLER_ACTIVE_INDICATOR, KFSConstants.ParameterValues.YES); ArrayList<Tickler> ticklerBOs = null; ticklerBOs = new ArrayList<Tickler>(businessObjectService.findMatching(Tickler.class, queryCriteria)); //Go through and remove tickler docs where the term date has expired if(ObjectUtils.isNotNull(ticklerBOs)){ for(int i = ticklerBOs.size()-1; i > -1; i--){ if(ObjectUtils.isNotNull(ticklerBOs.get(i).getTerminationDate()) && ticklerBOs.get(i).getTerminationDate().before(currentDate)){ ticklerBOs.remove(i); } } } return ticklerBOs; } /** * Routes FYI tickler documents to Tickler persons and groups * * @param ticklerDocs * @return */ protected boolean routeTicklerDeliveryNotice(List<Tickler> ticklerBOs){ boolean success = false; boolean rulePassed = false; MaintenanceDocument ticklerDocument = null; if(ObjectUtils.isNotNull(ticklerBOs)){ for(Tickler ticklerBO : ticklerBOs){ //create a maintenance document from the ticklerDocument = createTicklerDocument(ticklerBO); //add principals and groups ticklerDocument.setAdHocRoutePersons(convertTicklerPrincipalToAdhocRoutePerson(ticklerBO.getRecipientPrincipals())); ticklerDocument.setAdHocRouteWorkgroups(convertTicklerGroupsToAdhocRouteGroup(ticklerBO.getRecipientGroups())); //check rules to ensure valid recipients rulePassed = kualiRuleService.applyRules(new SendAdHocRequestsEvent(ticklerDocument)); if (rulePassed) { try{ //rule passed to send adhoc requests documentService.routeDocument(ticklerDocument, "Tickler Notification - " + currentDate, combineAdHocRecipients(ticklerDocument)); ticklerDeliveryStatisticsReportDetailTableRow.increaseTicklerDeliveryNotificationsCount(); success = true; }catch(WorkflowException wfe){ ticklerDeliveryStatisticsReportDetailTableRow.increaseNumberOfExceptionsCount(); //just warn, but continue routing with other tickler BOs LOG.warn("Failed to route Tickler Delivery notices for Tickler Number " + ticklerBO.getNumber() + " with notification date of " + currentDate); } }else{ ticklerDeliveryStatisticsReportDetailTableRow.increaseNumberOfExceptionsCount(); LOG.warn("Invalid recipients for Tickler Delivery notices for Tickler Number " + ticklerBO.getNumber() + " with notification date of " + currentDate); } } }else{ //nothing to process success = true; } return success; } /** * Creates a Tickler Maintenance Document based on a Tickler BO * * @param ticklerBo * @return */ protected MaintenanceDocument createTicklerDocument(Tickler ticklerBo){ MaintenanceDocument document = null; try{ document = (MaintenanceDocument) 
documentService.getNewDocument(SpringContext.getBean(MaintenanceDocumentDictionaryService.class).getDocumentTypeName(ticklerBo.getClass())); } catch (WorkflowException e) { throw new RuntimeException(e); } // add all the pieces document.getDocumentHeader().setDocumentDescription("Tickler Notification - " + currentDate); document.setOldMaintainableObject(new KualiMaintainableImpl(ticklerBo)); document.getOldMaintainableObject().setBoClass(ticklerBo.getClass()); document.setNewMaintainableObject(new KualiMaintainableImpl(ticklerBo)); document.getNewMaintainableObject().setBoClass(ticklerBo.getClass()); document.getNewMaintainableObject().setMaintenanceAction(KRADConstants.MAINTENANCE_EDIT_ACTION); document.getNewMaintainableObject().setDocumentNumber(document.getDocumentNumber()); return document; } /** * Converts tickler principals into normal AdHocRoutePerson list * * @param principals * @return */ protected List<AdHocRoutePerson> convertTicklerPrincipalToAdhocRoutePerson(List<TicklerRecipientPrincipal> principals){ List<AdHocRoutePerson> personList = new ArrayList<AdHocRoutePerson>(); AdHocRoutePerson person = null; if(ObjectUtils.isNotNull(principals)){ //for each principal, make an AdHocRoutePerson for(TicklerRecipientPrincipal principal : principals){ if(principal.isActive()){ person = new AdHocRoutePerson(); person.setId(principal.getContact().getPrincipalName()); person.setActionRequested(KewApiConstants.ACTION_REQUEST_FYI_REQ); personList.add(person); } } } return personList; } /** * Converts tickler groups into normal AdHocRouteWorkgroup list * * @param groups * @return */ protected List<AdHocRouteWorkgroup> convertTicklerGroupsToAdhocRouteGroup(List<TicklerRecipientGroup> groups){ List<AdHocRouteWorkgroup> groupList = new ArrayList<AdHocRouteWorkgroup>(); AdHocRouteWorkgroup workgroup = null; if(ObjectUtils.isNotNull(groups)){ //for each group, make an AdHocWorkgroup for(TicklerRecipientGroup group : groups){ if(group.isActive()){ workgroup = new AdHocRouteWorkgroup(); workgroup.setId(group.getGroupId()); workgroup.setRecipientName(group.getAssignedToGroup().getName()); workgroup.setRecipientNamespaceCode(group.getAssignedToGroup().getNamespaceCode()); workgroup.setActionRequested(KewApiConstants.ACTION_REQUEST_FYI_REQ); groupList.add(workgroup); } } } return groupList; } /** * Combines persons and workgroups from document into one list. * * @param ticklerDocument * @return */ protected List<AdHocRouteRecipient> combineAdHocRecipients(MaintenanceDocument ticklerDocument) { List<AdHocRouteRecipient> adHocRecipients = new ArrayList<AdHocRouteRecipient>(); adHocRecipients.addAll(ticklerDocument.getAdHocRoutePersons()); adHocRecipients.addAll(ticklerDocument.getAdHocRouteWorkgroups()); return adHocRecipients; } protected void writeStatisticsReport() { //now print the statistics report..... long totalTicklerDeliveryNotifications = 0; long totalNumberOfExceptions = 0; //write the header line.... 
ticklerDeliveryStatisticsReportsWriterService.writeStatisticLine("Number of Tickler Notifications\t\tNumber of Exceptions"); ticklerDeliveryStatisticsReportsWriterService.writeStatisticLine("-------------------------------\t\t--------------------"); ticklerDeliveryStatisticsReportsWriterService.writeStatisticLine("%31d\t\t%20d", ticklerDeliveryStatisticsReportDetailTableRow.getTicklerDeliveryNotifications(), ticklerDeliveryStatisticsReportDetailTableRow.getNumberOfExceptions()); } protected TicklerDeliveryStatisticsReportDetailTableRow getTicklerDeliveryStatisticsReportDetailTableRow() { return ticklerDeliveryStatisticsReportDetailTableRow; } public void setTicklerDeliveryStatisticsReportDetailTableRow(TicklerDeliveryStatisticsReportDetailTableRow ticklerDeliveryStatisticsReportDetailTableRow) { this.ticklerDeliveryStatisticsReportDetailTableRow = ticklerDeliveryStatisticsReportDetailTableRow; } public void setBusinessObjectService(BusinessObjectService businessObjectService) { this.businessObjectService = businessObjectService; } public void setKemService(KEMService kemService) { this.kemService = kemService; } public void setUniversityDateService(UniversityDateService universityDateService) { this.universityDateService = universityDateService; } public void setKualiRuleService(KualiRuleService kualiRuleService) { this.kualiRuleService = kualiRuleService; } public void setDocumentService(DocumentService documentService) { this.documentService = documentService; } public void setTicklerDeliveryStatisticsReportsWriterService(ReportWriterService ticklerDeliveryStatisticsReportsWriterService) { this.ticklerDeliveryStatisticsReportsWriterService = ticklerDeliveryStatisticsReportsWriterService; } }
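/*
 * Illustrative sketch, not part of the Kuali file above: getTicklerBusinessObjects() walks its
 * result list backwards so that removing expired entries does not shift the indices still to be
 * visited. The ExpiredEntryFilter class and its Entry stand-in for the Tickler business object are
 * hypothetical; only the backwards-removal pattern and the "termination date before today" test
 * come from the code above.
 */
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

public class ExpiredEntryFilter {

    /** Minimal stand-in for a Tickler: only the termination date matters here. */
    public static class Entry {
        final Date terminationDate; // may be null, meaning "never expires"
        public Entry(Date terminationDate) { this.terminationDate = terminationDate; }
    }

    /** Removes entries whose termination date lies strictly before 'today', iterating backwards. */
    public static void removeExpired(List<Entry> entries, Date today) {
        for (int i = entries.size() - 1; i > -1; i--) {
            Date term = entries.get(i).terminationDate;
            if (term != null && term.before(today)) {
                entries.remove(i); // safe: indices below i are untouched
            }
        }
    }

    public static void main(String[] args) {
        Date today = new Date();
        List<Entry> entries = new ArrayList<>();
        entries.add(new Entry(null));                                     // kept
        entries.add(new Entry(new Date(today.getTime() - 86_400_000L)));  // expired, removed
        entries.add(new Entry(new Date(today.getTime() + 86_400_000L)));  // kept
        removeExpired(entries, today);
        System.out.println(entries.size()); // 2
    }
}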
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.email; import microsoft.exchange.webservices.data.autodiscover.IAutodiscoverRedirectionUrl; import microsoft.exchange.webservices.data.core.ExchangeService; import microsoft.exchange.webservices.data.core.PropertySet; import microsoft.exchange.webservices.data.core.enumeration.misc.ExchangeVersion; import microsoft.exchange.webservices.data.core.enumeration.property.BodyType; import microsoft.exchange.webservices.data.core.enumeration.property.BasePropertySet; import microsoft.exchange.webservices.data.core.enumeration.property.WellKnownFolderName; import microsoft.exchange.webservices.data.core.enumeration.search.FolderTraversal; import microsoft.exchange.webservices.data.core.enumeration.search.LogicalOperator; import microsoft.exchange.webservices.data.core.enumeration.search.SortDirection; import microsoft.exchange.webservices.data.core.enumeration.service.ConflictResolutionMode; import microsoft.exchange.webservices.data.core.enumeration.service.DeleteMode; import microsoft.exchange.webservices.data.core.service.folder.Folder; import microsoft.exchange.webservices.data.core.service.item.EmailMessage; import microsoft.exchange.webservices.data.core.service.item.Item; import microsoft.exchange.webservices.data.core.service.schema.EmailMessageSchema; import microsoft.exchange.webservices.data.core.service.schema.FolderSchema; import microsoft.exchange.webservices.data.core.service.schema.ItemSchema; import microsoft.exchange.webservices.data.credential.ExchangeCredentials; import microsoft.exchange.webservices.data.credential.WebCredentials; import microsoft.exchange.webservices.data.property.complex.FileAttachment; import microsoft.exchange.webservices.data.property.complex.ItemAttachment; import microsoft.exchange.webservices.data.search.FindFoldersResults; import microsoft.exchange.webservices.data.search.FindItemsResults; import microsoft.exchange.webservices.data.search.FolderView; import microsoft.exchange.webservices.data.search.ItemView; import microsoft.exchange.webservices.data.search.filter.SearchFilter; import org.apache.commons.lang3.StringUtils; import org.apache.commons.mail.EmailAttachment; import org.apache.commons.mail.EmailException; import org.apache.commons.mail.HtmlEmail; import org.apache.commons.mail.MultiPartEmail; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnStopped; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.Validator; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import 
org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.io.OutputStreamCallback; import org.apache.nifi.processor.util.StandardValidators; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.mail.Address; import javax.mail.Flags; import javax.mail.Message; import javax.mail.MessagingException; import javax.mail.internet.MimeMessage; import javax.mail.util.ByteArrayDataSource; import java.io.IOException; import java.io.OutputStream; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; @InputRequirement(InputRequirement.Requirement.INPUT_FORBIDDEN) @CapabilityDescription("Consumes messages from Microsoft Exchange using Exchange Web Services. " + "The raw-bytes of each received email message are written as contents of the FlowFile") @Tags({ "Email", "EWS", "Exchange", "Get", "Ingest", "Ingress", "Message", "Consume" }) public class ConsumeEWS extends AbstractProcessor { public static final PropertyDescriptor USER = new PropertyDescriptor.Builder() .name("user") .displayName("User Name") .description("User Name used for authentication and authorization with Email server.") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); public static final PropertyDescriptor PASSWORD = new PropertyDescriptor.Builder() .name("password") .displayName("Password") .description("Password used for authentication and authorization with Email server.") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .sensitive(true) .build(); public static final PropertyDescriptor FOLDER = new PropertyDescriptor.Builder() .name("folder") .displayName("Folder") .description("Email folder to retrieve messages from (e.g., INBOX)") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .defaultValue("INBOX") .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); public static final PropertyDescriptor FETCH_SIZE = new PropertyDescriptor.Builder() .name("fetch.size") .displayName("Fetch Size") .description("Specify the maximum number of Messages to fetch per call to Email Server.") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .defaultValue("10") .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR) .build(); public static final PropertyDescriptor SHOULD_DELETE_MESSAGES = new PropertyDescriptor.Builder() .name("delete.messages") .displayName("Delete Messages") .description("Specify whether mail messages should be deleted after retrieval.") .required(true) .allowableValues("true", "false") .defaultValue("false") .addValidator(StandardValidators.BOOLEAN_VALIDATOR) .build(); static final PropertyDescriptor CONNECTION_TIMEOUT = new PropertyDescriptor.Builder() .name("connection.timeout") .displayName("Connection timeout") .description("The amount of time to wait to connect to Email server") .required(true) .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) 
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY) .defaultValue("30 sec") .build(); public static final PropertyDescriptor EXCHANGE_VERSION = new PropertyDescriptor.Builder() .name("mail-ews-version") .displayName("Exchange Version") .description("What version of Exchange Server the server is running.") .required(true) .allowableValues(ExchangeVersion.values()) .defaultValue(ExchangeVersion.Exchange2010_SP2.name()) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); public static final PropertyDescriptor EWS_URL = new PropertyDescriptor.Builder() .name("ews-url") .displayName("EWS URL") .description("URL of the EWS Endpoint. Required if Autodiscover is false.") .required(false) .addValidator(StandardValidators.URL_VALIDATOR) .build(); public static final PropertyDescriptor USE_AUTODISCOVER = new PropertyDescriptor.Builder() .name("ews-autodiscover") .displayName("Auto Discover URL") .description("Whether or not to use the Exchange email address to Autodiscover the EWS endpoint URL.") .required(true) .allowableValues("true","false") .defaultValue("true") .build(); public static final PropertyDescriptor SHOULD_MARK_READ = new PropertyDescriptor.Builder() .name("ews-mark-as-read") .displayName("Mark Messages as Read") .description("Specify if messages should be marked as read after retrieval.") .required(true) .allowableValues("true", "false") .defaultValue("true") .addValidator(StandardValidators.BOOLEAN_VALIDATOR) .build(); public static final PropertyDescriptor INCLUDE_EMAIL_HEADERS = new PropertyDescriptor.Builder() .name("ews-include-headers") .displayName("Original Headers to Include") .description("Comma delimited list specifying which headers from the original message to include in the exported email message. Blank means copy all headers. " + "Some headers can cause problems with message parsing, specifically the 'Content-Type' header.") .defaultValue("") .addValidator(Validator.VALID) .build(); public static final PropertyDescriptor EXCLUDE_EMAIL_HEADERS = new PropertyDescriptor.Builder() .name("ews-exclude-headers") .displayName("Original Headers to Exclude") .description("Comma delimited list specifying which headers from the original message to exclude in the exported email message. 
Blank means don't exclude any headers.") .defaultValue("") .addValidator(Validator.VALID) .build(); static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All messages that are the are successfully received from Email server and converted to FlowFiles are routed to this relationship") .build(); final protected List<PropertyDescriptor> DESCRIPTORS; final protected Set<Relationship> RELATIONSHIPS; protected final Logger logger = LoggerFactory.getLogger(this.getClass()); protected volatile BlockingQueue<Message> messageQueue; protected volatile String displayUrl; protected volatile ProcessSession processSession; protected volatile boolean shouldSetDeleteFlag; protected volatile String folderName; public ConsumeEWS(){ final Set<Relationship> relationshipSet = new HashSet<>(); relationshipSet.add(REL_SUCCESS); RELATIONSHIPS = relationshipSet; final List<PropertyDescriptor> descriptors = new ArrayList<>(); descriptors.add(USER); descriptors.add(PASSWORD); descriptors.add(FOLDER); descriptors.add(FETCH_SIZE); descriptors.add(SHOULD_DELETE_MESSAGES); descriptors.add(CONNECTION_TIMEOUT); descriptors.add(EXCHANGE_VERSION); descriptors.add(EWS_URL); descriptors.add(USE_AUTODISCOVER); descriptors.add(SHOULD_MARK_READ); descriptors.add(INCLUDE_EMAIL_HEADERS); descriptors.add(EXCLUDE_EMAIL_HEADERS); DESCRIPTORS = descriptors; } @Override public Set<Relationship> getRelationships() { return RELATIONSHIPS; } @Override protected List<PropertyDescriptor> getSupportedPropertyDescriptors() { return DESCRIPTORS; } @Override public void onTrigger(ProcessContext context, ProcessSession processSession) throws ProcessException { if(this.messageQueue == null){ int fetchSize = context.getProperty(FETCH_SIZE).evaluateAttributeExpressions().asInteger(); this.messageQueue = new ArrayBlockingQueue<>(fetchSize); } this.folderName = context.getProperty(FOLDER).evaluateAttributeExpressions().getValue(); Message emailMessage = this.receiveMessage(context); if (emailMessage != null) { this.transfer(emailMessage, context, processSession); } else { //No new messages found, yield the processor context.yield(); } } protected ExchangeService initializeIfNecessary(ProcessContext context) throws ProcessException { ExchangeVersion ver = ExchangeVersion.valueOf(context.getProperty(EXCHANGE_VERSION).getValue()); ExchangeService service = new ExchangeService(ver); final String timeoutInMillis = String.valueOf(context.getProperty(CONNECTION_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS)); service.setTimeout(Integer.parseInt(timeoutInMillis)); String userEmail = context.getProperty(USER).evaluateAttributeExpressions().getValue(); String password = context.getProperty(PASSWORD).evaluateAttributeExpressions().getValue(); ExchangeCredentials credentials = new WebCredentials(userEmail, password); service.setCredentials(credentials); Boolean useAutodiscover = context.getProperty(USE_AUTODISCOVER).asBoolean(); if(useAutodiscover){ try { service.autodiscoverUrl(userEmail, new RedirectionUrlCallback()); } catch (Exception e) { throw new ProcessException("Failure setting Autodiscover URL from email address.", e); } } else { String ewsURL = context.getProperty(EWS_URL).getValue(); try { service.setUrl(new URI(ewsURL)); } catch (URISyntaxException e) { throw new ProcessException("Failure setting EWS URL.", e); } } return service; } @Override protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) { return new 
PropertyDescriptor.Builder() .description("Specifies the value for '" + propertyDescriptorName + "' Java Mail property.") .name(propertyDescriptorName).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).dynamic(true) .build(); } /** * Return the target receivere's mail protocol (e.g., imap, pop etc.) */ protected String getProtocol(ProcessContext processContext) { return "ews"; } /** * Fills the internal message queue if such queue is empty. This is due to * the fact that per single session there may be multiple messages retrieved * from the email server (see FETCH_SIZE). */ protected void fillMessageQueueIfNecessary(ProcessContext context) throws ProcessException { if (this.messageQueue.isEmpty()) { ExchangeService service = this.initializeIfNecessary(context); boolean deleteOnRead = context.getProperty(SHOULD_DELETE_MESSAGES).getValue().equals("true"); boolean markAsRead = context.getProperty(SHOULD_MARK_READ).getValue().equals("true"); String includeHeaders = context.getProperty(INCLUDE_EMAIL_HEADERS).getValue(); String excludeHeaders = context.getProperty(EXCLUDE_EMAIL_HEADERS).getValue(); List<String> includeHeadersList = null; List<String> excludeHeadersList = null; if (!StringUtils.isEmpty(includeHeaders)) { includeHeadersList = Arrays.asList(includeHeaders.split(",")); } if (!StringUtils.isEmpty(excludeHeaders)) { excludeHeadersList = Arrays.asList(excludeHeaders.split(",")); } try { //Get Folder Folder folder = getFolder(service); ItemView view = new ItemView(messageQueue.remainingCapacity()); view.getOrderBy().add(ItemSchema.DateTimeReceived, SortDirection.Ascending); SearchFilter sf = new SearchFilter.SearchFilterCollection(LogicalOperator.And, new SearchFilter.IsEqualTo(EmailMessageSchema.IsRead, false)); FindItemsResults<Item> findResults = service.findItems(folder.getId(), sf, view); if(findResults == null || findResults.getItems().size()== 0){ return; } service.loadPropertiesForItems(findResults, PropertySet.FirstClassProperties); for (Item item : findResults) { EmailMessage ewsMessage = (EmailMessage) item; messageQueue.add(parseMessage(ewsMessage,includeHeadersList,excludeHeadersList)); if(deleteOnRead){ ewsMessage.delete(DeleteMode.HardDelete); } else if(markAsRead){ ewsMessage.setIsRead(true); ewsMessage.update(ConflictResolutionMode.AlwaysOverwrite); } } service.close(); } catch (Exception e) { throw new ProcessException("Failed retrieving new messages from EWS.", e); } } } protected Folder getFolder(ExchangeService service) { Folder folder; if(folderName.equals("INBOX")){ try { folder = Folder.bind(service, WellKnownFolderName.Inbox); } catch (Exception e) { throw new ProcessException("Failed to bind Inbox Folder on EWS Server", e); } } else { FolderView view = new FolderView(10); view.setTraversal(FolderTraversal.Deep); SearchFilter searchFilter = new SearchFilter.IsEqualTo(FolderSchema.DisplayName, folderName); try { FindFoldersResults foldersResults = service.findFolders(WellKnownFolderName.Root,searchFilter, view); ArrayList<Folder> folderIds = foldersResults.getFolders(); if(folderIds.size() > 1){ throw new ProcessException("More than 1 folder found with the name " + folderName); } folder = Folder.bind(service, folderIds.get(0).getId()); } catch (Exception e) { throw new ProcessException("Search for Inbox Subfolder failed.", e); } } return folder; } public MimeMessage parseMessage(EmailMessage item, List<String> hdrIncludeList, List<String> hdrExcludeList) throws Exception { EmailMessage ewsMessage = item; final String bodyText = ewsMessage.getBody().toString(); 
MultiPartEmail mm; if(ewsMessage.getBody().getBodyType() == BodyType.HTML){ mm = new HtmlEmail(); if(!StringUtils.isEmpty(bodyText)){ ((HtmlEmail)mm).setHtmlMsg(bodyText); } } else { mm = new MultiPartEmail(); if(!StringUtils.isEmpty(bodyText)){ mm.setMsg(bodyText); } } mm.setHostName("NiFi-EWS"); //from mm.setFrom(ewsMessage.getFrom().getAddress()); //to recipients ewsMessage.getToRecipients().forEach(x->{ try { mm.addTo(x.getAddress()); } catch (EmailException e) { throw new ProcessException("Failed to add TO recipient.", e); } }); //cc recipients ewsMessage.getCcRecipients().forEach(x->{ try { mm.addCc(x.getAddress()); } catch (EmailException e) { throw new ProcessException("Failed to add CC recipient.", e); } }); //subject mm.setSubject(ewsMessage.getSubject()); //sent date mm.setSentDate(ewsMessage.getDateTimeSent()); //add message headers ewsMessage.getInternetMessageHeaders().getItems().stream() .filter(x -> (hdrIncludeList == null || hdrIncludeList.isEmpty() || hdrIncludeList.contains(x.getName())) && (hdrExcludeList == null || hdrExcludeList.isEmpty() || !hdrExcludeList.contains(x.getName()))) .forEach(x-> mm.addHeader(x.getName(), x.getValue())); //Any attachments if(ewsMessage.getHasAttachments()){ ewsMessage.getAttachments().forEach(x->{ try { if(x instanceof FileAttachment) { FileAttachment file = (FileAttachment) x; file.load(); String type = file.getContentType() == null ? "text/plain" : file.getContentType(); ByteArrayDataSource bds = new ByteArrayDataSource(file.getContent(), type); mm.attach(bds, file.getName(), "", EmailAttachment.ATTACHMENT); } else { // x instanceof ItemAttachment ItemAttachment eml = (ItemAttachment) x; PropertySet oPropSetForBodyText = new PropertySet(BasePropertySet.FirstClassProperties); oPropSetForBodyText.add(ItemSchema.MimeContent); eml.load(oPropSetForBodyText); Item it = eml.getItem(); ByteArrayDataSource bds = new ByteArrayDataSource(it.getMimeContent().getContent(), "text/plain"); mm.attach(bds, eml.getName(), "", EmailAttachment.ATTACHMENT); } } catch (MessagingException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } }); } mm.buildMimeMessage(); return mm.getMimeMessage(); } /** * Disposes the message by converting it to a {@link FlowFile} transferring * it to the REL_SUCCESS relationship. 
*/ private void transfer(Message emailMessage, ProcessContext context, ProcessSession processSession) { long start = System.nanoTime(); FlowFile flowFile = processSession.create(); flowFile = processSession.append(flowFile, new OutputStreamCallback() { @Override public void process(final OutputStream out) throws IOException { try { emailMessage.writeTo(out); } catch (MessagingException e) { throw new IOException(e); } } }); long executionDuration = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start); String fromAddressesString = ""; try { Address[] fromAddresses = emailMessage.getFrom(); if (fromAddresses != null) { fromAddressesString = Arrays.asList(fromAddresses).toString(); } } catch (MessagingException e) { this.logger.warn("Failed to retrieve 'From' attribute from Message."); } processSession.getProvenanceReporter().receive(flowFile, this.displayUrl, "Received message from " + fromAddressesString, executionDuration); this.getLogger().info("Successfully received {} from {} in {} millis", new Object[]{flowFile, fromAddressesString, executionDuration}); processSession.transfer(flowFile, REL_SUCCESS); try { emailMessage.setFlag(Flags.Flag.DELETED, this.shouldSetDeleteFlag); } catch (MessagingException e) { this.logger.warn("Failed to set DELETE Flag on the message, data duplication may occur."); } } /** * Receives message from the internal queue filling up the queue if * necessary. */ protected Message receiveMessage(ProcessContext context) { Message emailMessage = null; try { this.fillMessageQueueIfNecessary(context); emailMessage = this.messageQueue.poll(1, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { context.yield(); this.logger.error("Failed retrieving messages from EWS.", e); Thread.currentThread().interrupt(); this.logger.debug("Current thread is interrupted"); } return emailMessage; } @OnStopped public void stop(ProcessContext processContext) { this.flushRemainingMessages(processContext); } /** * Will flush the remaining messages when this processor is stopped. */ protected void flushRemainingMessages(ProcessContext processContext) { Message emailMessage; try { while ((emailMessage = this.messageQueue.poll(1, TimeUnit.MILLISECONDS)) != null) { this.transfer(emailMessage, processContext, this.processSession); this.processSession.commitAsync(); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); this.logger.debug("Current thread is interrupted"); } } static class RedirectionUrlCallback implements IAutodiscoverRedirectionUrl { public boolean autodiscoverRedirectionUrlValidationCallback( String redirectionUrl) { return redirectionUrl.toLowerCase().startsWith("https://"); } } }
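/*
 * Illustrative sketch, not part of the NiFi processor above: parseMessage(...) copies an original
 * message header only when it passes both the include list and the exclude list. The HeaderFilter
 * class and the shouldCopy method below are hypothetical; the predicate mirrors the stream filter
 * used in parseMessage above.
 */
import java.util.Arrays;
import java.util.List;

public class HeaderFilter {

    /**
     * Returns true when the header should be copied: the include list is null/empty or names it,
     * and the exclude list is null/empty or does not name it.
     */
    public static boolean shouldCopy(String headerName, List<String> include, List<String> exclude) {
        boolean included = include == null || include.isEmpty() || include.contains(headerName);
        boolean notExcluded = exclude == null || exclude.isEmpty() || !exclude.contains(headerName);
        return included && notExcluded;
    }

    public static void main(String[] args) {
        List<String> exclude = Arrays.asList("Content-Type");
        System.out.println(shouldCopy("Subject", null, exclude));      // true
        System.out.println(shouldCopy("Content-Type", null, exclude)); // false
    }
}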
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.sunshine.app.data; import android.annotation.TargetApi; import android.content.ContentProvider; import android.content.ContentValues; import android.content.UriMatcher; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteQueryBuilder; import android.net.Uri; public class WeatherProvider extends ContentProvider { // The URI Matcher used by this content provider. private static final UriMatcher sUriMatcher = buildUriMatcher(); private WeatherDbHelper mOpenHelper; static final int WEATHER = 100; static final int WEATHER_WITH_LOCATION = 101; static final int WEATHER_WITH_LOCATION_AND_DATE = 102; static final int LOCATION = 300; private static final SQLiteQueryBuilder sWeatherByLocationSettingQueryBuilder; static{ sWeatherByLocationSettingQueryBuilder = new SQLiteQueryBuilder(); //This is an inner join which looks like //weather INNER JOIN location ON weather.location_id = location._id sWeatherByLocationSettingQueryBuilder.setTables( WeatherContract.WeatherEntry.TABLE_NAME + " INNER JOIN " + WeatherContract.LocationEntry.TABLE_NAME + " ON " + WeatherContract.WeatherEntry.TABLE_NAME + "." + WeatherContract.WeatherEntry.COLUMN_LOC_KEY + " = " + WeatherContract.LocationEntry.TABLE_NAME + "." + WeatherContract.LocationEntry._ID); } //location.location_setting = ? private static final String sLocationSettingSelection = WeatherContract.LocationEntry.TABLE_NAME+ "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? "; //location.location_setting = ? AND date >= ? private static final String sLocationSettingWithStartDateSelection = WeatherContract.LocationEntry.TABLE_NAME+ "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " + WeatherContract.WeatherEntry.COLUMN_DATE + " >= ? "; //location.location_setting = ? AND date = ? private static final String sLocationSettingAndDaySelection = WeatherContract.LocationEntry.TABLE_NAME + "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " + WeatherContract.WeatherEntry.COLUMN_DATE + " = ? 
"; private Cursor getWeatherByLocationSetting(Uri uri, String[] projection, String sortOrder) { String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri); long startDate = WeatherContract.WeatherEntry.getStartDateFromUri(uri); String[] selectionArgs; String selection; if (startDate == 0) { selection = sLocationSettingSelection; selectionArgs = new String[]{locationSetting}; } else { selectionArgs = new String[]{locationSetting, Long.toString(startDate)}; selection = sLocationSettingWithStartDateSelection; } return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(), projection, selection, selectionArgs, null, null, sortOrder ); } private Cursor getWeatherByLocationSettingAndDate( Uri uri, String[] projection, String sortOrder) { String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri); long date = WeatherContract.WeatherEntry.getDateFromUri(uri); return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(), projection, sLocationSettingAndDaySelection, new String[]{locationSetting, Long.toString(date)}, null, null, sortOrder ); } /* Students: Here is where you need to create the UriMatcher. This UriMatcher will match each URI to the WEATHER, WEATHER_WITH_LOCATION, WEATHER_WITH_LOCATION_AND_DATE, and LOCATION integer constants defined above. You can test this by uncommenting the testUriMatcher test within TestUriMatcher. */ static UriMatcher buildUriMatcher() { // 1) The code passed into the constructor represents the code to return for the root // URI. It's common to use NO_MATCH as the code for this case. Add the constructor below. final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH); final String authority = WeatherContract.CONTENT_AUTHORITY; //for the type of URI you want to add, create a corresponding code matcher.addURI(authority, WeatherContract.PATH_WEATHER, WEATHER); matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*", WEATHER_WITH_LOCATION); matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*/#", WEATHER_WITH_LOCATION_AND_DATE); matcher.addURI(authority, WeatherContract.PATH_LOCATION, LOCATION); // 2) Use the addURI function to match each of the types. Use the constants from // WeatherContract to help define the types to the UriMatcher. // 3) Return the new matcher! return matcher; } /* Students: We've coded this for you. We just create a new WeatherDbHelper for later use here. */ @Override public boolean onCreate() { mOpenHelper = new WeatherDbHelper(getContext()); return true; } /* Students: Here's where you'll code the getType function that uses the UriMatcher. You can test this by uncommenting testGetType in TestProvider. */ @Override public String getType(Uri uri) { // Use the Uri Matcher to determine what kind of URI this is. 
final int match = sUriMatcher.match(uri); switch (match) { // Student: Uncomment and fill out these two cases // case WEATHER_WITH_LOCATION_AND_DATE: // case WEATHER_WITH_LOCATION: case WEATHER: return WeatherContract.WeatherEntry.CONTENT_TYPE; case LOCATION: return WeatherContract.LocationEntry.CONTENT_TYPE; case WEATHER_WITH_LOCATION: return WeatherContract.WeatherEntry.CONTENT_TYPE; case WEATHER_WITH_LOCATION_AND_DATE: //return a specific value of type item return WeatherContract.WeatherEntry.CONTENT_ITEM_TYPE; default: throw new UnsupportedOperationException("Unknown uri: " + uri); } } //Selection, this argument you can determine which records to update, deleted, insert //SelectionArgs, The binding parameters to the previous selection argument @Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { // Here's the switch statement that, given a URI, will determine what kind of request it is, // and query the database accordingly. Cursor retCursor; switch (sUriMatcher.match(uri)) { // "weather/*/#" case WEATHER_WITH_LOCATION_AND_DATE: { retCursor = getWeatherByLocationSettingAndDate(uri, projection, sortOrder); break; } // "weather/*" case WEATHER_WITH_LOCATION: { retCursor = getWeatherByLocationSetting(uri, projection, sortOrder); break; } // "weather" case WEATHER: { retCursor = mOpenHelper.getReadableDatabase().query( WeatherContract.WeatherEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder ); break; } // "location" case LOCATION: { retCursor = mOpenHelper.getReadableDatabase().query( WeatherContract.LocationEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder ); break; } default: throw new UnsupportedOperationException("Unknown uri: " + uri); } retCursor.setNotificationUri(getContext().getContentResolver(), uri); return retCursor; } /* Student: Add the ability to insert Locations to the implementation of this function. */ @Override public Uri insert(Uri uri, ContentValues values) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); Uri returnUri; switch (match) { case WEATHER: { normalizeDate(values); long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, values); if ( _id > 0 ) returnUri = WeatherContract.WeatherEntry.buildWeatherUri(_id); else throw new android.database.SQLException("Failed to insert row into " + uri); break; } case LOCATION: { normalizeDate(values); long _id = db.insert(WeatherContract.LocationEntry.TABLE_NAME, null, values); if (_id > 0) returnUri = WeatherContract.LocationEntry.buildLocationUri(_id); else throw new android.database.SQLException("Failed to insert row into " + uri); break; } default: throw new UnsupportedOperationException("Unknown uri: " + uri); } getContext().getContentResolver().notifyChange(uri, null); db.close(); return returnUri; } @Override public int delete(Uri uri, String selection, String[] selectionArgs) { // Student: Start by getting a writable database final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); int rowDeleted; // Student: Use the uriMatcher to match the WEATHER and LOCATION URI's we are going to // handle. If it doesn't match these, throw an UnsupportedOperationException. // if (null == selection) selection = "1"; // Student: A null value deletes all rows. 
In my implementation of this, I only notified // the uri listeners (using the content resolver) if the rowsDeleted != 0 or the selection // is null. // Oh, and you should notify the listeners here. switch (match) { case WEATHER: rowDeleted = db.delete(WeatherContract.WeatherEntry.TABLE_NAME, selection, selectionArgs); break; case LOCATION: rowDeleted = db.delete(WeatherContract.LocationEntry.TABLE_NAME, selection, selectionArgs); break; default: throw new UnsupportedOperationException("Unknown uri: " + uri); } // Student: return the actual rows deleted if (rowDeleted != 0){ getContext().getContentResolver().notifyChange(uri, null); } return rowDeleted; } private void normalizeDate(ContentValues values) { // normalize the date value if (values.containsKey(WeatherContract.WeatherEntry.COLUMN_DATE)) { long dateValue = values.getAsLong(WeatherContract.WeatherEntry.COLUMN_DATE); values.put(WeatherContract.WeatherEntry.COLUMN_DATE, WeatherContract.normalizeDate(dateValue)); } } @Override public int update( Uri uri, ContentValues values, String selection, String[] selectionArgs) { // Student: This is a lot like the delete function. We return the number of rows impacted // by the update. final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); int rowUpdated; switch (match) { case WEATHER: rowUpdated = db.update(WeatherContract.WeatherEntry.TABLE_NAME, values, selection, selectionArgs); break; case LOCATION: rowUpdated = db.update(WeatherContract.LocationEntry.TABLE_NAME, values, selection, selectionArgs); break; default: throw new UnsupportedOperationException("Unknown uri: " + uri); } if (rowUpdated != 0) { getContext().getContentResolver().notifyChange(uri, null); } return rowUpdated; } @Override public int bulkInsert(Uri uri, ContentValues[] values) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); switch (match) { case WEATHER: db.beginTransaction(); int returnCount = 0; try { for (ContentValues value : values) { normalizeDate(value); long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, value); if (_id != -1) { returnCount++; } } db.setTransactionSuccessful(); } finally { db.endTransaction(); } getContext().getContentResolver().notifyChange(uri, null); return returnCount; default: return super.bulkInsert(uri, values); } } // You do not need to call this method. This is a method specifically to assist the testing // framework in running smoothly. You can read more at: // http://developer.android.com/reference/android/content/ContentProvider.html#shutdown() @Override @TargetApi(11) public void shutdown() { mOpenHelper.close(); super.shutdown(); } }
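/*
 * Illustrative sketch, not part of the Android provider above: getWeatherByLocationSetting(...)
 * chooses between the plain location selection and the location + start-date selection depending
 * on whether a start date was supplied in the URI. The WeatherSelectionBuilder class and its
 * Selection holder are hypothetical; the WHERE clauses mirror the selection constants defined in
 * the provider.
 */
public class WeatherSelectionBuilder {

    /** Simple holder for a WHERE clause and its bind arguments. */
    public static class Selection {
        public final String where;
        public final String[] args;
        public Selection(String where, String[] args) { this.where = where; this.args = args; }
    }

    // Mirror sLocationSettingSelection and sLocationSettingWithStartDateSelection above.
    static final String LOCATION_ONLY = "location.location_setting = ? ";
    static final String LOCATION_WITH_START_DATE = "location.location_setting = ? AND date >= ? ";

    public static Selection build(String locationSetting, long startDate) {
        if (startDate == 0) {
            return new Selection(LOCATION_ONLY, new String[]{locationSetting});
        }
        return new Selection(LOCATION_WITH_START_DATE,
                new String[]{locationSetting, Long.toString(startDate)});
    }

    public static void main(String[] args) {
        Selection s = build("94043", 1419120000L);
        System.out.println(s.where);       // location.location_setting = ? AND date >= ?
        System.out.println(s.args.length); // 2
    }
}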
package org.deuce.test.main; import java.util.*; import org.deuce.*; import org.deuce.benchmark.intset.IntSet; /** * @author Pascal Felber * @since 0.3 */ public class IntSetSkipList implements IntSet { public class Node { private int m_value; private Node[] m_forward; public Node(int level, int value) { m_value = value; m_forward = new Node[level + 1]; } public void setValue(int value) { m_value = value; } public int getValue() { return m_value; } public int getLevel() { return m_forward.length - 1; } public void setForward(int level, Node next) { m_forward[level] = next; } public Node getForward(int level) { return m_forward[level]; } public String toString() { String result = ""; result += "<l=" + getLevel() + ",v=" + m_value + ">:"; for (int i = 0; i <= getLevel(); i++) { result += " @[" + i + "]="; if (m_forward[i] != null) result += m_forward[i].getValue(); else result += "null"; } return result; } } // Probability to increase level private double m_probability; // Upper bound on the number of levels private int m_maxLevel; // Highest level so far private int m_level; // First element of the list private Node m_head; // Thread-private PRNG private static ThreadLocal<Random> s_random = new ThreadLocal<Random>() { protected synchronized Random initialValue() { return new Random(); } }; public IntSetSkipList(int maxLevel, double probability) { m_maxLevel = maxLevel; m_probability = probability; m_level = 0; m_head = new Node(m_maxLevel, Integer.MIN_VALUE); Node tail = new Node(m_maxLevel, Integer.MAX_VALUE); for (int i = 0; i <= m_maxLevel; i++) m_head.setForward(i, tail); } public IntSetSkipList() { this(32, 0.25); } protected int randomLevel() { int l = 0; while (l < m_maxLevel && s_random.get().nextDouble() < m_probability) l++; return l; } @Atomic public boolean add(int value) { boolean result; Node[] update = new Node[m_maxLevel + 1]; Node node = m_head; for (int i = m_level; i >= 0; i--) { Node next = node.getForward(i); while (next.getValue() < value) { node = next; next = node.getForward(i); } update[i] = node; } node = node.getForward(0); if (node.getValue() == value) { result = false; } else { int level = randomLevel(); if (level > m_level) { for (int i = m_level + 1; i <= level; i++) update[i] = m_head; m_level = level; } node = new Node(level, value); for (int i = 0; i <= level; i++) { node.setForward(i, update[i].getForward(i)); update[i].setForward(i, node); } result = true; } return result; } @Atomic public boolean remove(int value) { boolean result; Node[] update = new Node[m_maxLevel + 1]; Node node = m_head; for (int i = m_level; i >= 0; i--) { Node next = node.getForward(i); while (next.getValue() < value) { node = next; next = node.getForward(i); } update[i] = node; } node = node.getForward(0); if (node.getValue() != value) { result = false; } else { for (int i = 0; i <= m_level; i++) { if (update[i].getForward(i) == node) update[i].setForward(i, node.getForward(i)); } while (m_level > 0 && m_head.getForward(m_level) == m_head) m_level--; result = true; } return result; } @Atomic public boolean contains(int value) { boolean result; Node node = m_head; for (int i = m_level; i >= 0; i--) { Node next = node.getForward(i); while (next.getValue() < value) { node = next; next = node.getForward(i); } } node = node.getForward(0); result = (node.getValue() == value); return result; } public String toString() { String result = ""; result += "Skip list:\n"; result += " Level=" + m_level + "\n"; result += " Max_level=" + m_maxLevel + "\n"; result += " Probability=" + 
m_probability + "\n"; result += "Elements:\n"; int[] countLevel = new int[m_maxLevel + 1]; Node element = m_head.getForward(0); while (element.getValue() < Integer.MAX_VALUE) { countLevel[element.getLevel()]++; result += " " + element.toString() + "\n"; element = element.getForward(0); } result += "Level distribution:\n"; for (int i = 0; i <= m_maxLevel; i++) result += " #[" + i + "]=" + countLevel[i] + "\n"; return result; } }
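/*
 * Illustrative sketch, not part of the skip list above: randomLevel() draws a node level from a
 * geometric distribution, repeatedly flipping a biased coin (probability m_probability) until it
 * fails or the maximum level is reached. The LevelSampler class below is hypothetical; the loop
 * mirrors randomLevel() and the default parameters (maxLevel 32, probability 0.25) come from the
 * no-argument constructor above.
 */
import java.util.Random;

public class LevelSampler {

    /** Draws one level: keep incrementing while the coin comes up heads, capped at maxLevel. */
    public static int randomLevel(Random random, int maxLevel, double probability) {
        int level = 0;
        while (level < maxLevel && random.nextDouble() < probability) {
            level++;
        }
        return level;
    }

    public static void main(String[] args) {
        Random random = new Random(42);
        int[] histogram = new int[33]; // levels 0..32
        for (int i = 0; i < 100_000; i++) {
            histogram[randomLevel(random, 32, 0.25)]++;
        }
        // Roughly 75% of nodes land on level 0, ~19% on level 1, and so on (geometric decay).
        System.out.println("level 0: " + histogram[0] + ", level 1: " + histogram[1]);
    }
}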
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cloudstack.storage.image.store; import java.util.Date; import java.util.Map; import javax.inject.Inject; import org.apache.cloudstack.engine.subsystem.api.storage.DataObjectInStore; import org.apache.cloudstack.engine.subsystem.api.storage.DataStore; import org.apache.cloudstack.engine.subsystem.api.storage.ObjectInDataStoreStateMachine; import org.apache.cloudstack.engine.subsystem.api.storage.ObjectInDataStoreStateMachine.Event; import org.apache.cloudstack.engine.subsystem.api.storage.TemplateInfo; import org.apache.cloudstack.storage.command.CopyCmdAnswer; import org.apache.cloudstack.storage.datastore.ObjectInDataStoreManager; import org.apache.cloudstack.storage.datastore.db.TemplateDataStoreDao; import org.apache.cloudstack.storage.datastore.db.TemplateDataStoreVO; import org.apache.cloudstack.storage.to.TemplateObjectTO; import org.apache.log4j.Logger; import com.cloud.agent.api.Answer; import com.cloud.agent.api.to.DataObjectType; import com.cloud.agent.api.to.DataTO; import com.cloud.exception.ConcurrentOperationException; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.storage.DataStoreRole; import com.cloud.storage.Storage.ImageFormat; import com.cloud.storage.Storage.TemplateType; import com.cloud.storage.VMTemplateStoragePoolVO; import com.cloud.storage.VMTemplateStorageResourceAssoc.Status; import com.cloud.storage.VMTemplateVO; import com.cloud.storage.dao.VMTemplateDao; import com.cloud.storage.dao.VMTemplatePoolDao; import com.cloud.utils.component.ComponentContext; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.fsm.NoTransitionException; public class TemplateObject implements TemplateInfo { private static final Logger s_logger = Logger.getLogger(TemplateObject.class); private VMTemplateVO imageVO; private DataStore dataStore; private String url; @Inject VMTemplateDao imageDao; @Inject ObjectInDataStoreManager objectInStoreMgr; @Inject VMTemplatePoolDao templatePoolDao; @Inject TemplateDataStoreDao templateStoreDao; public TemplateObject() { } protected void configure(VMTemplateVO template, DataStore dataStore) { this.imageVO = template; this.dataStore = dataStore; } public static TemplateObject getTemplate(VMTemplateVO vo, DataStore store) { TemplateObject to = ComponentContext.inject(TemplateObject.class); to.configure(vo, store); return to; } public void setSize(Long size) { this.imageVO.setSize(size); } public VMTemplateVO getImage() { return this.imageVO; } @Override public DataStore getDataStore() { return this.dataStore; } @Override public String getUniqueName() { return this.imageVO.getUniqueName(); } @Override public long getId() { return this.imageVO.getId(); } @Override public String getUuid() { return 
this.imageVO.getUuid(); } @Override public String getUri() { if ( url != null ){ return url; } VMTemplateVO image = imageDao.findById(this.imageVO.getId()); return image.getUrl(); } @Override public Long getSize() { if (this.dataStore == null) { return this.imageVO.getSize(); } /* * * // If the template that was passed into this allocator is not * installed in the storage pool, // add 3 * (template size on secondary * storage) to the running total VMTemplateHostVO templateHostVO = * _storageMgr.findVmTemplateHost(templateForVmCreation.getId(), null); * * if (templateHostVO == null) { VMTemplateSwiftVO templateSwiftVO = * _swiftMgr.findByTmpltId(templateForVmCreation.getId()); if * (templateSwiftVO != null) { long templateSize = * templateSwiftVO.getPhysicalSize(); if (templateSize == 0) { * templateSize = templateSwiftVO.getSize(); } totalAllocatedSize += * (templateSize + _extraBytesPerVolume); } } else { long templateSize = * templateHostVO.getPhysicalSize(); if ( templateSize == 0 ){ * templateSize = templateHostVO.getSize(); } totalAllocatedSize += * (templateSize + _extraBytesPerVolume); } */ VMTemplateVO image = imageDao.findById(this.imageVO.getId()); return image.getSize(); } @Override public DataObjectType getType() { return DataObjectType.TEMPLATE; } @Override public ImageFormat getFormat() { return this.imageVO.getFormat(); } @Override public void processEvent(Event event) { try { objectInStoreMgr.update(this, event); } catch (NoTransitionException e) { throw new CloudRuntimeException("Failed to update state", e); } catch (ConcurrentOperationException e) { throw new CloudRuntimeException("Failed to update state", e); } finally { // in case of OperationFailed, expunge the entry if (event == ObjectInDataStoreStateMachine.Event.OperationFailed) { objectInStoreMgr.deleteIfNotReady(this); } } } @Override public void processEvent(ObjectInDataStoreStateMachine.Event event, Answer answer) { try { if (this.getDataStore().getRole() == DataStoreRole.Primary) { if (answer instanceof CopyCmdAnswer) { CopyCmdAnswer cpyAnswer = (CopyCmdAnswer) answer; TemplateObjectTO newTemplate = (TemplateObjectTO) cpyAnswer.getNewData(); VMTemplateStoragePoolVO templatePoolRef = templatePoolDao.findByPoolTemplate(this.getDataStore() .getId(), this.getId()); templatePoolRef.setDownloadPercent(100); templatePoolRef.setDownloadState(Status.DOWNLOADED); templatePoolRef.setLocalDownloadPath(newTemplate.getPath()); templatePoolRef.setInstallPath(newTemplate.getPath()); templatePoolDao.update(templatePoolRef.getId(), templatePoolRef); } } else if (this.getDataStore().getRole() == DataStoreRole.Image || this.getDataStore().getRole() == DataStoreRole.ImageCache) { if (answer instanceof CopyCmdAnswer) { CopyCmdAnswer cpyAnswer = (CopyCmdAnswer) answer; TemplateObjectTO newTemplate = (TemplateObjectTO) cpyAnswer.getNewData(); TemplateDataStoreVO templateStoreRef = this.templateStoreDao.findByStoreTemplate(this .getDataStore().getId(), this.getId()); templateStoreRef.setInstallPath(newTemplate.getPath()); templateStoreRef.setDownloadPercent(100); templateStoreRef.setDownloadState(Status.DOWNLOADED); templateStoreRef.setSize(newTemplate.getSize()); if (newTemplate.getPhysicalSize() != null) { templateStoreRef.setPhysicalSize(newTemplate.getPhysicalSize()); } templateStoreDao.update(templateStoreRef.getId(), templateStoreRef); if (this.getDataStore().getRole() == DataStoreRole.Image) { VMTemplateVO templateVO = this.imageDao.findById(this.getId()); if (newTemplate.getFormat() != null) { 
templateVO.setFormat(newTemplate.getFormat()); } if (newTemplate.getName() != null ){ // For template created from snapshot, template name is determine by resource code. templateVO.setUniqueName(newTemplate.getName()); } templateVO.setSize(newTemplate.getSize()); this.imageDao.update(templateVO.getId(), templateVO); } } } objectInStoreMgr.update(this, event); } catch (NoTransitionException e) { s_logger.debug("failed to update state", e); throw new CloudRuntimeException("Failed to update state" + e.toString()); } catch (Exception ex) { s_logger.debug("failed to process event and answer", ex); objectInStoreMgr.delete(this); throw new CloudRuntimeException("Failed to process event", ex); } finally { // in case of OperationFailed, expunge the entry if (event == ObjectInDataStoreStateMachine.Event.OperationFailed) { objectInStoreMgr.deleteIfNotReady(this); } } } @Override public void incRefCount() { if (this.dataStore == null) { return; } if (this.dataStore.getRole() == DataStoreRole.Image || this.dataStore.getRole() == DataStoreRole.ImageCache) { TemplateDataStoreVO store = templateStoreDao.findByStoreTemplate(dataStore.getId(), this.getId()); store.incrRefCnt(); store.setLastUpdated(new Date()); templateStoreDao.update(store.getId(), store); } } @Override public void decRefCount() { if (this.dataStore == null) { return; } if (this.dataStore.getRole() == DataStoreRole.Image || this.dataStore.getRole() == DataStoreRole.ImageCache) { TemplateDataStoreVO store = templateStoreDao.findByStoreTemplate(dataStore.getId(), this.getId()); store.decrRefCnt(); store.setLastUpdated(new Date()); templateStoreDao.update(store.getId(), store); } } @Override public Long getRefCount() { if (this.dataStore == null) { return null; } if (this.dataStore.getRole() == DataStoreRole.Image || this.dataStore.getRole() == DataStoreRole.ImageCache) { TemplateDataStoreVO store = templateStoreDao.findByStoreTemplate(dataStore.getId(), this.getId()); return store.getRefCnt(); } return null; } @Override public DataTO getTO() { DataTO to = null; if (this.dataStore == null) { to = new TemplateObjectTO(this); } else { to = this.dataStore.getDriver().getTO(this); if (to == null) { to = new TemplateObjectTO(this); } } return to; } @Override public String getInstallPath() { if (this.dataStore == null) { return null; } DataObjectInStore obj = objectInStoreMgr.findObject(this, this.dataStore); return obj.getInstallPath(); } @Override public long getAccountId() { return this.imageVO.getAccountId(); } @Override public boolean isFeatured() { return this.imageVO.isFeatured(); } @Override public boolean isPublicTemplate() { return this.imageVO.isPublicTemplate(); } @Override public boolean isExtractable() { return this.imageVO.isExtractable(); } @Override public String getName() { return this.imageVO.getName(); } @Override public boolean isRequiresHvm() { return this.imageVO.isRequiresHvm(); } @Override public String getDisplayText() { return this.imageVO.getDisplayText(); } @Override public boolean getEnablePassword() { return this.imageVO.getEnablePassword(); } @Override public boolean getEnableSshKey() { return this.imageVO.getEnableSshKey(); } @Override public boolean isCrossZones() { return this.imageVO.isCrossZones(); } @Override public Date getCreated() { return this.imageVO.getCreated(); } @Override public long getGuestOSId() { return this.imageVO.getGuestOSId(); } @Override public boolean isBootable() { return this.imageVO.isBootable(); } @Override public TemplateType getTemplateType() { return this.imageVO.getTemplateType(); } 
@Override public HypervisorType getHypervisorType() { return this.imageVO.getHypervisorType(); } @Override public int getBits() { return this.imageVO.getBits(); } @Override public String getUrl() { if (url != null ){ return url; } return this.imageVO.getUrl(); } public void setUrl(String url){ this.url = url; } @Override public String getChecksum() { return this.imageVO.getChecksum(); } @Override public Long getSourceTemplateId() { return this.imageVO.getSourceTemplateId(); } @Override public String getTemplateTag() { return this.imageVO.getTemplateTag(); } @Override public Map getDetails() { return this.imageVO.getDetails(); } @Override public Boolean isDynamicallyScalable() { return Boolean.FALSE; } @Override public long getDomainId() { return this.imageVO.getDomainId(); } @Override public boolean delete() { if (dataStore != null) { return dataStore.delete(this); } return true; } }
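/*
 * Illustrative sketch, not part of the CloudStack sources above: incRefCount(),
 * decRefCount() and getRefCount() above keep a per-store usage counter plus a
 * last-updated timestamp so that a cached template copy is only reclaimed once
 * nothing references it. The standalone class below mirrors that pattern with
 * plain Java so the idea can be exercised without a database; every name in it
 * is made up for the example.
 */
import java.util.Date;
import java.util.concurrent.atomic.AtomicLong;

class CacheEntryRefCounter {
    private final AtomicLong refCount = new AtomicLong();
    private volatile Date lastUpdated = new Date();

    /** A consumer starts using the cached copy. */
    long incRef() {
        lastUpdated = new Date();
        return refCount.incrementAndGet();
    }

    /** A consumer is done with the cached copy. */
    long decRef() {
        lastUpdated = new Date();
        return refCount.decrementAndGet();
    }

    /** A reaper would evict the entry only when this is 0 and the entry is stale. */
    long getRef() {
        return refCount.get();
    }

    Date getLastUpdated() {
        return lastUpdated;
    }

    public static void main(String[] args) {
        CacheEntryRefCounter entry = new CacheEntryRefCounter();
        entry.incRef();   // e.g. a copy command starts reading the cached template
        System.out.println("in use: " + entry.getRef());
        entry.decRef();   // the command finished
        System.out.println("in use: " + entry.getRef() + ", last touched " + entry.getLastUpdated());
    }
}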
/** * Copyright 2016 https://github.com/diego-torres * * Licensed under the MIT License (MIT). * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sub-license, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.nowgroup.ngMantisExtractor.mbt.dto; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; import org.hibernate.annotations.NotFound; import org.hibernate.annotations.NotFoundAction; /** * @author https://github.com/diego-torres * */ @Entity @Table(name = "mantis_bug_table") public class Bug implements Serializable { private static final long serialVersionUID = 1L; private Integer id; private Category category; private Project project; private BugText bugText; private User reporter; private User handler; private Integer priority; private Integer severity; private String summary; private Integer submitted; private Integer status; private Integer resolution; private List<Tag> tags = new ArrayList<>(); private List<CustomFieldString> customFields = new ArrayList<>(); /** * @return the id */ @Id @GeneratedValue(strategy = GenerationType.IDENTITY) public Integer getId() { return id; } /** * @param id * the id to set */ public void setId(Integer id) { this.id = id; } /** * @return the category */ @ManyToOne @JoinColumn(name = "category_id") public Category getCategory() { return category; } /** * @param category * the category to set */ public void setCategory(Category category) { this.category = category; } /** * @return the project */ @ManyToOne @JoinColumn(name = "project_id") public Project getProject() { return project; } /** * @param project * the project to set */ public void setProject(Project project) { this.project = project; } /** * @return the bugText */ @ManyToOne @JoinColumn(name = "bug_text_id") public BugText getBugText() { return bugText; } /** * @param bugText * the bugText to set */ public void setBugText(BugText bugText) { this.bugText = bugText; } /** * @return the reporter */ @ManyToOne @JoinColumn(name = "reporter_id") public User getReporter() { return reporter; } /** * @param reporter * the reporter to set */ public void setReporter(User reporter) { this.reporter = reporter; } /** * 
@return the handler */ @ManyToOne(optional = true) @JoinColumn(name = "handler_id") @NotFound(action = NotFoundAction.IGNORE) public User getHandler() { return handler; } /** * @param handler * the handler to set */ public void setHandler(User handler) { this.handler = handler; } /** * @return the priority */ @Column public Integer getPriority() { return priority; } /** * @param priority * the priority to set */ public void setPriority(Integer priority) { this.priority = priority; } /** * @return the severity */ @Column public Integer getSeverity() { return severity; } /** * @param severity * the severity to set */ public void setSeverity(Integer severity) { this.severity = severity; } /** * @return the summary */ @Column public String getSummary() { return summary; } /** * @param summary * the summary to set */ public void setSummary(String summary) { this.summary = summary; } /** * @return the submitted */ @Column(name = "date_submitted") public Integer getSubmitted() { return submitted; } /** * @param submitted * the submitted to set */ public void setSubmitted(Integer submitted) { this.submitted = submitted; } /** * @return the status */ @Column public Integer getStatus() { return status; } /** * @param status * the status to set */ public void setStatus(Integer status) { this.status = status; } /** * @return the resolution */ @Column public Integer getResolution() { return resolution; } /** * @param resolution * the resolution to set */ public void setResolution(Integer resolution) { this.resolution = resolution; } /** * @return the tags */ @ManyToMany @JoinTable(name = "mantis_bug_tag_table", joinColumns = @JoinColumn(name = "bug_id", referencedColumnName = "id"), inverseJoinColumns = @JoinColumn(name = "tag_id", referencedColumnName = "id")) public List<Tag> getTags() { return tags; } /** * @param tags * the tags to set */ public void setTags(List<Tag> tags) { this.tags = tags; } /** * @return the customFields */ @OneToMany(mappedBy = "key.bug", fetch = FetchType.EAGER) @NotFound(action = NotFoundAction.IGNORE) public List<CustomFieldString> getCustomFields() { return customFields; } /** * @param customFields * the customFields to set */ public void setCustomFields(List<CustomFieldString> customFields) { this.customFields = customFields; } /* * (non-Javadoc) * * @see java.lang.Object#toString() */ @Override public String toString() { return "Bug [id=" + id + ", category=" + category + ", project=" + project + ", bugText=" + bugText + ", reporter=" + reporter + ", handler=" + handler + ", priority=" + priority + ", severity=" + severity + ", summary=" + summary + ", submitted=" + submitted + ", status=" + status + ", resolution=" + resolution + ", tags=" + tags + ", customFields=" + customFields + "]"; } }
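/*
 * Illustrative sketch, not part of the mantis extractor sources above: Bug is a
 * plain JPA/Hibernate entity, so outside of a persistence context it behaves
 * like any other bean. The snippet only calls setters defined above; the values
 * are invented and no database is touched.
 */
import com.nowgroup.ngMantisExtractor.mbt.dto.Bug;

public class BugEntitySketch {
    public static void main(String[] args) {
        Bug bug = new Bug();
        bug.setId(101);
        bug.setSummary("Login page throws NPE on empty password");
        bug.setPriority(30);
        bug.setSeverity(60);
        bug.setStatus(10);
        bug.setResolution(10);
        // date_submitted is mapped as an Integer column above, so a raw epoch-style value is used here.
        bug.setSubmitted(1468022400);
        // toString() is overridden above and prints every mapped field, including the
        // still-empty tag and custom-field collections.
        System.out.println(bug);
    }
}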
/* * File: CognitiveModelLiteState.java * Authors: Justin Basilico * Company: Sandia National Laboratories * Project: Cognitive Framework Lite * * Copyright March 15, 2006, Sandia Corporation. Under the terms of Contract * DE-AC04-94AL85000, there is a non-exclusive license for use of this work by * or on behalf of the U.S. Government. Export of this program may require a * license from the United States Government. See CopyrightHistory.txt for * complete details. * * */ package gov.sandia.cognition.framework.lite; import gov.sandia.cognition.framework.CognitiveModelInput; import gov.sandia.cognition.framework.CognitiveModelState; import gov.sandia.cognition.framework.CognitiveModuleState; import gov.sandia.cognition.util.AbstractCloneableSerializable; import java.util.Arrays; import java.util.Collection; /** * The CognitiveModelLiteState class implements a CognitiveModelState * object for the CognitiveModelLite. * * @author Justin Basilico * @since 1.0 */ public class CognitiveModelLiteState extends AbstractCloneableSerializable implements CognitiveModelState { /** A flag indicating if the state has been initialized or not. */ private boolean initialized = false; /** The input to the model. */ private CognitiveModelInput input = null; /** The state of the Cogxels. */ private CogxelStateLite cogxels = null; /** The states of the modules. */ private CognitiveModuleState[] moduleStatesArray = null; /** * Creates a new instance of CognitiveModelState. * * @param numModules The number of modules in the model. */ public CognitiveModelLiteState( int numModules) { super(); this.setInitialized(false); this.setInput(null); this.setCogxels(new CogxelStateLite()); this.setModuleStatesArray(new CognitiveModuleState[numModules]); } /** * Creates a new instance of CognitiveModelState. * * @param numModules The number of modules in the model. * @param expectedMaxIdentifier The expected maximum identifier. */ public CognitiveModelLiteState( int numModules, int expectedMaxIdentifier) { super(); this.setInitialized(false); this.setInput(null); this.setCogxels(new CogxelStateLite(expectedMaxIdentifier)); this.setModuleStatesArray(new CognitiveModuleState[numModules]); } /** * Creates a new copy of a CognitiveModelLiteState. * * @param other The CognitiveModelLiteState to copy. */ public CognitiveModelLiteState( CognitiveModelLiteState other) { super(); this.setInitialized(other.initialized); this.setInput(other.input); this.setCogxels(other.cogxels.clone()); int numModules = other.moduleStatesArray.length; CognitiveModuleState[] moduleStates = new CognitiveModuleState[numModules]; for (int i = 0; i < numModules; i++) { if ( other.moduleStatesArray[i] != null ) { moduleStates[i] = other.moduleStatesArray[i].clone(); } } this.setModuleStatesArray(moduleStates); } /** * {@inheritDoc} * * @return {@inheritDoc} */ @Override public CognitiveModelLiteState clone() { final CognitiveModelLiteState clone = (CognitiveModelLiteState) super.clone(); int numModules = this.moduleStatesArray.length; clone.cogxels = this.cogxels.clone(); clone.moduleStatesArray = new CognitiveModuleState[numModules]; for (int i = 0; i < numModules; i++) { if (this.moduleStatesArray[i] != null) { clone.moduleStatesArray[i] = this.moduleStatesArray[i].clone(); } } return clone; } /** * Clears this CognitiveModelLite state, resetting it to being * uninitialized. */ public void clear() { // Clear out all of the state data. 
this.setInitialized(false); this.setInput(null); this.cogxels.clear(); Arrays.fill(this.moduleStatesArray, null); } /** * {@inheritDoc} * * @return {@inheritDoc} */ public CognitiveModelInput getInput() { return this.input; } /** * {@inheritDoc} * * @return {@inheritDoc} */ public CogxelStateLite getCogxels() { return this.cogxels; } /** * {@inheritDoc} * * @return {@inheritDoc} */ public Collection<CognitiveModuleState> getModuleStates() { return Arrays.asList(this.getModuleStatesArray()); } /** * Returns the number of module states in this model state. This includes * the null states. It should be equal to the number of states in the * model from which it was created. * * @return The number of module states in this model state. */ public int getNumModuleStates() { return this.moduleStatesArray.length; } /** * Returns true if the state has been initialized. * * @return True if the state has been initialized. */ public boolean isInitialized() { return this.initialized; } /** * Sets whether or not this state has been initialized. * * @param initialized True if the state has been initialized. */ public void setInitialized( boolean initialized) { this.initialized = initialized; } /** * {@inheritDoc} * * @param input {@inheritDoc} */ public void setInput( CognitiveModelInput input) { this.input = input; } /** * Setter for cogxels * * @param cogxels The state of the Cogxels. */ protected void setCogxels( CogxelStateLite cogxels) { if ( cogxels == null ) { // Error: The cogxels cannot be null. throw new NullPointerException("The cogxels cannot be null."); } this.cogxels = cogxels; } /** * Gets the module states array. * * @return The array containing module states. */ public CognitiveModuleState[] getModuleStatesArray() { return this.moduleStatesArray; } /** * Sets the array of module states. * * @param moduleStatesArray The new array of module states. */ protected void setModuleStatesArray( CognitiveModuleState[] moduleStatesArray) { if ( moduleStatesArray == null ) { // Error: The module states cannot be null. throw new NullPointerException( "The moduleStates cannot be null."); } this.moduleStatesArray = moduleStatesArray; } }
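/*
 * Illustrative sketch, not part of the Cognitive Framework Lite sources above:
 * it exercises only the constructors and accessors defined in
 * CognitiveModelLiteState to show the expected lifecycle of a state object --
 * created uninitialized and sized to the number of modules, copyable, and
 * resettable with clear(). The module count of 3 is arbitrary.
 */
import gov.sandia.cognition.framework.lite.CognitiveModelLiteState;

public class ModelStateSketch {
    public static void main(String[] args) {
        // A state sized for a model with three modules; all module slots start out null.
        CognitiveModelLiteState state = new CognitiveModelLiteState(3);
        System.out.println("initialized: " + state.isInitialized());        // false until the model initializes it
        System.out.println("module states: " + state.getNumModuleStates()); // 3, counting the null entries

        // The copy constructor copies the cogxel state and the per-module states.
        CognitiveModelLiteState copy = new CognitiveModelLiteState(state);

        // clear() drops the input, cogxels and module states and marks the state uninitialized again.
        copy.clear();
        System.out.println("after clear, initialized: " + copy.isInitialized());
    }
}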
/* * Copyright (c) 2011, University of Konstanz, Distributed Systems Group All rights reserved. * <p> * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * Redistributions of source code must retain the * above copyright notice, this list of conditions and the following disclaimer. * Redistributions * in binary form must reproduce the above copyright notice, this list of conditions and the * following disclaimer in the documentation and/or other materials provided with the distribution. * * Neither the name of the University of Konstanz nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written permission. * <p> * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.sirix.page; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.HashMap; import java.util.Map; import org.checkerframework.checker.nullness.qual.NonNull; import org.sirix.api.PageReadOnlyTrx; import org.sirix.page.interfaces.Page; /** * All Page types. */ public enum PageKind { /** * {@link UnorderedKeyValuePage}. */ RECORDPAGE((byte) 1, UnorderedKeyValuePage.class) { @Override @NonNull Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, final SerializationType type) throws IOException { return new UnorderedKeyValuePage(source, pageReadTrx); } @Override void serializePage(final DataOutput sink, final Page page, final SerializationType type) throws IOException { sink.writeByte(RECORDPAGE.id); page.serialize(sink, type); } @Override public @NonNull Page getInstance(final Page nodePage, final PageReadOnlyTrx pageReadTrx) { assert nodePage instanceof UnorderedKeyValuePage; final UnorderedKeyValuePage page = (UnorderedKeyValuePage) nodePage; return new UnorderedKeyValuePage(page.getPageKey(), page.getIndexType(), pageReadTrx); } }, /** * {@link NamePage}. */ NAMEPAGE((byte) 2, NamePage.class) { @Override @NonNull Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, final SerializationType type) throws IOException { return new NamePage(source, type); } @Override void serializePage(final DataOutput sink, final Page page, final SerializationType type) throws IOException { sink.writeByte(NAMEPAGE.id); page.serialize(sink, type); } @Override public @NonNull Page getInstance(final Page page, final PageReadOnlyTrx pageReadTrx) { return new NamePage(); } }, /** * {@link UberPage}. 
*/ UBERPAGE((byte) 3, UberPage.class) { @Override @NonNull Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, final SerializationType type) throws IOException { return new UberPage(source, type); } @Override void serializePage(final DataOutput sink, final Page page, final SerializationType type) throws IOException { sink.writeByte(UBERPAGE.id); page.serialize(sink, type); } @Override public @NonNull Page getInstance(final Page page, final PageReadOnlyTrx pageReadTrx) { return new UberPage(); } }, /** * {@link IndirectPage}. */ INDIRECTPAGE((byte) 4, IndirectPage.class) { @Override @NonNull Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, final SerializationType type) { return new IndirectPage(source, type); } @Override void serializePage(final DataOutput sink, final Page page, final SerializationType type) throws IOException { sink.writeByte(INDIRECTPAGE.id); page.serialize(sink, type); } @Override public @NonNull Page getInstance(final Page page, final PageReadOnlyTrx pageReadTrx) { return new IndirectPage(); } }, /** * {@link RevisionRootPage}. */ REVISIONROOTPAGE((byte) 5, RevisionRootPage.class) { @Override @NonNull Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, final SerializationType type) throws IOException { return new RevisionRootPage(source, type); } @Override void serializePage(final DataOutput sink, final Page page, final SerializationType type) throws IOException { sink.writeByte(REVISIONROOTPAGE.id); page.serialize(sink, type); } @Override public @NonNull Page getInstance(final Page page, final PageReadOnlyTrx pageReadTrx) { return new RevisionRootPage(); } }, /** * {@link PathSummaryPage}. */ PATHSUMMARYPAGE((byte) 6, PathSummaryPage.class) { @Override @NonNull Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, final @NonNull SerializationType type) throws IOException { return new PathSummaryPage(source, type); } @Override void serializePage(final DataOutput sink, final Page page, final @NonNull SerializationType type) throws IOException { sink.writeByte(PATHSUMMARYPAGE.id); page.serialize(sink, type); } @Override public @NonNull Page getInstance(final Page page, final PageReadOnlyTrx pageReadTrx) { return new PathSummaryPage(); } }, /** * {@link HashedKeyValuePage}. */ HASHED_KEY_VALUE_PAGE((byte) 7, HashedKeyValuePage.class) { @Override @NonNull Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, @NonNull final SerializationType type) throws IOException { return new HashedKeyValuePage(source, pageReadTrx); } @Override void serializePage(final DataOutput sink, final Page page, final @NonNull SerializationType type) throws IOException { sink.writeByte(HASHED_KEY_VALUE_PAGE.id); page.serialize(sink, type); } @Override public @NonNull Page getInstance(final Page keyValuePage, final PageReadOnlyTrx pageReadTrx) { assert keyValuePage instanceof HashedKeyValuePage; final HashedKeyValuePage page = (HashedKeyValuePage) keyValuePage; return new UnorderedKeyValuePage(page.getPageKey(), page.getIndexType(), pageReadTrx); } }, /** * {@link CASPage}. 
*/ CASPAGE((byte) 8, CASPage.class) { @Override @NonNull Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, final SerializationType type) throws IOException { return new CASPage(source, type); } @Override void serializePage(final DataOutput sink, final Page page, final SerializationType type) throws IOException { sink.writeByte(CASPAGE.id); page.serialize(sink, type); } @Override public @NonNull Page getInstance(final Page page, final PageReadOnlyTrx pageReadTrx) { return new CASPage(); } }, /** * {@link OverflowPage}. */ OVERFLOWPAGE((byte) 9, OverflowPage.class) { @Override @NonNull Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, final SerializationType type) throws IOException { return new OverflowPage(source); } @Override void serializePage(final DataOutput sink, final Page page, @NonNull SerializationType type) throws IOException { sink.writeByte(OVERFLOWPAGE.id); page.serialize(sink, type); } @Override public @NonNull Page getInstance(final Page page, final PageReadOnlyTrx pageReadTrx) { return new OverflowPage(); } }, /** * {@link PathPage}. */ PATHPAGE((byte) 10, PathPage.class) { @Override void serializePage(DataOutput sink, @NonNull Page page, @NonNull SerializationType type) throws IOException { sink.writeByte(PATHPAGE.id); page.serialize(sink, type); } @Override Page deserializePage(DataInput source, @NonNull PageReadOnlyTrx pageReadTrx, @NonNull SerializationType type) throws IOException { return new PathPage(source, type); } @Override public @NonNull Page getInstance(Page page, @NonNull PageReadOnlyTrx pageReadTrx) { return new PathPage(); } }, /** * {@link DeweyIDPage}. */ DEWEYIDPAGE((byte) 11, DeweyIDPage.class) { @Override void serializePage(DataOutput sink, @NonNull Page page, @NonNull SerializationType type) throws IOException { sink.writeByte(DEWEYIDPAGE.id); page.serialize(sink, type); } @Override Page deserializePage(DataInput source, @NonNull PageReadOnlyTrx pageReadTrx, @NonNull SerializationType type) throws IOException { return new DeweyIDPage(source, type); } @Override public @NonNull Page getInstance(Page page, @NonNull PageReadOnlyTrx pageReadTrx) { return new DeweyIDPage(); } }; /** * Mapping of keys -> page */ private static final Map<Byte, PageKind> INSTANCEFORID = new HashMap<>(); /** * Mapping of class -> page. */ private static final Map<Class<? extends Page>, PageKind> INSTANCEFORCLASS = new HashMap<>(); static { for (final PageKind page : values()) { INSTANCEFORID.put(page.id, page); INSTANCEFORCLASS.put(page.clazz, page); } } /** * Unique ID. */ private final byte id; /** * Class. */ private final Class<? extends Page> clazz; /** * Constructor. * * @param id unique identifier * @param clazz class */ PageKind(final byte id, final Class<? extends Page> clazz) { this.id = id; this.clazz = clazz; } /** * Get the unique page ID. * * @return unique page ID */ public byte getID() { return id; } /** * Serialize page. * * @param sink {@link DataOutput} instance * @param page {@link Page} implementation */ abstract void serializePage(final DataOutput sink, final Page page, final SerializationType type) throws IOException; /** * Deserialize page. 
* * @param source {@link DataInput} instance * @param pageReadTrx implementing {@link PageReadOnlyTrx} instance * @return page instance implementing the {@link Page} interface */ abstract Page deserializePage(final DataInput source, final PageReadOnlyTrx pageReadTrx, final SerializationType type) throws IOException; /** * Public method to get the related page based on the identifier. * * @param id the identifier for the page * @return the related page */ public static PageKind getKind(final byte id) { final PageKind page = INSTANCEFORID.get(id); if (page == null) { throw new IllegalStateException(); } return page; } /** * Public method to get the related page based on the class. * * @param clazz the class for the page * @return the related page */ public static @NonNull PageKind getKind(final Class<? extends Page> clazz) { final PageKind page = INSTANCEFORCLASS.get(clazz); if (page == null) { throw new IllegalStateException(); } return page; } /** * New page instance. * * @param page instance of class which implements {@link Page} * @param pageReadTrx instance of class which implements {@link PageReadOnlyTrx} * @return new page instance */ public abstract @NonNull Page getInstance(final Page page, final PageReadOnlyTrx pageReadTrx); }
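/*
 * Illustrative sketch, not part of the sirix sources above: PageKind maintains
 * two static lookup maps, so a page type can be resolved either from the tag
 * byte written in front of a serialized page or from the page class itself.
 * Only the public getKind(...) and getID() members defined above are used;
 * the chosen page type is arbitrary.
 */
import org.sirix.page.PageKind;
import org.sirix.page.RevisionRootPage;

public class PageKindSketch {
    public static void main(String[] args) {
        // Resolve by class, e.g. when serializing: the kind supplies the tag byte to write.
        PageKind byClass = PageKind.getKind(RevisionRootPage.class);
        System.out.println(byClass + " is written with tag " + byClass.getID()); // REVISIONROOTPAGE, tag 5

        // Resolve by tag byte, e.g. when reading the next page back from storage.
        PageKind byId = PageKind.getKind((byte) 5);
        System.out.println("tag 5 maps back to " + byId);

        // Unknown tags fail fast.
        try {
            PageKind.getKind((byte) 99);
        } catch (IllegalStateException expected) {
            System.out.println("tag 99 is not a known page kind");
        }
    }
}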
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.videoeditor.widgets; import com.android.videoeditor.R; import android.content.Context; import android.graphics.Canvas; import android.graphics.drawable.Drawable; import android.util.AttributeSet; import android.view.MotionEvent; import android.view.View; /** * The zoom control */ public class ZoomControl extends View { private static final double MAX_ANGLE = Math.PI / 3; private static final double THUMB_RADIUS_CONTAINER_SIZE_RATIO = 0.432; private static final double THUMB_INTERNAL_RADIUS_CONTAINER_SIZE_RATIO = 0.24; // Instance variables private final Drawable mThumb; private double mRadius; private double mInternalRadius; private int mMaxProgress, mProgress; private OnZoomChangeListener mListener; private int mThumbX, mThumbY; private double mInterval; /** * The zoom change listener */ public interface OnZoomChangeListener { /** * The progress value has changed * * @param progress The progress value * @param fromUser true if the user is changing the zoom */ public void onProgressChanged(int progress, boolean fromUser); } public ZoomControl(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); // Set the default maximum progress mMaxProgress = 100; computeInterval(); // Load the thumb selector mThumb = context.getResources().getDrawable(R.drawable.zoom_thumb_selector); } @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { super.onLayout(changed, left, top, right, bottom); double width = right - left; mRadius = width * THUMB_RADIUS_CONTAINER_SIZE_RATIO; mInternalRadius = width * THUMB_INTERNAL_RADIUS_CONTAINER_SIZE_RATIO; } public ZoomControl(Context context, AttributeSet attrs) { this(context, attrs, 0); } public ZoomControl(Context context) { this(context, null, 0); } @Override public void refreshDrawableState() { mThumb.setState(isPressed() ? PRESSED_WINDOW_FOCUSED_STATE_SET : ENABLED_STATE_SET); invalidate(); } /** * @param max The maximum value */ public void setMax(int max) { mMaxProgress = max; computeInterval(); } /** * @param progress The progress */ public void setProgress(int progress) { mProgress = progress; progressToPosition(); invalidate(); } /** * @param listener The listener */ public void setOnZoomChangeListener(OnZoomChangeListener listener) { mListener = listener; } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); if (mThumbX == 0 && mThumbY == 0) { progressToPosition(); } final int halfWidth = mThumb.getIntrinsicWidth() / 2; final int halfHeight = mThumb.getIntrinsicHeight() / 2; mThumb.setBounds(mThumbX - halfWidth, mThumbY - halfHeight, mThumbX + halfWidth, mThumbY + halfHeight); mThumb.setAlpha(isEnabled() ? 
255 : 100); mThumb.draw(canvas); } @Override public boolean onTouchEvent(MotionEvent ev) { super.onTouchEvent(ev); switch (ev.getAction()) { case MotionEvent.ACTION_DOWN: { if (isEnabled()) { getParent().requestDisallowInterceptTouchEvent(true); } break; } case MotionEvent.ACTION_MOVE: { if (isEnabled()) { final float x = ev.getX() - (getWidth() / 2); final float y = -(ev.getY() - (getHeight() / 2)); final double alpha = Math.atan((double)y / (double)x); if (!checkHit(x, y, alpha)) { return true; } final int progress; if (x >= 0 && y >= 0) { mThumbX = (int)((mRadius * Math.cos(alpha)) + (getWidth() / 2)); mThumbY = (int)((getHeight() / 2) - (mRadius * Math.sin(alpha))); progress = (int)((mMaxProgress / 2) - (alpha / mInterval)); } else if (x >= 0 && y <= 0) { mThumbX = (int)((mRadius * Math.cos(alpha)) + (getWidth() / 2)); mThumbY = (int)((getHeight() / 2) - (mRadius * Math.sin(alpha))); progress = (int)((mMaxProgress / 2) - (alpha / mInterval)); } else if (x <= 0 && y >= 0) { mThumbX = (int)((getWidth() / 2) - (mRadius * Math.cos(alpha))); mThumbY = (int)((getHeight() / 2) + (mRadius * Math.sin(alpha))); progress = -(int)(((alpha + MAX_ANGLE) / mInterval)); } else { mThumbX = (int)((getWidth() / 2) - (mRadius * Math.cos(alpha))); mThumbY = (int)((getHeight() / 2) + (mRadius * Math.sin(alpha))); progress = (int)(mMaxProgress - ((alpha - MAX_ANGLE) / mInterval)); } invalidate(); if (mListener != null) { if (progress != mProgress) { mProgress = progress; mListener.onProgressChanged(mProgress, true); } } } break; } case MotionEvent.ACTION_CANCEL: case MotionEvent.ACTION_UP: { break; } default: { break; } } return true; } /** * Check if the user is touching the correct area * * @param x The horizontal coordinate * @param y The vertical coordinate * @param alpha The angle * @return true if there is a hit in the allowed area */ private boolean checkHit(float x, float y, double alpha) { final double radius = Math.sqrt((x * x) + (y * y)); if (radius < mInternalRadius) { return false; } if (x >= 0) { return true; } else if (y >= 0) { if ((alpha >= -(Math.PI / 2)) && (alpha <= -MAX_ANGLE)) { return true; } } else { if ((alpha >= MAX_ANGLE) && (alpha <= (Math.PI / 2))) { return true; } } return false; } /** * Compute the position of the thumb based on the progress values */ private void progressToPosition() { if (getWidth() == 0) { // Layout is not yet complete return; } final double beta; if (mProgress <= mMaxProgress / 2) { beta = ((mMaxProgress / 2) - mProgress) * mInterval; } else { beta = ((mMaxProgress - mProgress) * mInterval) + Math.PI + MAX_ANGLE; } final double alpha; if (beta >= 0 && beta <= Math.PI / 2) { alpha = beta; mThumbX = (int)((mRadius * Math.cos(alpha)) + (getWidth() / 2)); mThumbY = (int)((getHeight() / 2) - (mRadius * Math.sin(alpha))); } else if (beta > Math.PI / 2 && beta < (Math.PI / 2) + MAX_ANGLE) { alpha = beta - Math.PI; mThumbX = (int)((getWidth() / 2) - (mRadius * Math.cos(alpha))); mThumbY = (int)((getHeight() / 2) + (mRadius * Math.sin(alpha))); } else if (beta <= 2 * Math.PI && beta > (3 * Math.PI) / 2) { alpha = beta - (2 * Math.PI); mThumbX = (int)((mRadius * Math.cos(alpha)) + (getWidth() / 2)); mThumbY = (int)((getHeight() / 2) - (mRadius * Math.sin(alpha))); } else { alpha = beta - Math.PI; mThumbX = (int)((getWidth() / 2) - (mRadius * Math.cos(alpha))); mThumbY = (int)((getHeight() / 2) + (mRadius * Math.sin(alpha))); } } /** * Compute the radians interval between progress values */ private void computeInterval() { mInterval = (Math.PI - MAX_ANGLE) / 
(mMaxProgress / 2); } }
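/*
 * Illustrative sketch, not part of the video editor sources above: it wires a
 * ZoomControl into a hypothetical Activity of the same application using only
 * the public methods defined above (setMax, setProgress,
 * setOnZoomChangeListener). The progress range and log tag are made up.
 */
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;

import com.android.videoeditor.widgets.ZoomControl;

public class ZoomControlSketchActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        ZoomControl zoom = new ZoomControl(this);
        zoom.setMax(100);      // progress values run from 0 to 100
        zoom.setProgress(50);  // start the thumb in the middle of the arc
        zoom.setOnZoomChangeListener(new ZoomControl.OnZoomChangeListener() {
            @Override
            public void onProgressChanged(int progress, boolean fromUser) {
                // fromUser is true only for changes driven by the touch handling above.
                Log.d("ZoomControlSketch", "zoom progress=" + progress + " fromUser=" + fromUser);
            }
        });
        setContentView(zoom);
    }
}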
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.tools; import static org.apache.cassandra.utils.ByteBufferUtil.hexToBytes; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.util.List; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.TimeUnit; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.db.*; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.config.Schema; import org.apache.cassandra.db.marshal.AbstractType; import org.apache.cassandra.db.marshal.BytesType; import org.apache.cassandra.db.marshal.CompositeType; import org.apache.cassandra.dht.IPartitioner; import org.apache.cassandra.io.sstable.SSTableWriter; import org.apache.cassandra.serializers.MarshalException; import org.apache.cassandra.utils.ByteBufferUtil; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonParser; import org.codehaus.jackson.JsonToken; import org.codehaus.jackson.map.MappingJsonFactory; import org.codehaus.jackson.type.TypeReference; /** * Create SSTables from JSON input */ public class SSTableImport { private static final String KEYSPACE_OPTION = "K"; private static final String COLUMN_FAMILY_OPTION = "c"; private static final String KEY_COUNT_OPTION = "n"; private static final String IS_SORTED_OPTION = "s"; private static final String OLD_SC_FORMAT_OPTION = "S"; private static final Options options = new Options(); private static CommandLine cmd; private Integer keyCountToImport; private final boolean isSorted; private final boolean oldSCFormat; private static final JsonFactory factory = new MappingJsonFactory().configure( JsonParser.Feature.INTERN_FIELD_NAMES, false); static { Option optKeyspace = new Option(KEYSPACE_OPTION, true, "Keyspace name."); optKeyspace.setRequired(true); options.addOption(optKeyspace); Option optColfamily = new Option(COLUMN_FAMILY_OPTION, true, "Column Family name."); optColfamily.setRequired(true); options.addOption(optColfamily); options.addOption(new Option(KEY_COUNT_OPTION, true, "Number of keys to import (Optional).")); options.addOption(new Option(IS_SORTED_OPTION, false, "Assume JSON file as already sorted (e.g. 
created by sstable2json tool) (Optional).")); options.addOption(new Option(OLD_SC_FORMAT_OPTION, false, "Assume JSON file use legacy super column format (Optional).")); } private static class JsonColumn<T> { private ByteBuffer name; private ByteBuffer value; private long timestamp; private String kind; // Expiring columns private int ttl; private int localExpirationTime; // Counter columns private long timestampOfLastDelete; public JsonColumn(T json, CFMetaData meta, boolean oldSCFormat, boolean isSubColumn) { if (json instanceof List) { AbstractType<?> comparator = oldSCFormat ? SuperColumns.getComparatorFor(meta, isSubColumn) : meta.comparator; List fields = (List<?>) json; assert fields.size() >= 3 : "Column definition should have at least 3"; name = stringAsType((String) fields.get(0), comparator); timestamp = (Long) fields.get(2); kind = ""; if (fields.size() > 3) { if (fields.get(3) instanceof Boolean) { // old format, reading this for backward compatibility sake if (fields.size() == 6) { kind = "e"; ttl = (Integer) fields.get(4); localExpirationTime = (Integer) fields.get(5); } else { kind = ((Boolean) fields.get(3)) ? "d" : ""; } } else { kind = (String) fields.get(3); if (isExpiring()) { ttl = (Integer) fields.get(4); localExpirationTime = (Integer) fields.get(5); } else if (isCounter()) { timestampOfLastDelete = (long) ((Integer) fields.get(4)); } else if (isRangeTombstone()) { localExpirationTime = (Integer) fields.get(4); } } } if (isDeleted()) { value = ByteBufferUtil.hexToBytes((String) fields.get(1)); } else if (isRangeTombstone()) { value = comparator.fromString((String)fields.get(1)); } else { value = stringAsType((String) fields.get(1), meta.getValueValidator(meta.getColumnDefinitionFromColumnName(name))); } } } public boolean isDeleted() { return kind.equals("d"); } public boolean isExpiring() { return kind.equals("e"); } public boolean isCounter() { return kind.equals("c"); } public boolean isRangeTombstone() { return kind.equals("t"); } public ByteBuffer getName() { return name.duplicate(); } public ByteBuffer getValue() { return value.duplicate(); } } public SSTableImport() { this(null, false, false); } public SSTableImport(boolean isSorted) { this(isSorted, false); } public SSTableImport(boolean isSorted, boolean oldSCFormat) { this(null, isSorted, oldSCFormat); } public SSTableImport(Integer keyCountToImport, boolean isSorted, boolean oldSCFormat) { this.keyCountToImport = keyCountToImport; this.isSorted = isSorted; this.oldSCFormat = oldSCFormat; } private void addToStandardCF(List<?> row, ColumnFamily cfamily) { addColumnsToCF(row, null, cfamily); } /** * Add columns to a column family. * * @param row the columns associated with a row * @param superName name of the super column if any * @param cfamily the column family to add columns to */ private void addColumnsToCF(List<?> row, ByteBuffer superName, ColumnFamily cfamily) { CFMetaData cfm = cfamily.metadata(); assert cfm != null; for (Object c : row) { JsonColumn col = new JsonColumn<List>((List) c, cfm, oldSCFormat, (superName != null)); ByteBuffer cname = superName == null ? 
col.getName() : CompositeType.build(superName, col.getName()); if (col.isExpiring()) { cfamily.addColumn(new ExpiringColumn(cname, col.getValue(), col.timestamp, col.ttl, col.localExpirationTime)); } else if (col.isCounter()) { cfamily.addColumn(new CounterColumn(cname, col.getValue(), col.timestamp, col.timestampOfLastDelete)); } else if (col.isDeleted()) { cfamily.addTombstone(cname, col.getValue(), col.timestamp); } else if (col.isRangeTombstone()) { ByteBuffer end = superName == null ? col.getValue() : CompositeType.build(superName, col.getValue()); cfamily.addAtom(new RangeTombstone(cname, end, col.timestamp, col.localExpirationTime)); } // cql3 row marker, see CASSANDRA-5852 else if (!cname.hasRemaining()) { cfamily.addColumn(ByteBuffer.wrap(new byte[3]), col.getValue(), col.timestamp); } else { cfamily.addColumn(cname, col.getValue(), col.timestamp); } } } private void parseMeta(Map<?, ?> map, ColumnFamily cf, ByteBuffer superColumnName) { // deletionInfo is the only metadata we store for now if (map.containsKey("deletionInfo")) { Map<?, ?> unparsedDeletionInfo = (Map<?, ?>) map.get("deletionInfo"); Number number = (Number) unparsedDeletionInfo.get("markedForDeleteAt"); long markedForDeleteAt = number instanceof Long ? (Long) number : number.longValue(); int localDeletionTime = (Integer) unparsedDeletionInfo.get("localDeletionTime"); if (superColumnName == null) cf.setDeletionInfo(new DeletionInfo(markedForDeleteAt, localDeletionTime)); else cf.addAtom(new RangeTombstone(SuperColumns.startOf(superColumnName), SuperColumns.endOf(superColumnName), markedForDeleteAt, localDeletionTime)); } } /** * Add super columns to a column family. * * @param row the super columns associated with a row * @param cfamily the column family to add columns to */ private void addToSuperCF(Map<?, ?> row, ColumnFamily cfamily) { CFMetaData metaData = cfamily.metadata(); assert metaData != null; AbstractType<?> comparator = metaData.comparator; // Super columns for (Map.Entry<?, ?> entry : row.entrySet()) { Map<?, ?> data = (Map<?, ?>) entry.getValue(); ByteBuffer superName = stringAsType((String) entry.getKey(), ((CompositeType)comparator).types.get(0)); addColumnsToCF((List<?>) data.get("subColumns"), superName, cfamily); if (data.containsKey("metadata")) { parseMeta((Map<?, ?>) data.get("metadata"), cfamily, superName); } } } /** * Convert a JSON formatted file to an SSTable. * * @param jsonFile the file containing JSON formatted data * @param keyspace keyspace the data belongs to * @param cf column family the data belongs to * @param ssTablePath file to write the SSTable to * * @throws IOException for errors reading/writing input/output */ public int importJson(String jsonFile, String keyspace, String cf, String ssTablePath) throws IOException { ColumnFamily columnFamily = TreeMapBackedSortedColumns.factory.create(keyspace, cf); IPartitioner<?> partitioner = DatabaseDescriptor.getPartitioner(); int importedKeys = (isSorted) ? 
importSorted(jsonFile, columnFamily, ssTablePath, partitioner) : importUnsorted(jsonFile, columnFamily, ssTablePath, partitioner); if (importedKeys != -1) System.out.printf("%d keys imported successfully.%n", importedKeys); return importedKeys; } private int importUnsorted(String jsonFile, ColumnFamily columnFamily, String ssTablePath, IPartitioner<?> partitioner) throws IOException { int importedKeys = 0; long start = System.nanoTime(); JsonParser parser = getParser(jsonFile); Object[] data = parser.readValueAs(new TypeReference<Object[]>(){}); keyCountToImport = (keyCountToImport == null) ? data.length : keyCountToImport; SSTableWriter writer = new SSTableWriter(ssTablePath, keyCountToImport); System.out.printf("Importing %s keys...%n", keyCountToImport); // sort by dk representation, but hold onto the hex version SortedMap<DecoratedKey,Map<?, ?>> decoratedKeys = new TreeMap<DecoratedKey,Map<?, ?>>(); for (Object row : data) { Map<?,?> rowAsMap = (Map<?, ?>)row; decoratedKeys.put(partitioner.decorateKey(hexToBytes((String)rowAsMap.get("key"))), rowAsMap); } for (Map.Entry<DecoratedKey, Map<?, ?>> row : decoratedKeys.entrySet()) { if (row.getValue().containsKey("metadata")) { parseMeta((Map<?, ?>) row.getValue().get("metadata"), columnFamily, null); } Object columns = row.getValue().get("columns"); if (columnFamily.getType() == ColumnFamilyType.Super && oldSCFormat) addToSuperCF((Map<?, ?>) columns, columnFamily); else addToStandardCF((List<?>) columns, columnFamily); writer.append(row.getKey(), columnFamily); columnFamily.clear(); importedKeys++; long current = System.nanoTime(); if (TimeUnit.NANOSECONDS.toSeconds(current - start) >= 5) // 5 secs. { System.out.printf("Currently imported %d keys.%n", importedKeys); start = current; } if (keyCountToImport == importedKeys) break; } writer.closeAndOpenReader(); return importedKeys; } private int importSorted(String jsonFile, ColumnFamily columnFamily, String ssTablePath, IPartitioner<?> partitioner) throws IOException { int importedKeys = 0; // already imported keys count long start = System.nanoTime(); JsonParser parser = getParser(jsonFile); if (keyCountToImport == null) { keyCountToImport = 0; System.out.println("Counting keys to import, please wait... (NOTE: to skip this use -n <num_keys>)"); parser.nextToken(); // START_ARRAY while (parser.nextToken() != null) { parser.skipChildren(); if (parser.getCurrentToken() == JsonToken.END_ARRAY) break; keyCountToImport++; } } System.out.printf("Importing %s keys...%n", keyCountToImport); parser = getParser(jsonFile); // renewing parser SSTableWriter writer = new SSTableWriter(ssTablePath, keyCountToImport); int lineNumber = 1; DecoratedKey prevStoredKey = null; parser.nextToken(); // START_ARRAY while (parser.nextToken() != null) { String key = parser.getCurrentName(); Map<?, ?> row = parser.readValueAs(new TypeReference<Map<?, ?>>(){}); DecoratedKey currentKey = partitioner.decorateKey(hexToBytes((String) row.get("key"))); if (row.containsKey("metadata")) parseMeta((Map<?, ?>) row.get("metadata"), columnFamily, null); if (columnFamily.getType() == ColumnFamilyType.Super && oldSCFormat) addToSuperCF((Map<?, ?>)row.get("columns"), columnFamily); else addToStandardCF((List<?>)row.get("columns"), columnFamily); if (prevStoredKey != null && prevStoredKey.compareTo(currentKey) != -1) { System.err .printf("Line %d: Key %s is greater than previous, collection is not sorted properly. Aborting import. 
You might need to delete SSTables manually.%n", lineNumber, key); return -1; } // saving decorated key writer.append(currentKey, columnFamily); columnFamily.clear(); prevStoredKey = currentKey; importedKeys++; lineNumber++; long current = System.nanoTime(); if (TimeUnit.NANOSECONDS.toSeconds(current - start) >= 5) // 5 secs. { System.out.printf("Currently imported %d keys.%n", importedKeys); start = current; } if (keyCountToImport == importedKeys) break; } writer.closeAndOpenReader(); return importedKeys; } /** * Get JsonParser object for file * @param fileName name of the file * @return json parser instance for given file * @throws IOException if any I/O error. */ private JsonParser getParser(String fileName) throws IOException { return factory.createJsonParser(new File(fileName)); } /** * Converts JSON to an SSTable file. JSON input can either be a file specified * using an optional command line argument, or supplied on standard in. * * @param args command line arguments * @throws IOException on failure to open/read/write files or output streams * @throws ParseException on failure to parse JSON input * @throws ConfigurationException on configuration error. */ public static void main(String[] args) throws ParseException, ConfigurationException { CommandLineParser parser = new PosixParser(); try { cmd = parser.parse(options, args); } catch (org.apache.commons.cli.ParseException e) { System.err.println(e.getMessage()); printProgramUsage(); System.exit(1); } if (cmd.getArgs().length != 2) { printProgramUsage(); System.exit(1); } String json = cmd.getArgs()[0]; String ssTable = cmd.getArgs()[1]; String keyspace = cmd.getOptionValue(KEYSPACE_OPTION); String cfamily = cmd.getOptionValue(COLUMN_FAMILY_OPTION); Integer keyCountToImport = null; boolean isSorted = false; boolean oldSCFormat = false; if (cmd.hasOption(KEY_COUNT_OPTION)) { keyCountToImport = Integer.valueOf(cmd.getOptionValue(KEY_COUNT_OPTION)); } if (cmd.hasOption(IS_SORTED_OPTION)) { isSorted = true; } if (cmd.hasOption(OLD_SC_FORMAT_OPTION)) { oldSCFormat = true; } DatabaseDescriptor.loadSchemas(); if (Schema.instance.getNonSystemKeyspaces().size() < 1) { String msg = "no non-system keyspaces are defined"; System.err.println(msg); throw new ConfigurationException(msg); } try { new SSTableImport(keyCountToImport, isSorted, oldSCFormat).importJson(json, keyspace, cfamily, ssTable); } catch (Exception e) { e.printStackTrace(); System.err.println("ERROR: " + e.getMessage()); System.exit(-1); } System.exit(0); } private static void printProgramUsage() { System.out.printf("Usage: %s -s -K <keyspace> -c <column_family> -n <num_keys> <json> <sstable>%n%n", SSTableImport.class.getName()); System.out.println("Options:"); for (Object o : options.getOptions()) { Option opt = (Option) o; System.out.println(" -" +opt.getOpt() + " - " + opt.getDescription()); } } /** * Convert a string to bytes (ByteBuffer) according to type * @param content string to convert * @param type type to use for conversion * @return byte buffer representation of the given string */ private static ByteBuffer stringAsType(String content, AbstractType<?> type) { try { return (type == BytesType.instance) ? hexToBytes(content) : type.fromString(content); } catch (MarshalException e) { throw new RuntimeException(e.getMessage()); } } }
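/*
 * Illustrative sketch, not part of the Cassandra sources above: the importer can
 * also be driven programmatically through the public constructor and
 * importJson(...) shown above, mirroring what main(...) does after option
 * parsing. The keyspace, column family and file paths are made up, and a
 * loadable schema/configuration is assumed, so treat this as an outline rather
 * than a ready-to-run tool.
 */
import java.io.IOException;

import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.tools.SSTableImport;

public class SSTableImportSketch {
    public static void main(String[] args) throws IOException, ConfigurationException {
        // Same bootstrap step main(...) performs before importing.
        DatabaseDescriptor.loadSchemas();

        // null key count => import every row; false/false => unsorted input, current (non-super-column) format.
        SSTableImport importer = new SSTableImport(null, false, false);
        int imported = importer.importJson(
                "/tmp/rows.json",                          // JSON input, e.g. produced by sstable2json
                "Keyspace1",                               // keyspace the data belongs to
                "Standard1",                               // column family the data belongs to
                "/tmp/Keyspace1-Standard1-tmp-Data.db");   // SSTable file to write
        System.out.println("imported " + imported + " keys");
    }
}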
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.operator; import com.google.common.annotations.VisibleForTesting; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import io.trino.execution.Lifespan; import io.trino.spi.type.Type; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.util.concurrent.Futures.transform; import static com.google.common.util.concurrent.MoreExecutors.directExecutor; import static io.trino.operator.PipelineExecutionStrategy.UNGROUPED_EXECUTION; import static java.util.Objects.requireNonNull; public class JoinBridgeManager<T extends JoinBridge> { @VisibleForTesting public static JoinBridgeManager<PartitionedLookupSourceFactory> lookupAllAtOnce(PartitionedLookupSourceFactory factory) { return new JoinBridgeManager<>( false, UNGROUPED_EXECUTION, UNGROUPED_EXECUTION, ignored -> factory, factory.getOutputTypes()); } private final List<Type> buildOutputTypes; private final boolean buildOuter; private final PipelineExecutionStrategy probeExecutionStrategy; private final PipelineExecutionStrategy buildExecutionStrategy; private final Function<Lifespan, T> joinBridgeProvider; private final FreezeOnReadCounter probeFactoryCount = new FreezeOnReadCounter(); private final AtomicBoolean initialized = new AtomicBoolean(); private InternalJoinBridgeDataManager<T> internalJoinBridgeDataManager; public JoinBridgeManager( boolean buildOuter, PipelineExecutionStrategy probeExecutionStrategy, PipelineExecutionStrategy lookupSourceExecutionStrategy, Function<Lifespan, T> lookupSourceFactoryProvider, List<Type> buildOutputTypes) { this.buildOuter = buildOuter; this.probeExecutionStrategy = requireNonNull(probeExecutionStrategy, "probeExecutionStrategy is null"); this.buildExecutionStrategy = requireNonNull(lookupSourceExecutionStrategy, "lookupSourceExecutionStrategy is null"); this.joinBridgeProvider = requireNonNull(lookupSourceFactoryProvider, "lookupSourceFactoryProvider is null"); this.buildOutputTypes = requireNonNull(buildOutputTypes, "buildOutputTypes is null"); } private void initializeIfNecessary() { if (!initialized.get()) { synchronized (this) { if (initialized.get()) { return; } int finalProbeFactoryCount = probeFactoryCount.get(); internalJoinBridgeDataManager = internalJoinBridgeDataManager(probeExecutionStrategy, buildExecutionStrategy, joinBridgeProvider, finalProbeFactoryCount, buildOuter ? 
1 : 0); initialized.set(true); } } } public List<Type> getBuildOutputTypes() { return buildOutputTypes; } public PipelineExecutionStrategy getBuildExecutionStrategy() { return buildExecutionStrategy; } public void incrementProbeFactoryCount() { probeFactoryCount.increment(); } public T getJoinBridge(Lifespan lifespan) { initializeIfNecessary(); return internalJoinBridgeDataManager.getJoinBridge(lifespan); } /** * Invoked when a probe operator factory indicates that it will not * create any more operators, for any lifespan. * <p> * It is expected that this method will only be invoked after * {@link #probeOperatorFactoryClosed(Lifespan)} has been invoked * for every known lifespan. */ public void probeOperatorFactoryClosedForAllLifespans() { initializeIfNecessary(); internalJoinBridgeDataManager.probeOperatorFactoryClosedForAllLifespans(); } public void probeOperatorFactoryClosed(Lifespan lifespan) { initializeIfNecessary(); internalJoinBridgeDataManager.probeOperatorFactoryClosed(lifespan); } public void probeOperatorCreated(Lifespan lifespan) { initializeIfNecessary(); internalJoinBridgeDataManager.probeOperatorCreated(lifespan); } public void probeOperatorClosed(Lifespan lifespan) { initializeIfNecessary(); internalJoinBridgeDataManager.probeOperatorClosed(lifespan); } public void outerOperatorFactoryClosed(Lifespan lifespan) { initializeIfNecessary(); internalJoinBridgeDataManager.outerOperatorFactoryClosed(lifespan); } public void outerOperatorCreated(Lifespan lifespan) { initializeIfNecessary(); internalJoinBridgeDataManager.outerOperatorCreated(lifespan); } public void outerOperatorClosed(Lifespan lifespan) { initializeIfNecessary(); internalJoinBridgeDataManager.outerOperatorClosed(lifespan); } public ListenableFuture<OuterPositionIterator> getOuterPositionsFuture(Lifespan lifespan) { initializeIfNecessary(); return internalJoinBridgeDataManager.getOuterPositionsFuture(lifespan); } private static <T extends JoinBridge> InternalJoinBridgeDataManager<T> internalJoinBridgeDataManager( PipelineExecutionStrategy probeExecutionStrategy, PipelineExecutionStrategy buildExecutionStrategy, Function<Lifespan, T> joinBridgeProvider, int probeFactoryCount, int outerFactoryCount) { checkArgument(outerFactoryCount == 0 || outerFactoryCount == 1, "outerFactoryCount should only be 0 or 1 because it is expected that outer factory never gets duplicated."); switch (probeExecutionStrategy) { case UNGROUPED_EXECUTION: switch (buildExecutionStrategy) { case UNGROUPED_EXECUTION: return new TaskWideInternalJoinBridgeDataManager<>(joinBridgeProvider, probeFactoryCount, outerFactoryCount); case GROUPED_EXECUTION: throw new UnsupportedOperationException("Invalid combination. 
Lookup source should not be grouped if probe is not going to take advantage of it."); } throw new UnsupportedOperationException("Unknown buildExecutionStrategy: " + buildExecutionStrategy); case GROUPED_EXECUTION: switch (buildExecutionStrategy) { case UNGROUPED_EXECUTION: return new SharedInternalJoinBridgeDataManager<>(joinBridgeProvider, probeFactoryCount, outerFactoryCount); case GROUPED_EXECUTION: return new OneToOneInternalJoinBridgeDataManager<>(joinBridgeProvider, probeFactoryCount, outerFactoryCount); } throw new UnsupportedOperationException("Unknown buildExecutionStrategy: " + buildExecutionStrategy); } throw new UnsupportedOperationException("Unknown probeExecutionStrategy: " + probeExecutionStrategy); } private interface InternalJoinBridgeDataManager<T extends JoinBridge> { T getJoinBridge(Lifespan lifespan); ListenableFuture<OuterPositionIterator> getOuterPositionsFuture(Lifespan lifespan); void probeOperatorFactoryClosedForAllLifespans(); void probeOperatorFactoryClosed(Lifespan lifespan); void probeOperatorCreated(Lifespan lifespan); void probeOperatorClosed(Lifespan lifespan); void outerOperatorFactoryClosed(Lifespan lifespan); void outerOperatorCreated(Lifespan lifespan); void outerOperatorClosed(Lifespan lifespan); } // 1 probe, 1 lookup source private static class TaskWideInternalJoinBridgeDataManager<T extends JoinBridge> implements InternalJoinBridgeDataManager<T> { private final T joinBridge; private final JoinLifecycle joinLifecycle; public TaskWideInternalJoinBridgeDataManager(Function<Lifespan, T> lookupSourceFactoryProvider, int probeFactoryCount, int outerFactoryCount) { joinBridge = lookupSourceFactoryProvider.apply(Lifespan.taskWide()); joinLifecycle = new JoinLifecycle(joinBridge, probeFactoryCount, outerFactoryCount); } @Override public T getJoinBridge(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan)); return joinBridge; } @Override public ListenableFuture<OuterPositionIterator> getOuterPositionsFuture(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan)); return transform(joinLifecycle.whenBuildAndProbeFinishes(), ignored -> joinBridge.getOuterPositionIterator(), directExecutor()); } @Override public void probeOperatorFactoryClosedForAllLifespans() { // do nothing } @Override public void probeOperatorFactoryClosed(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan)); joinLifecycle.releaseForProbe(); } @Override public void probeOperatorCreated(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan)); joinLifecycle.retainForProbe(); } @Override public void probeOperatorClosed(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan)); joinLifecycle.releaseForProbe(); } @Override public void outerOperatorFactoryClosed(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan)); joinLifecycle.releaseForOuter(); } @Override public void outerOperatorCreated(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan)); joinLifecycle.retainForOuter(); } @Override public void outerOperatorClosed(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan)); joinLifecycle.releaseForOuter(); } } // N probe, N lookup source; one-to-one mapping, bijective private static class OneToOneInternalJoinBridgeDataManager<T extends JoinBridge> implements InternalJoinBridgeDataManager<T> { private final Map<Lifespan, JoinBridgeAndLifecycle<T>> joinBridgeMap = new ConcurrentHashMap<>(); private final Function<Lifespan, T> joinBridgeProvider; private 
final int probeFactoryCount; private final int outerFactoryCount; public OneToOneInternalJoinBridgeDataManager(Function<Lifespan, T> joinBridgeProvider, int probeFactoryCount, int outerFactoryCount) { this.joinBridgeProvider = joinBridgeProvider; this.probeFactoryCount = probeFactoryCount; this.outerFactoryCount = outerFactoryCount; } @Override public T getJoinBridge(Lifespan lifespan) { return data(lifespan).joinBridge; } @Override public ListenableFuture<OuterPositionIterator> getOuterPositionsFuture(Lifespan lifespan) { return transform( data(lifespan).joinLifecycle.whenBuildAndProbeFinishes(), ignored -> data(lifespan).joinBridge.getOuterPositionIterator(), directExecutor()); } @Override public void probeOperatorFactoryClosedForAllLifespans() { // do nothing } @Override public void probeOperatorFactoryClosed(Lifespan lifespan) { checkArgument(!Lifespan.taskWide().equals(lifespan)); data(lifespan).joinLifecycle.releaseForProbe(); } @Override public void probeOperatorCreated(Lifespan lifespan) { checkArgument(!Lifespan.taskWide().equals(lifespan)); data(lifespan).joinLifecycle.retainForProbe(); } @Override public void probeOperatorClosed(Lifespan lifespan) { checkArgument(!Lifespan.taskWide().equals(lifespan)); data(lifespan).joinLifecycle.releaseForProbe(); } @Override public void outerOperatorFactoryClosed(Lifespan lifespan) { checkArgument(!Lifespan.taskWide().equals(lifespan)); data(lifespan).joinLifecycle.releaseForOuter(); } @Override public void outerOperatorCreated(Lifespan lifespan) { checkArgument(!Lifespan.taskWide().equals(lifespan)); data(lifespan).joinLifecycle.retainForOuter(); } @Override public void outerOperatorClosed(Lifespan lifespan) { checkArgument(!Lifespan.taskWide().equals(lifespan)); data(lifespan).joinLifecycle.releaseForOuter(); } private JoinBridgeAndLifecycle<T> data(Lifespan lifespan) { checkArgument(!Lifespan.taskWide().equals(lifespan)); return joinBridgeMap.computeIfAbsent(lifespan, span -> { T joinBridge = joinBridgeProvider.apply(span); return new JoinBridgeAndLifecycle<>(joinBridge, new JoinLifecycle(joinBridge, probeFactoryCount, outerFactoryCount)); }); } private static class JoinBridgeAndLifecycle<T extends JoinBridge> { T joinBridge; JoinLifecycle joinLifecycle; public JoinBridgeAndLifecycle(T joinBridge, JoinLifecycle joinLifecycle) { this.joinBridge = joinBridge; this.joinLifecycle = joinLifecycle; } } } // N probe, 1 lookup source private static class SharedInternalJoinBridgeDataManager<T extends JoinBridge> implements InternalJoinBridgeDataManager<T> { private final T taskWideJoinBridge; private final JoinLifecycle joinLifecycle; public SharedInternalJoinBridgeDataManager(Function<Lifespan, T> lookupSourceFactoryProvider, int probeFactoryCount, int outerFactoryCount) { this.taskWideJoinBridge = lookupSourceFactoryProvider.apply(Lifespan.taskWide()); this.joinLifecycle = new JoinLifecycle(taskWideJoinBridge, probeFactoryCount, outerFactoryCount); } @Override public T getJoinBridge(Lifespan lifespan) { return taskWideJoinBridge; } @Override public ListenableFuture<OuterPositionIterator> getOuterPositionsFuture(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan), "join bridge is not partitioned"); return transform(joinLifecycle.whenBuildAndProbeFinishes(), ignored -> taskWideJoinBridge.getOuterPositionIterator(), directExecutor()); } @Override public void probeOperatorFactoryClosedForAllLifespans() { joinLifecycle.releaseForProbe(); } @Override public void probeOperatorFactoryClosed(Lifespan lifespan) { // do nothing } 
@Override public void probeOperatorCreated(Lifespan lifespan) { checkArgument(!Lifespan.taskWide().equals(lifespan), "build operator should not produce or destroy probes"); joinLifecycle.retainForProbe(); } @Override public void probeOperatorClosed(Lifespan lifespan) { checkArgument(!Lifespan.taskWide().equals(lifespan), "build operator should not produce or destroy probes"); joinLifecycle.releaseForProbe(); } @Override public void outerOperatorFactoryClosed(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan), "join bridge is not partitioned"); joinLifecycle.releaseForOuter(); } @Override public void outerOperatorCreated(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan), "join bridge is not partitioned"); joinLifecycle.retainForOuter(); } @Override public void outerOperatorClosed(Lifespan lifespan) { checkArgument(Lifespan.taskWide().equals(lifespan), "join bridge is not partitioned"); joinLifecycle.releaseForOuter(); } } private static class JoinLifecycle { private final ReferenceCount probeReferenceCount; private final ReferenceCount outerReferenceCount; private final ListenableFuture<?> whenBuildAndProbeFinishes; private final ListenableFuture<?> whenAllFinishes; public JoinLifecycle(JoinBridge joinBridge, int probeFactoryCount, int outerFactoryCount) { // When all probe and lookup-outer operators finish, destroy the join bridge (freeing the memory) // * Each LookupOuterOperatorFactory count as 1 // * There is at most 1 LookupOuterOperatorFactory // * Each LookupOuterOperator count as 1 checkArgument(outerFactoryCount == 0 || outerFactoryCount == 1); outerReferenceCount = new ReferenceCount(outerFactoryCount); // * Each probe operator factory count as 1 // * Each probe operator count as 1 probeReferenceCount = new ReferenceCount(probeFactoryCount); whenBuildAndProbeFinishes = Futures.whenAllSucceed(joinBridge.whenBuildFinishes(), probeReferenceCount.getFreeFuture()).call(() -> null, directExecutor()); whenAllFinishes = Futures.whenAllSucceed(whenBuildAndProbeFinishes, outerReferenceCount.getFreeFuture()).call(() -> null, directExecutor()); whenAllFinishes.addListener(joinBridge::destroy, directExecutor()); } public ListenableFuture<?> whenBuildAndProbeFinishes() { return whenBuildAndProbeFinishes; } private void retainForProbe() { probeReferenceCount.retain(); } private void releaseForProbe() { probeReferenceCount.release(); } private void retainForOuter() { outerReferenceCount.retain(); } private void releaseForOuter() { outerReferenceCount.release(); } } private static class FreezeOnReadCounter { private int count; private boolean frozen; public synchronized void increment() { checkState(!frozen, "Counter has been read"); count++; } public synchronized int get() { frozen = true; return count; } } }
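/*
 * Illustrative sketch (not Presto's ReferenceCount class): the retain/release pattern described in the
 * JoinLifecycle comments above. The initial count models the operator factories, each operator retains on
 * creation and releases on close, and a future fires once the count reaches zero. Names below are invented
 * for the example; only the Guava calls (SettableFuture, Futures.whenAllSucceed, directExecutor) are real APIs.
 */
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;

final class SimpleReferenceCount {
    private final SettableFuture<Void> freeFuture = SettableFuture.create();
    private int count;

    SimpleReferenceCount(int initialCount) {
        this.count = initialCount;
        if (initialCount == 0) {
            freeFuture.set(null);
        }
    }

    synchronized void retain() {
        if (count == 0) {
            throw new IllegalStateException("already freed");
        }
        count++;
    }

    synchronized void release() {
        if (--count == 0) {
            freeFuture.set(null);
        }
    }

    ListenableFuture<Void> getFreeFuture() {
        return freeFuture;
    }

    /** Mirrors whenBuildAndProbeFinishes(): completes once the build future and the probe count are both done. */
    static ListenableFuture<?> whenBothFinish(ListenableFuture<?> buildFinished, SimpleReferenceCount probeCount) {
        return Futures.whenAllSucceed(buildFinished, probeCount.getFreeFuture())
                .call(() -> null, MoreExecutors.directExecutor());
    }
}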
/* * Copyright 2000-2011 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package git4idea.actions; import com.intellij.dvcs.repo.Repository; import com.intellij.openapi.actionSystem.ActionPlaces; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.project.DumbAwareAction; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.vcs.ProjectLevelVcsManager; import com.intellij.openapi.vcs.TransactionRunnable; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.ArrayUtil; import com.intellij.vcsUtil.VcsFileUtil; import git4idea.GitUtil; import git4idea.GitVcs; import git4idea.branch.GitBranchUtil; import git4idea.i18n.GitBundle; import git4idea.repo.GitRepository; import git4idea.repo.GitRepositoryManager; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Base class for actions that affect the entire git repository. * The action is available if there is at least one git root. */ public abstract class GitRepositoryAction extends DumbAwareAction { /** * The task delayed until end of the primary action. These tasks happen after repository refresh. */ final List<TransactionRunnable> myDelayedTasks = new ArrayList<>(); public void actionPerformed(@NotNull final AnActionEvent e) { myDelayedTasks.clear(); FileDocumentManager.getInstance().saveAllDocuments(); final Project project = e.getRequiredData(CommonDataKeys.PROJECT); GitVcs vcs = GitVcs.getInstance(project); final List<VirtualFile> roots = getGitRoots(project, vcs); if (roots == null) return; final VirtualFile defaultRoot = getDefaultRoot(project, roots, e.getData(CommonDataKeys.VIRTUAL_FILE_ARRAY)); final Set<VirtualFile> affectedRoots = new HashSet<>(); String actionName = getActionName(); List<VcsException> exceptions = new ArrayList<>(); try { perform(project, roots, defaultRoot, affectedRoots, exceptions); } catch (VcsException ex) { exceptions.add(ex); } if (executeFinalTasksSynchronously()) { runFinalTasks(project, vcs, affectedRoots, actionName, exceptions); } } @NotNull private static VirtualFile getDefaultRoot(@NotNull Project project, @NotNull List<VirtualFile> roots, @Nullable VirtualFile[] vFiles) { if (vFiles != null) { for (VirtualFile file : vFiles) { VirtualFile root = GitUtil.gitRootOrNull(file); if (root != null) { return root; } } } GitRepository currentRepository = GitBranchUtil.getCurrentRepository(project); return currentRepository != null ? 
currentRepository.getRoot() : roots.get(0); } protected final void runFinalTasks(@NotNull final Project project, @NotNull final GitVcs vcs, @NotNull final Set<VirtualFile> affectedRoots, @NotNull final String actionName, @NotNull final List<VcsException> exceptions) { VfsUtil.markDirty(true, false, ArrayUtil.toObjectArray(affectedRoots, VirtualFile.class)); LocalFileSystem.getInstance().refreshFiles(affectedRoots, true, true, new Runnable() { @Override public void run() { VcsFileUtil.markFilesDirty(project, affectedRoots); for (TransactionRunnable task : myDelayedTasks) { task.run(exceptions); } myDelayedTasks.clear(); vcs.showErrors(exceptions, actionName); } }); } /** * Return true to indicate that the final tasks should be executed after the action invocation, * false if the task is responsible to call the final tasks manually via {@link #runFinalTasks(Project, GitVcs, Set, String, List)}. */ protected boolean executeFinalTasksSynchronously() { return true; } protected static boolean isRebasing(AnActionEvent e) { final Project project = e.getData(CommonDataKeys.PROJECT); if (project != null) { final VirtualFile[] files = e.getData(CommonDataKeys.VIRTUAL_FILE_ARRAY); if (files != null) { for (VirtualFile file : files) { GitRepositoryManager manager = GitUtil.getRepositoryManager(project); if (isRebasing(manager.getRepositoryForFile(file))) return true; } } if (isRebasing(GitBranchUtil.getCurrentRepository(project))) return true; } return false; } private static boolean isRebasing(@Nullable GitRepository repository) { return repository != null && repository.getState() == Repository.State.REBASING; } /** * Get git roots for the project. The method shows dialogs in the case when roots cannot be retrieved, so it should be called * from the event dispatch thread. 
* * @param project the project * @param vcs the git Vcs * @return the list of the roots, or null */ @Nullable public static List<VirtualFile> getGitRoots(Project project, GitVcs vcs) { List<VirtualFile> roots; try { roots = GitUtil.getGitRoots(project, vcs); } catch (VcsException e) { Messages.showErrorDialog(project, e.getMessage(), GitBundle.getString("repository.action.missing.roots.title")); return null; } return roots; } /** * Delay task to be executed after refresh * * @param task the task to run */ public final void delayTask(@NotNull TransactionRunnable task) { myDelayedTasks.add(task); } /** * Get name of action (for error reporting) * * @return the name of action */ @NotNull protected abstract String getActionName(); /** * Perform action for some repositories * * @param project a context project * @param gitRoots a git roots that affect the current project (sorted by {@link VirtualFile#getPresentableUrl()}) * @param defaultRoot a guessed default root (based on the currently selected file list) * @param affectedRoots a set of roots affected by the action * @param exceptions a list of exceptions from running git * @throws VcsException if there is a problem with running git (this exception is considered to be added to the end of the exception list) */ protected abstract void perform(@NotNull Project project, @NotNull List<VirtualFile> gitRoots, @NotNull VirtualFile defaultRoot, final Set<VirtualFile> affectedRoots, List<VcsException> exceptions) throws VcsException; @Override public void update(final AnActionEvent e) { super.update(e); boolean enabled = isEnabled(e); e.getPresentation().setEnabled(enabled); if (ActionPlaces.isPopupPlace(e.getPlace())) { e.getPresentation().setVisible(enabled); } else { e.getPresentation().setVisible(true); } } protected boolean isEnabled(AnActionEvent e) { Project project = e.getData(CommonDataKeys.PROJECT); if (project == null) { return false; } GitVcs vcs = GitVcs.getInstance(project); final VirtualFile[] roots = ProjectLevelVcsManager.getInstance(project).getRootsUnderVcs(vcs); if (roots == null || roots.length == 0) { return false; } return true; } }
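/*
 * Illustrative sketch (not part of git4idea): a minimal, hypothetical subclass showing how the
 * GitRepositoryAction contract above is typically filled in. Only the overridden signatures come
 * from the base class; the action name and the body of perform() are placeholders.
 */
package git4idea.actions;

import com.intellij.openapi.project.Project;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;

import java.util.List;
import java.util.Set;

public class GitExampleRepositoryAction extends GitRepositoryAction {
  @NotNull
  @Override
  protected String getActionName() {
    return "Example"; // used for error reporting in runFinalTasks()
  }

  @Override
  protected void perform(@NotNull Project project,
                         @NotNull List<VirtualFile> gitRoots,
                         @NotNull VirtualFile defaultRoot,
                         Set<VirtualFile> affectedRoots,
                         List<VcsException> exceptions) throws VcsException {
    // Mark the root we would operate on so the base class refreshes it afterwards.
    affectedRoots.add(defaultRoot);
    // Work that must run only after the VFS refresh could be queued via delayTask(...);
    // failures should be added to 'exceptions' so they are reported under getActionName().
  }
}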
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.ec2.model; import java.io.Serializable; /** * <p> * Describes the storage parameters for S3 and S3 buckets for an instance * store-backed AMI. * </p> */ public class S3Storage implements Serializable, Cloneable { /** * The bucket in which to store the AMI. You can specify a bucket that * you already own or a new bucket that Amazon EC2 creates on your * behalf. If you specify a bucket that belongs to someone else, Amazon * EC2 returns an error. */ private String bucket; /** * The beginning of the file name of the AMI. */ private String prefix; /** * The access key ID of the owner of the bucket. Before you specify a * value for your access key ID, review and follow the guidance in <a * href="http://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html">Best * Practices for Managing AWS Access Keys</a>. */ private String aWSAccessKeyId; /** * A Base64-encoded Amazon S3 upload policy that gives Amazon EC2 * permission to upload items into Amazon S3 on your behalf. */ private String uploadPolicy; /** * The signature of the Base64 encoded JSON document. */ private String uploadPolicySignature; /** * The bucket in which to store the AMI. You can specify a bucket that * you already own or a new bucket that Amazon EC2 creates on your * behalf. If you specify a bucket that belongs to someone else, Amazon * EC2 returns an error. * * @return The bucket in which to store the AMI. You can specify a bucket that * you already own or a new bucket that Amazon EC2 creates on your * behalf. If you specify a bucket that belongs to someone else, Amazon * EC2 returns an error. */ public String getBucket() { return bucket; } /** * The bucket in which to store the AMI. You can specify a bucket that * you already own or a new bucket that Amazon EC2 creates on your * behalf. If you specify a bucket that belongs to someone else, Amazon * EC2 returns an error. * * @param bucket The bucket in which to store the AMI. You can specify a bucket that * you already own or a new bucket that Amazon EC2 creates on your * behalf. If you specify a bucket that belongs to someone else, Amazon * EC2 returns an error. */ public void setBucket(String bucket) { this.bucket = bucket; } /** * The bucket in which to store the AMI. You can specify a bucket that * you already own or a new bucket that Amazon EC2 creates on your * behalf. If you specify a bucket that belongs to someone else, Amazon * EC2 returns an error. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param bucket The bucket in which to store the AMI. You can specify a bucket that * you already own or a new bucket that Amazon EC2 creates on your * behalf. If you specify a bucket that belongs to someone else, Amazon * EC2 returns an error. * * @return A reference to this updated object so that method calls can be chained * together. 
*/ public S3Storage withBucket(String bucket) { this.bucket = bucket; return this; } /** * The beginning of the file name of the AMI. * * @return The beginning of the file name of the AMI. */ public String getPrefix() { return prefix; } /** * The beginning of the file name of the AMI. * * @param prefix The beginning of the file name of the AMI. */ public void setPrefix(String prefix) { this.prefix = prefix; } /** * The beginning of the file name of the AMI. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param prefix The beginning of the file name of the AMI. * * @return A reference to this updated object so that method calls can be chained * together. */ public S3Storage withPrefix(String prefix) { this.prefix = prefix; return this; } /** * The access key ID of the owner of the bucket. Before you specify a * value for your access key ID, review and follow the guidance in <a * href="http://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html">Best * Practices for Managing AWS Access Keys</a>. * * @return The access key ID of the owner of the bucket. Before you specify a * value for your access key ID, review and follow the guidance in <a * href="http://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html">Best * Practices for Managing AWS Access Keys</a>. */ public String getAWSAccessKeyId() { return aWSAccessKeyId; } /** * The access key ID of the owner of the bucket. Before you specify a * value for your access key ID, review and follow the guidance in <a * href="http://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html">Best * Practices for Managing AWS Access Keys</a>. * * @param aWSAccessKeyId The access key ID of the owner of the bucket. Before you specify a * value for your access key ID, review and follow the guidance in <a * href="http://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html">Best * Practices for Managing AWS Access Keys</a>. */ public void setAWSAccessKeyId(String aWSAccessKeyId) { this.aWSAccessKeyId = aWSAccessKeyId; } /** * The access key ID of the owner of the bucket. Before you specify a * value for your access key ID, review and follow the guidance in <a * href="http://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html">Best * Practices for Managing AWS Access Keys</a>. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param aWSAccessKeyId The access key ID of the owner of the bucket. Before you specify a * value for your access key ID, review and follow the guidance in <a * href="http://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html">Best * Practices for Managing AWS Access Keys</a>. * * @return A reference to this updated object so that method calls can be chained * together. */ public S3Storage withAWSAccessKeyId(String aWSAccessKeyId) { this.aWSAccessKeyId = aWSAccessKeyId; return this; } /** * A Base64-encoded Amazon S3 upload policy that gives Amazon EC2 * permission to upload items into Amazon S3 on your behalf. * * @return A Base64-encoded Amazon S3 upload policy that gives Amazon EC2 * permission to upload items into Amazon S3 on your behalf. */ public String getUploadPolicy() { return uploadPolicy; } /** * A Base64-encoded Amazon S3 upload policy that gives Amazon EC2 * permission to upload items into Amazon S3 on your behalf. 
* * @param uploadPolicy A Base64-encoded Amazon S3 upload policy that gives Amazon EC2 * permission to upload items into Amazon S3 on your behalf. */ public void setUploadPolicy(String uploadPolicy) { this.uploadPolicy = uploadPolicy; } /** * A Base64-encoded Amazon S3 upload policy that gives Amazon EC2 * permission to upload items into Amazon S3 on your behalf. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param uploadPolicy A Base64-encoded Amazon S3 upload policy that gives Amazon EC2 * permission to upload items into Amazon S3 on your behalf. * * @return A reference to this updated object so that method calls can be chained * together. */ public S3Storage withUploadPolicy(String uploadPolicy) { this.uploadPolicy = uploadPolicy; return this; } /** * The signature of the Base64 encoded JSON document. * * @return The signature of the Base64 encoded JSON document. */ public String getUploadPolicySignature() { return uploadPolicySignature; } /** * The signature of the Base64 encoded JSON document. * * @param uploadPolicySignature The signature of the Base64 encoded JSON document. */ public void setUploadPolicySignature(String uploadPolicySignature) { this.uploadPolicySignature = uploadPolicySignature; } /** * The signature of the Base64 encoded JSON document. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param uploadPolicySignature The signature of the Base64 encoded JSON document. * * @return A reference to this updated object so that method calls can be chained * together. */ public S3Storage withUploadPolicySignature(String uploadPolicySignature) { this.uploadPolicySignature = uploadPolicySignature; return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getBucket() != null) sb.append("Bucket: " + getBucket() + ","); if (getPrefix() != null) sb.append("Prefix: " + getPrefix() + ","); if (getAWSAccessKeyId() != null) sb.append("AWSAccessKeyId: " + getAWSAccessKeyId() + ","); if (getUploadPolicy() != null) sb.append("UploadPolicy: " + getUploadPolicy() + ","); if (getUploadPolicySignature() != null) sb.append("UploadPolicySignature: " + getUploadPolicySignature() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getBucket() == null) ? 0 : getBucket().hashCode()); hashCode = prime * hashCode + ((getPrefix() == null) ? 0 : getPrefix().hashCode()); hashCode = prime * hashCode + ((getAWSAccessKeyId() == null) ? 0 : getAWSAccessKeyId().hashCode()); hashCode = prime * hashCode + ((getUploadPolicy() == null) ? 0 : getUploadPolicy().hashCode()); hashCode = prime * hashCode + ((getUploadPolicySignature() == null) ? 
0 : getUploadPolicySignature().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof S3Storage == false) return false; S3Storage other = (S3Storage)obj; if (other.getBucket() == null ^ this.getBucket() == null) return false; if (other.getBucket() != null && other.getBucket().equals(this.getBucket()) == false) return false; if (other.getPrefix() == null ^ this.getPrefix() == null) return false; if (other.getPrefix() != null && other.getPrefix().equals(this.getPrefix()) == false) return false; if (other.getAWSAccessKeyId() == null ^ this.getAWSAccessKeyId() == null) return false; if (other.getAWSAccessKeyId() != null && other.getAWSAccessKeyId().equals(this.getAWSAccessKeyId()) == false) return false; if (other.getUploadPolicy() == null ^ this.getUploadPolicy() == null) return false; if (other.getUploadPolicy() != null && other.getUploadPolicy().equals(this.getUploadPolicy()) == false) return false; if (other.getUploadPolicySignature() == null ^ this.getUploadPolicySignature() == null) return false; if (other.getUploadPolicySignature() != null && other.getUploadPolicySignature().equals(this.getUploadPolicySignature()) == false) return false; return true; } @Override public S3Storage clone() { try { return (S3Storage) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
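/*
 * Usage sketch: building an S3Storage with the fluent "with" setters defined above. All values below
 * (bucket name, prefix, access key ID, policy and signature strings) are placeholders; in practice the
 * object is attached to an EC2 bundle-instance request rather than used on its own.
 */
package com.amazonaws.services.ec2.model;

public class S3StorageExample {
    public static void main(String[] args) {
        S3Storage storage = new S3Storage()
                .withBucket("example-ami-bucket")           // a bucket you own, or one EC2 creates for you
                .withPrefix("my-ami/")                      // beginning of the AMI file names
                .withAWSAccessKeyId("AKIAEXAMPLE")          // placeholder access key ID
                .withUploadPolicy("ZXhhbXBsZS1wb2xpY3k=")   // placeholder Base64 upload policy
                .withUploadPolicySignature("c2lnbmF0dXJl"); // placeholder signature

        // The generated toString()/equals()/hashCode() shown above operate on exactly these fields.
        System.out.println(storage);
    }
}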
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.vcs.configurable; import com.intellij.openapi.fileChooser.FileChooserDescriptor; import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.options.UnnamedConfigurable; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.*; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vcs.*; import com.intellij.openapi.vcs.impl.DefaultVcsRootPolicy; import com.intellij.openapi.vcs.impl.VcsDescriptor; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.ui.components.JBLabel; import com.intellij.util.continuation.ModalityIgnorantBackgroundableTask; import com.intellij.xml.util.XmlStringUtil; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.HashMap; import java.util.Map; /** * @author yole */ public class VcsMappingConfigurationDialog extends DialogWrapper { private final Project myProject; private JComboBox myVCSComboBox; private TextFieldWithBrowseButton myDirectoryTextField; private JPanel myPanel; private JPanel myVcsConfigurablePlaceholder; private JRadioButton myProjectRadioButton; private JRadioButton myDirectoryRadioButton; private JBLabel myProjectButtonComment; private UnnamedConfigurable myVcsConfigurable; private VcsDirectoryMapping myMappingCopy; private JComponent myVcsConfigurableComponent; private ProjectLevelVcsManager myVcsManager; private final Map<String, VcsDescriptor> myVcses; public VcsMappingConfigurationDialog(final Project project, final String title) { super(project, false); myProject = project; myVcsManager = ProjectLevelVcsManager.getInstance(myProject); final VcsDescriptor[] vcsDescriptors = myVcsManager.getAllVcss(); myVcses = new HashMap<>(); for (VcsDescriptor vcsDescriptor : vcsDescriptors) { myVcses.put(vcsDescriptor.getName(), vcsDescriptor); } myVCSComboBox.setModel(VcsDirectoryConfigurationPanel.buildVcsWrappersModel(project)); myDirectoryTextField.addActionListener(new MyBrowseFolderListener("Select Directory", "Select directory to map to a VCS", myDirectoryTextField, project, FileChooserDescriptorFactory.createSingleFolderDescriptor())); myMappingCopy = new VcsDirectoryMapping("", ""); setTitle(title); init(); myVCSComboBox.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { updateVcsConfigurable(); } }); } protected JComponent createCenterPanel() { return myPanel; } public void setMapping(@NotNull VcsDirectoryMapping mapping) { myMappingCopy = new VcsDirectoryMapping(mapping.getDirectory(), mapping.getVcs(), mapping.getRootSettings()); myProjectRadioButton.setSelected(myMappingCopy.isDefaultMapping()); myDirectoryRadioButton.setSelected(! 
myProjectRadioButton.isSelected()); if (myMappingCopy.isDefaultMapping()) { myDirectoryTextField.setText(""); } else { myDirectoryTextField.setText(FileUtil.toSystemDependentName(mapping.getDirectory())); } myVCSComboBox.setSelectedItem(myVcses.get(mapping.getVcs())); updateVcsConfigurable(); myDirectoryTextField.setEnabled(myDirectoryRadioButton.isSelected()); initProjectMessage(); } @NotNull public VcsDirectoryMapping getMapping() { VcsDescriptor wrapper = (VcsDescriptor) myVCSComboBox.getSelectedItem(); String vcs = wrapper == null || wrapper.isNone() ? "" : wrapper.getName(); String directory = myProjectRadioButton.isSelected() ? "" : FileUtil.toSystemIndependentName(myDirectoryTextField.getText()); return new VcsDirectoryMapping(directory, vcs, myMappingCopy.getRootSettings()); } private void updateVcsConfigurable() { if (myVcsConfigurable != null) { myVcsConfigurablePlaceholder.remove(myVcsConfigurableComponent); myVcsConfigurable.disposeUIResources(); myVcsConfigurable = null; } VcsDescriptor wrapper = (VcsDescriptor) myVCSComboBox.getSelectedItem(); if (wrapper != null && (! wrapper.isNone())) { final AbstractVcs vcs = myVcsManager.findVcsByName(wrapper.getName()); if (vcs != null) { UnnamedConfigurable configurable = vcs.getRootConfigurable(myMappingCopy); if (configurable != null) { myVcsConfigurable = configurable; myVcsConfigurableComponent = myVcsConfigurable.createComponent(); myVcsConfigurablePlaceholder.add(myVcsConfigurableComponent, BorderLayout.CENTER); } } } pack(); } @NotNull @Override protected Action[] createLeftSideActions() { return new Action[] { new ConfigureVcsAction() }; } protected void doOKAction() { if (myVcsConfigurable != null) { try { myVcsConfigurable.apply(); } catch(ConfigurationException ex) { Messages.showErrorDialog(myPanel, "Invalid VCS options: " + ex.getMessage()); } } super.doOKAction(); } private void createUIComponents() { ButtonGroup bg = new ButtonGroup(); myProjectRadioButton = new JRadioButton(); myDirectoryRadioButton = new JRadioButton(); bg.add(myProjectRadioButton); bg.add(myDirectoryRadioButton); final ActionListener al = new ActionListener() { @Override public void actionPerformed(ActionEvent e) { myDirectoryTextField.setEnabled(myDirectoryRadioButton.isSelected()); } }; myProjectRadioButton.addActionListener(al); myDirectoryRadioButton.addActionListener(al); myDirectoryRadioButton.setSelected(true); } public void initProjectMessage() { myProjectButtonComment.setText(XmlStringUtil.wrapInHtml(DefaultVcsRootPolicy.getInstance(myProject).getProjectConfigurationMessage(myProject))); } private class MyBrowseFolderListener extends ComponentWithBrowseButton.BrowseFolderActionListener<JTextField> { public MyBrowseFolderListener(String title, String description, TextFieldWithBrowseButton textField, Project project, FileChooserDescriptor fileChooserDescriptor) { super(title, description, textField, project, fileChooserDescriptor, TextComponentAccessor.TEXT_FIELD_WHOLE_TEXT); } @Override protected VirtualFile getInitialFile() { // suggest project base dir only if nothing is typed in the component. 
String text = getComponentText(); if(text.length() == 0) { VirtualFile file = myProject.getBaseDir(); if(file != null) { return file; } } return super.getInitialFile(); } @Override protected void onFileChosen(@NotNull final VirtualFile chosenFile) { String oldText = myDirectoryTextField.getText(); super.onFileChosen(chosenFile); final VcsDescriptor wrapper = (VcsDescriptor) myVCSComboBox.getSelectedItem(); if (oldText.length() == 0 && (wrapper == null || wrapper.isNone())) { final ModalityIgnorantBackgroundableTask task = new ModalityIgnorantBackgroundableTask(myProject, "Looking for VCS administrative area", false) { VcsDescriptor probableVcs = null; @Override protected void doInAwtIfFail(Exception e) { } @Override protected void doInAwtIfCancel() { } @Override protected void doInAwtIfSuccess() { if (probableVcs != null) { // todo none myVCSComboBox.setSelectedItem(probableVcs); } } @Override protected void runImpl(@NotNull ProgressIndicator indicator) { for (VcsDescriptor vcs : myVcses.values()) { if (vcs.probablyUnderVcs(chosenFile)) { if (probableVcs != null) { probableVcs = null; break; } probableVcs = vcs; } } } }; ProgressManager.getInstance().run(task); } } } private class ConfigureVcsAction extends AbstractAction { public ConfigureVcsAction() { super(VcsBundle.message("button.configure")); } public void actionPerformed(ActionEvent e) { VcsDescriptor wrapper = (VcsDescriptor) myVCSComboBox.getSelectedItem(); new VcsConfigurationsDialog(myProject, null, wrapper).show(); } } }
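/*
 * Usage sketch (hypothetical caller, not part of the IntelliJ sources): how the dialog above is
 * typically driven from configuration code. The dialog title is a placeholder; setMapping(),
 * getMapping(), show() and isOK() are the methods defined or inherited above.
 */
package com.intellij.openapi.vcs.configurable;

import com.intellij.openapi.project.Project;
import com.intellij.openapi.vcs.VcsDirectoryMapping;

public class VcsMappingConfigurationDialogUsageExample {
  /** Opens the dialog pre-filled with an existing mapping and returns the edited one, or null if cancelled. */
  public static VcsDirectoryMapping editMapping(Project project, VcsDirectoryMapping existing) {
    VcsMappingConfigurationDialog dialog = new VcsMappingConfigurationDialog(project, "Edit VCS Directory Mapping");
    dialog.setMapping(existing); // selects the project/directory radio buttons and the VCS combo box
    dialog.show();
    return dialog.isOK() ? dialog.getMapping() : null;
  }
}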
/* Derby - Class org.apache.derby.impl.services.reflect.DatabaseClasses Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.services.reflect; import org.apache.derby.iapi.services.sanity.SanityManager; import org.apache.derby.iapi.services.loader.ClassFactory; import org.apache.derby.iapi.services.loader.GeneratedClass; import org.apache.derby.iapi.services.loader.ClassInspector; import org.apache.derby.iapi.services.monitor.ModuleControl; import org.apache.derby.iapi.services.monitor.ModuleSupportable; import org.apache.derby.iapi.services.monitor.Monitor; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.services.property.PropertyUtil; import org.apache.derby.iapi.services.stream.HeaderPrintWriter; import org.apache.derby.iapi.services.monitor.Monitor; import org.apache.derby.iapi.services.compiler.*; import java.lang.reflect.Modifier; import org.apache.derby.iapi.sql.compile.CodeGeneration; import org.apache.derby.iapi.util.ByteArray; import org.apache.derby.iapi.services.io.FileUtil; import org.apache.derby.iapi.services.i18n.MessageService; import org.apache.derby.iapi.reference.Property; import org.apache.derby.iapi.reference.SQLState; import org.apache.derby.iapi.reference.MessageId; import org.apache.derby.iapi.reference.ClassName; import java.util.Properties; import java.util.Hashtable; import java.io.ObjectStreamClass; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.Serializable; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; /** An abstract implementation of the ClassFactory. This package can be extended to fully implement a ClassFactory. Implementations can differ in two areas, how they load a class and how they invoke methods of the generated class. <P> This class manages a hash table of loaded generated classes and their GeneratedClass objects. A loaded class may be referenced multiple times -- each class has a reference count associated with it. When a load request arrives, if the class has already been loaded, its ref count is incremented. For a remove request, the ref count is decremented unless it is the last reference, in which case the class is removed. This is transparent to users. 
@see org.apache.derby.iapi.services.loader.ClassFactory */ abstract class DatabaseClasses implements ClassFactory, ModuleControl { /* ** Fields */ private ClassInspector classInspector; private JavaFactory javaFactory; private UpdateLoader applicationLoader; /* ** Constructor */ DatabaseClasses() { } /* ** Public methods of ModuleControl */ public void boot(boolean create, Properties startParams) throws StandardException { classInspector = new ClassInspector(this); // //The ClassFactory runs per service (database) mode (booted as a service module after AccessFactory). //If the code that booted //us needs a per-database classpath then they pass in the classpath using //the runtime property BOOT_DB_CLASSPATH in startParams String classpath = null; if (startParams != null) { classpath = startParams.getProperty(Property.BOOT_DB_CLASSPATH); } if (classpath != null) { applicationLoader = new UpdateLoader(classpath, this, true, true); } javaFactory = (JavaFactory) org.apache.derby.iapi.services.monitor.Monitor.startSystemModule(org.apache.derby.iapi.reference.Module.JavaFactory); } public void stop() { if (applicationLoader != null) applicationLoader.close(); } /* ** Public methods of ClassFactory */ /** Here we load the newly added class now, rather than waiting for the findGeneratedClass(). Thus we are assuming that the class is going to be used sometime soon. Delaying the load would mean storing the class data in a file, which wastes cycles and complicates the cleanup. @see ClassFactory#loadGeneratedClass @exception StandardException Class format is bad. */ public final GeneratedClass loadGeneratedClass(String fullyQualifiedName, ByteArray classDump) throws StandardException { try { return loadGeneratedClassFromData(fullyQualifiedName, classDump); } catch (LinkageError le) { WriteClassFile(fullyQualifiedName, classDump, le); throw StandardException.newException(SQLState.GENERATED_CLASS_LINKAGE_ERROR, le, fullyQualifiedName); } catch (VirtualMachineError vme) { // these may be beyond saving, but fwiw WriteClassFile(fullyQualifiedName, classDump, vme); throw vme; } } private static void WriteClassFile(String fullyQualifiedName, ByteArray bytecode, Throwable t) { // get the un-qualified name and add the extension int lastDot = fullyQualifiedName.lastIndexOf((int)'.'); String filename = fullyQualifiedName.substring(lastDot+1,fullyQualifiedName.length()).concat(".class"); Object env = Monitor.getMonitor().getEnvironment(); File dir = env instanceof File ?
(File) env : null; final File classFile = FileUtil.newFile(dir,filename); // find the error stream HeaderPrintWriter errorStream = Monitor.getStream(); try { FileOutputStream fis; try { fis = (FileOutputStream) AccessController.doPrivileged( new PrivilegedExceptionAction() { public Object run() throws IOException { return new FileOutputStream(classFile); } }); } catch (PrivilegedActionException pae) { throw (IOException) pae.getCause(); } fis.write(bytecode.getArray(), bytecode.getOffset(), bytecode.getLength()); fis.flush(); if (t!=null) { errorStream.printlnWithHeader(MessageService.getTextMessage(MessageId.CM_WROTE_CLASS_FILE, fullyQualifiedName, classFile, t)); } fis.close(); } catch (IOException e) { if (SanityManager.DEBUG) SanityManager.THROWASSERT("Unable to write .class file", e); } } public ClassInspector getClassInspector() { return classInspector; } public final Class loadApplicationClass(String className) throws ClassNotFoundException { if (className.startsWith("org.apache.derby.")) { // Assume this is an engine class, if so // try to load from this class loader, // this ensures in strange class loader // environments we do not get ClassCastExceptions // when an engine class is loaded through a different // class loader to the rest of the engine. try { return Class.forName(className); } catch (ClassNotFoundException cnfe) { // fall through to the code below, // could be client or tools class // in a different loader. } } Throwable loadError; try { try { return loadClassNotInDatabaseJar(className); } catch (ClassNotFoundException cnfe) { if (applicationLoader == null) throw cnfe; Class c = applicationLoader.loadClass(className, true); if (c == null) throw cnfe; return c; } } catch (SecurityException se) { // Thrown if the class has been compromised in some // way, e.g. modified in a signed jar. loadError = se; } catch (LinkageError le) { // some error linking the jar, again could // be malicious code inserted into a jar. loadError = le; } throw new ClassNotFoundException(className + " : " + loadError.getMessage()); } abstract Class loadClassNotInDatabaseJar(String className) throws ClassNotFoundException; public final Class loadApplicationClass(ObjectStreamClass classDescriptor) throws ClassNotFoundException { return loadApplicationClass(classDescriptor.getName()); } public boolean isApplicationClass(Class theClass) { return theClass.getClassLoader() instanceof JarLoader; } public void notifyModifyJar(boolean reload) throws StandardException { if (applicationLoader != null) { applicationLoader.modifyJar(reload); } } /** Notify the class manager that the classpath has been modified.
@exception StandardException thrown on error */ public void notifyModifyClasspath(String classpath) throws StandardException { if (applicationLoader != null) { applicationLoader.modifyClasspath(classpath); } } public int getClassLoaderVersion() { if (applicationLoader != null) { return applicationLoader.getClassLoaderVersion(); } return -1; } public ByteArray buildSpecificFactory(String className, String factoryName) throws StandardException { ClassBuilder cb = javaFactory.newClassBuilder(this, CodeGeneration.GENERATED_PACKAGE_PREFIX, Modifier.PUBLIC | Modifier.FINAL, factoryName, "org.apache.derby.impl.services.reflect.GCInstanceFactory"); MethodBuilder constructor = cb.newConstructorBuilder(Modifier.PUBLIC); constructor.callSuper(); constructor.methodReturn(); constructor.complete(); constructor = null; MethodBuilder noArg = cb.newMethodBuilder(Modifier.PUBLIC, ClassName.GeneratedByteCode, "getNewInstance"); noArg.pushNewStart(className); noArg.pushNewComplete(0); noArg.methodReturn(); noArg.complete(); noArg = null; return cb.getClassBytecode(); } /* ** Class specific methods */ /* ** Keep track of loaded generated classes and their GeneratedClass objects. */ abstract LoadedGeneratedClass loadGeneratedClassFromData(String fullyQualifiedName, ByteArray classDump); }
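/*
 * Illustrative sketch (not Derby code): the reference-counting scheme described in the DatabaseClasses
 * javadoc above -- a load request on an already-cached class bumps its count, a remove request decrements
 * it, and the entry is dropped only when the last reference goes away. All names below are invented for
 * the example.
 */
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

final class RefCountedCache<K, V> {
    private static final class Entry<V> {
        final V value;
        int refCount;
        Entry(V value) { this.value = value; }
    }

    private final Map<K, Entry<V>> entries = new HashMap<>();

    /** Returns the cached value, loading it on first use; each call adds one reference. */
    synchronized V acquire(K key, Function<K, V> loader) {
        Entry<V> e = entries.computeIfAbsent(key, k -> new Entry<>(loader.apply(k)));
        e.refCount++;
        return e.value;
    }

    /** Drops one reference; the entry is removed only when no references remain. */
    synchronized void release(K key) {
        Entry<V> e = entries.get(key);
        if (e != null && --e.refCount == 0) {
            entries.remove(key);
        }
    }
}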
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ssmincidents.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Information about a Amazon Web Services Region in your replication set. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-incidents-2018-05-10/RegionInfo" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class RegionInfo implements Serializable, Cloneable, StructuredPojo { /** * <p> * The ID of the KMS key used to encrypt the data in this Amazon Web Services Region. * </p> */ private String sseKmsKeyId; /** * <p> * The status of the Amazon Web Services Region in the replication set. * </p> */ private String status; /** * <p> * Information displayed about the status of the Amazon Web Services Region. * </p> */ private String statusMessage; /** * <p> * The most recent date and time that Incident Manager updated the Amazon Web Services Region's status. * </p> */ private java.util.Date statusUpdateDateTime; /** * <p> * The ID of the KMS key used to encrypt the data in this Amazon Web Services Region. * </p> * * @param sseKmsKeyId * The ID of the KMS key used to encrypt the data in this Amazon Web Services Region. */ public void setSseKmsKeyId(String sseKmsKeyId) { this.sseKmsKeyId = sseKmsKeyId; } /** * <p> * The ID of the KMS key used to encrypt the data in this Amazon Web Services Region. * </p> * * @return The ID of the KMS key used to encrypt the data in this Amazon Web Services Region. */ public String getSseKmsKeyId() { return this.sseKmsKeyId; } /** * <p> * The ID of the KMS key used to encrypt the data in this Amazon Web Services Region. * </p> * * @param sseKmsKeyId * The ID of the KMS key used to encrypt the data in this Amazon Web Services Region. * @return Returns a reference to this object so that method calls can be chained together. */ public RegionInfo withSseKmsKeyId(String sseKmsKeyId) { setSseKmsKeyId(sseKmsKeyId); return this; } /** * <p> * The status of the Amazon Web Services Region in the replication set. * </p> * * @param status * The status of the Amazon Web Services Region in the replication set. * @see RegionStatus */ public void setStatus(String status) { this.status = status; } /** * <p> * The status of the Amazon Web Services Region in the replication set. * </p> * * @return The status of the Amazon Web Services Region in the replication set. * @see RegionStatus */ public String getStatus() { return this.status; } /** * <p> * The status of the Amazon Web Services Region in the replication set. * </p> * * @param status * The status of the Amazon Web Services Region in the replication set. * @return Returns a reference to this object so that method calls can be chained together. 
* @see RegionStatus */ public RegionInfo withStatus(String status) { setStatus(status); return this; } /** * <p> * The status of the Amazon Web Services Region in the replication set. * </p> * * @param status * The status of the Amazon Web Services Region in the replication set. * @return Returns a reference to this object so that method calls can be chained together. * @see RegionStatus */ public RegionInfo withStatus(RegionStatus status) { this.status = status.toString(); return this; } /** * <p> * Information displayed about the status of the Amazon Web Services Region. * </p> * * @param statusMessage * Information displayed about the status of the Amazon Web Services Region. */ public void setStatusMessage(String statusMessage) { this.statusMessage = statusMessage; } /** * <p> * Information displayed about the status of the Amazon Web Services Region. * </p> * * @return Information displayed about the status of the Amazon Web Services Region. */ public String getStatusMessage() { return this.statusMessage; } /** * <p> * Information displayed about the status of the Amazon Web Services Region. * </p> * * @param statusMessage * Information displayed about the status of the Amazon Web Services Region. * @return Returns a reference to this object so that method calls can be chained together. */ public RegionInfo withStatusMessage(String statusMessage) { setStatusMessage(statusMessage); return this; } /** * <p> * The most recent date and time that Incident Manager updated the Amazon Web Services Region's status. * </p> * * @param statusUpdateDateTime * The most recent date and time that Incident Manager updated the Amazon Web Services Region's status. */ public void setStatusUpdateDateTime(java.util.Date statusUpdateDateTime) { this.statusUpdateDateTime = statusUpdateDateTime; } /** * <p> * The most recent date and time that Incident Manager updated the Amazon Web Services Region's status. * </p> * * @return The most recent date and time that Incident Manager updated the Amazon Web Services Region's status. */ public java.util.Date getStatusUpdateDateTime() { return this.statusUpdateDateTime; } /** * <p> * The most recent date and time that Incident Manager updated the Amazon Web Services Region's status. * </p> * * @param statusUpdateDateTime * The most recent date and time that Incident Manager updated the Amazon Web Services Region's status. * @return Returns a reference to this object so that method calls can be chained together. */ public RegionInfo withStatusUpdateDateTime(java.util.Date statusUpdateDateTime) { setStatusUpdateDateTime(statusUpdateDateTime); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getSseKmsKeyId() != null) sb.append("SseKmsKeyId: ").append(getSseKmsKeyId()).append(","); if (getStatus() != null) sb.append("Status: ").append(getStatus()).append(","); if (getStatusMessage() != null) sb.append("StatusMessage: ").append(getStatusMessage()).append(","); if (getStatusUpdateDateTime() != null) sb.append("StatusUpdateDateTime: ").append(getStatusUpdateDateTime()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof RegionInfo == false) return false; RegionInfo other = (RegionInfo) obj; if (other.getSseKmsKeyId() == null ^ this.getSseKmsKeyId() == null) return false; if (other.getSseKmsKeyId() != null && other.getSseKmsKeyId().equals(this.getSseKmsKeyId()) == false) return false; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; if (other.getStatusMessage() == null ^ this.getStatusMessage() == null) return false; if (other.getStatusMessage() != null && other.getStatusMessage().equals(this.getStatusMessage()) == false) return false; if (other.getStatusUpdateDateTime() == null ^ this.getStatusUpdateDateTime() == null) return false; if (other.getStatusUpdateDateTime() != null && other.getStatusUpdateDateTime().equals(this.getStatusUpdateDateTime()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getSseKmsKeyId() == null) ? 0 : getSseKmsKeyId().hashCode()); hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode()); hashCode = prime * hashCode + ((getStatusMessage() == null) ? 0 : getStatusMessage().hashCode()); hashCode = prime * hashCode + ((getStatusUpdateDateTime() == null) ? 0 : getStatusUpdateDateTime().hashCode()); return hashCode; } @Override public RegionInfo clone() { try { return (RegionInfo) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.ssmincidents.model.transform.RegionInfoMarshaller.getInstance().marshall(this, protocolMarshaller); } }
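/*
 * Usage sketch: populating a RegionInfo with the fluent "with" methods defined above. The key ARN,
 * status and message strings are placeholders; in practice the service returns these values rather
 * than the caller constructing them.
 */
package com.amazonaws.services.ssmincidents.model;

public class RegionInfoExample {
    public static void main(String[] args) {
        RegionInfo regionInfo = new RegionInfo()
                .withSseKmsKeyId("arn:aws:kms:us-east-1:111122223333:key/placeholder") // placeholder key ARN
                .withStatus("ACTIVE")                       // string form; an overload also accepts RegionStatus
                .withStatusMessage("Replication set is healthy")
                .withStatusUpdateDateTime(new java.util.Date());

        // toString(), equals() and hashCode() above operate on exactly these four members.
        System.out.println(regionInfo);
    }
}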
/* Copyright 2010 Santiago Ontanon and Ashwin Ram */ package gatech.mmpm; import gatech.mmpm.util.XMLWriter; import java.util.LinkedList; import java.util.List; public class TwoDMap extends Map { private PhysicalEntity map[][]; int size[]; public TwoDMap(int a_width, int a_height, float a_cell_width, float a_cell_height) { super(2); size = new int[2]; size[0]= a_width; size[1]= a_height; cell_size[0] = a_cell_width; cell_size[1] = a_cell_height; map = new PhysicalEntity[size[0]][size[1]]; } public int getSizeInDimension(int d) { return size[d]; } public void addEntity(PhysicalEntity pe) { if(pe == null || pe.get_Coords() == null) return; int cc[] = toCellCoords(pe.get_Coords()); map[cc[0]][cc[1]] = pe; } public boolean setCellLocation(char mapCharacter, int cellCoords[], gatech.mmpm.IDomain idomain) { float coords[] = toCoords(cellCoords); return setCellLocation(mapCharacter, coords, idomain); } public boolean setCellLocation(char mapCharacter, float coords[], gatech.mmpm.IDomain idomain) { if(coords == null) return false; int cellCoords[] = toCellCoords(coords); map[cellCoords[0]][cellCoords[1]] = null; if (mapCharacter!='.') { PhysicalEntity mapEntity = (PhysicalEntity) idomain.getEntityByShortName(mapCharacter, null, null); if(mapEntity != null){ mapEntity.setx(coords[0]); mapEntity.sety(coords[1]); } map[cellCoords[0]][cellCoords[1]] = mapEntity; } return true; } public void printMap() { for(int i=0; i<size[1]; i++) { for(int j=0; j<size[0]; j++) { if ( map[j][i] != null ) System.out.print(map[j][i].instanceShortName()); else System.out.print("."); } System.out.println(); } } public PhysicalEntity getCellLocation(int cellCoords[]) { if(cellCoords == null) return null; return map[cellCoords[0]][cellCoords[1]]; } public PhysicalEntity getCellLocation(float coords[]) { return getCellLocation(toCellCoords(coords)); } // Objects in the map are NOT cloned, since the assumption is that // objects in the map are undistinguishable (they have no ID, etc.) public Object clone() { int i,j; TwoDMap m = new TwoDMap(size[0],size[1],cell_size[0],cell_size[1]); for(i=0;i<size[0];i++) { for(j=0;j<size[1];j++) { // If we wanted the entities to be cloned too, then we should swap // the following two lines: // m.map[i][j] = (map[i][j]==null ? null:(PhysicalEntity)(map[i][j].clone())); m.map[i][j] = map[i][j]; } } return m; } public Object cloneWithSameEntities() { int i,j; TwoDMap m = new TwoDMap(size[0],size[1],cell_size[0],cell_size[1]); for(i=0;i<size[0];i++) { for(j=0;j<size[1];j++) { m.map[i][j] = map[i][j]; } } return m; } public int size() { return size[0]*size[1]; } public PhysicalEntity get(int i) { int x = i%size[0]; int y = i/size[0]; if (x>=0 && x<size[0] && y>=0 && y<size[1]) return map[x][y]; return null; } public String toString() { return "a " + size[0] + "x" + size[1] + " 2dmap"; } /** * Writes the 2DMap info to an XMLWriter object * @param w The XMLWriter object */ public void writeToXML(XMLWriter w) { w.tagWithAttributes("entity","id=\"0\""); w.tag("type","map"); w.tag("width",size[0]); w.tag("height",size[1]); w.tag("cell-width",cell_size[0]); w.tag("cell-height",cell_size[1]); w.tag("background"); for(int i=0;i<size[1];i++) { char s[] = new char[size[0]]; for(int j=0;j<size[0];j++) { s[j]=(map[j][i]!=null ? 
map[j][i].instanceShortName() : '.'); } w.tag("r",new String(s)); } w.tag("/background"); w.tag("/entity"); } public void writeDifferenceToXML(XMLWriter w, Map previousMap) { w.tagWithAttributes("entity","id=\"0\""); w.tag("background"); String row = ""; for(int i=0;i<size[1];i++) { char s[] = new char[size[0]]; char prev_s[] = new char[size[0]]; for(int j=0;j<size[0];j++) { s[j]=(map[j][i]!=null ? map[j][i].instanceShortName() : '.'); prev_s[j]=(previousMap.getCellLocation(new int[]{j,i})!=null ? previousMap.getCellLocation(new int[]{j,i}).instanceShortName() : '.'); } if(new String(s).equals(new String(prev_s))) row +="<r></r>"; //w.tag("r",""); else row += "<r>" + new String(s) + "</r>"; //w.tag("r",new String(s)); } w.rawXML(row); w.tag("/background"); w.tag("/entity"); } public void deleteEntity(String id) { for(int x=0;x<size[0];x++) { for(int y=0;y<size[1];y++) { if (map[x][y]!=null && map[x][y].getentityID().equals(id)) { map[x][y]=null; return; } } } } public void deleteEntity(PhysicalEntity e) { if(e == null || e.get_Coords() == null) return; int cellCoords[] = toCellCoords(e.get_Coords()); int x = cellCoords[0]; int y = cellCoords[1]; /* if (map[x][y]!=null && map[x][y]==e) { map[x][y]=null; } */ if (map[x][y]!=null && map[x][y].getClass()==e.getClass()) { map[x][y]=null; } } public List<PhysicalEntity> getCollisionsOf(PhysicalEntity e) { List<PhysicalEntity> l = new LinkedList<PhysicalEntity>(); if(e == null || e.get_Coords() == null) return l; int x1 = (int)(e.getx()/cell_size[0]); int y1 = (int)(e.gety()/cell_size[1]); int x2 = (int)((e.getx()+e.getwidth())/cell_size[0]); int y2 = (int)((e.gety()+e.getlength())/cell_size[1]); for(int x=x1;x<=x2;x++) { for(int y=y1;y<=y2;y++) { if (x>=0 && x<size[0] && y>=0 && y<size[1] && map[x][y]!=null && e.collision(map[x][y])) l.add(map[x][y]); } } return l; } public float[] toCoords(int pos1) { float coords[] = new float[3]; coords[0]=(pos1%size[0])*cell_size[0]; coords[1]=(pos1/size[0])*cell_size[1]; coords[2]=0; return coords; } public double distance(int cellCoords1[],int cellCoords2[]) { return distance(toCoords(cellCoords1), toCoords(cellCoords2)); } public double distance(float coords1[],float coords2[]) { if(coords1 == null || coords2 == null) return Float.MAX_VALUE; return Math.sqrt((coords2[0]-coords1[0])*(coords2[0]-coords1[0])+(coords2[1]-coords1[1])*(coords2[1]-coords1[1])); } public float squareDistance(int cellCoords1[],int cellCoords2[]) { // return squareDistance(toCoords(cellCoords1), toCoords(cellCoords2)); if(cellCoords1 == null || cellCoords2 == null) return Float.MAX_VALUE; return (cellCoords1[0]-cellCoords2[0])*(cellCoords1[0]-cellCoords2[0])+(cellCoords1[1]-cellCoords2[1])*(cellCoords1[1]-cellCoords2[1]); } public float squareDistance(float coords1[],float coords2[]) { if(coords1 == null || coords2 == null) return Float.MAX_VALUE; return (coords2[0]-coords1[0])*(coords2[0]-coords1[0])+(coords2[1]-coords1[1])*(coords2[1]-coords1[1]); } public boolean areNeighbors(int cellCoords1[],int cellCoords2[]) { if(cellCoords1 == null || cellCoords2 == null) return false; if( ( (cellCoords1[0] == cellCoords2[0]) && (Math.abs(cellCoords1[1] - cellCoords2[1]) == 1) ) || ( (cellCoords1[1] == cellCoords2[1]) && (Math.abs(cellCoords1[0] - cellCoords2[0]) == 1) ) ) return true; else return false; } public boolean areNeighbors(float coords1[],float coords2[]) { int cellCoords1[] = toCellCoords(coords1); int cellCoords2[] = toCellCoords(coords2); return areNeighbors(cellCoords1, cellCoords2); } public int[] toCellCoords(int pos1) {
int coords[] = new int[3]; coords[0]=(pos1%size[0]); coords[1]=(pos1/size[0]); coords[2]=0; return coords; } public void toCellCoords(int pos1,int []cellCoords) { cellCoords[0]=(pos1%size[0]); cellCoords[1]=(pos1/size[0]); cellCoords[2]=0; } public int[] toCellCoords(float coords[]) { int cellCoords[] = new int[3]; if(coords != null) { cellCoords[0]=(int)(coords[0]/cell_size[0]); cellCoords[1]=(int)(coords[1]/cell_size[1]); cellCoords[2]=0; } return cellCoords; } public float[] toCoords(int cellcoords[]) { float coords[] = new float[3]; if(cellcoords != null) { coords[0]=(cellcoords[0])*cell_size[0]; coords[1]=(cellcoords[1])*cell_size[1]; coords[2]=0; } return coords; } public int toCell(float coords[]) { int cellCoords[] = toCellCoords(coords); return toCell(cellCoords); } public int toCell(int cellCoords[]) { if(cellCoords == null) return 0; int cell; cell=cellCoords[0]; cell+=cellCoords[1]*size[0]; return cell; } public void toCellCoords(float[] coords_in, int[] coords_out) { if(coords_in == null) return; coords_out[0]=(int) (coords_in[0]/cell_size[0]); coords_out[1]=(int) (coords_in[1]/cell_size[1]); coords_out[2]=0; } }
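/*
 * Usage sketch for the coordinate helpers above: a hypothetical 8x8 map with 16x16 cells, converting
 * continuous coordinates to cell coordinates, to a flat cell index, and back. Only public TwoDMap
 * methods defined above are used; no entities are placed on the map.
 */
package gatech.mmpm;

public class TwoDMapExample {
    public static void main(String[] args) {
        TwoDMap map = new TwoDMap(8, 8, 16.0f, 16.0f);

        float[] worldPos = {40.0f, 20.0f, 0.0f};      // a point inside cell (2, 1)
        int[] cell = map.toCellCoords(worldPos);      // -> {2, 1, 0}
        int flatIndex = map.toCell(cell);             // -> 1 * 8 + 2 = 10
        float[] cellOrigin = map.toCoords(cell);      // -> {32.0, 16.0, 0.0}

        System.out.println("cell=" + cell[0] + "," + cell[1]
                + " index=" + flatIndex
                + " origin=" + cellOrigin[0] + "," + cellOrigin[1]);

        // Neighbourhood test uses 4-connectivity: same row or column, one cell apart.
        boolean adjacent = map.areNeighbors(new int[]{2, 1, 0}, new int[]{2, 2, 0}); // true
        System.out.println("adjacent=" + adjacent);
    }
}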
/** * Copyright (C) 2013 Motown.IO ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.motown.ocpp.v12.soap.chargepoint; import io.motown.domain.api.chargingstation.*; import io.motown.ocpp.v12.soap.chargepoint.schema.*; import io.motown.ocpp.viewmodel.domain.DomainService; import io.motown.ocpp.viewmodel.ocpp.ChargingStationOcpp12Client; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Date; import static com.google.common.base.Preconditions.checkNotNull; public class ChargingStationOcpp12SoapClient implements ChargingStationOcpp12Client { private static final Logger LOG = LoggerFactory.getLogger(ChargingStationOcpp12SoapClient.class); private DomainService domainService; private ChargingStationProxyFactory chargingStationProxyFactory; /** * {@inheritDoc} */ @Override public boolean startTransaction(ChargingStationId id, IdentifyingToken identifyingToken, EvseId evseId) { LOG.info("Requesting remote start transaction on {}", id); ChargePointService chargePointService = this.createChargingStationService(id); RemoteStartTransactionRequest request = new RemoteStartTransactionRequest(); request.setIdTag(identifyingToken.getToken()); request.setConnectorId(evseId.getNumberedId()); RemoteStartTransactionResponse response = chargePointService.remoteStartTransaction(request, id.getId()); boolean willTransactionStart; switch (response.getStatus()) { case ACCEPTED: LOG.info("Remote start transaction request on {} has been accepted", id); willTransactionStart = true; break; case REJECTED: LOG.info("Remote start transaction request on {} has been rejected", id); willTransactionStart = false; break; default: throw new AssertionError("Start transaction returned unknown response status " + response.getStatus()); } return willTransactionStart; } /** * {@inheritDoc} */ @Override public boolean stopTransaction(ChargingStationId id, int transactionId) { LOG.debug("Stopping transaction {} on {}", transactionId, id); ChargePointService chargePointService = this.createChargingStationService(id); RemoteStopTransactionRequest request = new RemoteStopTransactionRequest(); request.setTransactionId(transactionId); RemoteStopTransactionResponse response; response = chargePointService.remoteStopTransaction(request, id.getId()); boolean willTransactionStop; switch (response.getStatus()) { case ACCEPTED: LOG.info("Remote stop transaction request on {} has been accepted", id); willTransactionStop = true; break; case REJECTED: LOG.info("Remote stop transaction request on {} has been rejected", id); willTransactionStop = false; break; default: throw new AssertionError("Stop transaction returned unknown response status " + response.getStatus()); } return willTransactionStop; } /** * {@inheritDoc} */ @Override public boolean softReset(ChargingStationId id) { LOG.info("Requesting soft reset on {}", id); return reset(id, ResetType.SOFT); } /** * {@inheritDoc} */ @Override public boolean hardReset(ChargingStationId id) { LOG.info("Requesting hard reset on {}", id); return reset(id, 
ResetType.HARD); } @Override public RequestResult unlockConnector(ChargingStationId id, EvseId evseId) { LOG.debug("Unlocking of connector {} on {}", evseId, id); ChargePointService chargePointService = this.createChargingStationService(id); UnlockConnectorRequest request = new UnlockConnectorRequest(); request.setConnectorId(evseId.getNumberedId()); UnlockConnectorResponse response = chargePointService.unlockConnector(request, id.getId()); if (UnlockStatus.ACCEPTED.equals(response.getStatus())) { LOG.info("Unlocking of connector {} on {} has been accepted", evseId, id); return RequestResult.SUCCESS; } else { LOG.warn("Unlocking of connector {} on {} has been rejected", evseId, id); return RequestResult.FAILURE; } } @Override public RequestResult changeAvailabilityToInoperative(ChargingStationId id, EvseId evseId) { LOG.debug("Changing availability of connector {} on {} to inoperative", evseId, id); return changeAvailability(id, evseId, AvailabilityType.INOPERATIVE); } @Override public RequestResult changeAvailabilityToOperative(ChargingStationId id, EvseId evseId) { LOG.debug("Changing availability of connector {} on {} to operative", evseId, id); return changeAvailability(id, evseId, AvailabilityType.OPERATIVE); } /** * {@inheritDoc} */ @Override public boolean changeConfiguration(ChargingStationId id, ConfigurationItem configurationItem) { checkNotNull(id); checkNotNull(configurationItem); final String key = configurationItem.getKey(); final String value = configurationItem.getValue(); ChargePointService chargePointService = this.createChargingStationService(id); ChangeConfigurationRequest request = new ChangeConfigurationRequest(); request.setKey(key); request.setValue(value); ChangeConfigurationResponse response = chargePointService.changeConfiguration(request, id.getId()); boolean hasConfigurationChanged; switch (response.getStatus()) { case ACCEPTED: LOG.info("Configuration change of {} on {} has been accepted", key, id); hasConfigurationChanged = true; break; case REJECTED: LOG.info("Configuration change of {} on {} was rejected", key, id); hasConfigurationChanged = false; break; case NOT_SUPPORTED: LOG.info("Configuration change of {} on {} was not supported", key, id); hasConfigurationChanged = false; break; default: throw new AssertionError("Configuration change returned unknown response status " + response.getStatus()); } return hasConfigurationChanged; } @Override public String getDiagnostics(ChargingStationId id, DiagnosticsUploadSettings diagnosticsUploadSettings) { ChargePointService chargePointService = this.createChargingStationService(id); GetDiagnosticsRequest request = new GetDiagnosticsRequest(); request.setLocation(diagnosticsUploadSettings.getUploadLocation()); request.setRetries(diagnosticsUploadSettings.getNumRetries()); request.setRetryInterval(diagnosticsUploadSettings.getRetryInterval()); request.setStartTime(diagnosticsUploadSettings.getPeriodStartTime()); request.setStopTime(diagnosticsUploadSettings.getPeriodStopTime()); GetDiagnosticsResponse response = chargePointService.getDiagnostics(request, id.getId()); return response.getFileName(); } @Override public boolean clearCache(ChargingStationId id) { ChargePointService chargePointService = this.createChargingStationService(id); ClearCacheRequest request = new ClearCacheRequest(); boolean requestResult; ClearCacheResponse response = chargePointService.clearCache(request, id.getId()); if (ClearCacheStatus.ACCEPTED.equals(response.getStatus())) { LOG.info("Clear cache on {} has been accepted", id.getId()); 
requestResult = true; } else { LOG.warn("Clear cache on {} has been rejected", id.getId()); requestResult = false; } return requestResult; } @Override public void updateFirmware(ChargingStationId id, String downloadLocation, Date retrieveDate, Integer numRetries, Integer retryInterval) { ChargePointService chargePointService = this.createChargingStationService(id); UpdateFirmwareRequest request = new UpdateFirmwareRequest(); request.setLocation(downloadLocation); request.setRetrieveDate(retrieveDate); request.setRetries(numRetries); request.setRetryInterval(retryInterval); chargePointService.updateFirmware(request, id.getId()); //The charging station will respond with an async 'firmware status update' message LOG.info("Update firmware on {} has been requested", id.getId()); } public void setDomainService(DomainService domainService) { this.domainService = domainService; } public void setChargingStationProxyFactory(ChargingStationProxyFactory chargingStationProxyFactory) { this.chargingStationProxyFactory = chargingStationProxyFactory; } /** * Reset a charging station. * * @param id the charging station's id. * @param type the type of reset (i.e. soft or hard). * @return true if the charging station has reset, false if it hasn't. */ private boolean reset(ChargingStationId id, ResetType type) { ChargePointService chargePointService = this.createChargingStationService(id); ResetRequest request = new ResetRequest(); request.setType(type); ResetResponse response = chargePointService.reset(request, id.getId()); boolean hasReset; switch (response.getStatus()) { case ACCEPTED: LOG.info("Reset was accepted"); hasReset = true; break; case REJECTED: LOG.info("Reset was rejected"); hasReset = false; break; default: throw new AssertionError("Unknown ResetStatus: " + response.getStatus()); } return hasReset; } private RequestResult changeAvailability(ChargingStationId id, EvseId evseId, AvailabilityType type) { ChargePointService chargePointService = this.createChargingStationService(id); ChangeAvailabilityRequest request = new ChangeAvailabilityRequest(); request.setConnectorId(evseId.getNumberedId()); request.setType(type); ChangeAvailabilityResponse response = chargePointService.changeAvailability(request, id.getId()); if (AvailabilityStatus.ACCEPTED.equals(response.getStatus()) || AvailabilityStatus.SCHEDULED.equals(response.getStatus())) { return RequestResult.SUCCESS; } else { return RequestResult.FAILURE; } } /** * Creates a charging station web service proxy based on the address that has been stored for this charging station identifier. * * @param id charging station identifier * @return charging station web service proxy */ private ChargePointService createChargingStationService(ChargingStationId id) { return chargingStationProxyFactory.createChargingStationService(domainService.retrieveChargingStationAddress(id)); } }
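/*
 * A minimal usage sketch (not part of the Motown sources) for the OCPP 1.2 SOAP client above.
 * How DomainService and ChargingStationProxyFactory instances are obtained, and how the station
 * id, identifying token and EVSE id are constructed, is assumed here; in the real project these
 * collaborators are injected, so they are simply taken as parameters. The io.motown imports used
 * by the class above are assumed to be in scope.
 */
class Ocpp12ClientUsageSketch {

    boolean remoteStart(DomainService domainService,
                        ChargingStationProxyFactory proxyFactory,
                        ChargingStationId stationId,
                        IdentifyingToken token,
                        EvseId evseId) {
        ChargingStationOcpp12SoapClient client = new ChargingStationOcpp12SoapClient();
        client.setDomainService(domainService);              // used to look up the station's SOAP address
        client.setChargingStationProxyFactory(proxyFactory); // builds the ChargePointService proxy

        // true means the charge point ACCEPTED the remote start request; REJECTED yields false,
        // and any other status makes the client throw an AssertionError.
        return client.startTransaction(stationId, token, evseId);
    }
}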
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.execution.actions; import com.intellij.execution.Location; import com.intellij.execution.PsiLocation; import com.intellij.execution.RunManager; import com.intellij.execution.RunnerAndConfigurationSettings; import com.intellij.execution.configurations.ConfigurationType; import com.intellij.execution.configurations.RunConfiguration; import com.intellij.execution.junit.RuntimeConfigurationProducer; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.ide.DataManager; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.util.PsiUtilCore; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.stream.Collectors; /** * Context for creating run configurations from a location in the source code. * * @see RunConfigurationProducer */ public class ConfigurationContext { private static final Logger LOG = Logger.getInstance(ConfigurationContext.class); public static final Key<ConfigurationContext> SHARED_CONTEXT = Key.create("SHARED_CONTEXT"); private final Location<PsiElement> myLocation; private final Editor myEditor; private RunnerAndConfigurationSettings myConfiguration; private boolean myInitialized; private boolean myMultipleSelection; private Ref<RunnerAndConfigurationSettings> myExistingConfiguration; private final Module myModule; private final RunConfiguration myRuntimeConfiguration; private final DataContext myDataContext; private final String myPlace; private List<RuntimeConfigurationProducer> myPreferredProducers; private List<ConfigurationFromContext> myConfigurationsFromContext; /** * @deprecated use {@link ConfigurationContext#getFromContext(DataContext dataContext, String place)} */ @NotNull @Deprecated public static ConfigurationContext getFromContext(DataContext dataContext) { return getFromContext(dataContext, ActionPlaces.UNKNOWN); } @NotNull public static ConfigurationContext getFromContext(DataContext dataContext, String place) { DataManager dataManager = DataManager.getInstance(); ConfigurationContext sharedContext = dataManager.loadFromDataContext(dataContext, SHARED_CONTEXT); Location<?> sharedLocation = sharedContext == null ? null : sharedContext.getLocation(); PsiElement sharedPsiElement = sharedLocation == null ? null : sharedLocation.getPsiElement(); Module module = PlatformCoreDataKeys.MODULE.getData(dataContext); Location<PsiElement> location = calcLocation(dataContext, module); PsiElement psiElement = location == null ? 
null : location.getPsiElement(); if (sharedLocation == null || location == null || !Comparing.equal(sharedPsiElement, psiElement)) { boolean isMultipleSelection = isMultipleSelection(dataContext); sharedContext = new ConfigurationContext(dataContext, location, module, isMultipleSelection, place); dataManager.saveInDataContext(dataContext, SHARED_CONTEXT, sharedContext); } return sharedContext; } @NotNull public static ConfigurationContext createEmptyContextForLocation(@NotNull Location location) { return new ConfigurationContext(location); } private ConfigurationContext(final DataContext dataContext, Location<PsiElement> location, Module module, boolean multipleSelection, String place) { RunConfiguration configuration = RunConfiguration.DATA_KEY.getData(dataContext); if (configuration == null) { ExecutionEnvironment environment = dataContext.getData(ExecutionDataKeys.EXECUTION_ENVIRONMENT); if (environment != null) { myConfiguration = environment.getRunnerAndConfigurationSettings(); if (myConfiguration != null) { myExistingConfiguration = Ref.create(myConfiguration); configuration = myConfiguration.getConfiguration(); } } } myEditor = CommonDataKeys.EDITOR.getData(dataContext); myRuntimeConfiguration = configuration; myDataContext = dataContext; myModule = module; myLocation = location; myMultipleSelection = multipleSelection; myPlace = place; } private static @Nullable Location<PsiElement> calcLocation(@NotNull DataContext dataContext, Module module) { Location<?> location = Location.DATA_KEY.getData(dataContext); if (location != null) { //noinspection unchecked return (Location<PsiElement>)location; } Project project = CommonDataKeys.PROJECT.getData(dataContext); if (project == null) { return null; } PsiElement element = getSelectedPsiElement(dataContext, project); if (element == null) { return null; } return new PsiLocation<>(project, module, element); } private static boolean isMultipleSelection(@NotNull DataContext dataContext) { Location<?> location = Location.DATA_KEY.getData(dataContext); Location<?>[] locations = Location.DATA_KEYS.getData(dataContext); PsiElement[] elements = LangDataKeys.PSI_ELEMENT_ARRAY.getData(dataContext); VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext); return location != null && locations != null && locations.length > 1 || elements != null && elements.length > 1 || files != null && files.length > 1; } public ConfigurationContext(@NotNull PsiElement element) { myModule = ModuleUtilCore.findModuleForPsiElement(element); myLocation = new PsiLocation<>(element.getProject(), myModule, element); myRuntimeConfiguration = null; myDataContext = this::getDefaultData; myEditor = null; myPlace = null; } private ConfigurationContext(@NotNull Location location) { //noinspection unchecked myLocation = location; myModule = location.getModule(); myEditor = null; myRuntimeConfiguration = null; myDataContext = this::getDefaultData; myPlace = null; } private Object getDefaultData(String dataId) { if (CommonDataKeys.PROJECT.is(dataId)) return myLocation.getProject(); if (PlatformCoreDataKeys.MODULE.is(dataId)) return myModule; if (Location.DATA_KEY.is(dataId)) return myLocation; if (CommonDataKeys.PSI_ELEMENT.is(dataId)) return myLocation.getPsiElement(); if (LangDataKeys.PSI_ELEMENT_ARRAY.is(dataId)) return ContainerUtil.ar(myLocation.getPsiElement()); if (CommonDataKeys.VIRTUAL_FILE.is(dataId)) return PsiUtilCore.getVirtualFile(myLocation.getPsiElement()); if (CommonDataKeys.EDITOR.is(dataId)) return myEditor; return null; } public DataContext 
getDefaultDataContext() { return this::getDefaultData; } public boolean containsMultipleSelection() { return myMultipleSelection; } /** * Returns the configuration created from this context. * * @return the configuration, or null if none of the producers were able to create a configuration from this context. */ @Nullable public synchronized RunnerAndConfigurationSettings getConfiguration() { if (myConfiguration == null && !myInitialized) { createConfiguration(); } return myConfiguration; } private void createConfiguration() { LOG.assertTrue(myConfiguration == null); final Location location = getLocation(); myConfiguration = location != null && !DumbService.isDumb(location.getProject()) ? PreferredProducerFind.createConfiguration(location, this) : null; myInitialized = true; } public synchronized void setConfiguration(@NotNull RunnerAndConfigurationSettings configuration) { myConfiguration = configuration; myInitialized = true; } /** * Returns the source code location for this context. * * @return the source code location, or null if no source code fragment is currently selected. */ @Nullable public Location getLocation() { return myLocation; } /** * Returns the place for action which created this context. * @return the place for action which created this context. */ @Nullable public String getPlace() { return myPlace; } /** * Returns the PSI element at caret for this context. * * @return the PSI element, or null if no source code fragment is currently selected. */ @Nullable public PsiElement getPsiLocation() { return myLocation != null ? myLocation.getPsiElement() : null; } /** * Finds an existing run configuration matching the context. * * @return an existing configuration, or null if none was found. */ @Nullable public RunnerAndConfigurationSettings findExisting() { if (myExistingConfiguration != null) { RunnerAndConfigurationSettings configuration = myExistingConfiguration.get(); if (configuration == null || !Registry.is("suggest.all.run.configurations.from.context") || configuration.equals(myConfiguration)) { return configuration; } } myExistingConfiguration = new Ref<>(); if (myLocation == null) { return null; } final PsiElement psiElement = myLocation.getPsiElement(); if (!psiElement.isValid()) { return null; } if (MultipleRunLocationsProvider.findAlternativeLocations(myLocation) != null) { myExistingConfiguration.set(null); return null; } final List<RuntimeConfigurationProducer> producers = findPreferredProducers(); List<ExistingConfiguration> existingConfigurations = new ArrayList<>(); if (producers != null) { for (RuntimeConfigurationProducer producer : producers) { RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(myLocation, this); if (configuration != null) { existingConfigurations.add(new ExistingConfiguration(configuration, null)); } } } for (RunConfigurationProducer<?> producer : RunConfigurationProducer.getProducers(getProject())) { RunnerAndConfigurationSettings configuration = producer.findExistingConfiguration(this); if (configuration != null) { existingConfigurations.add(new ExistingConfiguration(configuration, producer)); } } myExistingConfiguration.set(findPreferredConfiguration(existingConfigurations, psiElement)); return myExistingConfiguration.get(); } @Nullable private RunnerAndConfigurationSettings findPreferredConfiguration(@NotNull List<ExistingConfiguration> existingConfigurations, @NotNull PsiElement psiElement) { List<ConfigurationFromContext> configurationsFromContext = getConfigurationsFromContext(); if 
(configurationsFromContext == null) return null; for (ExistingConfiguration configuration : existingConfigurations) { RunnerAndConfigurationSettings settings = configuration.getSettings(); if (settings.equals(myConfiguration)) { return settings; } if (myRuntimeConfiguration != null && settings.getConfiguration() == myRuntimeConfiguration) { return settings; } } Set<RunnerAndConfigurationSettings> fromContextSettings = configurationsFromContext.stream().map(c -> c.getConfigurationSettings()).collect(Collectors.toSet()); if (!ContainerUtil.exists(existingConfigurations, e -> fromContextSettings.contains(e.getSettings()))) { return null; } if (Registry.is("suggest.all.run.configurations.from.context")) { return null; } List<ConfigurationFromContext> contexts = ContainerUtil.mapNotNull(existingConfigurations, configuration -> { if (configuration.getProducer() == null || !fromContextSettings.contains(configuration.getSettings())) { return null; } return new ConfigurationFromContextImpl(configuration.getProducer(), configuration.getSettings(), psiElement); }); if (!contexts.isEmpty()) { ConfigurationFromContext min = Collections.min(contexts, ConfigurationFromContext.COMPARATOR); return min.getConfigurationSettings(); } ExistingConfiguration first = ContainerUtil.getFirstItem(existingConfigurations); return first != null ? first.getSettings() : null; } @Nullable private static PsiElement getSelectedPsiElement(final DataContext dataContext, final Project project) { PsiElement element = null; final Editor editor = CommonDataKeys.EDITOR.getData(dataContext); if (editor != null){ final PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument()); if (psiFile != null) { final int offset = editor.getCaretModel().getOffset(); element = psiFile.findElementAt(offset); if (element == null && offset > 0 && offset == psiFile.getTextLength()) { element = psiFile.findElementAt(offset-1); } } } if (element == null) { final PsiElement[] elements = LangDataKeys.PSI_ELEMENT_ARRAY.getData(dataContext); element = elements != null && elements.length > 0 ? elements[0] : null; } if (element == null) { final VirtualFile[] files = CommonDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext); if (files != null && files.length > 0) { element = PsiManager.getInstance(project).findFile(files[0]); } } return element; } @NotNull public RunManager getRunManager() { return RunManager.getInstance(getProject()); } public Project getProject() { return myConfiguration == null ? myLocation.getProject() : myConfiguration.getConfiguration().getProject(); } public Module getModule() { return myModule; } public DataContext getDataContext() { return myDataContext; } /** * Returns original {@link RunConfiguration} from this context. * For example, it could be some test framework runtime configuration that had been launched * and that had brought a result test tree on which a right-click action was performed. * * @param type {@link ConfigurationType} instance to filter original runtime configuration by its type * @return {@link RunConfiguration} instance, it could be null */ @Nullable public RunConfiguration getOriginalConfiguration(@Nullable ConfigurationType type) { if (type == null || (myRuntimeConfiguration != null && myRuntimeConfiguration.getType() == type)) { return myRuntimeConfiguration; } return null; } /** * Checks if the original run configuration matches the passed type. * If the original run configuration is undefined, the check is passed too. 
* An original run configuration is a run configuration associated with the given context. * For example, it could be a test framework run configuration that had been launched * and that had brought a result test tree on which a right-click action was performed (and this context was created). In this case, other run configuration producers might not want to work on such elements. * * @param type {@link ConfigurationType} instance to match the original run configuration * @return true if the original run configuration is of the same type or if it is undefined; false otherwise */ public boolean isCompatibleWithOriginalRunConfiguration(@NotNull ConfigurationType type) { return myRuntimeConfiguration == null || myRuntimeConfiguration.getType() == type; } @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") @Nullable public List<RuntimeConfigurationProducer> findPreferredProducers() { if (myPreferredProducers == null) { myPreferredProducers = PreferredProducerFind.findPreferredProducers(myLocation, this, true); } return myPreferredProducers; } @Nullable public List<ConfigurationFromContext> getConfigurationsFromContext() { if (myConfigurationsFromContext == null) { myConfigurationsFromContext = PreferredProducerFind.getConfigurationsFromContext(myLocation, this, true); } return myConfigurationsFromContext; } private static final class ExistingConfiguration { private final RunnerAndConfigurationSettings myConfigurationSettings; private final RunConfigurationProducer<?> myProducer; private ExistingConfiguration(@NotNull RunnerAndConfigurationSettings configurationSettings, @Nullable RunConfigurationProducer<?> producer) { myConfigurationSettings = configurationSettings; myProducer = producer; } @NotNull private RunnerAndConfigurationSettings getSettings() { return myConfigurationSettings; } @Nullable private RunConfigurationProducer<?> getProducer() { return myProducer; } } }
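/*
 * A minimal sketch (not part of the IntelliJ sources) of how an action or producer might use
 * ConfigurationContext: build it from the action's DataContext, prefer an existing matching
 * run configuration, and otherwise fall back to the one created from the context. The
 * surrounding imports (AnActionEvent and friends from com.intellij.openapi.actionSystem) are
 * assumed to be available as above.
 */
class ConfigurationContextUsageSketch {

    @Nullable
    RunnerAndConfigurationSettings pickConfiguration(@NotNull AnActionEvent e) {
        ConfigurationContext context = ConfigurationContext.getFromContext(e.getDataContext(), e.getPlace());
        if (context.getLocation() == null) {
            return null; // nothing runnable is selected in the editor or the project view
        }
        // Reuse a configuration the user already has before creating a new one from context.
        RunnerAndConfigurationSettings existing = context.findExisting();
        return existing != null ? existing : context.getConfiguration();
    }
}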
// Copyright 2015 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.dfp.jaxws.utils.v201502; import org.apache.commons.lang.builder.EqualsBuilder; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.util.Calendar; import java.util.Locale; import java.util.TimeZone; /** * Test for {@link DateTimes}. */ @RunWith(JUnit4.class) public class DateTimesTest { private static final String TIME_ZONE_ID1 = "Asia/Shanghai"; private static final String TIME_ZONE_ID2 = "PST8PDT"; private static final String TIME_ZONE_ID3 = "Etc/GMT"; private static final Locale LOCALE1 = Locale.CHINA; private static final Locale LOCALE2 = Locale.ENGLISH; private static final Locale LOCALE3 = Locale.UK; private com.google.api.ads.dfp.jaxws.v201502.DateTime apiDateTime1; private com.google.api.ads.dfp.jaxws.v201502.DateTime apiDateTime2; private com.google.api.ads.dfp.jaxws.v201502.DateTime apiDateTime3; private DateTime jodaDateTime1; private DateTime jodaDateTime2; private DateTime jodaDateTime3; private Calendar calendar1; private Calendar calendar2; private Calendar calendar3; private Calendar calendarWithLocale1; private Calendar calendarWithLocale2; private Calendar calendarWithLocale3; private String stringDate1; private String stringDate2; private String stringDate3; private String stringDateTime1; private String stringDateTime2; private String stringDateTime3; private String stringDateTimeWithTimeZone1; private String stringDateTimeWithTimeZone2; private String stringDateTimeWithTimeZone3; public DateTimesTest() {} @Before public void setUp() throws Exception { apiDateTime1 = new com.google.api.ads.dfp.jaxws.v201502.DateTime(); com.google.api.ads.dfp.jaxws.v201502.Date apiDate1 = new com.google.api.ads.dfp.jaxws.v201502.Date(); apiDate1.setYear(2012); apiDate1.setMonth(12); apiDate1.setDay(2); apiDateTime1.setDate(apiDate1); apiDateTime1.setHour(12); apiDateTime1.setMinute(45); apiDateTime1.setSecond(0); apiDateTime1.setTimeZoneID(TIME_ZONE_ID1); stringDate1 = "2012-12-02"; stringDateTime1 = "2012-12-02T12:45:00"; stringDateTimeWithTimeZone1 = "2012-12-02T12:45:00+08:00"; jodaDateTime1 = new DateTime(DateTimeZone.forID(TIME_ZONE_ID1)).withYear(2012).withMonthOfYear(12) .withDayOfMonth(2).withHourOfDay(12).withMinuteOfHour(45).withSecondOfMinute(0) .withMillisOfSecond(0); calendar1 = Calendar.getInstance(); calendar1.clear(); calendar1.setTimeZone(DateTimeZone.forID(TIME_ZONE_ID1).toTimeZone()); calendar1.set(2012, 11, 2, 12, 45, 0); calendarWithLocale1 = Calendar.getInstance(LOCALE1); calendarWithLocale1.clear(); calendarWithLocale1.setTimeZone(DateTimeZone.forID(TIME_ZONE_ID1).toTimeZone()); calendarWithLocale1.set(2012, 11, 2, 12, 45, 0); apiDateTime2 = new com.google.api.ads.dfp.jaxws.v201502.DateTime(); com.google.api.ads.dfp.jaxws.v201502.Date apiDate2 = new 
com.google.api.ads.dfp.jaxws.v201502.Date(); apiDate2.setYear(2004); apiDate2.setMonth(2); apiDate2.setDay(29); apiDateTime2.setDate(apiDate2); apiDateTime2.setHour(0); apiDateTime2.setMinute(0); apiDateTime2.setSecond(0); apiDateTime2.setTimeZoneID(TIME_ZONE_ID2); stringDate2 = "2004-02-29"; stringDateTime2 = "2004-02-29T00:00:00"; stringDateTimeWithTimeZone2 = "2004-02-29T00:00:00-08:00"; jodaDateTime2 = new DateTime(DateTimeZone.forID(TIME_ZONE_ID2)).withYear(2004).withMonthOfYear(2) .withDayOfMonth(29).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0) .withMillisOfSecond(0); calendar2 = Calendar.getInstance(); calendar2.clear(); calendar2.setTimeZone(DateTimeZone.forID(TIME_ZONE_ID2).toTimeZone()); calendar2.set(2004, 1, 29, 0, 0, 0); calendarWithLocale2 = Calendar.getInstance(LOCALE2); calendarWithLocale2.clear(); calendarWithLocale2.setTimeZone(DateTimeZone.forID(TIME_ZONE_ID2).toTimeZone()); calendarWithLocale2.set(2004, 1, 29, 0, 0, 0); apiDateTime3 = new com.google.api.ads.dfp.jaxws.v201502.DateTime(); com.google.api.ads.dfp.jaxws.v201502.Date apiDate3 = new com.google.api.ads.dfp.jaxws.v201502.Date(); apiDate3.setYear(2007); apiDate3.setMonth(1); apiDate3.setDay(1); apiDateTime3.setDate(apiDate3); apiDateTime3.setHour(18); apiDateTime3.setMinute(0); apiDateTime3.setSecond(30); apiDateTime3.setTimeZoneID(TIME_ZONE_ID3); stringDate3 = "2007-01-01"; stringDateTime3 = "2007-01-01T18:00:30"; stringDateTimeWithTimeZone3 = "2007-01-01T18:00:30Z"; jodaDateTime3 = new DateTime(DateTimeZone.forID(TIME_ZONE_ID3)).withYear(2007).withMonthOfYear(1) .withDayOfMonth(1).withHourOfDay(18).withMinuteOfHour(0).withSecondOfMinute(30) .withMillisOfSecond(0); calendar3 = Calendar.getInstance(); calendar3.clear(); calendar3.setTimeZone(DateTimeZone.forID(TIME_ZONE_ID3).toTimeZone()); calendar3.set(2007, 0, 1, 18, 0, 30); calendarWithLocale3 = Calendar.getInstance(LOCALE3); calendarWithLocale3.clear(); calendarWithLocale3.setTimeZone(DateTimeZone.forID(TIME_ZONE_ID3).toTimeZone()); calendarWithLocale3.set(2007, 0, 1, 18, 0, 30); } @Test public void testToDateTime_fromInstantToApiDateTime() { assertEquals(apiDateTime1, DateTimes.toDateTime(jodaDateTime1.toInstant(), TIME_ZONE_ID1)); assertEquals(apiDateTime2, DateTimes.toDateTime(jodaDateTime2.toInstant(), TIME_ZONE_ID2)); assertEquals(apiDateTime3, DateTimes.toDateTime(jodaDateTime3.toInstant(), TIME_ZONE_ID3)); } @Test public void testToDateTime_fromCalendarToApiDateTime() { assertEquals(apiDateTime1, DateTimes.toDateTime(calendar1)); assertEquals(apiDateTime2, DateTimes.toDateTime(calendar2)); assertEquals(apiDateTime3, DateTimes.toDateTime(calendar3)); } @Test public void testToDateTime_fromStringToApiDateTime() { assertEquals(apiDateTime1, DateTimes.toDateTime(stringDateTime1, TIME_ZONE_ID1)); assertEquals(apiDateTime2, DateTimes.toDateTime(stringDateTime2, TIME_ZONE_ID2)); assertEquals(apiDateTime3, DateTimes.toDateTime(stringDateTime3, TIME_ZONE_ID3)); } @Test public void testToDateTime_fromStringToApiDateTimeWithTimeZone() { assertEquals(apiDateTime1, DateTimes.toDateTimeWithTimeZone(stringDateTimeWithTimeZone1)); assertEquals(apiDateTime2, DateTimes.toDateTimeWithTimeZone(stringDateTimeWithTimeZone2)); assertEquals(apiDateTime3, DateTimes.toDateTimeWithTimeZone(stringDateTimeWithTimeZone3)); } @Test public void testToDateTime_fromApiDateTimeToJoda() { Object joda = DateTimes.toDateTime(apiDateTime3); assertEquals(jodaDateTime1, DateTimes.toDateTime(apiDateTime1)); assertEquals(jodaDateTime2, DateTimes.toDateTime(apiDateTime2)); 
assertEquals(jodaDateTime3, DateTimes.toDateTime(apiDateTime3)); } @Test public void testToCalendar_fromApiDate() { assertEquals(calendar1, DateTimes.toCalendar(apiDateTime1)); assertEquals(calendar2, DateTimes.toCalendar(apiDateTime2)); assertEquals(calendar3, DateTimes.toCalendar(apiDateTime3)); } @Test public void testToCalendar_fromApiDate_withLocales() { assertEquals(calendarWithLocale1, DateTimes.toCalendar(apiDateTime1, LOCALE1)); assertEquals(calendarWithLocale2, DateTimes.toCalendar(apiDateTime2, LOCALE2)); assertEquals(calendarWithLocale3, DateTimes.toCalendar(apiDateTime3, LOCALE3)); } @Test public void testToString_fromApiDate() { Assert.assertEquals(stringDate1, DateTimes.toString(apiDateTime1.getDate())); Assert.assertEquals(stringDate2, DateTimes.toString(apiDateTime2.getDate())); Assert.assertEquals(stringDate3, DateTimes.toString(apiDateTime3.getDate())); } @Test public void testToString_fromApiDateTime() { Assert.assertEquals(stringDateTime1, DateTimes.toString(apiDateTime1)); Assert.assertEquals(stringDateTime2, DateTimes.toString(apiDateTime2)); Assert.assertEquals(stringDateTime3, DateTimes.toString(apiDateTime3)); } @Test public void testToStringWithTimeZone_fromApiDateTime() { Assert.assertEquals(stringDateTimeWithTimeZone1, DateTimes.toStringWithTimeZone(apiDateTime1)); Assert.assertEquals(stringDateTimeWithTimeZone2, DateTimes.toStringWithTimeZone(apiDateTime2)); Assert.assertEquals(stringDateTimeWithTimeZone3, DateTimes.toStringWithTimeZone(apiDateTime3)); } @Test public void testToStringForTimeZone_fromApiDate() { Assert.assertEquals(stringDateTime1, DateTimes.toStringForTimeZone(apiDateTime1, TIME_ZONE_ID1)); Assert.assertEquals(stringDateTime2, DateTimes.toStringForTimeZone(apiDateTime2, TIME_ZONE_ID2)); Assert.assertEquals(stringDateTime3, DateTimes.toStringForTimeZone(apiDateTime3, TIME_ZONE_ID3)); Assert.assertEquals(stringDateTime1, DateTimes.toStringForTimeZone( DateTimes.toDateTime(jodaDateTime1.withZone(DateTimeZone.forID(TIME_ZONE_ID2))), TIME_ZONE_ID1)); Assert.assertEquals(stringDateTime2, DateTimes.toStringForTimeZone( DateTimes.toDateTime(jodaDateTime2.withZone(DateTimeZone.forID(TIME_ZONE_ID1))), TIME_ZONE_ID2)); Assert.assertEquals(stringDateTime3, DateTimes.toStringForTimeZone( DateTimes.toDateTime(jodaDateTime3.withZone(DateTimeZone.forID(TIME_ZONE_ID1))), TIME_ZONE_ID3)); } @Test public void testTransitive_apiJodaApi() { assertEquals(apiDateTime1, DateTimes.toDateTime(DateTimes.toDateTime(apiDateTime1))); assertEquals(apiDateTime2, DateTimes.toDateTime(DateTimes.toDateTime(apiDateTime2))); assertEquals(apiDateTime3, DateTimes.toDateTime(DateTimes.toDateTime(apiDateTime3))); } @Test public void testTransitive_apiStringApi() { assertEquals(apiDateTime1, DateTimes.toDateTime(DateTimes.toString(apiDateTime1), TIME_ZONE_ID1)); assertEquals(apiDateTime2, DateTimes.toDateTime(DateTimes.toString(apiDateTime2), TIME_ZONE_ID2)); assertEquals(apiDateTime3, DateTimes.toDateTime(DateTimes.toString(apiDateTime3), TIME_ZONE_ID3)); } @Test public void testTransitive_apiCalendarApi() { assertEquals(apiDateTime1, DateTimes.toDateTime(DateTimes.toCalendar(apiDateTime1))); assertEquals(apiDateTime2, DateTimes.toDateTime(DateTimes.toCalendar(apiDateTime2))); assertEquals(apiDateTime3, DateTimes.toDateTime(DateTimes.toCalendar(apiDateTime3))); } @Test public void testTransitive_jodaApiJoda() { assertEquals(jodaDateTime1, DateTimes.toDateTime(DateTimes.toDateTime(jodaDateTime1))); assertEquals(jodaDateTime2, DateTimes.toDateTime(DateTimes.toDateTime(jodaDateTime2))); 
assertEquals(jodaDateTime3, DateTimes.toDateTime(DateTimes.toDateTime(jodaDateTime3))); } @Test public void testTransitive_calendarApiJoda() { assertEquals(jodaDateTime1, DateTimes.toDateTime(DateTimes.toDateTime(calendar1))); assertEquals(jodaDateTime2, DateTimes.toDateTime(DateTimes.toDateTime(calendar2))); assertEquals(jodaDateTime3, DateTimes.toDateTime(DateTimes.toDateTime(calendar3))); } @Test public void testTransitive_stringApiJoda() { assertEquals(jodaDateTime1, DateTimes.toDateTime(DateTimes.toDateTime(stringDateTime1, TIME_ZONE_ID1))); assertEquals(jodaDateTime2, DateTimes.toDateTime(DateTimes.toDateTime(stringDateTime2, TIME_ZONE_ID2))); assertEquals(jodaDateTime3, DateTimes.toDateTime(DateTimes.toDateTime(stringDateTime3, TIME_ZONE_ID3))); } @Test public void testTransitive_stringApiJodaWithTimeZone() { assertEquals(jodaDateTime1, DateTimes.toDateTime(DateTimes.toDateTimeWithTimeZone(stringDateTimeWithTimeZone1))); assertEquals(jodaDateTime2, DateTimes.toDateTime(DateTimes.toDateTimeWithTimeZone(stringDateTimeWithTimeZone2))); assertEquals(jodaDateTime3, DateTimes.toDateTime(DateTimes.toDateTimeWithTimeZone(stringDateTimeWithTimeZone3))); } @Test public void testTransitive_instantApiJoda() { assertEquals(jodaDateTime1, DateTimes.toDateTime(DateTimes.toDateTime(jodaDateTime1.toInstant(), TIME_ZONE_ID1))); assertEquals(jodaDateTime2, DateTimes.toDateTime(DateTimes.toDateTime(jodaDateTime2.toInstant(), TIME_ZONE_ID2))); assertEquals(jodaDateTime3, DateTimes.toDateTime(DateTimes.toDateTime(jodaDateTime3.toInstant(), TIME_ZONE_ID3))); } /** * Asserts Joda DateTimes have the same millis. */ private static void assertEquals(DateTime expected, DateTime actual) { DateTime expectedCopy = new DateTime(expected).withMillisOfSecond(0); DateTime actualCopy = new DateTime(actual).withMillisOfSecond(0); Assert.assertEquals(expectedCopy.getMillis(), actualCopy.getMillis()); } /** * Asserts Calendars have the same millis and locale (first day of week). */ private static void assertEquals(Calendar expected, Calendar actual) { Assert.assertEquals(expected.getTimeInMillis(), actual.getTimeInMillis()); Assert.assertEquals(expected.getFirstDayOfWeek(), actual.getFirstDayOfWeek()); } /** * Asserts that two API date times are equal. */ private static void assertEquals(com.google.api.ads.dfp.jaxws.v201502.DateTime expected, com.google.api.ads.dfp.jaxws.v201502.DateTime actual) { boolean equals = expected == actual || new EqualsBuilder() .append(expected.getDate().getYear(), actual.getDate().getYear()) .append(expected.getDate().getMonth(), actual.getDate().getMonth()) .append(expected.getDate().getDay(), actual.getDate().getDay()) .append(expected.getHour(), actual.getHour()) .append(expected.getMinute(), actual.getMinute()) .append(expected.getSecond(), actual.getSecond()) .append( DateTimeZone.forTimeZone( TimeZone.getTimeZone(expected.getTimeZoneID())).toTimeZone().getRawOffset(), DateTimeZone.forTimeZone( TimeZone.getTimeZone(actual.getTimeZoneID())).toTimeZone().getRawOffset()) .isEquals(); Assert.assertTrue( String.format("Expected: <%s> Actual: <%s>", toString(expected), toString(actual)), equals); } private static String toString(com.google.api.ads.dfp.jaxws.v201502.DateTime apiDateTime) { return apiDateTime == null ? 
"null" : new StringBuilder().append("Year (").append(apiDateTime.getDate().getYear()).append(") ") .append("Month (").append(apiDateTime.getDate().getMonth()).append(") ") .append("Day (").append(apiDateTime.getDate().getDay()).append(") ") .append("Hour (").append(apiDateTime.getHour()).append(") ") .append("Minute (").append(apiDateTime.getMinute()).append(") ") .append("Second (").append(apiDateTime.getSecond()).append(") ") .append("TimeZoneID (").append(apiDateTime.getTimeZoneID()).append(") ") .toString(); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.suggest.context; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.BasicAutomata; import org.apache.lucene.util.automaton.BasicOperations; import org.apache.lucene.util.fst.FST; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import java.io.IOException; import java.util.*; /** * A {@link ContextMapping} is used to define a context that may be used * in conjunction with a suggester. To define a suggester that depends on a * specific context, a derived class of {@link ContextMapping} will be * used to specify the kind of additional information required in order to make * suggestions. 
*/ public abstract class ContextMapping implements ToXContent { /** Character used to separate several contexts */ public static final char SEPARATOR = '\u001D'; /** Dummy Context Mapping that should be used if no context is used*/ public static final SortedMap<String, ContextMapping> EMPTY_MAPPING = Maps.newTreeMap(); /** Dummy Context Config matching the Dummy Mapping by providing an empty context*/ public static final SortedMap<String, ContextConfig> EMPTY_CONFIG = Maps.newTreeMap(); /** Dummy Context matching the Dummy Mapping by not wrapping a {@link TokenStream} */ public static final Context EMPTY_CONTEXT = new Context(EMPTY_CONFIG, null); public static final String FIELD_VALUE = "value"; public static final String FIELD_MISSING = "default"; public static final String FIELD_TYPE = "type"; protected final String type; // Type of the Contextmapping protected final String name; /** * Define a new context mapping of a specific type * * @param type * name of the new context mapping */ protected ContextMapping(String type, String name) { super(); this.type = type; this.name = name; } /** * @return the type name of the context */ protected String type() { return type; } /** * @return the name/id of the context */ public String name() { return name; } @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(name); builder.field(FIELD_TYPE, type); toInnerXContent(builder, params); builder.endObject(); return builder; } /** * A {@link ContextMapping} combined with the information provided by a document * form a {@link ContextConfig} which is used to build the underlying FST. * * @param parseContext context of parsing phase * @param parser {@link XContentParser} used to read and setup the configuration * @return A {@link ContextConfig} related to <b>this</b> mapping * * @throws IOException * @throws ElasticsearchParseException */ public abstract ContextConfig parseContext(ParseContext parseContext, XContentParser parser) throws IOException, ElasticsearchParseException; public abstract ContextConfig defaultConfig(); /** * Parse a query according to the context. Parsing starts at parsers <b>current</b> position * * @param name name of the context * @param parser {@link XContentParser} providing the data of the query * * @return {@link ContextQuery} according to this mapping * * @throws IOException * @throws ElasticsearchParseException */ public abstract ContextQuery parseQuery(String name, XContentParser parser) throws IOException, ElasticsearchParseException; /** * Since every context mapping is assumed to have a name given by the field name of an context object, this * method is used to build the value used to serialize the mapping * * @param builder builder to append the mapping to * @param params parameters passed to the builder * * @return the builder used * * @throws IOException */ protected abstract XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException; /** * Test equality of two mapping * * @param thisMappings first mapping * @param otherMappings second mapping * * @return true if both arguments are equal */ public static boolean mappingsAreEqual(SortedMap<String, ? extends ContextMapping> thisMappings, SortedMap<String, ? 
extends ContextMapping> otherMappings) { return Iterables.elementsEqual(thisMappings.entrySet(), otherMappings.entrySet()); } @Override public String toString() { try { return toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string(); } catch (IOException e) { return super.toString(); } } /** * A collection of {@link ContextMapping}s, their {@link ContextConfig}uration and a * Document form a complete {@link Context}. Since this Object provides all information used * to setup a suggestion, it can be used to wrap the entire {@link TokenStream} used to build a * path within the {@link FST}. */ public static class Context { final SortedMap<String, ContextConfig> contexts; final Document doc; public Context(SortedMap<String, ContextConfig> contexts, Document doc) { super(); this.contexts = contexts; this.doc = doc; } /** * Wrap the {@link TokenStream} according to the provided informations of {@link ContextConfig} * and a related {@link Document}. * * @param tokenStream {@link TokenStream} to wrap * * @return wrapped token stream */ public TokenStream wrapTokenStream(TokenStream tokenStream) { for (ContextConfig context : contexts.values()) { tokenStream = context.wrapTokenStream(doc, tokenStream); } return tokenStream; } } /** * A {@link ContextMapping} combined with the information provided by a document * form a {@link ContextConfig} which is used to build the underlying {@link FST}. This class hold * a simple method wrapping a {@link TokenStream} by provided document informations. */ public static abstract class ContextConfig { /** * Wrap a {@link TokenStream} for building suggestions to use context informations * provided by a document or a {@link ContextMapping} * * @param doc document related to the stream * @param stream original stream used to build the underlying {@link FST} * * @return A new {@link TokenStream} providing additional context information */ protected abstract TokenStream wrapTokenStream(Document doc, TokenStream stream); } /** * A {@link ContextQuery} defines the context information for a specific {@link ContextMapping} * defined within a suggestion request. According to the parameters set in the request and the * {@link ContextMapping} such a query is used to wrap the {@link TokenStream} of the actual * suggestion request into a {@link TokenStream} with the context settings */ public static abstract class ContextQuery implements ToXContent { protected final String name; protected ContextQuery(String name) { this.name = name; } public String name() { return name; } /** * Create a automaton for a given context query this automaton will be used * to find the matching paths with the fst * * @param preserveSep set an additional char (<code>XAnalyzingSuggester.SEP_LABEL</code>) between each context query * @param queries list of {@link ContextQuery} defining the lookup context * * @return Automaton matching the given Query */ public static Automaton toAutomaton(boolean preserveSep, Iterable<ContextQuery> queries) { Automaton a = BasicAutomata.makeEmptyString(); Automaton gap = BasicAutomata.makeChar(ContextMapping.SEPARATOR); if (preserveSep) { // if separators are preserved the fst contains a SEP_LABEL // behind each gap. 
To have a matching automaton, we need to // include the SEP_LABEL in the query as well gap = BasicOperations.concatenate(gap, BasicAutomata.makeChar(XAnalyzingSuggester.SEP_LABEL)); } for (ContextQuery query : queries) { a = Automaton.concatenate(Arrays.asList(query.toAutomaton(), gap, a)); } BasicOperations.determinize(a); return a; } /** * Build a LookUp Automaton for this context. * @return LookUp Automaton */ protected abstract Automaton toAutomaton(); /** * Parse a set of {@link ContextQuery} according to a given mapping * @param mappings List of mappings defined by the suggest field * @param parser parser holding the settings of the queries. The parser's * current token is assumed to hold an array. The number of elements * in this array must match the number of elements in the mappings. * @return List of context queries * * @throws IOException if something unexpected happened on the underlying stream * @throws ElasticsearchParseException if the list of queries could not be parsed */ public static List<ContextQuery> parseQueries(Map<String, ContextMapping> mappings, XContentParser parser) throws IOException, ElasticsearchParseException { Map<String, ContextQuery> querySet = new HashMap<String, ContextMapping.ContextQuery>(); Token token = parser.currentToken(); if(token == Token.START_OBJECT) { while ((token = parser.nextToken()) != Token.END_OBJECT) { String name = parser.text(); ContextMapping mapping = mappings.get(name); if (mapping == null) { throw new ElasticsearchParseException("no mapping defined for [" + name + "]"); } parser.nextToken(); querySet.put(name, mapping.parseQuery(name, parser)); } } List<ContextQuery> queries = Lists.newArrayListWithExpectedSize(mappings.size()); for (ContextMapping mapping : mappings.values()) { queries.add(querySet.get(mapping.name)); } return queries; } @Override public String toString() { try { return toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string(); } catch (IOException e) { return super.toString(); } } } }
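/*
 * A minimal sketch (not part of the Elasticsearch sources) of a concrete ContextQuery that
 * accepts a single literal context value. It only illustrates the extension point defined
 * above: toAutomaton() supplies the per-context automaton that the static
 * ContextQuery.toAutomaton(preserveSep, queries) chains together with the SEPARATOR (and,
 * if requested, SEP_LABEL) in front of the suggestion prefix. BasicAutomata.makeString and
 * the ToXContent.Params signature are taken from the imports and interfaces used in the
 * file above; everything else here is an illustrative assumption.
 */
class LiteralContextQuery extends ContextMapping.ContextQuery {

    private final String value;

    LiteralContextQuery(String name, String value) {
        super(name);
        this.value = value;
    }

    @Override
    protected Automaton toAutomaton() {
        // Accept exactly this context value; a real mapping would derive this from its config.
        return BasicAutomata.makeString(value);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        return builder.field(name(), value);
    }
}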
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/metadata_service.proto package com.google.cloud.aiplatform.v1; /** * * * <pre> * Details of operations that perform [MetadataService.PurgeContexts][google.cloud.aiplatform.v1.MetadataService.PurgeContexts]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.PurgeContextsMetadata} */ public final class PurgeContextsMetadata extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.PurgeContextsMetadata) PurgeContextsMetadataOrBuilder { private static final long serialVersionUID = 0L; // Use PurgeContextsMetadata.newBuilder() to construct. private PurgeContextsMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PurgeContextsMetadata() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PurgeContextsMetadata(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private PurgeContextsMetadata( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder subBuilder = null; if (genericMetadata_ != null) { subBuilder = genericMetadata_.toBuilder(); } genericMetadata_ = input.readMessage( com.google.cloud.aiplatform.v1.GenericOperationMetadata.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(genericMetadata_); genericMetadata_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1_PurgeContextsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.MetadataServiceProto 
.internal_static_google_cloud_aiplatform_v1_PurgeContextsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.PurgeContextsMetadata.class, com.google.cloud.aiplatform.v1.PurgeContextsMetadata.Builder.class); } public static final int GENERIC_METADATA_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_; /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> * * @return Whether the genericMetadata field is set. */ @java.lang.Override public boolean hasGenericMetadata() { return genericMetadata_ != null; } /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> * * @return The genericMetadata. */ @java.lang.Override public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() { return genericMetadata_ == null ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance() : genericMetadata_; } /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder getGenericMetadataOrBuilder() { return getGenericMetadata(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (genericMetadata_ != null) { output.writeMessage(1, getGenericMetadata()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (genericMetadata_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.PurgeContextsMetadata)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.PurgeContextsMetadata other = (com.google.cloud.aiplatform.v1.PurgeContextsMetadata) obj; if (hasGenericMetadata() != other.hasGenericMetadata()) return false; if (hasGenericMetadata()) { if (!getGenericMetadata().equals(other.getGenericMetadata())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasGenericMetadata()) { hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER; hash = (53 * hash) + getGenericMetadata().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom( java.nio.ByteBuffer data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1.PurgeContextsMetadata prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Details of operations that perform [MetadataService.PurgeContexts][google.cloud.aiplatform.v1.MetadataService.PurgeContexts]. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.PurgeContextsMetadata} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.PurgeContextsMetadata) com.google.cloud.aiplatform.v1.PurgeContextsMetadataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1_PurgeContextsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1_PurgeContextsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.PurgeContextsMetadata.class, com.google.cloud.aiplatform.v1.PurgeContextsMetadata.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.PurgeContextsMetadata.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); if (genericMetadataBuilder_ == null) { genericMetadata_ = null; } else { genericMetadata_ = null; genericMetadataBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1_PurgeContextsMetadata_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.PurgeContextsMetadata getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.PurgeContextsMetadata.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.PurgeContextsMetadata build() { com.google.cloud.aiplatform.v1.PurgeContextsMetadata result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.PurgeContextsMetadata buildPartial() { com.google.cloud.aiplatform.v1.PurgeContextsMetadata result = new com.google.cloud.aiplatform.v1.PurgeContextsMetadata(this); if (genericMetadataBuilder_ == null) { result.genericMetadata_ = genericMetadata_; } else { result.genericMetadata_ = genericMetadataBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.PurgeContextsMetadata) { return mergeFrom((com.google.cloud.aiplatform.v1.PurgeContextsMetadata) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.PurgeContextsMetadata other) { if (other == com.google.cloud.aiplatform.v1.PurgeContextsMetadata.getDefaultInstance()) return this; if (other.hasGenericMetadata()) { mergeGenericMetadata(other.getGenericMetadata()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.aiplatform.v1.PurgeContextsMetadata parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.aiplatform.v1.PurgeContextsMetadata) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.GenericOperationMetadata, com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder, com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder> genericMetadataBuilder_; /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> * * @return Whether the genericMetadata field is set. */ public boolean hasGenericMetadata() { return genericMetadataBuilder_ != null || genericMetadata_ != null; } /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> * * @return The genericMetadata. */ public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() { if (genericMetadataBuilder_ == null) { return genericMetadata_ == null ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance() : genericMetadata_; } else { return genericMetadataBuilder_.getMessage(); } } /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> */ public Builder setGenericMetadata( com.google.cloud.aiplatform.v1.GenericOperationMetadata value) { if (genericMetadataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } genericMetadata_ = value; onChanged(); } else { genericMetadataBuilder_.setMessage(value); } return this; } /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> */ public Builder setGenericMetadata( com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder builderForValue) { if (genericMetadataBuilder_ == null) { genericMetadata_ = builderForValue.build(); onChanged(); } else { genericMetadataBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Operation metadata for purging Contexts. 
* </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> */ public Builder mergeGenericMetadata( com.google.cloud.aiplatform.v1.GenericOperationMetadata value) { if (genericMetadataBuilder_ == null) { if (genericMetadata_ != null) { genericMetadata_ = com.google.cloud.aiplatform.v1.GenericOperationMetadata.newBuilder(genericMetadata_) .mergeFrom(value) .buildPartial(); } else { genericMetadata_ = value; } onChanged(); } else { genericMetadataBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> */ public Builder clearGenericMetadata() { if (genericMetadataBuilder_ == null) { genericMetadata_ = null; onChanged(); } else { genericMetadata_ = null; genericMetadataBuilder_ = null; } return this; } /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> */ public com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder getGenericMetadataBuilder() { onChanged(); return getGenericMetadataFieldBuilder().getBuilder(); } /** * * * <pre> * Operation metadata for purging Contexts. * </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> */ public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder getGenericMetadataOrBuilder() { if (genericMetadataBuilder_ != null) { return genericMetadataBuilder_.getMessageOrBuilder(); } else { return genericMetadata_ == null ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance() : genericMetadata_; } } /** * * * <pre> * Operation metadata for purging Contexts. 
* </pre> * * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.GenericOperationMetadata, com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder, com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder> getGenericMetadataFieldBuilder() { if (genericMetadataBuilder_ == null) { genericMetadataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.GenericOperationMetadata, com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder, com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>( getGenericMetadata(), getParentForChildren(), isClean()); genericMetadata_ = null; } return genericMetadataBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.PurgeContextsMetadata) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.PurgeContextsMetadata) private static final com.google.cloud.aiplatform.v1.PurgeContextsMetadata DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.PurgeContextsMetadata(); } public static com.google.cloud.aiplatform.v1.PurgeContextsMetadata getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PurgeContextsMetadata> PARSER = new com.google.protobuf.AbstractParser<PurgeContextsMetadata>() { @java.lang.Override public PurgeContextsMetadata parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new PurgeContextsMetadata(input, extensionRegistry); } }; public static com.google.protobuf.Parser<PurgeContextsMetadata> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PurgeContextsMetadata> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.PurgeContextsMetadata getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
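// A minimal usage sketch for the generated PurgeContextsMetadata message shown above: building an
// instance with its Builder, serializing it, and parsing it back with the generated parseFrom
// overload. The wrapper class name PurgeContextsMetadataUsageSketch is illustrative only and is
// not part of the generated API; getDefaultInstance() stands in for real operation metadata.
class PurgeContextsMetadataUsageSketch {
  public static void main(String[] args) throws com.google.protobuf.InvalidProtocolBufferException {
    // Build a message via the generated Builder.
    com.google.cloud.aiplatform.v1.PurgeContextsMetadata metadata =
        com.google.cloud.aiplatform.v1.PurgeContextsMetadata.newBuilder()
            .setGenericMetadata(
                com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance())
            .build();

    // Round-trip through the wire format.
    byte[] bytes = metadata.toByteArray();
    com.google.cloud.aiplatform.v1.PurgeContextsMetadata parsed =
        com.google.cloud.aiplatform.v1.PurgeContextsMetadata.parseFrom(bytes);
    System.out.println("generic_metadata set: " + parsed.hasGenericMetadata());
  }
}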
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.elasticloadbalancing.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * <p> * Contains the parameters for CreateLoadBalancer. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/CreateLoadBalancer" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CreateLoadBalancerRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The name of the load balancer. * </p> * <p> * This name must be unique within your set of load balancers for the region, must have a maximum of 32 characters, * must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen. * </p> */ private String loadBalancerName; /** * <p> * The listeners. * </p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners for Your * Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * </p> */ private com.amazonaws.internal.SdkInternalList<Listener> listeners; /** * <p> * One or more Availability Zones from the same region as the load balancer. * </p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. * </p> */ private com.amazonaws.internal.SdkInternalList<String> availabilityZones; /** * <p> * The IDs of the subnets in your VPC to attach to the load balancer. Specify one subnet per Availability Zone * specified in <code>AvailabilityZones</code>. * </p> */ private com.amazonaws.internal.SdkInternalList<String> subnets; /** * <p> * The IDs of the security groups to assign to the load balancer. * </p> */ private com.amazonaws.internal.SdkInternalList<String> securityGroups; /** * <p> * The type of a load balancer. Valid only for load balancers in a VPC. * </p> * <p> * By default, Elastic Load Balancing creates an Internet-facing load balancer with a DNS name that resolves to * public IP addresses. For more information about Internet-facing and Internal load balancers, see <a href= * "http://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/how-elastic-load-balancing-works.html#load-balancer-scheme" * >Load Balancer Scheme</a> in the <i>Elastic Load Balancing User Guide</i>. * </p> * <p> * Specify <code>internal</code> to create a load balancer with a DNS name that resolves to private IP addresses. * </p> */ private String scheme; /** * <p> * A list of tags to assign to the load balancer. * </p> * <p> * For more information about tagging your load balancer, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your Classic Load * Balancer</a> in the <i>Classic Load Balancers Guide</i>. 
* </p> */ private com.amazonaws.internal.SdkInternalList<Tag> tags; /** * Default constructor for CreateLoadBalancerRequest object. Callers should use the setter or fluent setter * (with...) methods to initialize the object after creating it. */ public CreateLoadBalancerRequest() { } /** * Constructs a new CreateLoadBalancerRequest object. Callers should use the setter or fluent setter (with...) * methods to initialize any additional object members. * * @param loadBalancerName * The name of the load balancer.</p> * <p> * This name must be unique within your set of load balancers for the region, must have a maximum of 32 * characters, must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen. */ public CreateLoadBalancerRequest(String loadBalancerName) { setLoadBalancerName(loadBalancerName); } /** * Constructs a new CreateLoadBalancerRequest object. Callers should use the setter or fluent setter (with...) * methods to initialize any additional object members. * * @param loadBalancerName * The name of the load balancer.</p> * <p> * This name must be unique within your set of load balancers for the region, must have a maximum of 32 * characters, must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen. * @param listeners * The listeners. * </p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners * for Your Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * @param availabilityZones * One or more Availability Zones from the same region as the load balancer. * </p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. */ public CreateLoadBalancerRequest(String loadBalancerName, java.util.List<Listener> listeners, java.util.List<String> availabilityZones) { setLoadBalancerName(loadBalancerName); setListeners(listeners); setAvailabilityZones(availabilityZones); } /** * <p> * The name of the load balancer. * </p> * <p> * This name must be unique within your set of load balancers for the region, must have a maximum of 32 characters, * must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen. * </p> * * @param loadBalancerName * The name of the load balancer.</p> * <p> * This name must be unique within your set of load balancers for the region, must have a maximum of 32 * characters, must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen. */ public void setLoadBalancerName(String loadBalancerName) { this.loadBalancerName = loadBalancerName; } /** * <p> * The name of the load balancer. * </p> * <p> * This name must be unique within your set of load balancers for the region, must have a maximum of 32 characters, * must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen. * </p> * * @return The name of the load balancer.</p> * <p> * This name must be unique within your set of load balancers for the region, must have a maximum of 32 * characters, must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen. */ public String getLoadBalancerName() { return this.loadBalancerName; } /** * <p> * The name of the load balancer. 
* </p> * <p> * This name must be unique within your set of load balancers for the region, must have a maximum of 32 characters, * must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen. * </p> * * @param loadBalancerName * The name of the load balancer.</p> * <p> * This name must be unique within your set of load balancers for the region, must have a maximum of 32 * characters, must contain only alphanumeric characters or hyphens, and cannot begin or end with a hyphen. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withLoadBalancerName(String loadBalancerName) { setLoadBalancerName(loadBalancerName); return this; } /** * <p> * The listeners. * </p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners for Your * Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * </p> * * @return The listeners.</p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners * for Your Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. */ public java.util.List<Listener> getListeners() { if (listeners == null) { listeners = new com.amazonaws.internal.SdkInternalList<Listener>(); } return listeners; } /** * <p> * The listeners. * </p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners for Your * Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * </p> * * @param listeners * The listeners.</p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners * for Your Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. */ public void setListeners(java.util.Collection<Listener> listeners) { if (listeners == null) { this.listeners = null; return; } this.listeners = new com.amazonaws.internal.SdkInternalList<Listener>(listeners); } /** * <p> * The listeners. * </p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners for Your * Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setListeners(java.util.Collection)} or {@link #withListeners(java.util.Collection)} if you want to * override the existing values. * </p> * * @param listeners * The listeners.</p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners * for Your Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withListeners(Listener... listeners) { if (this.listeners == null) { setListeners(new com.amazonaws.internal.SdkInternalList<Listener>(listeners.length)); } for (Listener ele : listeners) { this.listeners.add(ele); } return this; } /** * <p> * The listeners. 
* </p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners for Your * Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * </p> * * @param listeners * The listeners.</p> * <p> * For more information, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html">Listeners * for Your Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withListeners(java.util.Collection<Listener> listeners) { setListeners(listeners); return this; } /** * <p> * One or more Availability Zones from the same region as the load balancer. * </p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. * </p> * * @return One or more Availability Zones from the same region as the load balancer.</p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. */ public java.util.List<String> getAvailabilityZones() { if (availabilityZones == null) { availabilityZones = new com.amazonaws.internal.SdkInternalList<String>(); } return availabilityZones; } /** * <p> * One or more Availability Zones from the same region as the load balancer. * </p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. * </p> * * @param availabilityZones * One or more Availability Zones from the same region as the load balancer.</p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. */ public void setAvailabilityZones(java.util.Collection<String> availabilityZones) { if (availabilityZones == null) { this.availabilityZones = null; return; } this.availabilityZones = new com.amazonaws.internal.SdkInternalList<String>(availabilityZones); } /** * <p> * One or more Availability Zones from the same region as the load balancer. * </p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setAvailabilityZones(java.util.Collection)} or {@link #withAvailabilityZones(java.util.Collection)} if * you want to override the existing values. * </p> * * @param availabilityZones * One or more Availability Zones from the same region as the load balancer.</p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withAvailabilityZones(String... 
availabilityZones) { if (this.availabilityZones == null) { setAvailabilityZones(new com.amazonaws.internal.SdkInternalList<String>(availabilityZones.length)); } for (String ele : availabilityZones) { this.availabilityZones.add(ele); } return this; } /** * <p> * One or more Availability Zones from the same region as the load balancer. * </p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. * </p> * * @param availabilityZones * One or more Availability Zones from the same region as the load balancer.</p> * <p> * You must specify at least one Availability Zone. * </p> * <p> * You can add more Availability Zones after you create the load balancer using * <a>EnableAvailabilityZonesForLoadBalancer</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withAvailabilityZones(java.util.Collection<String> availabilityZones) { setAvailabilityZones(availabilityZones); return this; } /** * <p> * The IDs of the subnets in your VPC to attach to the load balancer. Specify one subnet per Availability Zone * specified in <code>AvailabilityZones</code>. * </p> * * @return The IDs of the subnets in your VPC to attach to the load balancer. Specify one subnet per Availability * Zone specified in <code>AvailabilityZones</code>. */ public java.util.List<String> getSubnets() { if (subnets == null) { subnets = new com.amazonaws.internal.SdkInternalList<String>(); } return subnets; } /** * <p> * The IDs of the subnets in your VPC to attach to the load balancer. Specify one subnet per Availability Zone * specified in <code>AvailabilityZones</code>. * </p> * * @param subnets * The IDs of the subnets in your VPC to attach to the load balancer. Specify one subnet per Availability * Zone specified in <code>AvailabilityZones</code>. */ public void setSubnets(java.util.Collection<String> subnets) { if (subnets == null) { this.subnets = null; return; } this.subnets = new com.amazonaws.internal.SdkInternalList<String>(subnets); } /** * <p> * The IDs of the subnets in your VPC to attach to the load balancer. Specify one subnet per Availability Zone * specified in <code>AvailabilityZones</code>. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setSubnets(java.util.Collection)} or {@link #withSubnets(java.util.Collection)} if you want to override * the existing values. * </p> * * @param subnets * The IDs of the subnets in your VPC to attach to the load balancer. Specify one subnet per Availability * Zone specified in <code>AvailabilityZones</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withSubnets(String... subnets) { if (this.subnets == null) { setSubnets(new com.amazonaws.internal.SdkInternalList<String>(subnets.length)); } for (String ele : subnets) { this.subnets.add(ele); } return this; } /** * <p> * The IDs of the subnets in your VPC to attach to the load balancer. Specify one subnet per Availability Zone * specified in <code>AvailabilityZones</code>. * </p> * * @param subnets * The IDs of the subnets in your VPC to attach to the load balancer. Specify one subnet per Availability * Zone specified in <code>AvailabilityZones</code>. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public CreateLoadBalancerRequest withSubnets(java.util.Collection<String> subnets) { setSubnets(subnets); return this; } /** * <p> * The IDs of the security groups to assign to the load balancer. * </p> * * @return The IDs of the security groups to assign to the load balancer. */ public java.util.List<String> getSecurityGroups() { if (securityGroups == null) { securityGroups = new com.amazonaws.internal.SdkInternalList<String>(); } return securityGroups; } /** * <p> * The IDs of the security groups to assign to the load balancer. * </p> * * @param securityGroups * The IDs of the security groups to assign to the load balancer. */ public void setSecurityGroups(java.util.Collection<String> securityGroups) { if (securityGroups == null) { this.securityGroups = null; return; } this.securityGroups = new com.amazonaws.internal.SdkInternalList<String>(securityGroups); } /** * <p> * The IDs of the security groups to assign to the load balancer. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setSecurityGroups(java.util.Collection)} or {@link #withSecurityGroups(java.util.Collection)} if you want * to override the existing values. * </p> * * @param securityGroups * The IDs of the security groups to assign to the load balancer. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withSecurityGroups(String... securityGroups) { if (this.securityGroups == null) { setSecurityGroups(new com.amazonaws.internal.SdkInternalList<String>(securityGroups.length)); } for (String ele : securityGroups) { this.securityGroups.add(ele); } return this; } /** * <p> * The IDs of the security groups to assign to the load balancer. * </p> * * @param securityGroups * The IDs of the security groups to assign to the load balancer. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withSecurityGroups(java.util.Collection<String> securityGroups) { setSecurityGroups(securityGroups); return this; } /** * <p> * The type of a load balancer. Valid only for load balancers in a VPC. * </p> * <p> * By default, Elastic Load Balancing creates an Internet-facing load balancer with a DNS name that resolves to * public IP addresses. For more information about Internet-facing and Internal load balancers, see <a href= * "http://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/how-elastic-load-balancing-works.html#load-balancer-scheme" * >Load Balancer Scheme</a> in the <i>Elastic Load Balancing User Guide</i>. * </p> * <p> * Specify <code>internal</code> to create a load balancer with a DNS name that resolves to private IP addresses. * </p> * * @param scheme * The type of a load balancer. Valid only for load balancers in a VPC.</p> * <p> * By default, Elastic Load Balancing creates an Internet-facing load balancer with a DNS name that resolves * to public IP addresses. For more information about Internet-facing and Internal load balancers, see <a * href= * "http://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/how-elastic-load-balancing-works.html#load-balancer-scheme" * >Load Balancer Scheme</a> in the <i>Elastic Load Balancing User Guide</i>. * </p> * <p> * Specify <code>internal</code> to create a load balancer with a DNS name that resolves to private IP * addresses. */ public void setScheme(String scheme) { this.scheme = scheme; } /** * <p> * The type of a load balancer. Valid only for load balancers in a VPC. 
* </p> * <p> * By default, Elastic Load Balancing creates an Internet-facing load balancer with a DNS name that resolves to * public IP addresses. For more information about Internet-facing and Internal load balancers, see <a href= * "http://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/how-elastic-load-balancing-works.html#load-balancer-scheme" * >Load Balancer Scheme</a> in the <i>Elastic Load Balancing User Guide</i>. * </p> * <p> * Specify <code>internal</code> to create a load balancer with a DNS name that resolves to private IP addresses. * </p> * * @return The type of a load balancer. Valid only for load balancers in a VPC.</p> * <p> * By default, Elastic Load Balancing creates an Internet-facing load balancer with a DNS name that resolves * to public IP addresses. For more information about Internet-facing and Internal load balancers, see <a * href= * "http://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/how-elastic-load-balancing-works.html#load-balancer-scheme" * >Load Balancer Scheme</a> in the <i>Elastic Load Balancing User Guide</i>. * </p> * <p> * Specify <code>internal</code> to create a load balancer with a DNS name that resolves to private IP * addresses. */ public String getScheme() { return this.scheme; } /** * <p> * The type of a load balancer. Valid only for load balancers in a VPC. * </p> * <p> * By default, Elastic Load Balancing creates an Internet-facing load balancer with a DNS name that resolves to * public IP addresses. For more information about Internet-facing and Internal load balancers, see <a href= * "http://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/how-elastic-load-balancing-works.html#load-balancer-scheme" * >Load Balancer Scheme</a> in the <i>Elastic Load Balancing User Guide</i>. * </p> * <p> * Specify <code>internal</code> to create a load balancer with a DNS name that resolves to private IP addresses. * </p> * * @param scheme * The type of a load balancer. Valid only for load balancers in a VPC.</p> * <p> * By default, Elastic Load Balancing creates an Internet-facing load balancer with a DNS name that resolves * to public IP addresses. For more information about Internet-facing and Internal load balancers, see <a * href= * "http://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/how-elastic-load-balancing-works.html#load-balancer-scheme" * >Load Balancer Scheme</a> in the <i>Elastic Load Balancing User Guide</i>. * </p> * <p> * Specify <code>internal</code> to create a load balancer with a DNS name that resolves to private IP * addresses. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withScheme(String scheme) { setScheme(scheme); return this; } /** * <p> * A list of tags to assign to the load balancer. * </p> * <p> * For more information about tagging your load balancer, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your Classic Load * Balancer</a> in the <i>Classic Load Balancers Guide</i>. * </p> * * @return A list of tags to assign to the load balancer.</p> * <p> * For more information about tagging your load balancer, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your * Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. 
*/ public java.util.List<Tag> getTags() { if (tags == null) { tags = new com.amazonaws.internal.SdkInternalList<Tag>(); } return tags; } /** * <p> * A list of tags to assign to the load balancer. * </p> * <p> * For more information about tagging your load balancer, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your Classic Load * Balancer</a> in the <i>Classic Load Balancers Guide</i>. * </p> * * @param tags * A list of tags to assign to the load balancer.</p> * <p> * For more information about tagging your load balancer, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your * Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. */ public void setTags(java.util.Collection<Tag> tags) { if (tags == null) { this.tags = null; return; } this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags); } /** * <p> * A list of tags to assign to the load balancer. * </p> * <p> * For more information about tagging your load balancer, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your Classic Load * Balancer</a> in the <i>Classic Load Balancers Guide</i>. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the * existing values. * </p> * * @param tags * A list of tags to assign to the load balancer.</p> * <p> * For more information about tagging your load balancer, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your * Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withTags(Tag... tags) { if (this.tags == null) { setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length)); } for (Tag ele : tags) { this.tags.add(ele); } return this; } /** * <p> * A list of tags to assign to the load balancer. * </p> * <p> * For more information about tagging your load balancer, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your Classic Load * Balancer</a> in the <i>Classic Load Balancers Guide</i>. * </p> * * @param tags * A list of tags to assign to the load balancer.</p> * <p> * For more information about tagging your load balancer, see <a * href="http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html">Tag Your * Classic Load Balancer</a> in the <i>Classic Load Balancers Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateLoadBalancerRequest withTags(java.util.Collection<Tag> tags) { setTags(tags); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getLoadBalancerName() != null) sb.append("LoadBalancerName: ").append(getLoadBalancerName()).append(","); if (getListeners() != null) sb.append("Listeners: ").append(getListeners()).append(","); if (getAvailabilityZones() != null) sb.append("AvailabilityZones: ").append(getAvailabilityZones()).append(","); if (getSubnets() != null) sb.append("Subnets: ").append(getSubnets()).append(","); if (getSecurityGroups() != null) sb.append("SecurityGroups: ").append(getSecurityGroups()).append(","); if (getScheme() != null) sb.append("Scheme: ").append(getScheme()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateLoadBalancerRequest == false) return false; CreateLoadBalancerRequest other = (CreateLoadBalancerRequest) obj; if (other.getLoadBalancerName() == null ^ this.getLoadBalancerName() == null) return false; if (other.getLoadBalancerName() != null && other.getLoadBalancerName().equals(this.getLoadBalancerName()) == false) return false; if (other.getListeners() == null ^ this.getListeners() == null) return false; if (other.getListeners() != null && other.getListeners().equals(this.getListeners()) == false) return false; if (other.getAvailabilityZones() == null ^ this.getAvailabilityZones() == null) return false; if (other.getAvailabilityZones() != null && other.getAvailabilityZones().equals(this.getAvailabilityZones()) == false) return false; if (other.getSubnets() == null ^ this.getSubnets() == null) return false; if (other.getSubnets() != null && other.getSubnets().equals(this.getSubnets()) == false) return false; if (other.getSecurityGroups() == null ^ this.getSecurityGroups() == null) return false; if (other.getSecurityGroups() != null && other.getSecurityGroups().equals(this.getSecurityGroups()) == false) return false; if (other.getScheme() == null ^ this.getScheme() == null) return false; if (other.getScheme() != null && other.getScheme().equals(this.getScheme()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getLoadBalancerName() == null) ? 0 : getLoadBalancerName().hashCode()); hashCode = prime * hashCode + ((getListeners() == null) ? 0 : getListeners().hashCode()); hashCode = prime * hashCode + ((getAvailabilityZones() == null) ? 0 : getAvailabilityZones().hashCode()); hashCode = prime * hashCode + ((getSubnets() == null) ? 0 : getSubnets().hashCode()); hashCode = prime * hashCode + ((getSecurityGroups() == null) ? 0 : getSecurityGroups().hashCode()); hashCode = prime * hashCode + ((getScheme() == null) ? 0 : getScheme().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode()); return hashCode; } @Override public CreateLoadBalancerRequest clone() { return (CreateLoadBalancerRequest) super.clone(); } }
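// A minimal usage sketch for CreateLoadBalancerRequest: populating the request with the fluent
// with* setters defined above. The load balancer name, zone, listener settings, and tag values are
// placeholders; in real code the request would be passed to
// AmazonElasticLoadBalancing#createLoadBalancer. The Listener and Tag fluent setters used here are
// assumed from the same generated model package.
class CreateLoadBalancerRequestUsageSketch {
    public static void main(String[] args) {
        com.amazonaws.services.elasticloadbalancing.model.CreateLoadBalancerRequest request =
                new com.amazonaws.services.elasticloadbalancing.model.CreateLoadBalancerRequest()
                        .withLoadBalancerName("my-classic-elb")
                        .withListeners(new com.amazonaws.services.elasticloadbalancing.model.Listener()
                                .withProtocol("HTTP")
                                .withLoadBalancerPort(80)
                                .withInstancePort(80))
                        .withAvailabilityZones("us-east-1a")
                        .withTags(new com.amazonaws.services.elasticloadbalancing.model.Tag()
                                .withKey("environment")
                                .withValue("test"));

        // toString() is useful for debugging; sensitive data is redacted, as documented above.
        System.out.println(request);
    }
}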
/*L * Copyright Georgetown University, Washington University. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cab2b/LICENSE.txt for details. */ /* * JSheet.java * * Created on October 4, 2007, 2:00 PM */ package edu.wustl.cab2b.client.ui.controls.sheet; import static edu.wustl.cab2b.client.ui.controls.sheet.Common.setBackgroundWhite; import java.awt.BorderLayout; import java.awt.Component; import java.awt.Dimension; import java.awt.EventQueue; import java.awt.Toolkit; import java.awt.Window; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.TreeSet; import java.util.logging.Logger; import javax.swing.Action; import javax.swing.JComponent; import javax.swing.JDialog; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import javax.swing.event.RowSorterEvent; import javax.swing.event.RowSorterListener; import javax.swing.event.TableModelEvent; import javax.swing.event.TableModelListener; import javax.swing.table.TableModel; import edu.wustl.cab2b.client.ui.controls.Cab2bFileFilter; /** * * @author jasbir_sachdeva */ public class JSheet extends javax.swing.JPanel { Logger lgr = Logger.getLogger(getClass().getName()); /** Event name that notifies that User has pressed Magnifying-Glass button... */ public static final String EVENT_HEADER_ROW_DOUBLE_CLICKED = "EVENT_HEADER_ROW_DOUBLE_CLICKED"; /** Event name that notifies that User has pressed Magnifying-Glass button... */ public static final String EVENT_DATA_ROW_DOUBLE_CLICKED = "EVENT_DATA_ROW_DOUBLE_CLICKED"; /** Added by Deepak : Event name that notifies that User has selected some data... */ public static final String EVENT_DATA_SINGLE_CLICKED = "EVENT_DATA_SINGLE_CLICKED"; /** Event name that notifies that User is interested in details of some Row... */ public static final String REQUESTED_SHOW_ROW_DETAILS = "REQUESTED_SHOW_ROW_DETAILS"; /** * Copy menu button name */ public static final String MENU_BUTTON_COPY = "MENU_BUTTON_COPY"; /** * Paste menu button name */ public static final String MENU_BUTTON_PASTE = "MENU_BUTTON_PASTE"; /** * Add Column menu button name */ public static final String MENU_BUTTON_ADD_COLUMN = "MENU_BUTTON_ADD_COLUMN"; /** * Properties menu button name */ public static final String MENU_BUTTON_PROPERTIES = "MENU_BUTTON_PROPERTIES"; /** * Reset menu button name */ public static final String MENU_BUTTON_RESET = "MENU_BUTTON_RESET"; /** * Clear menu button name */ public static final String MENU_BUTTON_CLEAR = "MENU_BUTTON_CLEAR"; /** Should I allow user to create new columns and allow cut/paste on them? */ boolean allowWrite = true; /** The Visual components that accepts User settings: which Columns to view: */ SheetCustomizationConsole consSheetCust = new SheetCustomizationConsole(); // SheetCustomizationConsole conSheetCustomization = new SheetCustomizationConsole(); /** This panel presents context sensitive Filter Control - for single column. * The last selected Column in Data View is picked up. 
     * This visual component shows the applicable filter as a GUI to the user, providing a chance for correction. */
    ColumnFilterVerticalConsole consColFilter = new ColumnFilterVerticalConsole();

    /** Console that shows all currently defined filters. */
    FiltersViewConsole consFiltersView = new FiltersViewConsole();

    /** Dialog hosting the column visibility settings. */
    JDialog colVisibilitySettingsDialog;

    /** Sheet Customization (Visibility & Filters) are kept here... */
    SheetCustomizationModel scm;

    /** Sheet Configuration Monitoring */
    InternalPCListener lsnSheetConf;

    /** Data View Console Monitoring. */
    InternalPCListener lsnVeiwData = new InternalPCListener();

    //    ArrayList<Set<Object>> sampleValuesAL;

    /** Indicator flag: whether sample values for a filter should be created from the Table Model, or the filter should be disabled if they are NOT explicitly provided. */
    private boolean createSampleValuesFromModel = false;

    /** Column Selection Listener */
    private ColSelectionListener lnsColSelection = new ColSelectionListener();

    ///////////////////////////////////////////////////////////
    /** The actual Visual Component that renders the table on the screen for the user. */
    ViewDataConsole consData = new ViewDataConsole();

    /** Special Model to show a Button, and a Row selection check box on the LHS. */
    //    FixedLeftColsTblModel tmFixedLeft;

    /** If this is true, SelectionHanger is allowed to paint itself as selected,
     * iff table cell selected is true as per model. */
    private boolean selectionRowMode = false;

    /**
     * Indication of mouse pressed event
     */
    private boolean mousePressed = false;

    /** Reference to the original params of setModel(...), for RESET */
    private TableModel tmROData;

    /** Reference to the original params of setModel(...), for RESET */
    private ArrayList<TreeSet<Comparable<?>>> sampleValuesAL;

    /**
     * Creates new form JSheet
     */
    public JSheet() {
        initComponents();

        // Set up Main Data Viewer and Column Manager...
        pnlDataView.add(consData);
        consData.addPropertyChangeListener(lsnVeiwData);
        consData.addTableColSelectionListener(lnsColSelection);
        pnlSheetCust.add(consSheetCust, BorderLayout.CENTER);
        applySizeOn(diaConsSheetCust);

        // Debug...
        //        showColFilterConsoleInDialog();
        //        showFIlterViewConsoleInDialog();
        //        pnlSheetCust.add(colVisibilityConsole);
        //        consData.addPropertyChangeListener(colVisibilityConsole);
        //        colVisibilityConsole.addPropertyChangeListener(consData);

        consData.addExportCellsActionlistener(new ExportCellsActionListener());
        consData.addShowPropertyDialogActionListener(new ShowPropDialogActionListener());

        // Setup Filter UI Component...
        //        pnlCommonFilter.removeAll();
        //        pnlCommonFilter.add(pnlCommonRangeFilter);
        //        consData.addPropertyChangeListener(pnlCommonRangeFilter);
        //        colVisibilityConsole.addPropertyChangeListener(pnlCommonRangeFilter);
        //        pnlCommonRangeFilter.setRelatedTable(consData.getDataViewTable());

        // For column removal Button...
        //        TableChangesSynchronizer tcs = new TableChangesSynchronizer();
        //        consData.addTableListener(tcs);
        //        consData.addTableRowsorterListener(tcs);

        // For Row Selections...
        //        tblFixed.setSelectionModel(consData.getSelectionModel());
        //        tblFixed.getSelectionModel().addListSelectionListener(tcs);
        //        DataViewCellsSelectionListener dvcsl = new DataViewCellsSelectionListener();
        //        consData.addTableColSelectionListener(dvcsl);
        //        consData.addTableRowSelectionListener(dvcsl);

        setBackgroundWhite(this);
    }

    /** Append new JButtons to the Toolbar, with the specified actions. All previously specified
        Actions will be removed. However, the JSheet default Actions cannot be changed.
        @param actions ArrayList specifying Actions of one or more JButton(s).
    */
    public void setAdditionalToolbarActions(List<Action> actions) {
        consData.setAdditionalToolbarActions(actions);
    }

    /**
     * Remove a component from the toolbar
     * @param menuName
     */
    public void removeComponentFromToolBar(String menuName) {
        consData.removeComponentFromToolBar(menuName);
    }

    /**
     * Gets the JSheet TableModel
     * @return TableModel
     */
    public TableModel getJSheetTableModel() {
        return consData.getCompositeDataModel();
    }

    /**
     * Gets the View TableModel
     * @return JSheetViewDataModel
     */
    public JSheetViewDataModel getViewTableModel() {
        return consData.getViewTableModel();
    }

    /**
     * Sizes and positions the given dialog relative to the screen.
     * @param dia
     */
    public void applySizeOn(JDialog dia) {
        setBackgroundWhite((JComponent) dia.getContentPane());
        Dimension ss = Toolkit.getDefaultToolkit().getScreenSize();
        dia.setBounds(ss.width * 2 / 8, ss.height / 7, ss.width * 4 / 8, ss.height * 5 / 7);
        // Added code to fix bug 6460 - Deepak
        dia.setModal(true);
    }

    /**
     * Returns the top-most component in the containment hierarchy that is heavyweight.
     * @param comp
     * @return Window
     */
    static Window getHWRoot(Component comp) {
        while (null != comp && !(comp instanceof Window)) {
            comp = comp.getParent();
        }
        // Either comp is null, or it is some heavyweight component, as top-most container...
        return (Window) comp;
    }

    /**
     * Returns the indices of the selected rows
     * @return int[]
     */
    public int[] getSelectedRows() {
        return consData.getSelectedRows();
    }

    /**
     * Returns the indices of the selected columns
     * @return int[]
     */
    public int[] getSelectedColumns() {
        return consData.getSelectedColumns();
    }

    /**
     * Returns the index of the selected row
     * @return int
     */
    public int getSelectedRow() {
        return consData.getSelectedRow();
    }

    /**
     * Returns the index of the selected column
     * @return int
     */
    public int getSelectedColumn() {
        return consData.getSelectedColumn();
    }

    /**
     * Returns the cell value at the given row and column
     * @param row
     * @param column
     * @return Object
     */
    public Object getValueAt(int row, int column) {
        return consData.getValueAt(row, column);
    }

    /**
     * Returns the ContextFilterConsole
     * @return JComponent
     */
    public JComponent getContextFilterConsole() {
        diaFilterConsole.getContentPane().removeAll();
        diaFilterConsole.setVisible(false);
        return consColFilter;
    }

    /*
     * Returns FiltersViewConsole
     * return JComponent
     */
    /**
     * Gives the Console view of all Filters
     * @return Component
     */
    public JComponent getFiltersViewConsole() {
        diaFiltersViewConsole.getContentPane().removeAll();
        diaFiltersViewConsole.setVisible(false);
        return consFiltersView;
    }

    /**
     * Adds a RowSelectionListener to the table
     * @param lsl
     */
    public void addRowSelectionListener(ListSelectionListener lsl) {
        consData.addTableRowSelectionListener(lsl);
    }

    /**
     * Removes a RowSelectionListener from the JSheet table
     * @param lsl
     */
    public void removeRowSelectionListener(ListSelectionListener lsl) {
        consData.removeTableRowSelectionListener(lsl);
    }

    /**
     * Adds a ColumnSelectionListener to the table
     * @param lsl
     */
    public void addColumnSelectionListener(ListSelectionListener lsl) {
        consData.addTableColSelectionListener(lsl);
    }

    /**
     * Removes a ColumnSelectionListener
     * @param lsl
     */
    public void removeColumnSelectionListener(ListSelectionListener lsl) {
        consData.removeTableColSelectionListener(lsl);
    }

    /**
     * Shows the ColFilterConsole in a separate Dialog
     */
    private void showColFilterConsoleInDialog() {
        diaFilterConsole.getContentPane().removeAll();
        diaFilterConsole.getContentPane().add(consColFilter, BorderLayout.CENTER);
        diaFilterConsole.setVisible(true);
    }

    /**
     * Shows the FiltersViewConsole in a separate Dialog
     */
    private void
    showFIlterViewConsoleInDialog() {
        diaFiltersViewConsole.getContentPane().removeAll();
        diaFiltersViewConsole.getContentPane().add(consFiltersView, BorderLayout.CENTER);
        diaFiltersViewConsole.setVisible(true);
    }

    /**
     * A listener class for synchronizing table changes; it implements the TableModelListener,
     * RowSorterListener and ListSelectionListener interfaces.
     * @author jasbir_sachdeva
     */
    class TableChangesSynchronizer implements TableModelListener, RowSorterListener, ListSelectionListener {

        /* (non-Javadoc)
         * @see javax.swing.event.TableModelListener#tableChanged(javax.swing.event.TableModelEvent)
         */
        public void tableChanged(TableModelEvent e) {
        }

        /* (non-Javadoc)
         * @see javax.swing.event.RowSorterListener#sorterChanged(javax.swing.event.RowSorterEvent)
         */
        public void sorterChanged(RowSorterEvent e) {
            //            //            //            tmFixedLeft.setRowCount(consData.getRowCount());
        }

        /* Selection in the Row-Selection hanger should match full row selection in the data view.
         * (non-Javadoc)
         * @see javax.swing.event.ListSelectionListener#valueChanged(javax.swing.event.ListSelectionEvent)
         */
        public void valueChanged(ListSelectionEvent e) {
            if (selectionRowMode) {
                consData.extendSelectionsToAllColumns();
            }
        }
    }

    ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    /////////// PUBLIC setters: configuration of this instance.
    ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

    /**
     * Scenario - Type C, No 2: public method call setReadOnlyDataModel on JSheet.
     *
     * Set the model that will be shown.
     * The default View is without filtering and Sorting.
     * Sample values are computed by scanning over the entire Table Model; a lazy Table Model, if any, will be fully traversed in this case.
     * @param tmROData - The basic data to be shown in JSheet.
     */
    public void setReadOnlyDataModel(TableModel tmROData) {
        setReadOnlyDataModel(tmROData, null);
    }

    /**
     * Scenario - Type C, No 2: public method call setReadOnlyDataModel on JSheet.
     *
     * Set the model that will be shown.
     * The default View is without filtering and Sorting.
     * @param tmROData - The basic data to be shown in JSheet.
     * @param sampleValuesAL Each column can take some possible values which are expected in a Set.
     *        Since there is more than one column, we have to supply more than one set. Club all those Sets in an
     *        ArrayList <code>sampleValues</code>.
     */
    public void setReadOnlyDataModel(TableModel tmROData, ArrayList<TreeSet<Comparable<?>>> sampleValuesAL) {
        if (null == sampleValuesAL) {
            sampleValuesAL = new ArrayList<TreeSet<Comparable<?>>>();
            createSampleValuesFromModel = true;
        } else {
            this.sampleValuesAL = sampleValuesAL;
            createSampleValuesFromModel = false;
        }
        this.tmROData = tmROData;
        if (null == tmROData) {
            throw new IllegalArgumentException("JSheet.setReadOnlyDataModel() does NOT accept a null model. ");
        }

        // A: Property Event Propagation Chain setup...
        setupModels(tmROData.getColumnCount());

        // B: Table & TableColumn Model settings and setup of infra for Relay of Table Model Events...
        // Table filter setting...
        consData.setTableFilter(scm.getTableFilter());
        consData.setReadOnlyDataModel(tmROData, scm.getRowInfoAL());

        // C: Setting up sample values in ColFilterModel (Used by Range & List Filters)...
        setupSampleValues(sampleValuesAL);

        consData.revalidate();
        consData.repaint();
    }

    /**
     * Scenario - C.2-A: Property Event Propagation Chain setup.
     * @param colCount
     */
    private void setupModels(int colCount) {
        // The reference to the old network of objects maintained by SheetCustomizationModel is now lost - left for garbage collection...
        scm = new SheetCustomizationModel();
        lsnSheetConf = new InternalPCListener();

        // Create a SheetColumn for each column in the data view to hold configuration information,
        // also chain dependencies and the Event Propagation chain...
        ArrayList<SheetColumn> alCols = new ArrayList<SheetColumn>();
        for (int idx = 0; idx < colCount; idx++) {
            SheetColumn newSheetCol = createNewColumnModel(idx);
            alCols.add(newSheetCol);
        }
        scm.setRowInfos(alCols);
        scm.addPropertyChangeListener(lsnSheetConf);

        // Setting button looks on the property sheet
        Common.setButtonsLooks(consSheetCust);
        Common.setButtonsLooks(consColFilter);

        // Set Model to Sheet Customization Console...
        consSheetCust.setModel(scm);
        consFiltersView.setModel(scm);
        consColFilter.setModel(null);
    }

    /**
     * Creates a new ColumnModel for JSheet
     * @param idx
     * @return SheetColumn
     */
    private SheetColumn createNewColumnModel(int idx) {
        SheetColumn col = new SheetColumn(idx);
        ColumnFilterModel cfm = new ColumnFilterModel();
        col.setFilterCondition(cfm);
        return col;
    }

    /**
     * Scenario - C.2-C: Setting up sample values in ColFilterModel (Used by Range & List Filters)
     * @param sampleValuesAL
     */
    @SuppressWarnings("empty-statement")
    private void setupSampleValues(ArrayList<TreeSet<Comparable<?>>> sampleValuesAL) {
        if (null == sampleValuesAL) {
            sampleValuesAL = new ArrayList<TreeSet<Comparable<?>>>();
        }
        ArrayList<SheetColumn> colSheetAL = scm.getRowInfoAL();
        TableModel roModel = consData.getReadOnlyDataModel();
        assert (colSheetAL.size() == roModel.getColumnCount());

        for (int idxCol = 0; idxCol < roModel.getColumnCount(); idxCol++) {
            ColumnFilterModel cfm = colSheetAL.get(idxCol).getFilterCondition();
            TreeSet sampleValues = getSetForIdx(sampleValuesAL, idxCol);
            if (null == sampleValues) {
                if (createSampleValuesFromModel) {
                    // we should create sample values from the table...
                    cfm.setSampleValues(roModel, idxCol);
                } else // ignore; the Filter will be ineffective, for lack of Sample Values...
                {
                    ;
                }
            } else {
                // Setting up sample values...
                cfm.setSampleValues(sampleValues);
            }
        }
    }

    /**
     * Gets the sample-value set at the given index, if present.
     * @param sampleValues
     * @param idx
     * @return TreeSet
     */
    static private TreeSet getSetForIdx(ArrayList<TreeSet<Comparable<?>>> sampleValues, int idx) {
        if (idx < sampleValues.size()) // Value exists at the given index...
    {
        return sampleValues.get(idx);
    }
    return null;
}

///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////// BEHAVIOUR SPECIFIC
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////

/**
 * Adds a user column.
 */
public void addUserColumn() {
    // The model index is -1, for unknown.
    // Create a column...
    SheetColumn sCol = createNewColumnModel(-1);
    sCol.setUserColumn(true);
    // Add it to the configuration model...
    scm.appendRowInfo(sCol);
    // Create space to accommodate the new values in the data model...
    consData.addUserColumn(sCol);
    // Announce...
    firePropertyChange(Common.USER_COLUMN_ADDED, null, sCol);
}

/**
 * Re-applies the current filter on the event dispatch thread.
 */
void reapplyFilter() {
    EventQueue.invokeLater(new Runnable() {
        public void run() {
            consData.reapplyFilter();
        }
    });
}

/**
 * Resets all changes on the JSheet model.
 */
void resetAll() {
    EventQueue.invokeLater(new Runnable() {
        public void run() {
            if (createSampleValuesFromModel) {
                setReadOnlyDataModel(tmROData);
            } else {
                setReadOnlyDataModel(tmROData, sampleValuesAL);
            }
        }
    });
}

/**
 * Rearranges column visibility according to the given SheetColumn model.
 * @param colSheet
 */
void reapplyColumnVisibility(SheetColumn colSheet) {
    consData.setColumnVisibility(colSheet);
}

// /** Notifies listeners that the magnifying glass button has been clicked.
//  *  The model row index where the click was detected is passed as the new value. */
// void fireRowDoubleClicked(int viewRowIndex, int viewColumnIndex) {
//     int row = consData.convertRowIndexToModel(viewRowIndex);
//     firePropertyChange(EVENT_ROW_DOUBLE_CLICKED, -1, row);
// }

// /** Notifies listeners that the magnifying glass button has been clicked.
//  *  The model row index where the click was detected is passed as the new value. */
// void fireShowDetailsClicked(int viewRowIndex, int viewColumnIndex) {
//     int row = consData.convertRowIndexToModel(viewRowIndex);
//     firePropertyChange(REQUESTED_SHOW_ROW_DETAILS, -1, row);
// }

/** This method is called from within the constructor to
 * initialize the form.
 * WARNING: Do NOT modify this code. The content of this method is
 * always regenerated by the Form Editor.
*/ // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { tblFixed = new javax.swing.JTable(); pnlFixedTblHeader = new javax.swing.JPanel(); pnlFixedTblMGCellRenderer = new javax.swing.JPanel(); pnlFixedTblMGCellEditor = new javax.swing.JPanel(); pnlFixedTblHangerRenderer = new javax.swing.JPanel(); diaFilterConsole = new javax.swing.JDialog(); jPanel1 = new javax.swing.JPanel(); butDiaFilterClose = new javax.swing.JButton(); diaConsSheetCust = new javax.swing.JDialog(); pnlSheetCust = new javax.swing.JPanel(); jPanel2 = new javax.swing.JPanel(); butDiaSheetCustClose = new javax.swing.JButton(); diaFiltersViewConsole = new javax.swing.JDialog(); jPanel3 = new javax.swing.JPanel(); pnlConsole = new javax.swing.JPanel(); pnlDataView = new javax.swing.JPanel(); tblFixed.setModel(new javax.swing.table.DefaultTableModel( new Object[][] { { null, null, null, null }, { null, null, null, null }, { null, null, null, null }, { null, null, null, null } }, new String[] { "Title 1", "Title 2", "Title 3", "Title 4" })); tblFixed.addMouseListener(new java.awt.event.MouseAdapter() { public void mouseClicked(java.awt.event.MouseEvent evt) { tblFixedMouseClicked(evt); } public void mouseEntered(java.awt.event.MouseEvent evt) { tblFixedMouseEntered(evt); } public void mousePressed(java.awt.event.MouseEvent evt) { tblFixedMousePressed(evt); } public void mouseReleased(java.awt.event.MouseEvent evt) { tblFixedMouseReleased(evt); } }); pnlFixedTblHeader.setLayout(new java.awt.GridBagLayout()); pnlFixedTblMGCellRenderer.setPreferredSize(new java.awt.Dimension(20, 20)); pnlFixedTblMGCellRenderer.setLayout(new java.awt.BorderLayout()); pnlFixedTblMGCellEditor.setToolTipText("Click Me to View This Row in More Details"); pnlFixedTblMGCellEditor.setPreferredSize(new java.awt.Dimension(20, 20)); pnlFixedTblMGCellEditor.setLayout(new java.awt.BorderLayout()); diaFilterConsole.setTitle("JSheet - Context Sensitive Column Filter"); diaFilterConsole.setLocationByPlatform(true); diaFilterConsole.setMinimumSize(new java.awt.Dimension(200, 200)); butDiaFilterClose.setText("Close"); butDiaFilterClose.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { butDiaFilterCloseActionPerformed(evt); } }); jPanel1.add(butDiaFilterClose); diaFilterConsole.getContentPane().add(jPanel1, java.awt.BorderLayout.SOUTH); diaConsSheetCust.setTitle("Data View - Define Columns "); pnlSheetCust.setLayout(new java.awt.BorderLayout()); diaConsSheetCust.getContentPane().add(pnlSheetCust, java.awt.BorderLayout.CENTER); butDiaSheetCustClose.setText("Close"); butDiaSheetCustClose.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { butDiaSheetCustCloseActionPerformed(evt); } }); jPanel2.add(butDiaSheetCustClose); diaConsSheetCust.getContentPane().add(jPanel2, java.awt.BorderLayout.SOUTH); diaFiltersViewConsole.setMinimumSize(new java.awt.Dimension(100, 100)); setLayout(new java.awt.BorderLayout()); jPanel3.setLayout(new java.awt.BorderLayout()); pnlConsole.setLayout(new java.awt.BorderLayout()); jPanel3.add(pnlConsole, java.awt.BorderLayout.NORTH); pnlDataView.setLayout(new java.awt.BorderLayout()); jPanel3.add(pnlDataView, java.awt.BorderLayout.CENTER); add(jPanel3, java.awt.BorderLayout.CENTER); }// </editor-fold>//GEN-END:initComponents private void tblFixedMouseEntered(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_tblFixedMouseEntered // TODO add your 
handling code here: }//GEN-LAST:event_tblFixedMouseEntered private void tblFixedMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_tblFixedMouseClicked // TODO add your handling code here: // }//GEN-LAST:event_tblFixedMouseClicked private void tblFixedMousePressed(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_tblFixedMousePressed // TODO add your handling code here: selectionRowMode = true; mousePressed = true; }//GEN-LAST:event_tblFixedMousePressed private void tblFixedMouseReleased(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_tblFixedMouseReleased // TODO add your handling code here: // selectionRowMode = false; mousePressed = false; }//GEN-LAST:event_tblFixedMouseReleased private void butDiaFilterCloseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_butDiaFilterCloseActionPerformed // TODO add your handling code here: diaFilterConsole.setVisible(false); }//GEN-LAST:event_butDiaFilterCloseActionPerformed private void butDiaSheetCustCloseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_butDiaSheetCustCloseActionPerformed // TODO add your handling code here: diaConsSheetCust.setVisible(false); }//GEN-LAST:event_butDiaSheetCustCloseActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton butDiaFilterClose; private javax.swing.JButton butDiaSheetCustClose; private javax.swing.JDialog diaConsSheetCust; private javax.swing.JDialog diaFilterConsole; private javax.swing.JDialog diaFiltersViewConsole; private javax.swing.JPanel jPanel1; private javax.swing.JPanel jPanel2; private javax.swing.JPanel jPanel3; private javax.swing.JPanel pnlConsole; private javax.swing.JPanel pnlDataView; private javax.swing.JPanel pnlFixedTblHangerRenderer; private javax.swing.JPanel pnlFixedTblHeader; private javax.swing.JPanel pnlFixedTblMGCellEditor; private javax.swing.JPanel pnlFixedTblMGCellRenderer; private javax.swing.JPanel pnlSheetCust; private javax.swing.JTable tblFixed; // End of variables declaration//GEN-END:variables // public static void main(String[] args) { // SheetTestFrame.main(args); // } class InternalPCListener implements PropertyChangeListener { public void propertyChange(PropertyChangeEvent evt) { if (evt.getPropertyName().equals(Common.USER_COLUMN_ADDITION_REQUESTED)) { addUserColumn(); return; } if (evt.getPropertyName().equals(Common.COLUMN_VISIBLITY_CHANGE_REQUESTED)) { reapplyColumnVisibility((SheetColumn) evt.getNewValue()); // The column may have some associated active filter ... reapplyFilter(); return; } if (evt.getPropertyName().equals(SheetColumn.FILTER_CHANGED)) { // Schedule call to filtering... reapplyFilter(); return; } if (evt.getPropertyName().equals(Common.REQUEST_RESET_ALL)) { // Schedule call to reset... resetAll(); return; } if (evt.getPropertyName().equals(Common.REQUESTED_SHOW_ROW_DETAILS)) { // Schedule call to reset... firePropertyChange(REQUESTED_SHOW_ROW_DETAILS, -1, evt.getNewValue()); return; } if (evt.getPropertyName().equals(Common.EVENT_DATA_ROW_DOUBLE_CLICKED)) { // Schedule call to reset... firePropertyChange(EVENT_DATA_ROW_DOUBLE_CLICKED, -1, evt.getNewValue()); return; } if (evt.getPropertyName().equals(Common.EVENT_DATA_SINGLE_CLICKED)) { // Schedule call to reset... firePropertyChange(EVENT_DATA_SINGLE_CLICKED, -1, evt.getNewValue()); return; } if (evt.getPropertyName().equals(Common.EVENT_HEADER_ROW_DOUBLE_CLICKED)) { // Schedule call to reset... 
            firePropertyChange(EVENT_HEADER_ROW_DOUBLE_CLICKED, -1, evt.getNewValue());
            return;
        }
    }
}

// /** If the data-view cell selection has changed, we need to remove the selection from the row selectors... */
// class DataViewCellsSelectionListener implements ListSelectionListener {
//
//     public void valueChanged(ListSelectionEvent e) {
//         if (!mousePressed) {
//             selectionRowMode = false;
//             tblFixed.repaint();
//         }
//     }
// }

class ColSelectionListener implements ListSelectionListener {

    public void valueChanged(ListSelectionEvent e) {
        if (e.getValueIsAdjusting()) // Ignore it...
        {
            return;
        }
        SheetColumn sheetInfo = null;
        int idxSelCol = consData.getSelectedColumn();
        if (idxSelCol < 0) // There is no valid column selection; maybe no column is selected...
        {
            return;
        }
        sheetInfo = scm.getSheetInfo(consData.convertColumnIndexToModel(idxSelCol));
        if (sheetInfo.isUserColumn()) {
            consColFilter.setModel(null);
            consColFilter.setHeader("<html><br>Filter NOT supported<br>on User Columns");
            return;
        }
        // At last we can do something useful...
        // Set the appropriate model on the context-dependent filter console...
        consColFilter.setModel(sheetInfo.getFilterCondition());
        if (null == sheetInfo.getHeaderValue()) {
            consColFilter.setHeader("");
        } else {
            consColFilter.setHeader(sheetInfo.getHeaderValue().toString());
        }
    }
}

class ExportCellsActionListener implements ActionListener {

    public void actionPerformed(ActionEvent e) {
        exportSelectedCells();
    }
}

class ShowPropDialogActionListener implements ActionListener {

    public void actionPerformed(ActionEvent e) {
        diaConsSheetCust.setVisible(true);
        //showColVisibilitySettingsDialog();
    }
}

//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// FILE writing: exporting data to CSV (contributed code, not by Jassi)...
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

private void exportSelectedCells() {
    exportToFile(true);
}

private void exportMarkedRows() {
    exportToFile(false);
}

/**
 * Performs the export operation on the data list. It prompts the user for a
 * file name and then saves the selected rows into that file in CSV form.
 */
private void exportToFile(boolean cellSelectionEnabled) {
    boolean done = false;
    do {
        JFileChooser fileChooser = new JFileChooser();
        fileChooser.setAcceptAllFileFilterUsed(false);
        fileChooser.setFileFilter(new Cab2bFileFilter(new String[] { "csv" }));
        int status = fileChooser.showSaveDialog(this);
        if (JFileChooser.APPROVE_OPTION == status) {
            File selFile = fileChooser.getSelectedFile();
            String fileName = selFile.getAbsolutePath();
            if (true == selFile.exists()) {
                // Prompt the user to confirm whether to overwrite the existing file.
                int confirmationValue = JOptionPane.showConfirmDialog(fileChooser, "The file " + selFile.getName()
                        + " already exists.\nDo you want to replace existing file?", "caB2B Confirmation",
                        JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE);
                if (confirmationValue == JOptionPane.NO_OPTION) {
                    continue;
                }
            } else {
                if (false == fileName.endsWith(".csv")) {
                    fileName = fileName + ".csv";
                }
            }
            BufferedWriter out = null;
            try {
                out = new BufferedWriter(new FileWriter(fileName));
                String csvString = consData.getTblSelectionDataWithCommas(true).toString();
                // String csvString = cellSelectionEnabled ?
// consData.getTblSelectionDataWithCommas().toString() // : consData.getTblRowDataWithComma(tmFixedLeft.getRowSelections()).toString(); out.write(csvString); out.close(); } catch (IOException e) { String message = String.format("Exception while exporting data in file: %s. \nReason=%s", fileName, e.getMessage()); lgr.warning(message); JOptionPane.showMessageDialog(this, message); } finally { try { if (null != out) { out.close(); } } catch (Exception e) { lgr.warning(String.format( "Exception while closing file. Export may NOT have been successful.\nReason=%s", e.getMessage())); } } } done = true; } while (!done); } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// }
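/*
 * Usage sketch (hypothetical): how a caller might hand a read-only model plus per-column sample
 * values to setReadOnlyDataModel(...) above. The SheetUsageSketch class, the DefaultTableModel
 * contents and the column layout are illustrative assumptions, not part of the JSheet API shown above.
 */
class SheetUsageSketch {

    static void configure(JSheet sheet) {
        javax.swing.table.TableModel model = new javax.swing.table.DefaultTableModel(
                new Object[][] { { "alpha", Integer.valueOf(1) }, { "beta", Integer.valueOf(2) } },
                new String[] { "Name", "Count" });

        // One TreeSet of expected values per column; these feed the range/list filters.
        java.util.ArrayList<java.util.TreeSet<Comparable<?>>> sampleValues =
                new java.util.ArrayList<java.util.TreeSet<Comparable<?>>>();
        java.util.TreeSet<Comparable<?>> names = new java.util.TreeSet<Comparable<?>>();
        names.add("alpha");
        names.add("beta");
        sampleValues.add(names); // samples for column 0
        sampleValues.add(null);  // column 1: no samples supplied, so its filter stays ineffective

        // Passing null instead of sampleValues would make JSheet derive the samples itself
        // by scanning the whole model (createSampleValuesFromModel == true).
        sheet.setReadOnlyDataModel(model, sampleValues);
    }
}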
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.runtime.arrow; import org.apache.flink.api.java.tuple.Tuple5; import org.apache.flink.table.data.GenericRowData; import org.apache.flink.table.data.RowData; import org.apache.flink.table.data.vector.ColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowArrayColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowBigIntColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowBooleanColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowDateColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowDecimalColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowDoubleColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowFloatColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowIntColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowRowColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowSmallIntColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowTimeColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowTimestampColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowTinyIntColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowVarBinaryColumnVector; import org.apache.flink.table.runtime.arrow.vectors.ArrowVarCharColumnVector; import org.apache.flink.table.runtime.arrow.writers.ArrayWriter; import org.apache.flink.table.runtime.arrow.writers.ArrowFieldWriter; import org.apache.flink.table.runtime.arrow.writers.BigIntWriter; import org.apache.flink.table.runtime.arrow.writers.BooleanWriter; import org.apache.flink.table.runtime.arrow.writers.DateWriter; import org.apache.flink.table.runtime.arrow.writers.DecimalWriter; import org.apache.flink.table.runtime.arrow.writers.DoubleWriter; import org.apache.flink.table.runtime.arrow.writers.FloatWriter; import org.apache.flink.table.runtime.arrow.writers.IntWriter; import org.apache.flink.table.runtime.arrow.writers.RowWriter; import org.apache.flink.table.runtime.arrow.writers.SmallIntWriter; import org.apache.flink.table.runtime.arrow.writers.TimeWriter; import org.apache.flink.table.runtime.arrow.writers.TimestampWriter; import org.apache.flink.table.runtime.arrow.writers.TinyIntWriter; import org.apache.flink.table.runtime.arrow.writers.VarBinaryWriter; import org.apache.flink.table.runtime.arrow.writers.VarCharWriter; import org.apache.flink.table.types.logical.ArrayType; import org.apache.flink.table.types.logical.BigIntType; import org.apache.flink.table.types.logical.BooleanType; import org.apache.flink.table.types.logical.DateType; import org.apache.flink.table.types.logical.DecimalType; import 
org.apache.flink.table.types.logical.DoubleType; import org.apache.flink.table.types.logical.FloatType; import org.apache.flink.table.types.logical.IntType; import org.apache.flink.table.types.logical.LocalZonedTimestampType; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.table.types.logical.SmallIntType; import org.apache.flink.table.types.logical.TimeType; import org.apache.flink.table.types.logical.TimestampType; import org.apache.flink.table.types.logical.TinyIntType; import org.apache.flink.table.types.logical.VarBinaryType; import org.apache.flink.table.types.logical.VarCharType; import org.apache.flink.shaded.guava30.com.google.common.collect.Lists; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.vector.VectorSchemaRoot; import org.apache.arrow.vector.ipc.ArrowStreamWriter; import org.apache.arrow.vector.types.DateUnit; import org.apache.arrow.vector.types.FloatingPointPrecision; import org.apache.arrow.vector.types.TimeUnit; import org.apache.arrow.vector.types.pojo.ArrowType; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.Schema; import org.junit.BeforeClass; import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.channels.Channels; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.junit.Assert.assertEquals; /** Tests for {@link ArrowUtils}. */ public class ArrowUtilsTest { private static List<Tuple5<String, LogicalType, ArrowType, Class<?>, Class<?>>> testFields; private static RowType rowType; private static BufferAllocator allocator; @BeforeClass public static void init() { testFields = new ArrayList<>(); testFields.add( Tuple5.of( "f1", new TinyIntType(), new ArrowType.Int(8, true), TinyIntWriter.TinyIntWriterForRow.class, ArrowTinyIntColumnVector.class)); testFields.add( Tuple5.of( "f2", new SmallIntType(), new ArrowType.Int(8 * 2, true), SmallIntWriter.SmallIntWriterForRow.class, ArrowSmallIntColumnVector.class)); testFields.add( Tuple5.of( "f3", new IntType(), new ArrowType.Int(8 * 4, true), IntWriter.IntWriterForRow.class, ArrowIntColumnVector.class)); testFields.add( Tuple5.of( "f4", new BigIntType(), new ArrowType.Int(8 * 8, true), BigIntWriter.BigIntWriterForRow.class, ArrowBigIntColumnVector.class)); testFields.add( Tuple5.of( "f5", new BooleanType(), new ArrowType.Bool(), BooleanWriter.BooleanWriterForRow.class, ArrowBooleanColumnVector.class)); testFields.add( Tuple5.of( "f6", new FloatType(), new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE), FloatWriter.FloatWriterForRow.class, ArrowFloatColumnVector.class)); testFields.add( Tuple5.of( "f7", new DoubleType(), new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE), DoubleWriter.DoubleWriterForRow.class, ArrowDoubleColumnVector.class)); testFields.add( Tuple5.of( "f8", new VarCharType(), ArrowType.Utf8.INSTANCE, VarCharWriter.VarCharWriterForRow.class, ArrowVarCharColumnVector.class)); testFields.add( Tuple5.of( "f9", new VarBinaryType(), ArrowType.Binary.INSTANCE, VarBinaryWriter.VarBinaryWriterForRow.class, ArrowVarBinaryColumnVector.class)); testFields.add( Tuple5.of( "f10", new DecimalType(10, 3), new ArrowType.Decimal(10, 3), DecimalWriter.DecimalWriterForRow.class, ArrowDecimalColumnVector.class)); testFields.add( Tuple5.of( "f11", new DateType(), new ArrowType.Date(DateUnit.DAY), DateWriter.DateWriterForRow.class, 
ArrowDateColumnVector.class)); testFields.add( Tuple5.of( "f13", new TimeType(0), new ArrowType.Time(TimeUnit.SECOND, 32), TimeWriter.TimeWriterForRow.class, ArrowTimeColumnVector.class)); testFields.add( Tuple5.of( "f14", new TimeType(2), new ArrowType.Time(TimeUnit.MILLISECOND, 32), TimeWriter.TimeWriterForRow.class, ArrowTimeColumnVector.class)); testFields.add( Tuple5.of( "f15", new TimeType(4), new ArrowType.Time(TimeUnit.MICROSECOND, 64), TimeWriter.TimeWriterForRow.class, ArrowTimeColumnVector.class)); testFields.add( Tuple5.of( "f16", new TimeType(8), new ArrowType.Time(TimeUnit.NANOSECOND, 64), TimeWriter.TimeWriterForRow.class, ArrowTimeColumnVector.class)); testFields.add( Tuple5.of( "f17", new LocalZonedTimestampType(0), new ArrowType.Timestamp(TimeUnit.SECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class)); testFields.add( Tuple5.of( "f18", new LocalZonedTimestampType(2), new ArrowType.Timestamp(TimeUnit.MILLISECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class)); testFields.add( Tuple5.of( "f19", new LocalZonedTimestampType(4), new ArrowType.Timestamp(TimeUnit.MICROSECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class)); testFields.add( Tuple5.of( "f20", new LocalZonedTimestampType(8), new ArrowType.Timestamp(TimeUnit.NANOSECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class)); testFields.add( Tuple5.of( "f21", new TimestampType(0), new ArrowType.Timestamp(TimeUnit.SECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class)); testFields.add( Tuple5.of( "f22", new TimestampType(2), new ArrowType.Timestamp(TimeUnit.MILLISECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class)); testFields.add( Tuple5.of( "f23", new TimestampType(4), new ArrowType.Timestamp(TimeUnit.MICROSECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class)); testFields.add( Tuple5.of( "f24", new TimestampType(8), new ArrowType.Timestamp(TimeUnit.NANOSECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class)); testFields.add( Tuple5.of( "f25", new ArrayType(new VarCharType()), ArrowType.List.INSTANCE, ArrayWriter.ArrayWriterForRow.class, ArrowArrayColumnVector.class)); RowType rowFieldType = new RowType( Arrays.asList( new RowType.RowField("a", new IntType()), new RowType.RowField("b", new VarCharType()), new RowType.RowField("c", new ArrayType(new VarCharType())), new RowType.RowField("d", new TimestampType(2)), new RowType.RowField( "e", new RowType( (Arrays.asList( new RowType.RowField("e1", new IntType()), new RowType.RowField( "e2", new VarCharType()))))))); testFields.add( Tuple5.of( "f26", rowFieldType, ArrowType.Struct.INSTANCE, RowWriter.RowWriterForRow.class, ArrowRowColumnVector.class)); List<RowType.RowField> rowFields = new ArrayList<>(); for (Tuple5<String, LogicalType, ArrowType, Class<?>, Class<?>> field : testFields) { rowFields.add(new RowType.RowField(field.f0, field.f1)); } rowType = new RowType(rowFields); allocator = ArrowUtils.getRootAllocator().newChildAllocator("stdout", 0, Long.MAX_VALUE); } @Test public void testConvertBetweenLogicalTypeAndArrowType() { Schema schema = ArrowUtils.toArrowSchema(rowType); assertEquals(testFields.size(), schema.getFields().size()); List<Field> fields = schema.getFields(); for (int i = 0; i < schema.getFields().size(); i++) { // verify convert from RowType 
to ArrowType assertEquals(testFields.get(i).f0, fields.get(i).getName()); assertEquals(testFields.get(i).f2, fields.get(i).getType()); } } @Test public void testCreateArrowReader() { VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator); ArrowReader reader = ArrowUtils.createArrowReader(root, rowType); ColumnVector[] columnVectors = reader.getColumnVectors(); for (int i = 0; i < columnVectors.length; i++) { assertEquals(testFields.get(i).f4, columnVectors[i].getClass()); } } @Test public void testCreateArrowWriter() { VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator); ArrowWriter<RowData> writer = ArrowUtils.createRowDataArrowWriter(root, rowType); ArrowFieldWriter<RowData>[] fieldWriters = writer.getFieldWriters(); for (int i = 0; i < fieldWriters.length; i++) { assertEquals(testFields.get(i).f3, fieldWriters[i].getClass()); } } @Test public void testReadArrowBatches() throws IOException { VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator); ArrowWriter<RowData> arrowWriter = ArrowUtils.createRowDataArrowWriter(root, rowType); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ArrowStreamWriter arrowStreamWriter = new ArrowStreamWriter(root, null, baos); arrowStreamWriter.start(); List<RowData> testData = Arrays.asList( new GenericRowData(rowType.getFieldCount()), new GenericRowData(rowType.getFieldCount()), new GenericRowData(rowType.getFieldCount()), new GenericRowData(rowType.getFieldCount()), new GenericRowData(rowType.getFieldCount())); int batches = 3; List<List<RowData>> subLists = Lists.partition(testData, testData.size() / batches + 1); for (List<RowData> subList : subLists) { for (RowData value : subList) { arrowWriter.write(value); } arrowWriter.finish(); arrowStreamWriter.writeBatch(); arrowWriter.reset(); } assertEquals( batches, ArrowUtils.readArrowBatches( Channels.newChannel(new ByteArrayInputStream(baos.toByteArray()))) .length); } }
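/*
 * Arithmetic sketch for the batching in testReadArrowBatches above: 5 rows split with a partition
 * size of testData.size() / batches + 1 = 2 yields sub-lists of sizes 2, 2 and 1, i.e. exactly 3
 * Arrow batches. Only Guava's Lists.partition, already used by the test, is relied on here.
 */
class BatchSizingSketch {

    public static void main(String[] args) {
        java.util.List<Integer> rows = java.util.Arrays.asList(1, 2, 3, 4, 5);
        int batches = 3;
        java.util.List<java.util.List<Integer>> subLists =
                org.apache.flink.shaded.guava30.com.google.common.collect.Lists.partition(
                        rows, rows.size() / batches + 1);
        // Prints 3 and [[1, 2], [3, 4], [5]]: the writer loop above emits one Arrow batch per sub-list.
        System.out.println(subLists.size());
        System.out.println(subLists);
    }
}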
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.managedblockchain.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/ListProposals" target="_top">AWS * API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListProposalsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The unique identifier of the network. * </p> */ private String networkId; /** * <p> * The maximum number of proposals to return. * </p> */ private Integer maxResults; /** * <p> * The pagination token that indicates the next set of results to retrieve. * </p> */ private String nextToken; /** * <p> * The unique identifier of the network. * </p> * * @param networkId * The unique identifier of the network. */ public void setNetworkId(String networkId) { this.networkId = networkId; } /** * <p> * The unique identifier of the network. * </p> * * @return The unique identifier of the network. */ public String getNetworkId() { return this.networkId; } /** * <p> * The unique identifier of the network. * </p> * * @param networkId * The unique identifier of the network. * @return Returns a reference to this object so that method calls can be chained together. */ public ListProposalsRequest withNetworkId(String networkId) { setNetworkId(networkId); return this; } /** * <p> * The maximum number of proposals to return. * </p> * * @param maxResults * The maximum number of proposals to return. */ public void setMaxResults(Integer maxResults) { this.maxResults = maxResults; } /** * <p> * The maximum number of proposals to return. * </p> * * @return The maximum number of proposals to return. */ public Integer getMaxResults() { return this.maxResults; } /** * <p> * The maximum number of proposals to return. * </p> * * @param maxResults * The maximum number of proposals to return. * @return Returns a reference to this object so that method calls can be chained together. */ public ListProposalsRequest withMaxResults(Integer maxResults) { setMaxResults(maxResults); return this; } /** * <p> * The pagination token that indicates the next set of results to retrieve. * </p> * * @param nextToken * The pagination token that indicates the next set of results to retrieve. */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * The pagination token that indicates the next set of results to retrieve. * </p> * * @return The pagination token that indicates the next set of results to retrieve. */ public String getNextToken() { return this.nextToken; } /** * <p> * The pagination token that indicates the next set of results to retrieve. * </p> * * @param nextToken * The pagination token that indicates the next set of results to retrieve. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public ListProposalsRequest withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getNetworkId() != null) sb.append("NetworkId: ").append(getNetworkId()).append(","); if (getMaxResults() != null) sb.append("MaxResults: ").append(getMaxResults()).append(","); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListProposalsRequest == false) return false; ListProposalsRequest other = (ListProposalsRequest) obj; if (other.getNetworkId() == null ^ this.getNetworkId() == null) return false; if (other.getNetworkId() != null && other.getNetworkId().equals(this.getNetworkId()) == false) return false; if (other.getMaxResults() == null ^ this.getMaxResults() == null) return false; if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getNetworkId() == null) ? 0 : getNetworkId().hashCode()); hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); return hashCode; } @Override public ListProposalsRequest clone() { return (ListProposalsRequest) super.clone(); } }
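/*
 * Pagination sketch (hypothetical): how maxResults and nextToken are typically combined when
 * calling ListProposals. The AmazonManagedBlockchain client, its builder and the
 * ListProposalsResult accessors are assumed to follow the usual SDK v1 shape; the network id is a
 * placeholder.
 */
class ListProposalsPaginationSketch {

    public static void main(String[] args) {
        com.amazonaws.services.managedblockchain.AmazonManagedBlockchain client =
                com.amazonaws.services.managedblockchain.AmazonManagedBlockchainClientBuilder.defaultClient();
        String nextToken = null;
        do {
            ListProposalsRequest request = new ListProposalsRequest()
                    .withNetworkId("n-EXAMPLE") // placeholder network id
                    .withMaxResults(25)
                    .withNextToken(nextToken);
            com.amazonaws.services.managedblockchain.model.ListProposalsResult result =
                    client.listProposals(request);
            for (Object proposal : result.getProposals()) {
                System.out.println(proposal);
            }
            nextToken = result.getNextToken(); // null once the last page has been returned
        } while (nextToken != null);
    }
}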
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.lexmodelsv2.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Filters the response form the <a * href="https://docs.aws.amazon.com/lexv2/latest/dg/API_ListExports.html">ListExports</a> operation * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/models.lex.v2-2020-08-07/ExportFilter" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ExportFilter implements Serializable, Cloneable, StructuredPojo { /** * <p> * The name of the field to use for filtering. * </p> */ private String name; /** * <p> * The values to use to filter the response. The values must be <code>Bot</code>, <code>BotLocale</code>, or * <code>CustomVocabulary</code>. * </p> */ private java.util.List<String> values; /** * <p> * The operator to use for the filter. Specify EQ when the <code>ListExports</code> operation should return only * resource types that equal the specified value. Specify CO when the <code>ListExports</code> operation should * return resource types that contain the specified value. * </p> */ private String operator; /** * <p> * The name of the field to use for filtering. * </p> * * @param name * The name of the field to use for filtering. * @see ExportFilterName */ public void setName(String name) { this.name = name; } /** * <p> * The name of the field to use for filtering. * </p> * * @return The name of the field to use for filtering. * @see ExportFilterName */ public String getName() { return this.name; } /** * <p> * The name of the field to use for filtering. * </p> * * @param name * The name of the field to use for filtering. * @return Returns a reference to this object so that method calls can be chained together. * @see ExportFilterName */ public ExportFilter withName(String name) { setName(name); return this; } /** * <p> * The name of the field to use for filtering. * </p> * * @param name * The name of the field to use for filtering. * @return Returns a reference to this object so that method calls can be chained together. * @see ExportFilterName */ public ExportFilter withName(ExportFilterName name) { this.name = name.toString(); return this; } /** * <p> * The values to use to filter the response. The values must be <code>Bot</code>, <code>BotLocale</code>, or * <code>CustomVocabulary</code>. * </p> * * @return The values to use to filter the response. The values must be <code>Bot</code>, <code>BotLocale</code>, or * <code>CustomVocabulary</code>. */ public java.util.List<String> getValues() { return values; } /** * <p> * The values to use to filter the response. The values must be <code>Bot</code>, <code>BotLocale</code>, or * <code>CustomVocabulary</code>. * </p> * * @param values * The values to use to filter the response. 
The values must be <code>Bot</code>, <code>BotLocale</code>, or * <code>CustomVocabulary</code>. */ public void setValues(java.util.Collection<String> values) { if (values == null) { this.values = null; return; } this.values = new java.util.ArrayList<String>(values); } /** * <p> * The values to use to filter the response. The values must be <code>Bot</code>, <code>BotLocale</code>, or * <code>CustomVocabulary</code>. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setValues(java.util.Collection)} or {@link #withValues(java.util.Collection)} if you want to override the * existing values. * </p> * * @param values * The values to use to filter the response. The values must be <code>Bot</code>, <code>BotLocale</code>, or * <code>CustomVocabulary</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public ExportFilter withValues(String... values) { if (this.values == null) { setValues(new java.util.ArrayList<String>(values.length)); } for (String ele : values) { this.values.add(ele); } return this; } /** * <p> * The values to use to filter the response. The values must be <code>Bot</code>, <code>BotLocale</code>, or * <code>CustomVocabulary</code>. * </p> * * @param values * The values to use to filter the response. The values must be <code>Bot</code>, <code>BotLocale</code>, or * <code>CustomVocabulary</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public ExportFilter withValues(java.util.Collection<String> values) { setValues(values); return this; } /** * <p> * The operator to use for the filter. Specify EQ when the <code>ListExports</code> operation should return only * resource types that equal the specified value. Specify CO when the <code>ListExports</code> operation should * return resource types that contain the specified value. * </p> * * @param operator * The operator to use for the filter. Specify EQ when the <code>ListExports</code> operation should return * only resource types that equal the specified value. Specify CO when the <code>ListExports</code> operation * should return resource types that contain the specified value. * @see ExportFilterOperator */ public void setOperator(String operator) { this.operator = operator; } /** * <p> * The operator to use for the filter. Specify EQ when the <code>ListExports</code> operation should return only * resource types that equal the specified value. Specify CO when the <code>ListExports</code> operation should * return resource types that contain the specified value. * </p> * * @return The operator to use for the filter. Specify EQ when the <code>ListExports</code> operation should return * only resource types that equal the specified value. Specify CO when the <code>ListExports</code> * operation should return resource types that contain the specified value. * @see ExportFilterOperator */ public String getOperator() { return this.operator; } /** * <p> * The operator to use for the filter. Specify EQ when the <code>ListExports</code> operation should return only * resource types that equal the specified value. Specify CO when the <code>ListExports</code> operation should * return resource types that contain the specified value. * </p> * * @param operator * The operator to use for the filter. Specify EQ when the <code>ListExports</code> operation should return * only resource types that equal the specified value. 
Specify CO when the <code>ListExports</code> operation * should return resource types that contain the specified value. * @return Returns a reference to this object so that method calls can be chained together. * @see ExportFilterOperator */ public ExportFilter withOperator(String operator) { setOperator(operator); return this; } /** * <p> * The operator to use for the filter. Specify EQ when the <code>ListExports</code> operation should return only * resource types that equal the specified value. Specify CO when the <code>ListExports</code> operation should * return resource types that contain the specified value. * </p> * * @param operator * The operator to use for the filter. Specify EQ when the <code>ListExports</code> operation should return * only resource types that equal the specified value. Specify CO when the <code>ListExports</code> operation * should return resource types that contain the specified value. * @return Returns a reference to this object so that method calls can be chained together. * @see ExportFilterOperator */ public ExportFilter withOperator(ExportFilterOperator operator) { this.operator = operator.toString(); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getValues() != null) sb.append("Values: ").append(getValues()).append(","); if (getOperator() != null) sb.append("Operator: ").append(getOperator()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ExportFilter == false) return false; ExportFilter other = (ExportFilter) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getValues() == null ^ this.getValues() == null) return false; if (other.getValues() != null && other.getValues().equals(this.getValues()) == false) return false; if (other.getOperator() == null ^ this.getOperator() == null) return false; if (other.getOperator() != null && other.getOperator().equals(this.getOperator()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getValues() == null) ? 0 : getValues().hashCode()); hashCode = prime * hashCode + ((getOperator() == null) ? 0 : getOperator().hashCode()); return hashCode; } @Override public ExportFilter clone() { try { return (ExportFilter) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.lexmodelsv2.model.transform.ExportFilterMarshaller.getInstance().marshall(this, protocolMarshaller); } }
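/*
 * Construction sketch (hypothetical): building an ExportFilter with the fluent setters above for a
 * ListExports request. The field name string is a placeholder (see the ExportFilterName enum for
 * the real constants); the "Bot" value and the EQ operator come from the Javadoc above.
 */
class ExportFilterUsageSketch {

    static ExportFilter botExportsOnly() {
        return new ExportFilter()
                .withName("ResourceType") // placeholder field name; prefer the ExportFilterName constants
                .withValues("Bot")        // return only bot exports
                .withOperator("EQ");      // exact match, per the operator documentation above
    }
}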
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.cluster; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.StatusLine; import org.apache.http.auth.AuthScope; import org.apache.http.auth.Credentials; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.protocol.HttpContext; import org.h2.util.IOUtils; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.pentaho.di.core.Const; import org.pentaho.di.core.encryption.Encr; import org.pentaho.di.core.encryption.TwoWayPasswordEncoderPluginType; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.util.EnvUtil; import org.pentaho.di.utils.TestUtils; import org.pentaho.di.www.GetPropertiesServlet; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyMapOf; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; /** * Tests for SlaveServer class * * @author Pavel Sakun * @see SlaveServer */ public class SlaveServerTest { SlaveServer slaveServer; @BeforeClass public static void beforeClass() throws KettleException { PluginRegistry.addPluginType( TwoWayPasswordEncoderPluginType.getInstance() ); PluginRegistry.init(); String passwordEncoderPluginID = Const.NVL( EnvUtil.getSystemProperty( Const.KETTLE_PASSWORD_ENCODER_PLUGIN ), "Kettle" ); Encr.init( passwordEncoderPluginID ); } @AfterClass public static void tearDown() { PluginRegistry.getInstance().reset(); } @Before public void init() throws IOException { SlaveConnectionManager connectionManager = SlaveConnectionManager.getInstance(); HttpClient httpClient = spy( connectionManager.createHttpClient() ); // mock response CloseableHttpResponse closeableHttpResponseMock = mock( CloseableHttpResponse.class ); // mock status line StatusLine statusLineMock = mock( StatusLine.class ); 
doReturn( HttpStatus.SC_NOT_FOUND ).when( statusLineMock ).getStatusCode(); doReturn( statusLineMock ).when( closeableHttpResponseMock ).getStatusLine(); // mock entity HttpEntity httpEntityMock = mock( HttpEntity.class ); doReturn( httpEntityMock ).when( closeableHttpResponseMock ).getEntity(); doReturn( closeableHttpResponseMock ).when( httpClient ).execute( any( HttpGet.class ) ); doReturn( closeableHttpResponseMock ).when( httpClient ).execute( any( HttpPost.class ) ); doReturn( closeableHttpResponseMock ).when( httpClient ).execute( any( HttpPost.class ), any( HttpClientContext.class ) ); slaveServer = spy( new SlaveServer() ); doReturn( httpClient ).when( slaveServer ).getHttpClient(); doReturn( "response_body" ).when( slaveServer ).getResponseBodyAsString( any( InputStream.class ) ); } private HttpResponse mockResponse( int statusCode, String entityText ) throws IOException { HttpResponse resp = mock( HttpResponse.class ); StatusLine status = mock( StatusLine.class ); when( status.getStatusCode() ).thenReturn( statusCode ); when( resp.getStatusLine() ).thenReturn( status ); HttpEntity entity = mock( HttpEntity.class ); when( entity.getContent() ).thenReturn( IOUtils.getInputStream( entityText ) ); when( resp.getEntity() ).thenReturn( entity ); return resp; } @Test( expected = KettleException.class ) public void testExecService() throws Exception { HttpGet httpGetMock = mock( HttpGet.class ); URI uriMock = new URI( "fake" ); doReturn( uriMock ).when( httpGetMock ).getURI(); doReturn( httpGetMock ).when( slaveServer ).buildExecuteServiceMethod( anyString(), anyMapOf( String.class, String.class ) ); slaveServer.setHostname( "hostNameStub" ); slaveServer.setUsername( "userNAmeStub" ); slaveServer.execService( "wrong_app_name" ); fail( "Incorrect connection details had been used, but no exception was thrown" ); } @Test( expected = KettleException.class ) public void testSendXML() throws Exception { slaveServer.setHostname( "hostNameStub" ); slaveServer.setUsername( "userNAmeStub" ); HttpPost httpPostMock = mock( HttpPost.class ); URI uriMock = new URI( "fake" ); doReturn( uriMock ).when( httpPostMock ).getURI(); doReturn( httpPostMock ).when( slaveServer ).buildSendXMLMethod( any( byte[].class ), anyString() ); slaveServer.sendXML( "", "" ); fail( "Incorrect connection details had been used, but no exception was thrown" ); } @Test( expected = KettleException.class ) public void testSendExport() throws Exception { slaveServer.setHostname( "hostNameStub" ); slaveServer.setUsername( "userNAmeStub" ); HttpPost httpPostMock = mock( HttpPost.class ); URI uriMock = new URI( "fake" ); doReturn( uriMock ).when( httpPostMock ).getURI(); doReturn( httpPostMock ).when( slaveServer ).buildSendExportMethod( anyString(), anyString(), any( InputStream.class ) ); File tempFile; tempFile = File.createTempFile( "PDI-", "tmp" ); tempFile.deleteOnExit(); slaveServer.sendExport( tempFile.getAbsolutePath(), "", "" ); fail( "Incorrect connection details had been used, but no exception was thrown" ); } @Test public void testSendExportOk() throws Exception { slaveServer.setUsername( "uname" ); slaveServer.setPassword( "passw" ); slaveServer.setHostname( "hname" ); slaveServer.setPort( "1111" ); HttpPost httpPostMock = mock( HttpPost.class ); URI uriMock = new URI( "fake" ); final String responseContent = "baah"; when( httpPostMock.getURI() ).thenReturn( uriMock ); doReturn( uriMock ).when( httpPostMock ).getURI(); HttpClient client = mock( HttpClient.class ); when( client.execute( any(), any( HttpContext.class ) 
) ).then( new Answer<HttpResponse>() { @Override public HttpResponse answer( InvocationOnMock invocation ) throws Throwable { HttpClientContext context = invocation.getArgumentAt( 1, HttpClientContext.class ); Credentials cred = context.getCredentialsProvider().getCredentials( new AuthScope( "hname", 1111 ) ); assertEquals( "uname", cred.getUserPrincipal().getName() ); return mockResponse( 200, responseContent ); } } ); // override init when( slaveServer.getHttpClient() ).thenReturn( client ); when( slaveServer.getResponseBodyAsString( any() ) ).thenCallRealMethod(); doReturn( httpPostMock ).when( slaveServer ).buildSendExportMethod( anyString(), anyString(), any( InputStream.class ) ); File tempFile; tempFile = File.createTempFile( "PDI-", "tmp" ); tempFile.deleteOnExit(); String result = slaveServer.sendExport( tempFile.getAbsolutePath(), null, null ); assertEquals( responseContent, result ); } @Test public void testAddCredentials() throws IOException, ClassNotFoundException { String testUser = "test_username"; slaveServer.setUsername( testUser ); String testPassword = "test_password"; slaveServer.setPassword( testPassword ); String host = "somehost"; slaveServer.setHostname( host ); int port = 1000; slaveServer.setPort( "" + port ); HttpClientContext auth = slaveServer.getAuthContext(); Credentials cred = auth.getCredentialsProvider().getCredentials( new AuthScope( host, port ) ); assertEquals( testUser, cred.getUserPrincipal().getName() ); assertEquals( testPassword, cred.getPassword() ); String user2 = "user2"; slaveServer.setUsername( user2 ); slaveServer.setPassword( "pass2" ); auth = slaveServer.getAuthContext(); cred = auth.getCredentialsProvider().getCredentials( new AuthScope( host, port ) ); assertEquals( user2, cred.getUserPrincipal().getName() ); } @Test public void testModifyingName() { slaveServer.setName( "test" ); List<SlaveServer> list = new ArrayList<SlaveServer>(); list.add( slaveServer ); SlaveServer slaveServer2 = spy( new SlaveServer() ); slaveServer2.setName( "test" ); slaveServer2.verifyAndModifySlaveServerName( list, null ); assertTrue( !slaveServer.getName().equals( slaveServer2.getName() ) ); } @Test public void testEqualsHashCodeConsistency() throws Exception { SlaveServer slave = new SlaveServer(); slave.setName( "slave" ); TestUtils.checkEqualsHashCodeConsistency( slave, slave ); SlaveServer slaveSame = new SlaveServer(); slaveSame.setName( "slave" ); assertTrue( slave.equals( slaveSame ) ); TestUtils.checkEqualsHashCodeConsistency( slave, slaveSame ); SlaveServer slaveCaps = new SlaveServer(); slaveCaps.setName( "SLAVE" ); TestUtils.checkEqualsHashCodeConsistency( slave, slaveCaps ); SlaveServer slaveOther = new SlaveServer(); slaveOther.setName( "something else" ); TestUtils.checkEqualsHashCodeConsistency( slave, slaveOther ); } @Test public void testGetKettleProperties() throws Exception { String encryptedResponse = "3c3f786d6c2076657273696f6e3d22312e302220656e636f64696e6" + "73d225554462d38223f3e0a3c21444f43545950452070726f706572" + "746965730a202053595354454d2022687474703a2f2f6a6176612e737" + "56e2e636f6d2f6474642f70726f706572746965732e647464223e0a3c" + "70726f706572746965733e0a2020203c636f6d6d656e743e3c2f636f6d6d6" + "56e743e0a2020203c656e747279206b65793d224167696c6542494461746162" + "617365223e4167696c6542493c2f656e7470c7a6a5f445d7808bbb1cbc64d797bc84"; doReturn( encryptedResponse ).when( slaveServer ).execService( GetPropertiesServlet.CONTEXT_PATH + "/?xml=Y" ); slaveServer.getKettleProperties().getProperty( "AgileBIDatabase" ); assertEquals( 
"AgileBI", slaveServer.getKettleProperties().getProperty( "AgileBIDatabase" ) ); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tomcat.util.modeler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Iterator; import javax.management.Attribute; import javax.management.AttributeChangeNotification; import javax.management.AttributeList; import javax.management.AttributeNotFoundException; import javax.management.DynamicMBean; import javax.management.InstanceNotFoundException; import javax.management.InvalidAttributeValueException; import javax.management.ListenerNotFoundException; import javax.management.MBeanException; import javax.management.MBeanInfo; import javax.management.MBeanNotificationInfo; import javax.management.MBeanRegistration; import javax.management.MBeanServer; import javax.management.Notification; import javax.management.NotificationFilter; import javax.management.NotificationListener; import javax.management.ObjectName; import javax.management.ReflectionException; import javax.management.RuntimeErrorException; import javax.management.RuntimeOperationsException; import javax.management.modelmbean.InvalidTargetObjectTypeException; import javax.management.modelmbean.ModelMBeanNotificationBroadcaster; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; /* * Changes from commons.modeler: * * - use DynamicMBean * - remove methods not used in tomcat and redundant/not very generic * - must be created from the ManagedBean - I don't think there were any direct * uses, but now it is required. * - some of the gratuitous flexibility removed - instead this is more predictive and * strict with the use cases. * - all Method and metadata is stored in ManagedBean. BaseModelBMean and ManagedBean act * like Object and Class. * - setModelMBean is no longer called on resources ( not used in tomcat ) * - no caching of Methods for now - operations and setters are not called repeatedly in most * management use cases. Getters should't be called very frequently either - and even if they * are, the overhead of getting the method should be small compared with other JMX costs ( RMI, etc ). * We can add getter cache if needed. * - removed unused constructor, fields * * TODO: * - clean up catalina.mbeans, stop using weird inheritance */ /** * <p>Basic implementation of the <code>DynamicMBean</code> interface, which * supports the minimal requirements of the interface contract.</p> * * <p>This can be used directly to wrap an existing java bean, or inside * an mlet or anywhere an MBean would be used. * * Limitations: * <ul> * <li>Only managed resources of type <code>objectReference</code> are * supported.</li> * <li>Caching of attribute values and operation results is not supported. 
* All calls to <code>invoke()</code> are immediately executed.</li> * <li>Persistence of MBean attributes and operations is not supported.</li> * <li>All classes referenced as attribute types, operation parameters, or * operation return values must be one of the following: * <ul> * <li>One of the Java primitive types (boolean, byte, char, double, * float, integer, long, short). Corresponding value will be wrapped * in the appropriate wrapper class automatically.</li> * <li>Operations that return no value should declare a return type of * <code>void</code>.</li> * </ul> * <li>Attribute caching is not supported</li> * </ul> * * @author Craig R. McClanahan * @author Costin Manolache */ public class BaseModelMBean implements DynamicMBean, MBeanRegistration, ModelMBeanNotificationBroadcaster { private static final Log log = LogFactory.getLog(BaseModelMBean.class); // ----------------------------------------------------------- Constructors /** * Construct a <code>ModelMBean</code> with default * <code>ModelMBeanInfo</code> information. * * @exception MBeanException if the initializer of an object * throws an exception * @exception RuntimeOperationsException if an IllegalArgumentException * occurs */ protected BaseModelMBean() throws MBeanException, RuntimeOperationsException { super(); } // ----------------------------------------------------- Instance Variables protected ObjectName oname=null; /** * Notification broadcaster for attribute changes. */ protected BaseNotificationBroadcaster attributeBroadcaster = null; /** * Notification broadcaster for general notifications. */ protected BaseNotificationBroadcaster generalBroadcaster = null; /** Metadata for the mbean instance. */ protected ManagedBean managedBean = null; /** * The managed resource this MBean is associated with (if any). */ protected Object resource = null; // --------------------------------------------------- DynamicMBean Methods // TODO: move to ManagedBean static final Object[] NO_ARGS_PARAM = new Object[0]; static final Class<?>[] NO_ARGS_PARAM_SIG = new Class[0]; protected String resourceType = null; // key: operation val: invoke method //private Hashtable invokeAttMap=new Hashtable(); /** * Obtain and return the value of a specific attribute of this MBean. * * @param name Name of the requested attribute * * @exception AttributeNotFoundException if this attribute is not * supported by this MBean * @exception MBeanException if the initializer of an object * throws an exception * @exception ReflectionException if a Java reflection exception * occurs when invoking the getter */ @Override public Object getAttribute(String name) throws AttributeNotFoundException, MBeanException, ReflectionException { // Validate the input parameters if (name == null) throw new RuntimeOperationsException (new IllegalArgumentException("Attribute name is null"), "Attribute name is null"); if( (resource instanceof DynamicMBean) && ! ( resource instanceof BaseModelMBean )) { return ((DynamicMBean)resource).getAttribute(name); } Method m=managedBean.getGetter(name, this, resource); Object result = null; try { Class<?> declaring = m.getDeclaringClass(); // workaround for catalina weird mbeans - the declaring class is BaseModelMBean. // but this is the catalina class. 
if( declaring.isAssignableFrom(this.getClass()) ) { result = m.invoke(this, NO_ARGS_PARAM ); } else { result = m.invoke(resource, NO_ARGS_PARAM ); } } catch (InvocationTargetException e) { Throwable t = e.getTargetException(); if (t == null) t = e; if (t instanceof RuntimeException) throw new RuntimeOperationsException ((RuntimeException) t, "Exception invoking method " + name); else if (t instanceof Error) throw new RuntimeErrorException ((Error) t, "Error invoking method " + name); else throw new MBeanException (e, "Exception invoking method " + name); } catch (Exception e) { throw new MBeanException (e, "Exception invoking method " + name); } // Return the results of this method invocation // FIXME - should we validate the return type? return (result); } /** * Obtain and return the values of several attributes of this MBean. * * @param names Names of the requested attributes */ @Override public AttributeList getAttributes(String names[]) { // Validate the input parameters if (names == null) throw new RuntimeOperationsException (new IllegalArgumentException("Attribute names list is null"), "Attribute names list is null"); // Prepare our response, eating all exceptions AttributeList response = new AttributeList(); for (int i = 0; i < names.length; i++) { try { response.add(new Attribute(names[i],getAttribute(names[i]))); } catch (Exception e) { // Not having a particular attribute in the response // is the indication of a getter problem } } return (response); } public void setManagedBean(ManagedBean managedBean) { this.managedBean = managedBean; } /** * Return the <code>MBeanInfo</code> object for this MBean. */ @Override public MBeanInfo getMBeanInfo() { return managedBean.getMBeanInfo(); } /** * Invoke a particular method on this MBean, and return any returned * value. * * <p><strong>IMPLEMENTATION NOTE</strong> - This implementation will * attempt to invoke this method on the MBean itself, or (if not * available) on the managed resource object associated with this * MBean.</p> * * @param name Name of the operation to be invoked * @param params Array containing the method parameters of this operation * @param signature Array containing the class names representing * the signature of this operation * * @exception MBeanException if the initializer of an object * throws an exception * @exception ReflectionException if a Java reflection exception * occurs when invoking a method */ @Override public Object invoke(String name, Object params[], String signature[]) throws MBeanException, ReflectionException { if( (resource instanceof DynamicMBean) && ! 
( resource instanceof BaseModelMBean )) { return ((DynamicMBean)resource).invoke(name, params, signature); } // Validate the input parameters if (name == null) throw new RuntimeOperationsException (new IllegalArgumentException("Method name is null"), "Method name is null"); if( log.isDebugEnabled()) log.debug("Invoke " + name); Method method= managedBean.getInvoke(name, params, signature, this, resource); // Invoke the selected method on the appropriate object Object result = null; try { if( method.getDeclaringClass().isAssignableFrom( this.getClass()) ) { result = method.invoke(this, params ); } else { result = method.invoke(resource, params); } } catch (InvocationTargetException e) { Throwable t = e.getTargetException(); log.error("Exception invoking method " + name , t ); if (t == null) t = e; if (t instanceof RuntimeException) throw new RuntimeOperationsException ((RuntimeException) t, "Exception invoking method " + name); else if (t instanceof Error) throw new RuntimeErrorException ((Error) t, "Error invoking method " + name); else throw new MBeanException ((Exception)t, "Exception invoking method " + name); } catch (Exception e) { log.error("Exception invoking method " + name , e ); throw new MBeanException (e, "Exception invoking method " + name); } // Return the results of this method invocation // FIXME - should we validate the return type? return (result); } static Class<?> getAttributeClass(String signature) throws ReflectionException { if (signature.equals(Boolean.TYPE.getName())) return Boolean.TYPE; else if (signature.equals(Byte.TYPE.getName())) return Byte.TYPE; else if (signature.equals(Character.TYPE.getName())) return Character.TYPE; else if (signature.equals(Double.TYPE.getName())) return Double.TYPE; else if (signature.equals(Float.TYPE.getName())) return Float.TYPE; else if (signature.equals(Integer.TYPE.getName())) return Integer.TYPE; else if (signature.equals(Long.TYPE.getName())) return Long.TYPE; else if (signature.equals(Short.TYPE.getName())) return Short.TYPE; else { try { ClassLoader cl=Thread.currentThread().getContextClassLoader(); if( cl!=null ) return cl.loadClass(signature); } catch( ClassNotFoundException e ) { } try { return Class.forName(signature); } catch (ClassNotFoundException e) { throw new ReflectionException (e, "Cannot find Class for " + signature); } } } /** * Set the value of a specific attribute of this MBean. * * @param attribute The identification of the attribute to be set * and the new value * * @exception AttributeNotFoundException if this attribute is not * supported by this MBean * @exception MBeanException if the initializer of an object * throws an exception * @exception ReflectionException if a Java reflection exception * occurs when invoking the getter */ @Override public void setAttribute(Attribute attribute) throws AttributeNotFoundException, MBeanException, ReflectionException { if( log.isDebugEnabled() ) log.debug("Setting attribute " + this + " " + attribute ); if( (resource instanceof DynamicMBean) && ! 
( resource instanceof BaseModelMBean )) { try { ((DynamicMBean)resource).setAttribute(attribute); } catch (InvalidAttributeValueException e) { throw new MBeanException(e); } return; } // Validate the input parameters if (attribute == null) throw new RuntimeOperationsException (new IllegalArgumentException("Attribute is null"), "Attribute is null"); String name = attribute.getName(); Object value = attribute.getValue(); if (name == null) throw new RuntimeOperationsException (new IllegalArgumentException("Attribute name is null"), "Attribute name is null"); Object oldValue=null; //if( getAttMap.get(name) != null ) // oldValue=getAttribute( name ); Method m=managedBean.getSetter(name,this,resource); try { if( m.getDeclaringClass().isAssignableFrom( this.getClass()) ) { m.invoke(this, new Object[] { value }); } else { m.invoke(resource, new Object[] { value }); } } catch (InvocationTargetException e) { Throwable t = e.getTargetException(); if (t == null) t = e; if (t instanceof RuntimeException) throw new RuntimeOperationsException ((RuntimeException) t, "Exception invoking method " + name); else if (t instanceof Error) throw new RuntimeErrorException ((Error) t, "Error invoking method " + name); else throw new MBeanException (e, "Exception invoking method " + name); } catch (Exception e) { log.error("Exception invoking method " + name , e ); throw new MBeanException (e, "Exception invoking method " + name); } try { sendAttributeChangeNotification(new Attribute( name, oldValue), attribute); } catch(Exception ex) { log.error("Error sending notification " + name, ex); } //attributes.put( name, value ); // if( source != null ) { // // this mbean is associated with a source - maybe we want to persist // source.updateField(oname, name, value); // } } @Override public String toString() { if( resource==null ) return "BaseModelMbean[" + resourceType + "]"; return resource.toString(); } /** * Set the values of several attributes of this MBean. * * @param attributes THe names and values to be set * * @return The list of attributes that were set and their new values */ @Override public AttributeList setAttributes(AttributeList attributes) { AttributeList response = new AttributeList(); // Validate the input parameters if (attributes == null) return response; // Prepare and return our response, eating all exceptions String names[] = new String[attributes.size()]; int n = 0; Iterator<?> items = attributes.iterator(); while (items.hasNext()) { Attribute item = (Attribute) items.next(); names[n++] = item.getName(); try { setAttribute(item); } catch (Exception e) { // Ignore all exceptions } } return (getAttributes(names)); } // ----------------------------------------------------- ModelMBean Methods /** * Get the instance handle of the object against which we execute * all methods in this ModelMBean management interface. 
* * @exception InstanceNotFoundException if the managed resource object * cannot be found * @exception InvalidTargetObjectTypeException if the managed resource * object is of the wrong type * @exception MBeanException if the initializer of the object throws * an exception * @exception RuntimeOperationsException if the managed resource or the * resource type is <code>null</code> or invalid */ public Object getManagedResource() throws InstanceNotFoundException, InvalidTargetObjectTypeException, MBeanException, RuntimeOperationsException { if (resource == null) throw new RuntimeOperationsException (new IllegalArgumentException("Managed resource is null"), "Managed resource is null"); return resource; } /** * Set the instance handle of the object against which we will execute * all methods in this ModelMBean management interface. * * <strike>This method will detect and call "setModelMbean" method. A resource * can implement this method to get a reference to the model mbean. * The reference can be used to send notification and access the * registry. * </strike> The caller can provide the mbean instance or the object name to * the resource, if needed. * * @param resource The resource object to be managed * @param type The type of reference for the managed resource * ("ObjectReference", "Handle", "IOR", "EJBHandle", or * "RMIReference") * * @exception InstanceNotFoundException if the managed resource object * cannot be found * @exception MBeanException if the initializer of the object throws * an exception * @exception RuntimeOperationsException if the managed resource or the * resource type is <code>null</code> or invalid */ public void setManagedResource(Object resource, String type) throws InstanceNotFoundException, MBeanException, RuntimeOperationsException { if (resource == null) throw new RuntimeOperationsException (new IllegalArgumentException("Managed resource is null"), "Managed resource is null"); // if (!"objectreference".equalsIgnoreCase(type)) // throw new InvalidTargetObjectTypeException(type); this.resource = resource; this.resourceType = resource.getClass().getName(); // // Make the resource aware of the model mbean. // try { // Method m=resource.getClass().getMethod("setModelMBean", // new Class[] {ModelMBean.class}); // if( m!= null ) { // m.invoke(resource, new Object[] {this}); // } // } catch( NoSuchMethodException t ) { // // ignore // } catch( Throwable t ) { // log.error( "Can't set model mbean ", t ); // } } // ------------------------------ ModelMBeanNotificationBroadcaster Methods /** * Add an attribute change notification event listener to this MBean. 
* * @param listener Listener that will receive event notifications * @param name Name of the attribute of interest, or <code>null</code> * to indicate interest in all attributes * @param handback Handback object to be sent along with event * notifications * * @exception IllegalArgumentException if the listener parameter is null */ @Override public void addAttributeChangeNotificationListener (NotificationListener listener, String name, Object handback) throws IllegalArgumentException { if (listener == null) throw new IllegalArgumentException("Listener is null"); if (attributeBroadcaster == null) attributeBroadcaster = new BaseNotificationBroadcaster(); if( log.isDebugEnabled() ) log.debug("addAttributeNotificationListener " + listener); BaseAttributeFilter filter = new BaseAttributeFilter(name); attributeBroadcaster.addNotificationListener (listener, filter, handback); } /** * Remove an attribute change notification event listener from * this MBean. * * @param listener The listener to be removed * @param name The attribute name for which no more events are required * * * @exception ListenerNotFoundException if this listener is not * registered in the MBean */ @Override public void removeAttributeChangeNotificationListener (NotificationListener listener, String name) throws ListenerNotFoundException { if (listener == null) throw new IllegalArgumentException("Listener is null"); // FIXME - currently this removes *all* notifications for this listener if (attributeBroadcaster != null) { attributeBroadcaster.removeNotificationListener(listener); } } /** * Remove an attribute change notification event listener from * this MBean. * * @param listener The listener to be removed * @param attributeName The attribute name for which no more events are required * @param handback Handback object to be sent along with event * notifications * * * @exception ListenerNotFoundException if this listener is not * registered in the MBean */ public void removeAttributeChangeNotificationListener (NotificationListener listener, String attributeName, Object handback) throws ListenerNotFoundException { removeAttributeChangeNotificationListener(listener, attributeName); } /** * Send an <code>AttributeChangeNotification</code> to all registered * listeners. * * @param notification The <code>AttributeChangeNotification</code> * that will be passed * * @exception MBeanException if an object initializer throws an * exception * @exception RuntimeOperationsException wraps IllegalArgumentException * when the specified notification is <code>null</code> or invalid */ @Override public void sendAttributeChangeNotification (AttributeChangeNotification notification) throws MBeanException, RuntimeOperationsException { if (notification == null) throw new RuntimeOperationsException (new IllegalArgumentException("Notification is null"), "Notification is null"); if (attributeBroadcaster == null) return; // This means there are no registered listeners if( log.isDebugEnabled() ) log.debug( "AttributeChangeNotification " + notification ); attributeBroadcaster.sendNotification(notification); } /** * Send an <code>AttributeChangeNotification</code> to all registered * listeners. 
* * @param oldValue The original value of the <code>Attribute</code> * @param newValue The new value of the <code>Attribute</code> * * @exception MBeanException if an object initializer throws an * exception * @exception RuntimeOperationsException wraps IllegalArgumentException * when the specified notification is <code>null</code> or invalid */ @Override public void sendAttributeChangeNotification (Attribute oldValue, Attribute newValue) throws MBeanException, RuntimeOperationsException { // Calculate the class name for the change notification String type = null; if (newValue.getValue() != null) type = newValue.getValue().getClass().getName(); else if (oldValue.getValue() != null) type = oldValue.getValue().getClass().getName(); else return; // Old and new are both null == no change AttributeChangeNotification notification = new AttributeChangeNotification (this, 1, System.currentTimeMillis(), "Attribute value has changed", oldValue.getName(), type, oldValue.getValue(), newValue.getValue()); sendAttributeChangeNotification(notification); } /** * Send a <code>Notification</code> to all registered listeners as a * <code>jmx.modelmbean.general</code> notification. * * @param notification The <code>Notification</code> that will be passed * * @exception MBeanException if an object initializer throws an * exception * @exception RuntimeOperationsException wraps IllegalArgumentException * when the specified notification is <code>null</code> or invalid */ @Override public void sendNotification(Notification notification) throws MBeanException, RuntimeOperationsException { if (notification == null) throw new RuntimeOperationsException (new IllegalArgumentException("Notification is null"), "Notification is null"); if (generalBroadcaster == null) return; // This means there are no registered listeners generalBroadcaster.sendNotification(notification); } /** * Send a <code>Notification</code> which contains the specified string * as a <code>jmx.modelmbean.generic</code> notification. * * @param message The message string to be passed * * @exception MBeanException if an object initializer throws an * exception * @exception RuntimeOperationsException wraps IllegalArgumentException * when the specified notification is <code>null</code> or invalid */ @Override public void sendNotification(String message) throws MBeanException, RuntimeOperationsException { if (message == null) throw new RuntimeOperationsException (new IllegalArgumentException("Message is null"), "Message is null"); Notification notification = new Notification ("jmx.modelmbean.generic", this, 1, message); sendNotification(notification); } // ---------------------------------------- NotificationBroadcaster Methods /** * Add a notification event listener to this MBean. 
* * @param listener Listener that will receive event notifications * @param filter Filter object used to filter event notifications * actually delivered, or <code>null</code> for no filtering * @param handback Handback object to be sent along with event * notifications * * @exception IllegalArgumentException if the listener parameter is null */ @Override public void addNotificationListener(NotificationListener listener, NotificationFilter filter, Object handback) throws IllegalArgumentException { if (listener == null) throw new IllegalArgumentException("Listener is null"); if( log.isDebugEnabled() ) log.debug("addNotificationListener " + listener); if (generalBroadcaster == null) generalBroadcaster = new BaseNotificationBroadcaster(); generalBroadcaster.addNotificationListener (listener, filter, handback); // We'll send the attribute change notifications to all listeners ( who care ) // The normal filtering can be used. // The problem is that there is no other way to add attribute change listeners // to a model mbean ( AFAIK ). I suppose the spec should be fixed. if (attributeBroadcaster == null) attributeBroadcaster = new BaseNotificationBroadcaster(); if( log.isDebugEnabled() ) log.debug("addAttributeNotificationListener " + listener); attributeBroadcaster.addNotificationListener (listener, filter, handback); } /** * Return an <code>MBeanNotificationInfo</code> object describing the * notifications sent by this MBean. */ @Override public MBeanNotificationInfo[] getNotificationInfo() { // Acquire the set of application notifications MBeanNotificationInfo current[] = getMBeanInfo().getNotifications(); if (current == null) current = new MBeanNotificationInfo[0]; MBeanNotificationInfo response[] = new MBeanNotificationInfo[current.length + 2]; // Descriptor descriptor = null; // Fill in entry for general notifications // descriptor = new DescriptorSupport // (new String[] { "name=GENERIC", // "descriptorType=notification", // "log=T", // "severity=5", // "displayName=jmx.modelmbean.generic" }); response[0] = new MBeanNotificationInfo (new String[] { "jmx.modelmbean.generic" }, "GENERIC", "Text message notification from the managed resource"); //descriptor); // Fill in entry for attribute change notifications // descriptor = new DescriptorSupport // (new String[] { "name=ATTRIBUTE_CHANGE", // "descriptorType=notification", // "log=T", // "severity=5", // "displayName=jmx.attribute.change" }); response[1] = new MBeanNotificationInfo (new String[] { "jmx.attribute.change" }, "ATTRIBUTE_CHANGE", "Observed MBean attribute value has changed"); //descriptor); // Copy remaining notifications as reported by the application System.arraycopy(current, 0, response, 2, current.length); return (response); } /** * Remove a notification event listener from this MBean. * * @param listener The listener to be removed (any and all registrations * for this listener will be eliminated) * * @exception ListenerNotFoundException if this listener is not * registered in the MBean */ @Override public void removeNotificationListener(NotificationListener listener) throws ListenerNotFoundException { if (listener == null) throw new IllegalArgumentException("Listener is null"); if (generalBroadcaster != null) { generalBroadcaster.removeNotificationListener(listener); } if (attributeBroadcaster != null) { attributeBroadcaster.removeNotificationListener(listener); } } /** * Remove a notification event listener from this MBean. 
* * @param listener The listener to be removed (any and all registrations * for this listener will be eliminated) * @param handback Handback object to be sent along with event * notifications * * @exception ListenerNotFoundException if this listener is not * registered in the MBean */ public void removeNotificationListener(NotificationListener listener, Object handback) throws ListenerNotFoundException { removeNotificationListener(listener); } /** * Remove a notification event listener from this MBean. * * @param listener The listener to be removed (any and all registrations * for this listener will be eliminated) * @param filter Filter object used to filter event notifications * actually delivered, or <code>null</code> for no filtering * @param handback Handback object to be sent along with event * notifications * * @exception ListenerNotFoundException if this listener is not * registered in the MBean */ public void removeNotificationListener(NotificationListener listener, NotificationFilter filter, Object handback) throws ListenerNotFoundException { removeNotificationListener(listener); } // ------------------------------------------------ PersistentMBean Methods // /** // * Instantiates this MBean instance from data found in the persistent // * store. The data loaded could include attribute and operation values. // * This method should be called during construction or initialization // * of the instance, and before the MBean is registered with the // * <code>MBeanServer</code>. // * // * <p><strong>IMPLEMENTATION NOTE</strong> - This implementation does // * not support persistence.</p> // * // * @exception InstanceNotFoundException if the managed resource object // * cannot be found // * @exception MBeanException if the initializer of the object throws // * an exception // * @exception RuntimeOperationsException if an exception is reported // * by the persistence mechanism // */ // public void load() throws InstanceNotFoundException, // MBeanException, RuntimeOperationsException { // // XXX If a context was set, use it to load the data // throw new MBeanException // (new IllegalStateException("Persistence is not supported"), // "Persistence is not supported"); // // } // /** // * Capture the current state of this MBean instance and write it out // * to the persistent store. The state stored could include attribute // * and operation values. If one of these methods of persistence is not // * supported, a "service not found" exception will be thrown. // * // * <p><strong>IMPLEMENTATION NOTE</strong> - This implementation does // * not support persistence.</p> // * // * @exception InstanceNotFoundException if the managed resource object // * cannot be found // * @exception MBeanException if the initializer of the object throws // * an exception, or persistence is not supported // * @exception RuntimeOperationsException if an exception is reported // * by the persistence mechanism // */ // public void store() throws InstanceNotFoundException, // MBeanException, RuntimeOperationsException { // // // XXX if a context was set, use it to store the data // throw new MBeanException // (new IllegalStateException("Persistence is not supported"), // "Persistence is not supported"); // // } // -------------------- BaseModelMBean methods -------------------- // /** Set the type of the mbean. This is used as a key to locate // * the description in the Registry. 
// * // * @param type the type of classname of the modeled object // */ // void setModeledType( String type ) { // initModelInfo(type); // createResource(); // } // /** Set the type of the mbean. This is used as a key to locate // * the description in the Registry. // * // * @param type the type of classname of the modeled object // */ // void initModelInfo( String type ) { // try { // if( log.isDebugEnabled()) // log.debug("setModeledType " + type); // // log.debug( "Set model Info " + type); // if(type==null) { // return; // } // resourceType=type; // //Thread.currentThread().setContextClassLoader(BaseModelMBean.class.getClassLoader()); // Class c=null; // try { // c=Class.forName( type); // } catch( Throwable t ) { // log.debug( "Error creating class " + t); // } // // // The class c doesn't need to exist // ManagedBean descriptor=getRegistry().findManagedBean(c, type); // if( descriptor==null ) // return; // this.setModelMBeanInfo(descriptor.createMBeanInfo()); // } catch( Throwable ex) { // log.error( "TCL: " + Thread.currentThread().getContextClassLoader(), // ex); // } // } // /** Set the type of the mbean. This is used as a key to locate // * the description in the Registry. // */ // protected void createResource() { // try { // //Thread.currentThread().setContextClassLoader(BaseModelMBean.class.getClassLoader()); // Class c=null; // try { // c=Class.forName( resourceType ); // resource = c.newInstance(); // } catch( Throwable t ) { // log.error( "Error creating class " + t); // } // } catch( Throwable ex) { // log.error( "TCL: " + Thread.currentThread().getContextClassLoader(), // ex); // } // } public String getModelerType() { return resourceType; } public String getClassName() { return getModelerType(); } public ObjectName getJmxName() { return oname; } public String getObjectName() { if (oname != null) { return oname.toString(); } else { return null; } } // public void setRegistry(Registry registry) { // this.registry = registry; // } // // public Registry getRegistry() { // // XXX Need a better solution - to avoid the static // if( registry == null ) // registry=Registry.getRegistry(); // // return registry; // } // ------------------------------------------------------ Protected Methods // /** // * Create and return a default <code>ModelMBeanInfo</code> object. // */ // protected ModelMBeanInfo createDefaultModelMBeanInfo() { // // return (new ModelMBeanInfoSupport(this.getClass().getName(), // "Default ModelMBean", // null, null, null, null)); // // } // /** // * Is the specified <code>ModelMBeanInfo</code> instance valid? 
// * // * <p><strong>IMPLEMENTATION NOTE</strong> - This implementation // * does not check anything, but this method can be overridden // * as required.</p> // * // * @param info The <code>ModelMBeanInfo object to check // */ // protected boolean isModelMBeanInfoValid(ModelMBeanInfo info) { // return (true); // } // -------------------- Registration -------------------- // XXX We can add some method patterns here- like setName() and // setDomain() for code that doesn't implement the Registration @Override public ObjectName preRegister(MBeanServer server, ObjectName name) throws Exception { if( log.isDebugEnabled()) log.debug("preRegister " + resource + " " + name ); oname=name; if( resource instanceof MBeanRegistration ) { oname = ((MBeanRegistration)resource).preRegister(server, name ); } return oname; } @Override public void postRegister(Boolean registrationDone) { if( resource instanceof MBeanRegistration ) { ((MBeanRegistration)resource).postRegister(registrationDone); } } @Override public void preDeregister() throws Exception { if( resource instanceof MBeanRegistration ) { ((MBeanRegistration)resource).preDeregister(); } } @Override public void postDeregister() { if( resource instanceof MBeanRegistration ) { ((MBeanRegistration)resource).postDeregister(); } } static class MethodKey { private String name; private String[] signature; MethodKey(String name, String[] signature) { this.name = name; if(signature == null) { signature = new String[0]; } this.signature = signature; } @Override public boolean equals(Object other) { if(!(other instanceof MethodKey)) { return false; } MethodKey omk = (MethodKey)other; if(!name.equals(omk.name)) { return false; } if(signature.length != omk.signature.length) { return false; } for(int i=0; i < signature.length; i++) { if(!signature[i].equals(omk.signature[i])) { return false; } } return true; } @Override public int hashCode() { return name.hashCode(); } } }
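/*
 * Illustrative usage sketch -- NOT part of the Tomcat sources above. It shows how a
 * BaseModelMBean is typically wired together and registered, assuming a ManagedBean
 * descriptor for the resource has already been obtained elsewhere (for example from
 * the modeler Registry, which the commented-out code above refers to). The helper
 * name wrapAndRegister and the Counter resource are hypothetical; the sketch is kept
 * in the same package so it can use the protected constructor.
 */
class BaseModelMBeanUsageSketch {

    /** A trivial managed resource; in real use this is an arbitrary component. */
    public static class Counter {
        private int count;
        public int getCount() { return count; }
        public void setCount(int count) { this.count = count; }
        public void increment() { count++; }
    }

    /**
     * Wraps the given resource in a BaseModelMBean described by managedBean and
     * registers it with the platform MBeanServer under the given object name.
     */
    static javax.management.ObjectName wrapAndRegister(ManagedBean managedBean,
            Object resource, String objectName) throws Exception {
        BaseModelMBean mbean = new BaseModelMBean();
        mbean.setManagedBean(managedBean);                      // metadata: attributes, operations
        mbean.setManagedResource(resource, "ObjectReference");  // the object calls are delegated to
        javax.management.ObjectName oname = new javax.management.ObjectName(objectName);
        java.lang.management.ManagementFactory.getPlatformMBeanServer().registerMBean(mbean, oname);
        // From here on, getAttribute("count") and invoke("increment", ...) are routed
        // to the Counter instance through the getters/operations declared in ManagedBean.
        return oname;
    }
}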
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.ui.laf; import com.intellij.CommonBundle; import com.intellij.icons.AllIcons; import com.intellij.ide.IdeBundle; import com.intellij.ide.WelcomeWizardUtil; import com.intellij.ide.ui.LafManager; import com.intellij.ide.ui.LafManagerListener; import com.intellij.ide.ui.UISettings; import com.intellij.ide.ui.laf.darcula.DarculaInstaller; import com.intellij.ide.ui.laf.darcula.DarculaLaf; import com.intellij.ide.ui.laf.darcula.DarculaLookAndFeelInfo; import com.intellij.notification.Notification; import com.intellij.notification.NotificationListener; import com.intellij.notification.NotificationType; import com.intellij.notification.Notifications; import com.intellij.openapi.components.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.ui.JBPopupMenu; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.popup.util.PopupUtil; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.wm.ToolWindow; import com.intellij.openapi.wm.ToolWindowManager; import com.intellij.ui.JBColor; import com.intellij.ui.ScreenUtil; import com.intellij.ui.content.Content; import com.intellij.ui.mac.MacPopupMenuUI; import com.intellij.ui.popup.OurHeavyWeightPopup; import com.intellij.util.IJSwingUtilities; import com.intellij.util.ObjectUtils; import com.intellij.util.PlatformUtils; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import sun.security.action.GetPropertyAction; import javax.swing.*; import javax.swing.event.EventListenerList; import javax.swing.plaf.ColorUIResource; import javax.swing.plaf.DimensionUIResource; import javax.swing.plaf.FontUIResource; import javax.swing.plaf.InsetsUIResource; import javax.swing.plaf.metal.DefaultMetalTheme; import javax.swing.plaf.metal.MetalLookAndFeel; import javax.swing.plaf.synth.Region; import javax.swing.plaf.synth.SynthLookAndFeel; import javax.swing.plaf.synth.SynthStyle; import javax.swing.plaf.synth.SynthStyleFactory; import javax.swing.text.DefaultEditorKit; import java.awt.*; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.security.AccessController; import java.util.*; import java.util.List; @State( name = "LafManager", storages = { @Storage(file = StoragePathMacros.APP_CONFIG + "/laf.xml", roamingType = 
RoamingType.PER_PLATFORM), @Storage(file = StoragePathMacros.APP_CONFIG + "/options.xml", deprecated = true) } ) public final class LafManagerImpl extends LafManager implements ApplicationComponent, PersistentStateComponent<Element> { private static final Logger LOG = Logger.getInstance("#com.intellij.ide.ui.LafManager"); @NonNls private static final String ELEMENT_LAF = "laf"; @NonNls private static final String ATTRIBUTE_CLASS_NAME = "class-name"; @NonNls private static final String GNOME_THEME_PROPERTY_NAME = "gnome.Net/ThemeName"; @NonNls private static final String[] ourPatchableFontResources = {"Button.font", "ToggleButton.font", "RadioButton.font", "CheckBox.font", "ColorChooser.font", "ComboBox.font", "Label.font", "List.font", "MenuBar.font", "MenuItem.font", "MenuItem.acceleratorFont", "RadioButtonMenuItem.font", "CheckBoxMenuItem.font", "Menu.font", "PopupMenu.font", "OptionPane.font", "Panel.font", "ProgressBar.font", "ScrollPane.font", "Viewport.font", "TabbedPane.font", "Table.font", "TableHeader.font", "TextField.font", "PasswordField.font", "TextArea.font", "TextPane.font", "EditorPane.font", "TitledBorder.font", "ToolBar.font", "ToolTip.font", "Tree.font"}; @NonNls private static final String[] ourFileChooserTextKeys = {"FileChooser.viewMenuLabelText", "FileChooser.newFolderActionLabelText", "FileChooser.listViewActionLabelText", "FileChooser.detailsViewActionLabelText", "FileChooser.refreshActionLabelText"}; private static final String[] ourAlloyComponentsToPatchSelection = {"Tree", "MenuItem", "Menu", "List", "ComboBox", "Table", "TextArea", "EditorPane", "TextPane", "FormattedTextField", "PasswordField", "TextField", "RadioButtonMenuItem", "CheckBoxMenuItem"}; private final EventListenerList myListenerList; private final UIManager.LookAndFeelInfo[] myLaFs; private UIManager.LookAndFeelInfo myCurrentLaf; private final Map<UIManager.LookAndFeelInfo, HashMap<String, Object>> myStoredDefaults = ContainerUtil.newHashMap(); private String myLastWarning = null; private PropertyChangeListener myThemeChangeListener = null; private static final Map<String, String> ourLafClassesAliases = ContainerUtil.newHashMap(); static { ourLafClassesAliases.put("idea.dark.laf.classname", DarculaLookAndFeelInfo.CLASS_NAME); } /** * Invoked via reflection. */ LafManagerImpl() { myListenerList = new EventListenerList(); List<UIManager.LookAndFeelInfo> lafList = ContainerUtil.newArrayList(); if (SystemInfo.isMac) { lafList.add(new UIManager.LookAndFeelInfo("Default", UIManager.getSystemLookAndFeelClassName())); if (Registry.is("ide.mac.yosemite.laf") && isIntelliJLafEnabled()) { lafList.add(new IntelliJLookAndFeelInfo()); } } else { if (isIntelliJLafEnabled()) { lafList.add(new IntelliJLookAndFeelInfo()); } else { lafList.add(new IdeaLookAndFeelInfo()); } for (UIManager.LookAndFeelInfo laf : UIManager.getInstalledLookAndFeels()) { String name = laf.getName(); if (!"Metal".equalsIgnoreCase(name) && !"CDE/Motif".equalsIgnoreCase(name) && !"Nimbus".equalsIgnoreCase(name) && !"Windows Classic".equalsIgnoreCase(name) && !name.startsWith("JGoodies")) { lafList.add(laf); } } } if (Registry.is("dark.laf.available")) { lafList.add(new DarculaLookAndFeelInfo()); } myLaFs = lafList.toArray(new UIManager.LookAndFeelInfo[lafList.size()]); if (!SystemInfo.isMac) { // do not sort LaFs on mac - the order is determined as Default, Darcula. 
// when we leave only system LaFs on other OSes, the order also should be determined as Default, Darcula Arrays.sort(myLaFs, new Comparator<UIManager.LookAndFeelInfo>() { @Override public int compare(UIManager.LookAndFeelInfo obj1, UIManager.LookAndFeelInfo obj2) { String name1 = obj1.getName(); String name2 = obj2.getName(); return name1.compareToIgnoreCase(name2); } }); } myCurrentLaf = getDefaultLaf(); } private static boolean isIntelliJLafEnabled() { return !Registry.is("idea.4.5.laf.enabled"); } /** * Adds specified listener */ @Override public void addLafManagerListener(@NotNull final LafManagerListener l) { myListenerList.add(LafManagerListener.class, l); } /** * Removes specified listener */ @Override public void removeLafManagerListener(@NotNull final LafManagerListener l) { myListenerList.remove(LafManagerListener.class, l); } private void fireLookAndFeelChanged() { LafManagerListener[] listeners = myListenerList.getListeners(LafManagerListener.class); for (LafManagerListener listener : listeners) { listener.lookAndFeelChanged(this); } } @Override @NotNull public String getComponentName() { return "LafManager"; } @Override public void initComponent() { if (myCurrentLaf != null) { final UIManager.LookAndFeelInfo laf = findLaf(myCurrentLaf.getClassName()); if (laf != null) { boolean needUninstall = UIUtil.isUnderDarcula(); setCurrentLookAndFeel(laf); // setup default LAF or one specified by readExternal. if (WelcomeWizardUtil.getWizardLAF() != null) { if (UIUtil.isUnderDarcula()) { DarculaInstaller.install(); } else if (needUninstall) { DarculaInstaller.uninstall(); } } } } updateUI(); if (SystemInfo.isXWindow) { myThemeChangeListener = new PropertyChangeListener() { @Override public void propertyChange(final PropertyChangeEvent evt) { //noinspection SSBasedInspection SwingUtilities.invokeLater(new Runnable() { @Override public void run() { fixGtkPopupStyle(); patchGtkDefaults(UIManager.getLookAndFeelDefaults()); } }); } }; Toolkit.getDefaultToolkit().addPropertyChangeListener(GNOME_THEME_PROPERTY_NAME, myThemeChangeListener); } } @Override public void disposeComponent() { if (myThemeChangeListener != null) { Toolkit.getDefaultToolkit().removePropertyChangeListener(GNOME_THEME_PROPERTY_NAME, myThemeChangeListener); myThemeChangeListener = null; } } @Override public void loadState(final Element element) { String className = null; Element lafElement = element.getChild(ELEMENT_LAF); if (lafElement != null) { className = lafElement.getAttributeValue(ATTRIBUTE_CLASS_NAME); if (className != null && ourLafClassesAliases.containsKey(className)) { className = ourLafClassesAliases.get(className); } } UIManager.LookAndFeelInfo laf = findLaf(className); // If LAF is undefined (wrong class name or something else) we have set default LAF anyway. 
if (laf == null) { laf = getDefaultLaf(); } if (myCurrentLaf != null && !laf.getClassName().equals(myCurrentLaf.getClassName())) { setCurrentLookAndFeel(laf); updateUI(); } myCurrentLaf = laf; } @Override public Element getState() { Element element = new Element("state"); if (myCurrentLaf != null) { String className = myCurrentLaf.getClassName(); if (className != null) { Element child = new Element(ELEMENT_LAF); child.setAttribute(ATTRIBUTE_CLASS_NAME, className); element.addContent(child); } } return element; } @Override public UIManager.LookAndFeelInfo[] getInstalledLookAndFeels() { return myLaFs.clone(); } @Override public UIManager.LookAndFeelInfo getCurrentLookAndFeel() { return myCurrentLaf; } /** * @return default LookAndFeelInfo for the running OS. For Win32 and * Linux the method returns Alloy LAF or IDEA LAF if first not found, for Mac OS X it returns Aqua * RubyMine uses Native L&F for linux as well */ private UIManager.LookAndFeelInfo getDefaultLaf() { if (WelcomeWizardUtil.getWizardLAF() != null) { UIManager.LookAndFeelInfo laf = findLaf(WelcomeWizardUtil.getWizardLAF()); LOG.assertTrue(laf != null); return laf; } final String systemLafClassName = UIManager.getSystemLookAndFeelClassName(); if (SystemInfo.isMac) { UIManager.LookAndFeelInfo laf = findLaf(systemLafClassName); LOG.assertTrue(laf != null); return laf; } if (PlatformUtils.isRubyMine() || PlatformUtils.isPyCharm()) { final String desktop = AccessController.doPrivileged(new GetPropertyAction("sun.desktop")); if ("gnome".equals(desktop)) { UIManager.LookAndFeelInfo laf = findLaf(systemLafClassName); if (laf != null) { return laf; } LOG.info("Could not find system look and feel: " + systemLafClassName); } } // Default final String defaultLafName = WelcomeWizardUtil.getDefaultLAF(); if (defaultLafName != null) { UIManager.LookAndFeelInfo defaultLaf = findLaf(defaultLafName); if (defaultLaf != null) { return defaultLaf; } } UIManager.LookAndFeelInfo ideaLaf = findLaf(isIntelliJLafEnabled() ? IntelliJLaf.class.getName() : IdeaLookAndFeelInfo.CLASS_NAME); if (ideaLaf != null) { return ideaLaf; } throw new IllegalStateException("No default look&feel found"); } /** * Finds LAF by its class name. * will be returned. */ @Nullable private UIManager.LookAndFeelInfo findLaf(@Nullable String className) { if (className == null) { return null; } for (UIManager.LookAndFeelInfo laf : myLaFs) { if (Comparing.equal(laf.getClassName(), className)) { return laf; } } return null; } /** * Sets current LAF. The method doesn't update component hierarchy. 
*/ @Override public void setCurrentLookAndFeel(UIManager.LookAndFeelInfo lookAndFeelInfo) { if (findLaf(lookAndFeelInfo.getClassName()) == null) { LOG.error("unknown LookAndFeel : " + lookAndFeelInfo); return; } // Set L&F if (IdeaLookAndFeelInfo.CLASS_NAME.equals(lookAndFeelInfo.getClassName())) { // that is IDEA default LAF IdeaLaf laf = new IdeaLaf(); MetalLookAndFeel.setCurrentTheme(new IdeaBlueMetalTheme()); try { UIManager.setLookAndFeel(laf); } catch (Exception e) { Messages.showMessageDialog( IdeBundle.message("error.cannot.set.look.and.feel", lookAndFeelInfo.getName(), e.getMessage()), CommonBundle.getErrorTitle(), Messages.getErrorIcon() ); return; } } else if (DarculaLookAndFeelInfo.CLASS_NAME.equals(lookAndFeelInfo.getClassName())) { DarculaLaf laf = new DarculaLaf(); try { UIManager.setLookAndFeel(laf); JBColor.setDark(true); IconLoader.setUseDarkIcons(true); } catch (Exception e) { Messages.showMessageDialog( IdeBundle.message("error.cannot.set.look.and.feel", lookAndFeelInfo.getName(), e.getMessage()), CommonBundle.getErrorTitle(), Messages.getErrorIcon() ); return; } } else { // non default LAF try { LookAndFeel laf = ((LookAndFeel)Class.forName(lookAndFeelInfo.getClassName()).newInstance()); if (laf instanceof MetalLookAndFeel) { MetalLookAndFeel.setCurrentTheme(new DefaultMetalTheme()); } UIManager.setLookAndFeel(laf); } catch (Exception e) { Messages.showMessageDialog( IdeBundle.message("error.cannot.set.look.and.feel", lookAndFeelInfo.getName(), e.getMessage()), CommonBundle.getErrorTitle(), Messages.getErrorIcon() ); return; } } myCurrentLaf = ObjectUtils.chooseNotNull(findLaf(lookAndFeelInfo.getClassName()), lookAndFeelInfo); checkLookAndFeel(lookAndFeelInfo, false); } public void setLookAndFeelAfterRestart(UIManager.LookAndFeelInfo lookAndFeelInfo) { myCurrentLaf = lookAndFeelInfo; } @Nullable private static Icon getAquaMenuDisabledIcon() { final Icon arrowIcon = (Icon)UIManager.get("Menu.arrowIcon"); if (arrowIcon != null) { return IconLoader.getDisabledIcon(arrowIcon); } return null; } @Nullable private static Icon getAquaMenuInvertedIcon() { if (!UIUtil.isUnderAquaLookAndFeel()) return null; final Icon arrow = (Icon)UIManager.get("Menu.arrowIcon"); if (arrow == null) return null; try { final Method method = arrow.getClass().getMethod("getInvertedIcon"); if (method != null) { method.setAccessible(true); return (Icon)method.invoke(arrow); } return null; } catch (NoSuchMethodException e1) { return null; } catch (InvocationTargetException e1) { return null; } catch (IllegalAccessException e1) { return null; } } @Override public boolean checkLookAndFeel(UIManager.LookAndFeelInfo lookAndFeelInfo) { return checkLookAndFeel(lookAndFeelInfo, true); } private boolean checkLookAndFeel(final UIManager.LookAndFeelInfo lafInfo, final boolean confirm) { String message = null; if (lafInfo.getName().contains("GTK") && SystemInfo.isXWindow && !SystemInfo.isJavaVersionAtLeast("1.6.0_12")) { message = IdeBundle.message("warning.problem.laf.1"); } if (message != null) { if (confirm) { final String[] options = {IdeBundle.message("confirm.set.look.and.feel"), CommonBundle.getCancelButtonText()}; final int result = Messages.showOkCancelDialog(message, CommonBundle.getWarningTitle(), options[0], options[1], Messages.getWarningIcon()); if (result == Messages.OK) { myLastWarning = message; return true; } return false; } if (!message.equals(myLastWarning)) { Notifications.Bus.notify(new Notification(Notifications.SYSTEM_MESSAGES_GROUP_ID, "L&F Manager", message, NotificationType.WARNING, 
NotificationListener.URL_OPENING_LISTENER)); myLastWarning = message; } } return true; } /** * Updates LAF of all windows. The method also updates font of components * as it's configured in <code>UISettings</code>. */ @Override public void updateUI() { final UIDefaults uiDefaults = UIManager.getLookAndFeelDefaults(); fixPopupWeight(); fixGtkPopupStyle(); fixTreeWideSelection(uiDefaults); fixMenuIssues(uiDefaults); if (UIUtil.isUnderAquaLookAndFeel()) { uiDefaults.put("Panel.opaque", Boolean.TRUE); } else if (UIUtil.isWinLafOnVista()) { uiDefaults.put("ComboBox.border", null); } initInputMapDefaults(uiDefaults); uiDefaults.put("Button.defaultButtonFollowsFocus", Boolean.FALSE); patchFileChooserStrings(uiDefaults); patchLafFonts(uiDefaults); patchHiDPI(uiDefaults); patchGtkDefaults(uiDefaults); fixSeparatorColor(uiDefaults); updateToolWindows(); for (Frame frame : Frame.getFrames()) { // OSX/Aqua fix: Some image caching components like ToolWindowHeader use // com.apple.laf.AquaNativeResources$CColorPaintUIResource // a Java wrapper for ObjC MagicBackgroundColor class (Java RGB values ignored). // MagicBackgroundColor always reports current Frame background. // So we need to set frames background to exact and correct value. if (SystemInfo.isMac) { //noinspection UseJBColor frame.setBackground(new Color(UIUtil.getPanelBackground().getRGB())); } updateUI(frame); } fireLookAndFeelChanged(); } private static void patchHiDPI(UIDefaults defaults) { if (!JBUI.isHiDPI()) return; List<String> myIntKeys = Arrays.asList("Tree.leftChildIndent", "Tree.rightChildIndent"); List<String> patched = new ArrayList<String>(); for (Map.Entry<Object, Object> entry : defaults.entrySet()) { Object value = entry.getValue(); String key = entry.getKey().toString(); if (value instanceof DimensionUIResource) { entry.setValue(JBUI.size((DimensionUIResource)value).asUIResource()); } else if (value instanceof InsetsUIResource) { entry.setValue(JBUI.insets(((InsetsUIResource)value)).asUIResource()); } else if (value instanceof Integer) { if (key.endsWith(".maxGutterIconWidth") || myIntKeys.contains(key)) { if (!"true".equals(defaults.get(key +".hidpi.patched"))) { entry.setValue(Integer.valueOf(JBUI.scale((Integer)value))); patched.add(key); } } } } for (String key : patched) { defaults.put(key + ".hidpi.patched", "true"); } } public static void updateToolWindows() { for (Project project : ProjectManager.getInstance().getOpenProjects()) { final ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(project); for (String id : toolWindowManager.getToolWindowIds()) { final ToolWindow toolWindow = toolWindowManager.getToolWindow(id); for (Content content : toolWindow.getContentManager().getContents()) { final JComponent component = content.getComponent(); if (component != null) { IJSwingUtilities.updateComponentTreeUI(component); } } final JComponent c = toolWindow.getComponent(); if (c != null) { IJSwingUtilities.updateComponentTreeUI(c); } } } } private static void fixMenuIssues(UIDefaults uiDefaults) { if (UIUtil.isUnderAquaLookAndFeel()) { // update ui for popup menu to get round corners uiDefaults.put("PopupMenuUI", MacPopupMenuUI.class.getCanonicalName()); uiDefaults.put("Menu.invertedArrowIcon", getAquaMenuInvertedIcon()); uiDefaults.put("Menu.disabledArrowIcon", getAquaMenuDisabledIcon()); } else if (UIUtil.isUnderJGoodiesLookAndFeel()) { uiDefaults.put("Menu.opaque", true); uiDefaults.put("MenuItem.opaque", true); } uiDefaults.put("MenuItem.background", UIManager.getColor("Menu.background")); } private 
static void fixTreeWideSelection(UIDefaults uiDefaults) { if (UIUtil.isUnderAlloyIDEALookAndFeel() || UIUtil.isUnderJGoodiesLookAndFeel()) { final Color bg = new ColorUIResource(56, 117, 215); final Color fg = new ColorUIResource(255, 255, 255); uiDefaults.put("info", bg); uiDefaults.put("textHighlight", bg); for (String key : ourAlloyComponentsToPatchSelection) { uiDefaults.put(key + ".selectionBackground", bg); uiDefaults.put(key + ".selectionForeground", fg); } } } private static void fixSeparatorColor(UIDefaults uiDefaults) { if (UIUtil.isUnderAquaLookAndFeel()) { uiDefaults.put("Separator.background", UIUtil.AQUA_SEPARATOR_BACKGROUND_COLOR); uiDefaults.put("Separator.foreground", UIUtil.AQUA_SEPARATOR_FOREGROUND_COLOR); } } /** * The following code is a trick! By default Swing uses lightweight and "medium" weight * popups to show JPopupMenu. The code below force the creation of real heavyweight menus - * this increases speed of popups and allows to get rid of some drawing artifacts. */ private static void fixPopupWeight() { int popupWeight = OurPopupFactory.WEIGHT_MEDIUM; String property = System.getProperty("idea.popup.weight"); if (property != null) property = property.toLowerCase(Locale.ENGLISH).trim(); if (SystemInfo.isMacOSLeopard) { // force heavy weight popups under Leopard, otherwise they don't have shadow or any kind of border. popupWeight = OurPopupFactory.WEIGHT_HEAVY; } else if (property == null) { // use defaults if popup weight isn't specified if (SystemInfo.isWindows) { popupWeight = OurPopupFactory.WEIGHT_HEAVY; } } else { if ("light".equals(property)) { popupWeight = OurPopupFactory.WEIGHT_LIGHT; } else if ("medium".equals(property)) { popupWeight = OurPopupFactory.WEIGHT_MEDIUM; } else if ("heavy".equals(property)) { popupWeight = OurPopupFactory.WEIGHT_HEAVY; } else { LOG.error("Illegal value of property \"idea.popup.weight\": " + property); } } PopupFactory factory = PopupFactory.getSharedInstance(); if (!(factory instanceof OurPopupFactory)) { factory = new OurPopupFactory(factory); PopupFactory.setSharedInstance(factory); } PopupUtil.setPopupType(factory, popupWeight); } private static void fixGtkPopupStyle() { if (!UIUtil.isUnderGTKLookAndFeel()) return; final SynthStyleFactory original = SynthLookAndFeel.getStyleFactory(); SynthLookAndFeel.setStyleFactory(new SynthStyleFactory() { @Override public SynthStyle getStyle(final JComponent c, final Region id) { final SynthStyle style = original.getStyle(c, id); if (id == Region.POPUP_MENU) { try { Field f = style.getClass().getDeclaredField("xThickness"); f.setAccessible(true); final Object x = f.get(style); if (x instanceof Integer && (Integer)x == 0) { // workaround for Sun bug #6636964 f.set(style, 1); f = style.getClass().getDeclaredField("yThickness"); f.setAccessible(true); f.set(style, 3); } } catch (Exception ignore) { } } return style; } }); new JBPopupMenu(); // invokes updateUI() -> updateStyle() SynthLookAndFeel.setStyleFactory(original); } private static void patchFileChooserStrings(final UIDefaults defaults) { if (!defaults.containsKey(ourFileChooserTextKeys[0])) { // Alloy L&F does not define strings for names of context menu actions, so we have to patch them in here for (String key : ourFileChooserTextKeys) { defaults.put(key, IdeBundle.message(key)); } } } private static void patchGtkDefaults(UIDefaults defaults) { if (!UIUtil.isUnderGTKLookAndFeel()) return; Map<String, Icon> map = ContainerUtil.newHashMap( Arrays.asList("OptionPane.errorIcon", "OptionPane.informationIcon", "OptionPane.warningIcon", 
"OptionPane.questionIcon"), Arrays.asList(AllIcons.General.ErrorDialog, AllIcons.General.InformationDialog, AllIcons.General.WarningDialog, AllIcons.General.QuestionDialog)); // GTK+ L&F keeps icons hidden in style SynthStyle style = SynthLookAndFeel.getStyle(new JOptionPane(""), Region.DESKTOP_ICON); for (String key : map.keySet()) { if (defaults.get(key) != null) continue; Object icon = style == null ? null : style.get(null, key); defaults.put(key, icon instanceof Icon ? icon : map.get(key)); } Color fg = defaults.getColor("Label.foreground"); Color bg = defaults.getColor("Label.background"); if (fg != null && bg != null) { defaults.put("Label.disabledForeground", UIUtil.mix(fg, bg, 0.5)); } } private void patchLafFonts(UIDefaults uiDefaults) { //if (JBUI.isHiDPI()) { // HashMap<Object, Font> newFonts = new HashMap<Object, Font>(); // for (Object key : uiDefaults.keySet().toArray()) { // Object val = uiDefaults.get(key); // if (val instanceof Font) { // newFonts.put(key, JBFont.create((Font)val)); // } // } // for (Map.Entry<Object, Font> entry : newFonts.entrySet()) { // uiDefaults.put(entry.getKey(), entry.getValue()); // } //} else UISettings uiSettings = UISettings.getInstance(); if (uiSettings.OVERRIDE_NONIDEA_LAF_FONTS) { storeOriginalFontDefaults(uiDefaults); initFontDefaults(uiDefaults, uiSettings.FONT_FACE, uiSettings.FONT_SIZE); } else { restoreOriginalFontDefaults(uiDefaults); } } private void restoreOriginalFontDefaults(UIDefaults defaults) { UIManager.LookAndFeelInfo lf = getCurrentLookAndFeel(); HashMap<String, Object> lfDefaults = myStoredDefaults.get(lf); if (lfDefaults != null) { for (String resource : ourPatchableFontResources) { defaults.put(resource, lfDefaults.get(resource)); } } } private void storeOriginalFontDefaults(UIDefaults defaults) { UIManager.LookAndFeelInfo lf = getCurrentLookAndFeel(); HashMap<String, Object> lfDefaults = myStoredDefaults.get(lf); if (lfDefaults == null) { lfDefaults = new HashMap<String, Object>(); for (String resource : ourPatchableFontResources) { lfDefaults.put(resource, defaults.get(resource)); } myStoredDefaults.put(lf, lfDefaults); } } private static void updateUI(Window window) { if (!window.isDisplayable()) { return; } IJSwingUtilities.updateComponentTreeUI(window); Window[] children = window.getOwnedWindows(); for (Window aChildren : children) { updateUI(aChildren); } } /** * Repaints all displayable window. */ @Override public void repaintUI() { Frame[] frames = Frame.getFrames(); for (Frame frame : frames) { repaintUI(frame); } } private static void repaintUI(Window window) { if (!window.isDisplayable()) { return; } window.repaint(); Window[] children = window.getOwnedWindows(); for (Window aChildren : children) { repaintUI(aChildren); } } private static void installCutCopyPasteShortcuts(InputMap inputMap, boolean useSimpleActionKeys) { String copyActionKey = useSimpleActionKeys ? "copy" : DefaultEditorKit.copyAction; String pasteActionKey = useSimpleActionKeys ? "paste" : DefaultEditorKit.pasteAction; String cutActionKey = useSimpleActionKeys ? 
"cut" : DefaultEditorKit.cutAction; // Ctrl+Ins, Shift+Ins, Shift+Del inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_INSERT, InputEvent.CTRL_MASK | InputEvent.CTRL_DOWN_MASK), copyActionKey); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_INSERT, InputEvent.SHIFT_MASK | InputEvent.SHIFT_DOWN_MASK), pasteActionKey); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, InputEvent.SHIFT_MASK | InputEvent.SHIFT_DOWN_MASK), cutActionKey); // Ctrl+C, Ctrl+V, Ctrl+X inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_C, InputEvent.CTRL_MASK | InputEvent.CTRL_DOWN_MASK), copyActionKey); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_V, InputEvent.CTRL_MASK | InputEvent.CTRL_DOWN_MASK), pasteActionKey); inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_X, InputEvent.CTRL_MASK | InputEvent.CTRL_DOWN_MASK), DefaultEditorKit.cutAction); } @SuppressWarnings({"HardCodedStringLiteral"}) public static void initInputMapDefaults(UIDefaults defaults) { // Make ENTER work in JTrees InputMap treeInputMap = (InputMap)defaults.get("Tree.focusInputMap"); if (treeInputMap != null) { // it's really possible. For example, GTK+ doesn't have such map treeInputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), "toggle"); } // Cut/Copy/Paste in JTextAreas InputMap textAreaInputMap = (InputMap)defaults.get("TextArea.focusInputMap"); if (textAreaInputMap != null) { // It really can be null, for example when LAF isn't properly initialized (Alloy license problem) installCutCopyPasteShortcuts(textAreaInputMap, false); } // Cut/Copy/Paste in JTextFields InputMap textFieldInputMap = (InputMap)defaults.get("TextField.focusInputMap"); if (textFieldInputMap != null) { // It really can be null, for example when LAF isn't properly initialized (Alloy license problem) installCutCopyPasteShortcuts(textFieldInputMap, false); } // Cut/Copy/Paste in JPasswordField InputMap passwordFieldInputMap = (InputMap)defaults.get("PasswordField.focusInputMap"); if (passwordFieldInputMap != null) { // It really can be null, for example when LAF isn't properly initialized (Alloy license problem) installCutCopyPasteShortcuts(passwordFieldInputMap, false); } // Cut/Copy/Paste in JTables InputMap tableInputMap = (InputMap)defaults.get("Table.ancestorInputMap"); if (tableInputMap != null) { // It really can be null, for example when LAF isn't properly initialized (Alloy license problem) installCutCopyPasteShortcuts(tableInputMap, true); } } @SuppressWarnings({"HardCodedStringLiteral"}) static void initFontDefaults(UIDefaults defaults, String fontFace, int fontSize) { defaults.put("Tree.ancestorInputMap", null); FontUIResource uiFont = new FontUIResource(fontFace, Font.PLAIN, fontSize); FontUIResource textFont = new FontUIResource("Serif", Font.PLAIN, fontSize); FontUIResource monoFont = new FontUIResource("Monospaced", Font.PLAIN, fontSize); for (String fontResource : ourPatchableFontResources) { defaults.put(fontResource, uiFont); } defaults.put("PasswordField.font", monoFont); defaults.put("TextArea.font", monoFont); defaults.put("TextPane.font", textFont); defaults.put("EditorPane.font", textFont); } private static class OurPopupFactory extends PopupFactory { public static final int WEIGHT_LIGHT = 0; public static final int WEIGHT_MEDIUM = 1; public static final int WEIGHT_HEAVY = 2; private final PopupFactory myDelegate; public OurPopupFactory(final PopupFactory delegate) { myDelegate = delegate; } @Override public Popup getPopup(final Component owner, final Component contents, final int x, final int y) throws IllegalArgumentException { final 
Point point = fixPopupLocation(contents, x, y); final int popupType = UIUtil.isUnderGTKLookAndFeel() ? WEIGHT_HEAVY : PopupUtil.getPopupType(this); if (popupType == WEIGHT_HEAVY && OurHeavyWeightPopup.isEnabled()) { return new OurHeavyWeightPopup(owner, contents, point.x, point.y); } if (popupType >= 0) { PopupUtil.setPopupType(myDelegate, popupType); } final Popup popup = myDelegate.getPopup(owner, contents, point.x, point.y); fixPopupSize(popup, contents); return popup; } private static Point fixPopupLocation(final Component contents, final int x, final int y) { if (!(contents instanceof JToolTip)) return new Point(x, y); final PointerInfo info; try { info = MouseInfo.getPointerInfo(); } catch (InternalError e) { // http://www.jetbrains.net/jira/browse/IDEADEV-21390 // may happen under Mac OSX 10.5 return new Point(x, y); } int deltaY = 0; if (info != null) { final Point mouse = info.getLocation(); deltaY = mouse.y - y; } final Dimension size = contents.getPreferredSize(); final Rectangle rec = new Rectangle(new Point(x, y), size); ScreenUtil.moveRectangleToFitTheScreen(rec); if (rec.y < y) { rec.y += deltaY; } return rec.getLocation(); } private static void fixPopupSize(final Popup popup, final Component contents) { if (!UIUtil.isUnderGTKLookAndFeel() || !(contents instanceof JPopupMenu)) return; for (Class<?> aClass = popup.getClass(); aClass != null && Popup.class.isAssignableFrom(aClass); aClass = aClass.getSuperclass()) { try { final Method getComponent = aClass.getDeclaredMethod("getComponent"); getComponent.setAccessible(true); final Object component = getComponent.invoke(popup); if (component instanceof JWindow) { ((JWindow)component).setSize(new Dimension(0, 0)); } break; } catch (Exception ignored) { } } } } }
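/*
 * Illustrative sketch (not part of the original sources): it demonstrates the
 * same UIDefaults-patching technique used by initFontDefaults() and
 * installCutCopyPasteShortcuts() above -- overriding look-and-feel font
 * resources and adding a Ctrl+Insert copy shortcut. The class name and the
 * chosen keys/fonts are example values only; a delegating popup factory such
 * as OurPopupFactory above would typically be installed separately via
 * PopupFactory.setSharedInstance(...).
 */
import java.awt.Font;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import javax.swing.InputMap;
import javax.swing.KeyStroke;
import javax.swing.UIDefaults;
import javax.swing.UIManager;
import javax.swing.plaf.FontUIResource;
import javax.swing.text.DefaultEditorKit;

class UiDefaultsPatchDemo {
    public static void main(String[] args) {
        UIDefaults defaults = UIManager.getDefaults();

        // Override a few well-known font keys, mirroring initFontDefaults().
        FontUIResource uiFont = new FontUIResource("Dialog", Font.PLAIN, 13);
        for (String key : new String[] {"Label.font", "Button.font", "Menu.font"}) {
            defaults.put(key, uiFont);
        }

        // Add Ctrl+Insert as an extra copy shortcut, mirroring installCutCopyPasteShortcuts().
        InputMap textFieldMap = (InputMap) defaults.get("TextField.focusInputMap");
        if (textFieldMap != null) { // can be null for some LAFs, e.g. GTK+
            textFieldMap.put(
                KeyStroke.getKeyStroke(KeyEvent.VK_INSERT, InputEvent.CTRL_DOWN_MASK),
                DefaultEditorKit.copyAction);
        }
    }
}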
/*******************************************************************************
 * Copyright 2015-2021 Sergey Karpushin
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 ******************************************************************************/
package org.summerb.dbupgrade.utils;

import com.google.common.base.Preconditions;

/**
 * Neither the standard Java tokenizer nor Apache Commons Text offers a simple
 * way to search for multi-character delimiters and also return the delimiters
 * themselves as tokens.
 *
 * The initial implementation of this class was easy to follow but very slow.
 * This version is optimized for speed, which unavoidably makes it a bit harder
 * to comprehend.
 *
 * Also, {@link String#substring(int, int)} copies the underlying characters
 * since Java 7, so this class works with CharSequences instead so that the
 * original String is never copied. This affects several aspects, including
 * client code, which now has to work with {@link SubString}, but the
 * performance gain over the initial implementation is substantial.
 *
 * GC usage could be reduced even further by not creating new {@link SubString}
 * instances, but that would put too much burden on client code.
 *
 * @author sergeyk
 */
public class StringTokenizer {
	private final String subject;
	private SubString[] delim;
	/**
	 * A String copy of each delimiter, for faster searching within the subject
	 * string
	 */
	private String[] delimStr;
	/**
	 * Current position in the string
	 */
	private int pos = 0;
	/**
	 * Holds the index of each known delimiter
	 */
	private int[] delimPos;
	private int delimLength;
	private int lastSelectedDelimeterIndex = -1;

	public StringTokenizer(String stringToTokenize, SubString...
delimeters) { this.subject = stringToTokenize; this.delim = delimeters; this.delimLength = delimeters.length; this.delimStr = new String[delimLength]; for (int i = 0; i < delimLength; i++) { delimStr[i] = new String(new StringBuilder(delim[i]).toString()); } } /** * @return true if initialization happened for the first time */ private boolean initDelimPos() { if (delimPos != null) { return false; } delimPos = new int[delimLength]; for (int i = 0; i < delimLength; i++) { delimPos[i] = subject.indexOf(delimStr[i]); } // trim those that are not present in the string anymore int i = 0; while (delimLength > 0 && i < delimLength) { if (delimPos[i] == -1) { delimNoLongerPresent(i); } else { i++; } } if (delimLength > 0) { lastSelectedDelimeterIndex = indexOfNearestDelimeter(delimPos); } return true; } private void delimNoLongerPresent(int i) { Preconditions.checkState(delimLength > 0, "Algorithm error - how come we're trying to delete delimeter if delimeters array is empty?"); if (delimLength == 1) { delimLength = 0; return; } delim[i] = delim[delimLength - 1]; delimStr[i] = delimStr[delimLength - 1]; delimPos[i] = delimPos[delimLength - 1]; delimLength--; } /** * Updates delimeter positions if needed * * @return index of the delimeter in the delim list, or -1 if none applicable */ private int updateDelimPositions() { while (lastSelectedDelimeterIndex >= 0 && delimPos[lastSelectedDelimeterIndex] < pos) { if (pos + delim[lastSelectedDelimeterIndex].length() >= subject.length()) { // if this delimeter could not fit anymore in the string delimNoLongerPresent(lastSelectedDelimeterIndex); } else { delimPos[lastSelectedDelimeterIndex] = subject.indexOf(delimStr[lastSelectedDelimeterIndex], pos); if (delimPos[lastSelectedDelimeterIndex] == -1) { delimNoLongerPresent(lastSelectedDelimeterIndex); } } // Ok, now we assume that all delims are now updated and present, need to find // nearest if (delimLength == 0) { return -1; } lastSelectedDelimeterIndex = indexOfNearestDelimeter(delimPos); } return lastSelectedDelimeterIndex; } public int indexOfNearestDelimeter(int... 
array) {
		int ret = 0;
		for (int i = 1; i < delimLength; i++) {
			if (array[i] < array[ret]) {
				ret = i;
			}
		}
		return ret;
	}

	public SubString next() {
		if (initDelimPos() && delimLength == 0) {
			// right off the bat we see that there are no delimiters -- just return the
			// string itself
			pos = subject.length();
			return new SubString(subject);
		}
		if (pos >= subject.length()) {
			return null;
		}
		int nextDelimeter = updateDelimPositions();
		if (nextDelimeter == -1) {
			SubString ret = new SubString(subject, pos);
			pos = subject.length();
			return ret;
		}
		if (delimPos[nextDelimeter] == pos) {
			pos += delim[nextDelimeter].length();
			return delim[nextDelimeter];
		}
		SubString ret = new SubString(subject, pos, delimPos[nextDelimeter]);
		pos = delimPos[nextDelimeter];
		return ret;
	}

	public static class SubString implements CharSequence {
		private int beginIndex;
		private int endIndex;
		private String string;

		public SubString(String subject) {
			this.string = subject;
			this.beginIndex = 0;
			this.endIndex = subject.length();
		}

		public SubString(String subject, int beginIndex, int endIndex) {
			this.string = subject;
			this.beginIndex = beginIndex;
			this.endIndex = endIndex;
		}

		public SubString(String subject, int pos) {
			this.string = subject;
			this.beginIndex = pos;
			this.endIndex = string.length();
		}

		@Override
		public int length() {
			return endIndex - beginIndex;
		}

		@Override
		public char charAt(int index) {
			return string.charAt(beginIndex + index);
		}

		@Override
		public CharSequence subSequence(int start, int end) {
			return new SubString(string, beginIndex + start, beginIndex + end);
		}

		@Override
		public String toString() {
			// CharSequence documents that toString() returns the characters of the
			// sequence; without this override the default Object.toString() would be
			// returned instead.
			return string.substring(beginIndex, endIndex);
		}
	}
}
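/*
 * Illustrative usage sketch (not part of the original sources): splits a string
 * on ';' and the multi-character "--" delimiter and prints both the tokens and
 * the delimiters themselves, which is the behaviour the StringTokenizer above
 * was written for. Assumes org.summerb.dbupgrade.utils.StringTokenizer is on
 * the classpath; the input string is an arbitrary example.
 */
import org.summerb.dbupgrade.utils.StringTokenizer;
import org.summerb.dbupgrade.utils.StringTokenizer.SubString;

class StringTokenizerDemo {
    public static void main(String[] args) {
        StringTokenizer tokenizer = new StringTokenizer(
                "SELECT 1;\nSELECT 2;\n-- trailing comment",
                new SubString(";"), new SubString("--"));

        // next() returns null once the subject is exhausted; delimiters are
        // returned as their own tokens. SubString is a lightweight CharSequence
        // view, so we append it to a StringBuilder to materialize its characters.
        for (SubString token = tokenizer.next(); token != null; token = tokenizer.next()) {
            System.out.println(new StringBuilder("[").append(token).append("]"));
        }
    }
}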
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.atlasmap; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import io.atlasmap.api.AtlasContext; import io.atlasmap.api.AtlasContextFactory; import io.atlasmap.api.AtlasException; import io.atlasmap.api.AtlasSession; import io.atlasmap.core.DefaultAtlasContextFactory; import io.atlasmap.v2.Audit; import io.atlasmap.v2.DataSource; import io.atlasmap.v2.DataSourceType; import org.apache.camel.Category; import org.apache.camel.Exchange; import org.apache.camel.ExchangePattern; import org.apache.camel.Message; import org.apache.camel.component.ResourceEndpoint; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.support.MessageHelper; import org.apache.camel.support.ResourceHelper; import org.apache.camel.util.IOHelper; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static io.atlasmap.api.AtlasContextFactory.Format.ADM; import static io.atlasmap.api.AtlasContextFactory.Format.JSON; /** * Transforms the message using an AtlasMap transformation. 
*/ @UriEndpoint(firstVersion = "3.7.0", scheme = "atlasmap", title = "AtlasMap", syntax = "atlasmap:resourceUri", producerOnly = true, category = { Category.TRANSFORMATION }) public class AtlasMapEndpoint extends ResourceEndpoint { public static final String CONTENT_TYPE_JSON = "application/json"; public static final String CONTENT_TYPE_XML = "application/xml"; private static final Logger LOG = LoggerFactory.getLogger(AtlasMapEndpoint.class); private AtlasContextFactory atlasContextFactory; private AtlasContext atlasContext; @UriParam(label = "advanced") private String propertiesFile; @UriParam private String sourceMapName; @UriParam private String targetMapName; @UriParam(defaultValue = "MAP") private TargetMapMode targetMapMode = TargetMapMode.MAP; public enum TargetMapMode { MAP, MESSAGE_HEADER, EXCHANGE_PROPERTY; } public AtlasMapEndpoint(String uri, AtlasMapComponent component, String resourceUri) { super(uri, component, resourceUri); } @Override public boolean isSingleton() { return true; } @Override public ExchangePattern getExchangePattern() { return ExchangePattern.InOut; } @Override protected String createEndpointUri() { return "atlasmap:" + getResourceUri(); } public AtlasContextFactory getAtlasContextFactory() { return this.atlasContextFactory; } public void setAtlasContextFactory(AtlasContextFactory atlasContextFactory) { this.atlasContextFactory = atlasContextFactory; } public AtlasContext getAtlasContext() { return this.atlasContext; } public void setAtlasContext(AtlasContext atlasContext) { this.atlasContext = atlasContext; } /** * The URI of the properties file which is used for AtlasContextFactory initialization. * * @param file property file path */ public void setPropertiesFile(String file) { propertiesFile = file; } public String getPropertiesFile() { return propertiesFile; } /** * The Exchange property name for a source message map which hold <code>java.util.Map&lt;String, Message&gt;</code> * where the key is AtlasMap Document ID. AtlasMap consumes Message bodies as source documents, as well as message * headers as source properties where the scope equals to Document ID. * * @param name Exchange property name for source map */ public void setSourceMapName(String name) { this.sourceMapName = name; } public String getSourceMapName() { return this.sourceMapName; } /** * The Exchange property name for a target document map which hold <code>java.util.Map&lt;String, Object&gt;</code> * where the key is AtlasMap Document ID. AtlasMap populates multiple target documents into this map. * * @param name Exchange property name for target map */ public void setTargetMapName(String name) { this.targetMapName = name; } public String getTargetMapName() { return this.targetMapName; } /** * {@link TargetMapMode} enum value to specify how multiple target documents are delivered if exist. 
* <ul> * <li>'MAP': Stores them into a java.util.Map, and the java.util.Map is set to an exchange" property if * 'targetMapName' is specified, otherwise message body.</li>" * <li>'MESSAGE_HEADER': Stores them into message headers.</li>" * <li>'EXCHANGE_PROPERTY': Stores them into exchange properties.</li> * </ul> * ") * * @param mode {@link TargetMapMode} */ public void setTargetMapMode(TargetMapMode mode) { this.targetMapMode = mode; } public TargetMapMode getTargetMapMode() { return this.targetMapMode; } public AtlasMapEndpoint findOrCreateEndpoint(String uri, String newResourceUri) { String newUri = uri.replace(getResourceUri(), newResourceUri); log.debug("Getting endpoint with URI: {}", newUri); return getCamelContext().getEndpoint(newUri, AtlasMapEndpoint.class); } @Override protected void onExchange(Exchange exchange) throws Exception { Message incomingMessage = exchange.getIn(); String newResourceUri = incomingMessage.getHeader(AtlasMapConstants.ATLAS_RESOURCE_URI, String.class); if (newResourceUri != null) { incomingMessage.removeHeader(AtlasMapConstants.ATLAS_RESOURCE_URI); log.debug("{} set to {} creating new endpoint to handle exchange", AtlasMapConstants.ATLAS_RESOURCE_URI, newResourceUri); AtlasMapEndpoint newEndpoint = findOrCreateEndpoint(getEndpointUri(), newResourceUri); newEndpoint.onExchange(exchange); return; } AtlasSession atlasSession = getOrCreateAtlasContext(incomingMessage).createSession(); populateSourceDocuments(exchange, atlasSession); atlasSession.getAtlasContext().process(atlasSession); List<Audit> errors = new ArrayList<>(); for (Audit audit : atlasSession.getAudits().getAudit()) { switch (audit.getStatus()) { case ERROR: errors.add(audit); break; case WARN: LOG.warn("{}: Document='{}(ID:{})', path='{}'", audit.getMessage(), audit.getDocName(), audit.getDocId(), audit.getPath()); break; default: LOG.info("{}: Document='{}(ID:{})', path='{}'", audit.getMessage(), audit.getDocName(), audit.getDocId(), audit.getPath()); } } if (!errors.isEmpty()) { StringBuilder buf = new StringBuilder("Errors: "); errors.stream().forEach(a -> buf.append( String.format("[%s: Document='%s(ID:%s)', path='%s'], ", a.getMessage(), a.getDocName(), a.getDocId(), a.getPath()))); throw new AtlasException(buf.toString()); } populateTargetDocuments(atlasSession, exchange); } private AtlasContext getOrCreateAtlasContext(Message incomingMessage) throws Exception { String path = getResourceUri(); ObjectHelper.notNull(path, "mappingUri"); String content = incomingMessage.getHeader(AtlasMapConstants.ATLAS_MAPPING, String.class); if (content != null) { // use content from header InputStream is = new ByteArrayInputStream(content.getBytes()); if (log.isDebugEnabled()) { log.debug("Atlas mapping content read from header {} for endpoint {}", AtlasMapConstants.ATLAS_MAPPING, getEndpointUri()); } // remove the header to avoid it being propagated in the routing incomingMessage.removeHeader(AtlasMapConstants.ATLAS_MAPPING); return getOrCreateAtlasContextFactory().createContext(JSON, is); } else if (getAtlasContext() != null) { // no mapping specified in header, and found an existing context return getAtlasContext(); } // No mapping in header, and no existing context. Create new one from resourceUri if (log.isDebugEnabled()) { log.debug("Atlas mapping content read from resourceUri: {} for endpoint {}", new Object[] { path, getEndpointUri() }); } atlasContext = getOrCreateAtlasContextFactory().createContext( path.toLowerCase().endsWith("adm") ? 
ADM : JSON, getResourceAsInputStream()); return atlasContext; } private synchronized AtlasContextFactory getOrCreateAtlasContextFactory() throws Exception { if (atlasContextFactory != null) { return atlasContextFactory; } atlasContextFactory = DefaultAtlasContextFactory.getInstance(); atlasContextFactory.addClassLoader(getCamelContext().getApplicationContextClassLoader()); // load the properties from property file which may overrides the default ones if (ObjectHelper.isNotEmpty(getPropertiesFile())) { Properties properties = new Properties(); InputStream reader = ResourceHelper.resolveMandatoryResourceAsInputStream(getCamelContext(), getPropertiesFile()); try { properties.load(reader); log.info("Loaded the Atlas properties file " + getPropertiesFile()); } finally { IOHelper.close(reader, getPropertiesFile(), log); } log.debug("Initializing AtlasContextFactory with properties {}", properties); atlasContextFactory.setProperties(properties); } return atlasContextFactory; } private void populateSourceDocuments(Exchange exchange, AtlasSession session) { if (session.getMapping().getDataSource() == null) { return; } Message inMessage = exchange.getIn(); CamelAtlasPropertyStrategy propertyStrategy = new CamelAtlasPropertyStrategy(); propertyStrategy.setCurrentSourceMessage(inMessage); propertyStrategy.setTargetMessage(exchange.getMessage()); propertyStrategy.setExchange(exchange); session.setAtlasPropertyStrategy(propertyStrategy); DataSource[] sourceDataSources = session.getMapping().getDataSource().stream() .filter(ds -> ds.getDataSourceType() == DataSourceType.SOURCE) .toArray(DataSource[]::new); if (sourceDataSources.length == 0) { session.setDefaultSourceDocument(inMessage.getBody()); return; } if (sourceDataSources.length == 1) { String docId = sourceDataSources[0].getId(); Object payload = extractPayload(sourceDataSources[0], inMessage); if (docId == null || docId.isEmpty()) { session.setDefaultSourceDocument(payload); } else { session.setSourceDocument(docId, payload); propertyStrategy.setSourceMessage(docId, inMessage); } return; } Map<String, Message> sourceMessages = null; Map<String, Object> sourceDocuments = null; if (sourceMapName != null) { sourceMessages = exchange.getProperty(sourceMapName, Map.class); } if (sourceMessages == null) { Object body = inMessage.getBody(); if (body instanceof Map) { sourceDocuments = (Map<String, Object>) body; } else { session.setDefaultSourceDocument(body); } } for (DataSource ds : sourceDataSources) { String docId = ds.getId(); if (docId == null || docId.isEmpty()) { Object payload = extractPayload(ds, inMessage); session.setDefaultSourceDocument(payload); } else if (sourceMessages != null) { Object payload = extractPayload(ds, sourceMessages.get(docId)); session.setSourceDocument(docId, payload); propertyStrategy.setSourceMessage(docId, sourceMessages.get(docId)); } else if (sourceDocuments != null) { Object payload = sourceDocuments.get(docId); session.setSourceDocument(docId, payload); } else if (inMessage.getHeaders().containsKey(docId)) { Object payload = inMessage.getHeader(docId); session.setSourceDocument(docId, payload); } else if (exchange.getProperties().containsKey(docId)) { Object payload = exchange.getProperty(docId); session.setSourceDocument(docId, payload); } else { LOG.warn("Ignoring missing source document: '{}(ID:{})'", ds.getName(), ds.getId()); } } } private Object extractPayload(final DataSource dataSource, Message message) { if (dataSource == null || message == null) { return null; } Object body = null; if (dataSource != null 
&& dataSource.getUri() != null && !(dataSource.getUri().startsWith("atlas:core") || dataSource.getUri().startsWith("atlas:java"))) { body = message.getBody(String.class); } else { body = message.getBody(); } //Just in case, prepare for future calls MessageHelper.resetStreamCache(message); return body; } private void populateTargetDocuments(AtlasSession session, Exchange exchange) { Message message = exchange.getMessage(); if (session.getMapping().getDataSource() == null) { return; } DataSource[] targetDataSources = session.getMapping().getDataSource().stream() .filter(ds -> ds.getDataSourceType() == DataSourceType.TARGET) .toArray(DataSource[]::new); if (targetDataSources.length == 0) { Object newBody = session.getDefaultTargetDocument(); message.setBody(newBody); return; } if (targetDataSources.length == 1) { String docId = targetDataSources[0].getId(); if (docId == null || docId.isEmpty()) { Object newBody = session.getDefaultTargetDocument(); message.setBody(newBody); } else { Object newBody = session.getTargetDocument(docId); message.setBody(newBody); } setContentType(targetDataSources[0], message); return; } Map<String, Object> targetDocuments = new HashMap<>(); for (DataSource ds : targetDataSources) { String docId = ds.getId(); if (docId == null || docId.isEmpty()) { targetDocuments.put(io.atlasmap.api.AtlasConstants.DEFAULT_TARGET_DOCUMENT_ID, session.getDefaultTargetDocument()); Object newBody = session.getDefaultTargetDocument(); message.setBody(newBody); setContentType(ds, message); } else { targetDocuments.put(docId, session.getTargetDocument(docId)); } } switch (targetMapMode) { case MAP: if (targetMapName != null) { exchange.setProperty(targetMapName, targetDocuments); } else { message.setBody(targetDocuments); } break; case MESSAGE_HEADER: targetDocuments.remove(io.atlasmap.api.AtlasConstants.DEFAULT_TARGET_DOCUMENT_ID); message.getHeaders().putAll(targetDocuments); break; case EXCHANGE_PROPERTY: targetDocuments.remove(io.atlasmap.api.AtlasConstants.DEFAULT_TARGET_DOCUMENT_ID); exchange.getProperties().putAll(targetDocuments); break; default: throw new IllegalArgumentException("Unknown targetMapMode: " + targetMapMode.name()); } } private void setContentType(DataSource ds, Message message) { if (ds.getUri() == null) { return; } if (ds.getUri().startsWith("atlas:json")) { message.setHeader(Exchange.CONTENT_TYPE, CONTENT_TYPE_JSON); } else if (ds.getUri().startsWith("atlas:xml")) { message.setHeader(Exchange.CONTENT_TYPE, CONTENT_TYPE_XML); } } }
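/*
 * Illustrative route sketch (not part of the original sources): shows how the
 * atlasmap endpoint above is typically used from a Camel route. The mapping
 * archive name "my-mapping.adm" and the direct endpoint are placeholder
 * examples; with targetMapMode=MAP and targetMapName set, the target documents
 * would land in an exchange property instead of the message body.
 */
import org.apache.camel.builder.RouteBuilder;

class AtlasMapRouteDemo extends RouteBuilder {
    @Override
    public void configure() {
        // The producer loads the ADM archive (or JSON mapping) from resourceUri,
        // runs an AtlasMap session and replaces the message body with the
        // default target document.
        from("direct:transform")
            .to("atlasmap:my-mapping.adm")
            .log("Transformed body: ${body}");
    }
}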
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.model; import org.apache.wicket.util.lang.Args; import org.danekja.java.util.function.serializable.SerializableBiConsumer; import org.danekja.java.util.function.serializable.SerializableConsumer; import org.danekja.java.util.function.serializable.SerializableFunction; import org.danekja.java.util.function.serializable.SerializableSupplier; /** * <code>LambdaModel</code> is a basic implementation of an <code>IModel</code> that uses a * serializable {@link java.util.function.Supplier} to get the object and * {@link java.util.function.Consumer} to set it. * * @param <T> * The type of the Model Object */ public abstract class LambdaModel<T> implements IModel<T> { private static final long serialVersionUID = 1L; /** * Constructor hidden, instantiation is done using one of the factory methods */ private LambdaModel() { } @Override public void setObject(T t) { throw new UnsupportedOperationException("setObject(Object) not supported"); } /** * Create a read-only {@link IModel}. Usage: * * <pre> * {@code * LambdaModel.of(person::getName) * } * </pre> * * Note that {@link IModel} is a {@code FunctionalInterface} and you can also use a lambda * directly as a model. * * @param getter * used to get value * @return model * * @param <T> * model object type */ public static <T> IModel<T> of(SerializableSupplier<T> getter) { return getter::get; } /** * Create a {@link LambdaModel}. Usage: * * <pre> * {@code * LambdaModel.of(person::getName, person::setName) * } * </pre> * * @param getter * used to get value * @param setter * used to set value * @return model * * @param <T> * model object type */ public static <T> IModel<T> of(SerializableSupplier<T> getter, SerializableConsumer<T> setter) { Args.notNull(getter, "getter"); Args.notNull(setter, "setter"); return new LambdaModel<T>() { private static final long serialVersionUID = 1L; @Override public T getObject() { return getter.get(); } @Override public void setObject(T t) { setter.accept(t); } }; } /** * Create a {@link LambdaModel} for a given target. Usage: * * <pre> * {@code * LambdaModel.of(personModel, Person::getName) * } * </pre> * * The target model will be detached automatically. 
* * @param target * target for getter and setter * @param getter * used to get a value * @param <X> * target model object type * @param <T> * model object type * * @return model */ public static <X, T> IModel<T> of(IModel<X> target, SerializableFunction<X, T> getter) { Args.notNull(target, "target"); Args.notNull(getter, "getter"); return new LambdaModel<T>() { private static final long serialVersionUID = 1L; @Override public T getObject() { X x = target.getObject(); if (x == null) { return null; } return getter.apply(x); } @Override public void detach() { target.detach(); } }; } /** * Create a {@link LambdaModel} for a given target. Usage: * * <pre> * {@code * LambdaModel.of(personModel, Person::getName, Person::setName) * } * </pre> * * The target model will be detached automatically. * * @param target * target for getter and setter * @param getter * used to get a value * @param setter * used to set a value * * @param <X> * target model object type * @param <T> * model object type * * @return model */ public static <X, T> IModel<T> of(IModel<X> target, SerializableFunction<X, T> getter, SerializableBiConsumer<X, T> setter) { Args.notNull(target, "target"); Args.notNull(getter, "getter"); Args.notNull(setter, "setter"); return new LambdaModel<T>() { private static final long serialVersionUID = 1L; @Override public T getObject() { X x = target.getObject(); if (x == null) { return null; } return getter.apply(x); } @Override public void setObject(T t) { X x = target.getObject(); if (x != null) { setter.accept(x, t); } } @Override public void detach() { target.detach(); } }; } }
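/*
 * Illustrative usage sketch (not part of the original sources): the Person bean
 * and its name field are hypothetical and exist only to exercise the three
 * LambdaModel factory variants documented above.
 */
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.LambdaModel;
import org.apache.wicket.model.Model;

class LambdaModelDemo {
    /** Hypothetical bean used only for this example. */
    static class Person implements java.io.Serializable {
        private String name;
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        Person person = new Person();

        // Read-only model backed by a getter reference.
        IModel<String> readOnly = LambdaModel.of(person::getName);

        // Read-write model backed by a getter and a setter.
        IModel<String> readWrite = LambdaModel.of(person::getName, person::setName);
        readWrite.setObject("Alice");

        // Model chained onto a target model; detaching the chained model
        // also detaches the target.
        IModel<Person> target = Model.of(person);
        IModel<String> chained = LambdaModel.of(target, Person::getName, Person::setName);
        chained.setObject("Bob");

        System.out.println(readOnly.getObject()); // prints "Bob"
    }
}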
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.cxf.jaxrs; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import javax.net.ssl.HostnameVerifier; import org.apache.camel.Component; import org.apache.camel.Consumer; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.RuntimeCamelException; import org.apache.camel.Service; import org.apache.camel.component.cxf.NullFaultListener; import org.apache.camel.http.base.cookie.CookieHandler; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.spi.HeaderFilterStrategyAware; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; import org.apache.camel.support.DefaultEndpoint; import org.apache.camel.support.EndpointHelper; import org.apache.camel.support.SynchronousDelegateProducer; import org.apache.camel.support.jsse.SSLContextParameters; import org.apache.cxf.Bus; import org.apache.cxf.BusFactory; import org.apache.cxf.common.util.ModCountCopyOnWriteArrayList; import org.apache.cxf.common.util.StringUtils; import org.apache.cxf.ext.logging.LoggingFeature; import org.apache.cxf.feature.Feature; import org.apache.cxf.interceptor.AbstractBasicInterceptorProvider; import org.apache.cxf.interceptor.Interceptor; import org.apache.cxf.jaxrs.AbstractJAXRSFactoryBean; import org.apache.cxf.jaxrs.JAXRSServerFactoryBean; import org.apache.cxf.jaxrs.client.JAXRSClientFactoryBean; import org.apache.cxf.jaxrs.model.ClassResourceInfo; import org.apache.cxf.jaxrs.model.UserResource; import org.apache.cxf.jaxrs.utils.ResourceUtils; import org.apache.cxf.logging.FaultListener; import org.apache.cxf.message.Message; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Expose JAX-RS REST services using Apache CXF or connect to external REST services using CXF REST client. */ @UriEndpoint(firstVersion = "2.0.0", scheme = "cxfrs", title = "CXF-RS", syntax = "cxfrs:beanId:address", label = "rest", lenientProperties = true) public class CxfRsEndpoint extends DefaultEndpoint implements HeaderFilterStrategyAware, Service { private static final Logger LOG = LoggerFactory.getLogger(CxfRsEndpoint.class); @UriParam(label = "advanced") protected Bus bus; private final InterceptorHolder interceptorHolder = new InterceptorHolder(); private Map<String, String> parameters; private Map<String, Object> properties; @UriPath(description = "To lookup an existing configured CxfRsEndpoint. 
Must used bean: as prefix.") private String beanId; @UriPath private String address; @UriParam private List<Class<?>> resourceClasses; @UriParam(label = "consumer,advanced", javaType = "java.lang.String") private List<Object> serviceBeans = new LinkedList<>(); private String serviceBeansRef; @UriParam private String modelRef; @UriParam(label = "consumer", defaultValue = "Default") private BindingStyle bindingStyle = BindingStyle.Default; @UriParam(label = "consumer") private String publishedEndpointUrl; @UriParam(label = "advanced") private HeaderFilterStrategy headerFilterStrategy; @UriParam(label = "advanced") private CxfRsBinding binding; @UriParam(javaType = "java.lang.String") private List<Object> providers = new LinkedList<>(); private String providersRef; @UriParam private List<String> schemaLocations; @UriParam private List<Feature> features = new ModCountCopyOnWriteArrayList<>(); @UriParam(label = "producer,advanced", defaultValue = "true") private boolean httpClientAPI = true; @UriParam(label = "producer,advanced") private boolean ignoreDeleteMethodMessageBody; @UriParam(label = "producer", defaultValue = "true") private boolean throwExceptionOnFailure = true; @UriParam(label = "producer,advanced", defaultValue = "10") private int maxClientCacheSize = 10; @UriParam(label = "producer") private SSLContextParameters sslContextParameters; @UriParam(label = "producer") private HostnameVerifier hostnameVerifier; @UriParam private boolean loggingFeatureEnabled; @UriParam private int loggingSizeLimit; @UriParam private boolean skipFaultLogging; @UriParam(label = "advanced", defaultValue = "30000", javaType = "java.time.Duration") private long continuationTimeout = 30000; @UriParam(label = "advanced") private boolean defaultBus; @UriParam(label = "advanced") private boolean performInvocation; @UriParam(label = "advanced") private boolean propagateContexts; @UriParam(label = "advanced") private CxfRsConfigurer cxfRsConfigurer; @UriParam(label = "producer") private CookieHandler cookieHandler; public CxfRsEndpoint() { } public CxfRsEndpoint(String endpointUri, Component component) { super(endpointUri, component); setAddress(endpointUri); } @Override public boolean isLenientProperties() { return true; } // This method is for CxfRsComponent setting the EndpointUri protected void updateEndpointUri(String endpointUri) { super.setEndpointUri(endpointUri); } public void setParameters(Map<String, String> param) { parameters = param; } public Map<String, String> getParameters() { return parameters; } /** * If it is true, the CxfRsProducer will use the HttpClientAPI to invoke the service. If it is false, the * CxfRsProducer will use the ProxyClientAPI to invoke the service */ public void setHttpClientAPI(boolean clientAPI) { httpClientAPI = clientAPI; } public boolean isHttpClientAPI() { return httpClientAPI; } @Override public HeaderFilterStrategy getHeaderFilterStrategy() { return headerFilterStrategy; } /** * To use a custom HeaderFilterStrategy to filter header to and from Camel message. 
*/ @Override public void setHeaderFilterStrategy(HeaderFilterStrategy strategy) { headerFilterStrategy = strategy; } @Override public Consumer createConsumer(Processor processor) throws Exception { CxfRsConsumer answer = new CxfRsConsumer(this, processor); configureConsumer(answer); return answer; } @Override public Producer createProducer() throws Exception { if (bindingStyle == BindingStyle.SimpleConsumer) { throw new IllegalArgumentException("The SimpleConsumer Binding Style cannot be used in a camel-cxfrs producer"); } final CxfRsProducer cxfRsProducer = new CxfRsProducer(this); if (isSynchronous()) { return new SynchronousDelegateProducer(cxfRsProducer); } else { return cxfRsProducer; } } /** * To use a custom CxfBinding to control the binding between Camel Message and CXF Message. */ public void setBinding(CxfRsBinding binding) { this.binding = binding; } public CxfRsBinding getBinding() { return binding; } public boolean isSkipFaultLogging() { return skipFaultLogging; } public CxfRsConfigurer getChainedCxfRsEndpointConfigurer() { return ChainedCxfRsConfigurer .create(getNullSafeCxfRsEndpointConfigurer(), SslCxfRsConfigurer.create(sslContextParameters, getCamelContext())) .addChild(HostnameVerifierCxfRsConfigurer.create(hostnameVerifier)); } /** * This option controls whether the PhaseInterceptorChain skips logging the Fault that it catches. */ public void setSkipFaultLogging(boolean skipFaultLogging) { this.skipFaultLogging = skipFaultLogging; } protected void checkBeanType(Object object, Class<?> clazz) { if (!clazz.isAssignableFrom(object.getClass())) { throw new IllegalArgumentException("The configure bean is not the instance of " + clazz.getName()); } } protected void setupJAXRSServerFactoryBean(JAXRSServerFactoryBean sfb) { // address if (getAddress() != null) { sfb.setAddress(getAddress()); } processResourceModel(sfb); if (getResourceClasses() != null) { sfb.setResourceClasses(getResourceClasses()); } List<Object> beans = new ArrayList<>(serviceBeans); if (serviceBeansRef != null) { beans.addAll(EndpointHelper.resolveReferenceListParameter(getCamelContext(), serviceBeansRef, Object.class)); } sfb.setServiceBeans(beans); // setup the resource providers for interfaces List<ClassResourceInfo> cris = sfb.getServiceFactory().getClassResourceInfo(); for (ClassResourceInfo cri : cris) { final Class<?> serviceClass = cri.getServiceClass(); if (serviceClass.isInterface()) { cri.setResourceProvider(new CamelResourceProvider(serviceClass)); } } setupCommonFactoryProperties(sfb); sfb.setStart(false); getNullSafeCxfRsEndpointConfigurer().configure(sfb); } protected CxfRsConfigurer getNullSafeCxfRsEndpointConfigurer() { if (cxfRsConfigurer == null) { return new ChainedCxfRsConfigurer.NullCxfRsConfigurer(); } return cxfRsConfigurer; } private void processResourceModel(JAXRSServerFactoryBean sfb) { // Currently a CXF model document is the only possible source // of the model. Other sources will be supported going forward if (modelRef != null) { List<UserResource> resources = ResourceUtils.getUserResources(modelRef, sfb.getBus()); processUserResources(sfb, resources); } } /* * Prepare model beans and set them on the factory. * The model beans can be created from a variety of sources such as * CXF Model extensions but also other documents (to be supported in the future). 
*/ private void processUserResources(JAXRSServerFactoryBean sfb, List<UserResource> resources) { for (UserResource resource : resources) { if (StringUtils.isEmpty(resource.getName())) { resource.setName(DefaultModelResource.class.getName()); } } // The CXF to Camel exchange binding may need to be customized // for the operation name, request, response types be derived from // the model info (when a given model does provide this info) as opposed // to a matched method which is of no real use with a default handler. sfb.setModelBeans(resources); } protected void setupJAXRSClientFactoryBean(JAXRSClientFactoryBean cfb, String address) { if (modelRef != null) { cfb.setModelRef(modelRef); } if (getResourceClasses() != null && !getResourceClasses().isEmpty()) { cfb.setResourceClass(getResourceClasses().get(0)); cfb.getServiceFactory().setResourceClasses(getResourceClasses()); } setupCommonFactoryProperties(cfb); cfb.setThreadSafe(true); getNullSafeCxfRsEndpointConfigurer().configure(cfb); // Add the address could be override by message header if (address != null) { cfb.setAddress(address); } } protected void setupCommonFactoryProperties(AbstractJAXRSFactoryBean factory) { // let customer to override the default setting of provider if (!getProviders().isEmpty()) { factory.setProviders(getProviders()); } // setup the features if (!getFeatures().isEmpty()) { factory.getFeatures().addAll(getFeatures()); } if (publishedEndpointUrl != null) { factory.setPublishedEndpointUrl(publishedEndpointUrl); } // we need to avoid flushing the setting from spring or blueprint if (!interceptorHolder.getInInterceptors().isEmpty()) { factory.setInInterceptors(interceptorHolder.getInInterceptors()); } if (!interceptorHolder.getOutInterceptors().isEmpty()) { factory.setOutInterceptors(interceptorHolder.getOutInterceptors()); } if (!interceptorHolder.getOutFaultInterceptors().isEmpty()) { factory.setOutFaultInterceptors(interceptorHolder.getOutFaultInterceptors()); } if (!interceptorHolder.getInFaultInterceptors().isEmpty()) { factory.setInFaultInterceptors(interceptorHolder.getInFaultInterceptors()); } if (getProperties() != null) { if (factory.getProperties() != null) { // add to existing properties factory.getProperties().putAll(getProperties()); } else { factory.setProperties(getProperties()); } LOG.debug("JAXRS FactoryBean: {} added properties: {}", factory, getProperties()); } if (isLoggingFeatureEnabled()) { LoggingFeature loggingFeature = new LoggingFeature(); if (getLoggingSizeLimit() > 0) { loggingFeature.setLimit(getLoggingSizeLimit()); } factory.getFeatures().add(loggingFeature); } if (this.isSkipFaultLogging()) { if (factory.getProperties() == null) { factory.setProperties(new HashMap<String, Object>()); } factory.getProperties().put(FaultListener.class.getName(), new NullFaultListener()); } } protected JAXRSServerFactoryBean newJAXRSServerFactoryBean() { return new JAXRSServerFactoryBean() { protected boolean isValidClassResourceInfo(ClassResourceInfo cri) { // CXF will consider interfaces created for managing model resources // invalid - however it is fine with Camel processors if no service invocation // is requested. 
return !performInvocation || !cri.getServiceClass().isInterface(); } }; } protected JAXRSClientFactoryBean newJAXRSClientFactoryBean() { return new JAXRSClientFactoryBean(); } protected String resolvePropertyPlaceholders(String str) { try { if (getCamelContext() != null) { return getCamelContext().resolvePropertyPlaceholders(str); } else { return str; } } catch (Exception ex) { throw RuntimeCamelException.wrapRuntimeCamelException(ex); } } public JAXRSServerFactoryBean createJAXRSServerFactoryBean() { JAXRSServerFactoryBean answer = newJAXRSServerFactoryBean(); setupJAXRSServerFactoryBean(answer); return answer; } public JAXRSClientFactoryBean createJAXRSClientFactoryBean() { return createJAXRSClientFactoryBean(getAddress()); } public JAXRSClientFactoryBean createJAXRSClientFactoryBean(String address) { JAXRSClientFactoryBean answer = newJAXRSClientFactoryBean(); setupJAXRSClientFactoryBean(answer, address); return answer; } public List<Class<?>> getResourceClasses() { return resourceClasses; } public void addResourceClass(Class<?> resourceClass) { if (resourceClasses == null) { resourceClasses = new ArrayList<>(); } resourceClasses.add(resourceClass); } /** * The resource classes which you want to export as REST service. Multiple classes can be separated by comma. */ public void setResourceClasses(List<Class<?>> resourceClasses) { this.resourceClasses = resourceClasses; } public void setResourceClasses(Class<?>... classes) { setResourceClasses(Arrays.asList(classes)); } public List<?> getServiceBeans() { return serviceBeans; } /** * The service beans (the bean ids to lookup in the registry) which you want to export as REST service. Multiple * beans can be separated by comma */ public void setServiceBeans(String beans) { this.serviceBeansRef = beans; } public void setServiceBeans(List<?> beans) { this.serviceBeans.addAll(beans); } public void setServiceBean(Object bean) { this.serviceBeans.add(bean); } /** * The service publish address. */ public void setAddress(String address) { this.address = address; } public String getModelRef() { return modelRef; } /** * This option is used to specify the model file which is useful for the resource class without annotation. When * using this option, then the service class can be omitted, to emulate document-only endpoints */ public void setModelRef(String ref) { this.modelRef = ref; } public String getAddress() { return resolvePropertyPlaceholders(address); } public String getPublishedEndpointUrl() { return publishedEndpointUrl; } /** * This option can override the endpointUrl that published from the WADL which can be accessed with resource address * url plus ?_wadl */ public void setPublishedEndpointUrl(String publishedEndpointUrl) { this.publishedEndpointUrl = publishedEndpointUrl; } /** * This option enables CXF Logging Feature which writes inbound and outbound REST messages to log. */ public boolean isLoggingFeatureEnabled() { return loggingFeatureEnabled; } public void setLoggingFeatureEnabled(boolean loggingFeatureEnabled) { this.loggingFeatureEnabled = loggingFeatureEnabled; } public int getLoggingSizeLimit() { return loggingSizeLimit; } /** * To limit the total size of number of bytes the logger will output when logging feature has been enabled. 
*/ public void setLoggingSizeLimit(int loggingSizeLimit) { this.loggingSizeLimit = loggingSizeLimit; } public boolean isThrowExceptionOnFailure() { return throwExceptionOnFailure; } /** * This option tells the CxfRsProducer to inspect return codes and will generate an Exception if the return code is * larger than 207. */ public void setThrowExceptionOnFailure(boolean throwExceptionOnFailure) { this.throwExceptionOnFailure = throwExceptionOnFailure; } /** * This option allows you to configure the maximum size of the cache. The implementation caches CXF clients or * ClientFactoryBean in CxfProvider and CxfRsProvider. */ public void setMaxClientCacheSize(int maxClientCacheSize) { this.maxClientCacheSize = maxClientCacheSize; } public int getMaxClientCacheSize() { return maxClientCacheSize; } /** * To use a custom configured CXF Bus. */ public void setBus(Bus bus) { this.bus = bus; if (defaultBus) { BusFactory.setDefaultBus(bus); LOG.debug("Set bus {} as thread default bus", bus); } } public Bus getBus() { return bus; } /** * Will set the default bus when CXF endpoint create a bus by itself */ public void setDefaultBus(boolean isSetDefaultBus) { this.defaultBus = isSetDefaultBus; } public boolean isDefaultBus() { return defaultBus; } public boolean isIgnoreDeleteMethodMessageBody() { return ignoreDeleteMethodMessageBody; } /** * This option is used to tell CxfRsProducer to ignore the message body of the DELETE method when using HTTP API. */ public void setIgnoreDeleteMethodMessageBody(boolean ignoreDeleteMethodMessageBody) { this.ignoreDeleteMethodMessageBody = ignoreDeleteMethodMessageBody; } public BindingStyle getBindingStyle() { return bindingStyle; } public List<?> getProviders() { return providers; } /** * Set custom JAX-RS provider(s) list to the CxfRs endpoint. You can specify a string with a list of providers to * lookup in the registy separated by comma. */ public void setProviders(List<?> providers) { this.providers.addAll(providers); } /** * Set custom JAX-RS provider(s) list which is looked up in the registry. Multiple entries can be separated by * comma. */ public void setProviders(String providers) { this.providersRef = providers; } /** * Set custom JAX-RS provider to the CxfRs endpoint. */ public void setProvider(Object provider) { providers.add(provider); } /** * Sets the locations of the schema(s) which can be used to validate the incoming XML or JAXB-driven JSON. */ public void setSchemaLocation(String schema) { setSchemaLocations(Collections.singletonList(schema)); } /** * Sets the locations of the schema(s) which can be used to validate the incoming XML or JAXB-driven JSON. */ public void setSchemaLocations(List<String> schemas) { this.schemaLocations = schemas; } public List<String> getSchemaLocations() { return schemaLocations; } public List<Interceptor<? extends Message>> getOutFaultInterceptors() { return interceptorHolder.getOutFaultInterceptors(); } public List<Interceptor<? extends Message>> getInFaultInterceptors() { return interceptorHolder.getInFaultInterceptors(); } public List<Interceptor<? extends Message>> getInInterceptors() { return interceptorHolder.getInInterceptors(); } public List<Interceptor<? extends Message>> getOutInterceptors() { return interceptorHolder.getOutInterceptors(); } /** * Set the inInterceptors to the CxfRs endpoint. */ public void setInInterceptors(List<Interceptor<? extends Message>> interceptors) { interceptorHolder.setInInterceptors(interceptors); } /** * Set the inFaultInterceptors to the CxfRs endpoint. 
*/ public void setInFaultInterceptors(List<Interceptor<? extends Message>> interceptors) { interceptorHolder.setInFaultInterceptors(interceptors); } /** * Set the outInterceptor to the CxfRs endpoint. */ public void setOutInterceptors(List<Interceptor<? extends Message>> interceptors) { interceptorHolder.setOutInterceptors(interceptors); } /** * Set the outFaultInterceptors to the CxfRs endpoint. */ public void setOutFaultInterceptors(List<Interceptor<? extends Message>> interceptors) { interceptorHolder.setOutFaultInterceptors(interceptors); } public List<Feature> getFeatures() { return features; } /** * Set the feature list to the CxfRs endpoint. */ public void setFeatures(List<Feature> features) { this.features = features; } public Map<String, Object> getProperties() { return properties; } public void setProperties(Map<String, Object> properties) { if (this.properties == null) { this.properties = properties; } else { this.properties.putAll(properties); } } /** * Sets how requests and responses will be mapped to/from Camel. Two values are possible: * <ul> * <li>SimpleConsumer: This binding style processes request parameters, multiparts, etc. and maps them to IN * headers, IN attachments and to the message body. It aims to eliminate low-level processing of * {@link org.apache.cxf.message.MessageContentsList}. It also also adds more flexibility and simplicity to the * response mapping. Only available for consumers.</li> * <li>Default: The default style. For consumers this passes on a MessageContentsList to the route, requiring * low-level processing in the route. This is the traditional binding style, which simply dumps the * {@link org.apache.cxf.message.MessageContentsList} coming in from the CXF stack onto the IN message body. The * user is then responsible for processing it according to the contract defined by the JAX-RS method signature.</li> * <li>Custom: allows you to specify a custom binding through the binding option.</li> * </ul> */ public void setBindingStyle(BindingStyle bindingStyle) { this.bindingStyle = bindingStyle; } public String getBeanId() { return beanId; } public void setBeanId(String beanId) { this.beanId = beanId; } @Override protected void doInit() throws Exception { super.doInit(); if (headerFilterStrategy == null) { headerFilterStrategy = new CxfRsHeaderFilterStrategy(); } // if the user explicitly selected the Custom binding style, he must provide a binding if (bindingStyle == BindingStyle.Custom && binding == null) { throw new IllegalArgumentException("Custom binding style selected, but no binding was supplied"); } // if the user has set a binding, do nothing, just make sure that BindingStyle = Custom for coherency purposes if (binding != null) { bindingStyle = BindingStyle.Custom; } // set the right binding based on the binding style if (bindingStyle == BindingStyle.SimpleConsumer) { binding = new SimpleCxfRsBinding(); } else if (bindingStyle == BindingStyle.Custom) { // do nothing } else { binding = new DefaultCxfRsBinding(); } if (binding instanceof HeaderFilterStrategyAware) { ((HeaderFilterStrategyAware) binding).setHeaderFilterStrategy(getHeaderFilterStrategy()); } if (providersRef != null) { String[] names = providersRef.split(","); for (String name : names) { Object provider = EndpointHelper.resolveReferenceParameter(getCamelContext(), name, Object.class, true); setProvider(provider); } } } @Override protected void doStop() throws Exception { // noop } public long getContinuationTimeout() { return continuationTimeout; } /** * This option is used to set 
the CXF continuation timeout which could be used in CxfConsumer by default when the * CXF server is using Jetty or Servlet transport. */ public void setContinuationTimeout(long continuationTimeout) { this.continuationTimeout = continuationTimeout; } public boolean isPerformInvocation() { return performInvocation; } /** * When the option is true, Camel will perform the invocation of the resource class instance and put the response * object into the exchange for further processing. */ public void setPerformInvocation(boolean performInvocation) { this.performInvocation = performInvocation; } public boolean isPropagateContexts() { return propagateContexts; } /** * When the option is true, JAXRS UriInfo, HttpHeaders, Request and SecurityContext contexts will be available to * custom CXFRS processors as typed Camel exchange properties. These contexts can be used to analyze the current * requests using JAX-RS API. */ public void setPropagateContexts(boolean propagateContexts) { this.propagateContexts = propagateContexts; } private static class InterceptorHolder extends AbstractBasicInterceptorProvider { } public SSLContextParameters getSslContextParameters() { return sslContextParameters; } /** * The Camel SSL setting reference. Use the # notation to reference the SSL Context. */ public void setSslContextParameters(SSLContextParameters sslContextParameters) { this.sslContextParameters = sslContextParameters; } public HostnameVerifier getHostnameVerifier() { return hostnameVerifier; } /** * The hostname verifier to be used. Use the # notation to reference a HostnameVerifier from the registry. */ public void setHostnameVerifier(HostnameVerifier hostnameVerifier) { this.hostnameVerifier = hostnameVerifier; } public CxfRsConfigurer getCxfRsConfigurer() { return cxfRsConfigurer; } /** * This option could apply the implementation of org.apache.camel.component.cxf.jaxrs.CxfRsEndpointConfigurer which * supports to configure the CXF endpoint in programmatic way. User can configure the CXF server and client by * implementing configure{Server/Client} method of CxfEndpointConfigurer. */ public void setCxfRsConfigurer(CxfRsConfigurer configurer) { this.cxfRsConfigurer = configurer; } public CookieHandler getCookieHandler() { return cookieHandler; } /** * Configure a cookie handler to maintain a HTTP session */ public void setCookieHandler(CookieHandler cookieHandler) { this.cookieHandler = cookieHandler; } }
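/*
 * Illustrative route sketch (not part of the original sources): a minimal Camel
 * route exposing a JAX-RS resource through the cxfrs endpoint above. The
 * address and the resource class name org.example.CustomerResource are
 * placeholders; with bindingStyle=SimpleConsumer the route receives mapped
 * headers and body instead of a raw MessageContentsList, as described in
 * setBindingStyle().
 */
import org.apache.camel.builder.RouteBuilder;

class CxfRsRouteDemo extends RouteBuilder {
    @Override
    public void configure() {
        from("cxfrs:http://localhost:9000/rest"
                + "?resourceClasses=org.example.CustomerResource"
                + "&bindingStyle=SimpleConsumer")
            .log("Received REST request: ${body}")
            .setBody(constant("OK"));
    }
}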
/* * Copyright 2011 - 2013 NTB University of Applied Sciences in Technology * Buchs, Switzerland, http://www.ntb.ch/inf * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.deepjava.strings; import java.io.PrintStream; import org.deepjava.host.StdStreams; public class StringTable { static final boolean verbose = false, testAssertion = true; static PrintStream vrb = StdStreams.vrb; private static StringTable strTab; public HString undefIdent; private HString[] tab; private int nofEntries; private final int hashCodeMask; public static void resetTable(){ if (strTab != null){ strTab.tab = new HString[strTab.tab.length]; strTab.nofEntries = 0; if (strTab.undefIdent != null) strTab.undefIdent = strTab.insertCondAndGetEntry(strTab.undefIdent ); } } public static int hashCode(char[] val, int length) { int hashCode = 0; for (int off = 0; off < length; off++) hashCode = 31 * hashCode + val[off]; return hashCode; } public static void createSingleton(int initialTabLength, HString undefIdent) { assert strTab == null; if(verbose) vrb.println("createSingleton: undefIdent="+undefIdent); strTab = new StringTable(initialTabLength, undefIdent); } public static void createSingleton(int initialTabLength, String undefIdent) { HString uid = HString.getHString(undefIdent); createSingleton(initialTabLength, uid); } public static StringTable getInstance() { return strTab; } public int length(){ return nofEntries; } private StringTable(int initialTabLength, HString undefIdent) { int mask = 4; while (mask < initialTabLength) mask <<= 1; tab = new HString[mask]; hashCodeMask = mask - 1; this.undefIdent = this.insertCondAndGetEntry(undefIdent); } public HString getEntry(HString hstring) { int hash = hstring.hash; int lvArrayLength = hstring.arrayLen; int lvLength = hstring.length; HString entry = tab[hash & hashCodeMask]; while (entry != null && lvLength > entry.length) entry = entry.next; HString foundStr = null; while (entry != null && lvLength == entry.length) { if (lvArrayLength == entry.arrayLen && hash == entry.hash) { char[] eHchars = entry.chars; char[] hHchars = hstring.chars; lvArrayLength--; while (lvArrayLength >= 0 && eHchars[lvArrayLength] == hHchars[lvArrayLength]) lvArrayLength--; if (lvArrayLength < 0) { foundStr = hstring; break; } } entry = entry.next; } return foundStr; } public HString insertCondAndGetEntry(char[] jchars, int length) { int hashCode = hashCode(jchars, length); int index = hashCode & hashCodeMask; HString entry = tab[index]; HString pred = null; while (entry != null && length > entry.length) { pred = entry; entry = entry.next; } HString foundStr = null; while (entry != null && length == entry.length) { if (hashCode == entry.hash) { if (entry.equals(jchars, length)) { foundStr = entry; break; } } entry = entry.next; } if (foundStr == null) { nofEntries++; if( HString.isH8CharArray(jchars, length) ) foundStr = new H8String(jchars, length); else foundStr = new H16String(jchars, length); if (pred == null) { foundStr.next = tab[index]; tab[index] = foundStr; }else{ foundStr.next = pred.next; 
pred.next = foundStr; } } return foundStr; } public HString insertCondAndGetEntry(HString newString) { if (verbose) {vrb.print(">insertCondAndGetString_HS: newString="); vrb.println(newString); } int length = newString.length(); int hashCode = newString.hashCode(); int tabIndex = hashCode & hashCodeMask; HString entry = tab[tabIndex]; HString pred = null; while (entry != null && length > entry.length) { pred = entry; entry = entry.next; } HString foundStr = null; while (entry != null && length == entry.length) { if (hashCode == entry.hash) { if(verbose) { vrb.print(">insertCondAndGetString_HS 10: length="+length + ", entry.length="+(int)entry.length+ ", entry: "); vrb.println(entry); } if (newString.equals(entry)) { if(verbose) vrb.println("<str found>"); foundStr = entry; break; } } entry = entry.next; } if (verbose) { vrb.print(">insertCondAndGetString_HS 20: length="+length); if(entry == null) vrb.print(" entry==null "); else vrb.println(entry); vrb.println(); } if (foundStr == null) { // insert new String nofEntries++; foundStr = newString; if (pred == null) { newString.next = tab[tabIndex]; tab[tabIndex] = newString; } else { newString.next = pred.next; pred.next = newString; } } if(verbose) { vrb.print("<insertCondAndGetString_HS: foundStr="); vrb.println(foundStr);} return foundStr; } public HString insertCondAndGetEntry(String jstring) { int jlength = jstring.length(); char[] jchars = new char[jlength]; jstring.getChars(0, jlength, jchars, 0); return insertCondAndGetEntry(jchars, jlength); } //--- debug primitives: public void printHeadLine() { vrb.println("\n entry hashCode length arrLen uFlags string"); } public void print(String title) { vrb.print("\n\n"+title + ", length=" + nofEntries + ", hashTab.length="+ tab.length); vrb.printf(", loadFactor=%1$4.2f", (float)nofEntries/tab.length); int lineNr = 0; for (int tinx=0; tinx < tab.length; tinx++){ HString entry = tab[tinx]; if (entry != null) { if( (lineNr & (32-1)) == 0) printHeadLine(); lineNr++; vrb.printf(" [%1$4d]", tinx); while (entry != null) { vrb.printf("\t0x%1$8x %2$6d %3$6d 0x%4$4x \"%5$s\"\n" , entry.hash, (int)entry.length, (int)entry.arrayLen, entry.flags, entry); if(verbose){ if(entry.arrayLen == 1) vrb.printf("\t[0]=0x%1$4x\n", (int)entry.chars[0]); else if(entry.arrayLen == 2) vrb.printf("\t[0]=0x%1$4x, [0]=0x%1$4x\n", (int)entry.chars[0], (int)entry.chars[1]); else if(entry.arrayLen > 2) vrb.printf("\t[0]=0x%1$4x, [1]=0x%2$4x, [len-1]=0x%3$4x\n", (int)entry.chars[0], (int)entry.chars[1], (int)entry.chars[entry.arrayLen-1]); } entry = entry.next; } } } } // public static void main(String[] args) { // char[] chars = new char[] { 'a', 'b', 'c', 'd'}; // // StringTable st = new StringTable(7); // st.print("String Table:"); Out.println(); // // st.insertCondAndGetString(chars, 2); // st.print("String Table 1:"); Out.println(); // // st.insertCondAndGetString(chars, 2); // st.print("String Table 2:"); Out.println(); // // st.insertCondAndGetString(chars, 1); // st.print("String Table 3:"); Out.println(); // // st.insertCondAndGetString(chars, 4); // st.print("String Table 4:"); Out.println(); // // st.insertCondAndGetString(chars, 3); // st.print("String Table 5:"); Out.println(); // // chars[0] = 'b'; // st.insertCondAndGetString(chars, 3); // st.print("String Table 6:"); Out.println(); // st.insertCondAndGetString(chars, 4); // st.print("String Table 7:"); Out.println(); // // chars = new char[] { ' ', ' ', ' ', ' ', ' ', ' ', ' '}; // st.insertCondAndGetString(chars, 6); // st.print("String Table 11:"); 
//  Out.println();
//  st.insertCondAndGetString(chars, 7);
//  st.print("String Table 12:"); Out.println();
// }
}
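// --- Editor's usage sketch (not part of the original sources) -----------------
// A minimal example of how the interning table above is meant to be used. The
// class name, the table size and the "<undef>" marker are illustrative only;
// the org.deepjava.strings classes above are assumed to be on the classpath.
// Equal strings resolve to one shared HString instance, so identity comparison
// is sufficient after interning.
package org.deepjava.strings;

public class StringTableUsageSketch {
    public static void main(String[] args) {
        StringTable.createSingleton(64, "<undef>");           // size is rounded up internally to a power of two
        StringTable strTab = StringTable.getInstance();

        HString a = strTab.insertCondAndGetEntry("println");
        HString b = strTab.insertCondAndGetEntry("println");  // second insert returns the existing entry

        System.out.println(a == b);           // true: interned strings share one instance
        System.out.println(strTab.length());  // 2: "<undef>" plus "println"
    }
}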
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.apache.hadoop.hbase.util; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang.math.RandomUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Consistency; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; /** Creates multiple threads that read and verify previously written data */ public class MultiThreadedReader extends MultiThreadedAction { private static final Log LOG = LogFactory.getLog(MultiThreadedReader.class); protected Set<HBaseReaderThread> readers = new HashSet<HBaseReaderThread>(); private final double verifyPercent; protected volatile boolean aborted; protected MultiThreadedWriterBase writer = null; /** * The number of keys verified in a sequence. This will never be larger than * the total number of keys in the range. The reader might also verify * random keys when it catches up with the writer. */ private final AtomicLong numUniqueKeysVerified = new AtomicLong(); /** * Default maximum number of read errors to tolerate before shutting down all * readers. */ public static final int DEFAULT_MAX_ERRORS = 10; /** * Default "window" size between the last key written by the writer and the * key that we attempt to read. The lower this number, the stricter our * testing is. If this is zero, we always attempt to read the highest key * in the contiguous sequence of keys written by the writers. 
*/ public static final int DEFAULT_KEY_WINDOW = 0; /** * Default batch size for multigets */ public static final int DEFAULT_BATCH_SIZE = 1; //translates to simple GET (no multi GET) protected AtomicLong numKeysVerified = new AtomicLong(0); protected AtomicLong numReadErrors = new AtomicLong(0); protected AtomicLong numReadFailures = new AtomicLong(0); protected AtomicLong nullResult = new AtomicLong(0); private int maxErrors = DEFAULT_MAX_ERRORS; private int keyWindow = DEFAULT_KEY_WINDOW; private int batchSize = DEFAULT_BATCH_SIZE; private int regionReplicaId = -1; // particular region replica id to do reads against if set public MultiThreadedReader(LoadTestDataGenerator dataGen, Configuration conf, TableName tableName, double verifyPercent) throws IOException { super(dataGen, conf, tableName, "R"); this.verifyPercent = verifyPercent; } public void linkToWriter(MultiThreadedWriterBase writer) { this.writer = writer; writer.setTrackWroteKeys(true); } public void setMaxErrors(int maxErrors) { this.maxErrors = maxErrors; } public void setKeyWindow(int keyWindow) { this.keyWindow = keyWindow; } public void setMultiGetBatchSize(int batchSize) { this.batchSize = batchSize; } public void setRegionReplicaId(int regionReplicaId) { this.regionReplicaId = regionReplicaId; } @Override public void start(long startKey, long endKey, int numThreads) throws IOException { super.start(startKey, endKey, numThreads); if (verbose) { LOG.debug("Reading keys [" + startKey + ", " + endKey + ")"); } addReaderThreads(numThreads); startThreads(readers); } protected void addReaderThreads(int numThreads) throws IOException { for (int i = 0; i < numThreads; ++i) { HBaseReaderThread reader = createReaderThread(i); readers.add(reader); } } protected HBaseReaderThread createReaderThread(int readerId) throws IOException { HBaseReaderThread reader = new HBaseReaderThread(readerId); Threads.setLoggingUncaughtExceptionHandler(reader); return reader; } public class HBaseReaderThread extends Thread { protected final int readerId; protected final Table table; /** The "current" key being read. Increases from startKey to endKey. */ private long curKey; /** Time when the thread started */ protected long startTimeMs; /** If we are ahead of the writer and reading a random key. 
*/ private boolean readingRandomKey; private boolean printExceptionTrace = true; /** * @param readerId only the keys with this remainder from division by * {@link #numThreads} will be read by this thread */ public HBaseReaderThread(int readerId) throws IOException { this.readerId = readerId; table = createTable(); setName(getClass().getSimpleName() + "_" + readerId); } protected HTableInterface createTable() throws IOException { return connection.getTable(tableName); } @Override public void run() { try { runReader(); } finally { closeTable(); numThreadsWorking.decrementAndGet(); } } protected void closeTable() { try { if (table != null) { table.close(); } } catch (IOException e) { LOG.error("Error closing table", e); } } private void runReader() { if (verbose) { LOG.info("Started thread #" + readerId + " for reads..."); } startTimeMs = System.currentTimeMillis(); curKey = startKey; long [] keysForThisReader = new long[batchSize]; while (curKey < endKey && !aborted) { int readingRandomKeyStartIndex = -1; int numKeys = 0; // if multiGet, loop until we have the number of keys equal to the batch size do { long k = getNextKeyToRead(); if (k < startKey || k >= endKey) { numReadErrors.incrementAndGet(); throw new AssertionError("Load tester logic error: proposed key " + "to read " + k + " is out of range (startKey=" + startKey + ", endKey=" + endKey + ")"); } if (k % numThreads != readerId || writer != null && writer.failedToWriteKey(k)) { // Skip keys that this thread should not read, as well as the keys // that we know the writer failed to write. continue; } keysForThisReader[numKeys] = k; if (readingRandomKey && readingRandomKeyStartIndex == -1) { //store the first index of a random read readingRandomKeyStartIndex = numKeys; } numKeys++; } while (numKeys < batchSize && curKey < endKey && !aborted); if (numKeys > 0) { //meaning there is some key to read readKey(keysForThisReader); // We have verified some unique key(s). numUniqueKeysVerified.getAndAdd(readingRandomKeyStartIndex == -1 ? numKeys : readingRandomKeyStartIndex); } } } /** * Should only be used for the concurrent writer/reader workload. The * maximum key we are allowed to read, subject to the "key window" * constraint. */ private long maxKeyWeCanRead() { long insertedUpToKey = writer.wroteUpToKey(); if (insertedUpToKey >= endKey - 1) { // The writer has finished writing our range, so we can read any // key in the range. return endKey - 1; } return Math.min(endKey - 1, writer.wroteUpToKey() - keyWindow); } protected long getNextKeyToRead() { readingRandomKey = false; if (writer == null || curKey <= maxKeyWeCanRead()) { return curKey++; } // We caught up with the writer. See if we can read any keys at all. long maxKeyToRead; while ((maxKeyToRead = maxKeyWeCanRead()) < startKey) { // The writer has not written sufficient keys for us to be able to read // anything at all. Sleep a bit. This should only happen in the // beginning of a load test run. Threads.sleepWithoutInterrupt(50); } if (curKey <= maxKeyToRead) { // The writer wrote some keys, and we are now allowed to read our // current key. return curKey++; } // startKey <= maxKeyToRead <= curKey - 1. Read one of the previous keys. // Don't increment the current key -- we still have to try reading it // later. Set a flag to make sure that we don't count this key towards // the set of unique keys we have verified. 
readingRandomKey = true; return startKey + Math.abs(RandomUtils.nextLong()) % (maxKeyToRead - startKey + 1); } private Get[] readKey(long[] keysToRead) { Get [] gets = new Get[keysToRead.length]; int i = 0; for (long keyToRead : keysToRead) { try { gets[i] = createGet(keyToRead); if (keysToRead.length == 1) { queryKey(gets[i], RandomUtils.nextInt(100) < verifyPercent, keyToRead); } i++; } catch (IOException e) { numReadFailures.addAndGet(1); LOG.debug("[" + readerId + "] FAILED read, key = " + (keyToRead + "") + ", time from start: " + (System.currentTimeMillis() - startTimeMs) + " ms"); if (printExceptionTrace) { LOG.warn(e); printExceptionTrace = false; } } } if (keysToRead.length > 1) { try { queryKey(gets, RandomUtils.nextInt(100) < verifyPercent, keysToRead); } catch (IOException e) { numReadFailures.addAndGet(gets.length); for (long keyToRead : keysToRead) { LOG.debug("[" + readerId + "] FAILED read, key = " + (keyToRead + "") + ", time from start: " + (System.currentTimeMillis() - startTimeMs) + " ms"); } if (printExceptionTrace) { LOG.warn(e); printExceptionTrace = false; } } } return gets; } protected Get createGet(long keyToRead) throws IOException { Get get = new Get(dataGenerator.getDeterministicUniqueKey(keyToRead)); String cfsString = ""; byte[][] columnFamilies = dataGenerator.getColumnFamilies(); for (byte[] cf : columnFamilies) { get.addFamily(cf); if (verbose) { if (cfsString.length() > 0) { cfsString += ", "; } cfsString += "[" + Bytes.toStringBinary(cf) + "]"; } } get = dataGenerator.beforeGet(keyToRead, get); if (regionReplicaId > 0) { get.setReplicaId(regionReplicaId); get.setConsistency(Consistency.TIMELINE); } if (verbose) { LOG.info("[" + readerId + "] " + "Querying key " + keyToRead + ", cfs " + cfsString); } return get; } public void queryKey(Get[] gets, boolean verify, long[] keysToRead) throws IOException { // read the data long start = System.nanoTime(); // Uses multi/batch gets Result[] results = table.get(Arrays.asList(gets)); long end = System.nanoTime(); verifyResultsAndUpdateMetrics(verify, gets, end - start, results, table, false); } public void queryKey(Get get, boolean verify, long keyToRead) throws IOException { // read the data long start = System.nanoTime(); // Uses simple get Result result = table.get(get); long end = System.nanoTime(); verifyResultsAndUpdateMetrics(verify, get, end - start, result, table, false); } protected void verifyResultsAndUpdateMetrics(boolean verify, Get[] gets, long elapsedNano, Result[] results, Table table, boolean isNullExpected) throws IOException { totalOpTimeMs.addAndGet(elapsedNano / 1000000); numKeys.addAndGet(gets.length); int i = 0; for (Result result : results) { verifyResultsAndUpdateMetricsOnAPerGetBasis(verify, gets[i++], result, table, isNullExpected); } } protected void verifyResultsAndUpdateMetrics(boolean verify, Get get, long elapsedNano, Result result, Table table, boolean isNullExpected) throws IOException { verifyResultsAndUpdateMetrics(verify, new Get[]{get}, elapsedNano, new Result[]{result}, table, isNullExpected); } private void verifyResultsAndUpdateMetricsOnAPerGetBasis(boolean verify, Get get, Result result, Table table, boolean isNullExpected) throws IOException { if (!result.isEmpty()) { if (verify) { numKeysVerified.incrementAndGet(); } } else { HRegionLocation hloc = connection.getRegionLocation(tableName, get.getRow(), false); String rowKey = Bytes.toString(get.getRow()); LOG.info("Key = " + rowKey + ", Region location: " + hloc); if(isNullExpected) { nullResult.incrementAndGet(); 
LOG.debug("Null result obtained for the key ="+rowKey); return; } } boolean isOk = verifyResultAgainstDataGenerator(result, verify, false); long numErrorsAfterThis = 0; if (isOk) { long cols = 0; // Count the columns for reporting purposes. for (byte[] cf : result.getMap().keySet()) { cols += result.getFamilyMap(cf).size(); } numCols.addAndGet(cols); } else { if (writer != null) { LOG.error("At the time of failure, writer wrote " + writer.numKeys.get() + " keys"); } numErrorsAfterThis = numReadErrors.incrementAndGet(); } if (numErrorsAfterThis > maxErrors) { LOG.error("Aborting readers -- found more than " + maxErrors + " errors"); aborted = true; } } } public long getNumReadFailures() { return numReadFailures.get(); } public long getNumReadErrors() { return numReadErrors.get(); } public long getNumKeysVerified() { return numKeysVerified.get(); } public long getNumUniqueKeysVerified() { return numUniqueKeysVerified.get(); } public long getNullResultsCount() { return nullResult.get(); } @Override protected String progressInfo() { StringBuilder sb = new StringBuilder(); appendToStatus(sb, "verified", numKeysVerified.get()); appendToStatus(sb, "READ FAILURES", numReadFailures.get()); appendToStatus(sb, "READ ERRORS", numReadErrors.get()); appendToStatus(sb, "NULL RESULT", nullResult.get()); return sb.toString(); } }
// Name: SshDriverFixture // Author: Edward Jakubowski [email protected] // Last update: 12/23/2013 // Description: This Fixture adds support to connect to ssh servers and execute commands. // Requirements: jsch library // <dependency> // <groupId>com.jcraft</groupId> // <artifactId>jsch</artifactId> // <version>0.1.48</version> // </dependency> // Examples: package org.qedsys.fitnesse; import java.io.*; import com.jcraft.jsch.*; public class SshDriverFixture { Session session = null; String passwordPromptStr = " password"; int shellCommandResponseDelay = 10; // seconds String lastResults = ""; public static void main(String[] args) { System.out.println("starting driver..."); System.out.println("done."); } public SshDriverFixture () { } //example: //| connect to | host | with user | username | and password | password | public boolean connectToWithUserAndPassword(String hostname, String sshUser, String sshPwd) { return connect(hostname, sshUser, sshPwd); } public boolean connect(String hostname, String sshUser, String sshPwd) { try { sshPwd = org.oasis.plugin.Util.processDecryptionString(sshPwd); JSch jsch = new JSch(); session = jsch.getSession(sshUser, hostname, 22); session.setPassword(sshPwd); //ignore hostkeychecking (adding the host to the ~/.ssh/known_hosts session.setConfig("StrictHostKeyChecking", "no"); session.connect(); } catch (Exception e) { e.printStackTrace(); return false; } return true; } public String executeCommand(String command) { String output = ""; lastResults = ""; try { Channel channel = session.openChannel("exec"); ((ChannelExec)channel).setCommand(command); channel.setInputStream(null); ((ChannelExec)channel).setErrStream(System.err); InputStream in = channel.getInputStream(); //((ChannelExec)channel).setPty(true); channel.connect(); byte[] tmp=new byte[1024]; while(true) { while(in.available() > 0) { int i = in.read(tmp, 0, 1024); if ( i < 0 ) break; String line = new String(tmp, 0, i); output += line; //System.out.print(line); } if(channel.isClosed()){ //System.out.println("exit-status: " + channel.getExitStatus()); break; } try{Thread.sleep(1000);}catch(Exception ee){} } channel.disconnect(); } catch (Exception ex) { ex.printStackTrace(); } lastResults = output; return output; } public void setShellCommandResponseDelay(int delay) { shellCommandResponseDelay = delay; } public void setPasswordPromptString(String passwordPrompt) { passwordPromptStr = passwordPrompt; } //this is similar to sshpass: sshpass -ptest1324 ssh -o "StrictHostKeyChecking no" user@host ls -l /tmp //example: //| execute command | ssh -o "StrictHostKeyChecking no" user@host ls -l /tmp | provide password | test1234 | public String executeCommandProvidePassword(String command, String password) { String output = ""; lastResults = ""; try { password = org.oasis.plugin.Util.processDecryptionString(password); //System.out.println("SSH Session executing command: " + command); Channel channel = session.openChannel("shell"); InputStream in=channel.getInputStream(); OutputStream out=channel.getOutputStream(); channel.connect(); //Thread.sleep(1000); out.write((command + "\n").getBytes()); out.flush(); //Thread.sleep(1000); boolean pwdFlg = false; int timeout = shellCommandResponseDelay; //10 seconds byte[] tmp=new byte[1024]; while(true) { while(in.available() > 0) { int i = in.read(tmp, 0, 1024); if ( i < 0 ) break; String line = new String(tmp, 0, i); output += line; //System.out.print(line); timeout = shellCommandResponseDelay; } // if password was requested, then send the password if 
(output.contains(passwordPromptStr) && !pwdFlg) { pwdFlg = true; //System.out.println("SSH Session sending password"); out.write((password + "\n").getBytes()); out.flush(); } if(channel.isClosed()){ //System.out.println("exit-status: " + channel.getExitStatus()); break; } try{Thread.sleep(1000);}catch(Exception ee){} --timeout; if (timeout <= 0) break; } channel.disconnect(); } catch (Exception ex) { ex.printStackTrace(); } //System.out.println("SSH Session command completed"); lastResults = output; return output; } public String getLastResults() { return lastResults; } public void setLastResults(String results) { lastResults = results; } //clear out any unicode or weird ascii codes public String sanitizeResults() { lastResults = lastResults.replaceAll("[^\\x09-\\x0b\\x20-\\x7e]", ""); return lastResults; } public String replaceAllWith(String pattern, String replacement) { //(".*[^\\d](\\d+).*", "$1") return lastResults.replaceAll(pattern, replacement); } public boolean executeCommandEqualsResult(String command, String expectString) { String result = executeCommand(command); return result.equals(expectString); } public boolean executeCommandContainsResult(String command, String expectString) { String result = executeCommand(command); return result.contains(expectString); } public String usingStringReplaceAllWith(String source, String pattern, String replacement) { //(".*[^\\d](\\d+).*", "$1") //("(?m)^\s+$", "") return source.replaceAll(pattern, replacement); } public static String newString(String val) { return new String(val); } public boolean stringContains(String str, String expectString) { return str.contains(expectString); } public boolean stringDoesNotContain(String str, String expectString) { return !str.contains(expectString); } public boolean stringMatches(String str, String regexString) { return str.matches(regexString); } public boolean stringDoesNotMatch(String str, String regexString) { return !str.matches(regexString); } public boolean disconnect() { try { if (session == null) return false; session.disconnect(); } catch (Exception e) { e.printStackTrace(); return false; } return true; } public boolean getSshPowerState(String hostname, String sshUser, String sshPwd) { JSch jsch = new JSch(); Session session; try { session = jsch.getSession(sshUser, hostname, 22); session.setPassword(sshPwd); //ignore hostkeychecking (adding the host to the ~/.ssh/known_hosts session.setConfig("StrictHostKeyChecking", "no"); session.connect(5000); } catch (Exception ex) { return false; } session.disconnect(); return true; } }
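// --- Editor's usage sketch (not part of the original fixture) -----------------
// Drives the fixture above directly from Java instead of a FitNesse table. The
// host name and credentials are placeholders; the jsch jar and the
// org.oasis.plugin.Util helper used by connect() are assumed to be available.
public class SshDriverFixtureSketch {
    public static void main(String[] args) {
        org.qedsys.fitnesse.SshDriverFixture ssh = new org.qedsys.fitnesse.SshDriverFixture();
        if (ssh.connect("host.example.com", "user", "secret")) {  // placeholder credentials
            System.out.println(ssh.executeCommand("ls -l /tmp")); // runs over an "exec" channel
            System.out.println(ssh.sanitizeResults());            // same output, control characters stripped
            ssh.disconnect();
        }
    }
}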
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.sunshine.app.data; import android.content.ContentProvider; import android.content.ContentUris; import android.content.ContentValues; import android.content.UriMatcher; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteQueryBuilder; import android.net.Uri; public class WeatherProvider extends ContentProvider { // The URI Matcher used by this content provider. private static final UriMatcher sUriMatcher = buildUriMatcher(); private WeatherDbHelper mOpenHelper; static final int WEATHER = 100; static final int WEATHER_WITH_LOCATION = 101; static final int WEATHER_WITH_LOCATION_AND_DATE = 102; static final int LOCATION = 300; static final int LOCATION_ID = 301; private static final SQLiteQueryBuilder sWeatherByLocationSettingQueryBuilder; static{ sWeatherByLocationSettingQueryBuilder = new SQLiteQueryBuilder(); sWeatherByLocationSettingQueryBuilder.setTables( WeatherContract.WeatherEntry.TABLE_NAME + " INNER JOIN " + WeatherContract.LocationEntry.TABLE_NAME + " ON " + WeatherContract.WeatherEntry.TABLE_NAME + "." + WeatherContract.WeatherEntry.COLUMN_LOC_KEY + " = " + WeatherContract.LocationEntry.TABLE_NAME + "." + WeatherContract.LocationEntry._ID); } private static final String sLocationSettingSelection = WeatherContract.LocationEntry.TABLE_NAME+ "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? "; private static final String sLocationSettingWithStartDateSelection = WeatherContract.LocationEntry.TABLE_NAME+ "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " + WeatherContract.WeatherEntry.COLUMN_DATE + " >= ? "; private static final String sLocationSettingAndDaySelection = WeatherContract.LocationEntry.TABLE_NAME + "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " + WeatherContract.WeatherEntry.COLUMN_DATE + " = ? 
"; private Cursor getWeatherByLocationSetting(Uri uri, String[] projection, String sortOrder) { String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri); long startDate = WeatherContract.WeatherEntry.getStartDateFromUri(uri); String[] selectionArgs; String selection; if (startDate == 0) { selection = sLocationSettingSelection; selectionArgs = new String[]{locationSetting}; } else { selectionArgs = new String[]{locationSetting, Long.toString(startDate)}; selection = sLocationSettingWithStartDateSelection; } return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(), projection, selection, selectionArgs, null, null, sortOrder ); } private Cursor getWeatherByLocationSettingAndDate( Uri uri, String[] projection, String sortOrder) { String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri); long date = WeatherContract.WeatherEntry.getDateFromUri(uri); return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(), projection, sLocationSettingAndDaySelection, new String[]{locationSetting, Long.toString(date)}, null, null, sortOrder ); } static UriMatcher buildUriMatcher() { // I know what you're thinking. Why create a UriMatcher when you can use regular // expressions instead? Because you're not crazy, that's why. // All paths added to the UriMatcher have a corresponding code to return when a match is // found. The code passed into the constructor represents the code to return for the root // URI. It's common to use NO_MATCH as the code for this case. final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH); final String authority = WeatherContract.CONTENT_AUTHORITY; // For each type of URI you want to add, create a corresponding code. matcher.addURI(authority, WeatherContract.PATH_WEATHER, WEATHER); matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*", WEATHER_WITH_LOCATION); matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*/#", WEATHER_WITH_LOCATION_AND_DATE); matcher.addURI(authority, WeatherContract.PATH_LOCATION, LOCATION); matcher.addURI(authority, WeatherContract.PATH_LOCATION + "/#", LOCATION_ID); return matcher; } @Override public boolean onCreate() { mOpenHelper = new WeatherDbHelper(getContext()); return true; } @Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { // Here's the switch statement that, given a URI, will determine what kind of request it is, // and query the database accordingly. 
Cursor retCursor; switch (sUriMatcher.match(uri)) { // "weather/*/*" case WEATHER_WITH_LOCATION_AND_DATE: { retCursor = getWeatherByLocationSettingAndDate(uri, projection, sortOrder); break; } // "weather/*" case WEATHER_WITH_LOCATION: { retCursor = getWeatherByLocationSetting(uri, projection, sortOrder); break; } // "weather" case WEATHER: { retCursor = mOpenHelper.getReadableDatabase().query( WeatherContract.WeatherEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder ); break; } // "location/*" case LOCATION_ID: { retCursor = mOpenHelper.getReadableDatabase().query( WeatherContract.LocationEntry.TABLE_NAME, projection, WeatherContract.LocationEntry._ID + " = '" + ContentUris.parseId(uri) + "'", null, null, null, sortOrder ); break; } // "location" case LOCATION: { retCursor = mOpenHelper.getReadableDatabase().query( WeatherContract.LocationEntry.TABLE_NAME, projection, selection, selectionArgs, null, null, sortOrder ); break; } default: throw new UnsupportedOperationException("Unknown uri: " + uri); } retCursor.setNotificationUri(getContext().getContentResolver(), uri); return retCursor; } @Override public String getType(Uri uri) { // Use the Uri Matcher to determine what kind of URI this is. final int match = sUriMatcher.match(uri); switch (match) { case WEATHER_WITH_LOCATION_AND_DATE: return WeatherContract.WeatherEntry.CONTENT_ITEM_TYPE; case WEATHER_WITH_LOCATION: return WeatherContract.WeatherEntry.CONTENT_TYPE; case WEATHER: return WeatherContract.WeatherEntry.CONTENT_TYPE; case LOCATION: return WeatherContract.LocationEntry.CONTENT_TYPE; case LOCATION_ID: return WeatherContract.LocationEntry.CONTENT_ITEM_TYPE; default: throw new UnsupportedOperationException("Unknown uri: " + uri); } } @Override public Uri insert(Uri uri, ContentValues values) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); Uri returnUri; switch (match) { case WEATHER: { normalizeDate(values); long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, values); if ( _id > 0 ) returnUri = WeatherContract.WeatherEntry.buildWeatherUri(_id); else throw new android.database.SQLException("Failed to insert row into " + uri); break; } case LOCATION: { long _id = db.insert(WeatherContract.LocationEntry.TABLE_NAME, null, values); if ( _id > 0 ) returnUri = WeatherContract.LocationEntry.buildLocationUri(_id); else throw new android.database.SQLException("Failed to insert row into " + uri); break; } default: throw new UnsupportedOperationException("Unknown uri: " + uri); } getContext().getContentResolver().notifyChange(uri, null); return returnUri; } @Override public int delete(Uri uri, String selection, String[] selectionArgs) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); int rowsDeleted; switch (match) { case WEATHER: rowsDeleted = db.delete( WeatherContract.WeatherEntry.TABLE_NAME, selection, selectionArgs); break; case LOCATION: rowsDeleted = db.delete( WeatherContract.LocationEntry.TABLE_NAME, selection, selectionArgs); break; default: throw new UnsupportedOperationException("Unknown uri: " + uri); } // Because a null deletes all rows if (selection == null || rowsDeleted != 0) { getContext().getContentResolver().notifyChange(uri, null); } return rowsDeleted; } private void normalizeDate(ContentValues values) { // normalize the date value if (values.containsKey(WeatherContract.WeatherEntry.COLUMN_DATE)) { long dateValue = 
values.getAsLong(WeatherContract.WeatherEntry.COLUMN_DATE); values.put(WeatherContract.WeatherEntry.COLUMN_DATE, WeatherContract.normalizeDate(dateValue)); } } @Override public int update( Uri uri, ContentValues values, String selection, String[] selectionArgs) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); int rowsUpdated; switch (match) { case WEATHER: normalizeDate(values); rowsUpdated = db.update(WeatherContract.WeatherEntry.TABLE_NAME, values, selection, selectionArgs); break; case LOCATION: rowsUpdated = db.update(WeatherContract.LocationEntry.TABLE_NAME, values, selection, selectionArgs); break; default: throw new UnsupportedOperationException("Unknown uri: " + uri); } if (rowsUpdated != 0) { getContext().getContentResolver().notifyChange(uri, null); } return rowsUpdated; } @Override public int bulkInsert(Uri uri, ContentValues[] values) { final SQLiteDatabase db = mOpenHelper.getWritableDatabase(); final int match = sUriMatcher.match(uri); switch (match) { case WEATHER: db.beginTransaction(); int returnCount = 0; try { for (ContentValues value : values) { normalizeDate(value); long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, value); if (_id != -1) { returnCount++; } } db.setTransactionSuccessful(); } finally { db.endTransaction(); } getContext().getContentResolver().notifyChange(uri, null); return returnCount; default: return super.bulkInsert(uri, values); } } }
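// --- Editor's caller sketch (not part of the provider) ------------------------
// Queries the provider above through a ContentResolver using only the contract
// constants referenced in this file. The class name and the way the URI is
// assembled here are illustrative; a real caller would normally use the
// contract's own URI-building helpers.
package com.example.android.sunshine.app.data;

import android.content.Context;
import android.database.Cursor;
import android.net.Uri;

class WeatherProviderQuerySketch {
    static int countWeatherRowsForLocation(Context context, String locationSetting) {
        // "content://<authority>/weather/<locationSetting>" matches the WEATHER_WITH_LOCATION pattern ("weather/*").
        Uri weatherForLocation = Uri.parse("content://" + WeatherContract.CONTENT_AUTHORITY
                + "/" + WeatherContract.PATH_WEATHER + "/" + locationSetting);
        Cursor cursor = context.getContentResolver().query(weatherForLocation, null, null, null, null);
        if (cursor == null) {
            return 0;
        }
        try {
            return cursor.getCount();
        } finally {
            cursor.close();
        }
    }
}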
/** * Copyright 2009 Frederik De Bleser * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package simovex; import javax.imageio.ImageIO; import java.awt.*; import java.awt.image.BufferedImage; import java.awt.image.RenderedImage; import java.io.*; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.Random; /** * Main class used for movie export. * <p/> * To export a movie, use the class like this: * <pre> * Movie m = new Movie("hello.mp4", 640, 480); * for (RenderedImage img: listOfImages) { * m.addFrame(img); * } * m.save(); * </pre> */ public class Movie { public static enum CodecType { ANIMATION, FLV, H263, H264, MPEG4, RAW, THEORA, WMV } public static enum CompressionQuality { LOW, MEDIUM, HIGH, BEST } private static final File FFMPEG_BINARY; private static final String TEMPORARY_FILE_PREFIX = "sme"; private static final String FFMPEG_PRESET_TEMPLATE = "res/ffpresets/libx264-%s.ffpreset"; private static final Map<CodecType, String> codecTypeMap; private static final Map<CompressionQuality, String> compressionQualityMap; static { String osName = System.getProperty("os.name").split("\\s")[0]; // If we provide a binary for this system, use it. Otherwise, see if a default "ffmpeg" binary exists. String binaryName = "ffmpeg"; if (osName.equals("Windows")) binaryName = "ffmpeg.exe"; File packagedBinary = new File(String.format("platform/%s/bin/%s", osName, binaryName)); if (packagedBinary.exists()) { FFMPEG_BINARY = packagedBinary; } else { FFMPEG_BINARY = new File("/usr/bin/ffmpeg"); } codecTypeMap = new HashMap<CodecType, String>(CodecType.values().length); codecTypeMap.put(CodecType.ANIMATION, "qtrle"); codecTypeMap.put(CodecType.FLV, "flv"); codecTypeMap.put(CodecType.H263, "h263"); codecTypeMap.put(CodecType.H264, "libx264"); codecTypeMap.put(CodecType.MPEG4, "mpeg4"); codecTypeMap.put(CodecType.RAW, "rawvideo"); codecTypeMap.put(CodecType.WMV, "wmv"); compressionQualityMap = new HashMap<CompressionQuality, String>(CompressionQuality.values().length); compressionQualityMap.put(CompressionQuality.LOW, "baseline"); compressionQualityMap.put(CompressionQuality.MEDIUM, "default"); compressionQualityMap.put(CompressionQuality.HIGH, "hq"); compressionQualityMap.put(CompressionQuality.BEST, "lossless_max"); } private String movieFilename; private int width, height; private CodecType codecType; private CompressionQuality compressionQuality; private boolean verbose; private int frameCount = 0; private String temporaryFileTemplate; public Movie(String movieFilename, int width, int height) { this(movieFilename, width, height, CodecType.H264, CompressionQuality.BEST, false); } public Movie(String movieFilename, int width, int height, CodecType codecType, CompressionQuality compressionQuality, boolean verbose) { this.movieFilename = movieFilename; this.width = width; this.height = height; this.codecType = codecType; this.compressionQuality = compressionQuality; this.verbose = verbose; // Generate the prefix for a temporary file. 
// We generate a temporary file, then use that as the prefix for our own files. try { File tempFile = File.createTempFile(TEMPORARY_FILE_PREFIX, ""); temporaryFileTemplate = tempFile.getPath() + "-%05d.png"; tempFile.delete(); } catch (IOException e) { throw new RuntimeException(e); } } public boolean isVerbose() { return verbose; } public void setVerbose(boolean verbose) { this.verbose = verbose; } public int getFrameCount() { return frameCount; } public String getMovieFilename() { return movieFilename; } public File getMovieFile() { return new File(movieFilename); } public File temporaryFileForFrame(int frame) { return new File(String.format(temporaryFileTemplate, frame)); } /** * Add the image to the movie. * <p/> * The image size needs to be exactly the same size as the movie. * <p/> * Internally, this saves the image to a temporary image and increases the frame counter. Temporary images are * cleaned up when calling save() or if an error occurs. * * @param img the image to add to the movie. */ public void addFrame(RenderedImage img) { if (img.getWidth() != width || img.getHeight() != height) { throw new RuntimeException("Given image does not have the same size as the movie."); } try { ImageIO.write(img, "png", temporaryFileForFrame(frameCount)); frameCount++; } catch (IOException e) { cleanupAndThrowException(e); } } /** * Finishes the export and save the movie. */ public void save() { StringWriter sw = new StringWriter(); PrintWriter out = new PrintWriter(sw, true); String type = codecTypeMap.get(codecType); int bitRate = bitRateForSize(width, height); String quality = compressionQualityMap.get(compressionQuality); ArrayList<String> commandList = new ArrayList<String>(); commandList.add(FFMPEG_BINARY.getAbsolutePath()); commandList.add("-y"); // Overwrite target if exists commandList.add("-i"); commandList.add(temporaryFileTemplate); // Input images commandList.add("-vcodec"); commandList.add(type); // Target video codec if (codecType == CodecType.H264) { commandList.add("-fpre"); commandList.add(String.format(FFMPEG_PRESET_TEMPLATE, quality)); } else { commandList.add("-b"); commandList.add(bitRate + "k"); // Target bit rate } commandList.add(movieFilename); // Target file name ProcessBuilder pb = new ProcessBuilder(commandList); if (verbose) { for (String cmd : pb.command()) { System.out.print(cmd + " "); } System.out.println(); } pb.redirectErrorStream(true); Process p; try { p = pb.start(); p.getOutputStream().close(); BufferedReader in = new BufferedReader(new InputStreamReader(p.getInputStream())); String line; while ((line = in.readLine()) != null) out.println(line); p.waitFor(); if (verbose) { System.out.println(sw.toString()); } } catch (IOException e) { cleanupAndThrowException(e); } catch (InterruptedException e) { cleanupAndThrowException(e); } cleanup(); } private int bitRateForSize(int width, int height) { return 1000; } /** * Cleans up the temporary images. * <p/> * Normally you should not call this method as it is called automatically when running finish() or if an error * occurred. The only reason to call it is if you have added images and then decide you don't want to generate * a movie. In that case, instead of calling finish(), call cleanup(). * * @see #save() */ public void cleanup() { for (int i = 0; i < frameCount; i++) { temporaryFileForFrame(i).delete(); } } private void cleanupAndThrowException(Throwable t) { cleanup(); throw new RuntimeException(t); } public static void main(String[] args) { int width = 640; int height = 480; // Create a new movie. 
        Movie movie = new Movie("test.mov", width, height);
        movie.setVerbose(true);
        // Initialize an image to draw on.
        BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = (Graphics2D) img.getGraphics();
        g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        for (int frame = 0; frame < 20; frame++) {
            System.out.println("frame = " + frame);
            // Clear the canvas and draw some simple circles.
            g.setColor(Color.WHITE);
            g.fillRect(0, 0, width, height);
            Random r = new Random(0);
            for (int j = 0; j < 100; j++) {
                g.setColor(new Color(r.nextInt(255), 255, r.nextInt(255)));
                g.fillOval(r.nextInt(width) + frame, r.nextInt(height) + frame, 30, 30);
            }
            // Add the image to the movie.
            movie.addFrame(img);
        }
        // Export the movie.
        movie.save();
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.bulk; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.datastream.DeleteDataStreamAction; import org.elasticsearch.action.admin.indices.datastream.GetDataStreamAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.template.delete.DeleteComposableIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.ingest.IngestTestPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.action.DocWriteRequest.OpType.CREATE; import static org.elasticsearch.action.DocWriteResponse.Result.CREATED; import static org.elasticsearch.action.DocWriteResponse.Result.UPDATED; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.cluster.metadata.MetadataCreateDataStreamServiceTests.generateMapping; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import 
static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItemInArray; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; public class BulkIntegrationIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(IngestTestPlugin.class); } public void testBulkIndexCreatesMapping() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/bulk-log.json"); BulkRequestBuilder bulkBuilder = client().prepareBulk(); bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); bulkBuilder.get(); assertBusy(() -> { GetMappingsResponse mappingsResponse = client().admin().indices().prepareGetMappings().get(); assertTrue(mappingsResponse.getMappings().containsKey("logstash-2014.03.30")); }); } /** * This tests that the {@link TransportBulkAction} evaluates alias routing values correctly when dealing with * an alias pointing to multiple indices, while a write index exits. */ public void testBulkWithWriteIndexAndRouting() { Map<String, Integer> twoShardsSettings = Collections.singletonMap(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2); client().admin().indices().prepareCreate("index1") .addAlias(new Alias("alias1").indexRouting("0")).setSettings(twoShardsSettings).get(); client().admin().indices().prepareCreate("index2") .addAlias(new Alias("alias1").indexRouting("0").writeIndex(randomFrom(false, null))) .setSettings(twoShardsSettings).get(); client().admin().indices().prepareCreate("index3") .addAlias(new Alias("alias1").indexRouting("1").writeIndex(true)).setSettings(twoShardsSettings).get(); IndexRequest indexRequestWithAlias = new IndexRequest("alias1").id("id"); if (randomBoolean()) { indexRequestWithAlias.routing("1"); } indexRequestWithAlias.source(Collections.singletonMap("foo", "baz")); BulkResponse bulkResponse = client().prepareBulk().add(indexRequestWithAlias).get(); assertThat(bulkResponse.getItems()[0].getResponse().getIndex(), equalTo("index3")); assertThat(bulkResponse.getItems()[0].getResponse().getShardId().getId(), equalTo(0)); assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L)); assertThat(bulkResponse.getItems()[0].getResponse().status(), equalTo(RestStatus.CREATED)); assertThat(client().prepareGet("index3", "id").setRouting("1").get().getSource().get("foo"), equalTo("baz")); bulkResponse = client().prepareBulk().add(client().prepareUpdate("alias1", "id").setDoc("foo", "updated")).get(); assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); assertThat(client().prepareGet("index3", "id").setRouting("1").get().getSource().get("foo"), equalTo("updated")); bulkResponse = client().prepareBulk().add(client().prepareDelete("alias1", "id")).get(); assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); assertFalse(client().prepareGet("index3", "id").setRouting("1").get().isExists()); } // allowing the auto-generated timestamp to externally be set would allow making the index inconsistent with duplicate docs public void testExternallySetAutoGeneratedTimestamp() { IndexRequest indexRequest = 
new IndexRequest("index1").source(Collections.singletonMap("foo", "baz")); indexRequest.process(Version.CURRENT, null, null); // sets the timestamp if (randomBoolean()) { indexRequest.id("test"); } assertThat(expectThrows(IllegalArgumentException.class, () -> client().prepareBulk().add(indexRequest).get()).getMessage(), containsString("autoGeneratedTimestamp should not be set externally")); } public void testBulkWithGlobalDefaults() throws Exception { // all requests in the json are missing index and type parameters: "_index" : "test", "_type" : "type1", String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk-missing-index-type.json"); { BulkRequestBuilder bulkBuilder = client().prepareBulk(); bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); ActionRequestValidationException ex = expectThrows(ActionRequestValidationException.class, bulkBuilder::get); assertThat(ex.validationErrors(), containsInAnyOrder( "index is missing", "index is missing", "index is missing")); } { createSamplePipeline("pipeline"); BulkRequestBuilder bulkBuilder = client().prepareBulk("test") .routing("routing") .pipeline("pipeline"); bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); BulkResponse bulkItemResponses = bulkBuilder.get(); assertFalse(bulkItemResponses.hasFailures()); } } private void createSamplePipeline(String pipelineId) throws IOException, ExecutionException, InterruptedException { XContentBuilder pipeline = jsonBuilder() .startObject() .startArray("processors") .startObject() .startObject("test") .endObject() .endObject() .endArray() .endObject(); AcknowledgedResponse acknowledgedResponse = client().admin() .cluster() .putPipeline(new PutPipelineRequest(pipelineId, BytesReference.bytes(pipeline), XContentType.JSON)) .get(); assertTrue(acknowledgedResponse.isAcknowledged()); } /** This test ensures that index deletion makes indexing fail quickly, not wait on the index that has disappeared */ public void testDeleteIndexWhileIndexing() throws Exception { String index = "deleted_while_indexing"; createIndex(index); AtomicBoolean stopped = new AtomicBoolean(); Thread[] threads = new Thread[between(1, 4)]; AtomicInteger docID = new AtomicInteger(); for (int i = 0; i < threads.length; i++) { threads[i] = new Thread(() -> { while (stopped.get() == false && docID.get() < 5000) { String id = Integer.toString(docID.incrementAndGet()); try { IndexResponse response = client().prepareIndex(index).setId(id) .setSource(Map.of("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON).get(); assertThat(response.getResult(), is(oneOf(CREATED, UPDATED))); logger.info("--> index id={} seq_no={}", response.getId(), response.getSeqNo()); } catch (ElasticsearchException ignore) { logger.info("--> fail to index id={}", id); } } }); threads[i].start(); } ensureGreen(index); assertBusy(() -> assertThat(docID.get(), greaterThanOrEqualTo(1))); assertAcked(client().admin().indices().prepareDelete(index)); stopped.set(true); for (Thread thread : threads) { thread.join(ReplicationRequest.DEFAULT_TIMEOUT.millis() / 2); assertFalse(thread.isAlive()); } } public void testMixedAutoCreate() throws Exception { PutComposableIndexTemplateAction.Request createTemplateRequest = new PutComposableIndexTemplateAction.Request("logs-foo"); createTemplateRequest.indexTemplate( new ComposableIndexTemplate( List.of("logs-foo*"), new Template(null, new 
CompressedXContent(generateMapping("@timestamp")), null), null, null, null, null, new ComposableIndexTemplate.DataStreamTemplate("@timestamp")) ); client().execute(PutComposableIndexTemplateAction.INSTANCE, createTemplateRequest).actionGet(); BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(new IndexRequest("logs-foobar").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-foobaz").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-barbaz").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-barfoo").opType(CREATE).source("{}", XContentType.JSON)); BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); assertThat("bulk failures: " + Strings.toString(bulkResponse), bulkResponse.hasFailures(), is(false)); bulkRequest = new BulkRequest(); bulkRequest.add(new IndexRequest("logs-foobar").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-foobaz2").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-barbaz").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-barfoo2").opType(CREATE).source("{}", XContentType.JSON)); bulkResponse = client().bulk(bulkRequest).actionGet(); assertThat("bulk failures: " + Strings.toString(bulkResponse), bulkResponse.hasFailures(), is(false)); bulkRequest = new BulkRequest(); bulkRequest.add(new IndexRequest("logs-foobar").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-foobaz2").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-foobaz3").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-barbaz").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-barfoo2").opType(CREATE).source("{}", XContentType.JSON)); bulkRequest.add(new IndexRequest("logs-barfoo3").opType(CREATE).source("{}", XContentType.JSON)); bulkResponse = client().bulk(bulkRequest).actionGet(); assertThat("bulk failures: " + Strings.toString(bulkResponse), bulkResponse.hasFailures(), is(false)); GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request("*"); GetDataStreamAction.Response getDataStreamsResponse = client().admin().indices().getDataStreams(getDataStreamRequest).actionGet(); assertThat(getDataStreamsResponse.getDataStreams(), hasSize(4)); getDataStreamsResponse.getDataStreams().sort(Comparator.comparing(DataStream::getName)); assertThat(getDataStreamsResponse.getDataStreams().get(0).getName(), equalTo("logs-foobar")); assertThat(getDataStreamsResponse.getDataStreams().get(1).getName(), equalTo("logs-foobaz")); assertThat(getDataStreamsResponse.getDataStreams().get(2).getName(), equalTo("logs-foobaz2")); assertThat(getDataStreamsResponse.getDataStreams().get(3).getName(), equalTo("logs-foobaz3")); GetIndexResponse getIndexResponse = client().admin().indices().getIndex(new GetIndexRequest().indices("logs-bar*")).actionGet(); assertThat(getIndexResponse.getIndices(), arrayWithSize(4)); assertThat(getIndexResponse.getIndices(), hasItemInArray("logs-barbaz")); assertThat(getIndexResponse.getIndices(), hasItemInArray("logs-barfoo")); assertThat(getIndexResponse.getIndices(), hasItemInArray("logs-barfoo2")); assertThat(getIndexResponse.getIndices(), hasItemInArray("logs-barfoo3")); DeleteDataStreamAction.Request deleteDSReq = new DeleteDataStreamAction.Request("*"); 
client().execute(DeleteDataStreamAction.INSTANCE, deleteDSReq).actionGet(); DeleteComposableIndexTemplateAction.Request deleteTemplateRequest = new DeleteComposableIndexTemplateAction.Request("*"); client().execute(DeleteComposableIndexTemplateAction.INSTANCE, deleteTemplateRequest).actionGet(); } public void testAutoCreateV1TemplateNoDataStream() { Settings settings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build(); PutIndexTemplateRequest v1Request = new PutIndexTemplateRequest("logs-foo"); v1Request.patterns(List.of("logs-foo*")); v1Request.settings(settings); v1Request.order(Integer.MAX_VALUE); // in order to avoid number_of_replicas being overwritten by random_template client().admin().indices().putTemplate(v1Request).actionGet(); BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(new IndexRequest("logs-foobar").opType(CREATE).source("{}", XContentType.JSON)); BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); assertThat("bulk failures: " + Strings.toString(bulkResponse), bulkResponse.hasFailures(), is(false)); GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request("*"); GetDataStreamAction.Response getDataStreamsResponse = client().admin().indices().getDataStreams(getDataStreamRequest).actionGet(); assertThat(getDataStreamsResponse.getDataStreams(), hasSize(0)); GetIndexResponse getIndexResponse = client().admin().indices().getIndex(new GetIndexRequest().indices("logs-foobar")).actionGet(); assertThat(getIndexResponse.getIndices(), arrayWithSize(1)); assertThat(getIndexResponse.getIndices(), hasItemInArray("logs-foobar")); assertThat(getIndexResponse.getSettings().get("logs-foobar").get(IndexMetadata.SETTING_NUMBER_OF_REPLICAS), equalTo("0")); } }
/* * Copyright (C) 2008 Esmertec AG. * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.bupt.mms.ui; import edu.bupt.mms.R; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.media.MediaPlayer; import android.net.Uri; import android.text.Editable; import android.text.TextWatcher; import android.util.AttributeSet; import android.util.Log; import android.view.View; import android.widget.EditText; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import java.io.IOException; import java.util.Map; /** * This is a basic view to show and edit a slide. */ public class BasicSlideEditorView extends LinearLayout implements SlideViewInterface { private static final String TAG = "BasicSlideEditorView"; private ImageView mImageView; private View mAudioView; private TextView mAudioNameView; private EditText mEditText; private boolean mOnTextChangedListenerEnabled = true; private OnTextChangedListener mOnTextChangedListener; public BasicSlideEditorView(Context context) { super(context); } public BasicSlideEditorView(Context context, AttributeSet attrs) { super(context, attrs); } @Override public void onFinishInflate() { mImageView = (ImageView) findViewById(R.id.image); mAudioView = findViewById(R.id.audio); mAudioNameView = (TextView) findViewById(R.id.audio_name); mEditText = (EditText) findViewById(R.id.text_message); mEditText.addTextChangedListener(new TextWatcher() { public void beforeTextChanged(CharSequence s, int start, int count, int after) { // TODO Auto-generated method stub } public void onTextChanged(CharSequence s, int start, int before, int count) { if (mOnTextChangedListenerEnabled && (mOnTextChangedListener != null)) { mOnTextChangedListener.onTextChanged(s.toString()); } } public void afterTextChanged(Editable s) { // TODO Auto-generated method stub } }); } public void startAudio() { // TODO Auto-generated method stub } public void startVideo() { // TODO Auto-generated method stub } public void setAudio(Uri audio, String name, Map<String, ?> extras) { mAudioView.setVisibility(View.VISIBLE); mAudioNameView.setText(name); } public void setImage(String name, Bitmap bitmap) { try { if (null == bitmap) { bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.ic_missing_thumbnail_picture); } mImageView.setImageBitmap(bitmap); } catch (java.lang.OutOfMemoryError e) { Log.e(TAG, "setImage: out of memory: ", e); } } public void setImageRegionFit(String fit) { // TODO Auto-generated method stub } public void setImageVisibility(boolean visible) { // TODO Auto-generated method stub } public void setText(String name, String text) { mOnTextChangedListenerEnabled = false; if ((text != null) && !text.equals(mEditText.getText().toString())) { mEditText.setText(text); mEditText.setSelection(text.length()); } mOnTextChangedListenerEnabled = true; } public void setTextVisibility(boolean visible) { // TODO Auto-generated method stub 
} public void setVideo(String name, Uri video) { try { Bitmap bitmap = VideoAttachmentView.createVideoThumbnail(mContext, video); if (null == bitmap) { bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.ic_missing_thumbnail_video); } mImageView.setImageBitmap(bitmap); } catch (java.lang.OutOfMemoryError e) { Log.e(TAG, "setVideo: out of memory: ", e); } } public void setVideoThumbnail(String name, Bitmap bitmap) { mImageView.setImageBitmap(bitmap); } public void setVideoVisibility(boolean visible) { // TODO Auto-generated method stub } public void stopAudio() { // TODO Auto-generated method stub } public void stopVideo() { // TODO Auto-generated method stub } public void reset() { mImageView.setImageDrawable(null); mAudioView.setVisibility(View.GONE); mOnTextChangedListenerEnabled = false; mEditText.setText(""); mOnTextChangedListenerEnabled = true; } public void setVisibility(boolean visible) { // TODO Auto-generated method stub } public void setOnTextChangedListener(OnTextChangedListener l) { mOnTextChangedListener = l; } public interface OnTextChangedListener { void onTextChanged(String s); } public void pauseAudio() { // TODO Auto-generated method stub } public void pauseVideo() { // TODO Auto-generated method stub } public void seekAudio(int seekTo) { // TODO Auto-generated method stub } public void seekVideo(int seekTo) { // TODO Auto-generated method stub } }
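// --- Editor's wiring sketch (not part of the original view) -------------------
// Shows how a caller registers for text edits on the view above. The layout
// resource R.layout.edit_slide_view is hypothetical; any layout whose root is a
// BasicSlideEditorView containing the child ids used in onFinishInflate() works.
package edu.bupt.mms.ui;

import android.content.Context;
import android.view.View;

import edu.bupt.mms.R;

class BasicSlideEditorWiringSketch {
    static BasicSlideEditorView inflateAndListen(Context context) {
        BasicSlideEditorView editor = (BasicSlideEditorView)
                View.inflate(context, R.layout.edit_slide_view, null); // hypothetical layout id
        editor.setOnTextChangedListener(new BasicSlideEditorView.OnTextChangedListener() {
            public void onTextChanged(String s) {
                // Push the edited text back into the slide model here.
            }
        });
        return editor;
    }
}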
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/appengine/v1/app_yaml.proto package com.google.appengine.v1; /** * <pre> * Files served directly to the user for a given URL, such as images, CSS * stylesheets, or JavaScript source files. Static file handlers describe which * files in the application directory are static files, and which URLs serve * them. * </pre> * * Protobuf type {@code google.appengine.v1.StaticFilesHandler} */ public final class StaticFilesHandler extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.appengine.v1.StaticFilesHandler) StaticFilesHandlerOrBuilder { // Use StaticFilesHandler.newBuilder() to construct. private StaticFilesHandler(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private StaticFilesHandler() { path_ = ""; uploadPathRegex_ = ""; mimeType_ = ""; requireMatchingFile_ = false; applicationReadable_ = false; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private StaticFilesHandler( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { java.lang.String s = input.readStringRequireUtf8(); path_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); uploadPathRegex_ = s; break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { httpHeaders_ = com.google.protobuf.MapField.newMapField( HttpHeadersDefaultEntryHolder.defaultEntry); mutable_bitField0_ |= 0x00000004; } com.google.protobuf.MapEntry<java.lang.String, java.lang.String> httpHeaders__ = input.readMessage( HttpHeadersDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); httpHeaders_.getMutableMap().put( httpHeaders__.getKey(), httpHeaders__.getValue()); break; } case 34: { java.lang.String s = input.readStringRequireUtf8(); mimeType_ = s; break; } case 42: { com.google.protobuf.Duration.Builder subBuilder = null; if (expiration_ != null) { subBuilder = expiration_.toBuilder(); } expiration_ = input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(expiration_); expiration_ = subBuilder.buildPartial(); } break; } case 48: { requireMatchingFile_ = input.readBool(); break; } case 56: { applicationReadable_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.appengine.v1.AppYamlProto.internal_static_google_appengine_v1_StaticFilesHandler_descriptor; } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapField internalGetMapField( int number) { switch (number) { case 3: return internalGetHttpHeaders(); default: throw new RuntimeException( "Invalid map field number: " + number); } } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return com.google.appengine.v1.AppYamlProto.internal_static_google_appengine_v1_StaticFilesHandler_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.appengine.v1.StaticFilesHandler.class, com.google.appengine.v1.StaticFilesHandler.Builder.class); } private int bitField0_; public static final int PATH_FIELD_NUMBER = 1; private volatile java.lang.Object path_; /** * <pre> * Path to the static files matched by the URL pattern, from the * application root directory. The path can refer to text matched in groupings * in the URL pattern. * </pre> * * <code>optional string path = 1;</code> */ public java.lang.String getPath() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } } /** * <pre> * Path to the static files matched by the URL pattern, from the * application root directory. The path can refer to text matched in groupings * in the URL pattern. * </pre> * * <code>optional string path = 1;</code> */ public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int UPLOAD_PATH_REGEX_FIELD_NUMBER = 2; private volatile java.lang.Object uploadPathRegex_; /** * <pre> * Regular expression that matches the file paths for all files that should be * referenced by this handler. * </pre> * * <code>optional string upload_path_regex = 2;</code> */ public java.lang.String getUploadPathRegex() { java.lang.Object ref = uploadPathRegex_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uploadPathRegex_ = s; return s; } } /** * <pre> * Regular expression that matches the file paths for all files that should be * referenced by this handler. 
* </pre> * * <code>optional string upload_path_regex = 2;</code> */ public com.google.protobuf.ByteString getUploadPathRegexBytes() { java.lang.Object ref = uploadPathRegex_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); uploadPathRegex_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int HTTP_HEADERS_FIELD_NUMBER = 3; private static final class HttpHeadersDefaultEntryHolder { static final com.google.protobuf.MapEntry< java.lang.String, java.lang.String> defaultEntry = com.google.protobuf.MapEntry .<java.lang.String, java.lang.String>newDefaultInstance( com.google.appengine.v1.AppYamlProto.internal_static_google_appengine_v1_StaticFilesHandler_HttpHeadersEntry_descriptor, com.google.protobuf.WireFormat.FieldType.STRING, "", com.google.protobuf.WireFormat.FieldType.STRING, ""); } private com.google.protobuf.MapField< java.lang.String, java.lang.String> httpHeaders_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetHttpHeaders() { if (httpHeaders_ == null) { return com.google.protobuf.MapField.emptyMapField( HttpHeadersDefaultEntryHolder.defaultEntry); } return httpHeaders_; } public int getHttpHeadersCount() { return internalGetHttpHeaders().getMap().size(); } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public boolean containsHttpHeaders( java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } return internalGetHttpHeaders().getMap().containsKey(key); } /** * Use {@link #getHttpHeadersMap()} instead. */ @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getHttpHeaders() { return getHttpHeadersMap(); } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public java.util.Map<java.lang.String, java.lang.String> getHttpHeadersMap() { return internalGetHttpHeaders().getMap(); } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public java.lang.String getHttpHeadersOrDefault( java.lang.String key, java.lang.String defaultValue) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetHttpHeaders().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public java.lang.String getHttpHeadersOrThrow( java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetHttpHeaders().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } public static final int MIME_TYPE_FIELD_NUMBER = 4; private volatile java.lang.Object mimeType_; /** * <pre> * MIME type used to serve all files served by this handler. * Defaults to file-specific MIME types, which are derived from each file's * filename extension. 
* </pre> * * <code>optional string mime_type = 4;</code> */ public java.lang.String getMimeType() { java.lang.Object ref = mimeType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); mimeType_ = s; return s; } } /** * <pre> * MIME type used to serve all files served by this handler. * Defaults to file-specific MIME types, which are derived from each file's * filename extension. * </pre> * * <code>optional string mime_type = 4;</code> */ public com.google.protobuf.ByteString getMimeTypeBytes() { java.lang.Object ref = mimeType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); mimeType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int EXPIRATION_FIELD_NUMBER = 5; private com.google.protobuf.Duration expiration_; /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public boolean hasExpiration() { return expiration_ != null; } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public com.google.protobuf.Duration getExpiration() { return expiration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : expiration_; } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public com.google.protobuf.DurationOrBuilder getExpirationOrBuilder() { return getExpiration(); } public static final int REQUIRE_MATCHING_FILE_FIELD_NUMBER = 6; private boolean requireMatchingFile_; /** * <pre> * Whether this handler should match the request if the file * referenced by the handler does not exist. * </pre> * * <code>optional bool require_matching_file = 6;</code> */ public boolean getRequireMatchingFile() { return requireMatchingFile_; } public static final int APPLICATION_READABLE_FIELD_NUMBER = 7; private boolean applicationReadable_; /** * <pre> * Whether files should also be uploaded as code data. By default, files * declared in static file handlers are uploaded as static * data and are only served to end users; they cannot be read by the * application. If enabled, uploads are charged against both your code and * static data storage resource quotas. 
* </pre> * * <code>optional bool application_readable = 7;</code> */ public boolean getApplicationReadable() { return applicationReadable_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getPathBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, path_); } if (!getUploadPathRegexBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uploadPathRegex_); } com.google.protobuf.GeneratedMessageV3 .serializeStringMapTo( output, internalGetHttpHeaders(), HttpHeadersDefaultEntryHolder.defaultEntry, 3); if (!getMimeTypeBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, mimeType_); } if (expiration_ != null) { output.writeMessage(5, getExpiration()); } if (requireMatchingFile_ != false) { output.writeBool(6, requireMatchingFile_); } if (applicationReadable_ != false) { output.writeBool(7, applicationReadable_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getPathBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, path_); } if (!getUploadPathRegexBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uploadPathRegex_); } for (java.util.Map.Entry<java.lang.String, java.lang.String> entry : internalGetHttpHeaders().getMap().entrySet()) { com.google.protobuf.MapEntry<java.lang.String, java.lang.String> httpHeaders__ = HttpHeadersDefaultEntryHolder.defaultEntry.newBuilderForType() .setKey(entry.getKey()) .setValue(entry.getValue()) .build(); size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, httpHeaders__); } if (!getMimeTypeBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, mimeType_); } if (expiration_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, getExpiration()); } if (requireMatchingFile_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(6, requireMatchingFile_); } if (applicationReadable_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(7, applicationReadable_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.appengine.v1.StaticFilesHandler)) { return super.equals(obj); } com.google.appengine.v1.StaticFilesHandler other = (com.google.appengine.v1.StaticFilesHandler) obj; boolean result = true; result = result && getPath() .equals(other.getPath()); result = result && getUploadPathRegex() .equals(other.getUploadPathRegex()); result = result && internalGetHttpHeaders().equals( other.internalGetHttpHeaders()); result = result && getMimeType() .equals(other.getMimeType()); result = result && (hasExpiration() == other.hasExpiration()); if (hasExpiration()) { result = result && getExpiration() .equals(other.getExpiration()); } result = result && (getRequireMatchingFile() == other.getRequireMatchingFile()); result = result && (getApplicationReadable() == other.getApplicationReadable()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPath().hashCode(); hash = (37 * hash) + UPLOAD_PATH_REGEX_FIELD_NUMBER; hash = (53 * hash) + getUploadPathRegex().hashCode(); if (!internalGetHttpHeaders().getMap().isEmpty()) { hash = (37 * hash) + HTTP_HEADERS_FIELD_NUMBER; hash = (53 * hash) + internalGetHttpHeaders().hashCode(); } hash = (37 * hash) + MIME_TYPE_FIELD_NUMBER; hash = (53 * hash) + getMimeType().hashCode(); if (hasExpiration()) { hash = (37 * hash) + EXPIRATION_FIELD_NUMBER; hash = (53 * hash) + getExpiration().hashCode(); } hash = (37 * hash) + REQUIRE_MATCHING_FILE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getRequireMatchingFile()); hash = (37 * hash) + APPLICATION_READABLE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getApplicationReadable()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.appengine.v1.StaticFilesHandler parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.StaticFilesHandler parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.appengine.v1.StaticFilesHandler parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.StaticFilesHandler parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.appengine.v1.StaticFilesHandler parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.appengine.v1.StaticFilesHandler parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.appengine.v1.StaticFilesHandler parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.appengine.v1.StaticFilesHandler parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.appengine.v1.StaticFilesHandler parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.appengine.v1.StaticFilesHandler parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.appengine.v1.StaticFilesHandler prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Files served directly to the user for a given URL, such as images, CSS * stylesheets, or JavaScript source files. Static file handlers describe which * files in the application directory are static files, and which URLs serve * them. * </pre> * * Protobuf type {@code google.appengine.v1.StaticFilesHandler} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.appengine.v1.StaticFilesHandler) com.google.appengine.v1.StaticFilesHandlerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.appengine.v1.AppYamlProto.internal_static_google_appengine_v1_StaticFilesHandler_descriptor; } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapField internalGetMapField( int number) { switch (number) { case 3: return internalGetHttpHeaders(); default: throw new RuntimeException( "Invalid map field number: " + number); } } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapField internalGetMutableMapField( int number) { switch (number) { case 3: return internalGetMutableHttpHeaders(); default: throw new RuntimeException( "Invalid map field number: " + number); } } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.appengine.v1.AppYamlProto.internal_static_google_appengine_v1_StaticFilesHandler_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.appengine.v1.StaticFilesHandler.class, com.google.appengine.v1.StaticFilesHandler.Builder.class); } // Construct using com.google.appengine.v1.StaticFilesHandler.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); path_ = ""; uploadPathRegex_ = ""; internalGetMutableHttpHeaders().clear(); mimeType_ = ""; if (expirationBuilder_ == null) { expiration_ = null; } else { expiration_ = null; expirationBuilder_ = null; } requireMatchingFile_ = false; applicationReadable_ = false; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.appengine.v1.AppYamlProto.internal_static_google_appengine_v1_StaticFilesHandler_descriptor; } public com.google.appengine.v1.StaticFilesHandler getDefaultInstanceForType() { return com.google.appengine.v1.StaticFilesHandler.getDefaultInstance(); } public com.google.appengine.v1.StaticFilesHandler build() { com.google.appengine.v1.StaticFilesHandler result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public com.google.appengine.v1.StaticFilesHandler buildPartial() { com.google.appengine.v1.StaticFilesHandler 
result = new com.google.appengine.v1.StaticFilesHandler(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; result.path_ = path_; result.uploadPathRegex_ = uploadPathRegex_; result.httpHeaders_ = internalGetHttpHeaders(); result.httpHeaders_.makeImmutable(); result.mimeType_ = mimeType_; if (expirationBuilder_ == null) { result.expiration_ = expiration_; } else { result.expiration_ = expirationBuilder_.build(); } result.requireMatchingFile_ = requireMatchingFile_; result.applicationReadable_ = applicationReadable_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.appengine.v1.StaticFilesHandler) { return mergeFrom((com.google.appengine.v1.StaticFilesHandler)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.appengine.v1.StaticFilesHandler other) { if (other == com.google.appengine.v1.StaticFilesHandler.getDefaultInstance()) return this; if (!other.getPath().isEmpty()) { path_ = other.path_; onChanged(); } if (!other.getUploadPathRegex().isEmpty()) { uploadPathRegex_ = other.uploadPathRegex_; onChanged(); } internalGetMutableHttpHeaders().mergeFrom( other.internalGetHttpHeaders()); if (!other.getMimeType().isEmpty()) { mimeType_ = other.mimeType_; onChanged(); } if (other.hasExpiration()) { mergeExpiration(other.getExpiration()); } if (other.getRequireMatchingFile() != false) { setRequireMatchingFile(other.getRequireMatchingFile()); } if (other.getApplicationReadable() != false) { setApplicationReadable(other.getApplicationReadable()); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.appengine.v1.StaticFilesHandler parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.appengine.v1.StaticFilesHandler) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object path_ = ""; /** * <pre> * Path to the static files matched by the URL pattern, from the * application root directory. The path can refer to text matched in groupings * in the URL pattern. 
* </pre> * * <code>optional string path = 1;</code> */ public java.lang.String getPath() { java.lang.Object ref = path_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Path to the static files matched by the URL pattern, from the * application root directory. The path can refer to text matched in groupings * in the URL pattern. * </pre> * * <code>optional string path = 1;</code> */ public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Path to the static files matched by the URL pattern, from the * application root directory. The path can refer to text matched in groupings * in the URL pattern. * </pre> * * <code>optional string path = 1;</code> */ public Builder setPath( java.lang.String value) { if (value == null) { throw new NullPointerException(); } path_ = value; onChanged(); return this; } /** * <pre> * Path to the static files matched by the URL pattern, from the * application root directory. The path can refer to text matched in groupings * in the URL pattern. * </pre> * * <code>optional string path = 1;</code> */ public Builder clearPath() { path_ = getDefaultInstance().getPath(); onChanged(); return this; } /** * <pre> * Path to the static files matched by the URL pattern, from the * application root directory. The path can refer to text matched in groupings * in the URL pattern. * </pre> * * <code>optional string path = 1;</code> */ public Builder setPathBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); path_ = value; onChanged(); return this; } private java.lang.Object uploadPathRegex_ = ""; /** * <pre> * Regular expression that matches the file paths for all files that should be * referenced by this handler. * </pre> * * <code>optional string upload_path_regex = 2;</code> */ public java.lang.String getUploadPathRegex() { java.lang.Object ref = uploadPathRegex_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uploadPathRegex_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Regular expression that matches the file paths for all files that should be * referenced by this handler. * </pre> * * <code>optional string upload_path_regex = 2;</code> */ public com.google.protobuf.ByteString getUploadPathRegexBytes() { java.lang.Object ref = uploadPathRegex_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); uploadPathRegex_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Regular expression that matches the file paths for all files that should be * referenced by this handler. 
* </pre> * * <code>optional string upload_path_regex = 2;</code> */ public Builder setUploadPathRegex( java.lang.String value) { if (value == null) { throw new NullPointerException(); } uploadPathRegex_ = value; onChanged(); return this; } /** * <pre> * Regular expression that matches the file paths for all files that should be * referenced by this handler. * </pre> * * <code>optional string upload_path_regex = 2;</code> */ public Builder clearUploadPathRegex() { uploadPathRegex_ = getDefaultInstance().getUploadPathRegex(); onChanged(); return this; } /** * <pre> * Regular expression that matches the file paths for all files that should be * referenced by this handler. * </pre> * * <code>optional string upload_path_regex = 2;</code> */ public Builder setUploadPathRegexBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); uploadPathRegex_ = value; onChanged(); return this; } private com.google.protobuf.MapField< java.lang.String, java.lang.String> httpHeaders_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetHttpHeaders() { if (httpHeaders_ == null) { return com.google.protobuf.MapField.emptyMapField( HttpHeadersDefaultEntryHolder.defaultEntry); } return httpHeaders_; } private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetMutableHttpHeaders() { onChanged();; if (httpHeaders_ == null) { httpHeaders_ = com.google.protobuf.MapField.newMapField( HttpHeadersDefaultEntryHolder.defaultEntry); } if (!httpHeaders_.isMutable()) { httpHeaders_ = httpHeaders_.copy(); } return httpHeaders_; } public int getHttpHeadersCount() { return internalGetHttpHeaders().getMap().size(); } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public boolean containsHttpHeaders( java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } return internalGetHttpHeaders().getMap().containsKey(key); } /** * Use {@link #getHttpHeadersMap()} instead. */ @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getHttpHeaders() { return getHttpHeadersMap(); } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public java.util.Map<java.lang.String, java.lang.String> getHttpHeadersMap() { return internalGetHttpHeaders().getMap(); } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public java.lang.String getHttpHeadersOrDefault( java.lang.String key, java.lang.String defaultValue) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetHttpHeaders().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * <pre> * HTTP headers to use for all responses from these URLs. 
* </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public java.lang.String getHttpHeadersOrThrow( java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetHttpHeaders().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } public Builder clearHttpHeaders() { getMutableHttpHeaders().clear(); return this; } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public Builder removeHttpHeaders( java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } getMutableHttpHeaders().remove(key); return this; } /** * Use alternate mutation accessors instead. */ @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getMutableHttpHeaders() { return internalGetMutableHttpHeaders().getMutableMap(); } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public Builder putHttpHeaders( java.lang.String key, java.lang.String value) { if (key == null) { throw new java.lang.NullPointerException(); } if (value == null) { throw new java.lang.NullPointerException(); } getMutableHttpHeaders().put(key, value); return this; } /** * <pre> * HTTP headers to use for all responses from these URLs. * </pre> * * <code>map&lt;string, string&gt; http_headers = 3;</code> */ public Builder putAllHttpHeaders( java.util.Map<java.lang.String, java.lang.String> values) { getMutableHttpHeaders().putAll(values); return this; } private java.lang.Object mimeType_ = ""; /** * <pre> * MIME type used to serve all files served by this handler. * Defaults to file-specific MIME types, which are derived from each file's * filename extension. * </pre> * * <code>optional string mime_type = 4;</code> */ public java.lang.String getMimeType() { java.lang.Object ref = mimeType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); mimeType_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * MIME type used to serve all files served by this handler. * Defaults to file-specific MIME types, which are derived from each file's * filename extension. * </pre> * * <code>optional string mime_type = 4;</code> */ public com.google.protobuf.ByteString getMimeTypeBytes() { java.lang.Object ref = mimeType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); mimeType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * MIME type used to serve all files served by this handler. * Defaults to file-specific MIME types, which are derived from each file's * filename extension. * </pre> * * <code>optional string mime_type = 4;</code> */ public Builder setMimeType( java.lang.String value) { if (value == null) { throw new NullPointerException(); } mimeType_ = value; onChanged(); return this; } /** * <pre> * MIME type used to serve all files served by this handler. * Defaults to file-specific MIME types, which are derived from each file's * filename extension. 
* </pre> * * <code>optional string mime_type = 4;</code> */ public Builder clearMimeType() { mimeType_ = getDefaultInstance().getMimeType(); onChanged(); return this; } /** * <pre> * MIME type used to serve all files served by this handler. * Defaults to file-specific MIME types, which are derived from each file's * filename extension. * </pre> * * <code>optional string mime_type = 4;</code> */ public Builder setMimeTypeBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); mimeType_ = value; onChanged(); return this; } private com.google.protobuf.Duration expiration_ = null; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> expirationBuilder_; /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public boolean hasExpiration() { return expirationBuilder_ != null || expiration_ != null; } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public com.google.protobuf.Duration getExpiration() { if (expirationBuilder_ == null) { return expiration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : expiration_; } else { return expirationBuilder_.getMessage(); } } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public Builder setExpiration(com.google.protobuf.Duration value) { if (expirationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } expiration_ = value; onChanged(); } else { expirationBuilder_.setMessage(value); } return this; } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public Builder setExpiration( com.google.protobuf.Duration.Builder builderForValue) { if (expirationBuilder_ == null) { expiration_ = builderForValue.build(); onChanged(); } else { expirationBuilder_.setMessage(builderForValue.build()); } return this; } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public Builder mergeExpiration(com.google.protobuf.Duration value) { if (expirationBuilder_ == null) { if (expiration_ != null) { expiration_ = com.google.protobuf.Duration.newBuilder(expiration_).mergeFrom(value).buildPartial(); } else { expiration_ = value; } onChanged(); } else { expirationBuilder_.mergeFrom(value); } return this; } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public Builder clearExpiration() { if (expirationBuilder_ == null) { expiration_ = null; onChanged(); } else { expiration_ = null; expirationBuilder_ = null; } return this; } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. 
* </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public com.google.protobuf.Duration.Builder getExpirationBuilder() { onChanged(); return getExpirationFieldBuilder().getBuilder(); } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ public com.google.protobuf.DurationOrBuilder getExpirationOrBuilder() { if (expirationBuilder_ != null) { return expirationBuilder_.getMessageOrBuilder(); } else { return expiration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : expiration_; } } /** * <pre> * Time a static file served by this handler should be cached * by web proxies and browsers. * </pre> * * <code>optional .google.protobuf.Duration expiration = 5;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getExpirationFieldBuilder() { if (expirationBuilder_ == null) { expirationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getExpiration(), getParentForChildren(), isClean()); expiration_ = null; } return expirationBuilder_; } private boolean requireMatchingFile_ ; /** * <pre> * Whether this handler should match the request if the file * referenced by the handler does not exist. * </pre> * * <code>optional bool require_matching_file = 6;</code> */ public boolean getRequireMatchingFile() { return requireMatchingFile_; } /** * <pre> * Whether this handler should match the request if the file * referenced by the handler does not exist. * </pre> * * <code>optional bool require_matching_file = 6;</code> */ public Builder setRequireMatchingFile(boolean value) { requireMatchingFile_ = value; onChanged(); return this; } /** * <pre> * Whether this handler should match the request if the file * referenced by the handler does not exist. * </pre> * * <code>optional bool require_matching_file = 6;</code> */ public Builder clearRequireMatchingFile() { requireMatchingFile_ = false; onChanged(); return this; } private boolean applicationReadable_ ; /** * <pre> * Whether files should also be uploaded as code data. By default, files * declared in static file handlers are uploaded as static * data and are only served to end users; they cannot be read by the * application. If enabled, uploads are charged against both your code and * static data storage resource quotas. * </pre> * * <code>optional bool application_readable = 7;</code> */ public boolean getApplicationReadable() { return applicationReadable_; } /** * <pre> * Whether files should also be uploaded as code data. By default, files * declared in static file handlers are uploaded as static * data and are only served to end users; they cannot be read by the * application. If enabled, uploads are charged against both your code and * static data storage resource quotas. * </pre> * * <code>optional bool application_readable = 7;</code> */ public Builder setApplicationReadable(boolean value) { applicationReadable_ = value; onChanged(); return this; } /** * <pre> * Whether files should also be uploaded as code data. By default, files * declared in static file handlers are uploaded as static * data and are only served to end users; they cannot be read by the * application. 
If enabled, uploads are charged against both your code and * static data storage resource quotas. * </pre> * * <code>optional bool application_readable = 7;</code> */ public Builder clearApplicationReadable() { applicationReadable_ = false; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:google.appengine.v1.StaticFilesHandler) } // @@protoc_insertion_point(class_scope:google.appengine.v1.StaticFilesHandler) private static final com.google.appengine.v1.StaticFilesHandler DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.appengine.v1.StaticFilesHandler(); } public static com.google.appengine.v1.StaticFilesHandler getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<StaticFilesHandler> PARSER = new com.google.protobuf.AbstractParser<StaticFilesHandler>() { public StaticFilesHandler parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new StaticFilesHandler(input, extensionRegistry); } }; public static com.google.protobuf.Parser<StaticFilesHandler> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<StaticFilesHandler> getParserForType() { return PARSER; } public com.google.appengine.v1.StaticFilesHandler getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
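/*
 * Usage sketch (not part of the generated file): building and round-tripping a
 * StaticFilesHandler with the generated builder API shown above. The concrete values
 * (the path grouping, regex, header, and one-hour cache duration) are arbitrary examples.
 */
package com.google.appengine.v1.example;

import com.google.appengine.v1.StaticFilesHandler;
import com.google.protobuf.Duration;
import com.google.protobuf.InvalidProtocolBufferException;

public class StaticFilesHandlerExample {

    public static void main(String[] args) throws InvalidProtocolBufferException {
        StaticFilesHandler handler = StaticFilesHandler.newBuilder()
                .setPath("static/\\1")                        // serve files matched by the URL grouping
                .setUploadPathRegex("static/.*\\.css")        // which files this handler owns
                .setMimeType("text/css")
                .putHttpHeaders("X-Frame-Options", "DENY")    // map<string, string> http_headers
                .setExpiration(Duration.newBuilder().setSeconds(3600).build())
                .setRequireMatchingFile(true)
                .build();

        // Messages are plain protobufs: serialize to bytes and parse back.
        byte[] wire = handler.toByteArray();
        StaticFilesHandler parsed = StaticFilesHandler.parseFrom(wire);
        System.out.println(parsed.getHttpHeadersMap());
    }
}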
package org.netbeans.jpa.modeler.properties.cascade; import org.netbeans.jpa.modeler.spec.CascadeType; import org.netbeans.jpa.modeler.spec.EmptyType; import org.netbeans.modeler.core.ModelerFile; import org.netbeans.modeler.properties.embedded.GenericEmbeddedEditor; /** * Copyright [2014] Gaurav Gupta * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ /** * * @author Gaurav Gupta */ public class CascadeTypePanel extends GenericEmbeddedEditor<CascadeType> { private ModelerFile modelerFile; private CascadeType cascadeType; @Override public void init() { initComponents(); } @Override public CascadeType getValue() { cascadeType = new CascadeType(); if (all_CheckBox.isSelected()) { cascadeType.setCascadeAll(new EmptyType()); } else if (detach_CheckBox.isSelected() || merge_CheckBox.isSelected() || persist_CheckBox.isSelected() || refresh_CheckBox.isSelected() || remove_CheckBox.isSelected()) { if (detach_CheckBox.isSelected()) { cascadeType.setCascadeDetach(new EmptyType()); } if (merge_CheckBox.isSelected()) { cascadeType.setCascadeMerge(new EmptyType()); } if (persist_CheckBox.isSelected()) { cascadeType.setCascadePersist(new EmptyType()); } if (refresh_CheckBox.isSelected()) { cascadeType.setCascadeRefresh(new EmptyType()); } if (remove_CheckBox.isSelected()) { cascadeType.setCascadeRemove(new EmptyType()); } } else { cascadeType = null; } return cascadeType; } @Override public void setValue(CascadeType cascadeType) { this.cascadeType = cascadeType; if (cascadeType == null) { manageCascadeAllState(false); } else { if (cascadeType.getCascadeAll() != null) { manageCascadeAllState(true); } else { if (cascadeType.getCascadeDetach() != null) { detach_CheckBox.setSelected(true); } if (cascadeType.getCascadeMerge() != null) { merge_CheckBox.setSelected(true); } if (cascadeType.getCascadePersist() != null) { persist_CheckBox.setSelected(true); } if (cascadeType.getCascadeRefresh() != null) { refresh_CheckBox.setSelected(true); } if (cascadeType.getCascadeRemove() != null) { remove_CheckBox.setSelected(true); } } } } private void manageCascadeAllState(boolean state) { if (state) { detach_CheckBox.setEnabled(false); merge_CheckBox.setEnabled(false); persist_CheckBox.setEnabled(false); refresh_CheckBox.setEnabled(false); remove_CheckBox.setEnabled(false); } else { detach_CheckBox.setEnabled(true); merge_CheckBox.setEnabled(true); persist_CheckBox.setEnabled(true); refresh_CheckBox.setEnabled(true); remove_CheckBox.setEnabled(true); } all_CheckBox.setSelected(state); detach_CheckBox.setSelected(state); merge_CheckBox.setSelected(state); persist_CheckBox.setSelected(state); refresh_CheckBox.setSelected(state); remove_CheckBox.setSelected(state); } private void manageOtherState() { if (detach_CheckBox.isSelected() && merge_CheckBox.isSelected() && persist_CheckBox.isSelected() && refresh_CheckBox.isSelected() && remove_CheckBox.isSelected()) { manageCascadeAllState(true); } // else { // manageCascadeAllState(false); // } } public CascadeTypePanel(ModelerFile modelerFile) { this.modelerFile = modelerFile; } /** * This 
method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { main_LayeredPane = new javax.swing.JLayeredPane(); all_CheckBox = new javax.swing.JCheckBox(); persist_CheckBox = new javax.swing.JCheckBox(); merge_CheckBox = new javax.swing.JCheckBox(); remove_CheckBox = new javax.swing.JCheckBox(); refresh_CheckBox = new javax.swing.JCheckBox(); detach_CheckBox = new javax.swing.JCheckBox(); main_LayeredPane.setBorder(javax.swing.BorderFactory.createTitledBorder(javax.swing.BorderFactory.createEtchedBorder(), "Cascade", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, new java.awt.Font("Arial", 1, 12), new java.awt.Color(51, 51, 51))); // NOI18N main_LayeredPane.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.CENTER, 15, 15)); all_CheckBox.setSelected(true); all_CheckBox.setText("All"); all_CheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { all_CheckBoxActionPerformed(evt); } }); main_LayeredPane.add(all_CheckBox); persist_CheckBox.setText("Persist"); persist_CheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { persist_CheckBoxActionPerformed(evt); } }); main_LayeredPane.add(persist_CheckBox); merge_CheckBox.setText("Merge"); merge_CheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { merge_CheckBoxActionPerformed(evt); } }); main_LayeredPane.add(merge_CheckBox); remove_CheckBox.setText("Remove"); remove_CheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { remove_CheckBoxActionPerformed(evt); } }); main_LayeredPane.add(remove_CheckBox); refresh_CheckBox.setText("Refresh"); refresh_CheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { refresh_CheckBoxActionPerformed(evt); } }); main_LayeredPane.add(refresh_CheckBox); detach_CheckBox.setText("Detach"); detach_CheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { detach_CheckBoxActionPerformed(evt); } }); main_LayeredPane.add(detach_CheckBox); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(main_LayeredPane, javax.swing.GroupLayout.PREFERRED_SIZE, 483, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(0, 0, Short.MAX_VALUE)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(main_LayeredPane, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) ); }// </editor-fold>//GEN-END:initComponents private void all_CheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_all_CheckBoxActionPerformed manageCascadeAllState(all_CheckBox.isSelected()); }//GEN-LAST:event_all_CheckBoxActionPerformed private void 
persist_CheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_persist_CheckBoxActionPerformed manageOtherState(); }//GEN-LAST:event_persist_CheckBoxActionPerformed private void merge_CheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_merge_CheckBoxActionPerformed manageOtherState(); }//GEN-LAST:event_merge_CheckBoxActionPerformed private void remove_CheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_remove_CheckBoxActionPerformed manageOtherState(); }//GEN-LAST:event_remove_CheckBoxActionPerformed private void refresh_CheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_refresh_CheckBoxActionPerformed manageOtherState(); }//GEN-LAST:event_refresh_CheckBoxActionPerformed private void detach_CheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_detach_CheckBoxActionPerformed manageOtherState(); }//GEN-LAST:event_detach_CheckBoxActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JCheckBox all_CheckBox; private javax.swing.JCheckBox detach_CheckBox; private javax.swing.JLayeredPane main_LayeredPane; private javax.swing.JCheckBox merge_CheckBox; private javax.swing.JCheckBox persist_CheckBox; private javax.swing.JCheckBox refresh_CheckBox; private javax.swing.JCheckBox remove_CheckBox; // End of variables declaration//GEN-END:variables }
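/*
 * Usage sketch (not part of the original sources): how the editor's value round-trip is
 * expected to work. In practice the panel is created and shown by the NetBeans modeler
 * property sheet; constructing it directly, as below, is an assumption made only to
 * illustrate setValue()/getValue().
 */
package org.netbeans.jpa.modeler.properties.cascade.example;

import org.netbeans.jpa.modeler.properties.cascade.CascadeTypePanel;
import org.netbeans.jpa.modeler.spec.CascadeType;
import org.netbeans.jpa.modeler.spec.EmptyType;
import org.netbeans.modeler.core.ModelerFile;

public class CascadeTypePanelSketch {

    /** Pre-selects the checkboxes from an existing CascadeType and reads the result back. */
    public static CascadeType roundTrip(ModelerFile modelerFile, CascadeType current) {
        CascadeTypePanel panel = new CascadeTypePanel(modelerFile);
        panel.init();            // builds the checkbox UI (initComponents)
        panel.setValue(current); // "All" disables the individual boxes; null clears everything

        // ... the user would toggle checkboxes in the property dialog here ...

        // Returns null when nothing is selected, a cascade-all value when "All" is
        // checked, and otherwise only the individually selected cascade operations.
        return panel.getValue();
    }

    /** Example input: a CascadeType carrying only MERGE. */
    public static CascadeType mergeOnly() {
        CascadeType cascade = new CascadeType();
        cascade.setCascadeMerge(new EmptyType());
        return cascade;
    }
}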
package com.homiedion.rpgcore.container; import java.util.ArrayList; import java.util.Collection; import org.bukkit.Material; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.inventory.ItemStack; import com.homiedion.aeoncore.configuration.CustomConfig; import com.homiedion.aeoncore.enhanced.EnhancedItemStack; import com.homiedion.rpgcore.container.attribute.RpgAttributeContainer; import com.homiedion.rpgcore.container.capacity.RpgCapacityContainer; import com.homiedion.rpgcore.container.enchantment.RpgEnchantmentContainer; import com.homiedion.rpgcore.container.potioneffect.RpgPotionEffectContainer; import com.homiedion.rpgcore.container.range.RpgRangeContainer; import com.homiedion.rpgcore.container.skill.RpgSkillContainer; import com.homiedion.rpgcore.container.statistic.RpgStatisticContainer; /** * A container for a variety of RPG data. * * @author HomieDion * @since 1.0.0 * @version 1.0.0 */ public class RpgContainer { /** * The attributes of this container. * * @author HomieDion * @since 1.0.0 */ protected RpgAttributeContainer attributes; /** * The capacities of this container. * * @author HomieDion * @since 1.0.0 */ protected RpgCapacityContainer capacities; /** * The potion effects of this container. * * @author HomieDion * @since 1.0.0 */ protected RpgPotionEffectContainer effects; /** * The enchantments of this container. * * @author HomieDion * @since 1.0.0 */ protected RpgEnchantmentContainer enchantments; /** * The ranges of this container. * * @author HomieDion * @since 1.0.0 */ protected RpgRangeContainer ranges; /** * The statistic of this container. * * @author HomieDion * @since 1.0.0 */ protected RpgStatisticContainer statistics; /** * The skills of this container. * * @author HomieDion * @since 1.0.0 */ protected RpgSkillContainer skills; /** * Default Constructor * * @author HomieDion * @since 1.0.0 */ public RpgContainer() { init(); } /** * Load Constructor * * @param config * The config to load. * @author HomieDion * @since 1.0.0 */ public RpgContainer(final CustomConfig config) { init(); load(config); } /** * Load Constructor * * @param section * The section to load. * @author HomieDion * @since 1.0.0 */ public RpgContainer(final ConfigurationSection section) { init(); load(section); } /** * Parse Constructor * * @param item * The item to load. * @author HomieDion * @since 1.0.0 */ public RpgContainer(final ItemStack item) { init(); parse(item); } /** * Clone Constructor * * @param container * The target of cloning. * @author HomieDion * @since 1.0.0 */ public RpgContainer(final RpgContainer container) { attributes = new RpgAttributeContainer(container.getAttributes()); capacities = new RpgCapacityContainer(container.getCapacities()); enchantments = new RpgEnchantmentContainer(container.getEnchantments()); effects = new RpgPotionEffectContainer(container.getPotionEffects()); ranges = new RpgRangeContainer(container.getRanges()); skills = new RpgSkillContainer(container.getSkills()); } /** * Returns the attribute container. * * @return The attribute container. * @author HomieDion * @since 1.0.0 */ public final RpgAttributeContainer getAttributes() { return attributes; } /** * Returns the capacity container. * * @return The capacity container. * @author HomieDion * @since 1.0.0 */ public final RpgCapacityContainer getCapacities() { return capacities; } /** * Returns the enchantment container. * * @return The enchantment container. 
* @author HomieDion * @since 1.0.0 */ public final RpgEnchantmentContainer getEnchantments() { return enchantments; } /** * Returns the potion effect container. * * @return The potion effect container. * @author HomieDion * @since 1.0.0 */ public final RpgPotionEffectContainer getPotionEffects() { return effects; } /** * Returns the range container. * * @return The range container. * @author HomieDion * @since 1.0.0 */ public final RpgRangeContainer getRanges() { return ranges; } /** * Returns the statistics container. * * @return The statistics container. * @author HomieDion * @since 1.0.0 */ public final RpgStatisticContainer getStatistics() { return statistics; } /** * Returns the skill container. * * @return The skill container. * @author HomieDion * @since 1.0.0 */ public final RpgSkillContainer getSkills() { return skills; } /** * Initializes all objects and variables. * * @author HomieDion * @since 1.0.0 */ public void init() { attributes = new RpgAttributeContainer(); capacities = new RpgCapacityContainer(); enchantments = new RpgEnchantmentContainer(); effects = new RpgPotionEffectContainer(); ranges = new RpgRangeContainer(); skills = new RpgSkillContainer(); statistics = new RpgStatisticContainer(); } /** * Adds the config's content to the container. * * @param config * The target config file. * @author HomieDion * @since 1.0.0 */ public void load(final CustomConfig config) { load(config.getConfigurationSection("")); } /** * Loads a particular configuration section. * @param section The target section. * @author HomieDion * @since 1.0.0 */ public void load(final ConfigurationSection section) { //Null Check if (section == null) { return; } attributes.load(section); capacities.load(section); enchantments.load(section); effects.load(section); ranges.load(section); skills.load(section); } /** * Parses out an list of strings. * * @param list * The list we're parsing. * @author HomieDion * @since 1.0.0 */ public void parse(final Collection<String> list) { // Loops all lore present. for (final String line : list) { // Parses the line. parse(line); } } /** * Parses out the lore of an container. * * @param item * The item stack who we're parsing lore from. * @author HomieDion * @since 1.0.0 */ public void parse(final ItemStack item) { // If it doesn't have meta or lore. if (item == null || item.getType() == Material.AIR || !item.hasItemMeta() || !item.getItemMeta().hasLore()) { reset(); return; } // Parse the lore. parse(item.getItemMeta().getLore()); } /** * Resets the container. * * @author HomieDion * @since 1.0.0 */ public void reset() { attributes.reset(); capacities.reset(); enchantments.reset(); effects.reset(); ranges.reset(); skills.reset(); } /** * Adds the container's contents to a config file. * * @param config * The target config file. * @author HomieDion * @since 1.0.0 */ public void save(final CustomConfig config) { attributes.save(config); capacities.save(config); enchantments.save(config); effects.save(config); ranges.save(config); skills.save(config); config.save(); } /** * Returns the container as an ArrayList * * @return An ArrayList * @author HomieDion * @since 1.0.0 */ public ArrayList<String> toArrayList() { return toArrayList(new ArrayList<String>()); } /** * Adds lore to an existing ArrayList. * * @param lore * The ArrayList we're adding to. 
 * @return The resultant ArrayList
 * @author HomieDion
 * @since 1.0.0
 */
public ArrayList<String> toArrayList(final ArrayList<String> lore) {
    // Separator (compare content with isEmpty(), not reference equality)
    if (!lore.isEmpty() && !lore.get(lore.size() - 1).isEmpty()) { lore.add(""); }

    // Capacities
    capacities.toArrayList(lore);

    // Separator
    if (!lore.isEmpty() && !lore.get(lore.size() - 1).isEmpty()) { lore.add(""); }

    // Ranges
    ranges.toArrayList(lore);

    // Separator
    if (!lore.isEmpty() && !lore.get(lore.size() - 1).isEmpty()) { lore.add(""); }

    // Statistics
    statistics.toArrayList(lore);

    // Separator
    if (!lore.isEmpty() && !lore.get(lore.size() - 1).isEmpty()) { lore.add(""); }

    // Enchantments
    enchantments.toArrayList(lore);

    // Separator
    if (!lore.isEmpty() && !lore.get(lore.size() - 1).isEmpty()) { lore.add(""); }

    // Potion Effects
    effects.toArrayList(lore);

    // Separator
    if (!lore.isEmpty() && !lore.get(lore.size() - 1).isEmpty()) { lore.add(""); }

    // Attributes
    attributes.toArrayList(lore);

    // Separator
    if (!lore.isEmpty() && !lore.get(lore.size() - 1).isEmpty()) { lore.add(""); }

    // Skills
    skills.toArrayList(lore);

    // Return
    return lore;
}

/**
 * Returns this as an ItemStack
 *
 * @param item The target item.
 * @return An ItemStack.
 * @author HomieDion
 * @since 1.0.0
 */
public EnhancedItemStack toItemStack(final EnhancedItemStack item) {
    // Set Lore
    item.setLore(toArrayList());

    // Return
    return item;
}

/**
 * Returns this as an ItemStack
 *
 * @param item The target item.
 * @return An ItemStack.
 * @author HomieDion
 * @since 1.0.0
 */
public EnhancedItemStack toItemStack(final ItemStack item) {
    return toItemStack(new EnhancedItemStack(item));
}

/**
 * Returns this as an ItemStack
 *
 * @param material The item's material
 * @return An ItemStack.
 * @author HomieDion
 * @since 1.0.0
 */
public EnhancedItemStack toItemStack(final Material material) {
    return toItemStack(material, 1, (short) 0);
}

/**
 * Returns this as an ItemStack
 *
 * @param material The item's material
 * @param amount The amount of the item.
 * @return An ItemStack.
 * @author HomieDion
 * @since 1.0.0
 */
public EnhancedItemStack toItemStack(final Material material, final int amount) {
    return toItemStack(material, amount, (short) 0);
}

/**
 * Returns this as an ItemStack
 *
 * @param material The item's material
 * @param amount The amount of the item.
 * @param data The material data.
 * @return An ItemStack.
 * @author HomieDion
 * @since 1.0.0
 */
public EnhancedItemStack toItemStack(final Material material, final int amount, final short data) {
    // Return
    return toItemStack(new ItemStack(material, amount, data));
}

/*
 * (non-Javadoc)
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    final StringBuilder str = new StringBuilder();
    for (final String line : toArrayList()) {
        str.append(line).append("\n");
    }
    return str.toString().trim();
}

/**
 * Parses out a single line of text.
 *
 * @param line The line of text we're parsing.
 * @return True if successful.
 * @author HomieDion
 * @since 1.0.0
 */
protected boolean parse(final String line) {
    // If the line is empty exit.
    if (line == null || line.trim().isEmpty()) {
        return false;
    }

    // Attribute
    if (attributes.parse(line)) {
        return true;
    }
    // Capacities
    else if (capacities.parse(line)) {
        return true;
    }
    // Enchantment
    else if (enchantments.parse(line)) {
        return true;
    }
    // Potion Effect
    else if (effects.parse(line)) {
        return true;
    }
    // Statistics
    else if (statistics.parse(line)) {
        return true;
    }
    // Skill
    else if (skills.parse(line)) {
        return true;
    }

    // Failure
    return false;
}
}
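/*
 * Usage sketch (not part of the original plugin source): a minimal illustration of how the
 * RpgContainer above is intended to round-trip RPG data through item lore. The ItemStack
 * passed in is hypothetical; only constructors and methods declared in RpgContainer and
 * EnhancedItemStack above are assumed.
 */
class RpgContainerUsageSketch {
    static com.homiedion.aeoncore.enhanced.EnhancedItemStack refreshLore(final org.bukkit.inventory.ItemStack item) {
        // Parse the item's existing lore into the attribute/capacity/skill/etc. sub-containers.
        final com.homiedion.rpgcore.container.RpgContainer container =
            new com.homiedion.rpgcore.container.RpgContainer(item);
        // Re-serialize the parsed data back onto the item; sections are separated by blank lore lines.
        return container.toItemStack(item);
    }
}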
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: qpstest.proto package io.grpc.testing; /** * Protobuf type {@code grpc.testing.ClientConfig} */ public final class ClientConfig extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:grpc.testing.ClientConfig) ClientConfigOrBuilder { // Use ClientConfig.newBuilder() to construct. private ClientConfig(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); } private ClientConfig() { serverTargets_ = com.google.protobuf.LazyStringArrayList.EMPTY; clientType_ = 0; enableSsl_ = false; outstandingRpcsPerChannel_ = 0; clientChannels_ = 0; payloadSize_ = 0; asyncClientThreads_ = 0; rpcType_ = 0; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private ClientConfig( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { serverTargets_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000001; } serverTargets_.add(bs); break; } case 16: { int rawValue = input.readEnum(); clientType_ = rawValue; break; } case 24: { enableSsl_ = input.readBool(); break; } case 32: { outstandingRpcsPerChannel_ = input.readInt32(); break; } case 40: { clientChannels_ = input.readInt32(); break; } case 48: { payloadSize_ = input.readInt32(); break; } case 56: { asyncClientThreads_ = input.readInt32(); break; } case 64: { int rawValue = input.readEnum(); rpcType_ = rawValue; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw new RuntimeException(e.setUnfinishedMessage(this)); } catch (java.io.IOException e) { throw new RuntimeException( new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this)); } finally { if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { serverTargets_ = serverTargets_.getUnmodifiableView(); } makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grpc.testing.QpsTestProto.internal_static_grpc_testing_ClientConfig_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return io.grpc.testing.QpsTestProto.internal_static_grpc_testing_ClientConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grpc.testing.ClientConfig.class, io.grpc.testing.ClientConfig.Builder.class); } private int bitField0_; public static final int SERVER_TARGETS_FIELD_NUMBER = 1; private com.google.protobuf.LazyStringList serverTargets_; /** * <code>repeated string server_targets = 1;</code> */ public com.google.protobuf.ProtocolStringList getServerTargetsList() { return serverTargets_; } /** * <code>repeated string server_targets = 1;</code> */ public int getServerTargetsCount() { return serverTargets_.size(); } /** * <code>repeated string server_targets = 1;</code> */ public java.lang.String getServerTargets(int index) { return serverTargets_.get(index); } /** * <code>repeated string server_targets = 1;</code> */ public com.google.protobuf.ByteString 
getServerTargetsBytes(int index) { return serverTargets_.getByteString(index); } public static final int CLIENT_TYPE_FIELD_NUMBER = 2; private int clientType_; /** * <code>optional .grpc.testing.ClientType client_type = 2;</code> */ public int getClientTypeValue() { return clientType_; } /** * <code>optional .grpc.testing.ClientType client_type = 2;</code> */ public io.grpc.testing.ClientType getClientType() { io.grpc.testing.ClientType result = io.grpc.testing.ClientType.valueOf(clientType_); return result == null ? io.grpc.testing.ClientType.UNRECOGNIZED : result; } public static final int ENABLE_SSL_FIELD_NUMBER = 3; private boolean enableSsl_; /** * <code>optional bool enable_ssl = 3;</code> */ public boolean getEnableSsl() { return enableSsl_; } public static final int OUTSTANDING_RPCS_PER_CHANNEL_FIELD_NUMBER = 4; private int outstandingRpcsPerChannel_; /** * <code>optional int32 outstanding_rpcs_per_channel = 4;</code> */ public int getOutstandingRpcsPerChannel() { return outstandingRpcsPerChannel_; } public static final int CLIENT_CHANNELS_FIELD_NUMBER = 5; private int clientChannels_; /** * <code>optional int32 client_channels = 5;</code> */ public int getClientChannels() { return clientChannels_; } public static final int PAYLOAD_SIZE_FIELD_NUMBER = 6; private int payloadSize_; /** * <code>optional int32 payload_size = 6;</code> */ public int getPayloadSize() { return payloadSize_; } public static final int ASYNC_CLIENT_THREADS_FIELD_NUMBER = 7; private int asyncClientThreads_; /** * <code>optional int32 async_client_threads = 7;</code> * * <pre> * only for async client: * </pre> */ public int getAsyncClientThreads() { return asyncClientThreads_; } public static final int RPC_TYPE_FIELD_NUMBER = 8; private int rpcType_; /** * <code>optional .grpc.testing.RpcType rpc_type = 8;</code> */ public int getRpcTypeValue() { return rpcType_; } /** * <code>optional .grpc.testing.RpcType rpc_type = 8;</code> */ public io.grpc.testing.RpcType getRpcType() { io.grpc.testing.RpcType result = io.grpc.testing.RpcType.valueOf(rpcType_); return result == null ? 
io.grpc.testing.RpcType.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < serverTargets_.size(); i++) { output.writeBytes(1, serverTargets_.getByteString(i)); } if (clientType_ != io.grpc.testing.ClientType.SYNCHRONOUS_CLIENT.getNumber()) { output.writeEnum(2, clientType_); } if (enableSsl_ != false) { output.writeBool(3, enableSsl_); } if (outstandingRpcsPerChannel_ != 0) { output.writeInt32(4, outstandingRpcsPerChannel_); } if (clientChannels_ != 0) { output.writeInt32(5, clientChannels_); } if (payloadSize_ != 0) { output.writeInt32(6, payloadSize_); } if (asyncClientThreads_ != 0) { output.writeInt32(7, asyncClientThreads_); } if (rpcType_ != io.grpc.testing.RpcType.UNARY.getNumber()) { output.writeEnum(8, rpcType_); } } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < serverTargets_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(serverTargets_.getByteString(i)); } size += dataSize; size += 1 * getServerTargetsList().size(); } if (clientType_ != io.grpc.testing.ClientType.SYNCHRONOUS_CLIENT.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, clientType_); } if (enableSsl_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, enableSsl_); } if (outstandingRpcsPerChannel_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(4, outstandingRpcsPerChannel_); } if (clientChannels_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(5, clientChannels_); } if (payloadSize_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(6, payloadSize_); } if (asyncClientThreads_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(7, asyncClientThreads_); } if (rpcType_ != io.grpc.testing.RpcType.UNARY.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(8, rpcType_); } memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; public static io.grpc.testing.ClientConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grpc.testing.ClientConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grpc.testing.ClientConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grpc.testing.ClientConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grpc.testing.ClientConfig parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static io.grpc.testing.ClientConfig parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static io.grpc.testing.ClientConfig parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static io.grpc.testing.ClientConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static io.grpc.testing.ClientConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static io.grpc.testing.ClientConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(io.grpc.testing.ClientConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code grpc.testing.ClientConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:grpc.testing.ClientConfig) io.grpc.testing.ClientConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grpc.testing.QpsTestProto.internal_static_grpc_testing_ClientConfig_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return io.grpc.testing.QpsTestProto.internal_static_grpc_testing_ClientConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grpc.testing.ClientConfig.class, io.grpc.testing.ClientConfig.Builder.class); } // Construct using io.grpc.testing.ClientConfig.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); serverTargets_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); clientType_ = 0; enableSsl_ = false; outstandingRpcsPerChannel_ = 0; clientChannels_ = 0; payloadSize_ = 0; asyncClientThreads_ = 0; rpcType_ = 0; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return io.grpc.testing.QpsTestProto.internal_static_grpc_testing_ClientConfig_descriptor; } public io.grpc.testing.ClientConfig getDefaultInstanceForType() { return io.grpc.testing.ClientConfig.getDefaultInstance(); } public io.grpc.testing.ClientConfig build() { io.grpc.testing.ClientConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public io.grpc.testing.ClientConfig buildPartial() { io.grpc.testing.ClientConfig result = new 
io.grpc.testing.ClientConfig(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { serverTargets_ = serverTargets_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000001); } result.serverTargets_ = serverTargets_; result.clientType_ = clientType_; result.enableSsl_ = enableSsl_; result.outstandingRpcsPerChannel_ = outstandingRpcsPerChannel_; result.clientChannels_ = clientChannels_; result.payloadSize_ = payloadSize_; result.asyncClientThreads_ = asyncClientThreads_; result.rpcType_ = rpcType_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof io.grpc.testing.ClientConfig) { return mergeFrom((io.grpc.testing.ClientConfig)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(io.grpc.testing.ClientConfig other) { if (other == io.grpc.testing.ClientConfig.getDefaultInstance()) return this; if (!other.serverTargets_.isEmpty()) { if (serverTargets_.isEmpty()) { serverTargets_ = other.serverTargets_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureServerTargetsIsMutable(); serverTargets_.addAll(other.serverTargets_); } onChanged(); } if (other.clientType_ != 0) { setClientTypeValue(other.getClientTypeValue()); } if (other.getEnableSsl() != false) { setEnableSsl(other.getEnableSsl()); } if (other.getOutstandingRpcsPerChannel() != 0) { setOutstandingRpcsPerChannel(other.getOutstandingRpcsPerChannel()); } if (other.getClientChannels() != 0) { setClientChannels(other.getClientChannels()); } if (other.getPayloadSize() != 0) { setPayloadSize(other.getPayloadSize()); } if (other.getAsyncClientThreads() != 0) { setAsyncClientThreads(other.getAsyncClientThreads()); } if (other.rpcType_ != 0) { setRpcTypeValue(other.getRpcTypeValue()); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { io.grpc.testing.ClientConfig parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (io.grpc.testing.ClientConfig) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private com.google.protobuf.LazyStringList serverTargets_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureServerTargetsIsMutable() { if (!((bitField0_ & 0x00000001) == 0x00000001)) { serverTargets_ = new com.google.protobuf.LazyStringArrayList(serverTargets_); bitField0_ |= 0x00000001; } } /** * <code>repeated string server_targets = 1;</code> */ public com.google.protobuf.ProtocolStringList getServerTargetsList() { return serverTargets_.getUnmodifiableView(); } /** * <code>repeated string server_targets = 1;</code> */ public int getServerTargetsCount() { return serverTargets_.size(); } /** * <code>repeated string server_targets = 1;</code> */ public java.lang.String getServerTargets(int index) { return serverTargets_.get(index); } /** * <code>repeated string server_targets = 1;</code> */ public com.google.protobuf.ByteString getServerTargetsBytes(int index) { return serverTargets_.getByteString(index); } /** * <code>repeated string server_targets = 1;</code> */ public Builder setServerTargets( int index, java.lang.String value) { if (value == 
null) { throw new NullPointerException(); } ensureServerTargetsIsMutable(); serverTargets_.set(index, value); onChanged(); return this; } /** * <code>repeated string server_targets = 1;</code> */ public Builder addServerTargets( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureServerTargetsIsMutable(); serverTargets_.add(value); onChanged(); return this; } /** * <code>repeated string server_targets = 1;</code> */ public Builder addAllServerTargets( java.lang.Iterable<java.lang.String> values) { ensureServerTargetsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, serverTargets_); onChanged(); return this; } /** * <code>repeated string server_targets = 1;</code> */ public Builder clearServerTargets() { serverTargets_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <code>repeated string server_targets = 1;</code> */ public Builder addServerTargetsBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureServerTargetsIsMutable(); serverTargets_.add(value); onChanged(); return this; } private int clientType_ = 0; /** * <code>optional .grpc.testing.ClientType client_type = 2;</code> */ public int getClientTypeValue() { return clientType_; } /** * <code>optional .grpc.testing.ClientType client_type = 2;</code> */ public Builder setClientTypeValue(int value) { clientType_ = value; onChanged(); return this; } /** * <code>optional .grpc.testing.ClientType client_type = 2;</code> */ public io.grpc.testing.ClientType getClientType() { io.grpc.testing.ClientType result = io.grpc.testing.ClientType.valueOf(clientType_); return result == null ? io.grpc.testing.ClientType.UNRECOGNIZED : result; } /** * <code>optional .grpc.testing.ClientType client_type = 2;</code> */ public Builder setClientType(io.grpc.testing.ClientType value) { if (value == null) { throw new NullPointerException(); } clientType_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .grpc.testing.ClientType client_type = 2;</code> */ public Builder clearClientType() { clientType_ = 0; onChanged(); return this; } private boolean enableSsl_ ; /** * <code>optional bool enable_ssl = 3;</code> */ public boolean getEnableSsl() { return enableSsl_; } /** * <code>optional bool enable_ssl = 3;</code> */ public Builder setEnableSsl(boolean value) { enableSsl_ = value; onChanged(); return this; } /** * <code>optional bool enable_ssl = 3;</code> */ public Builder clearEnableSsl() { enableSsl_ = false; onChanged(); return this; } private int outstandingRpcsPerChannel_ ; /** * <code>optional int32 outstanding_rpcs_per_channel = 4;</code> */ public int getOutstandingRpcsPerChannel() { return outstandingRpcsPerChannel_; } /** * <code>optional int32 outstanding_rpcs_per_channel = 4;</code> */ public Builder setOutstandingRpcsPerChannel(int value) { outstandingRpcsPerChannel_ = value; onChanged(); return this; } /** * <code>optional int32 outstanding_rpcs_per_channel = 4;</code> */ public Builder clearOutstandingRpcsPerChannel() { outstandingRpcsPerChannel_ = 0; onChanged(); return this; } private int clientChannels_ ; /** * <code>optional int32 client_channels = 5;</code> */ public int getClientChannels() { return clientChannels_; } /** * <code>optional int32 client_channels = 5;</code> */ public Builder setClientChannels(int value) { clientChannels_ = value; onChanged(); return this; } /** * <code>optional int32 client_channels = 5;</code> */ 
public Builder clearClientChannels() { clientChannels_ = 0; onChanged(); return this; } private int payloadSize_ ; /** * <code>optional int32 payload_size = 6;</code> */ public int getPayloadSize() { return payloadSize_; } /** * <code>optional int32 payload_size = 6;</code> */ public Builder setPayloadSize(int value) { payloadSize_ = value; onChanged(); return this; } /** * <code>optional int32 payload_size = 6;</code> */ public Builder clearPayloadSize() { payloadSize_ = 0; onChanged(); return this; } private int asyncClientThreads_ ; /** * <code>optional int32 async_client_threads = 7;</code> * * <pre> * only for async client: * </pre> */ public int getAsyncClientThreads() { return asyncClientThreads_; } /** * <code>optional int32 async_client_threads = 7;</code> * * <pre> * only for async client: * </pre> */ public Builder setAsyncClientThreads(int value) { asyncClientThreads_ = value; onChanged(); return this; } /** * <code>optional int32 async_client_threads = 7;</code> * * <pre> * only for async client: * </pre> */ public Builder clearAsyncClientThreads() { asyncClientThreads_ = 0; onChanged(); return this; } private int rpcType_ = 0; /** * <code>optional .grpc.testing.RpcType rpc_type = 8;</code> */ public int getRpcTypeValue() { return rpcType_; } /** * <code>optional .grpc.testing.RpcType rpc_type = 8;</code> */ public Builder setRpcTypeValue(int value) { rpcType_ = value; onChanged(); return this; } /** * <code>optional .grpc.testing.RpcType rpc_type = 8;</code> */ public io.grpc.testing.RpcType getRpcType() { io.grpc.testing.RpcType result = io.grpc.testing.RpcType.valueOf(rpcType_); return result == null ? io.grpc.testing.RpcType.UNRECOGNIZED : result; } /** * <code>optional .grpc.testing.RpcType rpc_type = 8;</code> */ public Builder setRpcType(io.grpc.testing.RpcType value) { if (value == null) { throw new NullPointerException(); } rpcType_ = value.getNumber(); onChanged(); return this; } /** * <code>optional .grpc.testing.RpcType rpc_type = 8;</code> */ public Builder clearRpcType() { rpcType_ = 0; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:grpc.testing.ClientConfig) } // @@protoc_insertion_point(class_scope:grpc.testing.ClientConfig) private static final io.grpc.testing.ClientConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new io.grpc.testing.ClientConfig(); } public static io.grpc.testing.ClientConfig getDefaultInstance() { return DEFAULT_INSTANCE; } public static final com.google.protobuf.Parser<ClientConfig> PARSER = new com.google.protobuf.AbstractParser<ClientConfig>() { public ClientConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { try { return new ClientConfig(input, extensionRegistry); } catch (RuntimeException e) { if (e.getCause() instanceof com.google.protobuf.InvalidProtocolBufferException) { throw (com.google.protobuf.InvalidProtocolBufferException) e.getCause(); } throw e; } } }; public static com.google.protobuf.Parser<ClientConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ClientConfig> getParserForType() { return PARSER; } public io.grpc.testing.ClientConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
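/*
 * Usage sketch (not part of the generated file above): building a ClientConfig through its
 * generated Builder and round-tripping it through the wire format. The server target is a
 * made-up example; only enum constants referenced by the generated code (SYNCHRONOUS_CLIENT,
 * UNARY) are used.
 */
class ClientConfigUsageSketch {
    static io.grpc.testing.ClientConfig roundTrip() throws com.google.protobuf.InvalidProtocolBufferException {
        final io.grpc.testing.ClientConfig config = io.grpc.testing.ClientConfig.newBuilder()
            .addServerTargets("localhost:8080")          // hypothetical benchmark server
            .setClientType(io.grpc.testing.ClientType.SYNCHRONOUS_CLIENT)
            .setEnableSsl(false)
            .setOutstandingRpcsPerChannel(10)
            .setClientChannels(4)
            .setPayloadSize(1024)
            .setRpcType(io.grpc.testing.RpcType.UNARY)
            .build();
        // Serialize and parse again using the generated parser.
        return io.grpc.testing.ClientConfig.parseFrom(config.toByteArray());
    }
}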
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.s3a.impl; import java.io.IOException; import java.nio.file.AccessDeniedException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.function.Function; import java.util.stream.Collectors; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.MultiObjectDeleteException; import com.google.common.annotations.VisibleForTesting; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.lang3.tuple.Triple; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.s3a.AWSS3IOException; import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.fs.s3a.s3guard.BulkOperationState; import org.apache.hadoop.fs.s3a.s3guard.MetadataStore; import static com.google.common.base.Preconditions.checkNotNull; /** * Support for Multi Object Deletion. */ public final class MultiObjectDeleteSupport extends AbstractStoreOperation { private static final Logger LOG = LoggerFactory.getLogger( MultiObjectDeleteSupport.class); private final BulkOperationState operationState; /** * Initiate with a store context. * @param context store context. * @param operationState any ongoing bulk operation. */ public MultiObjectDeleteSupport(final StoreContext context, final BulkOperationState operationState) { super(context); this.operationState = operationState; } /** * This is the exception exit code if access was denied on a delete. * {@value}. */ public static final String ACCESS_DENIED = "AccessDenied"; /** * A {@code MultiObjectDeleteException} is raised if one or more * paths listed in a bulk DELETE operation failed. * The top-level exception is therefore just "something wasn't deleted", * but doesn't include the what or the why. * This translation will extract an AccessDeniedException if that's one of * the causes, otherwise grabs the status code and uses it in the * returned exception. * @param message text for the exception * @param deleteException the delete exception. to translate * @return an IOE with more detail. 
 */
public static IOException translateDeleteException(
    final String message,
    final MultiObjectDeleteException deleteException) {
  final StringBuilder result = new StringBuilder(
      deleteException.getErrors().size() * 256);
  result.append(message).append(": ");
  String exitCode = "";
  for (MultiObjectDeleteException.DeleteError error :
      deleteException.getErrors()) {
    String code = error.getCode();
    result.append(String.format("%s: %s: %s%n", code, error.getKey(),
        error.getMessage()));
    if (exitCode.isEmpty() || ACCESS_DENIED.equals(code)) {
      exitCode = code;
    }
  }
  if (ACCESS_DENIED.equals(exitCode)) {
    return (IOException) new AccessDeniedException(result.toString())
        .initCause(deleteException);
  } else {
    return new AWSS3IOException(result.toString(), deleteException);
  }
}

/**
 * Process a multi object delete exception by building two lists of paths
 * from the delete request: one of all successfully deleted files, one of
 * all undeleted values.
 * The latter are those rejected in the delete call.
 * @param deleteException the delete exception.
 * @param keysToDelete the keys in the delete request
 * @return tuple of (undeleted, deleted) paths.
 */
public Pair<List<Path>, List<Path>> splitUndeletedKeys(
    final MultiObjectDeleteException deleteException,
    final Collection<DeleteObjectsRequest.KeyVersion> keysToDelete) {
  LOG.debug("Processing delete failure; keys to delete count = {};"
          + " errors in exception {}; successful deletions = {}",
      keysToDelete.size(),
      deleteException.getErrors().size(),
      deleteException.getDeletedObjects().size());
  // convert the collection of keys being deleted into paths
  final List<Path> pathsBeingDeleted = keysToPaths(keysToDelete);
  // Take this list of paths, extract all undeleted entries contained in
  // the exception, and then remove them from the original list.
  List<Path> undeleted = removeUndeletedPaths(deleteException,
      pathsBeingDeleted,
      getStoreContext()::keyToPath);
  return Pair.of(undeleted, pathsBeingDeleted);
}

/**
 * Given a list of delete requests, convert them all to paths.
 * @param keysToDelete list of keys for the delete operation.
 * @return the paths.
 */
public List<Path> keysToPaths(
    final Collection<DeleteObjectsRequest.KeyVersion> keysToDelete) {
  return convertToPaths(keysToDelete,
      getStoreContext()::keyToPath);
}

/**
 * Given a list of delete requests, convert them all to paths.
 * @param keysToDelete list of keys for the delete operation.
 * @param qualifier path qualifier
 * @return the paths.
 */
public static List<Path> convertToPaths(
    final Collection<DeleteObjectsRequest.KeyVersion> keysToDelete,
    final Function<String, Path> qualifier) {
  return keysToDelete.stream()
      .map((keyVersion) -> qualifier.apply(keyVersion.getKey()))
      .collect(Collectors.toList());
}

/**
 * Process a delete failure by removing from the metastore all entries
 * which were deleted, as inferred from the delete failure exception
 * and the original list of files to delete.
 * @param deleteException the delete exception.
 * @param keysToDelete collection of keys which had been requested.
* @return a tuple of (undeleted, deleted, failures) */ public Triple<List<Path>, List<Path>, List<Pair<Path, IOException>>> processDeleteFailure( final MultiObjectDeleteException deleteException, final List<DeleteObjectsRequest.KeyVersion> keysToDelete) { final MetadataStore metadataStore = checkNotNull(getStoreContext().getMetadataStore(), "context metadatastore"); final List<Pair<Path, IOException>> failures = new ArrayList<>(); final Pair<List<Path>, List<Path>> outcome = splitUndeletedKeys(deleteException, keysToDelete); List<Path> deleted = outcome.getRight(); List<Path> undeleted = outcome.getLeft(); // delete the paths but recover // TODO: handle the case where a parent path is deleted but not a child. // TODO: in a fake object delete, we don't actually want to delete // metastore entries deleted.forEach(path -> { try { metadataStore.delete(path, operationState); } catch (IOException e) { // trouble: we failed to delete the far end entry // try with the next one. // if this is a big network failure, this is going to be noisy. LOG.warn("Failed to update S3Guard store with deletion of {}", path); failures.add(Pair.of(path, e)); } }); if (LOG.isDebugEnabled()) { undeleted.forEach(p -> LOG.debug("Deleted {}", p)); } return Triple.of(undeleted, deleted, failures); } /** * Build a list of undeleted paths from a {@code MultiObjectDeleteException}. * Outside of unit tests, the qualifier function should be * {@link S3AFileSystem#keyToQualifiedPath(String)}. * @param deleteException the delete exception. * @param qualifierFn function to qualify paths * @return the possibly empty list of paths. */ @VisibleForTesting public static List<Path> extractUndeletedPaths( final MultiObjectDeleteException deleteException, final Function<String, Path> qualifierFn) { return deleteException.getErrors().stream() .map((e) -> qualifierFn.apply(e.getKey())) .collect(Collectors.toList()); } /** * Process a {@code MultiObjectDeleteException} by * removing all undeleted paths from the list of paths being deleted. * The original list is updated, and so becomes the list of successfully * deleted paths. * @param deleteException the delete exception. * @param pathsBeingDeleted list of paths which were being deleted. * This has all undeleted paths removed, leaving only those deleted. * @return the list of undeleted entries */ @VisibleForTesting static List<Path> removeUndeletedPaths( final MultiObjectDeleteException deleteException, final Collection<Path> pathsBeingDeleted, final Function<String, Path> qualifier) { List<Path> undeleted = extractUndeletedPaths(deleteException, qualifier); pathsBeingDeleted.removeAll(undeleted); return undeleted; } /** * A delete operation failed. * Currently just returns the list of all paths. * @param ex exception. * @param keysToDelete the keys which were being deleted. * @return all paths which were not deleted. */ public List<Path> processDeleteFailureGenericException(Exception ex, final List<DeleteObjectsRequest.KeyVersion> keysToDelete) { return keysToPaths(keysToDelete); } }
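/*
 * Usage sketch (not part of the original file): how a caller that issues the bulk delete itself
 * might use the static translateDeleteException helper above. The AmazonS3 client and request
 * are hypothetical; AccessDenied failures surface as AccessDeniedException, anything else as
 * AWSS3IOException.
 */
class MultiObjectDeleteSupportUsageSketch {
    static void deleteOrTranslate(final com.amazonaws.services.s3.AmazonS3 s3,
            final com.amazonaws.services.s3.model.DeleteObjectsRequest request) throws java.io.IOException {
        try {
            s3.deleteObjects(request);
        } catch (com.amazonaws.services.s3.model.MultiObjectDeleteException e) {
            // Convert the partial failure into a single IOException with per-key detail.
            throw org.apache.hadoop.fs.s3a.impl.MultiObjectDeleteSupport
                .translateDeleteException("Bulk delete failed", e);
        }
    }
}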
/* Copyright 2014-2016 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package apple.homekit; import apple.NSObject; import apple.foundation.NSArray; import apple.foundation.NSError; import apple.foundation.NSMethodSignature; import apple.foundation.NSSet; import org.moe.natj.c.ann.FunctionPtr; import org.moe.natj.general.NatJ; import org.moe.natj.general.Pointer; import org.moe.natj.general.ann.Generated; import org.moe.natj.general.ann.Library; import org.moe.natj.general.ann.Mapped; import org.moe.natj.general.ann.MappedReturn; import org.moe.natj.general.ann.NInt; import org.moe.natj.general.ann.NUInt; import org.moe.natj.general.ann.Owned; import org.moe.natj.general.ann.Runtime; import org.moe.natj.general.ptr.VoidPtr; import org.moe.natj.objc.Class; import org.moe.natj.objc.ObjCRuntime; import org.moe.natj.objc.SEL; import org.moe.natj.objc.ann.ObjCBlock; import org.moe.natj.objc.ann.ObjCClassBinding; import org.moe.natj.objc.ann.Selector; import org.moe.natj.objc.map.ObjCObjectMapper; /** * This class is used to represent an entry in an action set that writes a specific * value to a characteristic. */ @Generated @Library("HomeKit") @Runtime(ObjCRuntime.class) @ObjCClassBinding public class HMCharacteristicWriteAction<_TargetValueType> extends HMAction { static { NatJ.register(); } @Generated protected HMCharacteristicWriteAction(Pointer peer) { super(peer); } @Generated @Selector("accessInstanceVariablesDirectly") public static native boolean accessInstanceVariablesDirectly(); @Generated @Owned @Selector("alloc") public static native HMCharacteristicWriteAction<?> alloc(); @Owned @Generated @Selector("allocWithZone:") public static native HMCharacteristicWriteAction<?> allocWithZone(VoidPtr zone); @Generated @Selector("automaticallyNotifiesObserversForKey:") public static native boolean automaticallyNotifiesObserversForKey(String key); @Generated @Selector("cancelPreviousPerformRequestsWithTarget:") public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget); @Generated @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:") public static native void cancelPreviousPerformRequestsWithTargetSelectorObject( @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector, @Mapped(ObjCObjectMapper.class) Object anArgument); @Generated @Selector("classFallbacksForKeyedArchiver") public static native NSArray<String> classFallbacksForKeyedArchiver(); @Generated @Selector("classForKeyedUnarchiver") public static native Class classForKeyedUnarchiver(); @Generated @Selector("debugDescription") public static native String debugDescription_static(); @Generated @Selector("description") public static native String description_static(); @Generated @Selector("hash") @NUInt public static native long hash_static(); @Generated @Selector("instanceMethodForSelector:") @FunctionPtr(name = "call_instanceMethodForSelector_ret") public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector); @Generated 
@Selector("instanceMethodSignatureForSelector:") public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector); @Generated @Selector("instancesRespondToSelector:") public static native boolean instancesRespondToSelector(SEL aSelector); @Generated @Selector("isSubclassOfClass:") public static native boolean isSubclassOfClass(Class aClass); @Generated @Selector("keyPathsForValuesAffectingValueForKey:") public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key); @Generated @Owned @Selector("new") public static native HMCharacteristicWriteAction<?> new_objc(); @Generated @Selector("resolveClassMethod:") public static native boolean resolveClassMethod(SEL sel); @Generated @Selector("resolveInstanceMethod:") public static native boolean resolveInstanceMethod(SEL sel); @Generated @Selector("setVersion:") public static native void setVersion_static(@NInt long aVersion); @Generated @Selector("superclass") public static native Class superclass_static(); @Generated @Selector("version") @NInt public static native long version_static(); /** * The characteristic associated with the action. */ @Generated @Selector("characteristic") public native HMCharacteristic characteristic(); @Generated @Selector("init") public native HMCharacteristicWriteAction<?> init(); /** * Initializer method that ties the action to a particular characteristic. * * @param characteristic The characteristic bound to the action. * @param targetValue The target value for the characteristic. * @return Instance object representing the characteristic write action. */ @Generated @Selector("initWithCharacteristic:targetValue:") public native HMCharacteristicWriteAction<?> initWithCharacteristicTargetValue(HMCharacteristic characteristic, @Mapped(ObjCObjectMapper.class) Object targetValue); /** * The target value for the action. */ @Generated @Selector("targetValue") @MappedReturn(ObjCObjectMapper.class) public native Object targetValue(); /** * This method is used to change target value for the characteristic. * * @param targetValue New target value for the characteristic. * @param completion Block that is invoked once the request is processed. * The NSError provides more information on the status of the request, error * will be nil on success. */ @Generated @Selector("updateTargetValue:completionHandler:") public native void updateTargetValueCompletionHandler(@Mapped(ObjCObjectMapper.class) Object targetValue, @ObjCBlock(name = "call_updateTargetValueCompletionHandler") Block_updateTargetValueCompletionHandler completion); @Runtime(ObjCRuntime.class) @Generated public interface Block_updateTargetValueCompletionHandler { @Generated void call_updateTargetValueCompletionHandler(NSError error); } }
/*L * Copyright (c) 2006 SAIC, SAIC-F. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/rembrandt/LICENSE.txt for details. */ package gov.nih.nci.rembrandt.web.xml; import gov.nih.nci.caintegrator.dto.de.BioSpecimenIdentifierDE; import gov.nih.nci.rembrandt.queryservice.resultset.DimensionalViewContainer; import gov.nih.nci.rembrandt.queryservice.resultset.Resultant; import gov.nih.nci.rembrandt.queryservice.resultset.ResultsContainer; import gov.nih.nci.rembrandt.queryservice.resultset.copynumber.CopyNumberGeneViewResultsContainer; import gov.nih.nci.rembrandt.queryservice.resultset.copynumber.CopyNumberSegmentViewResultsContainer; import gov.nih.nci.rembrandt.queryservice.resultset.copynumber.SampleCopyNumberValuesResultset; import gov.nih.nci.rembrandt.queryservice.resultset.gene.GeneResultset; import gov.nih.nci.rembrandt.queryservice.resultset.gene.ViewByGroupResultset; import gov.nih.nci.rembrandt.queryservice.resultset.sample.SampleResultset; import gov.nih.nci.rembrandt.queryservice.resultset.sample.SampleViewResultsContainer; import gov.nih.nci.rembrandt.util.DEUtils; import java.io.IOException; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.servlet.jsp.JspWriter; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.dom4j.Document; import org.dom4j.DocumentHelper; import org.dom4j.Element; /** * @author LandyR * Feb 8, 2005 * */ /** * caIntegrator License * * Copyright 2001-2005 Science Applications International Corporation ("SAIC"). * The software subject to this notice and license includes both human readable source code form and machine readable, * binary, object code form ("the caIntegrator Software"). The caIntegrator Software was developed in conjunction with * the National Cancer Institute ("NCI") by NCI employees and employees of SAIC. * To the extent government employees are authors, any rights in such works shall be subject to Title 17 of the United States * Code, section 105. * This caIntegrator Software License (the "License") is between NCI and You. "You (or "Your") shall mean a person or an * entity, and all other entities that control, are controlled by, or are under common control with the entity. "Control" * for purposes of this definition means (i) the direct or indirect power to cause the direction or management of such entity, * whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) * beneficial ownership of such entity. * This License is granted provided that You agree to the conditions described below. NCI grants You a non-exclusive, * worldwide, perpetual, fully-paid-up, no-charge, irrevocable, transferable and royalty-free right and license in its rights * in the caIntegrator Software to (i) use, install, access, operate, execute, copy, modify, translate, market, publicly * display, publicly perform, and prepare derivative works of the caIntegrator Software; (ii) distribute and have distributed * to and by third parties the caIntegrator Software and any modifications and derivative works thereof; * and (iii) sublicense the foregoing rights set out in (i) and (ii) to third parties, including the right to license such * rights to further third parties. 
For sake of clarity, and not by way of limitation, NCI shall have no right of accounting * or right of payment from You or Your sublicensees for the rights granted under this License. This License is granted at no * charge to You. * 1. Your redistributions of the source code for the Software must retain the above copyright notice, this list of conditions * and the disclaimer and limitation of liability of Article 6, below. Your redistributions in object code form must reproduce * the above copyright notice, this list of conditions and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * 2. Your end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This * product includes software developed by SAIC and the National Cancer Institute." If You do not include such end-user * documentation, You shall include this acknowledgment in the Software itself, wherever such third-party acknowledgments * normally appear. * 3. You may not use the names "The National Cancer Institute", "NCI" "Science Applications International Corporation" and * "SAIC" to endorse or promote products derived from this Software. This License does not authorize You to use any * trademarks, service marks, trade names, logos or product names of either NCI or SAIC, except as required to comply with * the terms of this License. * 4. For sake of clarity, and not by way of limitation, You may incorporate this Software into Your proprietary programs and * into any third party proprietary programs. However, if You incorporate the Software into third party proprietary * programs, You agree that You are solely responsible for obtaining any permission from such third parties required to * incorporate the Software into such third party proprietary programs and for informing Your sublicensees, including * without limitation Your end-users, of their obligation to secure any required permissions from such third parties * before incorporating the Software into such third party proprietary software programs. In the event that You fail * to obtain such permissions, You agree to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such permissions. * 5. For sake of clarity, and not by way of limitation, You may add Your own copyright statement to Your modifications and * to the derivative works, and You may provide additional or different license terms and conditions in Your sublicenses * of modifications of the Software, or any derivative works of the Software as a whole, provided Your use, reproduction, * and distribution of the Work otherwise complies with the conditions stated in this License. * 6. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY, NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. 
* IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ public class CopyNumberIGVReport{ /** * */ private String chr; private Long startLoc = null; private Long endLoc = null; public CopyNumberIGVReport () { super(); } /* (non-Javadoc) * @see gov.nih.nci.nautilus.ui.report.ReportGenerator#getTemplate(gov.nih.nci.nautilus.resultset.Resultant, java.lang.String) */ @SuppressWarnings("unchecked") public StringBuffer getIGVReport(Resultant resultant) throws IOException { //String theColors[] = { "B6C5F2","F2E3B5","DAE1F9","C4F2B5","819BE9", "E9CF81" }; DecimalFormat resultFormat = new DecimalFormat("0.0000"); String defaultV = "-"; StringBuffer sb = new StringBuffer(); //Document document = DocumentHelper.createDocument(); try { //Element report = document.addElement( "Report" ); //Element cell = null; //Element data = null; //Element dataRow = null; //add the atts //report.addAttribute("reportType", "Copy Number"); //fudge these for now //report.addAttribute("groupBy", "none"); String queryName = resultant.getAssociatedQuery().getQueryName(); //set the queryName to be unique for session/cache access //report.addAttribute("queryName", queryName); //report.addAttribute("sessionId", "the session id"); //report.addAttribute("creationTime", "right now"); //hold a message to display on the report //report.addAttribute("msg", (resultant.isOverLimit() ? 
"over limit" : "")); ResultsContainer resultsContainer = resultant.getResultsContainer(); CopyNumberSegmentViewResultsContainer segmentViewResultsContainer = null; CopyNumberGeneViewResultsContainer copyNumberGeneViewContainer = null; if(resultsContainer instanceof DimensionalViewContainer) { DimensionalViewContainer dimensionalViewContainer = (DimensionalViewContainer) resultsContainer; segmentViewResultsContainer = dimensionalViewContainer.getCopyNumberSegmentViewResultsContainer(); copyNumberGeneViewContainer = dimensionalViewContainer.getCopyNumberGeneViewResultsContainer(); } else if (resultsContainer instanceof CopyNumberSegmentViewResultsContainer) { segmentViewResultsContainer = (CopyNumberSegmentViewResultsContainer) resultsContainer; } else if(resultsContainer instanceof CopyNumberGeneViewResultsContainer) { //for single copyNumberGeneViewContainer = (CopyNumberGeneViewResultsContainer) resultsContainer; } if(segmentViewResultsContainer != null){ Collection segments = segmentViewResultsContainer.getSampleCopyNumberValuesResultsets(); // set up the headers for this table //Element headerRow = report.addElement("Row").addAttribute("name", "headerRow"); //sb = sb.append(getCopyNumberSegmentHeaderValues()); // for(String h : CopyNumberIGVReport.getCopyNumberSegmentHeaderValues()){ // cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); // data = cell.addElement("Data").addAttribute("type", "header").addText(h); // data = null; // cell = null; // } for (Iterator sampleIterator = segments.iterator(); sampleIterator.hasNext();) { SampleCopyNumberValuesResultset sampleResultset = (SampleCopyNumberValuesResultset)sampleIterator.next(); //dataRow = report.addElement("Row").addAttribute("name", "dataRow"); //List rows = new ArrayList(); chr = getChromosome(sampleResultset); startLoc = getStartLoc(startLoc , sampleResultset); endLoc = getEndLoc(endLoc , sampleResultset); sb = sb.append(getClinicalRowValues(sampleResultset)); //cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "sample"); //data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(rows.get(0))); //data = null; //cell = null; // for(int i=1; i<rows.size(); i++) { // cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); // data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(rows.get(i))); // data = null; // cell = null; // } } }else if(copyNumberGeneViewContainer != null) { Collection genes = copyNumberGeneViewContainer.getGeneResultsets(); Collection labels = copyNumberGeneViewContainer.getGroupsLabels(); Collection sampleIds = null; for (Iterator geneIterator = genes.iterator(); geneIterator.hasNext();) { GeneResultset geneResultset = (GeneResultset)geneIterator.next(); // set up the headers for this table //Element headerRow = report.addElement("Row").addAttribute("name", "headerRow"); //sb = sb.append(getCopyNumberSegmentHeaderValues()); // for(String h : CopyNumberIGVReport.getCopyNumberSegmentHeaderValues()){ // cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); // data = cell.addElement("Data").addAttribute("type", "header").addText(h); // data = null; // cell = null; // } //header.append("<td colspan="+sampleIds.size()+" class='"+label+"' id=\"header\">"+label+" 
Samples</td>"); for (Iterator labelIterator = labels.iterator(); labelIterator.hasNext();) { String label = (String) labelIterator.next(); ViewByGroupResultset groupResultset = (ViewByGroupResultset) geneResultset.getGroupByResultset(label); sampleIds = copyNumberGeneViewContainer.getBiospecimenLabels(label); String hClass = label; if(groupResultset != null) { for (Iterator sampleIdIterator = sampleIds.iterator(); sampleIdIterator.hasNext();) { BioSpecimenIdentifierDE sampleId = (BioSpecimenIdentifierDE) sampleIdIterator.next(); SampleCopyNumberValuesResultset biospecimenResultset = (SampleCopyNumberValuesResultset) groupResultset.getBioSpecimenResultset(sampleId.getSpecimenName()); if(biospecimenResultset != null){ chr = getChromosome(biospecimenResultset); startLoc = getStartLoc(startLoc , biospecimenResultset); endLoc = getEndLoc(endLoc , biospecimenResultset); // dataRow = report.addElement("Row").addAttribute("name", "dataRow"); // // List rows = new ArrayList(); sb.append(getClinicalRowValues(biospecimenResultset)); // // cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "sample"); // data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(rows.get(0))); // data = null; // cell = null; // // for(int i=1; i<rows.size(); i++) { // cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); // data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(rows.get(i))); // data = null; // cell = null; // } } } } } } } } catch(Exception e) { System.out.println(e); } return sb; } @SuppressWarnings("unchecked") public String getClinicalRowValues(SampleCopyNumberValuesResultset sampleResultset){ String defaultV = "-"; //List rows = new ArrayList(); String row = sampleResultset.getBiospecimen().getSpecimenName()+"\t"+ (sampleResultset.getChr()!= null? sampleResultset.getChr():defaultV)+"\t"+ (sampleResultset.getLocStart()!= null? sampleResultset.getLocStart():defaultV)+"\t"+ (sampleResultset.getLocEnd()!= null? sampleResultset.getLocEnd():defaultV)+"\t"+ (sampleResultset.getNumberOFMarks()!= null? sampleResultset.getNumberOFMarks().getValue().toString():defaultV)+"\t"+ (sampleResultset.getSegmentMean()!= null? 
sampleResultset.getSegmentMean().getValue().toString() : defaultV) + "\n"; //Number of Marks,,Sample,Disease
    // rows.add(sampleResultset.getBiospecimen().getSpecimenName());
    // rows.add(sampleResultset.getChr());
    // rows.add(sampleResultset.getLocStart());
    // rows.add(sampleResultset.getLocEnd());
    // rows.add(sampleResultset.getNumberOFMarks());
    // rows.add(sampleResultset.getSegmentMean());
    // rows.add(sampleResultset.getSampleIDDE());
    // rows.add(sampleResultset.getDisease());
    return row;
}

@SuppressWarnings("unchecked")
private String getChromosome(SampleCopyNumberValuesResultset sampleResultset) {
    String chr = "";
    if (sampleResultset.getChr() != null) {
        chr = sampleResultset.getChr();
    }
    return chr;
}

@SuppressWarnings("unchecked")
private Long getStartLoc(Long defaultStart, SampleCopyNumberValuesResultset sampleResultset) {
    Long startLoc = new Long(0);
    if (sampleResultset.getLocStart() != null) {
        startLoc = new Long(sampleResultset.getLocStart());
    }
    if (startLoc != null) {
        if (defaultStart == null) {
            defaultStart = startLoc; // first time
        }
        if (startLoc < defaultStart) {
            return startLoc;
        }
    }
    return defaultStart;
}

@SuppressWarnings("unchecked")
private Long getEndLoc(Long defaultEnd, SampleCopyNumberValuesResultset sampleResultset) {
    Long endLoc = new Long(0);
    // Check (and read) the end location rather than the start location.
    if (sampleResultset.getLocEnd() != null) {
        endLoc = new Long(sampleResultset.getLocEnd());
    }
    // Guard on the local end value, not the startLoc field.
    if (endLoc != null) {
        if (defaultEnd == null) {
            defaultEnd = endLoc; // first time
        }
        if (endLoc > defaultEnd) {
            return endLoc;
        }
    }
    return defaultEnd;
}

public String getCopyNumberSegmentHeaderValues() {
    String headers = "Specimen" + "\t" + "Chr No." + "\t" + "Start Position" + "\t"
        + "End Position" + "\t" + "Number of Marks" + "\t" + "Segment Mean" + "\n";
    //List<String> heads = new ArrayList<String>();
    //heads = Arrays.asList(StringUtils.split(headers, ","));
    return headers;
}

/**
 * @return the chr
 */
public String getChr() {
    return chr;
}

/**
 * @return the startLoc
 */
public Long getStartLoc() {
    return startLoc;
}

/**
 * @return the endLoc
 */
public Long getEndLoc() {
    return endLoc;
}
}
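/*
 * Usage sketch (not part of the original file): writing the tab-separated segment rows produced
 * by CopyNumberIGVReport above to an IGV-style text stream. The Resultant would come from the
 * Rembrandt query service; the Writer destination is hypothetical.
 */
class CopyNumberIGVReportUsageSketch {
    static void writeSegments(final gov.nih.nci.rembrandt.queryservice.resultset.Resultant resultant,
            final java.io.Writer out) throws java.io.IOException {
        final CopyNumberIGVReport report = new CopyNumberIGVReport();
        // Header row: Specimen, Chr No., Start Position, End Position, Number of Marks, Segment Mean.
        out.write(report.getCopyNumberSegmentHeaderValues());
        // One tab-separated row per sample copy number segment.
        out.write(report.getIGVReport(resultant).toString());
        out.flush();
    }
}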
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dtstack.jlogstash.metrics.util; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import sun.net.util.IPAddressUtil; import java.io.IOException; import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.ServerSocket; import java.net.URL; import java.net.UnknownHostException; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; /** * copy from https://github.com/apache/flink */ public class NetUtils { private static final Logger LOG = LoggerFactory.getLogger(NetUtils.class); /** The wildcard address to listen on all interfaces (either 0.0.0.0 or ::) */ private static final String WILDCARD_ADDRESS = new InetSocketAddress(0).getAddress().getHostAddress(); /** * Turn a fully qualified domain name (fqdn) into a hostname. If the fqdn has multiple subparts * (separated by a period '.'), it will take the first part. Otherwise it takes the entire fqdn. * * @param fqdn The fully qualified domain name. * @return The hostname. */ public static String getHostnameFromFQDN(String fqdn) { if (fqdn == null) { throw new IllegalArgumentException("fqdn is null"); } int dotPos = fqdn.indexOf('.'); if(dotPos == -1) { return fqdn; } else { return fqdn.substring(0, dotPos); } } /** * Method to validate if the given String represents a hostname:port. * * Works also for ipv6. * * See: http://stackoverflow.com/questions/2345063/java-common-way-to-validate-and-convert-hostport-to-inetsocketaddress * * @return URL object for accessing host and Port */ public static URL getCorrectHostnamePort(String hostPort) { try { URL u = new URL("http://"+hostPort); if(u.getHost() == null) { throw new IllegalArgumentException("The given host:port ('"+hostPort+"') doesn't contain a valid host"); } if(u.getPort() == -1) { throw new IllegalArgumentException("The given host:port ('"+hostPort+"') doesn't contain a valid port"); } return u; } catch (MalformedURLException e) { throw new IllegalArgumentException("The given host:port ('"+hostPort+"') is invalid", e); } } // ------------------------------------------------------------------------ // Lookup of to free ports // ------------------------------------------------------------------------ /** * Find a non-occupied port. * * @return A non-occupied port. 
*/ public static int getAvailablePort() { for (int i = 0; i < 50; i++) { try (ServerSocket serverSocket = new ServerSocket(0)) { int port = serverSocket.getLocalPort(); if (port != 0) { return port; } } catch (IOException ignored) {} } throw new RuntimeException("Could not find a free permitted port on the machine."); } // ------------------------------------------------------------------------ // Encoding of IP addresses for URLs // ------------------------------------------------------------------------ /** * Returns an address in a normalized format for Akka. * When an IPv6 address is specified, it normalizes the IPv6 address to avoid * complications with the exact URL match policy of Akka. * @param host The hostname, IPv4 or IPv6 address * @return host which will be normalized if it is an IPv6 address */ public static String unresolvedHostToNormalizedString(String host) { // Return loopback interface address if host is null // This represents the behavior of {@code InetAddress.getByName } and RFC 3330 if (host == null) { host = InetAddress.getLoopbackAddress().getHostAddress(); } else { host = host.trim().toLowerCase(); } // normalize and valid address if (IPAddressUtil.isIPv6LiteralAddress(host)) { byte[] ipV6Address = IPAddressUtil.textToNumericFormatV6(host); host = getIPv6UrlRepresentation(ipV6Address); } else if (!IPAddressUtil.isIPv4LiteralAddress(host)) { try { // We don't allow these in hostnames Preconditions.checkArgument(!host.startsWith(".")); Preconditions.checkArgument(!host.endsWith(".")); Preconditions.checkArgument(!host.contains(":")); } catch (Exception e) { throw new IllegalArgumentException("The configured hostname is not valid", e); } } return host; } /** * Returns a valid address for Akka. It returns a String of format 'host:port'. * When an IPv6 address is specified, it normalizes the IPv6 address to avoid * complications with the exact URL match policy of Akka. * @param host The hostname, IPv4 or IPv6 address * @param port The port * @return host:port where host will be normalized if it is an IPv6 address */ public static String unresolvedHostAndPortToNormalizedString(String host, int port) { Preconditions.checkArgument(port >= 0 && port < 65536, "Port is not within the valid range,"); return unresolvedHostToNormalizedString(host) + ":" + port; } /** * Encodes an IP address properly as a URL string. This method makes sure that IPv6 addresses * have the proper formatting to be included in URLs. * * @param address The IP address to encode. * @return The proper URL string encoded IP address. */ public static String ipAddressToUrlString(InetAddress address) { if (address == null) { throw new NullPointerException("address is null"); } else if (address instanceof Inet4Address) { return address.getHostAddress(); } else if (address instanceof Inet6Address) { return getIPv6UrlRepresentation((Inet6Address) address); } else { throw new IllegalArgumentException("Unrecognized type of InetAddress: " + address); } } /** * Encodes an IP address and port to be included in URL. in particular, this method makes * sure that IPv6 addresses have the proper formatting to be included in URLs. * * @param address The address to be included in the URL. * @param port The port for the URL address. * @return The proper URL string encoded IP address and port. */ public static String ipAddressAndPortToUrlString(InetAddress address, int port) { return ipAddressToUrlString(address) + ':' + port; } /** * Encodes an IP address and port to be included in URL. 
in particular, this method makes * sure that IPv6 addresses have the proper formatting to be included in URLs. * * @param address The socket address with the IP address and port. * @return The proper URL string encoded IP address and port. */ public static String socketAddressToUrlString(InetSocketAddress address) { if (address.isUnresolved()) { throw new IllegalArgumentException("Address cannot be resolved: " + address.getHostString()); } return ipAddressAndPortToUrlString(address.getAddress(), address.getPort()); } /** * Normalizes and encodes a hostname and port to be included in URL. * In particular, this method makes sure that IPv6 address literals have the proper * formatting to be included in URLs. * * @param host The address to be included in the URL. * @param port The port for the URL address. * @return The proper URL string encoded IP address and port. * @throws UnknownHostException Thrown, if the hostname cannot be translated into a URL. */ public static String hostAndPortToUrlString(String host, int port) throws UnknownHostException { return ipAddressAndPortToUrlString(InetAddress.getByName(host), port); } /** * Creates a compressed URL style representation of an Inet6Address. * * <p>This method copies and adopts code from Google's Guava library. * We re-implement this here in order to reduce dependency on Guava. * The Guava library has frequently caused dependency conflicts in the past. */ private static String getIPv6UrlRepresentation(Inet6Address address) { return getIPv6UrlRepresentation(address.getAddress()); } /** * Creates a compressed URL style representation of an Inet6Address. * * <p>This method copies and adopts code from Google's Guava library. * We re-implement this here in order to reduce dependency on Guava. * The Guava library has frequently caused dependency conflicts in the past. */ private static String getIPv6UrlRepresentation(byte[] addressBytes) { // first, convert bytes to 16 bit chunks int[] hextets = new int[8]; for (int i = 0; i < hextets.length; i++) { hextets[i] = (addressBytes[2 * i] & 0xFF) << 8 | (addressBytes[2 * i + 1] & 0xFF); } // now, find the sequence of zeros that should be compressed int bestRunStart = -1; int bestRunLength = -1; int runStart = -1; for (int i = 0; i < hextets.length + 1; i++) { if (i < hextets.length && hextets[i] == 0) { if (runStart < 0) { runStart = i; } } else if (runStart >= 0) { int runLength = i - runStart; if (runLength > bestRunLength) { bestRunStart = runStart; bestRunLength = runLength; } runStart = -1; } } if (bestRunLength >= 2) { Arrays.fill(hextets, bestRunStart, bestRunStart + bestRunLength, -1); } // convert into text form StringBuilder buf = new StringBuilder(40); buf.append('['); boolean lastWasNumber = false; for (int i = 0; i < hextets.length; i++) { boolean thisIsNumber = hextets[i] >= 0; if (thisIsNumber) { if (lastWasNumber) { buf.append(':'); } buf.append(Integer.toHexString(hextets[i])); } else { if (i == 0 || lastWasNumber) { buf.append("::"); } } lastWasNumber = thisIsNumber; } buf.append(']'); return buf.toString(); } // ------------------------------------------------------------------------ // Port range parsing // ------------------------------------------------------------------------ /** * Returns an iterator over available ports defined by the range definition. * * @param rangeDefinition String describing a single port, a range of ports or multiple ranges. * @return Set of ports from the range definition * @throws NumberFormatException If an invalid string is passed. 
*/ public static Iterator<Integer> getPortRangeFromString(String rangeDefinition) throws NumberFormatException { final String[] ranges = rangeDefinition.trim().split(","); UnionIterator<Integer> iterators = new UnionIterator<>(); for (String rawRange: ranges) { Iterator<Integer> rangeIterator; String range = rawRange.trim(); int dashIdx = range.indexOf('-'); if (dashIdx == -1) { // only one port in range: final int port = Integer.valueOf(range); if (port < 0 || port > 65535) { throw new IllegalArgumentException("Invalid port configuration. Port must be between 0" + "and 65535, but was " + port + "."); } rangeIterator = Collections.singleton(Integer.valueOf(range)).iterator(); } else { // evaluate range final int start = Integer.valueOf(range.substring(0, dashIdx)); if (start < 0 || start > 65535) { throw new IllegalArgumentException("Invalid port configuration. Port must be between 0" + "and 65535, but was " + start + "."); } final int end = Integer.valueOf(range.substring(dashIdx+1, range.length())); if (end < 0 || end > 65535) { throw new IllegalArgumentException("Invalid port configuration. Port must be between 0" + "and 65535, but was " + end + "."); } rangeIterator = new Iterator<Integer>() { int i = start; @Override public boolean hasNext() { return i <= end; } @Override public Integer next() { return i++; } @Override public void remove() { throw new UnsupportedOperationException("Remove not supported"); } }; } iterators.add(rangeIterator); } return iterators; } /** * Tries to allocate a socket from the given sets of ports. * * @param portsIterator A set of ports to choose from. * @param factory A factory for creating the SocketServer * @return null if no port was available or an allocated socket. */ public static ServerSocket createSocketFromPorts(Iterator<Integer> portsIterator, SocketFactory factory) { while (portsIterator.hasNext()) { int port = portsIterator.next(); LOG.debug("Trying to open socket on port {}", port); try { return factory.createSocket(port); } catch (IOException | IllegalArgumentException e) { if (LOG.isDebugEnabled()) { LOG.debug("Unable to allocate socket on port", e); } else { LOG.info("Unable to allocate on port {}, due to error: {}", port, e.getMessage()); } } } return null; } /** * Returns the wildcard address to listen on all interfaces. * @return Either 0.0.0.0 or :: depending on the IP setup. */ public static String getWildcardIPAddress() { return WILDCARD_ADDRESS; } public interface SocketFactory { ServerSocket createSocket(int port) throws IOException; } }
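/*
 * Hedged usage sketch for the NetUtils helpers above: parse a port-range definition
 * with getPortRangeFromString and bind a ServerSocket on the first free port via
 * createSocketFromPorts. The range string "9100-9110,9200" is an arbitrary example;
 * the SocketFactory simply opens a plain ServerSocket with default backlog/address.
 */
final class NetUtilsPortRangeSketch {
    public static void main(String[] args) throws java.io.IOException {
        java.util.Iterator<Integer> ports = NetUtils.getPortRangeFromString("9100-9110,9200");
        java.net.ServerSocket socket = NetUtils.createSocketFromPorts(ports,
                new NetUtils.SocketFactory() {
                    @Override
                    public java.net.ServerSocket createSocket(int port) throws java.io.IOException {
                        return new java.net.ServerSocket(port);
                    }
                });
        if (socket == null) {
            // createSocketFromPorts returns null when every port in the range is occupied
            System.err.println("No free port in the configured range");
        } else {
            System.out.println("Listening on port " + socket.getLocalPort());
            socket.close();
        }
    }
}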
package org.openprovenance.prov.model; import java.io.IOException; import java.io.InputStream; import java.math.BigInteger; import javax.xml.datatype.XMLGregorianCalendar; import javax.xml.namespace.QName; import javax.xml.parsers.DocumentBuilder; import org.apache.commons.io.IOUtils; import org.openprovenance.prov.model.exception.ConverterException; import org.xml.sax.SAXException; /** * Conversion from String to Object and vice-versa for common xsd types. * * @author lavm * */ public class ValueConverter { final private LiteralConstructor pFactory; final private Name name; public ValueConverter(ProvFactory pFactory) { this.pFactory = pFactory; this.name = pFactory.getName(); } // should be implemented with a hash table of converters /** * Converts a string into a Java object, according to type provided. This * function does not convert to QualifiedName since this requires access to * a prefix-namespace mapping. * * @param datatype * any xsd datatype, provided it is not xsd:QName * @param value * is a String * @return an object */ public Object convertToJava(QualifiedName datatype, String value) { if (datatype.equals(name.XSD_STRING)) return value; if (datatype.equals(name.XSD_INT)) return Integer.parseInt(value); if (datatype.equals(name.XSD_LONG)) return Long.parseLong(value); if (datatype.equals(name.XSD_SHORT)) return Short.parseShort(value); if (datatype.equals(name.XSD_DOUBLE)) return Double.parseDouble(value); if (datatype.equals(name.XSD_FLOAT)) return Float.parseFloat(value); if (datatype.equals(name.XSD_DECIMAL)) return new java.math.BigDecimal(value); if (datatype.equals(name.XSD_BOOLEAN)) return Boolean.parseBoolean(value); if (datatype.equals(name.XSD_BYTE)) return Byte.parseByte(value); if (datatype.equals(name.XSD_UNSIGNED_INT)) return Long.parseLong(value); if (datatype.equals(name.XSD_UNSIGNED_SHORT)) return Integer.parseInt(value); if (datatype.equals(name.XSD_UNSIGNED_BYTE)) return Short.parseShort(value); if (datatype.equals(name.XSD_UNSIGNED_LONG)) return new java.math.BigInteger(value); if (datatype.equals(name.XSD_INTEGER)) return new java.math.BigInteger(value); if (datatype.equals(name.XSD_NON_NEGATIVE_INTEGER)) return new java.math.BigInteger(value); if (datatype.equals(name.XSD_NON_POSITIVE_INTEGER)) return new java.math.BigInteger(value); if (datatype.equals(name.XSD_POSITIVE_INTEGER)) return new java.math.BigInteger(value); if (datatype.equals(name.XSD_ANY_URI)) { return value; } if (datatype.equals(name.PROV_QUALIFIED_NAME)) { throw new ConverterException("Not conversion to xsd:QName"); } if (datatype.equals(name.XSD_DATETIME)) { return pFactory.newISOTime(value); } if (datatype.equals(name.XSD_GYEAR)) { return pFactory.newGYear(new Integer(value)); } if (datatype.equals(name.XSD_GMONTH)) { // format is --02 return pFactory.newGMonth(new Integer(value.substring(2))); } if (datatype.equals(name.XSD_GMONTH_DAY)) { // format is --12-25 return pFactory.newGMonthDay(new Integer(value.substring(2, 4)), new Integer(value.substring(5, 7))); } if (datatype.equals(name.XSD_GDAY)) { // format is ---30 return pFactory.newGDay(new Integer(value.substring(3))); } if (datatype.equals(name.XSD_DURATION)) { return pFactory.newDuration(value); } if (datatype.equals(name.XSD_DAY_TIME_DURATION)) { return pFactory.newDuration(value); } if (datatype.equals(name.XSD_YEAR_MONTH_DURATION)) { return pFactory.newDuration(value); } if (datatype.equals(name.XSD_LANGUAGE)) { return value; } if (datatype.equals(name.XSD_TOKEN)) { return value; } if (datatype.equals(name.XSD_NMTOKEN)) { 
return value; } if (datatype.equals(name.XSD_NAME)) { return value; } if (datatype.equals(name.XSD_NCNAME)) { return value; } if (datatype.equals(name.XSD_NORMALIZED_STRING)) { return value; } if (datatype.equals(name.XSD_HEX_BINARY)) { return pFactory.hexDecoding(value); } if (datatype.equals(name.XSD_BASE64_BINARY)) { return pFactory.base64Decoding(value); } if (datatype.equals(name.RDF_LITERAL)) { return convertXMLLiteral(value); } throw new UnsupportedOperationException("UNKNOWN literal type " + datatype); } public org.w3c.dom.Element convertXMLLiteral(String value) { DOMProcessing dp=new DOMProcessing((ProvFactory)pFactory); DocumentBuilder db=dp.builder; InputStream in; org.w3c.dom.Document doc=null; try { in = IOUtils.toInputStream(value, "UTF-8"); doc=db.parse(in); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (SAXException e) { // TODO Auto-generated catch block e.printStackTrace(); } if (doc==null) return null; return doc.getDocumentElement(); } public QualifiedName getXsdType(Object o) { if (o instanceof Integer) return name.XSD_INT; if (o instanceof String) return name.XSD_STRING; if (o instanceof LangString) return name.XSD_STRING; if (o instanceof BigInteger) return name.XSD_INTEGER; if (o instanceof Long) return name.XSD_LONG; if (o instanceof Short) return name.XSD_SHORT; if (o instanceof Double) return name.XSD_DOUBLE; if (o instanceof Float) return name.XSD_FLOAT; if (o instanceof java.math.BigDecimal) return name.XSD_DECIMAL; if (o instanceof Boolean) return name.XSD_BOOLEAN; if (o instanceof Byte) return name.XSD_BYTE; if (o instanceof QualifiedName) return name.PROV_QUALIFIED_NAME; if (o instanceof XMLGregorianCalendar) { XMLGregorianCalendar cal = (XMLGregorianCalendar) o; QName t = cal.getXMLSchemaType(); if (t.getLocalPart().equals(name.XSD_GYEAR.getLocalPart())) return name.XSD_GYEAR; if (t.getLocalPart().equals(name.XSD_DATETIME.getLocalPart())) return name.XSD_DATETIME; // TODO: need to support all time related xsd types // default, return xsd:datetime return name.XSD_DATETIME; } // issue #54 flagged a concern: value can be an element, when xsi:type // was unspecified. // this is no longer the case // System.out.println("getXsdType() " + o.getClass()); // Let's be permissive, and return the unknown qualified name return name.QUALIFIED_NAME_UNKNOWN_TYPE; } }
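/*
 * Hedged sketch of a round trip through ValueConverter: a lexical form plus an xsd
 * datatype is converted to a Java object with convertToJava, and getXsdType recovers
 * a datatype for that object. How a concrete ProvFactory is obtained depends on the
 * PROV implementation in use, so it is assumed to be passed in by the caller; the
 * sketch also assumes it lives where the Name fields (e.g. XSD_INT) are accessible.
 */
final class ValueConverterSketch {
    static void roundTrip(ProvFactory pFactory) {
        ValueConverter vc = new ValueConverter(pFactory);
        Name name = pFactory.getName();
        Object asJava = vc.convertToJava(name.XSD_INT, "42"); // -> Integer 42
        QualifiedName datatype = vc.getXsdType(asJava);       // -> name.XSD_INT
        System.out.println(asJava + " : " + datatype);
    }
}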
package com.creative.studio.component.dependency; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.net.URLClassLoader; import java.security.DigestInputStream; import java.security.MessageDigest; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Enumeration; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; import java.util.jar.Attributes; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.regex.Pattern; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Opcodes; /** * <p> * 1. Support directory scan,including classpath * <p> * 2. Support component scan,including jar,war,ear and sar * <p> * 3. Support conflicting classes scan,conflicting means the same fully-qualified * class name, but not the same digest or incompatible class(details see <a * href="http://docs.oracle.com/javase/specs/jls/se7/html/jls-13.html">jls</a> * and <a href= * "http://www.oracle.com/technetwork/java/javase/compatibility-137541.html" * >class compatibility</a>) * <p> * 4. Check who using this conflicting classes * * @author <a href="mailto:[email protected]">Von Gosling</a> */ public class DependencyMediator { /** * Whether to check <code>.jar</code> inside files */ private static boolean checkJars = true; /** * Whether to check class compatible */ private static boolean checkCompatible = true; public static final String CLASS_SUFFIX = ".class"; public static final Pattern JAR_FILE_PATTERN = Pattern.compile("^.+\\.(jar|JAR)$"); /** * Recursively finds class files and process * * @param file Directory full of class files or jar files (in which case all * of them are processed recursively), or a class file (in which * case that single class is processed), or a jar file (in which * case all the classes in this jar file are processed.) */ public static void process(File file) throws IOException { List<File> files = new ArrayList<File>(); if (file.isDirectory()) { files = processDirectory(file); for (File f : files) { doProcess(f); } } else { doProcess(file); } if (checkCompatible) { //processCompatible(file, jarFile, classMap); } } private static String getFileExtension(String fullName) { String fileName = new File(fullName).getName(); int dotIndex = fileName.lastIndexOf('.'); return (dotIndex == -1) ? "" : fileName.substring(dotIndex + 1); } protected static void doProcess(File file) throws IOException { String fileFormat = getFileExtension(file.getName()); ComponentFormat compFormat = ComponentFormat.fromString(fileFormat); if (null == compFormat) { System.err.printf("Not support file format [%s] now !", file.getName()); System.exit(-1); } switch (compFormat) { case WAR: case EAR: case SAR: case ZIP: case GZIP: case JAR: processJarFile(file, checkJars); break; case CLASS: processClassFile(file); break; default: break; } } protected static List<File> processDirectory(File dir) throws IOException { List<File> totalFiles = new ArrayList<File>(); listFiles(dir, totalFiles); //Ensure that outer classes are visited before inner classes Collections.sort(totalFiles, new Comparator<File>() { public int compare(File file1, File file2) { String n1 = file1.getName(); String n2 = file2.getName(); int diff = n1.length() - n2.length(); return diff != 0 ? 
diff : n1.compareTo(n2); } }); return totalFiles; } protected static void listFiles(File dir, List<File> totalFiles) { //Performance problems: using Files.newDirectoryStream File[] files = dir.listFiles(); if (files != null) { for (File f : files) { if (f.isDirectory()) { listFiles(f, totalFiles); } else { if (JAR_FILE_PATTERN.matcher(f.getName()).matches() || f.getName().endsWith(CLASS_SUFFIX)) { totalFiles.add(f); } } } } } /** * Nothing to do about the Class-Path property in MANIFEST.MF file now * * @param file * @param checkJars * @throws IOException */ public static void processJarFile(File file, boolean checkJars) throws IOException { JarFile jarFile = null; try { jarFile = new JarFile(file); if (checkJars) { Enumeration<JarEntry> jarEntries = jarFile.entries(); while (jarEntries.hasMoreElements()) { JarEntry jarEntry = jarEntries.nextElement(); if (!jarEntry.getName().endsWith(".class")) { continue; } //Check whether the same class String keyName = jarEntry.getName() .substring(0, jarEntry.getName().length() - 6).replace("/", "."); ComponentEntry cEntry = new ComponentEntry(); cEntry.setPathName(jarFile.getName() + ":" + jarEntry.getName()); cEntry.setJarName(jarFile.getName()); cEntry.setName(keyName); cEntry.setEntry(jarEntry); cEntry.setDigest(getDigest(jarFile.getInputStream(jarEntry))); ComponentContainer.put(keyName, cEntry); } } else { //Handle MANIFEST String name = jarFile.getName().substring(jarFile.getName().lastIndexOf("/") + 1); Attributes attr = jarFile.getManifest().getMainAttributes(); String buildJdk = attr.getValue("Build-Jdk"); String builtBy = attr.getValue("Built-By"); String keyName = name; if (!buildJdk.isEmpty()) { keyName = keyName + ":" + buildJdk; } if (!builtBy.isEmpty()) { keyName = keyName + ":" + builtBy; } ComponentEntry cEntry = new ComponentEntry(); cEntry.setName(name); cEntry.setPathName(jarFile.getName()); cEntry.setDigest(getDigest(new FileInputStream(new File(jarFile.getName())))); ComponentContainer.put(keyName, cEntry); } } catch (Throwable e) { e.printStackTrace(); } finally { if (null != jarFile) { jarFile.close(); } } } public static void output(HashMap<String, TreeSet<ComponentEntry>> classMap) { System.out.println("Output component reactor info......"); int count = 0; for (Entry<String, TreeSet<ComponentEntry>> entry : classMap.entrySet()) { if (entry.getValue().size() > 1) { count++; System.out.printf("Conflicting component [%s] was founded in the path : \n", entry.getKey()); for (ComponentEntry jar : entry.getValue()) { System.out.printf(" \t%s\n", jar.getPathName()); } } } if (count == 0) { System.out.println("Congratulations,no conflicting component exist!"); } } // private static void processCompatible(File file, JarFile jarFile, // HashMap<String, TreeSet<ComponentEntry>> classMap) // throws IOException { // Iterator<Entry<String, TreeSet<ComponentEntry>>> iter = classMap.entrySet().iterator(); // while (iter.hasNext()) { // Entry<String, TreeSet<ComponentEntry>> jarEntryInfoEntry = iter.next(); // Set<ComponentEntry> jarEntryInfos = jarEntryInfoEntry.getValue(); // Iterator<ComponentEntry> jarEntryInfoIter = jarEntryInfos.iterator(); // while (jarEntryInfoIter.hasNext()) { // JarEntry jarEntry = jarEntryInfoIter.next().getEntry(); // InputStream is = jarFile.getInputStream(jarEntry); // loadByteCode(file.getPath() + ":" + jarEntry.getName(), is); // } // } // } private static void loadByteCode(final String fileName, final InputStream is) throws IOException { { try { FileInputStream fis = new FileInputStream(new File(fileName)); 
final byte[] dd = getDigest(fis); ClassReader cr = new ClassReader(is); cr.accept(new ClassVisitor(Opcodes.ASM5) { public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { //Check whether the same class String keyName = name.replace('/', '.'); ComponentEntry cEntry = new ComponentEntry(); cEntry.setPathName(fileName); cEntry.setName(keyName); cEntry.setDigest(dd); ComponentContainer.put(keyName, cEntry); } }, 0); } catch (ArrayIndexOutOfBoundsException e) { // MANIMALSNIFFER-9 it is a pity that ASM does not throw a nicer error on encountering a malformed class file. IOException ioException = new IOException("Bad class file " + fileName); ioException.initCause(e); throw ioException; } } } private static byte[] getDigest(InputStream is) { DigestInputStream dis = null; try { MessageDigest md = MessageDigest.getInstance("MD5"); dis = new DigestInputStream(is, md); byte[] bytes = new byte[1024]; int numBytes = -1; while ((numBytes = is.read(bytes)) != -1) { md.update(bytes, 0, numBytes); } return md.digest(); } catch (Throwable e) { e.printStackTrace(); } finally { if (dis != null) { try { dis.close(); } catch (IOException e) { e.printStackTrace(); } } } return null; } protected static void processClassFile(File file) throws IOException { InputStream in = new FileInputStream(file); try { loadByteCode(file.getPath(), in); } finally { in.close(); } } private static Collection<URLClassLoader> getClassLoaders(ClassLoader baseClassLoader) { Collection<URLClassLoader> loaders = new ArrayList<URLClassLoader>(); ClassLoader loader = baseClassLoader; while (loader != null) { //Ignore if ("sun.misc.Launcher$ExtClassLoader".equals(loader.getClass().getName())) { break; } if (loader instanceof URLClassLoader) { loaders.add((URLClassLoader) loader); } loader = loader.getParent(); } return loaders; } public static void scanClassPath() { Set<URLClassLoader> loaders = new LinkedHashSet<URLClassLoader>(); loaders.addAll(getClassLoaders(Thread.currentThread().getContextClassLoader())); loaders.addAll(getClassLoaders(DependencyMediator.class.getClassLoader())); for (URLClassLoader cl : loaders) { for (URL url : cl.getURLs()) { String file = url.getFile(); File dir = new File(file); try { process(dir); } catch (IOException e1) { e1.printStackTrace(); } } } } public static void main(String args[]) { File dir = null; boolean scanClasspath = SystemPropertyUtils.getBoolean("scanClasspath", false); if (args.length == 0) { if (scanClasspath) { scanClassPath(); } } else { dir = new File(args[0]); try { process(dir); } catch (IOException e) { e.printStackTrace(); } } output(ComponentContainer.compMaps); } }
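/*
 * Hedged usage sketch for the DependencyMediator scan above: process a directory of
 * jar/class files and report fully-qualified class names that occur more than once.
 * The "lib" path is an arbitrary example, and the sketch is assumed to sit in the
 * same package so that ComponentContainer.compMaps is accessible (main() above uses
 * the same field).
 */
final class DependencyScanSketch {
    public static void main(String[] args) throws java.io.IOException {
        java.io.File libDir = new java.io.File("lib");           // example directory of jars/classes
        DependencyMediator.process(libDir);                      // populates ComponentContainer
        DependencyMediator.output(ComponentContainer.compMaps);  // prints conflicting components, if any
    }
}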
package redis.clients.jedis; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import redis.clients.jedis.BinaryClient.LIST_POSITION; public class JedisCluster implements JedisCommands, BasicCommands { public static final short HASHSLOTS = 16384; private static final int DEFAULT_TIMEOUT = 1; private static final int DEFAULT_MAX_REDIRECTIONS = 5; private int timeout; private int maxRedirections; private JedisClusterConnectionHandler connectionHandler; public JedisCluster(Set<HostAndPort> nodes, int timeout) { this(nodes, timeout, DEFAULT_MAX_REDIRECTIONS); } public JedisCluster(Set<HostAndPort> nodes) { this(nodes, DEFAULT_TIMEOUT); } public JedisCluster(Set<HostAndPort> jedisClusterNode, int timeout, int maxRedirections) { this.connectionHandler = new JedisSlotBasedConnectionHandler( jedisClusterNode); this.timeout = timeout; this.maxRedirections = maxRedirections; } @Override public String set(final String key, final String value) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().set(key, value); } }.run(key); } @Override public String get(final String key) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().get(key); } }.run(key); } @Override public Boolean exists(final String key) { return new JedisClusterCommand<Boolean>(connectionHandler, timeout, maxRedirections) { @Override public Boolean execute() { return connectionHandler.getConnection().exists(key); } }.run(key); } @Override public Long persist(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().persist(key); } }.run(key); } @Override public String type(final String key) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().type(key); } }.run(key); } @Override public Long expire(final String key, final int seconds) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().expire(key, seconds); } }.run(key); } @Override public Long expireAt(final String key, final long unixTime) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection() .expireAt(key, unixTime); } }.run(key); } @Override public Long ttl(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().ttl(key); } }.run(key); } @Override public Boolean setbit(final String key, final long offset, final boolean value) { return new JedisClusterCommand<Boolean>(connectionHandler, timeout, maxRedirections) { @Override public Boolean execute() { return connectionHandler.getConnection().setbit(key, offset, value); } }.run(key); } @Override public Boolean setbit(final String key, final long offset, final String value) { return new JedisClusterCommand<Boolean>(connectionHandler, timeout, maxRedirections) { @Override public Boolean execute() { return connectionHandler.getConnection().setbit(key, offset, value); } }.run(key); } @Override public Boolean 
getbit(final String key, final long offset) { return new JedisClusterCommand<Boolean>(connectionHandler, timeout, maxRedirections) { @Override public Boolean execute() { return connectionHandler.getConnection().getbit(key, offset); } }.run(key); } @Override public Long setrange(final String key, final long offset, final String value) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().setrange(key, offset, value); } }.run(key); } @Override public String getrange(final String key, final long startOffset, final long endOffset) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().getrange(key, startOffset, endOffset); } }.run(key); } @Override public String getSet(final String key, final String value) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().getSet(key, value); } }.run(key); } @Override public Long setnx(final String key, final String value) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().setnx(key, value); } }.run(key); } @Override public String setex(final String key, final int seconds, final String value) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().setex(key, seconds, value); } }.run(key); } @Override public Long decrBy(final String key, final long integer) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().decrBy(key, integer); } }.run(key); } @Override public Long decr(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().decr(key); } }.run(key); } @Override public Long incrBy(final String key, final long integer) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().incrBy(key, integer); } }.run(key); } @Override public Long incr(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().incr(key); } }.run(key); } @Override public Long append(final String key, final String value) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().append(key, value); } }.run(key); } @Override public String substr(final String key, final int start, final int end) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection() .substr(key, start, end); } }.run(key); } @Override public Long hset(final String key, final String field, final String value) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection() .hset(key, field, value); } }.run(key); } @Override public String 
hget(final String key, final String field) {
		return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) {
			@Override
			public String execute() { return connectionHandler.getConnection().hget(key, field); }
		}.run(key);
	}
	@Override
	public Long hsetnx(final String key, final String field, final String value) {
		return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) {
			@Override
			public Long execute() { return connectionHandler.getConnection().hsetnx(key, field, value); }
		}.run(key);
	}
	@Override
	public String hmset(final String key, final Map<String, String> hash) {
		return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) {
			@Override
			public String execute() { return connectionHandler.getConnection().hmset(key, hash); }
		}.run(key);
	}
	@Override
	public List<String> hmget(final String key, final String... fields) {
		return new JedisClusterCommand<List<String>>(connectionHandler, timeout, maxRedirections) {
			@Override
			public List<String> execute() { return connectionHandler.getConnection().hmget(key, fields); }
		}.run(key);
	}
	@Override
	public Long hincrBy(final String key, final String field, final long value) {
		return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) {
			@Override
			public Long execute() { return connectionHandler.getConnection().hincrBy(key, field, value); }
		}.run(key);
	}
	@Override
	public Boolean hexists(final String key, final String field) {
		return new JedisClusterCommand<Boolean>(connectionHandler, timeout, maxRedirections) {
			@Override
			public Boolean execute() { return connectionHandler.getConnection().hexists(key, field); }
		}.run(key);
	}
	@Override
	public Long hdel(final String key, final String... field) {
		return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) {
			@Override
			public Long execute() { return connectionHandler.getConnection().hdel(key, field); }
		}.run(key);
	}
	@Override
	public Long hlen(final String key) {
		return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) {
			@Override
			public Long execute() {
				// returns the number of fields in the hash
				return connectionHandler.getConnection().hlen(key);
			}
		}.run(key);
	}
	@Override
	public Set<String> hkeys(final String key) {
		return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) {
			@Override
			public Set<String> execute() { return connectionHandler.getConnection().hkeys(key); }
		}.run(key);
	}
	@Override
	public List<String> hvals(final String key) {
		return new JedisClusterCommand<List<String>>(connectionHandler, timeout, maxRedirections) {
			@Override
			public List<String> execute() { return connectionHandler.getConnection().hvals(key); }
		}.run(key);
	}
	@Override
	public Map<String, String> hgetAll(final String key) {
		return new JedisClusterCommand<Map<String, String>>(connectionHandler, timeout, maxRedirections) {
			@Override
			public Map<String, String> execute() { return connectionHandler.getConnection().hgetAll(key); }
		}.run(key);
	}
	@Override
	public Long rpush(final String key, final String... string) {
		return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) {
			@Override
			public Long execute() { return connectionHandler.getConnection().rpush(key, string); }
		}.run(key);
	}
	@Override
	public Long lpush(final String key, final String...
string) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().lpush(key, string); } }.run(key); } @Override public Long llen(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().llen(key); } }.run(key); } @Override public List<String> lrange(final String key, final long start, final long end) { return new JedisClusterCommand<List<String>>(connectionHandler, timeout, maxRedirections) { @Override public List<String> execute() { return connectionHandler.getConnection() .lrange(key, start, end); } }.run(key); } @Override public String ltrim(final String key, final long start, final long end) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().ltrim(key, start, end); } }.run(key); } @Override public String lindex(final String key, final long index) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().lindex(key, index); } }.run(key); } @Override public String lset(final String key, final long index, final String value) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection() .lset(key, index, value); } }.run(key); } @Override public Long lrem(final String key, final long count, final String value) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection() .lrem(key, count, value); } }.run(key); } @Override public String lpop(final String key) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().lpop(key); } }.run(key); } @Override public String rpop(final String key) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().rpop(key); } }.run(key); } @Override public Long sadd(final String key, final String... member) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().sadd(key, member); } }.run(key); } @Override public Set<String> smembers(final String key) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().smembers(key); } }.run(key); } @Override public Long srem(final String key, final String... 
member) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().srem(key, member); } }.run(key); } @Override public String spop(final String key) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().spop(key); } }.run(key); } @Override public Long scard(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().scard(key); } }.run(key); } @Override public Boolean sismember(final String key, final String member) { return new JedisClusterCommand<Boolean>(connectionHandler, timeout, maxRedirections) { @Override public Boolean execute() { return connectionHandler.getConnection().sismember(key, member); } }.run(key); } @Override public String srandmember(final String key) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().srandmember(key); } }.run(key); } @Override public Long strlen(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().strlen(key); } }.run(key); } @Override public Long zadd(final String key, final double score, final String member) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().zadd(key, score, member); } }.run(key); } @Override public Long zadd(final String key, final Map<String, Double> scoreMembers) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection() .zadd(key, scoreMembers); } }.run(key); } @Override public Set<String> zrange(final String key, final long start, final long end) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection() .zrange(key, start, end); } }.run(key); } @Override public Long zrem(final String key, final String... 
member) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().zrem(key, member); } }.run(key); } @Override public Double zincrby(final String key, final double score, final String member) { return new JedisClusterCommand<Double>(connectionHandler, timeout, maxRedirections) { @Override public Double execute() { return connectionHandler.getConnection().zincrby(key, score, member); } }.run(key); } @Override public Long zrank(final String key, final String member) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().zrank(key, member); } }.run(key); } @Override public Long zrevrank(final String key, final String member) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().zrevrank(key, member); } }.run(key); } @Override public Set<String> zrevrange(final String key, final long start, final long end) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().zrevrange(key, start, end); } }.run(key); } @Override public Set<Tuple> zrangeWithScores(final String key, final long start, final long end) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection().zrangeWithScores(key, start, end); } }.run(key); } @Override public Set<Tuple> zrevrangeWithScores(final String key, final long start, final long end) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection().zrevrangeWithScores( key, start, end); } }.run(key); } @Override public Long zcard(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().zcard(key); } }.run(key); } @Override public Double zscore(final String key, final String member) { return new JedisClusterCommand<Double>(connectionHandler, timeout, maxRedirections) { @Override public Double execute() { return connectionHandler.getConnection().zscore(key, member); } }.run(key); } @Override public List<String> sort(final String key) { return new JedisClusterCommand<List<String>>(connectionHandler, timeout, maxRedirections) { @Override public List<String> execute() { return connectionHandler.getConnection().sort(key); } }.run(key); } @Override public List<String> sort(final String key, final SortingParams sortingParameters) { return new JedisClusterCommand<List<String>>(connectionHandler, timeout, maxRedirections) { @Override public List<String> execute() { return connectionHandler.getConnection().sort(key, sortingParameters); } }.run(key); } @Override public Long zcount(final String key, final double min, final double max) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().zcount(key, min, max); } }.run(key); } @Override public Long zcount(final String key, final String min, final String max) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long 
execute() { return connectionHandler.getConnection().zcount(key, min, max); } }.run(key); } @Override public Set<String> zrangeByScore(final String key, final double min, final double max) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().zrangeByScore(key, min, max); } }.run(key); } @Override public Set<String> zrangeByScore(final String key, final String min, final String max) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().zrangeByScore(key, min, max); } }.run(key); } @Override public Set<String> zrevrangeByScore(final String key, final double max, final double min) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().zrevrangeByScore(key, min, max); } }.run(key); } @Override public Set<String> zrangeByScore(final String key, final double min, final double max, final int offset, final int count) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().zrangeByScore(key, min, max, offset, count); } }.run(key); } @Override public Set<String> zrevrangeByScore(final String key, final String max, final String min) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().zrevrangeByScore(key, min, max); } }.run(key); } @Override public Set<String> zrangeByScore(final String key, final String min, final String max, final int offset, final int count) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().zrangeByScore(key, min, max, offset, count); } }.run(key); } @Override public Set<String> zrevrangeByScore(final String key, final double max, final double min, final int offset, final int count) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().zrevrangeByScore(key, min, max, offset, count); } }.run(key); } @Override public Set<Tuple> zrangeByScoreWithScores(final String key, final double min, final double max) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection() .zrangeByScoreWithScores(key, min, max); } }.run(key); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final String key, final double max, final double min) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection() .zrevrangeByScoreWithScores(key, min, max); } }.run(key); } @Override public Set<Tuple> zrangeByScoreWithScores(final String key, final double min, final double max, final int offset, final int count) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection() .zrangeByScoreWithScores(key, min, max, offset, count); } }.run(key); } 
@Override public Set<String> zrevrangeByScore(final String key, final String max, final String min, final int offset, final int count) { return new JedisClusterCommand<Set<String>>(connectionHandler, timeout, maxRedirections) { @Override public Set<String> execute() { return connectionHandler.getConnection().zrevrangeByScore(key, min, max, offset, count); } }.run(key); } @Override public Set<Tuple> zrangeByScoreWithScores(final String key, final String min, final String max) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection() .zrangeByScoreWithScores(key, min, max); } }.run(key); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final String key, final String max, final String min) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection() .zrevrangeByScoreWithScores(key, min, max); } }.run(key); } @Override public Set<Tuple> zrangeByScoreWithScores(final String key, final String min, final String max, final int offset, final int count) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection() .zrangeByScoreWithScores(key, min, max, offset, count); } }.run(key); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final String key, final double max, final double min, final int offset, final int count) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection() .zrevrangeByScoreWithScores(key, max, min, offset, count); } }.run(key); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final String key, final String max, final String min, final int offset, final int count) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, timeout, maxRedirections) { @Override public Set<Tuple> execute() { return connectionHandler.getConnection() .zrevrangeByScoreWithScores(key, max, min, offset, count); } }.run(key); } @Override public Long zremrangeByRank(final String key, final long start, final long end) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().zremrangeByRank(key, start, end); } }.run(key); } @Override public Long zremrangeByScore(final String key, final double start, final double end) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().zremrangeByScore(key, start, end); } }.run(key); } @Override public Long zremrangeByScore(final String key, final String start, final String end) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().zremrangeByScore(key, start, end); } }.run(key); } @Override public Long linsert(final String key, final LIST_POSITION where, final String pivot, final String value) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().linsert(key, where, pivot, value); } }.run(key); } @Override public Long lpushx(final String key, final String... 
string) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().lpushx(key, string); } }.run(key); } @Override public Long rpushx(final String key, final String... string) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().rpushx(key, string); } }.run(key); } @Override public List<String> blpop(final String arg) { return new JedisClusterCommand<List<String>>(connectionHandler, timeout, maxRedirections) { @Override public List<String> execute() { return connectionHandler.getConnection().blpop(arg); } }.run(null); } @Override public List<String> brpop(final String arg) { return new JedisClusterCommand<List<String>>(connectionHandler, timeout, maxRedirections) { @Override public List<String> execute() { return connectionHandler.getConnection().brpop(arg); } }.run(null); } @Override public Long del(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().del(key); } }.run(null); } @Override public String echo(final String string) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().echo(string); } }.run(null); } @Override public Long move(final String key, final int dbIndex) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().move(key, dbIndex); } }.run(key); } @Override public Long bitcount(final String key) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().bitcount(key); } }.run(key); } @Override public Long bitcount(final String key, final long start, final long end) { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().bitcount(key, start, end); } }.run(key); } @Override public String ping() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().ping(); } }.run(null); } @Override public String quit() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().quit(); } }.run(null); } @Override public String flushDB() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().flushDB(); } }.run(null); } @Override public Long dbSize() { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().dbSize(); } }.run(null); } @Override public String select(final int index) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().select(index); } }.run(null); } @Override public String flushAll() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { 
return connectionHandler.getConnection().flushAll(); } }.run(null); } @Override public String auth(final String password) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().auth(password); } }.run(null); } @Override public String save() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().save(); } }.run(null); } @Override public String bgsave() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().bgsave(); } }.run(null); } @Override public String bgrewriteaof() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().bgrewriteaof(); } }.run(null); } @Override public Long lastsave() { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().lastsave(); } }.run(null); } @Override public String shutdown() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().shutdown(); } }.run(null); } @Override public String info() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().info(); } }.run(null); } @Override public String info(final String section) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().info(section); } }.run(null); } @Override public String slaveof(final String host, final int port) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().slaveof(host, port); } }.run(null); } @Override public String slaveofNoOne() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().slaveofNoOne(); } }.run(null); } @Override public Long getDB() { return new JedisClusterCommand<Long>(connectionHandler, timeout, maxRedirections) { @Override public Long execute() { return connectionHandler.getConnection().getDB(); } }.run(null); } @Override public String debug(final DebugParams params) { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().debug(params); } }.run(null); } @Override public String configResetStat() { return new JedisClusterCommand<String>(connectionHandler, timeout, maxRedirections) { @Override public String execute() { return connectionHandler.getConnection().configResetStat(); } }.run(null); } public Map<String, JedisPool> getClusterNodes() { return connectionHandler.getNodes(); } @Override public Long waitReplicas(int replicas, long timeout) { // TODO Auto-generated method stub return null; } @Override public ScanResult<Entry<String, String>> hscan(final String key, final int cursor) { return new JedisClusterCommand<ScanResult<Entry<String, String>>>( connectionHandler, timeout, maxRedirections) 
{
      @Override
      public ScanResult<Entry<String, String>> execute() {
        return connectionHandler.getConnection().hscan(key, cursor);
      }
    }.run(key);
  }

  @Override
  public ScanResult<String> sscan(final String key, final int cursor) {
    return new JedisClusterCommand<ScanResult<String>>(connectionHandler, timeout, maxRedirections) {
      @Override
      public ScanResult<String> execute() {
        return connectionHandler.getConnection().sscan(key, cursor);
      }
    }.run(key);
  }

  @Override
  public ScanResult<Tuple> zscan(final String key, final int cursor) {
    return new JedisClusterCommand<ScanResult<Tuple>>(connectionHandler, timeout, maxRedirections) {
      @Override
      public ScanResult<Tuple> execute() {
        return connectionHandler.getConnection().zscan(key, cursor);
      }
    }.run(key);
  }
}
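/*
 * Every command in the cluster client above follows the same shape: an anonymous
 * JedisClusterCommand subclass supplies execute(), and run(key) layers the slot
 * routing and redirection/retry handling on top of it. The standalone sketch below
 * re-states that template-method-plus-retry pattern in isolation so the structure is
 * easier to see. All names here (ClusterCommand, maxAttempts, the fake command body)
 * are illustrative assumptions, not the actual Jedis classes or behaviour.
 */
public class ClusterCommandSketch {

    /** Generic template: execute() is the per-command body, run() adds the cross-cutting retry loop. */
    abstract static class ClusterCommand<T> {
        private final int maxAttempts;

        ClusterCommand(int maxAttempts) {
            this.maxAttempts = maxAttempts;
        }

        abstract T execute();

        T run(String key) {
            RuntimeException last = null;
            for (int attempt = 0; attempt < maxAttempts; attempt++) {
                try {
                    return execute();          // the command-specific body
                } catch (RuntimeException e) { // stands in for a redirection/connection error
                    last = e;                  // a real client would refresh its slot cache here
                }
            }
            throw new IllegalStateException("Too many attempts for key " + key, last);
        }
    }

    public static void main(String[] args) {
        // One concrete command, written exactly like the anonymous subclasses above.
        Long result = new ClusterCommand<Long>(5) {
            @Override
            Long execute() {
                return 42L; // pretend this is connection.lpushx(key, values)
            }
        }.run("mylist");
        System.out.println(result);
    }
}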
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.common.lucene.BytesRefs; import java.util.Arrays; import java.util.Base64; import java.util.Collection; import java.util.Collections; public final class Uid { public static final char DELIMITER = '#'; public static final byte DELIMITER_BYTE = 0x23; private final String type; private final String id; public Uid(String type, String id) { this.type = type; this.id = id; } public String type() { return type; } public String id() { return id; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Uid uid = (Uid) o; if (id != null ? !id.equals(uid.id) : uid.id != null) return false; if (type != null ? !type.equals(uid.type) : uid.type != null) return false; return true; } @Override public int hashCode() { int result = type != null ? type.hashCode() : 0; result = 31 * result + (id != null ? 
id.hashCode() : 0); return result; } @Override public String toString() { return createUid(type, id); } public BytesRef toBytesRef() { return createUidAsBytes(type, id); } public static Uid createUid(String uid) { int delimiterIndex = uid.indexOf(DELIMITER); // type is not allowed to have # in it..., ids can return new Uid(uid.substring(0, delimiterIndex), uid.substring(delimiterIndex + 1)); } public static BytesRef createUidAsBytes(String type, String id) { return createUidAsBytes(new BytesRef(type), new BytesRef(id)); } public static BytesRef createUidAsBytes(String type, BytesRef id) { return createUidAsBytes(new BytesRef(type), id); } public static BytesRef createUidAsBytes(BytesRef type, BytesRef id) { final BytesRef ref = new BytesRef(type.length + 1 + id.length); System.arraycopy(type.bytes, type.offset, ref.bytes, 0, type.length); ref.offset = type.length; ref.bytes[ref.offset++] = DELIMITER_BYTE; System.arraycopy(id.bytes, id.offset, ref.bytes, ref.offset, id.length); ref.offset = 0; ref.length = ref.bytes.length; return ref; } public static BytesRef[] createUidsForTypesAndId(Collection<String> types, Object id) { return createUidsForTypesAndIds(types, Collections.singletonList(id)); } public static BytesRef[] createUidsForTypesAndIds(Collection<String> types, Collection<?> ids) { BytesRef[] uids = new BytesRef[types.size() * ids.size()]; BytesRefBuilder typeBytes = new BytesRefBuilder(); BytesRefBuilder idBytes = new BytesRefBuilder(); int index = 0; for (String type : types) { typeBytes.copyChars(type); for (Object id : ids) { uids[index++] = Uid.createUidAsBytes(typeBytes.get(), BytesRefs.toBytesRef(id, idBytes)); } } return uids; } public static String createUid(String type, String id) { return type + DELIMITER + id; } private static final int UTF8 = 0xff; private static final int NUMERIC = 0xfe; private static final int BASE64_ESCAPE = 0xfd; static boolean isURLBase64WithoutPadding(String id) { // We are not lenient about padding chars ('=') otherwise // 'xxx=' and 'xxx' could be considered the same id final int length = id.length(); switch (length & 0x03) { case 0: break; case 1: return false; case 2: // the last 2 symbols (12 bits) are encoding 1 byte (8 bits) // so the last symbol only actually uses 8-6=2 bits and can only take 4 values char last = id.charAt(length - 1); if (last != 'A' && last != 'Q' && last != 'g' && last != 'w') { return false; } break; case 3: // The last 3 symbols (18 bits) are encoding 2 bytes (16 bits) // so the last symbol only actually uses 16-12=4 bits and can only take 16 values last = id.charAt(length - 1); if (last != 'A' && last != 'E' && last != 'I' && last != 'M' && last != 'Q'&& last != 'U'&& last != 'Y' && last != 'c'&& last != 'g'&& last != 'k' && last != 'o' && last != 's' && last != 'w' && last != '0' && last != '4' && last != '8') { return false; } break; default: // number & 0x03 is always in [0,3] throw new AssertionError("Impossible case"); } for (int i = 0; i < length; ++i) { final char c = id.charAt(i); final boolean allowed = (c >= '0' && c <= '9') || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || c == '-' || c == '_'; if (allowed == false) { return false; } } return true; } static boolean isPositiveNumeric(String id) { for (int i = 0; i < id.length(); ++i) { final char c = id.charAt(i); if (c < '0' || c > '9') { return false; } } return true; } /** With numeric ids, we just fold two consecutive chars in a single byte * and use 0x0f as an end marker. 
*/ private static BytesRef encodeNumericId(String id) { byte[] b = new byte[1 + (id.length() + 1) / 2]; b[0] = (byte) NUMERIC; for (int i = 0; i < id.length(); i += 2) { int b1 = id.charAt(i) - '0'; int b2; if (i + 1 == id.length()) { b2 = 0x0f; // end marker } else { b2 = id.charAt(i + 1) - '0'; } b[1 + i/2] = (byte) ((b1 << 4) | b2); } return new BytesRef(b); } /** With base64 ids, we decode and prepend an escape char in the cases that * it could be mixed up with numeric or utf8 encoding. In the majority of * cases (253/256) the encoded id is exactly the binary form. */ private static BytesRef encodeBase64Id(String id) { byte[] b = Base64.getUrlDecoder().decode(id); if (Byte.toUnsignedInt(b[0]) >= BASE64_ESCAPE) { byte[] newB = new byte[b.length + 1]; newB[0] = (byte) BASE64_ESCAPE; System.arraycopy(b, 0, newB, 1, b.length); b = newB; } return new BytesRef(b, 0, b.length); } private static BytesRef encodeUtf8Id(String id) { byte[] b = new byte[1 + UnicodeUtil.maxUTF8Length(id.length())]; // Prepend a byte that indicates that the content is an utf8 string b[0] = (byte) UTF8; int length = UnicodeUtil.UTF16toUTF8(id, 0, id.length(), b, 1); return new BytesRef(b, 0, length); } /** Encode an id for storage in the index. This encoding is optimized for * numeric and base64 ids, which are encoded in a much denser way than * what UTF8 would do. * @see #decodeId */ public static BytesRef encodeId(String id) { if (id.isEmpty()) { throw new IllegalArgumentException("Ids can't be empty"); } if (isPositiveNumeric(id)) { // common for ids that come from databases with auto-increments return encodeNumericId(id); } else if (isURLBase64WithoutPadding(id)) { // common since it applies to autogenerated ids return encodeBase64Id(id); } else { return encodeUtf8Id(id); } } private static String decodeNumericId(byte[] idBytes, int offset, int len) { assert Byte.toUnsignedInt(idBytes[offset]) == NUMERIC; int length = (len - 1) * 2; char[] chars = new char[length]; for (int i = 1; i < len; ++i) { final int b = Byte.toUnsignedInt(idBytes[offset + i]); final int b1 = (b >>> 4); final int b2 = b & 0x0f; chars[(i - 1) * 2] = (char) (b1 + '0'); if (i == len - 1 && b2 == 0x0f) { length--; break; } chars[(i - 1) * 2 + 1] = (char) (b2 + '0'); } return new String(chars, 0, length); } private static String decodeUtf8Id(byte[] idBytes, int offset, int length) { assert Byte.toUnsignedInt(idBytes[offset]) == UTF8; return new BytesRef(idBytes, offset + 1, length - 1).utf8ToString(); } private static String decodeBase64Id(byte[] idBytes, int offset, int length) { assert Byte.toUnsignedInt(idBytes[offset]) <= BASE64_ESCAPE; if (Byte.toUnsignedInt(idBytes[offset]) == BASE64_ESCAPE) { idBytes = Arrays.copyOfRange(idBytes, offset + 1, offset + length); } else if ((idBytes.length == length && offset == 0) == false) { // no need to copy if it's not a slice idBytes = Arrays.copyOfRange(idBytes, offset, offset + length); } return Base64.getUrlEncoder().withoutPadding().encodeToString(idBytes); } /** Decode an indexed id back to its original form. * @see #encodeId */ public static String decodeId(byte[] idBytes) { return decodeId(idBytes, 0, idBytes.length); } /** Decode an indexed id back to its original form. 
* @see #encodeId */ public static String decodeId(byte[] idBytes, int offset, int length) { if (length == 0) { throw new IllegalArgumentException("Ids can't be empty"); } final int magicChar = Byte.toUnsignedInt(idBytes[offset]); switch (magicChar) { case NUMERIC: return decodeNumericId(idBytes, offset, length); case UTF8: return decodeUtf8Id(idBytes, offset, length); default: return decodeBase64Id(idBytes, offset, length); } } }
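/*
 * Illustration of the numeric-id encoding used by Uid.encodeId above: a marker byte,
 * then two decimal digits folded into each following byte, with 0x0f as the end marker
 * when the id has an odd number of digits. This standalone sketch re-implements the
 * idea on a plain byte[] (no Lucene BytesRef) purely for clarity; the class and method
 * names are illustrative, not part of the Elasticsearch API.
 */
import java.util.Arrays;

public class NumericIdCodecDemo {

    static byte[] encode(String id) {
        byte[] b = new byte[1 + (id.length() + 1) / 2];
        b[0] = (byte) 0xfe;                       // "numeric" marker, mirroring Uid.NUMERIC
        for (int i = 0; i < id.length(); i += 2) {
            int hi = id.charAt(i) - '0';
            int lo = (i + 1 == id.length()) ? 0x0f : id.charAt(i + 1) - '0';
            b[1 + i / 2] = (byte) ((hi << 4) | lo);
        }
        return b;
    }

    static String decode(byte[] b) {
        StringBuilder sb = new StringBuilder();
        for (int i = 1; i < b.length; i++) {
            int v = Byte.toUnsignedInt(b[i]);
            sb.append((char) ((v >>> 4) + '0'));
            int lo = v & 0x0f;
            if (i == b.length - 1 && lo == 0x0f) {
                break;                            // odd-length id: the low nibble is the end marker
            }
            sb.append((char) (lo + '0'));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        for (String id : new String[] {"1234", "98765"}) {
            byte[] encoded = encode(id);
            // "98765" (5 chars, 5 UTF-8 bytes) packs into 4 bytes including the marker.
            System.out.println(id + " -> " + Arrays.toString(encoded) + " -> " + decode(encoded));
        }
    }
}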
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2019_07_01.implementation; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewaySku; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewaySslPolicy; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayOperationalState; import java.util.List; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayIPConfiguration; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayAuthenticationCertificate; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayTrustedRootCertificate; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewaySslCertificate; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayFrontendIPConfiguration; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayFrontendPort; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayProbe; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayBackendAddressPool; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayBackendHttpSettings; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayHttpListener; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayUrlPathMap; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayRequestRoutingRule; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayRewriteRuleSet; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayRedirectConfiguration; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayWebApplicationFirewallConfiguration; import com.microsoft.azure.SubResource; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayAutoscaleConfiguration; import com.microsoft.azure.management.network.v2019_07_01.ProvisioningState; import com.microsoft.azure.management.network.v2019_07_01.ApplicationGatewayCustomError; import com.microsoft.azure.management.network.v2019_07_01.ManagedServiceIdentity; import com.fasterxml.jackson.annotation.JsonProperty; import com.microsoft.rest.serializer.JsonFlatten; import com.microsoft.rest.SkipParentValidation; import com.microsoft.azure.Resource; /** * Application gateway resource. */ @JsonFlatten @SkipParentValidation public class ApplicationGatewayInner extends Resource { /** * SKU of the application gateway resource. */ @JsonProperty(value = "properties.sku") private ApplicationGatewaySku sku; /** * SSL policy of the application gateway resource. */ @JsonProperty(value = "properties.sslPolicy") private ApplicationGatewaySslPolicy sslPolicy; /** * Operational state of the application gateway resource. Possible values * include: 'Stopped', 'Starting', 'Running', 'Stopping'. */ @JsonProperty(value = "properties.operationalState", access = JsonProperty.Access.WRITE_ONLY) private ApplicationGatewayOperationalState operationalState; /** * Subnets of the application gateway resource. For default limits, see * [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). 
*/ @JsonProperty(value = "properties.gatewayIPConfigurations") private List<ApplicationGatewayIPConfiguration> gatewayIPConfigurations; /** * Authentication certificates of the application gateway resource. For * default limits, see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.authenticationCertificates") private List<ApplicationGatewayAuthenticationCertificate> authenticationCertificates; /** * Trusted Root certificates of the application gateway resource. For * default limits, see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.trustedRootCertificates") private List<ApplicationGatewayTrustedRootCertificate> trustedRootCertificates; /** * SSL certificates of the application gateway resource. For default * limits, see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.sslCertificates") private List<ApplicationGatewaySslCertificate> sslCertificates; /** * Frontend IP addresses of the application gateway resource. For default * limits, see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.frontendIPConfigurations") private List<ApplicationGatewayFrontendIPConfiguration> frontendIPConfigurations; /** * Frontend ports of the application gateway resource. For default limits, * see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.frontendPorts") private List<ApplicationGatewayFrontendPort> frontendPorts; /** * Probes of the application gateway resource. */ @JsonProperty(value = "properties.probes") private List<ApplicationGatewayProbe> probes; /** * Backend address pool of the application gateway resource. For default * limits, see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.backendAddressPools") private List<ApplicationGatewayBackendAddressPool> backendAddressPools; /** * Backend http settings of the application gateway resource. For default * limits, see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.backendHttpSettingsCollection") private List<ApplicationGatewayBackendHttpSettings> backendHttpSettingsCollection; /** * Http listeners of the application gateway resource. For default limits, * see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.httpListeners") private List<ApplicationGatewayHttpListener> httpListeners; /** * URL path map of the application gateway resource. For default limits, * see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.urlPathMaps") private List<ApplicationGatewayUrlPathMap> urlPathMaps; /** * Request routing rules of the application gateway resource. 
*/ @JsonProperty(value = "properties.requestRoutingRules") private List<ApplicationGatewayRequestRoutingRule> requestRoutingRules; /** * Rewrite rules for the application gateway resource. */ @JsonProperty(value = "properties.rewriteRuleSets") private List<ApplicationGatewayRewriteRuleSet> rewriteRuleSets; /** * Redirect configurations of the application gateway resource. For default * limits, see [Application Gateway * limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). */ @JsonProperty(value = "properties.redirectConfigurations") private List<ApplicationGatewayRedirectConfiguration> redirectConfigurations; /** * Web application firewall configuration. */ @JsonProperty(value = "properties.webApplicationFirewallConfiguration") private ApplicationGatewayWebApplicationFirewallConfiguration webApplicationFirewallConfiguration; /** * Reference of the FirewallPolicy resource. */ @JsonProperty(value = "properties.firewallPolicy") private SubResource firewallPolicy; /** * Whether HTTP2 is enabled on the application gateway resource. */ @JsonProperty(value = "properties.enableHttp2") private Boolean enableHttp2; /** * Whether FIPS is enabled on the application gateway resource. */ @JsonProperty(value = "properties.enableFips") private Boolean enableFips; /** * Autoscale Configuration. */ @JsonProperty(value = "properties.autoscaleConfiguration") private ApplicationGatewayAutoscaleConfiguration autoscaleConfiguration; /** * The resource GUID property of the application gateway resource. */ @JsonProperty(value = "properties.resourceGuid") private String resourceGuid; /** * The provisioning state of the application gateway resource. Possible * values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'. */ @JsonProperty(value = "properties.provisioningState") private ProvisioningState provisioningState; /** * Custom error configurations of the application gateway resource. */ @JsonProperty(value = "properties.customErrorConfigurations") private List<ApplicationGatewayCustomError> customErrorConfigurations; /** * A unique read-only string that changes whenever the resource is updated. */ @JsonProperty(value = "etag") private String etag; /** * A list of availability zones denoting where the resource needs to come * from. */ @JsonProperty(value = "zones") private List<String> zones; /** * The identity of the application gateway, if configured. */ @JsonProperty(value = "identity") private ManagedServiceIdentity identity; /** * Resource ID. */ @JsonProperty(value = "id") private String id; /** * Get sKU of the application gateway resource. * * @return the sku value */ public ApplicationGatewaySku sku() { return this.sku; } /** * Set sKU of the application gateway resource. * * @param sku the sku value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withSku(ApplicationGatewaySku sku) { this.sku = sku; return this; } /** * Get sSL policy of the application gateway resource. * * @return the sslPolicy value */ public ApplicationGatewaySslPolicy sslPolicy() { return this.sslPolicy; } /** * Set sSL policy of the application gateway resource. * * @param sslPolicy the sslPolicy value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withSslPolicy(ApplicationGatewaySslPolicy sslPolicy) { this.sslPolicy = sslPolicy; return this; } /** * Get operational state of the application gateway resource. Possible values include: 'Stopped', 'Starting', 'Running', 'Stopping'. 
* * @return the operationalState value */ public ApplicationGatewayOperationalState operationalState() { return this.operationalState; } /** * Get subnets of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the gatewayIPConfigurations value */ public List<ApplicationGatewayIPConfiguration> gatewayIPConfigurations() { return this.gatewayIPConfigurations; } /** * Set subnets of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param gatewayIPConfigurations the gatewayIPConfigurations value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withGatewayIPConfigurations(List<ApplicationGatewayIPConfiguration> gatewayIPConfigurations) { this.gatewayIPConfigurations = gatewayIPConfigurations; return this; } /** * Get authentication certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the authenticationCertificates value */ public List<ApplicationGatewayAuthenticationCertificate> authenticationCertificates() { return this.authenticationCertificates; } /** * Set authentication certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param authenticationCertificates the authenticationCertificates value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withAuthenticationCertificates(List<ApplicationGatewayAuthenticationCertificate> authenticationCertificates) { this.authenticationCertificates = authenticationCertificates; return this; } /** * Get trusted Root certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the trustedRootCertificates value */ public List<ApplicationGatewayTrustedRootCertificate> trustedRootCertificates() { return this.trustedRootCertificates; } /** * Set trusted Root certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param trustedRootCertificates the trustedRootCertificates value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withTrustedRootCertificates(List<ApplicationGatewayTrustedRootCertificate> trustedRootCertificates) { this.trustedRootCertificates = trustedRootCertificates; return this; } /** * Get sSL certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the sslCertificates value */ public List<ApplicationGatewaySslCertificate> sslCertificates() { return this.sslCertificates; } /** * Set sSL certificates of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). 
* * @param sslCertificates the sslCertificates value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withSslCertificates(List<ApplicationGatewaySslCertificate> sslCertificates) { this.sslCertificates = sslCertificates; return this; } /** * Get frontend IP addresses of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the frontendIPConfigurations value */ public List<ApplicationGatewayFrontendIPConfiguration> frontendIPConfigurations() { return this.frontendIPConfigurations; } /** * Set frontend IP addresses of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param frontendIPConfigurations the frontendIPConfigurations value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withFrontendIPConfigurations(List<ApplicationGatewayFrontendIPConfiguration> frontendIPConfigurations) { this.frontendIPConfigurations = frontendIPConfigurations; return this; } /** * Get frontend ports of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the frontendPorts value */ public List<ApplicationGatewayFrontendPort> frontendPorts() { return this.frontendPorts; } /** * Set frontend ports of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param frontendPorts the frontendPorts value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withFrontendPorts(List<ApplicationGatewayFrontendPort> frontendPorts) { this.frontendPorts = frontendPorts; return this; } /** * Get probes of the application gateway resource. * * @return the probes value */ public List<ApplicationGatewayProbe> probes() { return this.probes; } /** * Set probes of the application gateway resource. * * @param probes the probes value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withProbes(List<ApplicationGatewayProbe> probes) { this.probes = probes; return this; } /** * Get backend address pool of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the backendAddressPools value */ public List<ApplicationGatewayBackendAddressPool> backendAddressPools() { return this.backendAddressPools; } /** * Set backend address pool of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param backendAddressPools the backendAddressPools value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withBackendAddressPools(List<ApplicationGatewayBackendAddressPool> backendAddressPools) { this.backendAddressPools = backendAddressPools; return this; } /** * Get backend http settings of the application gateway resource. 
For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the backendHttpSettingsCollection value */ public List<ApplicationGatewayBackendHttpSettings> backendHttpSettingsCollection() { return this.backendHttpSettingsCollection; } /** * Set backend http settings of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param backendHttpSettingsCollection the backendHttpSettingsCollection value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withBackendHttpSettingsCollection(List<ApplicationGatewayBackendHttpSettings> backendHttpSettingsCollection) { this.backendHttpSettingsCollection = backendHttpSettingsCollection; return this; } /** * Get http listeners of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the httpListeners value */ public List<ApplicationGatewayHttpListener> httpListeners() { return this.httpListeners; } /** * Set http listeners of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param httpListeners the httpListeners value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withHttpListeners(List<ApplicationGatewayHttpListener> httpListeners) { this.httpListeners = httpListeners; return this; } /** * Get uRL path map of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the urlPathMaps value */ public List<ApplicationGatewayUrlPathMap> urlPathMaps() { return this.urlPathMaps; } /** * Set uRL path map of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param urlPathMaps the urlPathMaps value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withUrlPathMaps(List<ApplicationGatewayUrlPathMap> urlPathMaps) { this.urlPathMaps = urlPathMaps; return this; } /** * Get request routing rules of the application gateway resource. * * @return the requestRoutingRules value */ public List<ApplicationGatewayRequestRoutingRule> requestRoutingRules() { return this.requestRoutingRules; } /** * Set request routing rules of the application gateway resource. * * @param requestRoutingRules the requestRoutingRules value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withRequestRoutingRules(List<ApplicationGatewayRequestRoutingRule> requestRoutingRules) { this.requestRoutingRules = requestRoutingRules; return this; } /** * Get rewrite rules for the application gateway resource. * * @return the rewriteRuleSets value */ public List<ApplicationGatewayRewriteRuleSet> rewriteRuleSets() { return this.rewriteRuleSets; } /** * Set rewrite rules for the application gateway resource. * * @param rewriteRuleSets the rewriteRuleSets value to set * @return the ApplicationGatewayInner object itself. 
*/ public ApplicationGatewayInner withRewriteRuleSets(List<ApplicationGatewayRewriteRuleSet> rewriteRuleSets) { this.rewriteRuleSets = rewriteRuleSets; return this; } /** * Get redirect configurations of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @return the redirectConfigurations value */ public List<ApplicationGatewayRedirectConfiguration> redirectConfigurations() { return this.redirectConfigurations; } /** * Set redirect configurations of the application gateway resource. For default limits, see [Application Gateway limits](https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-limits). * * @param redirectConfigurations the redirectConfigurations value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withRedirectConfigurations(List<ApplicationGatewayRedirectConfiguration> redirectConfigurations) { this.redirectConfigurations = redirectConfigurations; return this; } /** * Get web application firewall configuration. * * @return the webApplicationFirewallConfiguration value */ public ApplicationGatewayWebApplicationFirewallConfiguration webApplicationFirewallConfiguration() { return this.webApplicationFirewallConfiguration; } /** * Set web application firewall configuration. * * @param webApplicationFirewallConfiguration the webApplicationFirewallConfiguration value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withWebApplicationFirewallConfiguration(ApplicationGatewayWebApplicationFirewallConfiguration webApplicationFirewallConfiguration) { this.webApplicationFirewallConfiguration = webApplicationFirewallConfiguration; return this; } /** * Get reference of the FirewallPolicy resource. * * @return the firewallPolicy value */ public SubResource firewallPolicy() { return this.firewallPolicy; } /** * Set reference of the FirewallPolicy resource. * * @param firewallPolicy the firewallPolicy value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withFirewallPolicy(SubResource firewallPolicy) { this.firewallPolicy = firewallPolicy; return this; } /** * Get whether HTTP2 is enabled on the application gateway resource. * * @return the enableHttp2 value */ public Boolean enableHttp2() { return this.enableHttp2; } /** * Set whether HTTP2 is enabled on the application gateway resource. * * @param enableHttp2 the enableHttp2 value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withEnableHttp2(Boolean enableHttp2) { this.enableHttp2 = enableHttp2; return this; } /** * Get whether FIPS is enabled on the application gateway resource. * * @return the enableFips value */ public Boolean enableFips() { return this.enableFips; } /** * Set whether FIPS is enabled on the application gateway resource. * * @param enableFips the enableFips value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withEnableFips(Boolean enableFips) { this.enableFips = enableFips; return this; } /** * Get autoscale Configuration. * * @return the autoscaleConfiguration value */ public ApplicationGatewayAutoscaleConfiguration autoscaleConfiguration() { return this.autoscaleConfiguration; } /** * Set autoscale Configuration. 
* * @param autoscaleConfiguration the autoscaleConfiguration value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withAutoscaleConfiguration(ApplicationGatewayAutoscaleConfiguration autoscaleConfiguration) { this.autoscaleConfiguration = autoscaleConfiguration; return this; } /** * Get the resource GUID property of the application gateway resource. * * @return the resourceGuid value */ public String resourceGuid() { return this.resourceGuid; } /** * Set the resource GUID property of the application gateway resource. * * @param resourceGuid the resourceGuid value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withResourceGuid(String resourceGuid) { this.resourceGuid = resourceGuid; return this; } /** * Get the provisioning state of the application gateway resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'. * * @return the provisioningState value */ public ProvisioningState provisioningState() { return this.provisioningState; } /** * Set the provisioning state of the application gateway resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'. * * @param provisioningState the provisioningState value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withProvisioningState(ProvisioningState provisioningState) { this.provisioningState = provisioningState; return this; } /** * Get custom error configurations of the application gateway resource. * * @return the customErrorConfigurations value */ public List<ApplicationGatewayCustomError> customErrorConfigurations() { return this.customErrorConfigurations; } /** * Set custom error configurations of the application gateway resource. * * @param customErrorConfigurations the customErrorConfigurations value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withCustomErrorConfigurations(List<ApplicationGatewayCustomError> customErrorConfigurations) { this.customErrorConfigurations = customErrorConfigurations; return this; } /** * Get a unique read-only string that changes whenever the resource is updated. * * @return the etag value */ public String etag() { return this.etag; } /** * Set a unique read-only string that changes whenever the resource is updated. * * @param etag the etag value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withEtag(String etag) { this.etag = etag; return this; } /** * Get a list of availability zones denoting where the resource needs to come from. * * @return the zones value */ public List<String> zones() { return this.zones; } /** * Set a list of availability zones denoting where the resource needs to come from. * * @param zones the zones value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withZones(List<String> zones) { this.zones = zones; return this; } /** * Get the identity of the application gateway, if configured. * * @return the identity value */ public ManagedServiceIdentity identity() { return this.identity; } /** * Set the identity of the application gateway, if configured. * * @param identity the identity value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withIdentity(ManagedServiceIdentity identity) { this.identity = identity; return this; } /** * Get resource ID. 
* * @return the id value */ public String id() { return this.id; } /** * Set resource ID. * * @param id the id value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withId(String id) { this.id = id; return this; } }
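/*
 * A small usage sketch for the generated model above: each with*() setter returns the
 * instance, so an ApplicationGatewayInner payload can be assembled fluently before it
 * is handed to the networks client. Only setters and getters declared in the class
 * above are used; the values are illustrative placeholders, the surrounding service
 * calls are omitted, and the demo class name is an assumption.
 */
import java.util.Arrays;

import com.microsoft.azure.management.network.v2019_07_01.implementation.ApplicationGatewayInner;

public class ApplicationGatewayInnerSketch {
    public static void main(String[] args) {
        ApplicationGatewayInner gateway = new ApplicationGatewayInner()
                .withEnableHttp2(true)                 // serialized as properties.enableHttp2 via @JsonFlatten
                .withEnableFips(false)                 // serialized as properties.enableFips
                .withZones(Arrays.asList("1", "2"))    // top-level "zones" array
                .withResourceGuid("00000000-0000-0000-0000-000000000000");

        // Read the values back through the matching getters.
        System.out.println("HTTP/2 enabled: " + gateway.enableHttp2());
        System.out.println("Zones: " + gateway.zones());
    }
}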
package ie.dit;

/*
 * This is our game class.
 * Our game objects will be created here.
 */

import java.awt.Color;
import java.util.ArrayList;
import java.util.Random;

import processing.core.PApplet;
import processing.core.PImage;
import processing.core.PVector;

import ddf.minim.AudioPlayer;
import ddf.minim.Minim;

public class Game {

    String state = "ready";
    PImage ready_img;
    PApplet applet; // A reference to the PApplet class.
    ArrayList<GameObject> objects;
    Map map;
    Player player;
    int level = 0, lives = 3, score = 0, number_of_enemies = 0;
    Minim minim; // Required to use Minim.
    AudioPlayer snd_explode;

    // We initialize the game by doing Game game = new Game(this);
    // the 'this' argument is our PApplet window.
    public Game(PApplet applet){
        this.applet = applet;
        this.applet.size(700, 700);
        ready_img = applet.loadImage("data/start.jpg");

        minim = new Minim(this.applet); // Required to use Minim.
        snd_explode = minim.loadFile("data/explosion.wav");

        // Our game objects.
        map = new Map(this.applet);
        player = new Player(this.applet);

        // An ArrayList for our game objects.
        objects = new ArrayList<GameObject>();

        // Add the game objects to the ArrayList.
        objects.add(map);
        objects.add(player);
    } // End constructor.

    public void run(){
        // ready.
        if ( state.equals("ready") )
        {
            readyGame();
        } // End ready.

        // run.
        if ( state.equals("run") )
        {
            runGame();
        } // End run.

        // over.
        if ( state.equals("over") )
        {
            overGame();
        } // End over.
    } // End run.

    public void keyPressed(){
        if ( state.equals("ready") )
        {
            if(applet.keyCode == 83){ // Press 's' to start.
                state = "run";
            }
        } // End ready.

        if ( state.equals("run") )
        {
            player.keyPressed();
        } // End run.

        if ( state.equals("over") )
        {
            if(applet.keyCode == 82){ // Press 'r' to restart.
                applet.setup();
            }
        } // End over.
    } // End keyPressed.

    public void keyReleased(){
        player.keyReleased();
    } // End keyReleased.

    // Draw the start screen with the on-screen instructions.
    public void readyGame() {
        applet.image(ready_img, 0, 0, applet.width, applet.height);
        applet.textAlign(PApplet.CENTER);
        applet.textSize( 26 );
        applet.fill(0, 0, 0, 255);
        applet.text("Instructions", (applet.width/2) + 3, 53);
        applet.text("Use the arrow keys to control the player", (applet.width/2) + 3, 103);
        applet.text("Press 'E' to fire", (applet.width/2) + 3, 153);
        applet.fill(255, 255, 255, 255);
        applet.text("Instructions", applet.width/2, 50);
        applet.text("Use the arrow keys to control the player", applet.width/2, 100);
        applet.text("Press 'E' to fire", applet.width/2, 150);
        applet.noFill();
    } // End readyGame.

    public void runGame() {
        set_level();

        // Run all the game objects.
        for (int i = 0; i < objects.size(); ++i) {

            // The current object is an Enemy.
            if(objects.get(i) instanceof Enemy){
                Enemy enemy = (Enemy) objects.get(i);

                // Enemy collides with Player.
                if( collide(enemy, player) ){
                    float tmp = enemy.theta;
                    enemy.theta = player.theta;
                    player.theta = tmp;
                    player.health -= 10;
                } // End Enemy collides with Player.

                // Loop through the player's bullets.
                for (GameObject bullet : player.objects) {
                    // Player's bullet hits enemy.
                    if(collide(enemy, bullet)){
                        bullet.alive = false;
                        enemy.health -= 10;
                        score += 10;
                        break;
                    } // End Player's bullet hits enemy.
                } // End loop through the player's bullets.

                // Enemy's bullet hits Player.
                for (GameObject bullet : enemy.objects) {
                    if(collide(player, bullet)){
                        bullet.alive = false;
                        player.health -= 10;
                        break;
                    }
                } // End Enemy's bullet hits Player.
                if(enemy.health <= 15)
                    enemy.objects.add(new Smoke(applet, enemy.location.x + (enemy.w/2), enemy.location.y + (enemy.h/2) ));

                if (enemy.health <= 0) {
                    snd_explode.rewind();
                    snd_explode.play();
                    enemy.alive = false;
                    --number_of_enemies;
                }
            } // End The current object is an Enemy.

            if(player.health <= 15)
                player.objects.add(new Smoke(applet, player.location.x + (player.w/2), player.location.y + (player.h/2) ));

            if (player.health <= 0 && lives > 0) {
                --lives;
                player.health = 50;
            }

            if(lives <= 0){
                state = "over";
                player.alive = false;
            }

            objects.get(i).run();

            if (!objects.get(i).alive) {
                objects.remove(i);
                --i; // Step back so the object shifted into this slot is not skipped.
            }
        } // End Run all the game objects.

        draw_scores();
    } // End runGame.

    public void overGame(){
        // Draw the map and the score panel behind the game-over message.
        map.run();
        draw_scores();

        applet.textAlign(PApplet.CENTER);
        applet.textSize( 36 );
        applet.fill(0, 0, 0, 255);
        applet.text("Game Over", (applet.width/2) + 3, 303); // Shown when the game is over.
        applet.text("Press 'R' to Restart", (applet.width/2) + 3, 353);
        applet.fill(255, 255, 255, 255);
        applet.text("Game Over", applet.width/2, 300);
        applet.text("Press 'R' to Restart", applet.width/2, 350); // Press 'R' to restart the game.
        applet.noFill();
    } // End overGame.

    public void draw_scores() {
        applet.textAlign(PApplet.LEFT);
        applet.textSize( 26 );
        applet.fill(255, 255, 255, 255);
        applet.text("Lives " + lives, 30, 50);
        applet.text("Level " + level, 30, 100);
        applet.text("Score " + score, 30, 150);
        applet.noFill();
    } // End draw_scores.

    public void set_level() {
        if(number_of_enemies == 0){
            ++level;
            createEnemies(level);
        }
    } // End set_level.

    public void createEnemies( int amount ){
        // Spawn 'amount' new enemies with randomised behaviour.
        for ( int i = 0; i < amount; i++ ) {
            Enemy enemy = new Enemy(this.applet);
            Random random = new Random();
            enemy.fireRate = random.nextFloat(); // Randomise the fire rate.
            enemy.theta = random.nextFloat() * 2 - 1;
            enemy.colour = new Color(0, 100, 0, 255); // Dark green enemy colour.
            enemy.location = new PVector(-500, -500);
            objects.add(enemy);
            ++number_of_enemies; // Keep count of the enemies currently in play.
        }
    } // End createEnemies method.

    private boolean collide(GameObject obj1, GameObject obj2) {
        // Axis-aligned bounding-box overlap test between the two game objects.
        if(obj1.location.x + obj1.w > obj2.location.x && obj1.location.x < obj2.location.x + obj2.w
                && obj1.location.y + obj1.h > obj2.location.y && obj1.location.y < obj2.location.y + obj2.h){
            return true;
        }
        return false;
    } // End collide.
} // End Game class.
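/*
 * The collide() method above is an axis-aligned bounding-box (AABB) test: two
 * rectangles intersect exactly when they overlap on both the x and y axes. This
 * standalone sketch re-states the same predicate with plain floats (no Processing
 * types) and exercises it on an overlapping, a touching and a separated pair.
 * The names (Box, overlaps, AabbDemo) are illustrative, not part of the game code.
 */
public class AabbDemo {

    /** Minimal rectangle: top-left corner plus width and height, as in GameObject. */
    static final class Box {
        final float x, y, w, h;
        Box(float x, float y, float w, float h) { this.x = x; this.y = y; this.w = w; this.h = h; }
    }

    /** Same predicate as Game.collide(): strict overlap required on both axes. */
    static boolean overlaps(Box a, Box b) {
        return a.x + a.w > b.x && a.x < b.x + b.w
            && a.y + a.h > b.y && a.y < b.y + b.h;
    }

    public static void main(String[] args) {
        Box player = new Box(0, 0, 10, 10);
        System.out.println(overlaps(player, new Box(5, 5, 10, 10)));   // true: rectangles overlap
        System.out.println(overlaps(player, new Box(10, 0, 10, 10)));  // false: edges only touch
        System.out.println(overlaps(player, new Box(25, 25, 10, 10))); // false: fully separated
    }
}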
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.suggest; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; import org.elasticsearch.search.suggest.term.TermSuggestion; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** * Top level suggest result, containing the result for each suggestion. */ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? extends Option>>>, Streamable, ToXContentFragment { public static final String NAME = "suggest"; public static final Comparator<Option> COMPARATOR = (first, second) -> { int cmp = Float.compare(second.getScore(), first.getScore()); if (cmp != 0) { return cmp; } return first.getText().compareTo(second.getText()); }; private List<Suggestion<? extends Entry<? extends Option>>> suggestions; private boolean hasScoreDocs; private Map<String, Suggestion<? extends Entry<? extends Option>>> suggestMap; private Suggest() { this(Collections.emptyList()); } public Suggest(List<Suggestion<? extends Entry<? 
extends Option>>> suggestions) { // we sort suggestions by their names to ensure iteration over suggestions are consistent // this is needed as we need to fill in suggestion docs in SearchPhaseController#sortDocs // in the same order as we enrich the suggestions with fetch results in SearchPhaseController#merge suggestions.sort((o1, o2) -> o1.getName().compareTo(o2.getName())); this.suggestions = suggestions; this.hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs); } @Override public Iterator<Suggestion<? extends Entry<? extends Option>>> iterator() { return suggestions.iterator(); } /** * The number of suggestions in this {@link Suggest} result */ public int size() { return suggestions.size(); } public <T extends Suggestion<? extends Entry<? extends Option>>> T getSuggestion(String name) { if (suggestions.isEmpty() || name == null) { return null; } else if (suggestions.size() == 1) { return (T) (name.equals(suggestions.get(0).name) ? suggestions.get(0) : null); } else if (this.suggestMap == null) { suggestMap = new HashMap<>(); for (Suggest.Suggestion<? extends Entry<? extends Option>> item : suggestions) { suggestMap.put(item.getName(), item); } } return (T) suggestMap.get(name); } /** * Whether any suggestions had query hits */ public boolean hasScoreDocs() { return hasScoreDocs; } @Override public void readFrom(StreamInput in) throws IOException { final int size = in.readVInt(); suggestions = new ArrayList<>(size); for (int i = 0; i < size; i++) { // TODO: remove these complicated generics Suggestion<? extends Entry<? extends Option>> suggestion; final int type = in.readVInt(); switch (type) { case TermSuggestion.TYPE: suggestion = new TermSuggestion(); break; case CompletionSuggestion.TYPE: suggestion = new CompletionSuggestion(); break; case 2: // CompletionSuggestion.TYPE throw new IllegalArgumentException("Completion suggester 2.x is not supported anymore"); case PhraseSuggestion.TYPE: suggestion = new PhraseSuggestion(); break; default: suggestion = new Suggestion(); break; } suggestion.readFrom(in); suggestions.add(suggestion); } hasScoreDocs = filter(CompletionSuggestion.class).stream().anyMatch(CompletionSuggestion::hasScoreDocs); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(suggestions.size()); for (Suggestion<?> command : suggestions) { out.writeVInt(command.getWriteableType()); command.writeTo(out); } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); for (Suggestion<?> suggestion : suggestions) { suggestion.toXContent(builder, params); } builder.endObject(); return builder; } /** * this parsing method assumes that the leading "suggest" field name has already been parsed by the caller */ public static Suggest fromXContent(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); List<Suggestion<? extends Entry<? extends Option>>> suggestions = new ArrayList<>(); while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); String currentField = parser.currentName(); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation); Suggestion<? extends Entry<? 
extends Option>> suggestion = Suggestion.fromXContent(parser); if (suggestion != null) { suggestions.add(suggestion); } else { throw new ParsingException(parser.getTokenLocation(), String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField)); } } return new Suggest(suggestions); } public static Suggest readSuggest(StreamInput in) throws IOException { Suggest result = new Suggest(); result.readFrom(in); return result; } public static List<Suggestion<? extends Entry<? extends Option>>> reduce(Map<String, List<Suggest.Suggestion>> groupedSuggestions) { List<Suggestion<? extends Entry<? extends Option>>> reduced = new ArrayList<>(groupedSuggestions.size()); for (java.util.Map.Entry<String, List<Suggestion>> unmergedResults : groupedSuggestions.entrySet()) { List<Suggestion> value = unmergedResults.getValue(); Class<? extends Suggestion> suggestionClass = null; for (Suggestion suggestion : value) { if (suggestionClass == null) { suggestionClass = suggestion.getClass(); } else if (suggestionClass != suggestion.getClass()) { throw new IllegalArgumentException( "detected mixed suggestion results, due to querying on old and new completion suggester," + " query on a single completion suggester version"); } } Suggestion reduce = value.get(0).reduce(value); reduce.trim(); reduced.add(reduce); } return reduced; } /** * @return only suggestions of type <code>suggestionType</code> contained in this {@link Suggest} instance */ public <T extends Suggestion> List<T> filter(Class<T> suggestionType) { return suggestions.stream() .filter(suggestion -> suggestion.getClass() == suggestionType) .map(suggestion -> (T) suggestion) .collect(Collectors.toList()); } /** * The suggestion responses corresponding with the suggestions in the request. */ public static class Suggestion<T extends Suggestion.Entry> implements Iterable<T>, Streamable, ToXContentFragment { private static final String NAME = "suggestion"; public static final int TYPE = 0; protected String name; protected int size; protected final List<T> entries = new ArrayList<>(5); protected Suggestion() { } public Suggestion(String name, int size) { this.name = name; this.size = size; // The suggested term size specified in request, only used for merging shard responses } public void addTerm(T entry) { entries.add(entry); } /** * Returns a integer representing the type of the suggestion. This is used for * internal serialization over the network. */ public int getWriteableType() { // TODO remove this in favor of NamedWriteable return TYPE; } /** * Returns a string representing the type of the suggestion. This type is added to * the suggestion name in the XContent response, so that it can later be used by * REST clients to determine the internal type of the suggestion. */ protected String getType() { return NAME; } @Override public Iterator<T> iterator() { return entries.iterator(); } /** * @return The entries for this suggestion. */ public List<T> getEntries() { return entries; } /** * @return The name of the suggestion as is defined in the request. */ public String getName() { return name; } /** * @return The number of requested suggestion option size */ public int getSize() { return size; } /** * Merges the result of another suggestion into this suggestion. * For internal usage. 
*/ public Suggestion<T> reduce(List<Suggestion<T>> toReduce) { if (toReduce.size() == 1) { return toReduce.get(0); } else if (toReduce.isEmpty()) { return null; } Suggestion<T> leader = toReduce.get(0); List<T> entries = leader.entries; final int size = entries.size(); Comparator<Option> sortComparator = sortComparator(); List<T> currentEntries = new ArrayList<>(); for (int i = 0; i < size; i++) { for (Suggestion<T> suggestion : toReduce) { if(suggestion.entries.size() != size) { throw new IllegalStateException("Can't merge suggest result, this might be caused by suggest calls " + "across multiple indices with different analysis chains. Suggest entries have different sizes actual [" + suggestion.entries.size() + "] expected [" + size +"]"); } assert suggestion.name.equals(leader.name); currentEntries.add(suggestion.entries.get(i)); } T entry = (T) entries.get(i).reduce(currentEntries); entry.sort(sortComparator); entries.set(i, entry); currentEntries.clear(); } return leader; } protected Comparator<Option> sortComparator() { return COMPARATOR; } /** * Trims the number of options per suggest text term to the requested size. * For internal usage. */ public void trim() { for (Entry<?> entry : entries) { entry.trim(size); } } @Override public void readFrom(StreamInput in) throws IOException { innerReadFrom(in); int size = in.readVInt(); entries.clear(); for (int i = 0; i < size; i++) { T newEntry = newEntry(); newEntry.readFrom(in); entries.add(newEntry); } } protected T newEntry() { return (T)new Entry(); } protected void innerReadFrom(StreamInput in) throws IOException { name = in.readString(); size = in.readVInt(); } @Override public void writeTo(StreamOutput out) throws IOException { innerWriteTo(out); out.writeVInt(entries.size()); for (Entry<?> entry : entries) { entry.writeTo(out); } } public void innerWriteTo(StreamOutput out) throws IOException { out.writeString(name); out.writeVInt(size); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (params.paramAsBoolean(RestSearchAction.TYPED_KEYS_PARAM, false)) { // Concatenates the type and the name of the suggestion (ex: completion#foo) builder.startArray(String.join(Aggregation.TYPED_KEYS_DELIMITER, getType(), getName())); } else { builder.startArray(getName()); } for (Entry<?> entry : entries) { entry.toXContent(builder, params); } builder.endArray(); return builder; } @SuppressWarnings("unchecked") public static Suggestion<? extends Entry<? extends Option>> fromXContent(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation); SetOnce<Suggestion> suggestion = new SetOnce<>(); XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Suggestion.class, suggestion::set); return suggestion.get(); } protected static <E extends Suggestion.Entry<?>> void parseEntries(XContentParser parser, Suggestion<E> suggestion, CheckedFunction<XContentParser, E, IOException> entryParser) throws IOException { ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation); while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { suggestion.addTerm(entryParser.apply(parser)); } } /** * Represents a part from the suggest text with suggested options. 
*/ public static class Entry<O extends Entry.Option> implements Iterable<O>, Streamable, ToXContentObject { private static final String TEXT = "text"; private static final String OFFSET = "offset"; private static final String LENGTH = "length"; protected static final String OPTIONS = "options"; protected Text text; protected int offset; protected int length; protected List<O> options = new ArrayList<>(5); public Entry(Text text, int offset, int length) { this.text = text; this.offset = offset; this.length = length; } protected Entry() { } public void addOption(O option) { options.add(option); } protected void addOptions(List<O> options) { for (O option : options) { addOption(option); } } protected void sort(Comparator<O> comparator) { CollectionUtil.timSort(options, comparator); } protected <T extends Entry<O>> Entry<O> reduce(List<T> toReduce) { if (toReduce.size() == 1) { return toReduce.get(0); } final Map<O, O> entries = new HashMap<>(); Entry<O> leader = toReduce.get(0); for (Entry<O> entry : toReduce) { if (!leader.text.equals(entry.text)) { throw new IllegalStateException("Can't merge suggest entries, this might be caused by suggest calls " + "across multiple indices with different analysis chains. Suggest entries have different text actual [" + entry.text + "] expected [" + leader.text +"]"); } assert leader.offset == entry.offset; assert leader.length == entry.length; leader.merge(entry); for (O option : entry) { O merger = entries.get(option); if (merger == null) { entries.put(option, option); } else { merger.mergeInto(option); } } } leader.options.clear(); for (O option: entries.keySet()) { leader.addOption(option); } return leader; } /** * Merge any extra fields for this subtype. */ protected void merge(Entry<O> other) { } /** * @return the text (analyzed by suggest analyzer) originating from the suggest text. Usually this is a * single term. */ public Text getText() { return text; } /** * @return the start offset (not analyzed) for this entry in the suggest text. */ public int getOffset() { return offset; } /** * @return the length (not analyzed) for this entry in the suggest text. */ public int getLength() { return length; } @Override public Iterator<O> iterator() { return options.iterator(); } /** * @return The suggested options for this particular suggest entry. If there are no suggested terms then * an empty list is returned. 
*/ public List<O> getOptions() { return options; } void trim(int size) { int optionsToRemove = Math.max(0, options.size() - size); for (int i = 0; i < optionsToRemove; i++) { options.remove(options.size() - 1); } } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Entry<?> entry = (Entry<?>) o; if (length != entry.length) return false; if (offset != entry.offset) return false; if (!this.text.equals(entry.text)) return false; return true; } @Override public int hashCode() { int result = text.hashCode(); result = 31 * result + offset; result = 31 * result + length; return result; } @Override public void readFrom(StreamInput in) throws IOException { text = in.readText(); offset = in.readVInt(); length = in.readVInt(); int suggestedWords = in.readVInt(); options = new ArrayList<>(suggestedWords); for (int j = 0; j < suggestedWords; j++) { O newOption = newOption(); newOption.readFrom(in); options.add(newOption); } } @SuppressWarnings("unchecked") protected O newOption(){ return (O) new Option(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeText(text); out.writeVInt(offset); out.writeVInt(length); out.writeVInt(options.size()); for (Option option : options) { option.writeTo(out); } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(TEXT, text); builder.field(OFFSET, offset); builder.field(LENGTH, length); builder.startArray(OPTIONS); for (Option option : options) { option.toXContent(builder, params); } builder.endArray(); builder.endObject(); return builder; } private static ObjectParser<Entry<Option>, Void> PARSER = new ObjectParser<>("SuggestionEntryParser", true, Entry::new); static { declareCommonFields(PARSER); PARSER.declareObjectArray(Entry::addOptions, (p,c) -> Option.fromXContent(p), new ParseField(OPTIONS)); } protected static void declareCommonFields(ObjectParser<? extends Entry<? extends Option>, Void> parser) { parser.declareString((entry, text) -> entry.text = new Text(text), new ParseField(TEXT)); parser.declareInt((entry, offset) -> entry.offset = offset, new ParseField(OFFSET)); parser.declareInt((entry, length) -> entry.length = length, new ParseField(LENGTH)); } public static Entry<? extends Option> fromXContent(XContentParser parser) { return PARSER.apply(parser, null); } /** * Contains the suggested text with its document frequency and score. */ public static class Option implements Streamable, ToXContentObject { public static final ParseField TEXT = new ParseField("text"); public static final ParseField HIGHLIGHTED = new ParseField("highlighted"); public static final ParseField SCORE = new ParseField("score"); public static final ParseField COLLATE_MATCH = new ParseField("collate_match"); private Text text; private Text highlighted; private float score; private Boolean collateMatch; public Option(Text text, Text highlighted, float score, Boolean collateMatch) { this.text = text; this.highlighted = highlighted; this.score = score; this.collateMatch = collateMatch; } public Option(Text text, Text highlighted, float score) { this(text, highlighted, score, null); } public Option(Text text, float score) { this(text, null, score); } public Option() { } /** * @return The actual suggested text. */ public Text getText() { return text; } /** * @return Copy of suggested text with changes from user supplied text highlighted. 
*/ public Text getHighlighted() { return highlighted; } /** * @return The score based on the edit distance difference between the suggested term and the * term in the suggest text. */ public float getScore() { return score; } /** * @return true if collation has found a match for the entry. * if collate was not set, the value defaults to <code>true</code> */ public boolean collateMatch() { return (collateMatch != null) ? collateMatch : true; } protected void setScore(float score) { this.score = score; } @Override public void readFrom(StreamInput in) throws IOException { text = in.readText(); score = in.readFloat(); highlighted = in.readOptionalText(); collateMatch = in.readOptionalBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeText(text); out.writeFloat(score); out.writeOptionalText(highlighted); out.writeOptionalBoolean(collateMatch); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); innerToXContent(builder, params); builder.endObject(); return builder; } protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { builder.field(TEXT.getPreferredName(), text); if (highlighted != null) { builder.field(HIGHLIGHTED.getPreferredName(), highlighted); } builder.field(SCORE.getPreferredName(), score); if (collateMatch != null) { builder.field(COLLATE_MATCH.getPreferredName(), collateMatch.booleanValue()); } return builder; } private static final ConstructingObjectParser<Option, Void> PARSER = new ConstructingObjectParser<>("SuggestOptionParser", true, args -> { Text text = new Text((String) args[0]); float score = (Float) args[1]; String highlighted = (String) args[2]; Text highlightedText = highlighted == null ? null : new Text(highlighted); Boolean collateMatch = (Boolean) args[3]; return new Option(text, highlightedText, score, collateMatch); }); static { PARSER.declareString(constructorArg(), TEXT); PARSER.declareFloat(constructorArg(), SCORE); PARSER.declareString(optionalConstructorArg(), HIGHLIGHTED); PARSER.declareBoolean(optionalConstructorArg(), COLLATE_MATCH); } public static Option fromXContent(XContentParser parser) { return PARSER.apply(parser, null); } public void mergeInto(Option otherOption) { score = Math.max(score, otherOption.score); if (otherOption.collateMatch != null) { if (collateMatch == null) { collateMatch = otherOption.collateMatch; } else { collateMatch |= otherOption.collateMatch; } } } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Option that = (Option) o; return text.equals(that.text); } @Override public int hashCode() { return text.hashCode(); } } } } @Override public String toString() { try { XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } } }
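/*
 * Illustrative sketch (not part of the Elasticsearch sources above): the option
 * de-duplication in Suggestion.Entry#reduce keys a HashMap on Option equality
 * (options are equal when their text is equal) and folds duplicates together with
 * Option#mergeInto, which keeps the maximum score. The self-contained example below
 * reproduces that merge-by-key pattern with a minimal stand-in class; the class and
 * method names here are hypothetical and chosen only for illustration.
 */
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class OptionMergeSketch {

    /** Minimal stand-in for Suggest.Suggestion.Entry.Option: equality is text-only. */
    static final class SimpleOption {
        final String text;
        float score;

        SimpleOption(String text, float score) {
            this.text = text;
            this.score = score;
        }

        /** Mirrors Option#mergeInto: keep the best score seen for the same text. */
        void mergeInto(SimpleOption other) {
            score = Math.max(score, other.score);
        }

        @Override public boolean equals(Object o) {
            return o instanceof SimpleOption && text.equals(((SimpleOption) o).text);
        }

        @Override public int hashCode() {
            return text.hashCode();
        }

        @Override public String toString() {
            return text + "=" + score;
        }
    }

    /** Folds options from several shard entries into one list, de-duplicating by text. */
    static List<SimpleOption> reduceOptions(List<List<SimpleOption>> perShardOptions) {
        Map<SimpleOption, SimpleOption> merged = new LinkedHashMap<>();
        for (List<SimpleOption> shard : perShardOptions) {
            for (SimpleOption option : shard) {
                SimpleOption existing = merged.get(option);
                if (existing == null) {
                    merged.put(option, option);
                } else {
                    existing.mergeInto(option);
                }
            }
        }
        return new ArrayList<>(merged.values());
    }

    public static void main(String[] args) {
        List<SimpleOption> shard1 = Arrays.asList(new SimpleOption("tree", 0.8f), new SimpleOption("three", 0.4f));
        List<SimpleOption> shard2 = Arrays.asList(new SimpleOption("tree", 0.9f), new SimpleOption("free", 0.2f));
        // Prints [tree=0.9, three=0.4, free=0.2]: duplicates collapse, the best score wins.
        System.out.println(reduceOptions(Arrays.asList(shard1, shard2)));
    }
}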
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.android; import com.android.annotations.Nullable; import com.android.builder.core.VariantType; import com.android.ide.common.internal.PngCruncher; import com.google.common.base.Stopwatch; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import java.io.Closeable; import java.io.IOException; import java.nio.file.Path; import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.logging.Logger; /** Collects all the functionality for an action to merge resources. */ // TODO(bazel-team): Turn into an instance object, in order to use an external ExecutorService. public class AndroidResourceMerger { /** Thrown when there is a unexpected condition during merging. */ public static class MergingException extends RuntimeException { private MergingException(Throwable e) { super(e); } private MergingException(String message) { super(message); } static MergingException wrapException(Throwable e) { return new MergingException(e); } static MergingException withMessage(String message) { return new MergingException(message); } } static final Logger logger = Logger.getLogger(AndroidResourceProcessor.class.getName()); /** Merges all secondary resources with the primary resources. */ public static MergedAndroidData mergeData( final ParsedAndroidData primary, final Path primaryManifest, final List<? extends SerializedAndroidData> direct, final List<? 
extends SerializedAndroidData> transitive, final Path resourcesOut, final Path assetsOut, @Nullable final PngCruncher cruncher, final VariantType type, @Nullable final Path symbolsOut, @Nullable AndroidResourceClassWriter rclassWriter, AndroidDataDeserializer deserializer, boolean throwOnResourceConflict) { Stopwatch timer = Stopwatch.createStarted(); final ListeningExecutorService executorService = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(15)); try (Closeable closeable = ExecutorServiceCloser.createWith(executorService)) { UnwrittenMergedAndroidData merged = mergeData( executorService, transitive, direct, primary, primaryManifest, type != VariantType.LIBRARY, deserializer, throwOnResourceConflict); timer.reset().start(); if (symbolsOut != null) { AndroidDataSerializer serializer = AndroidDataSerializer.create(); merged.serializeTo(serializer); serializer.flushTo(symbolsOut); logger.fine( String.format( "serialize merge finished in %sms", timer.elapsed(TimeUnit.MILLISECONDS))); timer.reset().start(); } if (rclassWriter != null) { merged.writeResourceClass(rclassWriter); logger.fine( String.format("write classes finished in %sms", timer.elapsed(TimeUnit.MILLISECONDS))); timer.reset().start(); } AndroidDataWriter writer = AndroidDataWriter.createWith( resourcesOut.getParent(), resourcesOut, assetsOut, cruncher, executorService); return merged.write(writer); } catch (IOException e) { throw MergingException.wrapException(e); } finally { logger.fine( String.format("write merge finished in %sms", timer.elapsed(TimeUnit.MILLISECONDS))); } } public static UnwrittenMergedAndroidData mergeData( ListeningExecutorService executorService, List<? extends SerializedAndroidData> transitive, List<? extends SerializedAndroidData> direct, ParsedAndroidData primary, Path primaryManifest, boolean allowPrimaryOverrideAll, AndroidDataDeserializer deserializer, boolean throwOnResourceConflict) { Stopwatch timer = Stopwatch.createStarted(); try { AndroidDataMerger merger = AndroidDataMerger.createWithPathDeduplictor(executorService, deserializer); return merger.loadAndMerge( transitive, direct, primary, primaryManifest, allowPrimaryOverrideAll, throwOnResourceConflict); } finally { logger.fine(String.format("merge finished in %sms", timer.elapsed(TimeUnit.MILLISECONDS))); } } /** * Merges all secondary resources with the primary resources, given that the primary resources * have not yet been parsed and serialized. */ public static MergedAndroidData mergeData( final UnvalidatedAndroidData primary, final List<? extends SerializedAndroidData> direct, final List<? extends SerializedAndroidData> transitive, final Path resourcesOut, final Path assetsOut, @Nullable final PngCruncher cruncher, final VariantType type, @Nullable final Path symbolsOut, final List<String> filteredResources, boolean throwOnResourceConflict) { try { final ParsedAndroidData parsedPrimary = ParsedAndroidData.from(primary); return mergeData( parsedPrimary, primary.getManifest(), direct, transitive, resourcesOut, assetsOut, cruncher, type, symbolsOut, null /* rclassWriter */, AndroidDataDeserializer.withFilteredResources(filteredResources), throwOnResourceConflict); } catch (IOException e) { throw MergingException.wrapException(e); } } /** * Merges all secondary resources with the primary resources, given that the primary resources * have been separately parsed and serialized. */ public static MergedAndroidData mergeData( final SerializedAndroidData primary, final Path primaryManifest, final List<? 
extends SerializedAndroidData> direct, final List<? extends SerializedAndroidData> transitive, final Path resourcesOut, final Path assetsOut, @Nullable final PngCruncher cruncher, final VariantType type, @Nullable final Path symbolsOut, @Nullable final AndroidResourceClassWriter rclassWriter, boolean throwOnResourceConflict) { final ParsedAndroidData.Builder primaryBuilder = ParsedAndroidData.Builder.newBuilder(); final AndroidDataDeserializer deserializer = AndroidDataDeserializer.create(); primary.deserialize(deserializer, primaryBuilder.consumers()); ParsedAndroidData primaryData = primaryBuilder.build(); return mergeData( primaryData, primaryManifest, direct, transitive, resourcesOut, assetsOut, cruncher, type, symbolsOut, rclassWriter, deserializer, throwOnResourceConflict); } }
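/*
 * Illustrative sketch (not part of the Bazel sources above): mergeData wraps its
 * ListeningExecutorService in a Closeable via ExecutorServiceCloser.createWith so that
 * try-with-resources tears the thread pool down even when merging throws. The actual
 * ExecutorServiceCloser is not shown in this file; the minimal stand-in below is a
 * hypothetical equivalent built only from standard Guava and JDK calls.
 */
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.Closeable;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

final class ClosingExecutorSketch {

    /** Hypothetical stand-in for ExecutorServiceCloser: shuts the pool down on close(). */
    static Closeable closerFor(ListeningExecutorService executorService) {
        return () -> MoreExecutors.shutdownAndAwaitTermination(executorService, 5, TimeUnit.SECONDS);
    }

    public static void main(String[] args) throws Exception {
        ListeningExecutorService executorService =
            MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(4));
        // The pool is shut down when the try block exits, mirroring the
        // try (Closeable closeable = ExecutorServiceCloser.createWith(...)) pattern above.
        try (Closeable ignored = closerFor(executorService)) {
            executorService.submit(() -> System.out.println("merge work runs on the pool"));
        }
    }
}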
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * CampaignFeed.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.adwords.axis.v201809.cm; /** * CampaignFeeds are used to link a feed to a campaign using a matching * function, * making the feed's feed items available in the campaign's * ads for substitution. */ public class CampaignFeed implements java.io.Serializable { /* Id of the Feed associated with the CampaignFeed. * <span class="constraint Selectable">This field can * be selected using the value "FeedId".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint Required">This field is required * and should not be {@code null}.</span> */ private java.lang.Long feedId; /* Id of the Campaign associated with the CampaignFeed. * <span class="constraint Selectable">This field can * be selected using the value "CampaignId".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint Required">This field is required * and should not be {@code null}.</span> */ private java.lang.Long campaignId; /* Matching function associated with the CampaignFeed. * The matching function will return true/false indicating * which feed items may serve. * <span class="constraint Selectable">This field can * be selected using the value "MatchingFunction".</span> * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD.</span> */ private com.google.api.ads.adwords.axis.v201809.cm.Function matchingFunction; /* Indicates which <a href="/adwords/api/docs/appendix/placeholders"> * placeholder types</a> the feed may populate under the * connected Campaign. * <span class="constraint Selectable">This field can * be selected using the value "PlaceholderTypes".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD.</span> */ private int[] placeholderTypes; /* Status of the CampaignFeed. * <span class="constraint Selectable">This field can * be selected using the value "Status".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint ReadOnly">This field is read * only and will be ignored when sent to the API.</span> */ private com.google.api.ads.adwords.axis.v201809.cm.CampaignFeedStatus status; /* ID of the base campaign from which this draft/trial feed was * created. * This field is only returned on get requests. 
* <span class="constraint Selectable">This field can * be selected using the value "BaseCampaignId".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint ReadOnly">This field is read * only and will be ignored when sent to the API.</span> */ private java.lang.Long baseCampaignId; public CampaignFeed() { } public CampaignFeed( java.lang.Long feedId, java.lang.Long campaignId, com.google.api.ads.adwords.axis.v201809.cm.Function matchingFunction, int[] placeholderTypes, com.google.api.ads.adwords.axis.v201809.cm.CampaignFeedStatus status, java.lang.Long baseCampaignId) { this.feedId = feedId; this.campaignId = campaignId; this.matchingFunction = matchingFunction; this.placeholderTypes = placeholderTypes; this.status = status; this.baseCampaignId = baseCampaignId; } @Override public String toString() { return com.google.common.base.MoreObjects.toStringHelper(this.getClass()) .omitNullValues() .add("baseCampaignId", getBaseCampaignId()) .add("campaignId", getCampaignId()) .add("feedId", getFeedId()) .add("matchingFunction", getMatchingFunction()) .add("placeholderTypes", getPlaceholderTypes()) .add("status", getStatus()) .toString(); } /** * Gets the feedId value for this CampaignFeed. * * @return feedId * Id of the Feed associated with the CampaignFeed. * <span class="constraint Selectable">This field can * be selected using the value "FeedId".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint Required">This field is required * and should not be {@code null}.</span> */ public java.lang.Long getFeedId() { return feedId; } /** * Sets the feedId value for this CampaignFeed. * * @param feedId * Id of the Feed associated with the CampaignFeed. * <span class="constraint Selectable">This field can * be selected using the value "FeedId".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint Required">This field is required * and should not be {@code null}.</span> */ public void setFeedId(java.lang.Long feedId) { this.feedId = feedId; } /** * Gets the campaignId value for this CampaignFeed. * * @return campaignId * Id of the Campaign associated with the CampaignFeed. * <span class="constraint Selectable">This field can * be selected using the value "CampaignId".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint Required">This field is required * and should not be {@code null}.</span> */ public java.lang.Long getCampaignId() { return campaignId; } /** * Sets the campaignId value for this CampaignFeed. * * @param campaignId * Id of the Campaign associated with the CampaignFeed. * <span class="constraint Selectable">This field can * be selected using the value "CampaignId".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint Required">This field is required * and should not be {@code null}.</span> */ public void setCampaignId(java.lang.Long campaignId) { this.campaignId = campaignId; } /** * Gets the matchingFunction value for this CampaignFeed. * * @return matchingFunction * Matching function associated with the CampaignFeed. * The matching function will return true/false indicating * which feed items may serve. 
* <span class="constraint Selectable">This field can * be selected using the value "MatchingFunction".</span> * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD.</span> */ public com.google.api.ads.adwords.axis.v201809.cm.Function getMatchingFunction() { return matchingFunction; } /** * Sets the matchingFunction value for this CampaignFeed. * * @param matchingFunction * Matching function associated with the CampaignFeed. * The matching function will return true/false indicating * which feed items may serve. * <span class="constraint Selectable">This field can * be selected using the value "MatchingFunction".</span> * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD.</span> */ public void setMatchingFunction(com.google.api.ads.adwords.axis.v201809.cm.Function matchingFunction) { this.matchingFunction = matchingFunction; } /** * Gets the placeholderTypes value for this CampaignFeed. * * @return placeholderTypes * Indicates which <a href="/adwords/api/docs/appendix/placeholders"> * placeholder types</a> the feed may populate under the * connected Campaign. * <span class="constraint Selectable">This field can * be selected using the value "PlaceholderTypes".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD.</span> */ public int[] getPlaceholderTypes() { return placeholderTypes; } /** * Sets the placeholderTypes value for this CampaignFeed. * * @param placeholderTypes * Indicates which <a href="/adwords/api/docs/appendix/placeholders"> * placeholder types</a> the feed may populate under the * connected Campaign. * <span class="constraint Selectable">This field can * be selected using the value "PlaceholderTypes".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint Required">This field is required * and should not be {@code null} when it is contained within {@link * Operator}s : ADD.</span> */ public void setPlaceholderTypes(int[] placeholderTypes) { this.placeholderTypes = placeholderTypes; } public int getPlaceholderTypes(int i) { return this.placeholderTypes[i]; } public void setPlaceholderTypes(int i, int _value) { this.placeholderTypes[i] = _value; } /** * Gets the status value for this CampaignFeed. * * @return status * Status of the CampaignFeed. * <span class="constraint Selectable">This field can * be selected using the value "Status".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint ReadOnly">This field is read * only and will be ignored when sent to the API.</span> */ public com.google.api.ads.adwords.axis.v201809.cm.CampaignFeedStatus getStatus() { return status; } /** * Sets the status value for this CampaignFeed. * * @param status * Status of the CampaignFeed. 
* <span class="constraint Selectable">This field can * be selected using the value "Status".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint ReadOnly">This field is read * only and will be ignored when sent to the API.</span> */ public void setStatus(com.google.api.ads.adwords.axis.v201809.cm.CampaignFeedStatus status) { this.status = status; } /** * Gets the baseCampaignId value for this CampaignFeed. * * @return baseCampaignId * ID of the base campaign from which this draft/trial feed was * created. * This field is only returned on get requests. * <span class="constraint Selectable">This field can * be selected using the value "BaseCampaignId".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint ReadOnly">This field is read * only and will be ignored when sent to the API.</span> */ public java.lang.Long getBaseCampaignId() { return baseCampaignId; } /** * Sets the baseCampaignId value for this CampaignFeed. * * @param baseCampaignId * ID of the base campaign from which this draft/trial feed was * created. * This field is only returned on get requests. * <span class="constraint Selectable">This field can * be selected using the value "BaseCampaignId".</span><span class="constraint * Filterable">This field can be filtered on.</span> * <span class="constraint ReadOnly">This field is read * only and will be ignored when sent to the API.</span> */ public void setBaseCampaignId(java.lang.Long baseCampaignId) { this.baseCampaignId = baseCampaignId; } private java.lang.Object __equalsCalc = null; public synchronized boolean equals(java.lang.Object obj) { if (!(obj instanceof CampaignFeed)) return false; CampaignFeed other = (CampaignFeed) obj; if (obj == null) return false; if (this == obj) return true; if (__equalsCalc != null) { return (__equalsCalc == obj); } __equalsCalc = obj; boolean _equals; _equals = true && ((this.feedId==null && other.getFeedId()==null) || (this.feedId!=null && this.feedId.equals(other.getFeedId()))) && ((this.campaignId==null && other.getCampaignId()==null) || (this.campaignId!=null && this.campaignId.equals(other.getCampaignId()))) && ((this.matchingFunction==null && other.getMatchingFunction()==null) || (this.matchingFunction!=null && this.matchingFunction.equals(other.getMatchingFunction()))) && ((this.placeholderTypes==null && other.getPlaceholderTypes()==null) || (this.placeholderTypes!=null && java.util.Arrays.equals(this.placeholderTypes, other.getPlaceholderTypes()))) && ((this.status==null && other.getStatus()==null) || (this.status!=null && this.status.equals(other.getStatus()))) && ((this.baseCampaignId==null && other.getBaseCampaignId()==null) || (this.baseCampaignId!=null && this.baseCampaignId.equals(other.getBaseCampaignId()))); __equalsCalc = null; return _equals; } private boolean __hashCodeCalc = false; public synchronized int hashCode() { if (__hashCodeCalc) { return 0; } __hashCodeCalc = true; int _hashCode = 1; if (getFeedId() != null) { _hashCode += getFeedId().hashCode(); } if (getCampaignId() != null) { _hashCode += getCampaignId().hashCode(); } if (getMatchingFunction() != null) { _hashCode += getMatchingFunction().hashCode(); } if (getPlaceholderTypes() != null) { for (int i=0; i<java.lang.reflect.Array.getLength(getPlaceholderTypes()); i++) { java.lang.Object obj = java.lang.reflect.Array.get(getPlaceholderTypes(), i); if (obj != null && !obj.getClass().isArray()) { _hashCode += obj.hashCode(); } } } if (getStatus() != null) { 
_hashCode += getStatus().hashCode(); } if (getBaseCampaignId() != null) { _hashCode += getBaseCampaignId().hashCode(); } __hashCodeCalc = false; return _hashCode; } // Type metadata private static org.apache.axis.description.TypeDesc typeDesc = new org.apache.axis.description.TypeDesc(CampaignFeed.class, true); static { typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CampaignFeed")); org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("feedId"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "feedId")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("campaignId"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "campaignId")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("matchingFunction"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "matchingFunction")); elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Function")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("placeholderTypes"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "placeholderTypes")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int")); elemField.setMinOccurs(0); elemField.setNillable(false); elemField.setMaxOccursUnbounded(true); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("status"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "status")); elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CampaignFeed.Status")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("baseCampaignId"); elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "baseCampaignId")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); } /** * Return type metadata object */ public static org.apache.axis.description.TypeDesc getTypeDesc() { return typeDesc; } /** * Get Custom Serializer */ public static org.apache.axis.encoding.Serializer getSerializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanSerializer( _javaType, _xmlType, typeDesc); } /** * Get Custom Deserializer */ public static org.apache.axis.encoding.Deserializer getDeserializer( java.lang.String mechType, 
        java.lang.Class _javaType,
        javax.xml.namespace.QName _xmlType) {
        return new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
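/*
 * Illustrative sketch (not part of the generated AdWords sources above): the Axis-generated
 * equals and hashCode guard against cyclic object graphs with the __equalsCalc and
 * __hashCodeCalc fields, returning early when a computation re-enters the same object.
 * The hypothetical bean below shows that guard on hashCode for a type that may legally
 * reference itself; equals would use the analogous __equalsCalc check.
 */
final class CycleGuardedBeanSketch {
    private Long id;
    private CycleGuardedBeanSketch linked; // may point back to this instance

    private boolean __hashCodeCalc = false;

    @Override
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            // Re-entered while already computing: break the cycle instead of recursing.
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (id != null) {
            _hashCode += id.hashCode();
        }
        if (linked != null) {
            _hashCode += linked.hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    public static void main(String[] args) {
        CycleGuardedBeanSketch bean = new CycleGuardedBeanSketch();
        bean.id = 42L;
        bean.linked = bean; // a self-reference would overflow the stack without the guard
        System.out.println(bean.hashCode()); // terminates and prints 43 (1 + 42 + 0)
    }
}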
/* * Copyright LWJGL. All rights reserved. * License terms: https://www.lwjgl.org/license * MACHINE GENERATED FILE, DO NOT EDIT */ package org.lwjgl.opengl; import java.nio.*; import org.lwjgl.system.*; import static org.lwjgl.system.Checks.*; import static org.lwjgl.system.JNI.*; import static org.lwjgl.system.MemoryStack.*; import static org.lwjgl.system.MemoryUtil.*; /** * Native bindings to the <a href="http://www.opengl.org/registry/specs/ARB/viewport_array.txt">ARB_viewport_array</a> extension. * * <p>OpenGL is modeled on a pipeline of operations. The final stage in this pipeline before rasterization is the viewport transformation. This stage * transforms vertices from view space into window coordinates and allows the application to specify a rectangular region of screen space into which OpenGL * should draw primitives. Unextended OpenGL implementations provide a single viewport per context. In order to draw primitives into multiple viewports, * the OpenGL viewport may be changed between several draw calls. With the advent of Geometry Shaders, it has become possible for an application to amplify * geometry and produce multiple output primitives for each primitive input to the Geometry Shader. It is possible to direct these primitives to render * into a selected render target. However, all render targets share the same, global OpenGL viewport.</p> * * <p>This extension enhances OpenGL by providing a mechanism to expose multiple viewports. Each viewport is specified as a rectangle. The destination * viewport may be selected per-primitive by the geometry shader. This allows the Geometry Shader to produce different versions of primitives destined for * separate viewport rectangles on the same surface. Additionally, when combined with multiple framebuffer attachments, it allows a different viewport * rectangle to be selected for each. This extension also exposes a separate scissor rectangle for each viewport. Finally, the viewport bounds are now * floating point quantities allowing fractional pixel offsets to be applied during the viewport transform.</p> * * <p>Requires {@link GL32 OpenGL 3.2} or <a href="http://www.opengl.org/registry/specs/EXT/geometry_shader4.txt">EXT_geometry_shader4</a> or {@link ARBGeometryShader4 ARB_geometry_shader4}. Promoted to core in {@link GL41 OpenGL 4.1}.</p> */ public class ARBViewportArray { /** Accepted by the {@code pname} parameter of GetBooleanv, GetIntegerv, GetFloatv, GetDoublev and GetInteger64v. */ public static final int GL_MAX_VIEWPORTS = 0x825B, GL_VIEWPORT_SUBPIXEL_BITS = 0x825C, GL_VIEWPORT_BOUNDS_RANGE = 0x825D, GL_LAYER_PROVOKING_VERTEX = 0x825E, GL_VIEWPORT_INDEX_PROVOKING_VERTEX = 0x825F; /** Returned in the {@code data} parameter from a Get query with a {@code pname} of LAYER_PROVOKING_VERTEX or VIEWPORT_INDEX_PROVOKING_VERTEX. 
*/ public static final int GL_UNDEFINED_VERTEX = 0x8260; protected ARBViewportArray() { throw new UnsupportedOperationException(); } static boolean isAvailable(GLCapabilities caps) { return checkFunctions( caps.glViewportArrayv, caps.glViewportIndexedf, caps.glViewportIndexedfv, caps.glScissorArrayv, caps.glScissorIndexed, caps.glScissorIndexedv, caps.glDepthRangeArrayv, caps.glDepthRangeIndexed, caps.glGetFloati_v, caps.glGetDoublei_v ); } // --- [ glViewportArrayv ] --- /** * Unsafe version of: {@link #glViewportArrayv ViewportArrayv} * * @param count the number of viewports to set */ public static void nglViewportArrayv(int first, int count, long v) { long __functionAddress = GL.getCapabilities().glViewportArrayv; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, first, count, v); } /** * Sets multiple viewports. * * @param first the first viewport to set * @param v an array containing the viewport parameters */ public static void glViewportArrayv(int first, FloatBuffer v) { nglViewportArrayv(first, v.remaining() >> 2, memAddress(v)); } // --- [ glViewportIndexedf ] --- /** * Sets a specified viewport. * * @param index the viewport to set * @param x the left viewport coordinate * @param y the bottom viewport coordinate * @param w the viewport width * @param h the viewport height */ public static void glViewportIndexedf(int index, float x, float y, float w, float h) { long __functionAddress = GL.getCapabilities().glViewportIndexedf; if ( CHECKS ) checkFunctionAddress(__functionAddress); callV(__functionAddress, index, x, y, w, h); } // --- [ glViewportIndexedfv ] --- /** Unsafe version of: {@link #glViewportIndexedfv ViewportIndexedfv} */ public static void nglViewportIndexedfv(int index, long v) { long __functionAddress = GL.getCapabilities().glViewportIndexedfv; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, index, v); } /** * Pointer version of {@link #glViewportIndexedf ViewportIndexedf}. * * @param index the viewport to set * @param v the viewport parameters */ public static void glViewportIndexedfv(int index, FloatBuffer v) { if ( CHECKS ) checkBuffer(v, 4); nglViewportIndexedfv(index, memAddress(v)); } // --- [ glScissorArrayv ] --- /** * Unsafe version of: {@link #glScissorArrayv ScissorArrayv} * * @param count the number of scissor boxes to modify */ public static void nglScissorArrayv(int first, int count, long v) { long __functionAddress = GL.getCapabilities().glScissorArrayv; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, first, count, v); } /** * Defines the scissor box for multiple viewports. * * @param first the index of the first viewport whose scissor box to modify * @param v an array containing the left, bottom, width and height of each scissor box, in that order */ public static void glScissorArrayv(int first, IntBuffer v) { nglScissorArrayv(first, v.remaining() >> 2, memAddress(v)); } // --- [ glScissorIndexed ] --- /** * Defines the scissor box for a specific viewport. 
* * @param index the index of the viewport whose scissor box to modify * @param left the left scissor box coordinate * @param bottom the bottom scissor box coordinate * @param width the scissor box width * @param height the scissor box height */ public static void glScissorIndexed(int index, int left, int bottom, int width, int height) { long __functionAddress = GL.getCapabilities().glScissorIndexed; if ( CHECKS ) checkFunctionAddress(__functionAddress); callV(__functionAddress, index, left, bottom, width, height); } // --- [ glScissorIndexedv ] --- /** Unsafe version of: {@link #glScissorIndexedv ScissorIndexedv} */ public static void nglScissorIndexedv(int index, long v) { long __functionAddress = GL.getCapabilities().glScissorIndexedv; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, index, v); } /** * Pointer version of {@link #glScissorIndexed ScissorIndexed}. * * @param index the index of the viewport whose scissor box to modify * @param v an array containing the left, bottom, width and height of each scissor box, in that order */ public static void glScissorIndexedv(int index, IntBuffer v) { if ( CHECKS ) checkBuffer(v, 4); nglScissorIndexedv(index, memAddress(v)); } // --- [ glDepthRangeArrayv ] --- /** * Unsafe version of: {@link #glDepthRangeArrayv DepthRangeArrayv} * * @param count the number of viewports whose depth range to update */ public static void nglDepthRangeArrayv(int first, int count, long v) { long __functionAddress = GL.getCapabilities().glDepthRangeArrayv; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, first, count, v); } /** * Specifies mapping of depth values from normalized device coordinates to window coordinates for a specified set of viewports. * * @param first the index of the first viewport whose depth range to update * @param v n array containing the near and far values for the depth range of each modified viewport */ public static void glDepthRangeArrayv(int first, DoubleBuffer v) { nglDepthRangeArrayv(first, v.remaining() >> 1, memAddress(v)); } // --- [ glDepthRangeIndexed ] --- /** * Specifies mapping of depth values from normalized device coordinates to window coordinates for a specified viewport. * * @param index the index of the viewport whose depth range to update * @param zNear the mapping of the near clipping plane to window coordinates. The initial value is 0. * @param zFar the mapping of the far clipping plane to window coordinates. The initial value is 1. */ public static void glDepthRangeIndexed(int index, double zNear, double zFar) { long __functionAddress = GL.getCapabilities().glDepthRangeIndexed; if ( CHECKS ) checkFunctionAddress(__functionAddress); callV(__functionAddress, index, zNear, zFar); } // --- [ glGetFloati_v ] --- /** Unsafe version of: {@link #glGetFloati_v GetFloati_v} */ public static void nglGetFloati_v(int target, int index, long data) { long __functionAddress = GL.getCapabilities().glGetFloati_v; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, target, index, data); } /** * Queries the float value of an indexed state variable. * * @param target the indexed state to query * @param index the index of the element being queried * @param data a scalar or buffer in which to place the returned data */ public static void glGetFloati_v(int target, int index, FloatBuffer data) { if ( CHECKS ) checkBuffer(data, 1); nglGetFloati_v(target, index, memAddress(data)); } /** * Queries the float value of an indexed state variable. 
* * @param target the indexed state to query * @param index the index of the element being queried */ public static float glGetFloati(int target, int index) { MemoryStack stack = stackGet(); int stackPointer = stack.getPointer(); try { FloatBuffer data = stack.callocFloat(1); nglGetFloati_v(target, index, memAddress(data)); return data.get(0); } finally { stack.setPointer(stackPointer); } } // --- [ glGetDoublei_v ] --- /** Unsafe version of: {@link #glGetDoublei_v GetDoublei_v} */ public static void nglGetDoublei_v(int target, int index, long data) { long __functionAddress = GL.getCapabilities().glGetDoublei_v; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, target, index, data); } /** * Queries the double value of an indexed state variable. * * @param target the indexed state to query * @param index the index of the element being queried * @param data a scalar or buffer in which to place the returned data */ public static void glGetDoublei_v(int target, int index, DoubleBuffer data) { if ( CHECKS ) checkBuffer(data, 1); nglGetDoublei_v(target, index, memAddress(data)); } /** * Queries the double value of an indexed state variable. * * @param target the indexed state to query * @param index the index of the element being queried */ public static double glGetDoublei(int target, int index) { MemoryStack stack = stackGet(); int stackPointer = stack.getPointer(); try { DoubleBuffer data = stack.callocDouble(1); nglGetDoublei_v(target, index, memAddress(data)); return data.get(0); } finally { stack.setPointer(stackPointer); } } /** Array version of: {@link #glViewportArrayv ViewportArrayv} */ public static void glViewportArrayv(int first, float[] v) { long __functionAddress = GL.getCapabilities().glViewportArrayv; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, first, v.length >> 2, v); } /** Array version of: {@link #glViewportIndexedfv ViewportIndexedfv} */ public static void glViewportIndexedfv(int index, float[] v) { long __functionAddress = GL.getCapabilities().glViewportIndexedfv; if ( CHECKS ) { checkFunctionAddress(__functionAddress); checkBuffer(v, 4); } callPV(__functionAddress, index, v); } /** Array version of: {@link #glScissorArrayv ScissorArrayv} */ public static void glScissorArrayv(int first, int[] v) { long __functionAddress = GL.getCapabilities().glScissorArrayv; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, first, v.length >> 2, v); } /** Array version of: {@link #glScissorIndexedv ScissorIndexedv} */ public static void glScissorIndexedv(int index, int[] v) { long __functionAddress = GL.getCapabilities().glScissorIndexedv; if ( CHECKS ) { checkFunctionAddress(__functionAddress); checkBuffer(v, 4); } callPV(__functionAddress, index, v); } /** Array version of: {@link #glDepthRangeArrayv DepthRangeArrayv} */ public static void glDepthRangeArrayv(int first, double[] v) { long __functionAddress = GL.getCapabilities().glDepthRangeArrayv; if ( CHECKS ) checkFunctionAddress(__functionAddress); callPV(__functionAddress, first, v.length >> 1, v); } /** Array version of: {@link #glGetFloati_v GetFloati_v} */ public static void glGetFloati_v(int target, int index, float[] data) { long __functionAddress = GL.getCapabilities().glGetFloati_v; if ( CHECKS ) { checkFunctionAddress(__functionAddress); checkBuffer(data, 1); } callPV(__functionAddress, target, index, data); } /** Array version of: {@link #glGetDoublei_v GetDoublei_v} */ public static void glGetDoublei_v(int target, int index, 
        double[] data) {
        long __functionAddress = GL.getCapabilities().glGetDoublei_v;
        if ( CHECKS ) {
            checkFunctionAddress(__functionAddress);
            checkBuffer(data, 1);
        }
        callPV(__functionAddress, target, index, data);
    }

}
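/*
 * Illustrative usage sketch (not part of the generated LWJGL binding above): packing two
 * viewport rectangles into a single FloatBuffer and handing them to glViewportArrayv,
 * which derives the viewport count from v.remaining() >> 2. This assumes a current
 * LWJGL 3 OpenGL context whose GLCapabilities report ARB_viewport_array; the helper
 * name and window dimensions are made up for the example.
 */
import java.nio.FloatBuffer;
import org.lwjgl.opengl.ARBViewportArray;
import org.lwjgl.opengl.GL;
import org.lwjgl.system.MemoryStack;

final class ViewportArraySketch {

    /** Splits a window into a left and a right viewport, starting at viewport index 0. */
    static void setSideBySideViewports(int windowWidth, int windowHeight) {
        if (!GL.getCapabilities().GL_ARB_viewport_array) {
            return; // extension not available on this context
        }
        float halfWidth = windowWidth / 2.0f;
        float height = windowHeight;
        try (MemoryStack stack = MemoryStack.stackPush()) {
            // Four floats per viewport: x, y, width, height.
            FloatBuffer viewports = stack.mallocFloat(2 * 4);
            viewports.put(new float[] {
                0.0f,      0.0f, halfWidth, height,   // viewport 0: left half
                halfWidth, 0.0f, halfWidth, height    // viewport 1: right half
            });
            viewports.flip();
            ARBViewportArray.glViewportArrayv(0, viewports);
        }
    }
}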
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.service; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.UnmodifiableIterator; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchIllegalStateException; import org.elasticsearch.ElasticSearchInterruptedException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.*; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.*; import org.elasticsearch.index.aliases.IndexAliasesService; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.filter.ShardFilterCacheModule; import org.elasticsearch.index.cache.id.ShardIdCacheModule; import org.elasticsearch.index.deletionpolicy.DeletionPolicyModule; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineModule; import org.elasticsearch.index.engine.IndexEngine; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.ShardFieldDataModule; import org.elasticsearch.index.gateway.IndexGateway; import org.elasticsearch.index.gateway.IndexShardGatewayModule; import org.elasticsearch.index.gateway.IndexShardGatewayService; import org.elasticsearch.index.get.ShardGetModule; import org.elasticsearch.index.indexing.ShardIndexingModule; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.merge.policy.MergePolicyModule; import org.elasticsearch.index.merge.policy.MergePolicyProvider; import org.elasticsearch.index.merge.scheduler.MergeSchedulerModule; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; import org.elasticsearch.index.percolator.PercolatorShardModule; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.search.stats.ShardSearchModule; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettingsService; import org.elasticsearch.index.shard.IndexShardCreationException; import org.elasticsearch.index.shard.IndexShardModule; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.service.IndexShard; import org.elasticsearch.index.shard.service.InternalIndexShard; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreModule; import org.elasticsearch.index.termvectors.ShardTermVectorModule; import 
org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogModule; import org.elasticsearch.index.translog.TranslogService; import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.indices.InternalIndicesLifecycle; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ShardsPluginsModule; import org.elasticsearch.threadpool.ThreadPool; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import static com.google.common.collect.Maps.newHashMap; import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; /** * */ public class InternalIndexService extends AbstractIndexComponent implements IndexService { private final Injector injector; private final Settings indexSettings; private final ThreadPool threadPool; private final PluginsService pluginsService; private final InternalIndicesLifecycle indicesLifecycle; private final AnalysisService analysisService; private final MapperService mapperService; private final IndexQueryParserService queryParserService; private final SimilarityService similarityService; private final IndexAliasesService aliasesService; private final IndexCache indexCache; private final IndexFieldDataService indexFieldData; private final IndexEngine indexEngine; private final IndexGateway indexGateway; private final IndexStore indexStore; private final IndexSettingsService settingsService; private volatile ImmutableMap<Integer, Injector> shardsInjectors = ImmutableMap.of(); private volatile ImmutableMap<Integer, IndexShard> shards = ImmutableMap.of(); private volatile boolean closed = false; @Inject public InternalIndexService(Injector injector, Index index, @IndexSettings Settings indexSettings, NodeEnvironment nodeEnv, ThreadPool threadPool, AnalysisService analysisService, MapperService mapperService, IndexQueryParserService queryParserService, SimilarityService similarityService, IndexAliasesService aliasesService, IndexCache indexCache, IndexEngine indexEngine, IndexGateway indexGateway, IndexStore indexStore, IndexSettingsService settingsService, IndexFieldDataService indexFieldData) { super(index, indexSettings); this.injector = injector; this.threadPool = threadPool; this.indexSettings = indexSettings; this.analysisService = analysisService; this.mapperService = mapperService; this.queryParserService = queryParserService; this.similarityService = similarityService; this.aliasesService = aliasesService; this.indexCache = indexCache; this.indexFieldData = indexFieldData; this.indexEngine = indexEngine; this.indexGateway = indexGateway; this.indexStore = indexStore; this.settingsService = settingsService; this.pluginsService = injector.getInstance(PluginsService.class); this.indicesLifecycle = (InternalIndicesLifecycle) injector.getInstance(IndicesLifecycle.class); // inject workarounds for cyclic dep indexCache.filter().setIndexService(this); indexCache.idCache().setIndexService(this); indexFieldData.setIndexService(this); } @Override public int numberOfShards() { return shards.size(); } @Override public UnmodifiableIterator<IndexShard> iterator() { return shards.values().iterator(); } @Override public boolean hasShard(int shardId) { return shards.containsKey(shardId); } @Override public IndexShard shard(int shardId) { return shards.get(shardId); } @Override public IndexShard shardSafe(int shardId) throws IndexShardMissingException { IndexShard indexShard = shard(shardId); if (indexShard == null) { throw new 
IndexShardMissingException(new ShardId(index, shardId)); } return indexShard; } @Override public ImmutableSet<Integer> shardIds() { return shards.keySet(); } @Override public Injector injector() { return injector; } @Override public IndexGateway gateway() { return indexGateway; } @Override public IndexSettingsService settingsService() { return this.settingsService; } @Override public IndexStore store() { return indexStore; } @Override public IndexCache cache() { return indexCache; } @Override public IndexFieldDataService fieldData() { return indexFieldData; } @Override public AnalysisService analysisService() { return this.analysisService; } @Override public MapperService mapperService() { return mapperService; } @Override public IndexQueryParserService queryParserService() { return queryParserService; } @Override public SimilarityService similarityService() { return similarityService; } @Override public IndexAliasesService aliasesService() { return aliasesService; } @Override public IndexEngine engine() { return indexEngine; } public void close(final String reason, @Nullable Executor executor) { synchronized (this) { closed = true; } Set<Integer> shardIds = shardIds(); final CountDownLatch latch = new CountDownLatch(shardIds.size()); for (final int shardId : shardIds) { executor = executor == null ? threadPool.generic() : executor; executor.execute(new Runnable() { @Override public void run() { try { removeShard(shardId, reason); } catch (Throwable e) { logger.warn("failed to close shard", e); } finally { latch.countDown(); } } }); } try { latch.await(); } catch (InterruptedException e) { throw new ElasticSearchInterruptedException("interrupted closing index [ " + index().name() + "]", e); } } @Override public Injector shardInjector(int shardId) throws ElasticSearchException { return shardsInjectors.get(shardId); } @Override public Injector shardInjectorSafe(int shardId) throws IndexShardMissingException { Injector shardInjector = shardInjector(shardId); if (shardInjector == null) { throw new IndexShardMissingException(new ShardId(index, shardId)); } return shardInjector; } @Override public String indexUUID() { return indexSettings.get(IndexMetaData.SETTING_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); } @Override public synchronized IndexShard createShard(int sShardId) throws ElasticSearchException { /* * TODO: we execute this in parallel but it's a synced method. Yet, we might * be able to serialize the execution via the cluster state in the future. for now we just * keep it synced. 
*/ if (closed) { throw new ElasticSearchIllegalStateException("Can't create shard [" + index.name() + "][" + sShardId + "], closed"); } ShardId shardId = new ShardId(index, sShardId); if (shardsInjectors.containsKey(shardId.id())) { throw new IndexShardAlreadyExistsException(shardId + " already exists"); } indicesLifecycle.beforeIndexShardCreated(shardId); logger.debug("creating shard_id [{}]", shardId.id()); ModulesBuilder modules = new ModulesBuilder(); modules.add(new ShardsPluginsModule(indexSettings, pluginsService)); modules.add(new IndexShardModule(indexSettings, shardId)); modules.add(new ShardIndexingModule()); modules.add(new ShardSearchModule()); modules.add(new ShardGetModule()); modules.add(new StoreModule(indexSettings, injector.getInstance(IndexStore.class))); modules.add(new DeletionPolicyModule(indexSettings)); modules.add(new MergePolicyModule(indexSettings)); modules.add(new MergeSchedulerModule(indexSettings)); modules.add(new ShardFilterCacheModule()); modules.add(new ShardFieldDataModule()); modules.add(new ShardIdCacheModule()); modules.add(new TranslogModule(indexSettings)); modules.add(new EngineModule(indexSettings)); modules.add(new IndexShardGatewayModule(injector.getInstance(IndexGateway.class))); modules.add(new PercolatorShardModule()); modules.add(new ShardTermVectorModule()); Injector shardInjector; try { shardInjector = modules.createChildInjector(injector); } catch (CreationException e) { throw new IndexShardCreationException(shardId, Injectors.getFirstErrorFailure(e)); } catch (Throwable e) { throw new IndexShardCreationException(shardId, e); } shardsInjectors = newMapBuilder(shardsInjectors).put(shardId.id(), shardInjector).immutableMap(); IndexShard indexShard = shardInjector.getInstance(IndexShard.class); indicesLifecycle.afterIndexShardCreated(indexShard); shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap(); return indexShard; } @Override public synchronized void removeShard(int shardId, String reason) throws ElasticSearchException { final Injector shardInjector; final IndexShard indexShard; final ShardId sId = new ShardId(index, shardId); Map<Integer, Injector> tmpShardInjectors = newHashMap(shardsInjectors); shardInjector = tmpShardInjectors.remove(shardId); if (shardInjector == null) { return; } shardsInjectors = ImmutableMap.copyOf(tmpShardInjectors); Map<Integer, IndexShard> tmpShardsMap = newHashMap(shards); indexShard = tmpShardsMap.remove(shardId); shards = ImmutableMap.copyOf(tmpShardsMap); indicesLifecycle.beforeIndexShardClosed(sId, indexShard); for (Class<? 
extends CloseableIndexComponent> closeable : pluginsService.shardServices()) { try { shardInjector.getInstance(closeable).close(); } catch (Throwable e) { logger.debug("failed to clean plugin shard service [{}]", e, closeable); } } try { // now we can close the translog service, we need to close it before the we close the shard shardInjector.getInstance(TranslogService.class).close(); } catch (Throwable e) { logger.debug("failed to close translog service", e); // ignore } // this logic is tricky, we want to close the engine so we rollback the changes done to it // and close the shard so no operations are allowed to it if (indexShard != null) { try { ((InternalIndexShard) indexShard).close(reason); } catch (Throwable e) { logger.debug("failed to close index shard", e); // ignore } } try { shardInjector.getInstance(Engine.class).close(); } catch (Throwable e) { logger.debug("failed to close engine", e); // ignore } try { shardInjector.getInstance(MergePolicyProvider.class).close(); } catch (Throwable e) { logger.debug("failed to close merge policy provider", e); // ignore } try { shardInjector.getInstance(IndexShardGatewayService.class).snapshotOnClose(); } catch (Throwable e) { logger.debug("failed to snapshot index shard gateway on close", e); // ignore } try { shardInjector.getInstance(IndexShardGatewayService.class).close(); } catch (Throwable e) { logger.debug("failed to close index shard gateway", e); // ignore } try { // now we can close the translog shardInjector.getInstance(Translog.class).close(); } catch (Throwable e) { logger.debug("failed to close translog", e); // ignore } try { // now we can close the translog shardInjector.getInstance(PercolatorQueriesRegistry.class).close(); } catch (Throwable e) { logger.debug("failed to close PercolatorQueriesRegistry", e); // ignore } // call this before we close the store, so we can release resources for it indicesLifecycle.afterIndexShardClosed(sId); // if we delete or have no gateway or the store is not persistent, clean the store... Store store = shardInjector.getInstance(Store.class); // and close it try { store.close(); } catch (Throwable e) { logger.warn("failed to close store on shard deletion", e); } Injectors.close(injector); } }
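/*
 * Illustrative usage sketch (not part of the original sources): drives the shard lifecycle
 * exposed by the IndexService interface that InternalIndexService above implements, using only
 * methods shown in that class. The import paths and the way `indexService` is obtained (normally
 * from the node's IndicesService) are assumptions for this Elasticsearch version.
 */
import org.elasticsearch.index.service.IndexService;   // assumed package for this ES version
import org.elasticsearch.index.shard.service.IndexShard; // assumed package for this ES version

class IndexServiceUsageSketch {
    static void example(IndexService indexService) {
        // create shard 0: builds the per-shard injector with engine, translog, store, gateway modules
        IndexShard shard = indexService.createShard(0);
        // look the shard up again; shardSafe throws IndexShardMissingException if it is gone
        if (indexService.hasShard(0)) {
            IndexShard same = indexService.shardSafe(0);
        }
        // tear the shard down: closes translog service, engine, gateway and store in order
        indexService.removeShard(0, "usage sketch shutdown");
    }
}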
package cz.metacentrum.perun.webgui.tabs.cabinettabs; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.safehtml.shared.SafeHtmlUtils; import com.google.gwt.user.cellview.client.CellTable; import com.google.gwt.user.client.ui.*; import cz.metacentrum.perun.webgui.client.PerunWebSession; import cz.metacentrum.perun.webgui.client.UiElements; import cz.metacentrum.perun.webgui.client.mainmenu.MainMenu; import cz.metacentrum.perun.webgui.client.resources.*; import cz.metacentrum.perun.webgui.json.GetEntityById; import cz.metacentrum.perun.webgui.json.JsonCallbackEvents; import cz.metacentrum.perun.webgui.json.JsonUtils; import cz.metacentrum.perun.webgui.json.cabinetManager.*; import cz.metacentrum.perun.webgui.model.Author; import cz.metacentrum.perun.webgui.model.Category; import cz.metacentrum.perun.webgui.model.Publication; import cz.metacentrum.perun.webgui.model.Thanks; import cz.metacentrum.perun.webgui.tabs.CabinetTabs; import cz.metacentrum.perun.webgui.tabs.TabItem; import cz.metacentrum.perun.webgui.tabs.TabItemWithUrl; import cz.metacentrum.perun.webgui.tabs.UrlMapper; import cz.metacentrum.perun.webgui.widgets.CustomButton; import cz.metacentrum.perun.webgui.widgets.ListBoxWithObjects; import cz.metacentrum.perun.webgui.widgets.TabMenu; import java.sql.Date; import java.util.ArrayList; import java.util.Arrays; import java.util.Map; /** * Tab which shows publication's details. * * @author Pavel Zlamal <[email protected]> */ public class PublicationDetailTabItem implements TabItem, TabItemWithUrl { /** * Perun web session */ private PerunWebSession session = PerunWebSession.getInstance(); /** * Content widget - should be simple panel */ private SimplePanel contentWidget = new SimplePanel(); /** * Title widget */ private Label titleWidget = new Label("Loading publication"); //data private Publication publication; private int publicationId; private boolean fromSelf = false; // accessed from perun admin by default /** * Creates a tab instance * * @param pub publication */ public PublicationDetailTabItem(Publication pub){ this.publication = pub; this.publicationId = pub.getId(); } /** * Creates a tab instance * @param pub publication * @param fromSelf TRUE if accessed from user section / FALSE otherwise */ public PublicationDetailTabItem(Publication pub, boolean fromSelf){ this.publication = pub; this.publicationId = pub.getId(); this.fromSelf = fromSelf; } /** * Creates a tab instance * @param publicationId publication * @param fromSelf TRUE if accessed from user section / FALSE otherwise */ public PublicationDetailTabItem(int publicationId, boolean fromSelf){ this.publicationId = publicationId; this.fromSelf = fromSelf; GetEntityById call = new GetEntityById(PerunEntity.PUBLICATION, publicationId, new JsonCallbackEvents(){ public void onFinished(JavaScriptObject jso){ publication = jso.cast(); } }); // do not use cache this time because of update publ. method !! 
call.retrieveData(); } public boolean isPrepared(){ return !(publication == null); } @Override public boolean isRefreshParentOnClose() { return false; } @Override public void onClose() { } public Widget draw() { // show only part of title titleWidget.setText(Utils.getStrippedStringWithEllipsis(publication.getTitle())); // MAIN PANEL ScrollPanel sp = new ScrollPanel(); sp.addStyleName("perun-tableScrollPanel"); VerticalPanel vp = new VerticalPanel(); vp.addStyleName("perun-table"); sp.add(vp); // resize perun table to correct size on screen session.getUiElements().resizePerunTable(sp, 350, this); // content final FlexTable ft = new FlexTable(); ft.setStyleName("inputFormFlexTable"); if (publication.getLocked() == false) { ft.setHTML(1, 0, "Id / Origin:"); ft.setHTML(2, 0, "Title:"); ft.setHTML(3, 0, "Year:"); ft.setHTML(4, 0, "Category:"); ft.setHTML(5, 0, "Rank:"); ft.setHTML(6, 0, "ISBN / ISSN:"); ft.setHTML(7, 0, "DOI:"); ft.setHTML(8, 0, "Full cite:"); ft.setHTML(9, 0, "Created by:"); ft.setHTML(10, 0, "Created date:"); for (int i=0; i<ft.getRowCount(); i++) { ft.getFlexCellFormatter().setStyleName(i, 0, "itemName"); } ft.getFlexCellFormatter().setWidth(1, 0, "100px"); final ListBoxWithObjects<Category> listbox = new ListBoxWithObjects<Category>(); // fill listbox JsonCallbackEvents events = new JsonCallbackEvents(){ public void onFinished(JavaScriptObject jso) { for (Category cat : JsonUtils.<Category>jsoAsList(jso)){ listbox.addItem(cat); // if right, selected if (publication.getCategoryId() == cat.getId()) { listbox.setSelected(cat, true); } } } }; GetCategories categories = new GetCategories(events); categories.retrieveData(); final TextBox rank = new TextBox(); rank.setWidth("30px"); rank.setMaxLength(4); rank.setText(String.valueOf(publication.getRank())); final TextBox title = new TextBox(); title.setMaxLength(1024); title.setText(publication.getTitle()); title.setWidth("500px"); final TextBox year = new TextBox(); year.setText(String.valueOf(publication.getYear())); year.setMaxLength(4); year.setWidth("30px"); final TextBox isbn = new TextBox(); isbn.setText(publication.getIsbn()); isbn.setMaxLength(32); final TextBox doi = new TextBox(); doi.setText(publication.getDoi()); doi.setMaxLength(256); final TextArea main = new TextArea(); main.setText(publication.getMain()); main.setSize("500px", "70px"); // set max length main.getElement().setAttribute("maxlength", "4000"); ft.setHTML(1, 1, publication.getId()+" / <Strong>Ext. Id: </strong>"+publication.getExternalId()+" <Strong>System: </strong>"+ SafeHtmlUtils.fromString(publication.getPublicationSystemName()).asString()); ft.setWidget(2, 1, title); ft.setWidget(3, 1, year); ft.setWidget(4, 1, listbox); if (session.isPerunAdmin()) { // only perunadmin can change rank ft.setWidget(5, 1, rank); } else { ft.setHTML(5, 1, SafeHtmlUtils.fromString(String.valueOf(publication.getRank()) +"").asString()); } ft.setWidget(6, 1, isbn); ft.setWidget(7, 1, doi); ft.setWidget(8, 1, main); ft.setHTML(9, 1, SafeHtmlUtils.fromString((publication.getCreatedBy() != null) ? publication.getCreatedBy() : "").asString()); ft.setHTML(10, 1, SafeHtmlUtils.fromString((String.valueOf(publication.getCreatedDate()) != null) ? 
String.valueOf(publication.getCreatedDate()) : "").asString()); // update button final CustomButton change = TabMenu.getPredefinedButton(ButtonType.SAVE, "Save changes in publication details"); change.addClickHandler(new ClickHandler() { public void onClick(ClickEvent event) { Publication pub = JsonUtils.clone(publication).cast(); if (!JsonUtils.checkParseInt(year.getText())){ JsonUtils.cantParseIntConfirm("YEAR", year.getText()); } else { pub.setYear(Integer.parseInt(year.getText())); } if (session.isPerunAdmin()) { pub.setRank(Double.parseDouble(rank.getText())); } pub.setCategoryId(listbox.getSelectedObject().getId()); pub.setTitle(title.getText()); pub.setMain(main.getText()); pub.setIsbn(isbn.getText()); pub.setDoi(doi.getText()); UpdatePublication upCall = new UpdatePublication(JsonCallbackEvents.disableButtonEvents(change, new JsonCallbackEvents(){ public void onFinished(JavaScriptObject jso) { // refresh page content Publication p = jso.cast(); publication = p; draw(); } })); upCall.updatePublication(pub); } }); ft.setWidget(0, 0, change); } else { ft.getFlexCellFormatter().setColSpan(0, 0, 2); ft.setWidget(0, 0, new HTML(new Image(SmallIcons.INSTANCE.lockIcon())+" <strong>Publication is locked. Ask administrator to perform any changes for you at [email protected].</strong>")); ft.setHTML(1, 0, "Id / Origin:"); ft.setHTML(2, 0, "Title:"); ft.setHTML(3, 0, "Year:"); ft.setHTML(4, 0, "Category:"); ft.setHTML(5, 0, "Rank:"); ft.setHTML(6, 0, "ISBN / ISSN:"); ft.setHTML(7, 0, "DOI:"); ft.setHTML(8, 0, "Full cite:"); ft.setHTML(9, 0, "Created by:"); ft.setHTML(10, 0, "Created date:"); for (int i=0; i<ft.getRowCount(); i++) { ft.getFlexCellFormatter().setStyleName(i, 0, "itemName"); } ft.getFlexCellFormatter().setWidth(1, 0, "100px"); ft.setHTML(1, 1, publication.getId()+" / <Strong>Ext. Id: </strong>"+publication.getExternalId()+" <Strong>System: </strong>"+SafeHtmlUtils.fromString(publication.getPublicationSystemName()).asString()); ft.setHTML(2, 1, SafeHtmlUtils.fromString((publication.getTitle() != null) ? publication.getTitle() : "").asString()); ft.setHTML(3, 1, SafeHtmlUtils.fromString((String.valueOf(publication.getYear()) != null) ? String.valueOf(publication.getYear()) : "").asString()); ft.setHTML(4, 1, SafeHtmlUtils.fromString((publication.getCategoryName() != null) ? publication.getCategoryName() : "").asString()); ft.setHTML(5, 1, SafeHtmlUtils.fromString(String.valueOf(publication.getRank()) + " (default is 0)").asString()); ft.setHTML(6, 1, SafeHtmlUtils.fromString((publication.getIsbn() != null) ? publication.getIsbn() : "").asString()); ft.setHTML(7, 1, SafeHtmlUtils.fromString((publication.getDoi() != null) ? publication.getDoi() : "").asString()); ft.setHTML(8, 1, SafeHtmlUtils.fromString((publication.getMain() != null) ? publication.getMain() : "").asString()); ft.setHTML(9, 1, SafeHtmlUtils.fromString((publication.getCreatedBy() != null) ? publication.getCreatedBy() : "").asString()); ft.setHTML(10, 1, SafeHtmlUtils.fromString((String.valueOf(publication.getCreatedDate()) != null) ? 
String.valueOf(publication.getCreatedDate()) : "").asString()); } // LOCK / UNLOCK button for PerunAdmin if (session.isPerunAdmin()) { final CustomButton lock; if (publication.getLocked()) { lock = new CustomButton("Unlock", "Allow editing of publication details (for users).", SmallIcons.INSTANCE.lockOpenIcon()); ft.setWidget(0, 0, lock); ft.getFlexCellFormatter().setColSpan(0, 0, 1); ft.setWidget(0, 1, new HTML(new Image(SmallIcons.INSTANCE.lockIcon())+" Publication is locked.")); } else { lock = new CustomButton("Lock", "Deny editing of publication details (for users).", SmallIcons.INSTANCE.lockIcon()); ft.setWidget(0, 1, lock); } lock.addClickHandler(new ClickHandler(){ public void onClick(ClickEvent event) { LockUnlockPublications upCall = new LockUnlockPublications(JsonCallbackEvents.disableButtonEvents(lock, new JsonCallbackEvents(){ public void onFinished(JavaScriptObject jso) { // refresh page content publication.setLocked(!publication.getLocked()); draw(); } })); Publication p = JsonUtils.clone(publication).cast(); upCall.lockUnlockPublication(!publication.getLocked(), p); } }); } DisclosurePanel dp = new DisclosurePanel(); dp.setWidth("100%"); dp.setContent(ft); dp.setOpen(true); FlexTable detailsHeader = new FlexTable(); detailsHeader.setWidget(0, 0, new Image(LargeIcons.INSTANCE.bookIcon())); detailsHeader.setHTML(0, 1, "<h3>Details</h3>"); dp.setHeader(detailsHeader); vp.add(dp); vp.add(loadAuthorsSubTab()); vp.add(loadThanksSubTab()); this.contentWidget.setWidget(sp); return getWidget(); } /** * Returns widget with authors management for publication * * @return widget */ private Widget loadAuthorsSubTab(){ DisclosurePanel dp = new DisclosurePanel(); dp.setWidth("100%"); dp.setOpen(true); VerticalPanel vp = new VerticalPanel(); vp.setSize("100%", "100%"); dp.setContent(vp); FlexTable header = new FlexTable(); header.setWidget(0, 0, new Image(LargeIcons.INSTANCE.userGreenIcon())); header.setHTML(0, 1, "<h3>Authors / Reported by</h3>"); dp.setHeader(header); // menu TabMenu menu = new TabMenu(); // callback final FindAuthorsByPublicationId call = new FindAuthorsByPublicationId(publication.getId()); call.setCheckable(false); if (!publication.getLocked()) { // editable if not locked vp.add(menu); vp.setCellHeight(menu, "30px"); call.setCheckable(true); } final CustomButton addButton = new CustomButton("Add myself", "Add you as author of publication", SmallIcons.INSTANCE.addIcon()); addButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { JsonCallbackEvents events = JsonCallbackEvents.refreshTableEvents(call); CreateAuthorship request = new CreateAuthorship(JsonCallbackEvents.disableButtonEvents(addButton, events)); request.createAuthorship(publicationId, session.getActiveUser().getId()); } }); menu.addWidget(addButton); CustomButton addOthersButton = new CustomButton("Add others", "Add more authors", SmallIcons.INSTANCE.addIcon()); addOthersButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { session.getTabManager().addTabToCurrentTab(new AddAuthorTabItem(publication, JsonCallbackEvents.refreshTableEvents(call)), true); } }); menu.addWidget(addOthersButton); // fill table CellTable<Author> table = call.getEmptyTable(); call.retrieveData(); final CustomButton removeButton = TabMenu.getPredefinedButton(ButtonType.REMOVE, "Remove select author(s) from publication"); removeButton.setEnabled(false); JsonUtils.addTableManagedButton(call, table, removeButton); menu.addWidget(removeButton); 
removeButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { final ArrayList<Author> list = call.getTableSelectedList(); String text = "Following users will be removed from publication's authors. They will lose any benefit granted by publication's rank."; UiElements.showDeleteConfirm(list, text, new ClickHandler() { @Override public void onClick(ClickEvent event) { // TODO - SHOULD HAVE ONLY ONE CALLBACK TO CORE for(int i=0; i<list.size(); i++){ // calls the request if (i == list.size()-1) { DeleteAuthorship request = new DeleteAuthorship(JsonCallbackEvents.disableButtonEvents(removeButton, JsonCallbackEvents.refreshTableEvents(call))); request.deleteAuthorship(publicationId, list.get(i).getId()); } else { DeleteAuthorship request = new DeleteAuthorship(); request.deleteAuthorship(publicationId, list.get(i).getId()); } } } }); } }); ScrollPanel sp = new ScrollPanel(); sp.add(table); table.addStyleName("perun-table"); sp.addStyleName("perun-tableScrollPanel"); vp.add(sp); return dp; } /** * Returns thanks management widget for publication * * @return widget */ private Widget loadThanksSubTab(){ DisclosurePanel dp = new DisclosurePanel(); dp.setWidth("100%"); dp.setOpen(true); VerticalPanel vp = new VerticalPanel(); vp.setSize("100%", "100%"); dp.setContent(vp); FlexTable header = new FlexTable(); header.setWidget(0, 0, new Image(LargeIcons.INSTANCE.smallBusinessIcon())); header.setHTML(0, 1, "<h3>Acknowledgement</h3>"); dp.setHeader(header); // menu TabMenu menu = new TabMenu(); // callback final GetRichThanksByPublicationId thanksCall = new GetRichThanksByPublicationId(publicationId); thanksCall.setCheckable(false); if (!publication.getLocked()) { // editable if not locked vp.add(menu); vp.setCellHeight(menu, "30px"); thanksCall.setCheckable(true); } CellTable<Thanks> table = thanksCall.getTable(); menu.addWidget(TabMenu.getPredefinedButton(ButtonType.ADD, "Add acknowledgement to publication", new ClickHandler() { @Override public void onClick(ClickEvent event) { session.getTabManager().addTabToCurrentTab(new CreateThanksTabItem(publication, JsonCallbackEvents.refreshTableEvents(thanksCall)), true); } })); final CustomButton removeButton = TabMenu.getPredefinedButton(ButtonType.REMOVE, "Remove acknowledgement from publication"); removeButton.setEnabled(false); JsonUtils.addTableManagedButton(thanksCall, table, removeButton); menu.addWidget(removeButton); removeButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { final ArrayList<Thanks> list = thanksCall.getTableSelectedList(); String text = "Following acknowledgements will be removed from publication."; UiElements.showDeleteConfirm(list, text, new ClickHandler() { @Override public void onClick(ClickEvent event) { // TODO - SHOULD HAVE ONLY ONE CALLBACK TO CORE for(int i=0; i<list.size(); i++){ // calls the request if (i == list.size()-1) { DeleteThanks request = new DeleteThanks(JsonCallbackEvents.disableButtonEvents(removeButton, JsonCallbackEvents.refreshTableEvents(thanksCall))); request.deleteThanks(list.get(i).getId()); } else { DeleteThanks request = new DeleteThanks(JsonCallbackEvents.disableButtonEvents(removeButton)); request.deleteThanks(list.get(i).getId()); } } } }); } }); table.addStyleName("perun-table"); ScrollPanel sp = new ScrollPanel(); sp.add(table); sp.addStyleName("perun-tableScrollPanel"); vp.add(sp); return dp; } public Widget getWidget() { return this.contentWidget; } public Widget getTitle() { return this.titleWidget; } public 
ImageResource getIcon() { return SmallIcons.INSTANCE.bookIcon(); } @Override public int hashCode() { final int prime = 613; int result = 1; result = prime * result * 22 * publicationId; return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; PublicationDetailTabItem other = (PublicationDetailTabItem)obj; if (publicationId != other.publicationId) return false; return true; } public boolean multipleInstancesEnabled() { return false; } public void open() { if (fromSelf) { session.getUiElements().getBreadcrumbs().setLocation(MainMenu.USER, "My publications", CabinetTabs.URL+UrlMapper.TAB_NAME_SEPARATOR+"userpubs?user=" + session.getUser().getId(), publication.getTitle(), getUrlWithParameters()); } else { session.getUiElements().getBreadcrumbs().setLocation(MainMenu.PERUN_ADMIN, "Publications", CabinetTabs.URL+UrlMapper.TAB_NAME_SEPARATOR+"all", publication.getTitle(), getUrlWithParameters()); } } public boolean isAuthorized() { if (session.isSelf()) { return true; } else { return false; } } public final static String URL = "pbl"; public String getUrl() { return URL; } public String getUrlWithParameters() { return CabinetTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + getUrl() + "?id=" + publicationId + "&self="+fromSelf; } static public PublicationDetailTabItem load(Map<String, String> parameters) { int pubId = Integer.parseInt(parameters.get("id")); boolean fromSelf = Boolean.parseBoolean(parameters.get("self")); return new PublicationDetailTabItem(pubId, fromSelf); } }
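/*
 * Illustrative sketch (not part of the original source): opening the publication detail tab from
 * URL parameters via the static load(...) factory shown above. The parameter values are
 * placeholders, and the tab-manager call mirrors the one the class itself uses for its sub-tabs.
 */
import java.util.HashMap;
import java.util.Map;

import cz.metacentrum.perun.webgui.client.PerunWebSession;

class PublicationDetailTabUsageSketch {
    void openFromUrl(PerunWebSession session) {
        // parameters as they would arrive from UrlMapper, e.g. "pbl?id=42&self=true"
        Map<String, String> parameters = new HashMap<String, String>();
        parameters.put("id", "42");      // hypothetical publication id
        parameters.put("self", "true");  // accessed from the user's own section
        PublicationDetailTabItem tab = PublicationDetailTabItem.load(parameters);
        // add it next to the currently opened tab, as the class does for author/thanks sub-tabs
        session.getTabManager().addTabToCurrentTab(tab, true);
    }
}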
/** * Main.java * ChilliSource * Created by Ian Copland on 26/10/2012 * * The MIT License (MIT) * * Copyright (c) 2012 Tag Games Limited * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.chilliworks.chillisource.pngtocsimage; import com.chilliworks.chillisource.coreutils.CSException; import com.chilliworks.chillisource.coreutils.Logging; import com.chilliworks.chillisource.coreutils.Logging.LoggingLevel; import com.chilliworks.chillisource.coreutils.StringUtils; public class Main { //------------------------------------------------------ /// Main /// /// The entry point for the application. This processes /// the inputs and relays them onto the rest of the /// application. /// /// @param The array of input strings. //------------------------------------------------------ public static void main(String inastrArgs[]) throws Exception { //setup the logger. String[] arguments = Logging.start(inastrArgs); //check the number of arguments make sense. 
if(arguments.length < 2) { PrintHelpText(); return; } //Collect params PNGToCSImageOptions options = new PNGToCSImageOptions(); for(int i = 0; i < arguments.length; ++i) { //input if(arguments[i].equalsIgnoreCase("-i") == true || arguments[i].equalsIgnoreCase("--input") == true) { if (i + 1 < arguments.length) options.strInputFilename = arguments[i + 1]; else Logging.logFatal("No input path provided!"); i++; } //output else if(arguments[i].equalsIgnoreCase("-o") == true || arguments[i].equalsIgnoreCase("--output") == true) { if (i + 1 < arguments.length) options.strOutputFilename = arguments[i + 1]; else Logging.logFatal("No output file provided!"); i++; } //convert else if(arguments[i].equalsIgnoreCase("-ct") == true || arguments[i].equalsIgnoreCase("--convert") == true) { if (i + 1 < arguments.length) options.eConversionType = PNGToCSImage.convertStringToConversionFormat(arguments[i + 1]); else Logging.logFatal("No conversion type provided!"); i++; } //convertalpha else if(arguments[i].equalsIgnoreCase("-cta") == true || arguments[i].equalsIgnoreCase("--convertalpha") == true) { if (i + 1 < arguments.length) options.eConversionAlphaType = PNGToCSImage.convertStringToConversionFormat(arguments[i + 1]); else Logging.logFatal("No conversion type provided!"); i++; } //convertnoalpha else if(arguments[i].equalsIgnoreCase("-ctna") == true || arguments[i].equalsIgnoreCase("--convertnoalpha") == true) { if (i + 1 < arguments.length) options.eConversionNoAlphaType = PNGToCSImage.convertStringToConversionFormat(arguments[i + 1]); else Logging.logFatal("No conversion type provided!"); i++; } //compression else if(arguments[i].equalsIgnoreCase("-cn") == true || arguments[i].equalsIgnoreCase("--compression") == true) { if (i + 1 < arguments.length) options.eCompressionType = PNGToCSImage.convertStringToCompressionFormat(arguments[i + 1]); else Logging.logFatal("No compression type provided!"); i++; } //disable premultiplied alpha else if(arguments[i].equalsIgnoreCase("-dpa") == true || arguments[i].equalsIgnoreCase("--disablepremultipliedalpha") == true) { options.bPremultiply = false; } //dither else if(arguments[i].equalsIgnoreCase("-d") == true || arguments[i].equalsIgnoreCase("--dither") == true) { options.bDither = true; } //help else if(arguments[i].equalsIgnoreCase("-h") == true || arguments[i].equalsIgnoreCase("--help") == true) { PrintHelpText(); return; } //failure else { Logging.logFatal("Invalid argument found: " + arguments[i]); } } // Confirm that the paramaters are valid. if(options.strInputFilename.equals("")) Logging.logFatal("No input path provided."); if (options.strOutputFilename.equals("")) Logging.logFatal("No output file provided."); try { PNGToCSImage.run(options); } catch (CSException e) { Logging.logFatal(e.getMessage()); } catch (Exception e) { Logging.logFatal(StringUtils.convertExceptionToString(e)); } Logging.finish(); } //------------------------------------------------------ /// Print Help Text /// /// Prints out instructions on how to use this tool. 
    //------------------------------------------------------
    private static void PrintHelpText()
    {
        Logging.setLoggingLevel(LoggingLevel.VERBOSE);
        Logging.logVerbose("Usage: java -jar PNGToCSImage.jar --input <filename> --output <filename> [--convert <type>] [--convertalpha <type>] [--convertnoalpha <type>] [--compression <type>] [--disablepremultipliedalpha] [--dither] [" + Logging.PARAM_LOGGING_LEVEL + " <level>] [--help]");
        Logging.logVerbose("Parameters:");
        Logging.logVerbose(" --input(-i): The path to the source image PNG.");
        Logging.logVerbose(" --output(-o): The path to the output image csimage.");
        Logging.logVerbose(" --convert(-ct): [Optional] The type to convert to.");
        Logging.logVerbose(" --convertalpha(-cta): [Optional] The type to convert images with alpha to.");
        Logging.logVerbose(" --convertnoalpha(-ctna): [Optional] The type to convert images without alpha to.");
        Logging.logVerbose(" --compression(-cn): [Optional] The compression type. The default is zlib compression.");
        Logging.logVerbose(" --disablepremultipliedalpha(-dpa): [Optional] If set the output image will not have its alpha premultiplied.");
        Logging.logVerbose(" --dither(-d): [Optional] Whether or not to dither if converting to a smaller image format.");
        Logging.logVerbose(" " + Logging.PARAM_LOGGING_LEVEL + "(" + Logging.SHORT_PARAM_LOGGING_LEVEL + "): [Optional] Sets the level of message to log.");
        Logging.logVerbose(" --help(-h): [Optional] Display this help message.");
        Logging.logVerbose("Conversion Types:");
        Logging.logVerbose(" L8");
        Logging.logVerbose(" LA88");
        Logging.logVerbose(" RGB565");
        Logging.logVerbose(" RGBA4444");
        Logging.logVerbose(" RGB888");
        Logging.logVerbose(" RGBA8888");
        Logging.logVerbose("Compression Types:");
        Logging.logVerbose(" None");
        Logging.logVerbose(" Default");
        Logging.logVerbose("Logging Levels:");
        Logging.logVerbose(" " + Logging.LOGGING_LEVEL_NONE + ": No logging.");
        Logging.logVerbose(" " + Logging.LOGGING_LEVEL_FATAL + ": Only log fatal errors.");
        Logging.logVerbose(" " + Logging.LOGGING_LEVEL_ERROR + ": Only log errors.");
        Logging.logVerbose(" " + Logging.LOGGING_LEVEL_WARNING + ": Log errors and warnings.");
        Logging.logVerbose(" " + Logging.LOGGING_LEVEL_VERBOSE + ": Log all messages.");
    }
}
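/*
 * Illustrative sketch (not part of the original source): invoking the converter programmatically
 * with the same flags the help text above documents. Assumes the same package as Main, and the
 * file names and chosen formats are placeholders.
 */
class PNGToCSImageUsageSketch {
    public static void main(String[] args) throws Exception {
        Main.main(new String[] {
            "--input", "source.png",        // placeholder input path
            "--output", "source.csimage",   // placeholder output path
            "--convertalpha", "RGBA4444",   // images with alpha go to RGBA4444
            "--convertnoalpha", "RGB565",   // images without alpha go to RGB565
            "--dither"                      // dither when reducing bit depth
        });
    }
}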
/* * Copyright 2004 Sun Microsystems, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.rometools.rome.io.impl; import java.util.ArrayList; import java.util.List; import java.util.Locale; import org.jdom2.Content; import org.jdom2.Element; import org.jdom2.output.XMLOutputter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.rometools.rome.feed.WireFeed; import com.rometools.rome.feed.rss.Category; import com.rometools.rome.feed.rss.Channel; import com.rometools.rome.feed.rss.Cloud; import com.rometools.rome.feed.rss.Description; import com.rometools.rome.feed.rss.Enclosure; import com.rometools.rome.feed.rss.Item; import com.rometools.rome.feed.rss.Source; import com.rometools.utils.Strings; public class RSS092Parser extends RSS091UserlandParser { private static final Logger LOG = LoggerFactory.getLogger(RSS092Parser.class); public RSS092Parser() { this("rss_0.92"); } protected RSS092Parser(final String type) { super(type); } @Override protected String getRSSVersion() { return "0.92"; } @Override protected WireFeed parseChannel(final Element rssRoot, final Locale locale) { final Channel channel = (Channel) super.parseChannel(rssRoot, locale); final Element eChannel = rssRoot.getChild("channel", getRSSNamespace()); final Element eCloud = eChannel.getChild("cloud", getRSSNamespace()); if (eCloud != null) { final Cloud cloud = new Cloud(); final String domain = eCloud.getAttributeValue("domain"); if (domain != null) { cloud.setDomain(domain); } // getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final String port = eCloud.getAttributeValue("port"); if (port != null) { cloud.setPort(Integer.parseInt(port.trim())); } // getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final String path = eCloud.getAttributeValue("path"); if (path != null) { cloud.setPath(path); } // getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final String registerProcedure = eCloud.getAttributeValue("registerProcedure"); if (registerProcedure != null) { cloud.setRegisterProcedure(registerProcedure); } // getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final String protocol = eCloud.getAttributeValue("protocol"); if (protocol != null) { cloud.setProtocol(protocol); } channel.setCloud(cloud); } return channel; } @Override protected Item parseItem(final Element rssRoot, final Element eItem, final Locale locale) { final Item item = super.parseItem(rssRoot, eItem, locale); final Element eSource = eItem.getChild("source", getRSSNamespace()); if (eSource != null) { final Source source = new Source(); // getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final String url = eSource.getAttributeValue("url"); source.setUrl(url); source.setValue(eSource.getText()); item.setSource(source); } // 0.92 allows one enclosure occurrence, 0.93 multiple just saving to write some code. 
// getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final List<Element> eEnclosures = eItem.getChildren("enclosure"); if (!eEnclosures.isEmpty()) { final List<Enclosure> enclosures = new ArrayList<Enclosure>(); for (final Element eEnclosure : eEnclosures) { final Enclosure enclosure = new Enclosure(); // getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final String url = eEnclosure.getAttributeValue("url"); if (url != null) { enclosure.setUrl(url); } // getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final String length = eEnclosure.getAttributeValue("length"); enclosure.setLength(NumberParser.parseLong(length, 0L)); // getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final String type = eEnclosure.getAttributeValue("type"); if (type != null) { enclosure.setType(type); } enclosures.add(enclosure); } item.setEnclosures(enclosures); } // getRSSNamespace()); DONT KNOW WHY DOESN'T WORK final List<Element> categories = eItem.getChildren("category"); item.setCategories(parseCategories(categories)); return item; } protected List<Category> parseCategories(final List<Element> eCats) { final List<Category> cats = new ArrayList<Category>(); for (final Element eCat : eCats) { // skip categories without value final String text = eCat.getText(); if(Strings.isBlank(text)) { continue; } final Category cat = new Category(); final String domain = eCat.getAttributeValue("domain"); if (domain != null) { cat.setDomain(domain); } cat.setValue(text); cats.add(cat); } if(cats.isEmpty()) { return null; } return cats; } @Override protected Description parseItemDescription(final Element rssRoot, final Element eDesc) { final Description desc = new Description(); final StringBuilder sb = new StringBuilder(); final XMLOutputter xmlOut = new XMLOutputter(); for (final Content c : eDesc.getContent()) { switch (c.getCType()) { case Text: case CDATA: sb.append(c.getValue()); break; case EntityRef: LOG.debug("Entity: {}", c.getValue()); sb.append(c.getValue()); break; case Element: sb.append(xmlOut.outputString((Element) c)); break; default: // ignore break; } } desc.setValue(sb.toString()); String att = eDesc.getAttributeValue("type"); if (att == null) { att = "text/html"; } desc.setType(att); return desc; } }
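/*
 * Illustrative sketch (not part of the original source): RSS092Parser is normally not called
 * directly; Rome selects it by feed type when a 0.92 document is parsed through WireFeedInput.
 * The use of WireFeedInput here is an assumption about the surrounding Rome API, and the XML
 * string is a placeholder supplied by the caller.
 */
import java.io.StringReader;

import com.rometools.rome.feed.rss.Channel;
import com.rometools.rome.io.WireFeedInput;

class Rss092ParseSketch {
    Channel parse(String rss092Xml) throws Exception {
        // WireFeedInput dispatches to RSS092Parser when the rss element declares version="0.92"
        return (Channel) new WireFeedInput().build(new StringReader(rss092Xml));
    }
}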
package de.tum.in.www1.exerciseapp.web.rest; import de.tum.in.www1.exerciseapp.ArTEMiSApp; import de.tum.in.www1.exerciseapp.domain.ProgrammingExercise; import de.tum.in.www1.exerciseapp.repository.ProgrammingExerciseRepository; import de.tum.in.www1.exerciseapp.web.rest.errors.ExceptionTranslator; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.MockitoAnnotations; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.data.web.PageableHandlerMethodArgumentResolver; import org.springframework.http.MediaType; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.transaction.annotation.Transactional; import javax.persistence.EntityManager; import java.util.List; import static org.assertj.core.api.Assertions.assertThat; import static org.hamcrest.Matchers.hasItem; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; /** * Test class for the ProgrammingExerciseResource REST controller. * * @see ProgrammingExerciseResource */ @RunWith(SpringRunner.class) @SpringBootTest(classes = ArTEMiSApp.class) public class ProgrammingExerciseResourceIntTest { private static final String DEFAULT_BASE_REPOSITORY_URL = "AAAAAAAAAA"; private static final String UPDATED_BASE_REPOSITORY_URL = "BBBBBBBBBB"; private static final String DEFAULT_BASE_BUILD_PLAN_ID = "AAAAAAAAAA"; private static final String UPDATED_BASE_BUILD_PLAN_ID = "BBBBBBBBBB"; private static final Boolean DEFAULT_PUBLISH_BUILD_PLAN_URL = false; private static final Boolean UPDATED_PUBLISH_BUILD_PLAN_URL = true; private static final Boolean DEFAULT_ALLOW_ONLINE_EDITOR = false; private static final Boolean UPDATED_ALLOW_ONLINE_EDITOR = true; @Autowired private ProgrammingExerciseRepository programmingExerciseRepository; @Autowired private MappingJackson2HttpMessageConverter jacksonMessageConverter; @Autowired private PageableHandlerMethodArgumentResolver pageableArgumentResolver; @Autowired private ExceptionTranslator exceptionTranslator; @Autowired private EntityManager em; private MockMvc restProgrammingExerciseMockMvc; private ProgrammingExercise programmingExercise; @Before public void setup() { MockitoAnnotations.initMocks(this); final ProgrammingExerciseResource programmingExerciseResource = new ProgrammingExerciseResource(programmingExerciseRepository); this.restProgrammingExerciseMockMvc = MockMvcBuilders.standaloneSetup(programmingExerciseResource) .setCustomArgumentResolvers(pageableArgumentResolver) .setControllerAdvice(exceptionTranslator) .setMessageConverters(jacksonMessageConverter).build(); } /** * Create an entity for this test. * * This is a static method, as tests for other entities might also need it, * if they test an entity which requires the current entity. 
*/ public static ProgrammingExercise createEntity(EntityManager em) { ProgrammingExercise programmingExercise = new ProgrammingExercise() .baseRepositoryUrl(DEFAULT_BASE_REPOSITORY_URL) .baseBuildPlanId(DEFAULT_BASE_BUILD_PLAN_ID) .publishBuildPlanUrl(DEFAULT_PUBLISH_BUILD_PLAN_URL) .allowOnlineEditor(DEFAULT_ALLOW_ONLINE_EDITOR); return programmingExercise; } @Before public void initTest() { programmingExercise = createEntity(em); } @Test @Transactional public void createProgrammingExercise() throws Exception { int databaseSizeBeforeCreate = programmingExerciseRepository.findAll().size(); // Create the ProgrammingExercise restProgrammingExerciseMockMvc.perform(post("/api/programming-exercises") .contentType(TestUtil.APPLICATION_JSON_UTF8) .content(TestUtil.convertObjectToJsonBytes(programmingExercise))) .andExpect(status().isCreated()); // Validate the ProgrammingExercise in the database List<ProgrammingExercise> programmingExerciseList = programmingExerciseRepository.findAll(); assertThat(programmingExerciseList).hasSize(databaseSizeBeforeCreate + 1); ProgrammingExercise testProgrammingExercise = programmingExerciseList.get(programmingExerciseList.size() - 1); assertThat(testProgrammingExercise.getBaseRepositoryUrl()).isEqualTo(DEFAULT_BASE_REPOSITORY_URL); assertThat(testProgrammingExercise.getBaseBuildPlanId()).isEqualTo(DEFAULT_BASE_BUILD_PLAN_ID); assertThat(testProgrammingExercise.isPublishBuildPlanUrl()).isEqualTo(DEFAULT_PUBLISH_BUILD_PLAN_URL); assertThat(testProgrammingExercise.isAllowOnlineEditor()).isEqualTo(DEFAULT_ALLOW_ONLINE_EDITOR); } @Test @Transactional public void createProgrammingExerciseWithExistingId() throws Exception { int databaseSizeBeforeCreate = programmingExerciseRepository.findAll().size(); // Create the ProgrammingExercise with an existing ID programmingExercise.setId(1L); // An entity with an existing ID cannot be created, so this API call must fail restProgrammingExerciseMockMvc.perform(post("/api/programming-exercises") .contentType(TestUtil.APPLICATION_JSON_UTF8) .content(TestUtil.convertObjectToJsonBytes(programmingExercise))) .andExpect(status().isBadRequest()); // Validate the Alice in the database List<ProgrammingExercise> programmingExerciseList = programmingExerciseRepository.findAll(); assertThat(programmingExerciseList).hasSize(databaseSizeBeforeCreate); } @Test @Transactional public void getAllProgrammingExercises() throws Exception { // Initialize the database programmingExerciseRepository.saveAndFlush(programmingExercise); // Get all the programmingExerciseList restProgrammingExerciseMockMvc.perform(get("/api/programming-exercises?sort=id,desc")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)) .andExpect(jsonPath("$.[*].id").value(hasItem(programmingExercise.getId().intValue()))) .andExpect(jsonPath("$.[*].baseRepositoryUrl").value(hasItem(DEFAULT_BASE_REPOSITORY_URL.toString()))) .andExpect(jsonPath("$.[*].baseBuildPlanId").value(hasItem(DEFAULT_BASE_BUILD_PLAN_ID.toString()))) .andExpect(jsonPath("$.[*].publishBuildPlanUrl").value(hasItem(DEFAULT_PUBLISH_BUILD_PLAN_URL.booleanValue()))) .andExpect(jsonPath("$.[*].allowOnlineEditor").value(hasItem(DEFAULT_ALLOW_ONLINE_EDITOR.booleanValue()))); } @Test @Transactional public void getProgrammingExercise() throws Exception { // Initialize the database programmingExerciseRepository.saveAndFlush(programmingExercise); // Get the programmingExercise restProgrammingExerciseMockMvc.perform(get("/api/programming-exercises/{id}", 
programmingExercise.getId())) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE)) .andExpect(jsonPath("$.id").value(programmingExercise.getId().intValue())) .andExpect(jsonPath("$.baseRepositoryUrl").value(DEFAULT_BASE_REPOSITORY_URL.toString())) .andExpect(jsonPath("$.baseBuildPlanId").value(DEFAULT_BASE_BUILD_PLAN_ID.toString())) .andExpect(jsonPath("$.publishBuildPlanUrl").value(DEFAULT_PUBLISH_BUILD_PLAN_URL.booleanValue())) .andExpect(jsonPath("$.allowOnlineEditor").value(DEFAULT_ALLOW_ONLINE_EDITOR.booleanValue())); } @Test @Transactional public void getNonExistingProgrammingExercise() throws Exception { // Get the programmingExercise restProgrammingExerciseMockMvc.perform(get("/api/programming-exercises/{id}", Long.MAX_VALUE)) .andExpect(status().isNotFound()); } @Test @Transactional public void updateProgrammingExercise() throws Exception { // Initialize the database programmingExerciseRepository.saveAndFlush(programmingExercise); int databaseSizeBeforeUpdate = programmingExerciseRepository.findAll().size(); // Update the programmingExercise ProgrammingExercise updatedProgrammingExercise = programmingExerciseRepository.findOne(programmingExercise.getId()); updatedProgrammingExercise .baseRepositoryUrl(UPDATED_BASE_REPOSITORY_URL) .baseBuildPlanId(UPDATED_BASE_BUILD_PLAN_ID) .publishBuildPlanUrl(UPDATED_PUBLISH_BUILD_PLAN_URL) .allowOnlineEditor(UPDATED_ALLOW_ONLINE_EDITOR); restProgrammingExerciseMockMvc.perform(put("/api/programming-exercises") .contentType(TestUtil.APPLICATION_JSON_UTF8) .content(TestUtil.convertObjectToJsonBytes(updatedProgrammingExercise))) .andExpect(status().isOk()); // Validate the ProgrammingExercise in the database List<ProgrammingExercise> programmingExerciseList = programmingExerciseRepository.findAll(); assertThat(programmingExerciseList).hasSize(databaseSizeBeforeUpdate); ProgrammingExercise testProgrammingExercise = programmingExerciseList.get(programmingExerciseList.size() - 1); assertThat(testProgrammingExercise.getBaseRepositoryUrl()).isEqualTo(UPDATED_BASE_REPOSITORY_URL); assertThat(testProgrammingExercise.getBaseBuildPlanId()).isEqualTo(UPDATED_BASE_BUILD_PLAN_ID); assertThat(testProgrammingExercise.isPublishBuildPlanUrl()).isEqualTo(UPDATED_PUBLISH_BUILD_PLAN_URL); assertThat(testProgrammingExercise.isAllowOnlineEditor()).isEqualTo(UPDATED_ALLOW_ONLINE_EDITOR); } @Test @Transactional public void updateNonExistingProgrammingExercise() throws Exception { int databaseSizeBeforeUpdate = programmingExerciseRepository.findAll().size(); // Create the ProgrammingExercise // If the entity doesn't have an ID, it will be created instead of just being updated restProgrammingExerciseMockMvc.perform(put("/api/programming-exercises") .contentType(TestUtil.APPLICATION_JSON_UTF8) .content(TestUtil.convertObjectToJsonBytes(programmingExercise))) .andExpect(status().isCreated()); // Validate the ProgrammingExercise in the database List<ProgrammingExercise> programmingExerciseList = programmingExerciseRepository.findAll(); assertThat(programmingExerciseList).hasSize(databaseSizeBeforeUpdate + 1); } @Test @Transactional public void deleteProgrammingExercise() throws Exception { // Initialize the database programmingExerciseRepository.saveAndFlush(programmingExercise); int databaseSizeBeforeDelete = programmingExerciseRepository.findAll().size(); // Get the programmingExercise restProgrammingExerciseMockMvc.perform(delete("/api/programming-exercises/{id}", programmingExercise.getId()) 
.accept(TestUtil.APPLICATION_JSON_UTF8))
            .andExpect(status().isOk());

        // Validate the database contains one entity less
        List<ProgrammingExercise> programmingExerciseList = programmingExerciseRepository.findAll();
        assertThat(programmingExerciseList).hasSize(databaseSizeBeforeDelete - 1);
    }

    @Test
    @Transactional
    public void equalsVerifier() throws Exception {
        TestUtil.equalsVerifier(ProgrammingExercise.class);
        ProgrammingExercise programmingExercise1 = new ProgrammingExercise();
        programmingExercise1.setId(1L);
        ProgrammingExercise programmingExercise2 = new ProgrammingExercise();
        programmingExercise2.setId(programmingExercise1.getId());
        assertThat(programmingExercise1).isEqualTo(programmingExercise2);
        programmingExercise2.setId(2L);
        assertThat(programmingExercise1).isNotEqualTo(programmingExercise2);
        programmingExercise1.setId(null);
        assertThat(programmingExercise1).isNotEqualTo(programmingExercise2);
    }
}
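/*
 * Illustrative sketch (not part of the original source): how another integration test could reuse
 * the public static createEntity(...) factory above, as its Javadoc suggests. Assumes the sketch
 * lives in the same package as the test class and runs inside a @Transactional test, so no flush
 * is performed here.
 */
import javax.persistence.EntityManager;

import de.tum.in.www1.exerciseapp.domain.ProgrammingExercise;

class ProgrammingExerciseFixtureSketch {
    ProgrammingExercise persistDefaultExercise(EntityManager em) {
        // build the default fixture exactly as the resource test does
        ProgrammingExercise exercise = ProgrammingExerciseResourceIntTest.createEntity(em);
        em.persist(exercise);
        return exercise;
    }
}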
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package java.lang; import java.io.InputStream; import java.io.Serializable; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.GenericDeclaration; import java.lang.reflect.Method; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.net.URL; import java.security.ProtectionDomain; /** * Stub implementation of java.lang.Class. * * @see java.lang.Object */ public final class Class<T> implements AnnotatedElement, GenericDeclaration, Serializable, Type { private static final long serialVersionUID = 3206093459760846163L; public static Class<?> forName(String className) throws ClassNotFoundException { return null; } public static Class<?> forName(String name, boolean initialize, ClassLoader loader) throws ClassNotFoundException { return null; } public <U> Class<? extends U> asSubclass(Class<U> clazz) { return null; } public T cast(Object obj) { return null; } public boolean desiredAssertionStatus() { return false; } public <A extends Annotation> A getAnnotation(Class<A> annotationClass) { return null; } public Annotation[] getAnnotations() { return null; } public String getCanonicalName() { return ""; } public Class<?>[] getClasses() { return null; } public ClassLoader getClassLoader() { return null; } public Class<?> getComponentType() { return null; } public Constructor<T> getConstructor(Class<?>... parameterTypes) throws NoSuchMethodException, SecurityException { return null; } public Constructor<?>[] getConstructors() throws SecurityException { return null; } public Annotation[] getDeclaredAnnotations() { return null; } public Class<?>[] getDeclaredClasses() throws SecurityException { return null; } public Constructor<T> getDeclaredConstructor(Class<?>... parameterTypes) throws NoSuchMethodException, SecurityException { return null; } public Constructor<?>[] getDeclaredConstructors() throws SecurityException { return null; } public Field getDeclaredField(String name) throws NoSuchFieldException, SecurityException { return null; } public Field[] getDeclaredFields() throws SecurityException { return null; } public Method getDeclaredMethod(String name, Class<?>... 
parameterTypes) throws NoSuchMethodException, SecurityException { return null; } public Method[] getDeclaredMethods() throws SecurityException { return null; } public Class<?> getDeclaringClass() { return null; } public Class<?> getEnclosingClass() { return null; } public Constructor<?> getEnclosingConstructor() { return null; } public Method getEnclosingMethod() { return null; } public T[] getEnumConstants() { return null; } T[] getEnumConstantsShared() { return null; } public Field getField(String name) throws NoSuchFieldException, SecurityException { return null; } public Field[] getFields() throws SecurityException { return null; } public Type[] getGenericInterfaces() { return null; } public Type getGenericSuperclass() { return null; } public Class<?>[] getInterfaces() { return null; } public Method getMethod(String name, Class<?>... parameterTypes) throws NoSuchMethodException, SecurityException { return null; } public Method[] getMethods() throws SecurityException { return null; } public int getModifiers() { return 0; } public String getName() { return ""; } public Package getPackage() { return null; } public ProtectionDomain getProtectionDomain() { return null; } public URL getResource(String name) { return null; } public InputStream getResourceAsStream(String name) { return null; } public Object[] getSigners() { return null; } public String getSimpleName() { return ""; } public Class<? super T> getSuperclass() { return null; } public TypeVariable<Class<T>>[] getTypeParameters() { return null; } public boolean isAnnotation() { return false; } public boolean isAnnotationPresent(Class<? extends Annotation> annotationType) { return false; } public boolean isAnonymousClass() { return false; } public boolean isArray() { return false; } public boolean isAssignableFrom(Class<?> cls) { return false; } public boolean isEnum() { return false; } public boolean isInstance(Object obj) { return false; } public boolean isInterface() { return false; } public boolean isLocalClass() { return false; } public boolean isMemberClass() { return false; } public boolean isPrimitive() { return false; } public boolean isSynthetic() { return false; } public T newInstance() throws InstantiationException, IllegalAccessException { return null; } public String toString() { return ""; } /** * @since 1.8 */ public <T extends Annotation> T[] getDeclaredAnnotationsByType(Class<T> annotationClass) { return null; } /** * @since 1.8 */ public <T extends Annotation> T[] getAnnotationsByType(Class<T> annotationClass) { return null; } /** * @since 1.8 */ public <T extends Annotation> T getDeclaredAnnotation(Class<T> annotationClass) { return null; } // Android-specific public int getAccessFlags() { return 0; } }
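/*
 * Illustrative sketch (not part of the original source): the reflection calls this stub mirrors.
 * Against a real JDK the lookups below resolve java.util.ArrayList; against the compile-time-only
 * stub above they simply return null/false, which is the point of a stub implementation.
 */
import java.lang.reflect.Method;

class ClassStubUsageSketch {
    static void example() throws Exception {
        Class<?> listClass = Class.forName("java.util.ArrayList");          // lookup by name
        Method sizeMethod = listClass.getDeclaredMethod("size");            // reflective method lookup
        boolean assignable = java.util.List.class.isAssignableFrom(listClass); // type relationship check
        System.out.println(sizeMethod + " assignable=" + assignable);
    }
}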
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.datapipeline.model; import java.io.Serializable; /** * <p> * Contains the output of GetPipelineDefinition. * </p> */ public class GetPipelineDefinitionResult implements Serializable, Cloneable { /** * <p> * The objects defined in the pipeline. * </p> */ private com.amazonaws.internal.SdkInternalList<PipelineObject> pipelineObjects; /** * <p> * The parameter objects used in the pipeline definition. * </p> */ private com.amazonaws.internal.SdkInternalList<ParameterObject> parameterObjects; /** * <p> * The parameter values used in the pipeline definition. * </p> */ private com.amazonaws.internal.SdkInternalList<ParameterValue> parameterValues; /** * <p> * The objects defined in the pipeline. * </p> * * @return The objects defined in the pipeline. */ public java.util.List<PipelineObject> getPipelineObjects() { if (pipelineObjects == null) { pipelineObjects = new com.amazonaws.internal.SdkInternalList<PipelineObject>(); } return pipelineObjects; } /** * <p> * The objects defined in the pipeline. * </p> * * @param pipelineObjects * The objects defined in the pipeline. */ public void setPipelineObjects( java.util.Collection<PipelineObject> pipelineObjects) { if (pipelineObjects == null) { this.pipelineObjects = null; return; } this.pipelineObjects = new com.amazonaws.internal.SdkInternalList<PipelineObject>( pipelineObjects); } /** * <p> * The objects defined in the pipeline. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setPipelineObjects(java.util.Collection)} or * {@link #withPipelineObjects(java.util.Collection)} if you want to * override the existing values. * </p> * * @param pipelineObjects * The objects defined in the pipeline. * @return Returns a reference to this object so that method calls can be * chained together. */ public GetPipelineDefinitionResult withPipelineObjects( PipelineObject... pipelineObjects) { if (this.pipelineObjects == null) { setPipelineObjects(new com.amazonaws.internal.SdkInternalList<PipelineObject>( pipelineObjects.length)); } for (PipelineObject ele : pipelineObjects) { this.pipelineObjects.add(ele); } return this; } /** * <p> * The objects defined in the pipeline. * </p> * * @param pipelineObjects * The objects defined in the pipeline. * @return Returns a reference to this object so that method calls can be * chained together. */ public GetPipelineDefinitionResult withPipelineObjects( java.util.Collection<PipelineObject> pipelineObjects) { setPipelineObjects(pipelineObjects); return this; } /** * <p> * The parameter objects used in the pipeline definition. * </p> * * @return The parameter objects used in the pipeline definition. */ public java.util.List<ParameterObject> getParameterObjects() { if (parameterObjects == null) { parameterObjects = new com.amazonaws.internal.SdkInternalList<ParameterObject>(); } return parameterObjects; } /** * <p> * The parameter objects used in the pipeline definition. 
* </p> * * @param parameterObjects * The parameter objects used in the pipeline definition. */ public void setParameterObjects( java.util.Collection<ParameterObject> parameterObjects) { if (parameterObjects == null) { this.parameterObjects = null; return; } this.parameterObjects = new com.amazonaws.internal.SdkInternalList<ParameterObject>( parameterObjects); } /** * <p> * The parameter objects used in the pipeline definition. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setParameterObjects(java.util.Collection)} or * {@link #withParameterObjects(java.util.Collection)} if you want to * override the existing values. * </p> * * @param parameterObjects * The parameter objects used in the pipeline definition. * @return Returns a reference to this object so that method calls can be * chained together. */ public GetPipelineDefinitionResult withParameterObjects( ParameterObject... parameterObjects) { if (this.parameterObjects == null) { setParameterObjects(new com.amazonaws.internal.SdkInternalList<ParameterObject>( parameterObjects.length)); } for (ParameterObject ele : parameterObjects) { this.parameterObjects.add(ele); } return this; } /** * <p> * The parameter objects used in the pipeline definition. * </p> * * @param parameterObjects * The parameter objects used in the pipeline definition. * @return Returns a reference to this object so that method calls can be * chained together. */ public GetPipelineDefinitionResult withParameterObjects( java.util.Collection<ParameterObject> parameterObjects) { setParameterObjects(parameterObjects); return this; } /** * <p> * The parameter values used in the pipeline definition. * </p> * * @return The parameter values used in the pipeline definition. */ public java.util.List<ParameterValue> getParameterValues() { if (parameterValues == null) { parameterValues = new com.amazonaws.internal.SdkInternalList<ParameterValue>(); } return parameterValues; } /** * <p> * The parameter values used in the pipeline definition. * </p> * * @param parameterValues * The parameter values used in the pipeline definition. */ public void setParameterValues( java.util.Collection<ParameterValue> parameterValues) { if (parameterValues == null) { this.parameterValues = null; return; } this.parameterValues = new com.amazonaws.internal.SdkInternalList<ParameterValue>( parameterValues); } /** * <p> * The parameter values used in the pipeline definition. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setParameterValues(java.util.Collection)} or * {@link #withParameterValues(java.util.Collection)} if you want to * override the existing values. * </p> * * @param parameterValues * The parameter values used in the pipeline definition. * @return Returns a reference to this object so that method calls can be * chained together. */ public GetPipelineDefinitionResult withParameterValues( ParameterValue... parameterValues) { if (this.parameterValues == null) { setParameterValues(new com.amazonaws.internal.SdkInternalList<ParameterValue>( parameterValues.length)); } for (ParameterValue ele : parameterValues) { this.parameterValues.add(ele); } return this; } /** * <p> * The parameter values used in the pipeline definition. * </p> * * @param parameterValues * The parameter values used in the pipeline definition. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public GetPipelineDefinitionResult withParameterValues( java.util.Collection<ParameterValue> parameterValues) { setParameterValues(parameterValues); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getPipelineObjects() != null) sb.append("PipelineObjects: " + getPipelineObjects() + ","); if (getParameterObjects() != null) sb.append("ParameterObjects: " + getParameterObjects() + ","); if (getParameterValues() != null) sb.append("ParameterValues: " + getParameterValues()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof GetPipelineDefinitionResult == false) return false; GetPipelineDefinitionResult other = (GetPipelineDefinitionResult) obj; if (other.getPipelineObjects() == null ^ this.getPipelineObjects() == null) return false; if (other.getPipelineObjects() != null && other.getPipelineObjects().equals(this.getPipelineObjects()) == false) return false; if (other.getParameterObjects() == null ^ this.getParameterObjects() == null) return false; if (other.getParameterObjects() != null && other.getParameterObjects().equals( this.getParameterObjects()) == false) return false; if (other.getParameterValues() == null ^ this.getParameterValues() == null) return false; if (other.getParameterValues() != null && other.getParameterValues().equals(this.getParameterValues()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getPipelineObjects() == null) ? 0 : getPipelineObjects() .hashCode()); hashCode = prime * hashCode + ((getParameterObjects() == null) ? 0 : getParameterObjects() .hashCode()); hashCode = prime * hashCode + ((getParameterValues() == null) ? 0 : getParameterValues() .hashCode()); return hashCode; } @Override public GetPipelineDefinitionResult clone() { try { return (GetPipelineDefinitionResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
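/*
 * Usage sketch (not part of the generated AWS SDK source above): a minimal illustration of the
 * append-vs-replace semantics called out in the Javadoc of withPipelineObjects(...). The varargs
 * with-method appends to any list already present, while setPipelineObjects(...) replaces it.
 * The empty PipelineObject instances below are placeholders used purely for illustration.
 */
class GetPipelineDefinitionResultUsageSketch {

    static GetPipelineDefinitionResult buildExample() {
        PipelineObject first = new PipelineObject();
        PipelineObject second = new PipelineObject();

        GetPipelineDefinitionResult result = new GetPipelineDefinitionResult()
                .withPipelineObjects(first);   // list now holds: first
        result.withPipelineObjects(second);    // varargs form appends: first, second

        // To replace the list instead of appending, use the setter (the Collection-based
        // with-method above delegates to it as well).
        result.setPipelineObjects(java.util.Collections.singletonList(second));
        return result;                         // list now holds: second only
    }
}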
/* * Copyright 2015 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.channel.epoll; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.CompositeByteBuf; import io.netty.channel.Channel; import io.netty.channel.ChannelConfig; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelOutboundBuffer; import io.netty.channel.ChannelPipeline; import io.netty.channel.ChannelPromise; import io.netty.channel.ConnectTimeoutException; import io.netty.channel.DefaultFileRegion; import io.netty.channel.EventLoop; import io.netty.channel.RecvByteBufAllocator; import io.netty.channel.unix.FileDescriptor; import io.netty.channel.unix.Socket; import io.netty.util.internal.EmptyArrays; import io.netty.util.internal.MpscLinkedQueueNode; import io.netty.util.internal.OneTimeTask; import io.netty.util.internal.PlatformDependent; import io.netty.util.internal.StringUtil; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; import java.io.IOException; import java.net.SocketAddress; import java.nio.ByteBuffer; import java.nio.channels.ClosedChannelException; import java.util.Queue; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import static io.netty.channel.unix.FileDescriptor.pipe; import static io.netty.util.internal.ObjectUtil.checkNotNull; public abstract class AbstractEpollStreamChannel extends AbstractEpollChannel { private static final String EXPECTED_TYPES = " (expected: " + StringUtil.simpleClassName(ByteBuf.class) + ", " + StringUtil.simpleClassName(DefaultFileRegion.class) + ')'; private static final InternalLogger logger = InternalLoggerFactory.getInstance(AbstractEpollStreamChannel.class); static final ClosedChannelException CLOSED_CHANNEL_EXCEPTION = new ClosedChannelException(); static { CLOSED_CHANNEL_EXCEPTION.setStackTrace(EmptyArrays.EMPTY_STACK_TRACE); } /** * The future of the current connection attempt. If not null, subsequent * connection attempts will fail. */ private ChannelPromise connectPromise; private ScheduledFuture<?> connectTimeoutFuture; private SocketAddress requestedRemoteAddress; private final Queue<SpliceInTask> spliceQueue = PlatformDependent.newMpscQueue(); // Lazy init these if we need to splice(...) private FileDescriptor pipeIn; private FileDescriptor pipeOut; /** * @deprecated Use {@link #AbstractEpollStreamChannel(Channel, Socket)}. */ @Deprecated protected AbstractEpollStreamChannel(Channel parent, int fd) { this(parent, new Socket(fd)); } /** * @deprecated Use {@link #AbstractEpollStreamChannel(Socket, boolean)}. */ @Deprecated protected AbstractEpollStreamChannel(int fd) { this(new Socket(fd)); } /** * @deprecated Use {@link #AbstractEpollStreamChannel(Socket, boolean)}. 
*/ @Deprecated protected AbstractEpollStreamChannel(FileDescriptor fd) { this(new Socket(fd.intValue())); } /** * @deprecated Use {@link #AbstractEpollStreamChannel(Socket, boolean)}. */ @Deprecated protected AbstractEpollStreamChannel(Socket fd) { this(fd, fd.getSoError() == 0); } protected AbstractEpollStreamChannel(Channel parent, Socket fd) { super(parent, fd, Native.EPOLLIN, true); // Add EPOLLRDHUP so we are notified once the remote peer close the connection. flags |= Native.EPOLLRDHUP; } protected AbstractEpollStreamChannel(Socket fd, boolean active) { super(null, fd, Native.EPOLLIN, active); // Add EPOLLRDHUP so we are notified once the remote peer close the connection. flags |= Native.EPOLLRDHUP; } @Override protected AbstractEpollUnsafe newUnsafe() { return new EpollStreamUnsafe(); } /** * Splice from this {@link AbstractEpollStreamChannel} to another {@link AbstractEpollStreamChannel}. * The {@code len} is the number of bytes to splice. If using {@link Integer#MAX_VALUE} it will * splice until the {@link ChannelFuture} was canceled or it was failed. * * Please note: * <ul> * <li>both channels need to be registered to the same {@link EventLoop}, otherwise an * {@link IllegalArgumentException} is thrown. </li> * <li>{@link EpollChannelConfig#getEpollMode()} must be {@link EpollMode#LEVEL_TRIGGERED} for this and the * target {@link AbstractEpollStreamChannel}</li> * </ul> * */ public final ChannelFuture spliceTo(final AbstractEpollStreamChannel ch, final int len) { return spliceTo(ch, len, newPromise()); } /** * Splice from this {@link AbstractEpollStreamChannel} to another {@link AbstractEpollStreamChannel}. * The {@code len} is the number of bytes to splice. If using {@link Integer#MAX_VALUE} it will * splice until the {@link ChannelFuture} was canceled or it was failed. * * Please note: * <ul> * <li>both channels need to be registered to the same {@link EventLoop}, otherwise an * {@link IllegalArgumentException} is thrown. </li> * <li>{@link EpollChannelConfig#getEpollMode()} must be {@link EpollMode#LEVEL_TRIGGERED} for this and the * target {@link AbstractEpollStreamChannel}</li> * </ul> * */ public final ChannelFuture spliceTo(final AbstractEpollStreamChannel ch, final int len, final ChannelPromise promise) { if (ch.eventLoop().unwrap() != eventLoop().unwrap()) { throw new IllegalArgumentException("EventLoops are not the same."); } if (len < 0) { throw new IllegalArgumentException("len: " + len + " (expected: >= 0)"); } if (ch.config().getEpollMode() != EpollMode.LEVEL_TRIGGERED || config().getEpollMode() != EpollMode.LEVEL_TRIGGERED) { throw new IllegalStateException("spliceTo() supported only when using " + EpollMode.LEVEL_TRIGGERED); } checkNotNull(promise, "promise"); if (!isOpen()) { promise.tryFailure(CLOSED_CHANNEL_EXCEPTION); } else { SpliceInTask task = new SpliceInChannelTask(ch, len, checkNotNull(promise, "promise")); spliceQueue.add(task); failSpliceIfClosed(promise); } return promise; } /** * Splice from this {@link AbstractEpollStreamChannel} to another {@link FileDescriptor}. * The {@code offset} is the offset for the {@link FileDescriptor} and {@code len} is the * number of bytes to splice. If using {@link Integer#MAX_VALUE} it will splice until the * {@link ChannelFuture} was canceled or it was failed. 
* * Please note: * <ul> * <li>{@link EpollChannelConfig#getEpollMode()} must be {@link EpollMode#LEVEL_TRIGGERED} for this * {@link AbstractEpollStreamChannel}</li> * <li>the {@link FileDescriptor} will not be closed after the {@link ChannelFuture} is notified</li> * </ul> */ public final ChannelFuture spliceTo(final FileDescriptor ch, final int offset, final int len) { return spliceTo(ch, offset, len, newPromise()); } /** * Splice from this {@link AbstractEpollStreamChannel} to another {@link FileDescriptor}. * The {@code offset} is the offset for the {@link FileDescriptor} and {@code len} is the * number of bytes to splice. If using {@link Integer#MAX_VALUE} it will splice until the * {@link ChannelFuture} was canceled or it was failed. * * Please note: * <ul> * <li>{@link EpollChannelConfig#getEpollMode()} must be {@link EpollMode#LEVEL_TRIGGERED} for this * {@link AbstractEpollStreamChannel}</li> * <li>the {@link FileDescriptor} will not be closed after the {@link ChannelPromise} is notified</li> * </ul> */ public final ChannelFuture spliceTo(final FileDescriptor ch, final int offset, final int len, final ChannelPromise promise) { if (len < 0) { throw new IllegalArgumentException("len: " + len + " (expected: >= 0)"); } if (offset < 0) { throw new IllegalArgumentException("offset must be >= 0 but was " + offset); } if (config().getEpollMode() != EpollMode.LEVEL_TRIGGERED) { throw new IllegalStateException("spliceTo() supported only when using " + EpollMode.LEVEL_TRIGGERED); } checkNotNull(promise, "promise"); if (!isOpen()) { promise.tryFailure(CLOSED_CHANNEL_EXCEPTION); } else { SpliceInTask task = new SpliceFdTask(ch, offset, len, checkNotNull(promise, "promise")); spliceQueue.add(task); failSpliceIfClosed(promise); } return promise; } private void failSpliceIfClosed(ChannelPromise promise) { if (!isOpen()) { // Seems like the Channel was closed in the meantime try to fail the promise to prevent any // cases where a future may not be notified otherwise. if (promise.tryFailure(CLOSED_CHANNEL_EXCEPTION)) { eventLoop().execute(new OneTimeTask() { @Override public void run() { // Call this via the EventLoop as it is a MPSC queue. clearSpliceQueue(); } }); } } } /** * Write bytes form the given {@link ByteBuf} to the underlying {@link java.nio.channels.Channel}. 
* @param buf the {@link ByteBuf} from which the bytes should be written */ private boolean writeBytes(ChannelOutboundBuffer in, ByteBuf buf, int writeSpinCount) throws Exception { int readableBytes = buf.readableBytes(); if (readableBytes == 0) { in.remove(); return true; } if (buf.hasMemoryAddress() || buf.nioBufferCount() == 1) { int writtenBytes = doWriteBytes(buf, writeSpinCount); in.removeBytes(writtenBytes); return writtenBytes == readableBytes; } else { ByteBuffer[] nioBuffers = buf.nioBuffers(); return writeBytesMultiple(in, nioBuffers, nioBuffers.length, readableBytes, writeSpinCount); } } private boolean writeBytesMultiple( ChannelOutboundBuffer in, IovArray array, int writeSpinCount) throws IOException { long expectedWrittenBytes = array.size(); final long initialExpectedWrittenBytes = expectedWrittenBytes; int cnt = array.count(); assert expectedWrittenBytes != 0; assert cnt != 0; boolean done = false; int offset = 0; int end = offset + cnt; for (int i = writeSpinCount - 1; i >= 0; i--) { long localWrittenBytes = fd().writevAddresses(array.memoryAddress(offset), cnt); if (localWrittenBytes == 0) { break; } expectedWrittenBytes -= localWrittenBytes; if (expectedWrittenBytes == 0) { // Written everything, just break out here (fast-path) done = true; break; } do { long bytes = array.processWritten(offset, localWrittenBytes); if (bytes == -1) { // incomplete write break; } else { offset++; cnt--; localWrittenBytes -= bytes; } } while (offset < end && localWrittenBytes > 0); } in.removeBytes(initialExpectedWrittenBytes - expectedWrittenBytes); return done; } private boolean writeBytesMultiple( ChannelOutboundBuffer in, ByteBuffer[] nioBuffers, int nioBufferCnt, long expectedWrittenBytes, int writeSpinCount) throws IOException { assert expectedWrittenBytes != 0; final long initialExpectedWrittenBytes = expectedWrittenBytes; boolean done = false; int offset = 0; int end = offset + nioBufferCnt; for (int i = writeSpinCount - 1; i >= 0; i--) { long localWrittenBytes = fd().writev(nioBuffers, offset, nioBufferCnt); if (localWrittenBytes == 0) { break; } expectedWrittenBytes -= localWrittenBytes; if (expectedWrittenBytes == 0) { // Written everything, just break out here (fast-path) done = true; break; } do { ByteBuffer buffer = nioBuffers[offset]; int pos = buffer.position(); int bytes = buffer.limit() - pos; if (bytes > localWrittenBytes) { buffer.position(pos + (int) localWrittenBytes); // incomplete write break; } else { offset++; nioBufferCnt--; localWrittenBytes -= bytes; } } while (offset < end && localWrittenBytes > 0); } in.removeBytes(initialExpectedWrittenBytes - expectedWrittenBytes); return done; } /** * Write a {@link DefaultFileRegion} * * @param region the {@link DefaultFileRegion} from which the bytes should be written * @return amount the amount of written bytes */ private boolean writeFileRegion( ChannelOutboundBuffer in, DefaultFileRegion region, int writeSpinCount) throws Exception { final long regionCount = region.count(); if (region.transfered() >= regionCount) { in.remove(); return true; } final long baseOffset = region.position(); boolean done = false; long flushedAmount = 0; for (int i = writeSpinCount - 1; i >= 0; i--) { final long offset = region.transfered(); final long localFlushedAmount = Native.sendfile(fd().intValue(), region, baseOffset, offset, regionCount - offset); if (localFlushedAmount == 0) { break; } flushedAmount += localFlushedAmount; if (region.transfered() >= regionCount) { done = true; break; } } if (flushedAmount > 0) { 
            in.progress(flushedAmount);
        }

        if (done) {
            in.remove();
        }
        return done;
    }

    @Override
    protected void doWrite(ChannelOutboundBuffer in) throws Exception {
        int writeSpinCount = config().getWriteSpinCount();
        for (;;) {
            final int msgCount = in.size();

            if (msgCount == 0) {
                // Wrote all messages.
                clearFlag(Native.EPOLLOUT);
                // Return here so we do not set the EPOLLOUT flag.
                return;
            }

            // Do a gathering write if the outbound buffer entries start with more than one ByteBuf.
            if (msgCount > 1 && in.current() instanceof ByteBuf) {
                if (!doWriteMultiple(in, writeSpinCount)) {
                    // Break the loop so the EPOLLOUT flag is set below.
                    break;
                }

                // We do not break the loop here even if the outbound buffer was flushed completely,
                // because a user might have triggered another write and flush when we notify his or her
                // listeners.
            } else { // msgCount == 1
                if (!doWriteSingle(in, writeSpinCount)) {
                    // Break the loop so the EPOLLOUT flag is set below.
                    break;
                }
            }
        }
        // The underlying descriptor cannot accept all data currently, so set the EPOLLOUT flag to be woken up
        // when it can accept more data.
        setFlag(Native.EPOLLOUT);
    }

    protected boolean doWriteSingle(ChannelOutboundBuffer in, int writeSpinCount) throws Exception {
        // The outbound buffer contains only one message or it contains a file region.
        Object msg = in.current();
        if (msg instanceof ByteBuf) {
            ByteBuf buf = (ByteBuf) msg;
            if (!writeBytes(in, buf, writeSpinCount)) {
                // Was not able to write everything, so break here; we will get notified again once
                // the network stack can handle more writes.
                return false;
            }
        } else if (msg instanceof DefaultFileRegion) {
            DefaultFileRegion region = (DefaultFileRegion) msg;
            if (!writeFileRegion(in, region, writeSpinCount)) {
                // Was not able to write everything, so break here; we will get notified again once
                // the network stack can handle more writes.
                return false;
            }
        } else if (msg instanceof SpliceOutTask) {
            if (!((SpliceOutTask) msg).spliceOut()) {
                return false;
            }
            in.remove();
        } else {
            // Should never reach here.
            throw new Error();
        }
        return true;
    }

    private boolean doWriteMultiple(ChannelOutboundBuffer in, int writeSpinCount) throws Exception {
        if (PlatformDependent.hasUnsafe()) {
            // This means we can cast to IovArray and write the IovArray directly.
            IovArray array = IovArrayThreadLocal.get(in);
            int cnt = array.count();
            if (cnt >= 1) {
                // TODO: Handle the case where cnt == 1 specially.
                if (!writeBytesMultiple(in, array, writeSpinCount)) {
                    // Was not able to write everything, so break here; we will get notified again once
                    // the network stack can handle more writes.
                    return false;
                }
            } else {
                // cnt == 0, which means the outbound buffer contained empty buffers only.
                in.removeBytes(0);
            }
        } else {
            ByteBuffer[] buffers = in.nioBuffers();
            int cnt = in.nioBufferCount();
            if (cnt >= 1) {
                // TODO: Handle the case where cnt == 1 specially.
                if (!writeBytesMultiple(in, buffers, cnt, in.nioBufferSize(), writeSpinCount)) {
                    // Was not able to write everything, so break here; we will get notified again once
                    // the network stack can handle more writes.
                    return false;
                }
            } else {
                // cnt == 0, which means the outbound buffer contained empty buffers only.
in.removeBytes(0); } } return true; } @Override protected Object filterOutboundMessage(Object msg) { if (msg instanceof ByteBuf) { ByteBuf buf = (ByteBuf) msg; if (!buf.hasMemoryAddress() && (PlatformDependent.hasUnsafe() || !buf.isDirect())) { if (buf instanceof CompositeByteBuf) { // Special handling of CompositeByteBuf to reduce memory copies if some of the Components // in the CompositeByteBuf are backed by a memoryAddress. CompositeByteBuf comp = (CompositeByteBuf) buf; if (!comp.isDirect() || comp.nioBufferCount() > Native.IOV_MAX) { // more then 1024 buffers for gathering writes so just do a memory copy. buf = newDirectBuffer(buf); assert buf.hasMemoryAddress(); } } else { // We can only handle buffers with memory address so we need to copy if a non direct is // passed to write. buf = newDirectBuffer(buf); assert buf.hasMemoryAddress(); } } return buf; } if (msg instanceof DefaultFileRegion || msg instanceof SpliceOutTask) { return msg; } throw new UnsupportedOperationException( "unsupported message type: " + StringUtil.simpleClassName(msg) + EXPECTED_TYPES); } protected void shutdownOutput0(final ChannelPromise promise) { try { fd().shutdown(false, true); promise.setSuccess(); } catch (Throwable cause) { promise.setFailure(cause); } } @Override protected void doClose() throws Exception { try { ChannelPromise promise = connectPromise; if (promise != null) { // Use tryFailure() instead of setFailure() to avoid the race against cancel(). promise.tryFailure(CLOSED_CHANNEL_EXCEPTION); connectPromise = null; } ScheduledFuture<?> future = connectTimeoutFuture; if (future != null) { future.cancel(false); connectTimeoutFuture = null; } // Calling super.doClose() first so splceTo(...) will fail on next call. super.doClose(); } finally { safeClosePipe(pipeIn); safeClosePipe(pipeOut); clearSpliceQueue(); } } private void clearSpliceQueue() { for (;;) { SpliceInTask task = spliceQueue.poll(); if (task == null) { break; } task.promise.tryFailure(CLOSED_CHANNEL_EXCEPTION); } } /** * Connect to the remote peer */ protected boolean doConnect(SocketAddress remoteAddress, SocketAddress localAddress) throws Exception { if (localAddress != null) { fd().bind(localAddress); } boolean success = false; try { boolean connected = fd().connect(remoteAddress); if (!connected) { setFlag(Native.EPOLLOUT); } success = true; return connected; } finally { if (!success) { doClose(); } } } private void safeClosePipe(FileDescriptor fd) { if (fd != null) { try { fd.close(); } catch (IOException e) { if (logger.isWarnEnabled()) { logger.warn("Error while closing a pipe", e); } } } } class EpollStreamUnsafe extends AbstractEpollUnsafe { private void handleReadException(ChannelPipeline pipeline, ByteBuf byteBuf, Throwable cause, boolean close) { if (byteBuf != null) { if (byteBuf.isReadable()) { readPending = false; pipeline.fireChannelRead(byteBuf); } else { byteBuf.release(); } } recvBufAllocHandle().readComplete(); pipeline.fireChannelReadComplete(); pipeline.fireExceptionCaught(cause); if (close || cause instanceof IOException) { shutdownInput(); } } @Override public void connect( final SocketAddress remoteAddress, final SocketAddress localAddress, final ChannelPromise promise) { if (!promise.setUncancellable() || !ensureOpen(promise)) { return; } try { if (connectPromise != null) { throw new IllegalStateException("connection attempt already made"); } boolean wasActive = isActive(); if (doConnect(remoteAddress, localAddress)) { fulfillConnectPromise(promise, wasActive); } else { connectPromise = promise; 
requestedRemoteAddress = remoteAddress; // Schedule connect timeout. int connectTimeoutMillis = config().getConnectTimeoutMillis(); if (connectTimeoutMillis > 0) { connectTimeoutFuture = eventLoop().schedule(new Runnable() { @Override public void run() { ChannelPromise connectPromise = AbstractEpollStreamChannel.this.connectPromise; ConnectTimeoutException cause = new ConnectTimeoutException("connection timed out: " + remoteAddress); if (connectPromise != null && connectPromise.tryFailure(cause)) { close(voidPromise()); } } }, connectTimeoutMillis, TimeUnit.MILLISECONDS); } promise.addListener(new ChannelFutureListener() { @Override public void operationComplete(ChannelFuture future) throws Exception { if (future.isCancelled()) { if (connectTimeoutFuture != null) { connectTimeoutFuture.cancel(false); } connectPromise = null; close(voidPromise()); } } }); } } catch (Throwable t) { closeIfClosed(); promise.tryFailure(annotateConnectException(t, remoteAddress)); } } private void fulfillConnectPromise(ChannelPromise promise, boolean wasActive) { if (promise == null) { // Closed via cancellation and the promise has been notified already. return; } active = true; // trySuccess() will return false if a user cancelled the connection attempt. boolean promiseSet = promise.trySuccess(); // Regardless if the connection attempt was cancelled, channelActive() event should be triggered, // because what happened is what happened. if (!wasActive && isActive()) { pipeline().fireChannelActive(); } // If a user cancelled the connection attempt, close the channel, which is followed by channelInactive(). if (!promiseSet) { close(voidPromise()); } } private void fulfillConnectPromise(ChannelPromise promise, Throwable cause) { if (promise == null) { // Closed via cancellation and the promise has been notified already. return; } // Use tryFailure() instead of setFailure() to avoid the race against cancel(). promise.tryFailure(cause); closeIfClosed(); } private void finishConnect() { // Note this method is invoked by the event loop only if the connection attempt was // neither cancelled nor timed out. assert eventLoop().inEventLoop(); boolean connectStillInProgress = false; try { boolean wasActive = isActive(); if (!doFinishConnect()) { connectStillInProgress = true; return; } fulfillConnectPromise(connectPromise, wasActive); } catch (Throwable t) { fulfillConnectPromise(connectPromise, annotateConnectException(t, requestedRemoteAddress)); } finally { if (!connectStillInProgress) { // Check for null as the connectTimeoutFuture is only created if a connectTimeoutMillis > 0 is used // See https://github.com/netty/netty/issues/1770 if (connectTimeoutFuture != null) { connectTimeoutFuture.cancel(false); } connectPromise = null; } } } @Override void epollOutReady() { if (connectPromise != null) { // pending connect which is now complete so handle it. 
finishConnect(); } else { super.epollOutReady(); } } /** * Finish the connect */ private boolean doFinishConnect() throws Exception { if (fd().finishConnect()) { clearFlag(Native.EPOLLOUT); return true; } else { setFlag(Native.EPOLLOUT); return false; } } @Override protected EpollRecvByteAllocatorHandle newEpollHandle(RecvByteBufAllocator.Handle handle) { return new EpollRecvByteAllocatorStreamingHandle(handle, isFlagSet(Native.EPOLLET)); } @Override void epollInReady() { if (fd().isInputShutdown()) { return; } final ChannelConfig config = config(); boolean edgeTriggered = isFlagSet(Native.EPOLLET); if (!readPending && !edgeTriggered && !config.isAutoRead()) { // ChannelConfig.setAutoRead(false) was called in the meantime clearEpollIn0(); return; } final ChannelPipeline pipeline = pipeline(); final ByteBufAllocator allocator = config.getAllocator(); final EpollRecvByteAllocatorHandle allocHandle = recvBufAllocHandle(); allocHandle.reset(config); ByteBuf byteBuf = null; boolean close = false; try { do { SpliceInTask spliceTask = spliceQueue.peek(); if (spliceTask != null) { if (spliceTask.spliceIn(allocHandle)) { // We need to check if it is still active as if not we removed all SpliceTasks in // doClose(...) if (isActive()) { spliceQueue.remove(); } continue; } else { break; } } // we use a direct buffer here as the native implementations only be able // to handle direct buffers. byteBuf = allocHandle.allocate(allocator); allocHandle.lastBytesRead(doReadBytes(byteBuf)); if (allocHandle.lastBytesRead() <= 0) { // nothing was read, release the buffer. byteBuf.release(); byteBuf = null; close = allocHandle.lastBytesRead() < 0; break; } readPending = false; allocHandle.incMessagesRead(1); pipeline.fireChannelRead(byteBuf); byteBuf = null; } while (allocHandle.continueReading()); allocHandle.readComplete(); pipeline.fireChannelReadComplete(); if (close) { shutdownInput(); close = false; } } catch (Throwable t) { handleReadException(pipeline, byteBuf, t, close); checkResetEpollIn(edgeTriggered); } finally { // Check if there is a readPending which was not processed yet. // This could be for two reasons: // * The user called Channel.read() or ChannelHandlerContext.read() in channelRead(...) method // * The user called Channel.read() or ChannelHandlerContext.read() in channelReadComplete(...) method // // See https://github.com/netty/netty/issues/2254 if (!readPending && !config.isAutoRead()) { clearEpollIn0(); } } } } protected abstract class SpliceInTask extends MpscLinkedQueueNode<SpliceInTask> { final ChannelPromise promise; int len; protected SpliceInTask(int len, ChannelPromise promise) { this.promise = promise; this.len = len; } @Override public SpliceInTask value() { return this; } abstract boolean spliceIn(RecvByteBufAllocator.Handle handle) throws IOException; protected final int spliceIn(FileDescriptor pipeOut, RecvByteBufAllocator.Handle handle) throws IOException { // calculate the maximum amount of data we are allowed to splice int length = Math.min(handle.guess(), len); int splicedIn = 0; for (;;) { // Splicing until there is nothing left to splice. int localSplicedIn = Native.splice(fd().intValue(), -1, pipeOut.intValue(), -1, length); if (localSplicedIn == 0) { break; } splicedIn += localSplicedIn; length -= localSplicedIn; } return splicedIn; } } // Let it directly implement channelFutureListener as well to reduce object creation. 
    private final class SpliceInChannelTask extends SpliceInTask implements ChannelFutureListener {
        private final AbstractEpollStreamChannel ch;

        SpliceInChannelTask(AbstractEpollStreamChannel ch, int len, ChannelPromise promise) {
            super(len, promise);
            this.ch = ch;
        }

        @Override
        public void operationComplete(ChannelFuture future) throws Exception {
            if (!future.isSuccess()) {
                promise.setFailure(future.cause());
            }
        }

        @Override
        public boolean spliceIn(RecvByteBufAllocator.Handle handle) throws IOException {
            assert ch.eventLoop().inEventLoop();
            if (len == 0) {
                promise.setSuccess();
                return true;
            }
            try {
                // We create the pipe on the target channel as this will allow us to just handle pending writes
                // later in a correct fashion without getting into any ordering issues when spliceTo(...) is called
                // on multiple Channels pointing to one target Channel.
                FileDescriptor pipeOut = ch.pipeOut;
                if (pipeOut == null) {
                    // Create a new pipe as none was created before.
                    FileDescriptor[] pipe = pipe();
                    ch.pipeIn = pipe[0];
                    pipeOut = ch.pipeOut = pipe[1];
                }

                int splicedIn = spliceIn(pipeOut, handle);
                if (splicedIn > 0) {
                    // Integer.MAX_VALUE is a special value which will result in splicing forever.
                    if (len != Integer.MAX_VALUE) {
                        len -= splicedIn;
                    }

                    // Depending on whether we are done with splicing the inbound data, we set the right promise for
                    // the outbound splicing.
                    final ChannelPromise splicePromise;
                    if (len == 0) {
                        splicePromise = promise;
                    } else {
                        splicePromise = ch.newPromise().addListener(this);
                    }

                    boolean autoRead = config().isAutoRead();

                    // Just call unsafe().write(...) and flush() as we do not want to traverse the whole pipeline for
                    // this case.
                    ch.unsafe().write(new SpliceOutTask(ch, splicedIn, autoRead), splicePromise);
                    ch.unsafe().flush();
                    if (autoRead && !splicePromise.isDone()) {
                        // The write was not done, which means the target channel was not writable. In this case we
                        // need to disable reading until we are done with splicing to the target channel because:
                        //
                        // - The user may want to trigger another splice operation once the splicing is complete.
config().setAutoRead(false); } } return len == 0; } catch (Throwable cause) { promise.setFailure(cause); return true; } } } private final class SpliceOutTask { private final AbstractEpollStreamChannel ch; private final boolean autoRead; private int len; SpliceOutTask(AbstractEpollStreamChannel ch, int len, boolean autoRead) { this.ch = ch; this.len = len; this.autoRead = autoRead; } public boolean spliceOut() throws Exception { assert ch.eventLoop().inEventLoop(); try { int splicedOut = Native.splice(ch.pipeIn.intValue(), -1, ch.fd().intValue(), -1, len); len -= splicedOut; if (len == 0) { if (autoRead) { // AutoRead was used and we spliced everything so start reading again config().setAutoRead(true); } return true; } return false; } catch (IOException e) { if (autoRead) { // AutoRead was used and we spliced everything so start reading again config().setAutoRead(true); } throw e; } } } private final class SpliceFdTask extends SpliceInTask { private final FileDescriptor fd; private final ChannelPromise promise; private int offset; SpliceFdTask(FileDescriptor fd, int offset, int len, ChannelPromise promise) { super(len, promise); this.fd = fd; this.promise = promise; this.offset = offset; } @Override public SpliceFdTask value() { return this; } @Override public boolean spliceIn(RecvByteBufAllocator.Handle handle) throws IOException { assert eventLoop().inEventLoop(); if (len == 0) { promise.setSuccess(); return true; } try { FileDescriptor[] pipe = pipe(); FileDescriptor pipeIn = pipe[0]; FileDescriptor pipeOut = pipe[1]; try { int splicedIn = spliceIn(pipeOut, handle); if (splicedIn > 0) { // Integer.MAX_VALUE is a special value which will result in splice forever. if (len != Integer.MAX_VALUE) { len -= splicedIn; } do { int splicedOut = Native.splice(pipeIn.intValue(), -1, fd.intValue(), offset, splicedIn); splicedIn -= splicedOut; } while (splicedIn > 0); if (len == 0) { promise.setSuccess(); return true; } } return false; } finally { safeClosePipe(pipeIn); safeClosePipe(pipeOut); } } catch (Throwable cause) { promise.setFailure(cause); return true; } } } }
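/*
 * Usage sketch (not part of the Netty source above): shows the intended use of spliceTo(...) for a
 * kernel-level TCP proxy. EpollSocketChannel lives outside this file and is assumed here; both
 * channels are assumed to be connected, registered to the same EventLoop and configured with
 * EpollMode.LEVEL_TRIGGERED before registration (e.g. via their bootstrap options), since those are
 * the preconditions the spliceTo(...) methods above enforce.
 */
final class SpliceProxySketch {

    static ChannelFuture spliceForever(EpollSocketChannel inbound, EpollSocketChannel outbound) {
        // Integer.MAX_VALUE means "keep splicing until the returned future is cancelled or fails",
        // so received bytes are moved between the two sockets by the kernel without ever being
        // copied into ByteBufs on the JVM heap.
        return inbound.spliceTo(outbound, Integer.MAX_VALUE);
    }
}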
/* * Copyright 2012 OSBI Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.saiku.olap.util; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.util.Enumeration; import java.util.Locale; import java.util.Properties; import org.apache.commons.lang.LocaleUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SaikuProperties extends Properties{ private static final long serialVersionUID = 4835692048422342660L; private static final Logger log = LoggerFactory.getLogger(SaikuProperties.class); private final PropertySource propertySource; private int populateCount; private static SaikuProperties instance = instance(); private static final String SAIKU_PROPERTIES = "saiku.properties"; /** * Returns the singleton. * * @return Singleton instance */ private static synchronized SaikuProperties instance() { if (instance == null) { instance = new SaikuProperties(); instance.populate(); } return instance; } public SaikuProperties() { this.propertySource = new FilePropertySource(new File(SAIKU_PROPERTIES)); } public interface PropertySource { InputStream openStream(); boolean isStale(); String getDescription(); } static class FilePropertySource implements PropertySource { private final File file; private long lastModified; FilePropertySource(File file) { this.file = file; this.lastModified = 0; } public InputStream openStream() { try { this.lastModified = file.lastModified(); FileInputStream in = new FileInputStream(file); log.info("Opening properties file: '" + file + "'"); return in; } catch (FileNotFoundException e) { throw new RuntimeException ( "Error while opening properties file: '" + file + "'",e); } } public boolean isStale() { return file.exists() && file.lastModified() > this.lastModified; } public String getDescription() { return "file=" + file.getAbsolutePath() + " (exists=" + file.exists() + ")"; } } /** * Implementation of {@link PropertySource} which reads from a {@link URL}. 
*/ static class UrlPropertySource implements PropertySource { private final URL url; private long lastModified; UrlPropertySource(URL url) { this.url = url; } private URLConnection getConnection() { try { return url.openConnection(); } catch (IOException e) { throw new RuntimeException ("Error while opening properties file '" + url + "'", e); } } public InputStream openStream() { try { final URLConnection connection = getConnection(); this.lastModified = connection.getLastModified(); return connection.getInputStream(); } catch (IOException e) { throw new RuntimeException( "Error while opening properties file '" + url + "'", e); } } public boolean isStale() { final long lastModified = getConnection().getLastModified(); return lastModified > this.lastModified; } public String getDescription() { return url.toExternalForm(); } } /** * Loads saiku.properties from: 1) the file "$PWD/" 2) CLASSPATH * 3) the system properties */ public void populate() { loadIfStale(propertySource); URL url = null; File file = new File(SAIKU_PROPERTIES); if (file.exists() && file.isFile()) { // Read properties file "saiku.properties" from PWD, if it exists. try { url = file.toURI().toURL(); } catch (MalformedURLException e) { log.warn( "Saiku: file '" + file.getAbsolutePath() + "' could not be loaded", e); } } else { // Then try load it from classloader url = SaikuProperties.class.getClassLoader().getResource( SAIKU_PROPERTIES); } if (url != null) { load(new UrlPropertySource(url)); } else { log.warn( "saiku.properties can't be found under '" + new File(".").getAbsolutePath() + "' or classloader"); } // copy in all system properties which start with "saiku." int count = 0; for (Enumeration<Object> keys = System.getProperties().keys(); keys.hasMoreElements();) { String key = (String) keys.nextElement(); String value = System.getProperty(key); if (key.startsWith("saiku.")) { if (log.isDebugEnabled()) { log.debug("System property : populate: key=" + key + ", value=" + value); } instance.setProperty(key, value); count++; } } if (populateCount++ == 0) { log.info( "Saiku: loaded " + count + " system properties"); } } private void loadIfStale(PropertySource source) { if (source.isStale()) { if (log.isDebugEnabled()) { log.debug("Saiku: loading " + source.getDescription()); } load(source); } } private void load(final PropertySource source) { try { instance.load(source.openStream()); if (populateCount == 0) { log.info( "Saiku: properties loaded from '" + source.getDescription() + "'"); instance.list(System.out); } } catch (IOException e) { log.error( "Saiku: error while loading properties " + "from '" + source.getDescription() + "' (" + e.getMessage() + ")"); } } public static final Boolean olapDefaultNonEmpty = getPropBoolean("saiku.olap.nonempty","false"); public static final String webExportCsvName = getPropString("saiku.web.export.csv.name","saiku-export"); public static final String webExportCsvDelimiter = getPropString("saiku.web.export.csv.delimiter", ","); public static final String webExportCsvTextEscape = getPropString("saiku.web.export.csv.textEscape", "\""); public static final String webExportCsvTextEncoding = getPropString("saiku.web.export.csv.textEncoding", "UTF-8"); public static final Boolean webExportCsvUseFormattedValue = getPropBoolean("saiku.web.export.csv.useFormattedValue", "false"); public static final String webExportCsvNumberFormat = getPropString("saiku.web.export.csv.numberformat", "#,##.00"); public static final String webExportCsvDateFormat = getPropString("saiku.web.export.csv.dateformat", 
"dd-MMM-yyyy"); public static final String webExportCsvTimestampFormat = getPropString("saiku.web.export.csv.timestampformat", "dd-MMM-yyyy HH:mm:ss"); public static final String webExportExcelName = getPropString("saiku.web.export.excel.name","saiku-export"); public static final String webExportExcelFormat = getPropString("saiku.web.export.excel.format","xlsx"); public static final String webExportExcelDefaultNumberFormat = getPropString("saiku.web.export.excel.numberformat","#,##0.00"); public static final String formatDefautNumberFormat = getPropString("saiku.format.numberformat","#,##0.00"); public static final Locale locale = getLocale(); public static final Boolean olapConvertQuery = getPropBoolean("saiku.olap.convert.query","false"); private static Locale getLocale() { String locale = null; try { locale = getPropString("saiku.format.default.locale",null); if (locale != null) { return LocaleUtils.toLocale(locale); } } catch (Exception e) { log.warn("Property: saiku.format.default.locale with value: " + locale + ", cannot be used for a Locale, falling back to default locale: " + Locale.getDefault(), e); } return Locale.getDefault(); } private static Boolean getPropBoolean(String key, String defaultValue) { Boolean ret; if (instance.containsKey(key)) { ret = Boolean.parseBoolean(instance.getProperty(key)); } else { ret = Boolean.parseBoolean(defaultValue); } return ret; } private static String getPropString(String key, String defaultValue) { String ret; if (instance.containsKey(key)) { ret = instance.getProperty(key); } else { ret = defaultValue; } return ret; } }
package com.suscipio_solutions.consecro_mud.Abilities.Misc; import java.util.List; import java.util.Vector; import com.suscipio_solutions.consecro_mud.Abilities.StdAbility; import com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability; import com.suscipio_solutions.consecro_mud.Abilities.interfaces.HealthCondition; import com.suscipio_solutions.consecro_mud.Common.interfaces.CMMsg; import com.suscipio_solutions.consecro_mud.Items.interfaces.Electronics; import com.suscipio_solutions.consecro_mud.Items.interfaces.Weapon; import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB; import com.suscipio_solutions.consecro_mud.Races.interfaces.Race; import com.suscipio_solutions.consecro_mud.core.CMLib; import com.suscipio_solutions.consecro_mud.core.CMParms; import com.suscipio_solutions.consecro_mud.core.CMProps; import com.suscipio_solutions.consecro_mud.core.CMath; import com.suscipio_solutions.consecro_mud.core.interfaces.Environmental; import com.suscipio_solutions.consecro_mud.core.interfaces.Physical; import com.suscipio_solutions.consecro_mud.core.interfaces.Tickable; @SuppressWarnings({"unchecked","rawtypes"}) public class Injury extends StdAbility implements HealthCondition { @Override public String ID() { return "Injury"; } private final static String localizedName = CMLib.lang().L("Injury"); @Override public String name() { return localizedName; } protected CMMsg lastMsg=null; protected String lastLoc=null; public int lastHP=-1; //public final static String[] BODYPARTSTR={ // "ANTENEA","EYE","EAR","HEAD","NECK","ARM","HAND","TORSO","LEG","FOOT", // "NOSE","GILL","MOUTH","WAIST","TAIL","WING"}; public final static int[] INJURYCHANCE={ 3,3,3,11,3,12,5,35,13,5,3,0,0,3,3,3}; @Override public String getHealthConditionDesc() { final StringBuffer buf=new StringBuffer(""); Object[] O=null; Vector V=null; try { if(injuries!=null) for(int i=0;i<Race.BODY_PARTS;i++) { V=injuries[i]; if(V!=null) for(int i2=0;i2<V.size();i2++) { O=(Object[])V.elementAt(i2); String wounds=""; final int dmg = ((Integer)O[1]).intValue(); if (dmg<5) wounds=("a bruised "); else if (dmg<10) wounds=("a scratched "); else if (dmg<20) wounds=("a cut "); else if (dmg<30) wounds=("a sliced "); else if (dmg<40) wounds=("a gashed "); else if (dmg<60) wounds=("a bloody "); else if ((dmg<75)||(i==Race.BODY_TORSO)) wounds=("a mangled "); else if ((dmg<100)||(i==Race.BODY_HEAD)) wounds=("a dangling "); else wounds=("a shredded "); buf.append(", "+wounds+((String)O[0]).toLowerCase()+" ("+dmg+"%)"); } } } catch(final Exception e){} if(buf.length()==0) return ""; return buf.substring(1); } @Override public String displayText() { final String buf=getHealthConditionDesc(); if(buf.length()==0) return ""; return "(Injuries:"+buf+")"; } @Override protected int canAffectCode(){return CAN_MOBS;} @Override protected int canTargetCode(){return CAN_MOBS;} @Override public int abstractQuality(){return Ability.QUALITY_INDIFFERENT;} @Override public boolean putInCommandlist(){return false;} @Override public boolean canBeUninvoked(){return true;} @Override public int classificationCode(){return Ability.ACODE_PROPERTY;} @Override public int usageType(){return USAGE_MOVEMENT|USAGE_MANA;} public Vector[] injuries=new Vector[Race.BODY_PARTS]; @Override public void unInvoke() { final Environmental E=affected; super.unInvoke(); if((E instanceof MOB)&&(canBeUninvoked())&&(!((MOB)E).amDead())) ((MOB)E).tell(L("Your injuries are healed.")); } @Override public String text() { Vector V=null; Object[] O=null; final StringBuffer buf=new 
StringBuffer(""); if(injuries!=null) for(int i=0;i<Race.BODY_PARTS;i++) { V=injuries[i]; if(V!=null) for(int i2=0;i2<V.size();i2++) { O=(Object[])V.elementAt(i2); buf.append(i+":"+((String)O[0]).toLowerCase()+":"+((Integer)O[1]).intValue()+";"); } } return buf.toString(); } @Override public void setMiscText(String txt) { if(txt.startsWith("+")) { if(affected instanceof MOB) { final MOB mob=(MOB)affected; txt=txt.substring(1); final int x=txt.indexOf('='); if(x<0) return; final String chosenName=txt.substring(0,x); final String amount=txt.substring(x+1); Amputation A=(Amputation)mob.fetchEffect("Amputation"); if(A==null) A=new Amputation(); final List<String> remains=A.remainingLimbNameSet(mob); if(mob.charStats().getBodyPart(Race.BODY_HEAD)>0) remains.add("head"); if(mob.charStats().getBodyPart(Race.BODY_TORSO)>0) remains.add("torso"); final int chosenOne=remains.indexOf(chosenName); if(chosenOne<0) return; if(injuries==null) injuries=new Vector[Race.BODY_PARTS]; int bodyLoc=-1; for(int i=0;i<Race.BODY_PARTS;i++) if((" "+remains.get(chosenOne).toUpperCase()).endsWith(" "+Race.BODYPARTSTR[i])) { bodyLoc=i; break;} if(bodyLoc>=0) { Vector bodyVec=injuries[bodyLoc]; if(bodyVec==null){ injuries[bodyLoc]=new Vector(); bodyVec=injuries[bodyLoc];} int whichInjury=-1; for(int i=0;i<bodyVec.size();i++) { final Object[] O=(Object[])bodyVec.elementAt(i); if(((String)O[0]).equalsIgnoreCase(remains.get(chosenOne))) { whichInjury=i; break;} } Object[] O=null; if(whichInjury<0) { O=new Object[2]; O[0]=remains.get(chosenOne).toLowerCase(); O[1]=Integer.valueOf(0); bodyVec.addElement(O); whichInjury=bodyVec.size()-1; } O=(Object[])bodyVec.elementAt(whichInjury); O[1]=Integer.valueOf(((Integer)O[1]).intValue()+CMath.s_int(amount)); if(((Integer)O[1]).intValue()>100) O[1]=Integer.valueOf(100); } } } else if(txt.indexOf('/')>0) super.setMiscText(txt); else { injuries=new Vector[Race.BODY_PARTS]; final List<String> sets=CMParms.parseSemicolons(txt,true); for(int s=0;s<sets.size();s++) { final String set=sets.get(s); final List<String> V=CMParms.parseAny(set,':',false); if(V.size()==3) { final int part=CMath.s_int(V.get(0)); if((part>=0)&&(part<Race.BODY_PARTS)) { final String msg=V.get(1); final int hurt=CMath.s_int(V.get(V.size()-1)); if(injuries[part]==null) injuries[part] = new Vector(); injuries[part].addElement(new Object[]{msg,Integer.valueOf(hurt)}); } } } } if(affected instanceof MOB) { final MOB mob=(MOB)affected; if(lastHP<0) lastHP=mob.curState().getHitPoints(); } } @Override public boolean tick(Tickable ticking, int tickID) { if((affected instanceof MOB)&&(tickID==Tickable.TICKID_MOB)) { final MOB mob=(MOB)affected; if(mob.curState().getHitPoints()>=mob.maxState().getHitPoints()) { for(int i=0;i<injuries.length;i++) injuries[i]=null; unInvoke(); } else if((mob.curState().getHitPoints()>lastHP)&&(lastHP>=0)) { final Vector choicesToHeal=new Vector(); for(int i=0;i<injuries.length;i++) if(injuries[i]!=null) for(int x=0;x<injuries[i].size();x++) { final int[] choice=new int[2]; choice[0]=i; choice[1]=x; choicesToHeal.addElement(choice); } if(choicesToHeal.size()==0) { for(int i=0;i<injuries.length;i++) injuries[i]=null; unInvoke(); } else { int pct=(int)Math.round(CMath.div(mob.curState().getHitPoints()-lastHP,mob.maxState().getHitPoints())*100.0); if(pct<=0) pct=1; int tries=100; while((pct>0)&&((--tries)>0)&&(choicesToHeal.size()>0)) { final int which=CMLib.dice().roll(1,choicesToHeal.size(),-1); final int[] choice=(int[])choicesToHeal.elementAt(which); if(choice[0]<injuries.length) { final Vector 
V=injuries[choice[0]]; if((V!=null)&&(choice[1]<V.size())) { final Object[] O=(Object[])V.elementAt(choice[1]); if(pct>((Integer)O[1]).intValue()) { V.removeElement(O); if(V.size()==0) injuries[choice[0]]=null; pct-=((Integer)O[1]).intValue(); choicesToHeal.removeElementAt(which); } else { O[1]=Integer.valueOf(((Integer)O[1]).intValue()-pct); pct=0; } } } } } } lastHP=mob.curState().getHitPoints(); } return super.tick(ticking,tickID); } public static String[][] TRANSLATE= { {"<T-HIM-HER>","<T-HIS-HER>"}, {"<T-NAME>","<T-YOUPOSS>"}, {"<T-NAMESELF>","<T-YOUPOSS>"} }; public String fixMessageString(String message, String loc) { if(message==null) return null; int x=message.indexOf("<DAMAGE>"); if(x<0) x=message.indexOf("<DAMAGES>"); if(x<0) return message; int y=Integer.MAX_VALUE; int which=-1; for(int i=0;i<TRANSLATE.length;i++) { final int y1=message.indexOf(TRANSLATE[i][0],x); if((y1>x)&&(y1<y)){ y=y1; which=i;} } if(which>=0) message=message.substring(0,y)+TRANSLATE[which][1]+" "+loc+message.substring(y+TRANSLATE[which][0].length()); return message; } @Override public boolean okMessage(Environmental host, CMMsg msg) { if((msg.target()==affected) &&(msg.targetMinor()==CMMsg.TYP_DAMAGE) &&(msg.value()>0) &&(msg.target() instanceof MOB) &&(msg.targetMessage()!=null) &&(msg.targetMessage().indexOf("<DAMAGE>")>=0) &&(super.miscText.startsWith(msg.source().Name()+"/") ||((CMProps.getIntVar(CMProps.Int.INJPCTHP)>=(int)Math.round(CMath.div(((MOB)msg.target()).curState().getHitPoints(),((MOB)msg.target()).maxState().getHitPoints())*100.0)) &&(CMLib.dice().rollPercentage()<=CMProps.getIntVar(CMProps.Int.INJPCTCHANCE))))) { final MOB mob=(MOB)msg.target(); Amputation A=(Amputation)mob.fetchEffect("Amputation"); if(A==null) A=new Amputation(); final List<String> remains=A.remainingLimbNameSet(mob); if(mob.charStats().getBodyPart(Race.BODY_HEAD)>0) remains.add("head"); if(mob.charStats().getBodyPart(Race.BODY_TORSO)>0) remains.add("torso"); if(remains.size()>0) { final int[] chances=new int[remains.size()]; int total=0; for(int x=0;x<remains.size();x++) { int bodyPart=-1; for(int i=0;i<Race.BODY_PARTS;i++) { if((" "+remains.get(x).toUpperCase()).endsWith(" "+Race.BODYPARTSTR[i])) { bodyPart=i; break;} } if(bodyPart>=0) { final int amount=INJURYCHANCE[bodyPart]; chances[x]+=amount; total+=amount; } } if(total>0) { int randomRoll=CMLib.dice().roll(1,total,-1); int chosenOne=-1; if((lastMsg!=null) &&(lastLoc!=null) &&((msg==lastMsg)||((lastMsg.trailerMsgs()!=null)&&(lastMsg.trailerMsgs().contains(msg)))) &&(remains.contains(lastLoc))) chosenOne=remains.indexOf(lastLoc); else if((super.miscText.startsWith(msg.source().Name()+"/")) &&(remains.contains(super.miscText.substring(msg.source().Name().length()+1)))) { chosenOne=remains.indexOf(super.miscText.substring(msg.source().Name().length()+1)); super.miscText=""; } else for(int i=0;i<chances.length;i++) { if(chances[i]>0) { chosenOne=i; randomRoll-=chances[i]; if(randomRoll<=0) break; } } final int BodyPct=(int)Math.round(CMath.div(msg.value(),mob.maxState().getHitPoints())*100.0); int LimbPct=BodyPct*CMProps.getIntVar(CMProps.Int.INJMULTIPLIER); if(LimbPct<1) LimbPct=1; int bodyLoc=-1; for(int i=0;i<Race.BODY_PARTS;i++) if((" "+remains.get(chosenOne).toUpperCase()).endsWith(" "+Race.BODYPARTSTR[i])) { bodyLoc=i; break;} if(bodyLoc>=0) { lastMsg=msg; lastLoc=remains.get(chosenOne); Vector bodyVec=injuries[bodyLoc]; if(bodyVec==null){ injuries[bodyLoc]=new Vector(); bodyVec=injuries[bodyLoc];} int whichInjury=-1; for(int i=0;i<bodyVec.size();i++) { final 
Object[] O=(Object[])bodyVec.elementAt(i); if(((String)O[0]).equalsIgnoreCase(remains.get(chosenOne))) { whichInjury=i; break;} } final String newTarg=fixMessageString(msg.targetMessage(),remains.get(chosenOne).toLowerCase()); if(!newTarg.equalsIgnoreCase(msg.targetMessage())) { msg.modify(msg.source(),msg.target(),msg.tool(), msg.sourceCode(),fixMessageString(msg.sourceMessage(),remains.get(chosenOne).toLowerCase()), msg.targetCode(),newTarg, msg.othersCode(),fixMessageString(msg.othersMessage(),remains.get(chosenOne).toLowerCase())); Object[] O=null; if(whichInjury<0) { O=new Object[2]; O[0]=remains.get(chosenOne).toLowerCase(); O[1]=Integer.valueOf(0); bodyVec.addElement(O); whichInjury=bodyVec.size()-1; } O=(Object[])bodyVec.elementAt(whichInjury); O[1]=Integer.valueOf(((Integer)O[1]).intValue()+LimbPct); if(((Integer)O[1]).intValue()>100) O[1]=Integer.valueOf(100); if((((Integer)O[1]).intValue()>=100) ||((BodyPct>5) &&((msg.tool() instanceof Electronics)||(BodyPct>=CMProps.getIntVar(CMProps.Int.INJPCTHPAMP))))) { boolean proceed=(CMLib.dice().rollPercentage()<=CMProps.getIntVar(CMProps.Int.INJPCTCHANCEAMP)) &&(mob.phyStats().level()>=CMProps.getIntVar(CMProps.Int.INJMINLEVEL)); if(msg.tool() instanceof Weapon) { switch(((Weapon)msg.tool()).weaponType()) { case Weapon.TYPE_FROSTING: case Weapon.TYPE_GASSING: proceed=false; break; default: break; } } if(Amputation.validamputees[bodyLoc]&&proceed) { bodyVec.removeElement(O); if(bodyVec.size()==0) injuries[bodyLoc]=null; if(A.amputate(mob,A,((String)O[0]).toLowerCase())!=null) { if(mob.fetchEffect(A.ID())==null) mob.addNonUninvokableEffect(A); } } } } } } } } return super.okMessage(host,msg); } @Override public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel) { if((givenTarget!=null)&&(auto)) { if(givenTarget.fetchEffect(ID())!=null) return false; super.tickDown=2; Ability A=(Ability)copyOf(); A.startTickDown(mob,givenTarget,Ability.TICKS_ALMOST_FOREVER); if((commands!=null)&&(commands.size()>0)&&(commands.firstElement() instanceof CMMsg)) { A=givenTarget.fetchEffect(ID()); if(A!=null) return A.okMessage(mob,(CMMsg)commands.firstElement()); return false; } return true; } return super.invoke(mob,commands,givenTarget,auto,asLevel); } }
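/*
 * Usage sketch (not part of the ConsecroMUD source above): illustrates the two miscText formats that
 * Injury.setMiscText(...) parses. The limb names ("left arm", "right leg") and body-part indexes are
 * placeholders; real names come from Amputation.remainingLimbNameSet(mob) plus "head"/"torso", and
 * the indexes refer to positions in Race.BODYPARTSTR. Attaching the effect via invoke(...) mirrors
 * how the ability wires itself onto a target elsewhere in this file.
 */
class InjuryTextFormatSketch {

    @SuppressWarnings({"unchecked", "rawtypes"})
    static void sketch(MOB mob) {
        new Injury().invoke(mob, new Vector(), mob, true, 0);
        Injury attached = (Injury) mob.fetchEffect("Injury");
        if (attached == null) {
            return;
        }

        // Incremental form: "+<limb name>=<percent to add>" adds damage to a single limb.
        attached.setMiscText("+left arm=15");

        // Restore form: semicolon-separated "<body part index>:<limb name>:<percent>" records,
        // matching the format produced by text() when the effect is persisted.
        attached.setMiscText("5:left arm:15;8:right leg:40;");
    }
}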
package com.ctrip.xpipe.redis.console.service.impl; import com.ctrip.xpipe.endpoint.HostPort; import com.ctrip.xpipe.exception.XpipeRuntimeException; import com.ctrip.xpipe.redis.console.constant.XPipeConsoleConstant; import com.ctrip.xpipe.redis.console.controller.api.data.meta.KeeperContainerCreateInfo; import com.ctrip.xpipe.redis.console.exception.BadRequestException; import com.ctrip.xpipe.redis.console.model.*; import com.ctrip.xpipe.redis.console.query.DalQuery; import com.ctrip.xpipe.redis.console.service.*; import com.ctrip.xpipe.spring.RestTemplateFactory; import com.ctrip.xpipe.utils.VisibleForTesting; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.web.client.RestClientException; import org.springframework.web.client.RestOperations; import org.unidal.dal.jdbc.DalException; import java.util.*; @Service public class KeeperContainerServiceImpl extends AbstractConsoleService<KeepercontainerTblDao> implements KeeperContainerService { @Autowired private ClusterService clusterService; @Autowired private DcService dcService; @Autowired private OrganizationService organizationService; @Autowired private RedisService redisService; @Autowired private AzService azService; private RestOperations restTemplate; @Override public KeepercontainerTbl find(final long id) { return queryHandler.handleQuery(new DalQuery<KeepercontainerTbl>() { @Override public KeepercontainerTbl doQuery() throws DalException { return dao.findByPK(id, KeepercontainerTblEntity.READSET_FULL); } }); } @Override public List<KeepercontainerTbl> findAllByDcName(final String dcName) { return queryHandler.handleQuery(new DalQuery<List<KeepercontainerTbl>>() { @Override public List<KeepercontainerTbl> doQuery() throws DalException { return dao.findByDcName(dcName, KeepercontainerTblEntity.READSET_FULL); } }); } @Override public List<KeepercontainerTbl> findAllActiveByDcName(String dcName) { return queryHandler.handleQuery(new DalQuery<List<KeepercontainerTbl>>() { @Override public List<KeepercontainerTbl> doQuery() throws DalException { return dao.findActiveByDcName(dcName, KeepercontainerTblEntity.READSET_FULL); } }); } @Override public List<KeepercontainerTbl> findKeeperCount(String dcName) { return queryHandler.handleQuery(new DalQuery<List<KeepercontainerTbl>>() { @Override public List<KeepercontainerTbl> doQuery() throws DalException { return dao.findKeeperCount(dcName, KeepercontainerTblEntity.READSET_KEEPER_COUNT); } }); } @Override public List<KeepercontainerTbl> findBestKeeperContainersByDcCluster(String dcName, String clusterName) { /* * 1. BU has its own keepercontainer(kc), then find all and see if it satisfied the requirement * 2. Cluster don't have a BU, find default one * 3. BU don't have its own kc, find in the normal kc pool(org id is 0L) */ long clusterOrgId; if (clusterName != null) { ClusterTbl clusterTbl = clusterService.find(clusterName); clusterOrgId = clusterTbl == null ? 
XPipeConsoleConstant.DEFAULT_ORG_ID : clusterTbl.getClusterOrgId(); } else { clusterOrgId = XPipeConsoleConstant.DEFAULT_ORG_ID; } logger.info("cluster org id: {}", clusterOrgId); return queryHandler.handleQuery(new DalQuery<List<KeepercontainerTbl>>() { @Override public List<KeepercontainerTbl> doQuery() throws DalException { List<KeepercontainerTbl> keepercontainerTbls = dao.findKeeperContainerByCluster(dcName, clusterOrgId, KeepercontainerTblEntity.READSET_KEEPER_COUNT_BY_CLUSTER); if (keepercontainerTbls == null || keepercontainerTbls.isEmpty()) { logger.info("cluster {} with org id {} is going to find keepercontainers in normal pool", clusterName, clusterOrgId); keepercontainerTbls = dao.findKeeperContainerByCluster(dcName, XPipeConsoleConstant.DEFAULT_ORG_ID, KeepercontainerTblEntity.READSET_KEEPER_COUNT_BY_CLUSTER); } keepercontainerTbls = filterKeeperFromSameAvailableZone(keepercontainerTbls, dcName); logger.info("find keeper containers: {}", keepercontainerTbls); return keepercontainerTbls; } }); } @Override public List<KeepercontainerTbl> getKeeperContainerByAz(Long azId) { return queryHandler.handleQuery(new DalQuery<List<KeepercontainerTbl>>() { @Override public List<KeepercontainerTbl> doQuery() throws DalException { return dao.findByAzId(azId, KeepercontainerTblEntity.READSET_FULL); } }); } private List<KeepercontainerTbl> filterKeeperFromSameAvailableZone(List<KeepercontainerTbl> keepercontainerTbls, String dcName) { List<AzTbl> dcAvailableZones = azService.getDcAvailableZoneTbls(dcName); if(dcAvailableZones == null || dcAvailableZones.isEmpty()) { return keepercontainerTbls; } else { Set<Long> usedAvailableZones = new HashSet<>(); Map<Long, AzTbl> availableZoneMap = new HashMap(); dcAvailableZones.forEach((availableZone)-> { availableZoneMap.put(availableZone.getId(), availableZone); }); List<KeepercontainerTbl> result = new ArrayList<>(); for (KeepercontainerTbl keepercontainerTbl : keepercontainerTbls) { long azId = keepercontainerTbl.getAzId(); if (!availableZoneMap.containsKey(azId)) throw new XpipeRuntimeException(String.format("This keepercontainer %s:%d has unknown available zone id %d " ,keepercontainerTbl.getKeepercontainerIp(), keepercontainerTbl.getKeepercontainerPort(), azId)); if (availableZoneMap.get(azId).isActive() && usedAvailableZones.add(azId)) { result.add(keepercontainerTbl); } } return result; } } protected void update(KeepercontainerTbl keepercontainerTbl) { queryHandler.handleUpdate(new DalQuery<Integer>() { @Override public Integer doQuery() throws DalException { return dao.updateByPK(keepercontainerTbl, KeepercontainerTblEntity.UPDATESET_FULL); } }); } @Override public void addKeeperContainer(final KeeperContainerCreateInfo createInfo) { KeepercontainerTbl proto = dao.createLocal(); if(keeperContainerAlreadyExists(createInfo)) { throw new IllegalArgumentException("Keeper Container with IP: " + createInfo.getKeepercontainerIp() + " already exists"); } if (!checkIpAndPort(createInfo.getKeepercontainerIp(), createInfo.getKeepercontainerPort())) { throw new IllegalArgumentException(String.format("Keeper container with ip:%s, port:%d is unhealthy", createInfo.getKeepercontainerIp(), createInfo.getKeepercontainerPort())); } DcTbl dcTbl = dcService.find(createInfo.getDcName()); if(dcTbl == null) { throw new IllegalArgumentException("DC name does not exist"); } OrganizationTbl org; if(createInfo.getKeepercontainerOrgId() == 0) { org = new OrganizationTbl().setId(0L); } else { org = 
organizationService.getOrganizationTblByCMSOrganiztionId(createInfo.getKeepercontainerOrgId()); if (org == null) { throw new IllegalArgumentException("Org Id does not exist in database"); } } if (createInfo.getAzName() != null) { AzTbl aztbl = azService.getAvailableZoneTblByAzName(createInfo.getAzName()); if(aztbl == null) { throw new IllegalArgumentException(String.format("available zone %s is not exist", createInfo.getAzName())); } proto.setAzId(aztbl.getId()); } proto.setKeepercontainerDc(dcTbl.getId()) .setKeepercontainerIp(createInfo.getKeepercontainerIp()) .setKeepercontainerPort(createInfo.getKeepercontainerPort()) .setKeepercontainerOrgId(org.getId()) .setKeepercontainerActive(createInfo.isActive()); queryHandler.handleInsert(new DalQuery<Integer>() { @Override public Integer doQuery() throws DalException { return dao.insert(proto); } }); } @Override public List<KeeperContainerCreateInfo> getDcAllKeeperContainers(String dc) { List<KeepercontainerTbl> keepercontainerTbls = queryHandler.handleQuery(() -> dao.findByDcName(dc, KeepercontainerTblEntity.READSET_FULL)); OrgInfoTranslator translator = new OrgInfoTranslator(); return Lists.newArrayList(Lists.transform(keepercontainerTbls, new Function<KeepercontainerTbl, KeeperContainerCreateInfo>() { @Override public KeeperContainerCreateInfo apply(KeepercontainerTbl input) { OrganizationTbl org = translator.getFromXPipeId(input.getKeepercontainerOrgId()); KeeperContainerCreateInfo info = new KeeperContainerCreateInfo() .setDcName(dc).setActive(input.isKeepercontainerActive()) .setKeepercontainerIp(input.getKeepercontainerIp()) .setKeepercontainerPort(input.getKeepercontainerPort()); if (org != null) { info.setKeepercontainerOrgId(org.getOrgId()).setOrgName(org.getOrgName()); } else { info.setKeepercontainerOrgId(0L); } if (input.getAzId() != 0) { AzTbl aztbl = azService.getAvailableZoneTblById(input.getAzId()); if(aztbl == null) { throw new XpipeRuntimeException(String.format("dc %s do not has available zone %d", dc, input.getAzId())); } info.setAzName(aztbl.getAzName()); } return info; } })); } @Override public void updateKeeperContainer(KeeperContainerCreateInfo createInfo) { KeepercontainerTbl keepercontainerTbl = findByIpPort(createInfo.getKeepercontainerIp(), createInfo.getKeepercontainerPort()); if(keepercontainerTbl == null) { throw new IllegalArgumentException(String.format("%s:%d keeper container not found", createInfo.getKeepercontainerIp(), createInfo.getKeepercontainerPort())); } if(createInfo.getKeepercontainerOrgId() != 0L) { OrganizationTbl org = organizationService.getOrganizationTblByCMSOrganiztionId(createInfo.getKeepercontainerOrgId()); keepercontainerTbl.setKeepercontainerOrgId(org.getId()); } else { keepercontainerTbl.setKeepercontainerOrgId(0L); } if (createInfo.getAzName() != null) { AzTbl aztbl = azService.getAvailableZoneTblByAzName(createInfo.getAzName()); if(aztbl == null) { throw new IllegalArgumentException(String.format("available zone %s is not exist", createInfo.getAzName())); } keepercontainerTbl.setAzId(aztbl.getId()); } keepercontainerTbl.setKeepercontainerActive(createInfo.isActive()); queryHandler.handleUpdate(new DalQuery<Integer>() { @Override public Integer doQuery() throws DalException { return dao.updateByPK(keepercontainerTbl, KeepercontainerTblEntity.UPDATESET_FULL); } }); } @Override public void deleteKeeperContainer(String keepercontainerIp, int keepercontainerPort) { KeepercontainerTbl keepercontainerTbl = findByIpPort(keepercontainerIp, keepercontainerPort); if(null == keepercontainerTbl) 
throw new BadRequestException("Cannot find keepercontainer"); List<RedisTbl> keepers = redisService.findAllRedisWithSameIP(keepercontainerIp); if(keepers != null && !keepers.isEmpty()) { throw new BadRequestException(String.format("This keepercontainer %s:%d is not empty, unable to delete!", keepercontainerIp, keepercontainerPort)); } KeepercontainerTbl proto = keepercontainerTbl; queryHandler.handleDelete(new DalQuery<Integer>() { @Override public Integer doQuery() throws DalException { return dao.deleteKeeperContainer(proto, KeepercontainerTblEntity.UPDATESET_FULL); } }, true); } @Override public List<KeeperContainerInfoModel> findAllInfos() { List<KeepercontainerTbl> baseInfos = findContainerBaseInfos(); HashMap<Long, KeeperContainerInfoModel> containerInfoMap = new HashMap<>(); baseInfos.forEach(baseInfo -> { KeeperContainerInfoModel model = new KeeperContainerInfoModel(); model.setId(baseInfo.getKeepercontainerId()); model.setAddr(new HostPort(baseInfo.getKeepercontainerIp(), baseInfo.getKeepercontainerPort())); model.setDcName(baseInfo.getDcInfo().getDcName()); model.setOrgName(baseInfo.getOrgInfo().getOrgName()); if (baseInfo.getAzId() != 0) { AzTbl aztbl = azService.getAvailableZoneTblById(baseInfo.getAzId()); if(aztbl == null) { throw new XpipeRuntimeException(String.format("dc %s do not has available zone %d", baseInfo.getDcInfo().getDcName(), baseInfo.getAzId())); } model.setAzName(aztbl.getAzName()); } containerInfoMap.put(model.getId(), model); }); List<RedisTbl> containerLoad = redisService.findAllKeeperContainerCountInfo(); containerLoad.forEach(load -> { if (!containerInfoMap.containsKey(load.getKeepercontainerId())) return; KeeperContainerInfoModel model = containerInfoMap.get(load.getKeepercontainerId()); model.setKeeperCount(load.getCount()); model.setClusterCount(load.getDcClusterShardInfo().getClusterCount()); model.setShardCount(load.getDcClusterShardInfo().getShardCount()); }); return new ArrayList<>(containerInfoMap.values()); } private List<KeepercontainerTbl> findContainerBaseInfos() { return queryHandler.handleQuery(new DalQuery<List<KeepercontainerTbl>>() { @Override public List<KeepercontainerTbl> doQuery() throws DalException { return dao.findContainerBaseInfo(KeepercontainerTblEntity.READSET_BASE_INFO); } }); } protected KeepercontainerTbl findByIpPort(String ip, int port) { return queryHandler.handleQuery(new DalQuery<KeepercontainerTbl>() { @Override public KeepercontainerTbl doQuery() throws DalException { return dao.findByIpPort(ip, port, KeepercontainerTblEntity.READSET_FULL); } }); } protected boolean keeperContainerAlreadyExists(KeeperContainerCreateInfo createInfo) { KeepercontainerTbl existing = queryHandler.handleQuery(new DalQuery<KeepercontainerTbl>() { @Override public KeepercontainerTbl doQuery() throws DalException { return dao.findByIpPort(createInfo.getKeepercontainerIp(), createInfo.getKeepercontainerPort(), KeepercontainerTblEntity.READSET_CONTAINER_ADDRESS); } }); return existing != null; } protected void getOrCreateRestTemplate() { if (restTemplate == null) { synchronized (this) { if (restTemplate == null) { restTemplate = RestTemplateFactory.createCommonsHttpRestTemplate(10, 20, 3000, 5000); } } } } @VisibleForTesting protected void setRestTemplate(RestOperations restTemplate) { this.restTemplate = restTemplate; } protected boolean checkIpAndPort(String host, int port) { getOrCreateRestTemplate(); String url = "http://%s:%d/health"; try { return restTemplate.getForObject(String.format(url, host, port), Boolean.class); } catch 
(RestClientException e) { logger.error("[healthCheck]Http connect occur exception. ", e); } return false; } private class OrgInfoTranslator { private Map<Long, OrganizationTbl> cache = Maps.newHashMap(); private OrganizationTbl getFromXPipeId(long id) { if(id == 0L) { return null; } if(cache.containsKey(id)) { return cache.get(id); } OrganizationTbl org = organizationService.getOrganization(id); cache.put(id, org); return org; } } }
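/*
 * A minimal, self-contained sketch of the zone-filtering rule used by
 * filterKeeperFromSameAvailableZone above: keep at most one keeper container per active
 * availability zone and reject containers whose zone id is unknown. The Zone and Container
 * classes below are hypothetical stand-ins for AzTbl and KeepercontainerTbl; this is not
 * XPipe code.
 */
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

class AvailabilityZoneFilterSketch {

    /** Hypothetical stand-in for AzTbl. */
    static class Zone {
        final long id;
        final boolean active;
        Zone(long id, boolean active) { this.id = id; this.active = active; }
    }

    /** Hypothetical stand-in for KeepercontainerTbl. */
    static class Container {
        final String ip;
        final int port;
        final long zoneId;
        Container(String ip, int port, long zoneId) { this.ip = ip; this.port = port; this.zoneId = zoneId; }
    }

    static List<Container> onePerActiveZone(List<Container> containers, List<Zone> zones) {
        Map<Long, Zone> zoneById = new HashMap<>();
        for (Zone zone : zones) {
            zoneById.put(zone.id, zone);
        }
        Set<Long> usedZones = new HashSet<>();
        List<Container> result = new ArrayList<>();
        for (Container container : containers) {
            Zone zone = zoneById.get(container.zoneId);
            if (zone == null) {
                throw new IllegalStateException(String.format(
                        "container %s:%d has unknown zone id %d", container.ip, container.port, container.zoneId));
            }
            // Set.add returns false once a zone is already represented, so each active zone
            // contributes at most one container, in input order.
            if (zone.active && usedZones.add(zone.id)) {
                result.add(container);
            }
        }
        return result;
    }
}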
package org.lockss.safenet; import java.io.IOException; import java.net.URISyntaxException; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.JsonNode; import org.apache.http.NameValuePair; import org.apache.http.client.utils.URIBuilder; import org.apache.http.message.BasicNameValuePair; import org.lockss.app.BaseLockssManager; import org.lockss.app.ConfigurableManager; import org.lockss.config.Configuration; import org.lockss.util.IOUtil; import org.lockss.util.Logger; import org.lockss.util.UrlUtil; import org.lockss.util.urlconn.LockssUrlConnection; public class BaseEntitlementRegistryClient extends BaseLockssManager implements EntitlementRegistryClient, ConfigurableManager { private static final Logger log = Logger.getLogger(BaseEntitlementRegistryClient.class); public static final String PREFIX = Configuration.PREFIX + "safenet."; public static final String PARAM_ER_URI = PREFIX + "registryUri"; static final String DEFAULT_ER_URI = ""; public static final String PARAM_ER_APIKEY = PREFIX + "apiKey"; static final String DEFAULT_ER_APIKEY = ""; private static final DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd"); private ObjectMapper objectMapper; private String erUri; private String apiKey; public BaseEntitlementRegistryClient() { this.objectMapper = new ObjectMapper(); } public void setConfig(Configuration config, Configuration oldConfig, Configuration.Differences diffs) { if (diffs.contains(PREFIX)) { erUri = config.get(PARAM_ER_URI, DEFAULT_ER_URI); apiKey = config.get(PARAM_ER_APIKEY, DEFAULT_ER_APIKEY); } } public boolean isUserEntitled(String issn, String institution, String start, String end) throws IOException { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("api_key", apiKey); parameters.put("identifier_value", issn); parameters.put("institution", institution); parameters.put("start", start); parameters.put("end", end); JsonNode entitlements = callEntitlementRegistry("/entitlements", parameters); if (entitlements != null) { for(JsonNode entitlement : entitlements) { JsonNode entitlementInstitution = entitlement.get("institution"); if (entitlementInstitution != null && entitlementInstitution.asText().equals(institution)) { log.warning("TODO: Verify title and dates"); return true; } } // Valid request, but the entitlements don't match the information we passed, which should never happen throw new IOException("No matching entitlements returned from entitlement registry"); } //Valid request, no entitlements found return false; } private Date extractDate(String value) throws IOException { if ( value == null || value.equals("null")) { return null; } try { return dateFormat.parse(value); } catch ( ParseException e ) { throw new IOException("Could not parse date " + value); } } private Date extractDate(JsonNode node, String key) throws IOException { JsonNode value = node.get(key); if ( value == null ) { return null; } return extractDate(value.asText()); } public String getPublisher(String issn, String start, String end) throws IOException { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("identifier", issn); Date startDate = extractDate(start); Date endDate = extractDate(end); JsonNode titles = callEntitlementRegistry("/titles", parameters); if (titles != null) 
{ List<String> foundPublishers = new ArrayList<String>(); for(JsonNode title : titles) { JsonNode publishers = title.get("publishers"); for(JsonNode publisher : publishers) { Date foundStartDate = extractDate(publisher, "start"); Date foundEndDate = extractDate(publisher, "end"); if ( foundStartDate != null && ( startDate == null || foundStartDate.after(startDate) ) ) { continue; } if ( foundEndDate != null && ( endDate == null || foundEndDate.before(endDate) ) ) { continue; } foundPublishers.add(publisher.get("id").asText()); } } if (foundPublishers.size() > 1) { // Valid request, but there are multiple publishers for the date range, which should never happen throw new IOException("Multiple matching publishers returned from entitlement registry"); } if (foundPublishers.size() == 1) { return foundPublishers.get(0); } } // Valid request, no publisher found return null; } public PublisherWorkflow getPublisherWorkflow(String publisherGuid) throws IOException { Map<String, String> parameters = new HashMap<String, String>(); JsonNode publisher = callEntitlementRegistry("/publishers/"+publisherGuid, parameters); if (publisher != null) { JsonNode foundGuid = publisher.get("id"); if (foundGuid != null && foundGuid.asText().equals(publisherGuid)) { JsonNode foundWorkflow = publisher.get("workflow"); if(foundWorkflow != null) { try { return Enum.valueOf(PublisherWorkflow.class, foundWorkflow.asText().toUpperCase()); } catch (IllegalArgumentException e) { // Valid request, but workflow didn't match ones we've implemented, which should never happen throw new IOException("No valid workflow returned from entitlement registry: " + foundWorkflow.asText().toUpperCase()); } } } } // Valid request, but no valid workflow information was returned, which should never happen throw new IOException("No valid workflow returned from entitlement registry"); } public String getInstitution(String scope) throws IOException { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("scope", scope); JsonNode institutions = callEntitlementRegistry("/institutions", parameters); if (institutions != null) { if (institutions.size() == 0) { throw new IOException("No matching institutions returned from entitlement registry"); } if (institutions.size() > 1) { throw new IOException("Multiple matching institutions returned from entitlement registry"); } JsonNode institution = institutions.get(0); if (!scope.equals(institution.get("scope").asText())) { throw new IOException("No matching institutions returned from entitlement registry"); } return institution.get("id").asText(); } throw new IOException("No matching institutions returned from entitlement registry"); } private JsonNode callEntitlementRegistry(String endpoint, Map<String, String> parameters) throws IOException { return callEntitlementRegistry(endpoint, mapToPairs(parameters)); } private JsonNode callEntitlementRegistry(String endpoint, List<NameValuePair> parameters) throws IOException { LockssUrlConnection connection = null; try { URIBuilder builder = new URIBuilder(erUri); builder.setPath(builder.getPath() + endpoint); if(!parameters.isEmpty()) { builder.setParameters(parameters); } String url = builder.toString(); log.debug("Connecting to ER at " + url); connection = openConnection(url); connection.execute(); int responseCode = connection.getResponseCode(); if (responseCode == 200) { return objectMapper.readTree(connection.getResponseInputStream()); } else if (responseCode == 204) { // Valid request, but empty response return null; } else { throw 
new IOException("Error communicating with entitlement registry. Response was " + responseCode + " " + connection.getResponseMessage()); } } catch (URISyntaxException e) { throw new IOException("Couldn't contact entitlement registry", e); } finally { if(connection != null) { IOUtil.safeRelease(connection); } } } // protected so that it can be overriden with mock connections in tests protected LockssUrlConnection openConnection(String url) throws IOException { return UrlUtil.openConnection(url); } protected static List<NameValuePair> mapToPairs(Map<String, String> params) { List<NameValuePair> pairs = new ArrayList<NameValuePair>(); for(String key : params.keySet()) { pairs.add(new BasicNameValuePair(key, params.get(key))); } return pairs; } }
/** * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.testdriver.excel.extension; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import java.math.BigDecimal; import java.util.Calendar; import org.junit.Test; import com.asakusafw.testdriver.core.PropertyName; import com.asakusafw.testdriver.core.PropertyType; import com.asakusafw.testdriver.core.TestContext; import com.asakusafw.testdriver.core.VerifyContext; import com.asakusafw.testdriver.excel.ExcelRuleExtractor.FormatException; import com.asakusafw.testdriver.rule.ValuePredicate; /** * Test for {@link ExcelSheetOrdinalRule}. */ public class ExcelSheetOrdinalRuleTest { private static final Calendar NOW = Calendar.getInstance(); /** * simple case for integral value. * @throws Exception if failed */ @Test public void test_int() throws Exception { assertOk(PropertyType.INT, 5, "<", 9); assertNg(PropertyType.INT, 5, "<", 5); assertNg(PropertyType.INT, 5, "<", 1); assertNg(PropertyType.INT, 5, ">", 9); assertNg(PropertyType.INT, 5, ">", 5); assertOk(PropertyType.INT, 5, ">", 1); assertOk(PropertyType.INT, 5, "<=", 9); assertOk(PropertyType.INT, 5, "<=", 5); assertNg(PropertyType.INT, 5, "<=", 1); assertNg(PropertyType.INT, 5, ">=", 9); assertOk(PropertyType.INT, 5, ">=", 5); assertOk(PropertyType.INT, 5, ">=", 1); } /** * simple case for float value. * @throws Exception if failed */ @Test public void test_float() throws Exception { assertOk(PropertyType.FLOAT, 0.5, "<", 0.9); assertNg(PropertyType.FLOAT, 0.5, "<", 0.5); assertNg(PropertyType.FLOAT, 0.5, "<", 0.1); assertNg(PropertyType.FLOAT, 0.5, ">", 0.9); assertNg(PropertyType.FLOAT, 0.5, ">", 0.5); assertOk(PropertyType.FLOAT, 0.5, ">", 0.1); assertOk(PropertyType.FLOAT, 0.5, "<=", 0.9); assertOk(PropertyType.FLOAT, 0.5, "<=", 0.5); assertNg(PropertyType.FLOAT, 0.5, "<=", 0.1); assertNg(PropertyType.FLOAT, 0.5, ">=", 0.9); assertOk(PropertyType.FLOAT, 0.5, ">=", 0.5); assertOk(PropertyType.FLOAT, 0.5, ">=", 0.1); } /** * simple case for decimal value. * @throws Exception if failed */ @Test public void test_decimal() throws Exception { assertOk(PropertyType.DECIMAL, dec("5"), "<", dec("9")); assertNg(PropertyType.DECIMAL, dec("5"), "<", dec("5")); assertNg(PropertyType.DECIMAL, dec("5"), "<", dec("1")); assertNg(PropertyType.DECIMAL, dec("5"), ">", dec("9")); assertNg(PropertyType.DECIMAL, dec("5"), ">", dec("5")); assertOk(PropertyType.DECIMAL, dec("5"), ">", dec("1")); assertOk(PropertyType.DECIMAL, dec("5"), "<=", dec("9")); assertOk(PropertyType.DECIMAL, dec("5"), "<=", dec("5")); assertNg(PropertyType.DECIMAL, dec("5"), "<=", dec("1")); assertNg(PropertyType.DECIMAL, dec("5"), ">=", dec("9")); assertOk(PropertyType.DECIMAL, dec("5"), ">=", dec("5")); assertOk(PropertyType.DECIMAL, dec("5"), ">=", dec("1")); } /** * simple case for date value. 
* @throws Exception if failed */ @Test public void test_date() throws Exception { assertOk(PropertyType.DATE, date(5), "<", date(9)); assertNg(PropertyType.DATE, date(5), "<", date(5)); assertNg(PropertyType.DATE, date(5), "<", date(1)); assertNg(PropertyType.DATE, date(5), ">", date(9)); assertNg(PropertyType.DATE, date(5), ">", date(5)); assertOk(PropertyType.DATE, date(5), ">", date(1)); assertOk(PropertyType.DATE, date(5), "<=", date(9)); assertOk(PropertyType.DATE, date(5), "<=", date(5)); assertNg(PropertyType.DATE, date(5), "<=", date(1)); assertNg(PropertyType.DATE, date(5), ">=", date(9)); assertOk(PropertyType.DATE, date(5), ">=", date(5)); assertOk(PropertyType.DATE, date(5), ">=", date(1)); } /** * simple case for date-time value. * @throws Exception if failed */ @Test public void test_datetime() throws Exception { assertOk(PropertyType.DATETIME, datetime(5), "<", datetime(9)); assertNg(PropertyType.DATETIME, datetime(5), "<", datetime(5)); assertNg(PropertyType.DATETIME, datetime(5), "<", datetime(1)); assertNg(PropertyType.DATETIME, datetime(5), ">", datetime(9)); assertNg(PropertyType.DATETIME, datetime(5), ">", datetime(5)); assertOk(PropertyType.DATETIME, datetime(5), ">", datetime(1)); assertOk(PropertyType.DATETIME, datetime(5), "<=", datetime(9)); assertOk(PropertyType.DATETIME, datetime(5), "<=", datetime(5)); assertNg(PropertyType.DATETIME, datetime(5), "<=", datetime(1)); assertNg(PropertyType.DATETIME, datetime(5), ">=", datetime(9)); assertOk(PropertyType.DATETIME, datetime(5), ">=", datetime(5)); assertOk(PropertyType.DATETIME, datetime(5), ">=", datetime(1)); } /** * unsupported expression. * @throws Exception if failed */ @Test public void unsupported() throws Exception { ValuePredicate<?> pred = parse(PropertyType.INT, "?"); assertThat(pred, is(nullValue())); } /** * inconsistent type. 
* @throws Exception if failed */ @Test(expected = FormatException.class) public void inconsistent_type() throws Exception { parse(PropertyType.BOOLEAN, "<"); } private static void assertOk( PropertyType type, Object actual, String expr, Object expected) throws FormatException { ValuePredicate<Object> pred = parse(type, expr); assertThat(pred.describeExpected(expected, actual), pred.accepts(expected, actual), is(true)); } private static void assertNg( PropertyType type, Object actual, String expr, Object expected) throws FormatException { ValuePredicate<Object> pred = parse(type, expr); assertThat(pred.describeExpected(expected, actual), pred.accepts(expected, actual), is(false)); } private Number dec(String string) { return new BigDecimal(string); } private Calendar date(int offset) { Calendar calendar = Calendar.getInstance(); calendar.clear(); calendar.set(Calendar.YEAR, NOW.get(Calendar.YEAR)); calendar.set(Calendar.MONTH, NOW.get(Calendar.MONTH)); calendar.set(Calendar.DATE, NOW.get(Calendar.DATE)); calendar.add(Calendar.DATE, offset); return calendar; } private Calendar datetime(int offset) { Calendar calendar = Calendar.getInstance(); calendar.clear(); calendar.set(Calendar.YEAR, NOW.get(Calendar.YEAR)); calendar.set(Calendar.MONTH, NOW.get(Calendar.MONTH)); calendar.set(Calendar.DATE, NOW.get(Calendar.DATE)); calendar.set(Calendar.HOUR_OF_DAY, NOW.get(Calendar.HOUR_OF_DAY)); calendar.set(Calendar.MINUTE, NOW.get(Calendar.MINUTE)); calendar.set(Calendar.SECOND, NOW.get(Calendar.SECOND)); calendar.add(Calendar.SECOND, offset); return calendar; } @SuppressWarnings("unchecked") private static ValuePredicate<Object> parse(PropertyType type, String expr) throws FormatException { VerifyContext context = new VerifyContext(new TestContext.Empty()); ExcelSheetOrdinalRule rule = new ExcelSheetOrdinalRule(); return (ValuePredicate<Object>) rule.resolve(context, PropertyName.newInstance("testing"), type, expr); } }
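/*
 * A hedged sketch of the comparison semantics the assertions above encode: each operator reduces
 * to Comparable.compareTo() between the actual and expected values. This generic helper is
 * illustrative only and is not the Asakusa ExcelSheetOrdinalRule implementation.
 */
import java.util.function.BiPredicate;

final class OrdinalPredicateSketch {

    /** Returns a predicate testing "actual OP expected", or null for an unsupported operator. */
    static <T extends Comparable<T>> BiPredicate<T, T> forOperator(String expr) {
        switch (expr) {
            case "<":  return (expected, actual) -> actual.compareTo(expected) < 0;
            case ">":  return (expected, actual) -> actual.compareTo(expected) > 0;
            case "<=": return (expected, actual) -> actual.compareTo(expected) <= 0;
            case ">=": return (expected, actual) -> actual.compareTo(expected) >= 0;
            default:   return null; // unsupported expression, mirroring the unsupported() test
        }
    }

    public static void main(String[] args) {
        // Mirrors test_int: actual 5 with "<" against expected 9 is accepted, against 5 is not.
        BiPredicate<Integer, Integer> lessThan = forOperator("<");
        System.out.println(lessThan.test(9, 5)); // true  (5 < 9)
        System.out.println(lessThan.test(5, 5)); // false (5 < 5 fails)
        BiPredicate<Integer, Integer> atLeast = forOperator(">=");
        System.out.println(atLeast.test(1, 5));  // true  (5 >= 1)
    }
}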
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.pubsub.v1; import com.google.api.core.AbstractApiService; import com.google.api.core.ApiClock; import com.google.api.gax.batching.FlowController; import com.google.api.gax.core.Distribution; import com.google.cloud.pubsub.v1.MessageDispatcher.AckProcessor; import com.google.cloud.pubsub.v1.MessageDispatcher.PendingModifyAckDeadline; import com.google.common.collect.Lists; import com.google.common.primitives.Ints; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.pubsub.v1.AcknowledgeRequest; import com.google.pubsub.v1.ModifyAckDeadlineRequest; import com.google.pubsub.v1.PullRequest; import com.google.pubsub.v1.PullResponse; import com.google.pubsub.v1.SubscriberGrpc.SubscriberFutureStub; import com.google.pubsub.v1.Subscription; import java.util.List; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nullable; import org.threeten.bp.Duration; /** * Implementation of {@link AckProcessor} based on Cloud Pub/Sub pull and acknowledge operations. */ final class PollingSubscriberConnection extends AbstractApiService implements AckProcessor { static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(60); private static final int MAX_PER_REQUEST_CHANGES = 1000; private static final int DEFAULT_MAX_MESSAGES = 1000; private static final Duration INITIAL_BACKOFF = Duration.ofMillis(100); // 100ms private static final Duration MAX_BACKOFF = Duration.ofSeconds(10); // 10s private static final Logger logger = Logger.getLogger(PollingSubscriberConnection.class.getName()); private final Subscription subscription; private final ScheduledExecutorService pollingExecutor; private final SubscriberFutureStub stub; private final MessageDispatcher messageDispatcher; private final int maxDesiredPulledMessages; public PollingSubscriberConnection( Subscription subscription, MessageReceiver receiver, Duration ackExpirationPadding, Duration maxAckExtensionPeriod, Distribution ackLatencyDistribution, SubscriberFutureStub stub, FlowController flowController, @Nullable Long maxDesiredPulledMessages, ScheduledExecutorService executor, ScheduledExecutorService systemExecutor, ApiClock clock) { this.subscription = subscription; this.pollingExecutor = systemExecutor; this.stub = stub; messageDispatcher = new MessageDispatcher( receiver, this, ackExpirationPadding, maxAckExtensionPeriod, ackLatencyDistribution, flowController, executor, systemExecutor, clock); messageDispatcher.setMessageDeadlineSeconds(subscription.getAckDeadlineSeconds()); this.maxDesiredPulledMessages = maxDesiredPulledMessages != null ? 
Ints.saturatedCast(maxDesiredPulledMessages) : DEFAULT_MAX_MESSAGES; } @Override protected void doStart() { logger.config("Starting subscriber."); pullMessages(INITIAL_BACKOFF); notifyStarted(); } @Override protected void doStop() { messageDispatcher.stop(); notifyStopped(); } private ListenableFuture<PullResponse> pullMessages(final Duration backoff) { if (!isAlive()) { return Futures.immediateCancelledFuture(); } ListenableFuture<PullResponse> pullResult = stub.pull( PullRequest.newBuilder() .setSubscription(subscription.getName()) .setMaxMessages(maxDesiredPulledMessages) .setReturnImmediately(false) .build()); Futures.addCallback( pullResult, new FutureCallback<PullResponse>() { @Override public void onSuccess(PullResponse pullResponse) { if (pullResponse.getReceivedMessagesCount() == 0) { // No messages in response, possibly caught up in backlog, we backoff to avoid // slamming the server. pollingExecutor.schedule( new Runnable() { @Override public void run() { Duration newBackoff = backoff.multipliedBy(2); if (newBackoff.compareTo(MAX_BACKOFF) > 0) { newBackoff = MAX_BACKOFF; } pullMessages(newBackoff); } }, backoff.toMillis(), TimeUnit.MILLISECONDS); return; } messageDispatcher.processReceivedMessages( pullResponse.getReceivedMessagesList(), new Runnable() { @Override public void run() { pullMessages(INITIAL_BACKOFF); } }); } @Override public void onFailure(Throwable cause) { if (!isAlive()) { // we don't care about subscription failures when we're no longer running. logger.log(Level.FINE, "pull failure after service no longer running", cause); return; } if (StatusUtil.isRetryable(cause)) { logger.log(Level.WARNING, "Failed to pull messages (recoverable): ", cause); pollingExecutor.schedule( new Runnable() { @Override public void run() { Duration newBackoff = backoff.multipliedBy(2); if (newBackoff.compareTo(MAX_BACKOFF) > 0) { newBackoff = MAX_BACKOFF; } pullMessages(newBackoff); } }, backoff.toMillis(), TimeUnit.MILLISECONDS); } else { messageDispatcher.stop(); notifyFailed(cause); } } }, pollingExecutor); return pullResult; } private boolean isAlive() { // Read state only once. Because of threading, different calls can give different results. State state = state(); return state == State.RUNNING || state == State.STARTING; } @Override public void sendAckOperations( List<String> acksToSend, List<PendingModifyAckDeadline> ackDeadlineExtensions) { // Send the modify ack deadlines in batches as not to exceed the max request // size. for (PendingModifyAckDeadline modifyAckDeadline : ackDeadlineExtensions) { for (List<String> ackIdChunk : Lists.partition(modifyAckDeadline.ackIds, MAX_PER_REQUEST_CHANGES)) { stub.withDeadlineAfter(DEFAULT_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS) .modifyAckDeadline( ModifyAckDeadlineRequest.newBuilder() .setSubscription(subscription.getName()) .addAllAckIds(ackIdChunk) .setAckDeadlineSeconds(modifyAckDeadline.deadlineExtensionSeconds) .build()); } } for (List<String> ackChunk : Lists.partition(acksToSend, MAX_PER_REQUEST_CHANGES)) { stub.withDeadlineAfter(DEFAULT_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS) .acknowledge( AcknowledgeRequest.newBuilder() .setSubscription(subscription.getName()) .addAllAckIds(ackChunk) .build()); } } }
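/*
 * A hedged sketch of the retry policy used above when a pull returns no messages or fails with a
 * retryable error: the delay doubles on every attempt and is capped at MAX_BACKOFF.
 * java.time.Duration is used here to keep the example self-contained; the class above uses the
 * org.threeten.bp backport with the same multipliedBy/compareTo calls.
 */
import java.time.Duration;

final class PullBackoffSketch {
    static final Duration INITIAL_BACKOFF = Duration.ofMillis(100);
    static final Duration MAX_BACKOFF = Duration.ofSeconds(10);

    /** Doubles the current backoff, never exceeding MAX_BACKOFF. */
    static Duration nextBackoff(Duration current) {
        Duration doubled = current.multipliedBy(2);
        return doubled.compareTo(MAX_BACKOFF) > 0 ? MAX_BACKOFF : doubled;
    }

    public static void main(String[] args) {
        Duration backoff = INITIAL_BACKOFF;
        // Prints PT0.2S, PT0.4S, ... until the 10 second cap is reached and held.
        for (int attempt = 0; attempt < 10; attempt++) {
            backoff = nextBackoff(backoff);
            System.out.println(backoff);
        }
    }
}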
/* * Copyright 2009 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.debugging.sourcemap; import com.google.debugging.sourcemap.SourceMapGeneratorV2.LineMapEncoder; import com.google.javascript.jscomp.SourceMap; import com.google.javascript.jscomp.SourceMap.Format; import java.io.IOException; /** * Tests for {@link SourceMap}. * */ public class SourceMapGeneratorV2Test extends SourceMapTestCase { @Override SourceMapConsumer getSourceMapConsumer() { return new SourceMapConsumerV2(); } @Override Format getSourceMapFormat() { return SourceMap.Format.V2; } @Override public void setUp() { detailLevel = SourceMap.DetailLevel.ALL; } public void testBasicMapping() throws Exception { compileAndCheck("function __BASIC__() { }"); } public void testBasicMappingGoldenOutput() throws Exception { // Empty source map test checkSourceMap("function __BASIC__() { }", //"/** Source Map **/\n" + "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + "\"lineCount\":1,\n" + "\"lineMaps\":[\"cAkBEBEB\"],\n" + "\"mappings\":[[0,1,9,0],\n" + "[0,1,9,0],\n" + "[0,1,18],\n" + "[0,1,21],\n" + "],\n" + "\"sources\":[\"testcode\"],\n" + "\"names\":[\"__BASIC__\"]\n" + "}\n"); } public void testLiteralMappings() throws Exception { compileAndCheck("function __BASIC__(__PARAM1__, __PARAM2__) { " + "var __VAR__ = '__STR__'; }"); } public void testLiteralMappingsGoldenOutput() throws Exception { // Empty source map test checkSourceMap("function __BASIC__(__PARAM1__, __PARAM2__) { " + "var __VAR__ = '__STR__'; }", //"/** Source Map **/\n" + "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + "\"lineCount\":1,\n" + "\"lineMaps\":[\"cAkBABkBA/kCA+ADMBcBgBA9\"],\n" + "\"mappings\":[[0,1,9,0],\n" + "[0,1,9,0],\n" + "[0,1,18],\n" + "[0,1,19,1],\n" + "[0,1,31,2],\n" + "[0,1,43],\n" + "[0,1,45],\n" + "[0,1,49,3],\n" + "[0,1,59],\n" + "],\n" + "\"sources\":[\"testcode\"],\n" + "\"names\":[" + "\"__BASIC__\",\"__PARAM1__\",\"__PARAM2__\"," + "\"__VAR__\"]\n" + "}\n"); } public void testMultilineMapping() throws Exception { compileAndCheck("function __BASIC__(__PARAM1__, __PARAM2__) {\n" + "var __VAR__ = '__STR__';\n" + "var __ANO__ = \"__STR2__\";\n" + "}"); } public void testMultiFunctionMapping() throws Exception { compileAndCheck("function __BASIC__(__PARAM1__, __PARAM2__) {\n" + "var __VAR__ = '__STR__';\n" + "var __ANO__ = \"__STR2__\";\n" + "}\n\n" + "function __BASIC2__(__PARAM3__, __PARAM4__) {\n" + "var __VAR2__ = '__STR2__';\n" + "var __ANO2__ = \"__STR3__\";\n" + "}\n\n"); } public void testGoldenOutput0() throws Exception { // Empty source map test checkSourceMap("", "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + "\"lineCount\":1,\n" + "\"lineMaps\":[\"\"],\n" + "\"mappings\":[],\n" + "\"sources\":[\"testcode\"],\n" + "\"names\":[]\n" + "}\n"); } public void testGoldenOutput1() throws Exception { detailLevel = SourceMap.DetailLevel.ALL; checkSourceMap( "function f(foo, bar) { foo = foo + bar + 2; return foo; }", "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + 
"\"lineCount\":1,\n" + "\"lineMaps\":[\"cAEBABIBA/ICA+ADICA/ICA+IDA9AEYBMBA5\"],\n" + "\"mappings\":[[0,1,9,0],\n" + "[0,1,9,0],\n" + "[0,1,10],\n" + "[0,1,11,1],\n" + "[0,1,16,2],\n" + "[0,1,21],\n" + "[0,1,23],\n" + "[0,1,23,1],\n" + "[0,1,29,1],\n" + "[0,1,35,2],\n" + "[0,1,41],\n" + "[0,1,44],\n" + "[0,1,51,1],\n" + "],\n" + "\"sources\":[\"testcode\"],\n" + "\"names\":[\"f\",\"foo\",\"bar\"]\n" + "}\n"); detailLevel = SourceMap.DetailLevel.SYMBOLS; checkSourceMap("function f(foo, bar) { foo = foo + bar + 2; return foo; }", "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + "\"lineCount\":1,\n" + "\"lineMaps\":[\"cAEBA/ICA+IDE9IEA8IFA7IGg6MHA5\"],\n" + "\"mappings\":[[0,1,9,0],\n" + "[0,1,9,0],\n" + "[0,1,11,1],\n" + "[0,1,16,2],\n" + "[0,1,23,1],\n" + "[0,1,29,1],\n" + "[0,1,35,2],\n" + "[0,1,51,1],\n" + "],\n" + "\"sources\":[\"testcode\"],\n" + "\"names\":[\"f\",\"foo\",\"bar\"]\n" + "}\n"); } public void testGoldenOutput2() throws Exception { checkSourceMap("function f(foo, bar) {\r\n\n\n\nfoo = foo + bar + foo;" + "\nreturn foo;\n}", "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + "\"lineCount\":1,\n" + "\"lineMaps\":[" + "\"cAEBABIBA/ICA+ADICA/ICA+IDA9IEYBMBA5\"],\n" + "\"mappings\":[[0,1,9,0],\n" + "[0,1,9,0],\n" + "[0,1,10],\n" + "[0,1,11,1],\n" + "[0,1,16,2],\n" + "[0,1,21],\n" + "[0,5,0],\n" + "[0,5,0,1],\n" + "[0,5,6,1],\n" + "[0,5,12,2],\n" + "[0,5,18,1],\n" + "[0,6,0],\n" + "[0,6,7,1],\n" + "],\n" + "\"sources\":[\"testcode\"],\n" + "\"names\":[\"f\",\"foo\",\"bar\"]\n" + "}\n"); } public void testGoldenOutput3() throws Exception { checkSourceMap("c:\\myfile.js", "foo;", "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + "\"lineCount\":1,\n" + "\"lineMaps\":[\"IA\"],\n" + "\"mappings\":[[0,1,0,0],\n" + "],\n" + "\"sources\":[\"c:\\\\myfile.js\"],\n" + "\"names\":[\"foo\"]\n" + "}\n"); } public void testGoldenOutput4() throws Exception { checkSourceMap("c:\\myfile.js", "foo; boo; goo;", "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + "\"lineCount\":1,\n" + "\"lineMaps\":[\"IAMBMB\"],\n" + "\"mappings\":[[0,1,0,0],\n" + "[0,1,7,1],\n" + "[0,1,14,2],\n" + "],\n" + "\"sources\":[\"c:\\\\myfile.js\"],\n" + "\"names\":[\"foo\",\"boo\",\"goo\"]\n" + "}\n"); } public void testGoldenOutput5() throws Exception { detailLevel = SourceMap.DetailLevel.ALL; checkSourceMap("c:\\myfile.js", "/** @preserve\n" + " * this is a test.\n" + " */\n" + "var foo=a + 'this is a really long line that will force the" + " mapping to span multiple lines 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 
123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + "' + c + d + e;", "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + "\"lineCount\":6,\n" + "\"lineMaps\":[\"\",\n" + "\"\",\n" + "\"\",\n" + "\"\",\n" + "\"MAMBABA/!!AUSC\",\n" + "\"AEA9AEA8AF\"],\n" + "\"mappings\":[[0,4,0],\n" + "[0,4,4,0],\n" + "[0,4,8,1],\n" + "[0,4,12],\n" + "[0,4,1314,2],\n" + "[0,4,1318,3],\n" + "[0,4,1322,4],\n" + "],\n" + "\"sources\":[\"c:\\\\myfile.js\"],\n" + "\"names\":[\"foo\",\"a\",\"c\",\"d\",\"e\"]\n" + "}\n"); detailLevel = SourceMap.DetailLevel.SYMBOLS; checkSourceMap("c:\\myfile.js", "/** @preserve\n" + " * this is a test.\n" + " */\n" + "var foo=a + 'this is a really long line that will force the" + " mapping to span multiple lines 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + " 123456789 123456789 123456789 123456789 123456789" + "' + c + d + e;", "{\n" + "\"version\":2,\n" + "\"file\":\"testcode\",\n" + "\"lineCount\":6,\n" + "\"lineMaps\":[\"\",\n" + "\"\",\n" + "\"\",\n" + "\"\",\n" + "\"M/MBAB\",\n" + "\"ACA+ADA9AE\"],\n" + "\"mappings\":[[0,4,4,0],\n" + "[0,4,8,1],\n" + "[0,4,1314,2],\n" + "[0,4,1318,3],\n" + "[0,4,1322,4],\n" + "],\n" + "\"sources\":[\"c:\\\\myfile.js\"],\n" + "\"names\":[\"foo\",\"a\",\"c\",\"d\",\"e\"]\n" + "}\n"); } public void testBasicDeterminism() throws Exception { RunResult result1 = compile("file1", "foo;", "file2", "bar;"); RunResult result2 = compile("file2", "foo;", "file1", "bar;"); String map1 = getSourceMap(result1); String map2 = getSourceMap(result2); // Assert that the files section of the maps are the same. The actual // entries will differ, so we cannot do a simple full comparison. // Line 5 has the file information. 
String files1 = map1.split("\n")[4]; String files2 = map2.split("\n")[4]; assertEquals(files1, files2); } private int getRelativeId(int id, int lastId) { int length = LineMapEncoder.getRelativeMappingIdLength(id, lastId); int result = LineMapEncoder.getRelativeMappingId(id, length, lastId); int inverse = SourceMapLineDecoder.getIdFromRelativeId( result, length, lastId); assertEquals(id, inverse); return result; } public void testEncodingRelativeId() { assertEquals(0, getRelativeId(0, 0)); assertEquals(64 + (-1), getRelativeId(-1, 0)); assertEquals(64 + (-32), getRelativeId(0, 32)); assertEquals(31, getRelativeId(31, 0)); assertEquals(4096 + (-33), getRelativeId(0, 33)); assertEquals(32, getRelativeId(32, 0)); } public void testEncodingIdLength() { assertEquals(1, LineMapEncoder.getRelativeMappingIdLength(0, 0)); assertEquals(1, LineMapEncoder.getRelativeMappingIdLength(-1, 0)); assertEquals(1, LineMapEncoder.getRelativeMappingIdLength(0, 32)); assertEquals(1, LineMapEncoder.getRelativeMappingIdLength(31, 0)); assertEquals(2, LineMapEncoder.getRelativeMappingIdLength(0, 33)); assertEquals(2, LineMapEncoder.getRelativeMappingIdLength(32, 0)); assertEquals(2, LineMapEncoder.getRelativeMappingIdLength(2047, 0)); assertEquals(3, LineMapEncoder.getRelativeMappingIdLength(2048, 0)); assertEquals(2, LineMapEncoder.getRelativeMappingIdLength(0, 2048)); assertEquals(3, LineMapEncoder.getRelativeMappingIdLength(0, 2049)); } private String getEntry(int id, int lastId, int reps) throws IOException { StringBuilder sb = new StringBuilder(); LineMapEncoder.encodeEntry(sb, id, lastId, reps); return sb.toString(); } public void testEncoding() throws IOException { assertEquals("AA", getEntry(0, 0, 1)); assertEquals("EA", getEntry(0, 0, 2)); assertEquals("8A", getEntry(0, 0, 16)); assertEquals("!AQA", getEntry(0, 0, 17)); assertEquals("!ARA", getEntry(0, 0, 18)); assertEquals("!A+A", getEntry(0, 0, 63)); assertEquals("!A/A", getEntry(0, 0, 64)); assertEquals("!!ABAA", getEntry(0, 0, 65)); assertEquals("!!A//A", getEntry(0, 0, 4096)); assertEquals("!!!ABAAA", getEntry(0, 0, 4097)); assertEquals("Af", getEntry(31, 0, 1)); assertEquals("BAg", getEntry(32, 0, 1)); assertEquals("AB", getEntry(32, 31, 1)); assertEquals("!AQf", getEntry(31, 0, 17)); assertEquals("!BQAg", getEntry(32, 0, 17)); assertEquals("!AQB", getEntry(32, 31, 17)); assertEquals("!A/B", getEntry(32, 31, 64)); assertEquals("!!ABAB", getEntry(32, 31, 65)); } }
package org.apereo.cas.web.config; import org.apereo.cas.CentralAuthenticationService; import org.apereo.cas.authentication.AuthenticationEventExecutionPlan; import org.apereo.cas.authentication.AuthenticationServiceSelectionPlan; import org.apereo.cas.authentication.AuthenticationSystemSupport; import org.apereo.cas.authentication.PrincipalElectionStrategy; import org.apereo.cas.authentication.adaptive.AdaptiveAuthenticationPolicy; import org.apereo.cas.authentication.principal.ServiceFactory; import org.apereo.cas.configuration.CasConfigurationProperties; import org.apereo.cas.logout.LogoutManager; import org.apereo.cas.services.ServicesManager; import org.apereo.cas.ticket.registry.TicketRegistrySupport; import org.apereo.cas.util.CollectionUtils; import org.apereo.cas.web.FlowExecutionExceptionResolver; import org.apereo.cas.web.flow.GatewayServicesManagementCheck; import org.apereo.cas.web.flow.GenerateServiceTicketAction; import org.apereo.cas.web.flow.ServiceAuthorizationCheck; import org.apereo.cas.web.flow.SingleSignOnParticipationStrategy; import org.apereo.cas.web.flow.actions.InitialAuthenticationAction; import org.apereo.cas.web.flow.login.CreateTicketGrantingTicketAction; import org.apereo.cas.web.flow.login.GenericSuccessViewAction; import org.apereo.cas.web.flow.login.InitialAuthenticationRequestValidationAction; import org.apereo.cas.web.flow.login.InitialFlowSetupAction; import org.apereo.cas.web.flow.login.InitializeLoginAction; import org.apereo.cas.web.flow.login.RedirectUnauthorizedServiceUrlAction; import org.apereo.cas.web.flow.login.RenderLoginAction; import org.apereo.cas.web.flow.login.SendTicketGrantingTicketAction; import org.apereo.cas.web.flow.login.ServiceWarningAction; import org.apereo.cas.web.flow.login.TicketGrantingTicketCheckAction; import org.apereo.cas.web.flow.logout.FrontChannelLogoutAction; import org.apereo.cas.web.flow.logout.LogoutAction; import org.apereo.cas.web.flow.logout.LogoutViewSetupAction; import org.apereo.cas.web.flow.logout.TerminateSessionAction; import org.apereo.cas.web.flow.resolver.CasDelegatingWebflowEventResolver; import org.apereo.cas.web.flow.resolver.CasWebflowEventResolver; import org.apereo.cas.web.support.ArgumentExtractor; import org.apereo.cas.web.support.CookieRetrievingCookieGenerator; import org.springframework.beans.factory.ObjectProvider; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.context.config.annotation.RefreshScope; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.transaction.annotation.EnableTransactionManagement; import org.springframework.web.servlet.HandlerExceptionResolver; import org.springframework.webflow.execution.Action; /** * This is {@link CasSupportActionsConfiguration}. 
* * @author Misagh Moayyed * @since 5.0.0 */ @Configuration("casSupportActionsConfiguration") @EnableConfigurationProperties(CasConfigurationProperties.class) @EnableTransactionManagement(proxyTargetClass = true) public class CasSupportActionsConfiguration { @Autowired private ApplicationContext applicationContext; @Autowired @Qualifier("authenticationEventExecutionPlan") private AuthenticationEventExecutionPlan authenticationEventExecutionPlan; @Autowired @Qualifier("serviceTicketRequestWebflowEventResolver") private CasWebflowEventResolver serviceTicketRequestWebflowEventResolver; @Autowired @Qualifier("initialAuthenticationAttemptWebflowEventResolver") private CasDelegatingWebflowEventResolver initialAuthenticationAttemptWebflowEventResolver; @Autowired @Qualifier("servicesManager") private ServicesManager servicesManager; @Autowired @Qualifier("ticketGrantingTicketCookieGenerator") private ObjectProvider<CookieRetrievingCookieGenerator> ticketGrantingTicketCookieGenerator; @Autowired @Qualifier("warnCookieGenerator") private ObjectProvider<CookieRetrievingCookieGenerator> warnCookieGenerator; @Autowired private CasConfigurationProperties casProperties; @Autowired @Qualifier("webApplicationServiceFactory") private ServiceFactory webApplicationServiceFactory; @Autowired @Qualifier("adaptiveAuthenticationPolicy") private AdaptiveAuthenticationPolicy adaptiveAuthenticationPolicy; @Autowired @Qualifier("centralAuthenticationService") private CentralAuthenticationService centralAuthenticationService; @Autowired @Qualifier("defaultAuthenticationSystemSupport") private AuthenticationSystemSupport authenticationSystemSupport; @Autowired @Qualifier("logoutManager") private LogoutManager logoutManager; @Autowired @Qualifier("defaultTicketRegistrySupport") private TicketRegistrySupport ticketRegistrySupport; @Autowired @Qualifier("rankedAuthenticationProviderWebflowEventResolver") private CasWebflowEventResolver rankedAuthenticationProviderWebflowEventResolver; @Autowired @Qualifier("authenticationServiceSelectionPlan") private AuthenticationServiceSelectionPlan authenticationRequestServiceSelectionStrategies; @Autowired @Qualifier("singleSignOnParticipationStrategy") private SingleSignOnParticipationStrategy webflowSingleSignOnParticipationStrategy; @Autowired @Qualifier("principalElectionStrategy") private PrincipalElectionStrategy principalElectionStrategy; @Bean @RefreshScope public HandlerExceptionResolver errorHandlerResolver() { return new FlowExecutionExceptionResolver(); } @ConditionalOnMissingBean(name = "authenticationViaFormAction") @Bean @RefreshScope public Action authenticationViaFormAction() { return new InitialAuthenticationAction(initialAuthenticationAttemptWebflowEventResolver, serviceTicketRequestWebflowEventResolver, adaptiveAuthenticationPolicy); } @RefreshScope @ConditionalOnMissingBean(name = "serviceAuthorizationCheck") @Bean public Action serviceAuthorizationCheck() { return new ServiceAuthorizationCheck(this.servicesManager, authenticationRequestServiceSelectionStrategies); } @RefreshScope @ConditionalOnMissingBean(name = "sendTicketGrantingTicketAction") @Bean public Action sendTicketGrantingTicketAction() { return new SendTicketGrantingTicketAction(centralAuthenticationService, ticketGrantingTicketCookieGenerator.getIfAvailable(), webflowSingleSignOnParticipationStrategy); } @RefreshScope @ConditionalOnMissingBean(name = "createTicketGrantingTicketAction") @Bean public Action createTicketGrantingTicketAction() { return new 
CreateTicketGrantingTicketAction(centralAuthenticationService, authenticationSystemSupport, ticketRegistrySupport); } @RefreshScope @Bean @ConditionalOnMissingBean(name = "logoutAction") public Action logoutAction() { return new LogoutAction(webApplicationServiceFactory, servicesManager, casProperties.getLogout()); } @ConditionalOnMissingBean(name = "initializeLoginAction") @Bean @RefreshScope public Action initializeLoginAction() { return new InitializeLoginAction(servicesManager); } @ConditionalOnMissingBean(name = "renderLoginFormAction") @Bean @RefreshScope public Action renderLoginFormAction() { return new RenderLoginAction(servicesManager, casProperties, applicationContext); } @RefreshScope @Bean @Autowired @ConditionalOnMissingBean(name = "initialFlowSetupAction") public Action initialFlowSetupAction(@Qualifier("argumentExtractor") final ArgumentExtractor argumentExtractor) { return new InitialFlowSetupAction(CollectionUtils.wrap(argumentExtractor), servicesManager, authenticationRequestServiceSelectionStrategies, ticketGrantingTicketCookieGenerator.getIfAvailable(), warnCookieGenerator.getIfAvailable(), casProperties, authenticationEventExecutionPlan); } @RefreshScope @Bean @ConditionalOnMissingBean(name = "initialAuthenticationRequestValidationAction") public Action initialAuthenticationRequestValidationAction() { return new InitialAuthenticationRequestValidationAction(rankedAuthenticationProviderWebflowEventResolver); } @RefreshScope @Bean @ConditionalOnMissingBean(name = "genericSuccessViewAction") public Action genericSuccessViewAction() { return new GenericSuccessViewAction(centralAuthenticationService, servicesManager, webApplicationServiceFactory, casProperties.getView().getDefaultRedirectUrl()); } @RefreshScope @Bean @ConditionalOnMissingBean(name = "redirectUnauthorizedServiceUrlAction") public Action redirectUnauthorizedServiceUrlAction() { return new RedirectUnauthorizedServiceUrlAction(servicesManager); } @Bean @RefreshScope @ConditionalOnMissingBean(name = "generateServiceTicketAction") public Action generateServiceTicketAction() { return new GenerateServiceTicketAction(authenticationSystemSupport, centralAuthenticationService, ticketRegistrySupport, authenticationRequestServiceSelectionStrategies, servicesManager, principalElectionStrategy); } @Bean @ConditionalOnMissingBean(name = "gatewayServicesManagementCheck") @RefreshScope public Action gatewayServicesManagementCheck() { return new GatewayServicesManagementCheck(this.servicesManager); } @Bean @ConditionalOnMissingBean(name = "frontChannelLogoutAction") public Action frontChannelLogoutAction() { return new FrontChannelLogoutAction(this.logoutManager); } @Bean @ConditionalOnMissingBean(name = "ticketGrantingTicketCheckAction") public Action ticketGrantingTicketCheckAction() { return new TicketGrantingTicketCheckAction(this.centralAuthenticationService); } @Bean @RefreshScope public Action terminateSessionAction() { return new TerminateSessionAction(centralAuthenticationService, ticketGrantingTicketCookieGenerator.getIfAvailable(), warnCookieGenerator.getIfAvailable(), casProperties.getLogout()); } @Bean public Action logoutViewSetupAction() { return new LogoutViewSetupAction(casProperties); } @Bean @ConditionalOnMissingBean(name = "serviceWarningAction") @RefreshScope public Action serviceWarningAction() { return new ServiceWarningAction(centralAuthenticationService, authenticationSystemSupport, ticketRegistrySupport, warnCookieGenerator.getIfAvailable(), principalElectionStrategy); } }
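/*
 * A hedged sketch of how the @ConditionalOnMissingBean(name = "...") guards above are meant to be
 * used: an application that registers its own Action under the same bean name causes the default
 * defined in CasSupportActionsConfiguration to back off (subject to the usual Spring
 * configuration-processing order). The overlay configuration and its no-op action below are
 * hypothetical examples, not part of CAS.
 */
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.webflow.execution.Action;
import org.springframework.webflow.execution.Event;
import org.springframework.webflow.execution.RequestContext;

@Configuration("customCasActionsOverlayConfiguration")
public class CustomCasActionsOverlayConfiguration {

    /** Supplies a bean named serviceWarningAction, so the guarded default above is skipped. */
    @Bean
    public Action serviceWarningAction() {
        return new Action() {
            @Override
            public Event execute(final RequestContext requestContext) {
                // Assumed no-op behaviour, purely for illustration.
                return new Event(this, "success");
            }
        };
    }
}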